From 567ba7650aa698089b3be14f8ca009024474b952 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?B=E1=B4=87=CA=80=C9=B4=E1=B4=85=20S=E1=B4=84=CA=9C?= =?UTF-8?q?=E1=B4=8F=CA=80=C9=A2=E1=B4=87=CA=80s?=
Date: Fri, 12 Aug 2022 11:28:14 +0200
Subject: [PATCH] ci: Move actions to separate repo (#29)

* ci: Move actions to separate repo
* ci: Remove dependency on main branch
---
 .github/actions/collect-charts/.gitignore | 2 -
 .github/actions/collect-charts/action.yml | 44 -
 .github/actions/collect-charts/dist/index.js | 20624 -----
 .../actions/collect-charts/dist/index.js.map | 1 -
 .../actions/collect-charts/dist/licenses.txt | 720 -
 .../collect-charts/dist/sourcemap-register.js | 1 -
 .../actions/collect-charts/package-lock.json | 567 -
 .github/actions/collect-charts/package.json | 25 -
 .github/actions/collect-charts/src/main.ts | 205 -
 .github/actions/collect-charts/tsconfig.json | 12 -
 .../actions/verify-chart-changelog/.gitignore | 2 -
 .../actions/verify-chart-changelog/action.yml | 16 -
 .../verify-chart-changelog/dist/index.js | 28244 ----------------
 .../verify-chart-changelog/dist/index.js.map | 1 -
 .../verify-chart-changelog/dist/licenses.txt | 836 -
 .../dist/sourcemap-register.js | 1 -
 .../verify-chart-changelog/package-lock.json | 803 -
 .../verify-chart-changelog/package.json | 30 -
 .../verify-chart-changelog/src/main.ts | 149 -
 .../verify-chart-changelog/src/schemas.ts | 44 -
 .../verify-chart-changelog/tsconfig.json | 12 -
 .../actions/verify-chart-version/.gitignore | 2 -
 .../actions/verify-chart-version/action.yml | 16 -
 .../verify-chart-version/dist/index.js | 23659 -------------
 .../verify-chart-version/dist/index.js.map | 1 -
 .../verify-chart-version/dist/licenses.txt | 777 -
 .../dist/sourcemap-register.js | 1 -
 .../verify-chart-version/package-lock.json | 632 -
 .../actions/verify-chart-version/package.json | 27 -
 .../actions/verify-chart-version/src/main.ts | 106 -
 .../verify-chart-version/tsconfig.json | 12 -
 .github/workflows/charts-lint.yaml | 4 +-
 .github/workflows/charts-release.yaml | 4 +-
 .github/workflows/pr-metadata.yaml | 2 +-
 .github/workflows/pr-validate.yaml | 10 +-
 35 files changed, 10 insertions(+), 77582 deletions(-)
 delete mode 100644 .github/actions/collect-charts/.gitignore
 delete mode 100644 .github/actions/collect-charts/action.yml
 delete mode 100644 .github/actions/collect-charts/dist/index.js
 delete mode 100644 .github/actions/collect-charts/dist/index.js.map
 delete mode 100644 .github/actions/collect-charts/dist/licenses.txt
 delete mode 100644 .github/actions/collect-charts/dist/sourcemap-register.js
 delete mode 100644 .github/actions/collect-charts/package-lock.json
 delete mode 100644 .github/actions/collect-charts/package.json
 delete mode 100644 .github/actions/collect-charts/src/main.ts
 delete mode 100644 .github/actions/collect-charts/tsconfig.json
 delete mode 100644 .github/actions/verify-chart-changelog/.gitignore
 delete mode 100644 .github/actions/verify-chart-changelog/action.yml
 delete mode 100644 .github/actions/verify-chart-changelog/dist/index.js
 delete mode 100644 .github/actions/verify-chart-changelog/dist/index.js.map
 delete mode 100644 .github/actions/verify-chart-changelog/dist/licenses.txt
 delete mode 100644 .github/actions/verify-chart-changelog/dist/sourcemap-register.js
 delete mode 100644 .github/actions/verify-chart-changelog/package-lock.json
 delete mode 100644 .github/actions/verify-chart-changelog/package.json
 delete mode 100644 .github/actions/verify-chart-changelog/src/main.ts
 delete mode 100644 .github/actions/verify-chart-changelog/src/schemas.ts
 delete mode 100644 .github/actions/verify-chart-changelog/tsconfig.json
 delete mode 100644 .github/actions/verify-chart-version/.gitignore
 delete mode 100644 .github/actions/verify-chart-version/action.yml
 delete mode 100644 .github/actions/verify-chart-version/dist/index.js
 delete mode 100644 .github/actions/verify-chart-version/dist/index.js.map
 delete mode 100644 .github/actions/verify-chart-version/dist/licenses.txt
 delete mode 100644 .github/actions/verify-chart-version/dist/sourcemap-register.js
 delete mode 100644 .github/actions/verify-chart-version/package-lock.json
 delete mode 100644 .github/actions/verify-chart-version/package.json
 delete mode 100644 .github/actions/verify-chart-version/src/main.ts
 delete mode 100644 .github/actions/verify-chart-version/tsconfig.json

diff --git a/.github/actions/collect-charts/.gitignore b/.github/actions/collect-charts/.gitignore
deleted file mode 100644
index 3b4e8dcf..00000000
--- a/.github/actions/collect-charts/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-lib/
-node_modules/
diff --git a/.github/actions/collect-charts/action.yml b/.github/actions/collect-charts/action.yml
deleted file mode 100644
index b93af09d..00000000
--- a/.github/actions/collect-charts/action.yml
+++ /dev/null
@@ -1,44 +0,0 @@
-name: 'Collect charts'
-description: 'Collects Helm charts based on the inputs and returns the lint/test matrix'
-inputs:
-  token:
-    description: >
-      GitHub token for GitHub API requests.
-    required: true
-    default: '${{ github.token }}'
-  chartsFolder:
-    description: >
-      Base folder containing all the charts
-    required: true
-    default: 'charts'
-  repoConfigFile:
-    description: >
-      Path to the repo config file that contains which charts
-      should be excluded from linting/testing.
-    required: true
-    default: '.ci/repo-config.yaml'
-  getAllCharts:
-    description: >
-      Instead of finding the changed charts, return
-      all the charts
-    required: false
-    default: 'false'
-  overrideCharts:
-    description: >
-      A JSON encoded array of charts to return instead of finding
-      the changed charts
-    required: false
-    default: '[]'
-outputs:
-  charts:
-    description: >
-      An array of all the Helm charts that contain changes
-  chartsToInstall:
-    description: >
-      An array of the charts that should be installed
-  chartsToLint:
-    description: >
-      An array of the charts that should be linted
-runs:
-  using: 'node16'
-  main: 'dist/index.js'
diff --git a/.github/actions/collect-charts/dist/index.js b/.github/actions/collect-charts/dist/index.js
deleted file mode 100644
index dfa118b7..00000000
--- a/.github/actions/collect-charts/dist/index.js
+++ /dev/null
@@ -1,20624 +0,0 @@
-require('./sourcemap-register.js');/******/ (() => { // webpackBootstrap
-/******/ var __webpack_modules__ = ({
-
-/***/ 3109:
-/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
-
-"use strict";
-
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ?
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const core = __importStar(__nccwpck_require__(2186)); -const github = __importStar(__nccwpck_require__(5438)); -const path = __importStar(__nccwpck_require__(5622)); -const YAML = __importStar(__nccwpck_require__(5065)); -const fs = __importStar(__nccwpck_require__(5630)); -function getErrorMessage(error) { - if (error instanceof Error) - return error.message; - return String(error); -} -function requestAddedModifiedFiles(baseCommit, headCommit, githubToken) { - return __awaiter(this, void 0, void 0, function* () { - let result = []; - const octokit = github.getOctokit(githubToken); - core.info(`Base commit: ${baseCommit}`); - core.info(`Head commit: ${headCommit}`); - // Use GitHub's compare two commits API. - const response = yield octokit.rest.repos.compareCommits({ - base: baseCommit, - head: headCommit, - owner: github.context.repo.owner, - repo: github.context.repo.repo, - }); - // Ensure that the request was successful. - if (response.status !== 200) { - throw new Error(`The GitHub API returned ${response.status}, expected 200.`); - } - // Ensure that the head commit is ahead of the base commit. - if (response.data.status !== "ahead") { - throw new Error(`The head commit for this ${github.context.eventName} event is not ahead of the base commit.`); - } - const responseFiles = response.data.files || []; - responseFiles.forEach((file) => { - const filestatus = file.status; - if (filestatus == "added" || filestatus == "modified") { - result.push(file.filename); - } - }); - return result; - }); -} -function requestAllFiles(commit, githubToken) { - return __awaiter(this, void 0, void 0, function* () { - let result = []; - const octokit = github.getOctokit(githubToken); - core.info(`Commit SHA: ${commit}`); - const response = yield octokit.rest.git.getTree({ - tree_sha: commit, - owner: github.context.repo.owner, - repo: github.context.repo.repo, - recursive: "true", - }); - // Ensure that the request was successful. 
- if (response.status !== 200) { - throw new Error(`The GitHub API returned ${response.status}, expected 200.`); - } - const responseTreeItems = response.data.tree || []; - responseTreeItems.forEach((item) => { - if (item.type == "blob" && item.path) { - result.push(item.path); - } - }); - return result; - }); -} -function getRepoConfig(configPath) { - return __awaiter(this, void 0, void 0, function* () { - // Ensure that the repo config file exists. - if (!(yield fs.pathExists(configPath))) { - throw new Error(`${configPath} Does not exist!`); - } - const repoConfigRaw = yield fs.readFile(configPath, "utf8"); - const repoConfig = yield YAML.parse(repoConfigRaw); - return repoConfig; - }); -} -function filterChangedCharts(files, parentFolder) { - const filteredChartFiles = files.filter((file) => { - const rel = path.relative(parentFolder, file); - return !rel.startsWith("../") && rel !== ".."; - }); - let changedCharts = []; - filteredChartFiles.forEach((file) => { - const absoluteParentFolder = path.resolve(parentFolder); - const absoluteFileDirname = path.resolve(path.dirname(file)); - const relativeFileDirname = absoluteFileDirname.slice(absoluteParentFolder.length + 1); - const chartPathParts = relativeFileDirname.split("/"); - const chartType = chartPathParts[0]; - const chartName = chartPathParts[1]; - if (chartType && chartName) { - changedCharts.push(`${chartType}/${chartName}`); - } - }); - // Return only unique items - return changedCharts.filter((item, index) => changedCharts.indexOf(item) === index); -} -function run() { - var _a, _b, _c, _d; - return __awaiter(this, void 0, void 0, function* () { - try { - const githubToken = core.getInput("token", { required: true }); - const chartsFolder = core.getInput("chartsFolder", { required: true }); - const repoConfigFilePath = core.getInput("repoConfigFile", { - required: true, - }); - let getAllCharts = core.getInput("getAllCharts", { required: false }); - const overrideCharts = core.getInput("overrideCharts", { required: false }); - const repoConfig = yield getRepoConfig(repoConfigFilePath); - core.info(`Repo configuration: ${JSON.stringify(repoConfig, undefined, 2)}`); - if (overrideCharts && overrideCharts != "[]") { - const responseCharts = YAML.parse(overrideCharts); - core.info(`Charts: ${JSON.stringify(responseCharts, undefined, 2)}`); - core.setOutput("charts", responseCharts); - return; - } - const eventName = github.context.eventName; - let baseCommit; - let headCommit; - switch (eventName) { - case "pull_request": - baseCommit = (_b = (_a = github.context.payload.pull_request) === null || _a === void 0 ? void 0 : _a.base) === null || _b === void 0 ? void 0 : _b.sha; - headCommit = (_d = (_c = github.context.payload.pull_request) === null || _c === void 0 ? void 0 : _c.head) === null || _d === void 0 ? 
void 0 : _d.sha; - break; - case "push": - baseCommit = github.context.payload.before; - headCommit = github.context.payload.after; - break; - case "workflow_dispatch": - getAllCharts = "true"; - baseCommit = ""; - headCommit = github.context.sha; - break; - default: - throw new Error(`This action only supports pull requests, pushes and workflow_dispatch,` + - `${github.context.eventName} events are not supported.`); - } - let responseFiles; - if (getAllCharts === "true") { - responseFiles = yield requestAllFiles(headCommit, githubToken); - } - else { - responseFiles = yield requestAddedModifiedFiles(baseCommit, headCommit, githubToken); - } - const changedCharts = filterChangedCharts(responseFiles, chartsFolder); - const chartsToInstall = changedCharts.filter((x) => !repoConfig["excluded-charts-install"].includes(x)); - const chartsToLint = changedCharts.filter((x) => !repoConfig["excluded-charts-lint"].includes(x)); - core.info(`Charts: ${JSON.stringify(changedCharts, undefined, 2)}`); - core.info(`Charts to lint: ${JSON.stringify(chartsToLint, undefined, 2)}`); - core.info(`Charts to install: ${JSON.stringify(chartsToInstall, undefined, 2)}`); - core.setOutput("charts", changedCharts); - core.setOutput("chartsToInstall", chartsToInstall); - core.setOutput("chartsToLint", chartsToLint); - } - catch (error) { - core.setFailed(getErrorMessage(error)); - } - }); -} -run(); - - -/***/ }), - -/***/ 7351: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.issue = exports.issueCommand = void 0; -const os = __importStar(__nccwpck_require__(2087)); -const utils_1 = __nccwpck_require__(5278); -/** - * Commands - * - * Command Format: - * ::name key=value,key=value::message - * - * Examples: - * ::warning::This is the message - * ::set-env name=MY_VAR::some value - */ -function issueCommand(command, properties, message) { - const cmd = new Command(command, properties, message); - process.stdout.write(cmd.toString() + os.EOL); -} -exports.issueCommand = issueCommand; -function issue(name, message = '') { - issueCommand(name, {}, message); -} -exports.issue = issue; -const CMD_STRING = '::'; -class Command { - constructor(command, properties, message) { - if (!command) { - command = 'missing.command'; - } - this.command = command; - this.properties = properties; - this.message = message; - } - toString() { - let cmdStr = CMD_STRING + this.command; - if (this.properties && Object.keys(this.properties).length > 0) { - cmdStr += ' '; - let first = true; - for (const key in this.properties) { - if (this.properties.hasOwnProperty(key)) { - const val = this.properties[key]; - if (val) { - if (first) { - first = false; - } - else { - cmdStr += ','; - } - cmdStr += `${key}=${escapeProperty(val)}`; - } - } - } - } - cmdStr += `${CMD_STRING}${escapeData(this.message)}`; - return cmdStr; - } -} -function escapeData(s) { - return utils_1.toCommandValue(s) - .replace(/%/g, '%25') - .replace(/\r/g, '%0D') - .replace(/\n/g, '%0A'); -} -function escapeProperty(s) { - return utils_1.toCommandValue(s) - .replace(/%/g, '%25') - .replace(/\r/g, '%0D') - .replace(/\n/g, '%0A') - .replace(/:/g, '%3A') - .replace(/,/g, '%2C'); -} -//# sourceMappingURL=command.js.map - -/***/ }), - -/***/ 2186: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0; -const command_1 = __nccwpck_require__(7351); -const file_command_1 = __nccwpck_require__(717); -const utils_1 = __nccwpck_require__(5278); -const os = __importStar(__nccwpck_require__(2087)); -const path = __importStar(__nccwpck_require__(5622)); -const oidc_utils_1 = __nccwpck_require__(8041); -/** - * The code to exit an action - */ -var ExitCode; -(function (ExitCode) { - /** - * A code indicating that the action was successful - */ - ExitCode[ExitCode["Success"] = 0] = "Success"; - /** - * A code indicating that the action was a failure - */ - ExitCode[ExitCode["Failure"] = 1] = "Failure"; -})(ExitCode = exports.ExitCode || (exports.ExitCode = {})); -//----------------------------------------------------------------------- -// Variables -//----------------------------------------------------------------------- -/** - * Sets env variable for this action and future actions in the job - * @param name the name of the variable to set - * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify - */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function exportVariable(name, val) { - const convertedVal = utils_1.toCommandValue(val); - process.env[name] = convertedVal; - const filePath = process.env['GITHUB_ENV'] || ''; - if (filePath) { - const delimiter = '_GitHubActionsFileCommandDelimeter_'; - const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`; - file_command_1.issueCommand('ENV', commandValue); - } - else { - command_1.issueCommand('set-env', { name }, convertedVal); - } -} -exports.exportVariable = exportVariable; -/** - * Registers a secret which will get masked from logs - * @param secret value of the secret - */ -function setSecret(secret) { - command_1.issueCommand('add-mask', {}, secret); -} -exports.setSecret = setSecret; -/** - * Prepends inputPath to the PATH (for this action and future actions) - * @param inputPath - */ -function addPath(inputPath) { - const filePath = process.env['GITHUB_PATH'] || ''; - if (filePath) { - file_command_1.issueCommand('PATH', inputPath); - } - else { - command_1.issueCommand('add-path', {}, inputPath); - } - process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`; -} -exports.addPath = addPath; -/** - * Gets the value of an input. - * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed. - * Returns an empty string if the value is not defined. 
- * - * @param name name of the input to get - * @param options optional. See InputOptions. - * @returns string - */ -function getInput(name, options) { - const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || ''; - if (options && options.required && !val) { - throw new Error(`Input required and not supplied: ${name}`); - } - if (options && options.trimWhitespace === false) { - return val; - } - return val.trim(); -} -exports.getInput = getInput; -/** - * Gets the values of an multiline input. Each value is also trimmed. - * - * @param name name of the input to get - * @param options optional. See InputOptions. - * @returns string[] - * - */ -function getMultilineInput(name, options) { - const inputs = getInput(name, options) - .split('\n') - .filter(x => x !== ''); - return inputs; -} -exports.getMultilineInput = getMultilineInput; -/** - * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification. - * Support boolean input list: `true | True | TRUE | false | False | FALSE` . - * The return value is also in boolean type. - * ref: https://yaml.org/spec/1.2/spec.html#id2804923 - * - * @param name name of the input to get - * @param options optional. See InputOptions. - * @returns boolean - */ -function getBooleanInput(name, options) { - const trueValue = ['true', 'True', 'TRUE']; - const falseValue = ['false', 'False', 'FALSE']; - const val = getInput(name, options); - if (trueValue.includes(val)) - return true; - if (falseValue.includes(val)) - return false; - throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` + - `Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); -} -exports.getBooleanInput = getBooleanInput; -/** - * Sets the value of an output. - * - * @param name name of the output to set - * @param value value to store. Non-string values will be converted to a string via JSON.stringify - */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function setOutput(name, value) { - process.stdout.write(os.EOL); - command_1.issueCommand('set-output', { name }, value); -} -exports.setOutput = setOutput; -/** - * Enables or disables the echoing of commands into stdout for the rest of the step. - * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set. - * - */ -function setCommandEcho(enabled) { - command_1.issue('echo', enabled ? 'on' : 'off'); -} -exports.setCommandEcho = setCommandEcho; -//----------------------------------------------------------------------- -// Results -//----------------------------------------------------------------------- -/** - * Sets the action status to failed. - * When the action exits it will be with an exit code of 1 - * @param message add error issue message - */ -function setFailed(message) { - process.exitCode = ExitCode.Failure; - error(message); -} -exports.setFailed = setFailed; -//----------------------------------------------------------------------- -// Logging Commands -//----------------------------------------------------------------------- -/** - * Gets whether Actions Step Debug is on or not - */ -function isDebug() { - return process.env['RUNNER_DEBUG'] === '1'; -} -exports.isDebug = isDebug; -/** - * Writes debug message to user log - * @param message debug message - */ -function debug(message) { - command_1.issueCommand('debug', {}, message); -} -exports.debug = debug; -/** - * Adds an error issue - * @param message error issue message. 
Errors will be converted to string via toString() - * @param properties optional properties to add to the annotation. - */ -function error(message, properties = {}) { - command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); -} -exports.error = error; -/** - * Adds a warning issue - * @param message warning issue message. Errors will be converted to string via toString() - * @param properties optional properties to add to the annotation. - */ -function warning(message, properties = {}) { - command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); -} -exports.warning = warning; -/** - * Adds a notice issue - * @param message notice issue message. Errors will be converted to string via toString() - * @param properties optional properties to add to the annotation. - */ -function notice(message, properties = {}) { - command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); -} -exports.notice = notice; -/** - * Writes info to log with console.log. - * @param message info message - */ -function info(message) { - process.stdout.write(message + os.EOL); -} -exports.info = info; -/** - * Begin an output group. - * - * Output until the next `groupEnd` will be foldable in this group - * - * @param name The name of the output group - */ -function startGroup(name) { - command_1.issue('group', name); -} -exports.startGroup = startGroup; -/** - * End an output group. - */ -function endGroup() { - command_1.issue('endgroup'); -} -exports.endGroup = endGroup; -/** - * Wrap an asynchronous function call in a group. - * - * Returns the same type as the function itself. - * - * @param name The name of the group - * @param fn The function to wrap in the group - */ -function group(name, fn) { - return __awaiter(this, void 0, void 0, function* () { - startGroup(name); - let result; - try { - result = yield fn(); - } - finally { - endGroup(); - } - return result; - }); -} -exports.group = group; -//----------------------------------------------------------------------- -// Wrapper action state -//----------------------------------------------------------------------- -/** - * Saves state for current action, the state can only be retrieved by this action's post job execution. - * - * @param name name of the state to store - * @param value value to store. Non-string values will be converted to a string via JSON.stringify - */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function saveState(name, value) { - command_1.issueCommand('save-state', { name }, value); -} -exports.saveState = saveState; -/** - * Gets the value of an state set by this action's main execution. 
- * - * @param name name of the state to get - * @returns string - */ -function getState(name) { - return process.env[`STATE_${name}`] || ''; -} -exports.getState = getState; -function getIDToken(aud) { - return __awaiter(this, void 0, void 0, function* () { - return yield oidc_utils_1.OidcClient.getIDToken(aud); - }); -} -exports.getIDToken = getIDToken; -/** - * Summary exports - */ -var summary_1 = __nccwpck_require__(1327); -Object.defineProperty(exports, "summary", ({ enumerable: true, get: function () { return summary_1.summary; } })); -/** - * @deprecated use core.summary - */ -var summary_2 = __nccwpck_require__(1327); -Object.defineProperty(exports, "markdownSummary", ({ enumerable: true, get: function () { return summary_2.markdownSummary; } })); -/** - * Path exports - */ -var path_utils_1 = __nccwpck_require__(2981); -Object.defineProperty(exports, "toPosixPath", ({ enumerable: true, get: function () { return path_utils_1.toPosixPath; } })); -Object.defineProperty(exports, "toWin32Path", ({ enumerable: true, get: function () { return path_utils_1.toWin32Path; } })); -Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: function () { return path_utils_1.toPlatformPath; } })); -//# sourceMappingURL=core.js.map - -/***/ }), - -/***/ 717: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -// For internal use, subject to change. -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.issueCommand = void 0; -// We use any as a valid input type -/* eslint-disable @typescript-eslint/no-explicit-any */ -const fs = __importStar(__nccwpck_require__(5747)); -const os = __importStar(__nccwpck_require__(2087)); -const utils_1 = __nccwpck_require__(5278); -function issueCommand(command, message) { - const filePath = process.env[`GITHUB_${command}`]; - if (!filePath) { - throw new Error(`Unable to find environment variable for file command ${command}`); - } - if (!fs.existsSync(filePath)) { - throw new Error(`Missing file at path: ${filePath}`); - } - fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, { - encoding: 'utf8' - }); -} -exports.issueCommand = issueCommand; -//# sourceMappingURL=file-command.js.map - -/***/ }), - -/***/ 8041: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.OidcClient = void 0; -const http_client_1 = __nccwpck_require__(6255); -const auth_1 = __nccwpck_require__(5526); -const core_1 = __nccwpck_require__(2186); -class OidcClient { - static createHttpClient(allowRetry = true, maxRetry = 10) { - const requestOptions = { - allowRetries: allowRetry, - maxRetries: maxRetry - }; - return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions); - } - static getRequestToken() { - const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN']; - if (!token) { - throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable'); - } - return token; - } - static getIDTokenUrl() { - const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL']; - if (!runtimeUrl) { - throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable'); - } - return runtimeUrl; - } - static getCall(id_token_url) { - var _a; - return __awaiter(this, void 0, void 0, function* () { - const httpclient = OidcClient.createHttpClient(); - const res = yield httpclient - .getJson(id_token_url) - .catch(error => { - throw new Error(`Failed to get ID Token. \n - Error Code : ${error.statusCode}\n - Error Message: ${error.result.message}`); - }); - const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value; - if (!id_token) { - throw new Error('Response json body do not have ID Token field'); - } - return id_token; - }); - } - static getIDToken(audience) { - return __awaiter(this, void 0, void 0, function* () { - try { - // New ID Token is requested from action service - let id_token_url = OidcClient.getIDTokenUrl(); - if (audience) { - const encodedAudience = encodeURIComponent(audience); - id_token_url = `${id_token_url}&audience=${encodedAudience}`; - } - core_1.debug(`ID token url is ${id_token_url}`); - const id_token = yield OidcClient.getCall(id_token_url); - core_1.setSecret(id_token); - return id_token; - } - catch (error) { - throw new Error(`Error message: ${error.message}`); - } - }); - } -} -exports.OidcClient = OidcClient; -//# sourceMappingURL=oidc-utils.js.map - -/***/ }), - -/***/ 2981: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0; -const path = __importStar(__nccwpck_require__(5622)); -/** - * toPosixPath converts the given path to the posix form. On Windows, \\ will be - * replaced with /. - * - * @param pth. Path to transform. - * @return string Posix path. - */ -function toPosixPath(pth) { - return pth.replace(/[\\]/g, '/'); -} -exports.toPosixPath = toPosixPath; -/** - * toWin32Path converts the given path to the win32 form. On Linux, / will be - * replaced with \\. - * - * @param pth. Path to transform. - * @return string Win32 path. - */ -function toWin32Path(pth) { - return pth.replace(/[/]/g, '\\'); -} -exports.toWin32Path = toWin32Path; -/** - * toPlatformPath converts the given path to a platform-specific path. It does - * this by replacing instances of / and \ with the platform-specific path - * separator. - * - * @param pth The path to platformize. - * @return string The platform-specific path. - */ -function toPlatformPath(pth) { - return pth.replace(/[/\\]/g, path.sep); -} -exports.toPlatformPath = toPlatformPath; -//# sourceMappingURL=path-utils.js.map - -/***/ }), - -/***/ 1327: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0; -const os_1 = __nccwpck_require__(2087); -const fs_1 = __nccwpck_require__(5747); -const { access, appendFile, writeFile } = fs_1.promises; -exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY'; -exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary'; -class Summary { - constructor() { - this._buffer = ''; - } - /** - * Finds the summary file path from the environment, rejects if env var is not found or file does not exist - * Also checks r/w permissions. - * - * @returns step summary file path - */ - filePath() { - return __awaiter(this, void 0, void 0, function* () { - if (this._filePath) { - return this._filePath; - } - const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR]; - if (!pathFromEnv) { - throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. 
Check if your runtime environment supports job summaries.`); - } - try { - yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK); - } - catch (_a) { - throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`); - } - this._filePath = pathFromEnv; - return this._filePath; - }); - } - /** - * Wraps content in an HTML tag, adding any HTML attributes - * - * @param {string} tag HTML tag to wrap - * @param {string | null} content content within the tag - * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add - * - * @returns {string} content wrapped in HTML element - */ - wrap(tag, content, attrs = {}) { - const htmlAttrs = Object.entries(attrs) - .map(([key, value]) => ` ${key}="${value}"`) - .join(''); - if (!content) { - return `<${tag}${htmlAttrs}>`; - } - return `<${tag}${htmlAttrs}>${content}`; - } - /** - * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default. - * - * @param {SummaryWriteOptions} [options] (optional) options for write operation - * - * @returns {Promise} summary instance - */ - write(options) { - return __awaiter(this, void 0, void 0, function* () { - const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite); - const filePath = yield this.filePath(); - const writeFunc = overwrite ? writeFile : appendFile; - yield writeFunc(filePath, this._buffer, { encoding: 'utf8' }); - return this.emptyBuffer(); - }); - } - /** - * Clears the summary buffer and wipes the summary file - * - * @returns {Summary} summary instance - */ - clear() { - return __awaiter(this, void 0, void 0, function* () { - return this.emptyBuffer().write({ overwrite: true }); - }); - } - /** - * Returns the current summary buffer as a string - * - * @returns {string} string of summary buffer - */ - stringify() { - return this._buffer; - } - /** - * If the summary buffer is empty - * - * @returns {boolen} true if the buffer is empty - */ - isEmptyBuffer() { - return this._buffer.length === 0; - } - /** - * Resets the summary buffer without writing to summary file - * - * @returns {Summary} summary instance - */ - emptyBuffer() { - this._buffer = ''; - return this; - } - /** - * Adds raw text to the summary buffer - * - * @param {string} text content to add - * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false) - * - * @returns {Summary} summary instance - */ - addRaw(text, addEOL = false) { - this._buffer += text; - return addEOL ? this.addEOL() : this; - } - /** - * Adds the operating system-specific end-of-line marker to the buffer - * - * @returns {Summary} summary instance - */ - addEOL() { - return this.addRaw(os_1.EOL); - } - /** - * Adds an HTML codeblock to the summary buffer - * - * @param {string} code content to render within fenced code block - * @param {string} lang (optional) language to syntax highlight code - * - * @returns {Summary} summary instance - */ - addCodeBlock(code, lang) { - const attrs = Object.assign({}, (lang && { lang })); - const element = this.wrap('pre', this.wrap('code', code), attrs); - return this.addRaw(element).addEOL(); - } - /** - * Adds an HTML list to the summary buffer - * - * @param {string[]} items list of items to render - * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false) - * - * @returns {Summary} summary instance - */ - addList(items, ordered = false) { - const tag = ordered ? 
'ol' : 'ul'; - const listItems = items.map(item => this.wrap('li', item)).join(''); - const element = this.wrap(tag, listItems); - return this.addRaw(element).addEOL(); - } - /** - * Adds an HTML table to the summary buffer - * - * @param {SummaryTableCell[]} rows table rows - * - * @returns {Summary} summary instance - */ - addTable(rows) { - const tableBody = rows - .map(row => { - const cells = row - .map(cell => { - if (typeof cell === 'string') { - return this.wrap('td', cell); - } - const { header, data, colspan, rowspan } = cell; - const tag = header ? 'th' : 'td'; - const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan })); - return this.wrap(tag, data, attrs); - }) - .join(''); - return this.wrap('tr', cells); - }) - .join(''); - const element = this.wrap('table', tableBody); - return this.addRaw(element).addEOL(); - } - /** - * Adds a collapsable HTML details element to the summary buffer - * - * @param {string} label text for the closed state - * @param {string} content collapsable content - * - * @returns {Summary} summary instance - */ - addDetails(label, content) { - const element = this.wrap('details', this.wrap('summary', label) + content); - return this.addRaw(element).addEOL(); - } - /** - * Adds an HTML image tag to the summary buffer - * - * @param {string} src path to the image you to embed - * @param {string} alt text description of the image - * @param {SummaryImageOptions} options (optional) addition image attributes - * - * @returns {Summary} summary instance - */ - addImage(src, alt, options) { - const { width, height } = options || {}; - const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height })); - const element = this.wrap('img', null, Object.assign({ src, alt }, attrs)); - return this.addRaw(element).addEOL(); - } - /** - * Adds an HTML section heading element - * - * @param {string} text heading text - * @param {number | string} [level=1] (optional) the heading level, default: 1 - * - * @returns {Summary} summary instance - */ - addHeading(text, level) { - const tag = `h${level}`; - const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag) - ? tag - : 'h1'; - const element = this.wrap(allowedTag, text); - return this.addRaw(element).addEOL(); - } - /** - * Adds an HTML thematic break (
) to the summary buffer - * - * @returns {Summary} summary instance - */ - addSeparator() { - const element = this.wrap('hr', null); - return this.addRaw(element).addEOL(); - } - /** - * Adds an HTML line break (
) to the summary buffer - * - * @returns {Summary} summary instance - */ - addBreak() { - const element = this.wrap('br', null); - return this.addRaw(element).addEOL(); - } - /** - * Adds an HTML blockquote to the summary buffer - * - * @param {string} text quote text - * @param {string} cite (optional) citation url - * - * @returns {Summary} summary instance - */ - addQuote(text, cite) { - const attrs = Object.assign({}, (cite && { cite })); - const element = this.wrap('blockquote', text, attrs); - return this.addRaw(element).addEOL(); - } - /** - * Adds an HTML anchor tag to the summary buffer - * - * @param {string} text link text/content - * @param {string} href hyperlink - * - * @returns {Summary} summary instance - */ - addLink(text, href) { - const element = this.wrap('a', text, { href }); - return this.addRaw(element).addEOL(); - } -} -const _summary = new Summary(); -/** - * @deprecated use `core.summary` - */ -exports.markdownSummary = _summary; -exports.summary = _summary; -//# sourceMappingURL=summary.js.map - -/***/ }), - -/***/ 5278: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -// We use any as a valid input type -/* eslint-disable @typescript-eslint/no-explicit-any */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.toCommandProperties = exports.toCommandValue = void 0; -/** - * Sanitizes an input into a string so it can be passed into issueCommand safely - * @param input input to sanitize into a string - */ -function toCommandValue(input) { - if (input === null || input === undefined) { - return ''; - } - else if (typeof input === 'string' || input instanceof String) { - return input; - } - return JSON.stringify(input); -} -exports.toCommandValue = toCommandValue; -/** - * - * @param annotationProperties - * @returns The command properties to send with the actual annotation command - * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646 - */ -function toCommandProperties(annotationProperties) { - if (!Object.keys(annotationProperties).length) { - return {}; - } - return { - title: annotationProperties.title, - file: annotationProperties.file, - line: annotationProperties.startLine, - endLine: annotationProperties.endLine, - col: annotationProperties.startColumn, - endColumn: annotationProperties.endColumn - }; -} -exports.toCommandProperties = toCommandProperties; -//# sourceMappingURL=utils.js.map - -/***/ }), - -/***/ 4087: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Context = void 0; -const fs_1 = __nccwpck_require__(5747); -const os_1 = __nccwpck_require__(2087); -class Context { - /** - * Hydrate the context from the environment - */ - constructor() { - var _a, _b, _c; - this.payload = {}; - if (process.env.GITHUB_EVENT_PATH) { - if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) { - this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' })); - } - else { - const path = process.env.GITHUB_EVENT_PATH; - process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`); - } - } - this.eventName = process.env.GITHUB_EVENT_NAME; - this.sha = process.env.GITHUB_SHA; - this.ref = process.env.GITHUB_REF; - this.workflow = process.env.GITHUB_WORKFLOW; - this.action = process.env.GITHUB_ACTION; - this.actor = process.env.GITHUB_ACTOR; - this.job = process.env.GITHUB_JOB; - this.runNumber = 
parseInt(process.env.GITHUB_RUN_NUMBER, 10); - this.runId = parseInt(process.env.GITHUB_RUN_ID, 10); - this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`; - this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`; - this.graphqlUrl = (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`; - } - get issue() { - const payload = this.payload; - return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number }); - } - get repo() { - if (process.env.GITHUB_REPOSITORY) { - const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/'); - return { owner, repo }; - } - if (this.payload.repository) { - return { - owner: this.payload.repository.owner.login, - repo: this.payload.repository.name - }; - } - throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'"); - } -} -exports.Context = Context; -//# sourceMappingURL=context.js.map - -/***/ }), - -/***/ 5438: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getOctokit = exports.context = void 0; -const Context = __importStar(__nccwpck_require__(4087)); -const utils_1 = __nccwpck_require__(3030); -exports.context = new Context.Context(); -/** - * Returns a hydrated octokit ready to use for GitHub Actions - * - * @param token the repo PAT or GITHUB_TOKEN - * @param options other options to set - */ -function getOctokit(token, options) { - return new utils_1.GitHub(utils_1.getOctokitOptions(token, options)); -} -exports.getOctokit = getOctokit; -//# sourceMappingURL=github.js.map - -/***/ }), - -/***/ 7914: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0; -const httpClient = __importStar(__nccwpck_require__(6255)); -function getAuthString(token, options) { - if (!token && !options.auth) { - throw new Error('Parameter token or opts.auth is required'); - } - else if (token && options.auth) { - throw new Error('Parameters token and opts.auth may not both be specified'); - } - return typeof options.auth === 'string' ? options.auth : `token ${token}`; -} -exports.getAuthString = getAuthString; -function getProxyAgent(destinationUrl) { - const hc = new httpClient.HttpClient(); - return hc.getAgent(destinationUrl); -} -exports.getProxyAgent = getProxyAgent; -function getApiBaseUrl() { - return process.env['GITHUB_API_URL'] || 'https://api.github.com'; -} -exports.getApiBaseUrl = getApiBaseUrl; -//# sourceMappingURL=utils.js.map - -/***/ }), - -/***/ 3030: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getOctokitOptions = exports.GitHub = exports.context = void 0; -const Context = __importStar(__nccwpck_require__(4087)); -const Utils = __importStar(__nccwpck_require__(7914)); -// octokit + plugins -const core_1 = __nccwpck_require__(6762); -const plugin_rest_endpoint_methods_1 = __nccwpck_require__(3044); -const plugin_paginate_rest_1 = __nccwpck_require__(4193); -exports.context = new Context.Context(); -const baseUrl = Utils.getApiBaseUrl(); -const defaults = { - baseUrl, - request: { - agent: Utils.getProxyAgent(baseUrl) - } -}; -exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(defaults); -/** - * Convience function to correctly format Octokit Options to pass into the constructor. 
- * - * @param token the repo PAT or GITHUB_TOKEN - * @param options other options to set - */ -function getOctokitOptions(token, options) { - const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller - // Auth - const auth = Utils.getAuthString(token, opts); - if (auth) { - opts.auth = auth; - } - return opts; -} -exports.getOctokitOptions = getOctokitOptions; -//# sourceMappingURL=utils.js.map - -/***/ }), - -/***/ 5526: -/***/ (function(__unused_webpack_module, exports) { - -"use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0; -class BasicCredentialHandler { - constructor(username, password) { - this.username = username; - this.password = password; - } - prepareRequest(options) { - if (!options.headers) { - throw Error('The request has no headers'); - } - options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`; - } - // This handler cannot handle 401 - canHandleAuthentication() { - return false; - } - handleAuthentication() { - return __awaiter(this, void 0, void 0, function* () { - throw new Error('not implemented'); - }); - } -} -exports.BasicCredentialHandler = BasicCredentialHandler; -class BearerCredentialHandler { - constructor(token) { - this.token = token; - } - // currently implements pre-authorization - // TODO: support preAuth = false where it hooks on 401 - prepareRequest(options) { - if (!options.headers) { - throw Error('The request has no headers'); - } - options.headers['Authorization'] = `Bearer ${this.token}`; - } - // This handler cannot handle 401 - canHandleAuthentication() { - return false; - } - handleAuthentication() { - return __awaiter(this, void 0, void 0, function* () { - throw new Error('not implemented'); - }); - } -} -exports.BearerCredentialHandler = BearerCredentialHandler; -class PersonalAccessTokenCredentialHandler { - constructor(token) { - this.token = token; - } - // currently implements pre-authorization - // TODO: support preAuth = false where it hooks on 401 - prepareRequest(options) { - if (!options.headers) { - throw Error('The request has no headers'); - } - options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`; - } - // This handler cannot handle 401 - canHandleAuthentication() { - return false; - } - handleAuthentication() { - return __awaiter(this, void 0, void 0, function* () { - throw new Error('not implemented'); - }); - } -} -exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; -//# sourceMappingURL=auth.js.map - -/***/ }), - -/***/ 6255: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -/* eslint-disable 
@typescript-eslint/no-explicit-any */ -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0; -const http = __importStar(__nccwpck_require__(8605)); -const https = __importStar(__nccwpck_require__(7211)); -const pm = __importStar(__nccwpck_require__(9835)); -const tunnel = __importStar(__nccwpck_require__(4294)); -var HttpCodes; -(function (HttpCodes) { - HttpCodes[HttpCodes["OK"] = 200] = "OK"; - HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; - HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; - HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; - HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; - HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; - HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; - HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; - HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; - HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; - HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; - HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; - HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; - HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; - HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; - HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; - HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; - HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; - HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; - HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; - HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; - HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; - HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; - HttpCodes[HttpCodes["NotImplemented"] = 
501] = "NotImplemented"; - HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; - HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; - HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; -})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {})); -var Headers; -(function (Headers) { - Headers["Accept"] = "accept"; - Headers["ContentType"] = "content-type"; -})(Headers = exports.Headers || (exports.Headers = {})); -var MediaTypes; -(function (MediaTypes) { - MediaTypes["ApplicationJson"] = "application/json"; -})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {})); -/** - * Returns the proxy URL, depending upon the supplied url and proxy environment variables. - * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com - */ -function getProxyUrl(serverUrl) { - const proxyUrl = pm.getProxyUrl(new URL(serverUrl)); - return proxyUrl ? proxyUrl.href : ''; -} -exports.getProxyUrl = getProxyUrl; -const HttpRedirectCodes = [ - HttpCodes.MovedPermanently, - HttpCodes.ResourceMoved, - HttpCodes.SeeOther, - HttpCodes.TemporaryRedirect, - HttpCodes.PermanentRedirect -]; -const HttpResponseRetryCodes = [ - HttpCodes.BadGateway, - HttpCodes.ServiceUnavailable, - HttpCodes.GatewayTimeout -]; -const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; -const ExponentialBackoffCeiling = 10; -const ExponentialBackoffTimeSlice = 5; -class HttpClientError extends Error { - constructor(message, statusCode) { - super(message); - this.name = 'HttpClientError'; - this.statusCode = statusCode; - Object.setPrototypeOf(this, HttpClientError.prototype); - } -} -exports.HttpClientError = HttpClientError; -class HttpClientResponse { - constructor(message) { - this.message = message; - } - readBody() { - return __awaiter(this, void 0, void 0, function* () { - return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { - let output = Buffer.alloc(0); - this.message.on('data', (chunk) => { - output = Buffer.concat([output, chunk]); - }); - this.message.on('end', () => { - resolve(output.toString()); - }); - })); - }); - } -} -exports.HttpClientResponse = HttpClientResponse; -function isHttps(requestUrl) { - const parsedUrl = new URL(requestUrl); - return parsedUrl.protocol === 'https:'; -} -exports.isHttps = isHttps; -class HttpClient { - constructor(userAgent, handlers, requestOptions) { - this._ignoreSslError = false; - this._allowRedirects = true; - this._allowRedirectDowngrade = false; - this._maxRedirects = 50; - this._allowRetries = false; - this._maxRetries = 1; - this._keepAlive = false; - this._disposed = false; - this.userAgent = userAgent; - this.handlers = handlers || []; - this.requestOptions = requestOptions; - if (requestOptions) { - if (requestOptions.ignoreSslError != null) { - this._ignoreSslError = requestOptions.ignoreSslError; - } - this._socketTimeout = requestOptions.socketTimeout; - if (requestOptions.allowRedirects != null) { - this._allowRedirects = requestOptions.allowRedirects; - } - if (requestOptions.allowRedirectDowngrade != null) { - this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; - } - if (requestOptions.maxRedirects != null) { - this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); - } - if (requestOptions.keepAlive != null) { - this._keepAlive = requestOptions.keepAlive; - } - if (requestOptions.allowRetries != null) { - this._allowRetries = requestOptions.allowRetries; - } - if (requestOptions.maxRetries != null) { - this._maxRetries = 
requestOptions.maxRetries; - } - } - } - options(requestUrl, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); - }); - } - get(requestUrl, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('GET', requestUrl, null, additionalHeaders || {}); - }); - } - del(requestUrl, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('DELETE', requestUrl, null, additionalHeaders || {}); - }); - } - post(requestUrl, data, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('POST', requestUrl, data, additionalHeaders || {}); - }); - } - patch(requestUrl, data, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('PATCH', requestUrl, data, additionalHeaders || {}); - }); - } - put(requestUrl, data, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('PUT', requestUrl, data, additionalHeaders || {}); - }); - } - head(requestUrl, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('HEAD', requestUrl, null, additionalHeaders || {}); - }); - } - sendStream(verb, requestUrl, stream, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request(verb, requestUrl, stream, additionalHeaders); - }); - } - /** - * Gets a typed object from an endpoint - * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise - */ - getJson(requestUrl, additionalHeaders = {}) { - return __awaiter(this, void 0, void 0, function* () { - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - const res = yield this.get(requestUrl, additionalHeaders); - return this._processResponse(res, this.requestOptions); - }); - } - postJson(requestUrl, obj, additionalHeaders = {}) { - return __awaiter(this, void 0, void 0, function* () { - const data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - const res = yield this.post(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - }); - } - putJson(requestUrl, obj, additionalHeaders = {}) { - return __awaiter(this, void 0, void 0, function* () { - const data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - const res = yield this.put(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - }); - } - patchJson(requestUrl, obj, additionalHeaders = {}) { - return __awaiter(this, void 0, void 0, function* () { - const data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, 
Headers.ContentType, MediaTypes.ApplicationJson); - const res = yield this.patch(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - }); - } - /** - * Makes a raw http request. - * All other methods such as get, post, patch, and request ultimately call this. - * Prefer get, del, post and patch - */ - request(verb, requestUrl, data, headers) { - return __awaiter(this, void 0, void 0, function* () { - if (this._disposed) { - throw new Error('Client has already been disposed.'); - } - const parsedUrl = new URL(requestUrl); - let info = this._prepareRequest(verb, parsedUrl, headers); - // Only perform retries on reads since writes may not be idempotent. - const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb) - ? this._maxRetries + 1 - : 1; - let numTries = 0; - let response; - do { - response = yield this.requestRaw(info, data); - // Check if it's an authentication challenge - if (response && - response.message && - response.message.statusCode === HttpCodes.Unauthorized) { - let authenticationHandler; - for (const handler of this.handlers) { - if (handler.canHandleAuthentication(response)) { - authenticationHandler = handler; - break; - } - } - if (authenticationHandler) { - return authenticationHandler.handleAuthentication(this, info, data); - } - else { - // We have received an unauthorized response but have no handlers to handle it. - // Let the response return to the caller. - return response; - } - } - let redirectsRemaining = this._maxRedirects; - while (response.message.statusCode && - HttpRedirectCodes.includes(response.message.statusCode) && - this._allowRedirects && - redirectsRemaining > 0) { - const redirectUrl = response.message.headers['location']; - if (!redirectUrl) { - // if there's no location to redirect to, we won't - break; - } - const parsedRedirectUrl = new URL(redirectUrl); - if (parsedUrl.protocol === 'https:' && - parsedUrl.protocol !== parsedRedirectUrl.protocol && - !this._allowRedirectDowngrade) { - throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); - } - // we need to finish reading the response before reassigning response - // which will leak the open socket. - yield response.readBody(); - // strip authorization header if redirected to a different hostname - if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { - for (const header in headers) { - // header names are case insensitive - if (header.toLowerCase() === 'authorization') { - delete headers[header]; - } - } - } - // let's make the request with the new redirectUrl - info = this._prepareRequest(verb, parsedRedirectUrl, headers); - response = yield this.requestRaw(info, data); - redirectsRemaining--; - } - if (!response.message.statusCode || - !HttpResponseRetryCodes.includes(response.message.statusCode)) { - // If not a retry code, return immediately instead of retrying - return response; - } - numTries += 1; - if (numTries < maxTries) { - yield response.readBody(); - yield this._performExponentialBackoff(numTries); - } - } while (numTries < maxTries); - return response; - }); - } - /** - * Needs to be called if keepAlive is set to true in request options. - */ - dispose() { - if (this._agent) { - this._agent.destroy(); - } - this._disposed = true; - } - /** - * Raw request. 
- * @param info - * @param data - */ - requestRaw(info, data) { - return __awaiter(this, void 0, void 0, function* () { - return new Promise((resolve, reject) => { - function callbackForResult(err, res) { - if (err) { - reject(err); - } - else if (!res) { - // If `err` is not passed, then `res` must be passed. - reject(new Error('Unknown error')); - } - else { - resolve(res); - } - } - this.requestRawWithCallback(info, data, callbackForResult); - }); - }); - } - /** - * Raw request with callback. - * @param info - * @param data - * @param onResult - */ - requestRawWithCallback(info, data, onResult) { - if (typeof data === 'string') { - if (!info.options.headers) { - info.options.headers = {}; - } - info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); - } - let callbackCalled = false; - function handleResult(err, res) { - if (!callbackCalled) { - callbackCalled = true; - onResult(err, res); - } - } - const req = info.httpModule.request(info.options, (msg) => { - const res = new HttpClientResponse(msg); - handleResult(undefined, res); - }); - let socket; - req.on('socket', sock => { - socket = sock; - }); - // If we ever get disconnected, we want the socket to timeout eventually - req.setTimeout(this._socketTimeout || 3 * 60000, () => { - if (socket) { - socket.end(); - } - handleResult(new Error(`Request timeout: ${info.options.path}`)); - }); - req.on('error', function (err) { - // err has statusCode property - // res should have headers - handleResult(err); - }); - if (data && typeof data === 'string') { - req.write(data, 'utf8'); - } - if (data && typeof data !== 'string') { - data.on('close', function () { - req.end(); - }); - data.pipe(req); - } - else { - req.end(); - } - } - /** - * Gets an http agent. This function is useful when you need an http agent that handles - * routing through a proxy server - depending upon the url and proxy environment variables. - * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com - */ - getAgent(serverUrl) { - const parsedUrl = new URL(serverUrl); - return this._getAgent(parsedUrl); - } - _prepareRequest(method, requestUrl, headers) { - const info = {}; - info.parsedUrl = requestUrl; - const usingSsl = info.parsedUrl.protocol === 'https:'; - info.httpModule = usingSsl ? https : http; - const defaultPort = usingSsl ? 443 : 80; - info.options = {}; - info.options.host = info.parsedUrl.hostname; - info.options.port = info.parsedUrl.port - ? 
parseInt(info.parsedUrl.port) - : defaultPort; - info.options.path = - (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); - info.options.method = method; - info.options.headers = this._mergeHeaders(headers); - if (this.userAgent != null) { - info.options.headers['user-agent'] = this.userAgent; - } - info.options.agent = this._getAgent(info.parsedUrl); - // gives handlers an opportunity to participate - if (this.handlers) { - for (const handler of this.handlers) { - handler.prepareRequest(info.options); - } - } - return info; - } - _mergeHeaders(headers) { - if (this.requestOptions && this.requestOptions.headers) { - return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {})); - } - return lowercaseKeys(headers || {}); - } - _getExistingOrDefaultHeader(additionalHeaders, header, _default) { - let clientHeader; - if (this.requestOptions && this.requestOptions.headers) { - clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; - } - return additionalHeaders[header] || clientHeader || _default; - } - _getAgent(parsedUrl) { - let agent; - const proxyUrl = pm.getProxyUrl(parsedUrl); - const useProxy = proxyUrl && proxyUrl.hostname; - if (this._keepAlive && useProxy) { - agent = this._proxyAgent; - } - if (this._keepAlive && !useProxy) { - agent = this._agent; - } - // if agent is already assigned use that agent. - if (agent) { - return agent; - } - const usingSsl = parsedUrl.protocol === 'https:'; - let maxSockets = 100; - if (this.requestOptions) { - maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; - } - // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis. - if (proxyUrl && proxyUrl.hostname) { - const agentOptions = { - maxSockets, - keepAlive: this._keepAlive, - proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && { - proxyAuth: `${proxyUrl.username}:${proxyUrl.password}` - })), { host: proxyUrl.hostname, port: proxyUrl.port }) - }; - let tunnelAgent; - const overHttps = proxyUrl.protocol === 'https:'; - if (usingSsl) { - tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; - } - else { - tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; - } - agent = tunnelAgent(agentOptions); - this._proxyAgent = agent; - } - // if reusing agent across request and tunneling agent isn't assigned create a new agent - if (this._keepAlive && !agent) { - const options = { keepAlive: this._keepAlive, maxSockets }; - agent = usingSsl ? new https.Agent(options) : new http.Agent(options); - this._agent = agent; - } - // if not using private agent and tunnel agent isn't setup then use global agent - if (!agent) { - agent = usingSsl ? 
https.globalAgent : http.globalAgent; - } - if (usingSsl && this._ignoreSslError) { - // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process - // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options - // we have to cast it to any and change it directly - agent.options = Object.assign(agent.options || {}, { - rejectUnauthorized: false - }); - } - return agent; - } - _performExponentialBackoff(retryNumber) { - return __awaiter(this, void 0, void 0, function* () { - retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); - const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); - return new Promise(resolve => setTimeout(() => resolve(), ms)); - }); - } - _processResponse(res, options) { - return __awaiter(this, void 0, void 0, function* () { - return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { - const statusCode = res.message.statusCode || 0; - const response = { - statusCode, - result: null, - headers: {} - }; - // not found leads to null obj returned - if (statusCode === HttpCodes.NotFound) { - resolve(response); - } - // get the result from the body - function dateTimeDeserializer(key, value) { - if (typeof value === 'string') { - const a = new Date(value); - if (!isNaN(a.valueOf())) { - return a; - } - } - return value; - } - let obj; - let contents; - try { - contents = yield res.readBody(); - if (contents && contents.length > 0) { - if (options && options.deserializeDates) { - obj = JSON.parse(contents, dateTimeDeserializer); - } - else { - obj = JSON.parse(contents); - } - response.result = obj; - } - response.headers = res.message.headers; - } - catch (err) { - // Invalid resource (contents not json); leaving result obj null - } - // note that 3xx redirects are handled by the http layer. 
- if (statusCode > 299) { - let msg; - // if exception/error in body, attempt to get better error - if (obj && obj.message) { - msg = obj.message; - } - else if (contents && contents.length > 0) { - // it may be the case that the exception is in the body message as string - msg = contents; - } - else { - msg = `Failed request: (${statusCode})`; - } - const err = new HttpClientError(msg, statusCode); - err.result = response.result; - reject(err); - } - else { - resolve(response); - } - })); - }); - } -} -exports.HttpClient = HttpClient; -const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); -//# sourceMappingURL=index.js.map - -/***/ }), - -/***/ 9835: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.checkBypass = exports.getProxyUrl = void 0; -function getProxyUrl(reqUrl) { - const usingSsl = reqUrl.protocol === 'https:'; - if (checkBypass(reqUrl)) { - return undefined; - } - const proxyVar = (() => { - if (usingSsl) { - return process.env['https_proxy'] || process.env['HTTPS_PROXY']; - } - else { - return process.env['http_proxy'] || process.env['HTTP_PROXY']; - } - })(); - if (proxyVar) { - return new URL(proxyVar); - } - else { - return undefined; - } -} -exports.getProxyUrl = getProxyUrl; -function checkBypass(reqUrl) { - if (!reqUrl.hostname) { - return false; - } - const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; - if (!noProxy) { - return false; - } - // Determine the request port - let reqPort; - if (reqUrl.port) { - reqPort = Number(reqUrl.port); - } - else if (reqUrl.protocol === 'http:') { - reqPort = 80; - } - else if (reqUrl.protocol === 'https:') { - reqPort = 443; - } - // Format the request hostname and hostname with port - const upperReqHosts = [reqUrl.hostname.toUpperCase()]; - if (typeof reqPort === 'number') { - upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); - } - // Compare request host against noproxy - for (const upperNoProxyItem of noProxy - .split(',') - .map(x => x.trim().toUpperCase()) - .filter(x => x)) { - if (upperReqHosts.some(x => x === upperNoProxyItem)) { - return true; - } - } - return false; -} -exports.checkBypass = checkBypass; -//# sourceMappingURL=proxy.js.map - -/***/ }), - -/***/ 334: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -const REGEX_IS_INSTALLATION_LEGACY = /^v1\./; -const REGEX_IS_INSTALLATION = /^ghs_/; -const REGEX_IS_USER_TO_SERVER = /^ghu_/; -async function auth(token) { - const isApp = token.split(/\./).length === 3; - const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token); - const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token); - const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? 
"user-to-server" : "oauth"; - return { - type: "token", - token: token, - tokenType - }; -} - -/** - * Prefix token for usage in the Authorization header - * - * @param token OAuth token or JSON Web Token - */ -function withAuthorizationPrefix(token) { - if (token.split(/\./).length === 3) { - return `bearer ${token}`; - } - - return `token ${token}`; -} - -async function hook(token, request, route, parameters) { - const endpoint = request.endpoint.merge(route, parameters); - endpoint.headers.authorization = withAuthorizationPrefix(token); - return request(endpoint); -} - -const createTokenAuth = function createTokenAuth(token) { - if (!token) { - throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); - } - - if (typeof token !== "string") { - throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string"); - } - - token = token.replace(/^(token|bearer) +/i, ""); - return Object.assign(auth.bind(null, token), { - hook: hook.bind(null, token) - }); -}; - -exports.createTokenAuth = createTokenAuth; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 6762: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -var universalUserAgent = __nccwpck_require__(5030); -var beforeAfterHook = __nccwpck_require__(3682); -var request = __nccwpck_require__(6234); -var graphql = __nccwpck_require__(8467); -var authToken = __nccwpck_require__(334); - -function _objectWithoutPropertiesLoose(source, excluded) { - if (source == null) return {}; - var target = {}; - var sourceKeys = Object.keys(source); - var key, i; - - for (i = 0; i < sourceKeys.length; i++) { - key = sourceKeys[i]; - if (excluded.indexOf(key) >= 0) continue; - target[key] = source[key]; - } - - return target; -} - -function _objectWithoutProperties(source, excluded) { - if (source == null) return {}; - - var target = _objectWithoutPropertiesLoose(source, excluded); - - var key, i; - - if (Object.getOwnPropertySymbols) { - var sourceSymbolKeys = Object.getOwnPropertySymbols(source); - - for (i = 0; i < sourceSymbolKeys.length; i++) { - key = sourceSymbolKeys[i]; - if (excluded.indexOf(key) >= 0) continue; - if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue; - target[key] = source[key]; - } - } - - return target; -} - -const VERSION = "3.6.0"; - -const _excluded = ["authStrategy"]; -class Octokit { - constructor(options = {}) { - const hook = new beforeAfterHook.Collection(); - const requestDefaults = { - baseUrl: request.request.endpoint.DEFAULTS.baseUrl, - headers: {}, - request: Object.assign({}, options.request, { - // @ts-ignore internal usage only, no need to type - hook: hook.bind(null, "request") - }), - mediaType: { - previews: [], - format: "" - } - }; // prepend default user agent with `options.userAgent` if set - - requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" "); - - if (options.baseUrl) { - requestDefaults.baseUrl = options.baseUrl; - } - - if (options.previews) { - requestDefaults.mediaType.previews = options.previews; - } - - if (options.timeZone) { - requestDefaults.headers["time-zone"] = options.timeZone; - } - - this.request = request.request.defaults(requestDefaults); - this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults); - this.log = Object.assign({ - debug: () => {}, - info: () => {}, - warn: 
console.warn.bind(console), - error: console.error.bind(console) - }, options.log); - this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance - // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered. - // (2) If only `options.auth` is set, use the default token authentication strategy. - // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance. - // TODO: type `options.auth` based on `options.authStrategy`. - - if (!options.authStrategy) { - if (!options.auth) { - // (1) - this.auth = async () => ({ - type: "unauthenticated" - }); - } else { - // (2) - const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯ - - hook.wrap("request", auth.hook); - this.auth = auth; - } - } else { - const { - authStrategy - } = options, - otherOptions = _objectWithoutProperties(options, _excluded); - - const auth = authStrategy(Object.assign({ - request: this.request, - log: this.log, - // we pass the current octokit instance as well as its constructor options - // to allow for authentication strategies that return a new octokit instance - // that shares the same internal state as the current one. The original - // requirement for this was the "event-octokit" authentication strategy - // of https://github.com/probot/octokit-auth-probot. - octokit: this, - octokitOptions: otherOptions - }, options.auth)); // @ts-ignore ¯\_(ツ)_/¯ - - hook.wrap("request", auth.hook); - this.auth = auth; - } // apply plugins - // https://stackoverflow.com/a/16345172 - - - const classConstructor = this.constructor; - classConstructor.plugins.forEach(plugin => { - Object.assign(this, plugin(this, options)); - }); - } - - static defaults(defaults) { - const OctokitWithDefaults = class extends this { - constructor(...args) { - const options = args[0] || {}; - - if (typeof defaults === "function") { - super(defaults(options)); - return; - } - - super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? { - userAgent: `${options.userAgent} ${defaults.userAgent}` - } : null)); - } - - }; - return OctokitWithDefaults; - } - /** - * Attach a plugin (or many) to your Octokit instance. - * - * @example - * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) 
- */ - - - static plugin(...newPlugins) { - var _a; - - const currentPlugins = this.plugins; - const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a); - return NewOctokit; - } - -} -Octokit.VERSION = VERSION; -Octokit.plugins = []; - -exports.Octokit = Octokit; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 9440: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -var isPlainObject = __nccwpck_require__(3287); -var universalUserAgent = __nccwpck_require__(5030); - -function lowercaseKeys(object) { - if (!object) { - return {}; - } - - return Object.keys(object).reduce((newObj, key) => { - newObj[key.toLowerCase()] = object[key]; - return newObj; - }, {}); -} - -function mergeDeep(defaults, options) { - const result = Object.assign({}, defaults); - Object.keys(options).forEach(key => { - if (isPlainObject.isPlainObject(options[key])) { - if (!(key in defaults)) Object.assign(result, { - [key]: options[key] - });else result[key] = mergeDeep(defaults[key], options[key]); - } else { - Object.assign(result, { - [key]: options[key] - }); - } - }); - return result; -} - -function removeUndefinedProperties(obj) { - for (const key in obj) { - if (obj[key] === undefined) { - delete obj[key]; - } - } - - return obj; -} - -function merge(defaults, route, options) { - if (typeof route === "string") { - let [method, url] = route.split(" "); - options = Object.assign(url ? { - method, - url - } : { - url: method - }, options); - } else { - options = Object.assign({}, route); - } // lowercase header names before merging with defaults to avoid duplicates - - - options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging - - removeUndefinedProperties(options); - removeUndefinedProperties(options.headers); - const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten - - if (defaults && defaults.mediaType.previews.length) { - mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews); - } - - mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, "")); - return mergedOptions; -} - -function addQueryParameters(url, parameters) { - const separator = /\?/.test(url) ? 
"&" : "?"; - const names = Object.keys(parameters); - - if (names.length === 0) { - return url; - } - - return url + separator + names.map(name => { - if (name === "q") { - return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); - } - - return `${name}=${encodeURIComponent(parameters[name])}`; - }).join("&"); -} - -const urlVariableRegex = /\{[^}]+\}/g; - -function removeNonChars(variableName) { - return variableName.replace(/^\W+|\W+$/g, "").split(/,/); -} - -function extractUrlVariableNames(url) { - const matches = url.match(urlVariableRegex); - - if (!matches) { - return []; - } - - return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); -} - -function omit(object, keysToOmit) { - return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => { - obj[key] = object[key]; - return obj; - }, {}); -} - -// Based on https://github.com/bramstein/url-template, licensed under BSD -// TODO: create separate package. -// -// Copyright (c) 2012-2014, Bram Stein -// All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// 1. Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// 2. Redistributions in binary form must reproduce the above copyright -// notice, this list of conditions and the following disclaimer in the -// documentation and/or other materials provided with the distribution. -// 3. The name of the author may not be used to endorse or promote products -// derived from this software without specific prior written permission. -// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED -// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF -// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO -// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, -// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY -// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, -// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -/* istanbul ignore file */ -function encodeReserved(str) { - return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) { - if (!/%[0-9A-Fa-f]/.test(part)) { - part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); - } - - return part; - }).join(""); -} - -function encodeUnreserved(str) { - return encodeURIComponent(str).replace(/[!'()*]/g, function (c) { - return "%" + c.charCodeAt(0).toString(16).toUpperCase(); - }); -} - -function encodeValue(operator, value, key) { - value = operator === "+" || operator === "#" ? 
encodeReserved(value) : encodeUnreserved(value); - - if (key) { - return encodeUnreserved(key) + "=" + value; - } else { - return value; - } -} - -function isDefined(value) { - return value !== undefined && value !== null; -} - -function isKeyOperator(operator) { - return operator === ";" || operator === "&" || operator === "?"; -} - -function getValues(context, operator, key, modifier) { - var value = context[key], - result = []; - - if (isDefined(value) && value !== "") { - if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { - value = value.toString(); - - if (modifier && modifier !== "*") { - value = value.substring(0, parseInt(modifier, 10)); - } - - result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); - } else { - if (modifier === "*") { - if (Array.isArray(value)) { - value.filter(isDefined).forEach(function (value) { - result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); - }); - } else { - Object.keys(value).forEach(function (k) { - if (isDefined(value[k])) { - result.push(encodeValue(operator, value[k], k)); - } - }); - } - } else { - const tmp = []; - - if (Array.isArray(value)) { - value.filter(isDefined).forEach(function (value) { - tmp.push(encodeValue(operator, value)); - }); - } else { - Object.keys(value).forEach(function (k) { - if (isDefined(value[k])) { - tmp.push(encodeUnreserved(k)); - tmp.push(encodeValue(operator, value[k].toString())); - } - }); - } - - if (isKeyOperator(operator)) { - result.push(encodeUnreserved(key) + "=" + tmp.join(",")); - } else if (tmp.length !== 0) { - result.push(tmp.join(",")); - } - } - } - } else { - if (operator === ";") { - if (isDefined(value)) { - result.push(encodeUnreserved(key)); - } - } else if (value === "" && (operator === "&" || operator === "?")) { - result.push(encodeUnreserved(key) + "="); - } else if (value === "") { - result.push(""); - } - } - - return result; -} - -function parseUrl(template) { - return { - expand: expand.bind(null, template) - }; -} - -function expand(template, context) { - var operators = ["+", "#", ".", "/", ";", "?", "&"]; - return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) { - if (expression) { - let operator = ""; - const values = []; - - if (operators.indexOf(expression.charAt(0)) !== -1) { - operator = expression.charAt(0); - expression = expression.substr(1); - } - - expression.split(/,/g).forEach(function (variable) { - var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); - values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); - }); - - if (operator && operator !== "+") { - var separator = ","; - - if (operator === "?") { - separator = "&"; - } else if (operator !== "#") { - separator = operator; - } - - return (values.length !== 0 ? 
operator : "") + values.join(separator); - } else { - return values.join(","); - } - } else { - return encodeReserved(literal); - } - }); -} - -function parse(options) { - // https://fetch.spec.whatwg.org/#methods - let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible - - let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); - let headers = Object.assign({}, options.headers); - let body; - let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later - - const urlVariableNames = extractUrlVariableNames(url); - url = parseUrl(url).expand(parameters); - - if (!/^http/.test(url)) { - url = options.baseUrl + url; - } - - const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl"); - const remainingParameters = omit(parameters, omittedParameters); - const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); - - if (!isBinaryRequest) { - if (options.mediaType.format) { - // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw - headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(","); - } - - if (options.mediaType.previews.length) { - const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; - headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => { - const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json"; - return `application/vnd.github.${preview}-preview${format}`; - }).join(","); - } - } // for GET/HEAD requests, set URL query parameters from remaining parameters - // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters - - - if (["GET", "HEAD"].includes(method)) { - url = addQueryParameters(url, remainingParameters); - } else { - if ("data" in remainingParameters) { - body = remainingParameters.data; - } else { - if (Object.keys(remainingParameters).length) { - body = remainingParameters; - } else { - headers["content-length"] = 0; - } - } - } // default content-type for JSON if body is set - - - if (!headers["content-type"] && typeof body !== "undefined") { - headers["content-type"] = "application/json; charset=utf-8"; - } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body. - // fetch does not allow to set `content-length` header, but we can set body to an empty string - - - if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { - body = ""; - } // Only return body/request keys if present - - - return Object.assign({ - method, - url, - headers - }, typeof body !== "undefined" ? { - body - } : null, options.request ? 
{ - request: options.request - } : null); -} - -function endpointWithDefaults(defaults, route, options) { - return parse(merge(defaults, route, options)); -} - -function withDefaults(oldDefaults, newDefaults) { - const DEFAULTS = merge(oldDefaults, newDefaults); - const endpoint = endpointWithDefaults.bind(null, DEFAULTS); - return Object.assign(endpoint, { - DEFAULTS, - defaults: withDefaults.bind(null, DEFAULTS), - merge: merge.bind(null, DEFAULTS), - parse - }); -} - -const VERSION = "6.0.12"; - -const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url. -// So we use RequestParameters and add method as additional required property. - -const DEFAULTS = { - method: "GET", - baseUrl: "https://api.github.com", - headers: { - accept: "application/vnd.github.v3+json", - "user-agent": userAgent - }, - mediaType: { - format: "", - previews: [] - } -}; - -const endpoint = withDefaults(null, DEFAULTS); - -exports.endpoint = endpoint; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 8467: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -var request = __nccwpck_require__(6234); -var universalUserAgent = __nccwpck_require__(5030); - -const VERSION = "4.8.0"; - -function _buildMessageForResponseErrors(data) { - return `Request failed due to following response errors:\n` + data.errors.map(e => ` - ${e.message}`).join("\n"); -} - -class GraphqlResponseError extends Error { - constructor(request, headers, response) { - super(_buildMessageForResponseErrors(response)); - this.request = request; - this.headers = headers; - this.response = response; - this.name = "GraphqlResponseError"; // Expose the errors and response data in their shorthand properties. - - this.errors = response.errors; - this.data = response.data; // Maintains proper stack trace (only available on V8) - - /* istanbul ignore next */ - - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } - } - -} - -const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query", "mediaType"]; -const FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"]; -const GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; -function graphql(request, query, options) { - if (options) { - if (typeof query === "string" && "query" in options) { - return Promise.reject(new Error(`[@octokit/graphql] "query" cannot be used as variable name`)); - } - - for (const key in options) { - if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) continue; - return Promise.reject(new Error(`[@octokit/graphql] "${key}" cannot be used as variable name`)); - } - } - - const parsedOptions = typeof query === "string" ? 
Object.assign({ - query - }, options) : query; - const requestOptions = Object.keys(parsedOptions).reduce((result, key) => { - if (NON_VARIABLE_OPTIONS.includes(key)) { - result[key] = parsedOptions[key]; - return result; - } - - if (!result.variables) { - result.variables = {}; - } - - result.variables[key] = parsedOptions[key]; - return result; - }, {}); // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix - // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451 - - const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl; - - if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { - requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); - } - - return request(requestOptions).then(response => { - if (response.data.errors) { - const headers = {}; - - for (const key of Object.keys(response.headers)) { - headers[key] = response.headers[key]; - } - - throw new GraphqlResponseError(requestOptions, headers, response.data); - } - - return response.data.data; - }); -} - -function withDefaults(request$1, newDefaults) { - const newRequest = request$1.defaults(newDefaults); - - const newApi = (query, options) => { - return graphql(newRequest, query, options); - }; - - return Object.assign(newApi, { - defaults: withDefaults.bind(null, newRequest), - endpoint: request.request.endpoint - }); -} - -const graphql$1 = withDefaults(request.request, { - headers: { - "user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}` - }, - method: "POST", - url: "/graphql" -}); -function withCustomRequest(customRequest) { - return withDefaults(customRequest, { - method: "POST", - url: "/graphql" - }); -} - -exports.GraphqlResponseError = GraphqlResponseError; -exports.graphql = graphql$1; -exports.withCustomRequest = withCustomRequest; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 4193: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -const VERSION = "2.21.3"; - -function ownKeys(object, enumerableOnly) { - var keys = Object.keys(object); - - if (Object.getOwnPropertySymbols) { - var symbols = Object.getOwnPropertySymbols(object); - enumerableOnly && (symbols = symbols.filter(function (sym) { - return Object.getOwnPropertyDescriptor(object, sym).enumerable; - })), keys.push.apply(keys, symbols); - } - - return keys; -} - -function _objectSpread2(target) { - for (var i = 1; i < arguments.length; i++) { - var source = null != arguments[i] ? arguments[i] : {}; - i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { - _defineProperty(target, key, source[key]); - }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { - Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); - }); - } - - return target; -} - -function _defineProperty(obj, key, value) { - if (key in obj) { - Object.defineProperty(obj, key, { - value: value, - enumerable: true, - configurable: true, - writable: true - }); - } else { - obj[key] = value; - } - - return obj; -} - -/** - * Some “list” response that can be paginated have a different response structure - * - * They have a `total_count` key in the response (search also has `incomplete_results`, - * /installation/repositories also has `repository_selection`), as well as a key with - * the list of the items which name varies from endpoint to endpoint. 
- * - * Octokit normalizes these responses so that paginated results are always returned following - * the same structure. One challenge is that if the list response has only one page, no Link - * header is provided, so this header alone is not sufficient to check wether a response is - * paginated or not. - * - * We check if a "total_count" key is present in the response data, but also make sure that - * a "url" property is not, as the "Get the combined status for a specific ref" endpoint would - * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref - */ -function normalizePaginatedListResponse(response) { - // endpoints can respond with 204 if repository is empty - if (!response.data) { - return _objectSpread2(_objectSpread2({}, response), {}, { - data: [] - }); - } - - const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); - if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way - // to retrieve the same information. - - const incompleteResults = response.data.incomplete_results; - const repositorySelection = response.data.repository_selection; - const totalCount = response.data.total_count; - delete response.data.incomplete_results; - delete response.data.repository_selection; - delete response.data.total_count; - const namespaceKey = Object.keys(response.data)[0]; - const data = response.data[namespaceKey]; - response.data = data; - - if (typeof incompleteResults !== "undefined") { - response.data.incomplete_results = incompleteResults; - } - - if (typeof repositorySelection !== "undefined") { - response.data.repository_selection = repositorySelection; - } - - response.data.total_count = totalCount; - return response; -} - -function iterator(octokit, route, parameters) { - const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); - const requestMethod = typeof route === "function" ? route : octokit.request; - const method = options.method; - const headers = options.headers; - let url = options.url; - return { - [Symbol.asyncIterator]: () => ({ - async next() { - if (!url) return { - done: true - }; - - try { - const response = await requestMethod({ - method, - url, - headers - }); - const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format: - // '; rel="next", ; rel="last"' - // sets `url` to undefined if "next" URL is not present or `link` header is not set - - url = ((normalizedResponse.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1]; - return { - value: normalizedResponse - }; - } catch (error) { - if (error.status !== 409) throw error; - url = ""; - return { - value: { - status: 200, - headers: {}, - data: [] - } - }; - } - } - - }) - }; -} - -function paginate(octokit, route, parameters, mapFn) { - if (typeof parameters === "function") { - mapFn = parameters; - parameters = undefined; - } - - return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn); -} - -function gather(octokit, results, iterator, mapFn) { - return iterator.next().then(result => { - if (result.done) { - return results; - } - - let earlyExit = false; - - function done() { - earlyExit = true; - } - - results = results.concat(mapFn ? 
mapFn(result.value, done) : result.value.data); - - if (earlyExit) { - return results; - } - - return gather(octokit, results, iterator, mapFn); - }); -} - -const composePaginateRest = Object.assign(paginate, { - iterator -}); - -const paginatingEndpoints = ["GET /app/hook/deliveries", "GET /app/installations", "GET /applications/grants", "GET /authorizations", "GET /enterprises/{enterprise}/actions/permissions/organizations", "GET /enterprises/{enterprise}/actions/runner-groups", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners", "GET /enterprises/{enterprise}/actions/runners", "GET /enterprises/{enterprise}/audit-log", "GET /enterprises/{enterprise}/secret-scanning/alerts", "GET /enterprises/{enterprise}/settings/billing/advanced-security", "GET /events", "GET /gists", "GET /gists/public", "GET /gists/starred", "GET /gists/{gist_id}/comments", "GET /gists/{gist_id}/commits", "GET /gists/{gist_id}/forks", "GET /installation/repositories", "GET /issues", "GET /licenses", "GET /marketplace_listing/plans", "GET /marketplace_listing/plans/{plan_id}/accounts", "GET /marketplace_listing/stubbed/plans", "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", "GET /networks/{owner}/{repo}/events", "GET /notifications", "GET /organizations", "GET /orgs/{org}/actions/cache/usage-by-repository", "GET /orgs/{org}/actions/permissions/repositories", "GET /orgs/{org}/actions/runner-groups", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners", "GET /orgs/{org}/actions/runners", "GET /orgs/{org}/actions/secrets", "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", "GET /orgs/{org}/audit-log", "GET /orgs/{org}/blocks", "GET /orgs/{org}/code-scanning/alerts", "GET /orgs/{org}/codespaces", "GET /orgs/{org}/credential-authorizations", "GET /orgs/{org}/dependabot/secrets", "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", "GET /orgs/{org}/events", "GET /orgs/{org}/external-groups", "GET /orgs/{org}/failed_invitations", "GET /orgs/{org}/hooks", "GET /orgs/{org}/hooks/{hook_id}/deliveries", "GET /orgs/{org}/installations", "GET /orgs/{org}/invitations", "GET /orgs/{org}/invitations/{invitation_id}/teams", "GET /orgs/{org}/issues", "GET /orgs/{org}/members", "GET /orgs/{org}/migrations", "GET /orgs/{org}/migrations/{migration_id}/repositories", "GET /orgs/{org}/outside_collaborators", "GET /orgs/{org}/packages", "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", "GET /orgs/{org}/projects", "GET /orgs/{org}/public_members", "GET /orgs/{org}/repos", "GET /orgs/{org}/secret-scanning/alerts", "GET /orgs/{org}/settings/billing/advanced-security", "GET /orgs/{org}/team-sync/groups", "GET /orgs/{org}/teams", "GET /orgs/{org}/teams/{team_slug}/discussions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/invitations", "GET /orgs/{org}/teams/{team_slug}/members", "GET /orgs/{org}/teams/{team_slug}/projects", "GET /orgs/{org}/teams/{team_slug}/repos", "GET /orgs/{org}/teams/{team_slug}/teams", "GET /projects/columns/{column_id}/cards", "GET /projects/{project_id}/collaborators", "GET /projects/{project_id}/columns", "GET 
/repos/{owner}/{repo}/actions/artifacts", "GET /repos/{owner}/{repo}/actions/caches", "GET /repos/{owner}/{repo}/actions/runners", "GET /repos/{owner}/{repo}/actions/runs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", "GET /repos/{owner}/{repo}/actions/secrets", "GET /repos/{owner}/{repo}/actions/workflows", "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", "GET /repos/{owner}/{repo}/assignees", "GET /repos/{owner}/{repo}/branches", "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", "GET /repos/{owner}/{repo}/code-scanning/alerts", "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", "GET /repos/{owner}/{repo}/code-scanning/analyses", "GET /repos/{owner}/{repo}/codespaces", "GET /repos/{owner}/{repo}/codespaces/devcontainers", "GET /repos/{owner}/{repo}/codespaces/secrets", "GET /repos/{owner}/{repo}/collaborators", "GET /repos/{owner}/{repo}/comments", "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/commits", "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", "GET /repos/{owner}/{repo}/commits/{ref}/status", "GET /repos/{owner}/{repo}/commits/{ref}/statuses", "GET /repos/{owner}/{repo}/contributors", "GET /repos/{owner}/{repo}/dependabot/secrets", "GET /repos/{owner}/{repo}/deployments", "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", "GET /repos/{owner}/{repo}/environments", "GET /repos/{owner}/{repo}/events", "GET /repos/{owner}/{repo}/forks", "GET /repos/{owner}/{repo}/git/matching-refs/{ref}", "GET /repos/{owner}/{repo}/hooks", "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", "GET /repos/{owner}/{repo}/invitations", "GET /repos/{owner}/{repo}/issues", "GET /repos/{owner}/{repo}/issues/comments", "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/issues/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", "GET /repos/{owner}/{repo}/issues/{issue_number}/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", "GET /repos/{owner}/{repo}/keys", "GET /repos/{owner}/{repo}/labels", "GET /repos/{owner}/{repo}/milestones", "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", "GET /repos/{owner}/{repo}/notifications", "GET /repos/{owner}/{repo}/pages/builds", "GET /repos/{owner}/{repo}/projects", "GET /repos/{owner}/{repo}/pulls", "GET /repos/{owner}/{repo}/pulls/comments", "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", "GET /repos/{owner}/{repo}/releases", "GET /repos/{owner}/{repo}/releases/{release_id}/assets", "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", "GET 
/repos/{owner}/{repo}/secret-scanning/alerts", "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", "GET /repos/{owner}/{repo}/stargazers", "GET /repos/{owner}/{repo}/subscribers", "GET /repos/{owner}/{repo}/tags", "GET /repos/{owner}/{repo}/teams", "GET /repos/{owner}/{repo}/topics", "GET /repositories", "GET /repositories/{repository_id}/environments/{environment_name}/secrets", "GET /search/code", "GET /search/commits", "GET /search/issues", "GET /search/labels", "GET /search/repositories", "GET /search/topics", "GET /search/users", "GET /teams/{team_id}/discussions", "GET /teams/{team_id}/discussions/{discussion_number}/comments", "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /teams/{team_id}/discussions/{discussion_number}/reactions", "GET /teams/{team_id}/invitations", "GET /teams/{team_id}/members", "GET /teams/{team_id}/projects", "GET /teams/{team_id}/repos", "GET /teams/{team_id}/teams", "GET /user/blocks", "GET /user/codespaces", "GET /user/codespaces/secrets", "GET /user/emails", "GET /user/followers", "GET /user/following", "GET /user/gpg_keys", "GET /user/installations", "GET /user/installations/{installation_id}/repositories", "GET /user/issues", "GET /user/keys", "GET /user/marketplace_purchases", "GET /user/marketplace_purchases/stubbed", "GET /user/memberships/orgs", "GET /user/migrations", "GET /user/migrations/{migration_id}/repositories", "GET /user/orgs", "GET /user/packages", "GET /user/packages/{package_type}/{package_name}/versions", "GET /user/public_emails", "GET /user/repos", "GET /user/repository_invitations", "GET /user/starred", "GET /user/subscriptions", "GET /user/teams", "GET /users", "GET /users/{username}/events", "GET /users/{username}/events/orgs/{org}", "GET /users/{username}/events/public", "GET /users/{username}/followers", "GET /users/{username}/following", "GET /users/{username}/gists", "GET /users/{username}/gpg_keys", "GET /users/{username}/keys", "GET /users/{username}/orgs", "GET /users/{username}/packages", "GET /users/{username}/projects", "GET /users/{username}/received_events", "GET /users/{username}/received_events/public", "GET /users/{username}/repos", "GET /users/{username}/starred", "GET /users/{username}/subscriptions"]; - -function isPaginatingEndpoint(arg) { - if (typeof arg === "string") { - return paginatingEndpoints.includes(arg); - } else { - return false; - } -} - -/** - * @param octokit Octokit instance - * @param options Options passed to Octokit constructor - */ - -function paginateRest(octokit) { - return { - paginate: Object.assign(paginate.bind(null, octokit), { - iterator: iterator.bind(null, octokit) - }) - }; -} -paginateRest.VERSION = VERSION; - -exports.composePaginateRest = composePaginateRest; -exports.isPaginatingEndpoint = isPaginatingEndpoint; -exports.paginateRest = paginateRest; -exports.paginatingEndpoints = paginatingEndpoints; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 3044: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -function ownKeys(object, enumerableOnly) { - var keys = Object.keys(object); - - if (Object.getOwnPropertySymbols) { - var symbols = Object.getOwnPropertySymbols(object); - - if (enumerableOnly) { - symbols = symbols.filter(function (sym) { - return Object.getOwnPropertyDescriptor(object, sym).enumerable; - }); - } - - keys.push.apply(keys, symbols); - } - - return keys; -} - -function _objectSpread2(target) { 
- for (var i = 1; i < arguments.length; i++) { - var source = arguments[i] != null ? arguments[i] : {}; - - if (i % 2) { - ownKeys(Object(source), true).forEach(function (key) { - _defineProperty(target, key, source[key]); - }); - } else if (Object.getOwnPropertyDescriptors) { - Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); - } else { - ownKeys(Object(source)).forEach(function (key) { - Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); - }); - } - } - - return target; -} - -function _defineProperty(obj, key, value) { - if (key in obj) { - Object.defineProperty(obj, key, { - value: value, - enumerable: true, - configurable: true, - writable: true - }); - } else { - obj[key] = value; - } - - return obj; -} - -const Endpoints = { - actions: { - addCustomLabelsToSelfHostedRunnerForOrg: ["POST /orgs/{org}/actions/runners/{runner_id}/labels"], - addCustomLabelsToSelfHostedRunnerForRepo: ["POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], - addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], - approveWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve"], - cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"], - createOrUpdateEnvironmentSecret: ["PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], - createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], - createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - createRegistrationTokenForOrg: ["POST /orgs/{org}/actions/runners/registration-token"], - createRegistrationTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/registration-token"], - createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], - createRemoveTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/remove-token"], - createWorkflowDispatch: ["POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"], - deleteActionsCacheById: ["DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}"], - deleteActionsCacheByKey: ["DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}"], - deleteArtifact: ["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], - deleteEnvironmentSecret: ["DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], - deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], - deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - deleteSelfHostedRunnerFromOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}"], - deleteSelfHostedRunnerFromRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"], - deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], - deleteWorkflowRunLogs: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], - disableSelectedRepositoryGithubActionsOrganization: ["DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}"], - disableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable"], - downloadArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"], - downloadJobLogsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"], - downloadWorkflowRunAttemptLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs"], - downloadWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], - 
enableSelectedRepositoryGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories/{repository_id}"], - enableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable"], - getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"], - getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"], - getActionsCacheUsageByRepoForOrg: ["GET /orgs/{org}/actions/cache/usage-by-repository"], - getActionsCacheUsageForEnterprise: ["GET /enterprises/{enterprise}/actions/cache/usage"], - getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"], - getAllowedActionsOrganization: ["GET /orgs/{org}/actions/permissions/selected-actions"], - getAllowedActionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions/selected-actions"], - getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], - getEnvironmentPublicKey: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key"], - getEnvironmentSecret: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], - getGithubActionsDefaultWorkflowPermissionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/workflow"], - getGithubActionsDefaultWorkflowPermissionsOrganization: ["GET /orgs/{org}/actions/permissions/workflow"], - getGithubActionsDefaultWorkflowPermissionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions/workflow"], - getGithubActionsPermissionsOrganization: ["GET /orgs/{org}/actions/permissions"], - getGithubActionsPermissionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions"], - getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], - getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], - getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], - getPendingDeploymentsForRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"], - getRepoPermissions: ["GET /repos/{owner}/{repo}/actions/permissions", {}, { - renamed: ["actions", "getGithubActionsPermissionsRepository"] - }], - getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], - getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - getReviewsForRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals"], - getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], - getSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}"], - getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], - getWorkflowAccessToRepository: ["GET /repos/{owner}/{repo}/actions/permissions/access"], - getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], - getWorkflowRunAttempt: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}"], - getWorkflowRunUsage: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"], - getWorkflowUsage: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"], - listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], - listEnvironmentSecrets: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets"], - listJobsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"], - listJobsForWorkflowRunAttempt: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs"], - listLabelsForSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}/labels"], - listLabelsForSelfHostedRunnerForRepo: ["GET 
/repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], - listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], - listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], - listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], - listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], - listRunnerApplicationsForRepo: ["GET /repos/{owner}/{repo}/actions/runners/downloads"], - listSelectedReposForOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"], - listSelectedRepositoriesEnabledGithubActionsOrganization: ["GET /orgs/{org}/actions/permissions/repositories"], - listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], - listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], - listWorkflowRunArtifacts: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"], - listWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"], - listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], - reRunJobForWorkflowRun: ["POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun"], - reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], - reRunWorkflowFailedJobs: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs"], - removeAllCustomLabelsFromSelfHostedRunnerForOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}/labels"], - removeAllCustomLabelsFromSelfHostedRunnerForRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], - removeCustomLabelFromSelfHostedRunnerForOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}"], - removeCustomLabelFromSelfHostedRunnerForRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}"], - removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], - reviewPendingDeploymentsForRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"], - setAllowedActionsOrganization: ["PUT /orgs/{org}/actions/permissions/selected-actions"], - setAllowedActionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/selected-actions"], - setCustomLabelsForSelfHostedRunnerForOrg: ["PUT /orgs/{org}/actions/runners/{runner_id}/labels"], - setCustomLabelsForSelfHostedRunnerForRepo: ["PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], - setGithubActionsDefaultWorkflowPermissionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/workflow"], - setGithubActionsDefaultWorkflowPermissionsOrganization: ["PUT /orgs/{org}/actions/permissions/workflow"], - setGithubActionsDefaultWorkflowPermissionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/workflow"], - setGithubActionsPermissionsOrganization: ["PUT /orgs/{org}/actions/permissions"], - setGithubActionsPermissionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions"], - setSelectedReposForOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"], - setSelectedRepositoriesEnabledGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories"], - setWorkflowAccessToRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/access"] - }, - activity: { - checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], - deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], - deleteThreadSubscription: ["DELETE /notifications/threads/{thread_id}/subscription"], - getFeeds: ["GET /feeds"], - getRepoSubscription: ["GET 
/repos/{owner}/{repo}/subscription"], - getThread: ["GET /notifications/threads/{thread_id}"], - getThreadSubscriptionForAuthenticatedUser: ["GET /notifications/threads/{thread_id}/subscription"], - listEventsForAuthenticatedUser: ["GET /users/{username}/events"], - listNotificationsForAuthenticatedUser: ["GET /notifications"], - listOrgEventsForAuthenticatedUser: ["GET /users/{username}/events/orgs/{org}"], - listPublicEvents: ["GET /events"], - listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], - listPublicEventsForUser: ["GET /users/{username}/events/public"], - listPublicOrgEvents: ["GET /orgs/{org}/events"], - listReceivedEventsForUser: ["GET /users/{username}/received_events"], - listReceivedPublicEventsForUser: ["GET /users/{username}/received_events/public"], - listRepoEvents: ["GET /repos/{owner}/{repo}/events"], - listRepoNotificationsForAuthenticatedUser: ["GET /repos/{owner}/{repo}/notifications"], - listReposStarredByAuthenticatedUser: ["GET /user/starred"], - listReposStarredByUser: ["GET /users/{username}/starred"], - listReposWatchedByUser: ["GET /users/{username}/subscriptions"], - listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], - listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], - listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"], - markNotificationsAsRead: ["PUT /notifications"], - markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], - markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], - setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], - setThreadSubscription: ["PUT /notifications/threads/{thread_id}/subscription"], - starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], - unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] - }, - apps: { - addRepoToInstallation: ["PUT /user/installations/{installation_id}/repositories/{repository_id}", {}, { - renamed: ["apps", "addRepoToInstallationForAuthenticatedUser"] - }], - addRepoToInstallationForAuthenticatedUser: ["PUT /user/installations/{installation_id}/repositories/{repository_id}"], - checkToken: ["POST /applications/{client_id}/token"], - createFromManifest: ["POST /app-manifests/{code}/conversions"], - createInstallationAccessToken: ["POST /app/installations/{installation_id}/access_tokens"], - deleteAuthorization: ["DELETE /applications/{client_id}/grant"], - deleteInstallation: ["DELETE /app/installations/{installation_id}"], - deleteToken: ["DELETE /applications/{client_id}/token"], - getAuthenticated: ["GET /app"], - getBySlug: ["GET /apps/{app_slug}"], - getInstallation: ["GET /app/installations/{installation_id}"], - getOrgInstallation: ["GET /orgs/{org}/installation"], - getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"], - getSubscriptionPlanForAccount: ["GET /marketplace_listing/accounts/{account_id}"], - getSubscriptionPlanForAccountStubbed: ["GET /marketplace_listing/stubbed/accounts/{account_id}"], - getUserInstallation: ["GET /users/{username}/installation"], - getWebhookConfigForApp: ["GET /app/hook/config"], - getWebhookDelivery: ["GET /app/hook/deliveries/{delivery_id}"], - listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], - listAccountsForPlanStubbed: ["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"], - listInstallationReposForAuthenticatedUser: ["GET /user/installations/{installation_id}/repositories"], - listInstallations: ["GET /app/installations"], - listInstallationsForAuthenticatedUser: ["GET 
/user/installations"], - listPlans: ["GET /marketplace_listing/plans"], - listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], - listReposAccessibleToInstallation: ["GET /installation/repositories"], - listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], - listSubscriptionsForAuthenticatedUserStubbed: ["GET /user/marketplace_purchases/stubbed"], - listWebhookDeliveries: ["GET /app/hook/deliveries"], - redeliverWebhookDelivery: ["POST /app/hook/deliveries/{delivery_id}/attempts"], - removeRepoFromInstallation: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}", {}, { - renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"] - }], - removeRepoFromInstallationForAuthenticatedUser: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}"], - resetToken: ["PATCH /applications/{client_id}/token"], - revokeInstallationAccessToken: ["DELETE /installation/token"], - scopeToken: ["POST /applications/{client_id}/token/scoped"], - suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], - unsuspendInstallation: ["DELETE /app/installations/{installation_id}/suspended"], - updateWebhookConfigForApp: ["PATCH /app/hook/config"] - }, - billing: { - getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"], - getGithubActionsBillingUser: ["GET /users/{username}/settings/billing/actions"], - getGithubAdvancedSecurityBillingGhe: ["GET /enterprises/{enterprise}/settings/billing/advanced-security"], - getGithubAdvancedSecurityBillingOrg: ["GET /orgs/{org}/settings/billing/advanced-security"], - getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"], - getGithubPackagesBillingUser: ["GET /users/{username}/settings/billing/packages"], - getSharedStorageBillingOrg: ["GET /orgs/{org}/settings/billing/shared-storage"], - getSharedStorageBillingUser: ["GET /users/{username}/settings/billing/shared-storage"] - }, - checks: { - create: ["POST /repos/{owner}/{repo}/check-runs"], - createSuite: ["POST /repos/{owner}/{repo}/check-suites"], - get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"], - getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"], - listAnnotations: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations"], - listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"], - listForSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs"], - listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"], - rerequestRun: ["POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest"], - rerequestSuite: ["POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest"], - setSuitesPreferences: ["PATCH /repos/{owner}/{repo}/check-suites/preferences"], - update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"] - }, - codeScanning: { - deleteAnalysis: ["DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}"], - getAlert: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", {}, { - renamedParameters: { - alert_id: "alert_number" - } - }], - getAnalysis: ["GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}"], - getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"], - listAlertInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"], - listAlertsForOrg: ["GET /orgs/{org}/code-scanning/alerts"], - listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"], - 
listAlertsInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", {}, { - renamed: ["codeScanning", "listAlertInstances"] - }], - listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"], - updateAlert: ["PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}"], - uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"] - }, - codesOfConduct: { - getAllCodesOfConduct: ["GET /codes_of_conduct"], - getConductCode: ["GET /codes_of_conduct/{key}"] - }, - codespaces: { - addRepositoryForSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"], - codespaceMachinesForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}/machines"], - createForAuthenticatedUser: ["POST /user/codespaces"], - createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"], - createOrUpdateSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}"], - createWithPrForAuthenticatedUser: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces"], - createWithRepoForAuthenticatedUser: ["POST /repos/{owner}/{repo}/codespaces"], - deleteForAuthenticatedUser: ["DELETE /user/codespaces/{codespace_name}"], - deleteFromOrganization: ["DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}"], - deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"], - deleteSecretForAuthenticatedUser: ["DELETE /user/codespaces/secrets/{secret_name}"], - exportForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/exports"], - getExportDetailsForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}/exports/{export_id}"], - getForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}"], - getPublicKeyForAuthenticatedUser: ["GET /user/codespaces/secrets/public-key"], - getRepoPublicKey: ["GET /repos/{owner}/{repo}/codespaces/secrets/public-key"], - getRepoSecret: ["GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"], - getSecretForAuthenticatedUser: ["GET /user/codespaces/secrets/{secret_name}"], - listDevcontainersInRepositoryForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces/devcontainers"], - listForAuthenticatedUser: ["GET /user/codespaces"], - listInOrganization: ["GET /orgs/{org}/codespaces", {}, { - renamedParameters: { - org_id: "org" - } - }], - listInRepositoryForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces"], - listRepoSecrets: ["GET /repos/{owner}/{repo}/codespaces/secrets"], - listRepositoriesForSecretForAuthenticatedUser: ["GET /user/codespaces/secrets/{secret_name}/repositories"], - listSecretsForAuthenticatedUser: ["GET /user/codespaces/secrets"], - removeRepositoryForSecretForAuthenticatedUser: ["DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"], - repoMachinesForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces/machines"], - setRepositoriesForSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}/repositories"], - startForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/start"], - stopForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/stop"], - stopInOrganization: ["POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop"], - updateForAuthenticatedUser: ["PATCH /user/codespaces/{codespace_name}"] - }, - dependabot: { - addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"], - createOrUpdateOrgSecret: ["PUT 
/orgs/{org}/dependabot/secrets/{secret_name}"], - createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"], - deleteOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}"], - deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"], - getOrgPublicKey: ["GET /orgs/{org}/dependabot/secrets/public-key"], - getOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}"], - getRepoPublicKey: ["GET /repos/{owner}/{repo}/dependabot/secrets/public-key"], - getRepoSecret: ["GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"], - listOrgSecrets: ["GET /orgs/{org}/dependabot/secrets"], - listRepoSecrets: ["GET /repos/{owner}/{repo}/dependabot/secrets"], - listSelectedReposForOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories"], - removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"], - setSelectedReposForOrgSecret: ["PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories"] - }, - dependencyGraph: { - createRepositorySnapshot: ["POST /repos/{owner}/{repo}/dependency-graph/snapshots"], - diffRange: ["GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}"] - }, - emojis: { - get: ["GET /emojis"] - }, - enterpriseAdmin: { - addCustomLabelsToSelfHostedRunnerForEnterprise: ["POST /enterprises/{enterprise}/actions/runners/{runner_id}/labels"], - disableSelectedOrganizationGithubActionsEnterprise: ["DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"], - enableSelectedOrganizationGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"], - getAllowedActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/selected-actions"], - getGithubActionsPermissionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions"], - getServerStatistics: ["GET /enterprise-installation/{enterprise_or_org}/server-statistics"], - listLabelsForSelfHostedRunnerForEnterprise: ["GET /enterprises/{enterprise}/actions/runners/{runner_id}/labels"], - listSelectedOrganizationsEnabledGithubActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/organizations"], - removeAllCustomLabelsFromSelfHostedRunnerForEnterprise: ["DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels"], - removeCustomLabelFromSelfHostedRunnerForEnterprise: ["DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels/{name}"], - setAllowedActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/selected-actions"], - setCustomLabelsForSelfHostedRunnerForEnterprise: ["PUT /enterprises/{enterprise}/actions/runners/{runner_id}/labels"], - setGithubActionsPermissionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions"], - setSelectedOrganizationsEnabledGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations"] - }, - gists: { - checkIsStarred: ["GET /gists/{gist_id}/star"], - create: ["POST /gists"], - createComment: ["POST /gists/{gist_id}/comments"], - delete: ["DELETE /gists/{gist_id}"], - deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"], - fork: ["POST /gists/{gist_id}/forks"], - get: ["GET /gists/{gist_id}"], - getComment: ["GET /gists/{gist_id}/comments/{comment_id}"], - getRevision: ["GET /gists/{gist_id}/{sha}"], - list: ["GET /gists"], - listComments: ["GET /gists/{gist_id}/comments"], - listCommits: ["GET /gists/{gist_id}/commits"], - listForUser: ["GET /users/{username}/gists"], - 
listForks: ["GET /gists/{gist_id}/forks"], - listPublic: ["GET /gists/public"], - listStarred: ["GET /gists/starred"], - star: ["PUT /gists/{gist_id}/star"], - unstar: ["DELETE /gists/{gist_id}/star"], - update: ["PATCH /gists/{gist_id}"], - updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"] - }, - git: { - createBlob: ["POST /repos/{owner}/{repo}/git/blobs"], - createCommit: ["POST /repos/{owner}/{repo}/git/commits"], - createRef: ["POST /repos/{owner}/{repo}/git/refs"], - createTag: ["POST /repos/{owner}/{repo}/git/tags"], - createTree: ["POST /repos/{owner}/{repo}/git/trees"], - deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"], - getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"], - getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"], - getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"], - getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"], - getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"], - listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"], - updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"] - }, - gitignore: { - getAllTemplates: ["GET /gitignore/templates"], - getTemplate: ["GET /gitignore/templates/{name}"] - }, - interactions: { - getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"], - getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"], - getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"], - getRestrictionsForYourPublicRepos: ["GET /user/interaction-limits", {}, { - renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] - }], - removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"], - removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"], - removeRestrictionsForRepo: ["DELETE /repos/{owner}/{repo}/interaction-limits"], - removeRestrictionsForYourPublicRepos: ["DELETE /user/interaction-limits", {}, { - renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"] - }], - setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"], - setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"], - setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"], - setRestrictionsForYourPublicRepos: ["PUT /user/interaction-limits", {}, { - renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] - }] - }, - issues: { - addAssignees: ["POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"], - addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], - checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], - create: ["POST /repos/{owner}/{repo}/issues"], - createComment: ["POST /repos/{owner}/{repo}/issues/{issue_number}/comments"], - createLabel: ["POST /repos/{owner}/{repo}/labels"], - createMilestone: ["POST /repos/{owner}/{repo}/milestones"], - deleteComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"], - deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], - deleteMilestone: ["DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"], - get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], - getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], - getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], - getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"], - getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"], - list: ["GET /issues"], - listAssignees: ["GET /repos/{owner}/{repo}/assignees"], - listComments: ["GET 
/repos/{owner}/{repo}/issues/{issue_number}/comments"], - listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], - listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], - listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], - listEventsForTimeline: ["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline"], - listForAuthenticatedUser: ["GET /user/issues"], - listForOrg: ["GET /orgs/{org}/issues"], - listForRepo: ["GET /repos/{owner}/{repo}/issues"], - listLabelsForMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"], - listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], - listLabelsOnIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/labels"], - listMilestones: ["GET /repos/{owner}/{repo}/milestones"], - lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], - removeAllLabels: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"], - removeAssignees: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"], - removeLabel: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"], - setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], - unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], - update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], - updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], - updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], - updateMilestone: ["PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"] - }, - licenses: { - get: ["GET /licenses/{license}"], - getAllCommonlyUsed: ["GET /licenses"], - getForRepo: ["GET /repos/{owner}/{repo}/license"] - }, - markdown: { - render: ["POST /markdown"], - renderRaw: ["POST /markdown/raw", { - headers: { - "content-type": "text/plain; charset=utf-8" - } - }] - }, - meta: { - get: ["GET /meta"], - getOctocat: ["GET /octocat"], - getZen: ["GET /zen"], - root: ["GET /"] - }, - migrations: { - cancelImport: ["DELETE /repos/{owner}/{repo}/import"], - deleteArchiveForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/archive"], - deleteArchiveForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/archive"], - downloadArchiveForOrg: ["GET /orgs/{org}/migrations/{migration_id}/archive"], - getArchiveForAuthenticatedUser: ["GET /user/migrations/{migration_id}/archive"], - getCommitAuthors: ["GET /repos/{owner}/{repo}/import/authors"], - getImportStatus: ["GET /repos/{owner}/{repo}/import"], - getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"], - getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}"], - getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}"], - listForAuthenticatedUser: ["GET /user/migrations"], - listForOrg: ["GET /orgs/{org}/migrations"], - listReposForAuthenticatedUser: ["GET /user/migrations/{migration_id}/repositories"], - listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories"], - listReposForUser: ["GET /user/migrations/{migration_id}/repositories", {}, { - renamed: ["migrations", "listReposForAuthenticatedUser"] - }], - mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"], - setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"], - startForAuthenticatedUser: ["POST /user/migrations"], - startForOrg: ["POST /orgs/{org}/migrations"], - startImport: ["PUT /repos/{owner}/{repo}/import"], - unlockRepoForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock"], - unlockRepoForOrg: ["DELETE 
/orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock"], - updateImport: ["PATCH /repos/{owner}/{repo}/import"] - }, - orgs: { - blockUser: ["PUT /orgs/{org}/blocks/{username}"], - cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"], - checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], - checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], - checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], - convertMemberToOutsideCollaborator: ["PUT /orgs/{org}/outside_collaborators/{username}"], - createInvitation: ["POST /orgs/{org}/invitations"], - createWebhook: ["POST /orgs/{org}/hooks"], - deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], - get: ["GET /orgs/{org}"], - getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], - getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], - getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], - getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"], - getWebhookDelivery: ["GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}"], - list: ["GET /organizations"], - listAppInstallations: ["GET /orgs/{org}/installations"], - listBlockedUsers: ["GET /orgs/{org}/blocks"], - listCustomRoles: ["GET /organizations/{organization_id}/custom_roles"], - listFailedInvitations: ["GET /orgs/{org}/failed_invitations"], - listForAuthenticatedUser: ["GET /user/orgs"], - listForUser: ["GET /users/{username}/orgs"], - listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], - listMembers: ["GET /orgs/{org}/members"], - listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], - listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], - listPendingInvitations: ["GET /orgs/{org}/invitations"], - listPublicMembers: ["GET /orgs/{org}/public_members"], - listWebhookDeliveries: ["GET /orgs/{org}/hooks/{hook_id}/deliveries"], - listWebhooks: ["GET /orgs/{org}/hooks"], - pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], - redeliverWebhookDelivery: ["POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"], - removeMember: ["DELETE /orgs/{org}/members/{username}"], - removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], - removeOutsideCollaborator: ["DELETE /orgs/{org}/outside_collaborators/{username}"], - removePublicMembershipForAuthenticatedUser: ["DELETE /orgs/{org}/public_members/{username}"], - setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], - setPublicMembershipForAuthenticatedUser: ["PUT /orgs/{org}/public_members/{username}"], - unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], - update: ["PATCH /orgs/{org}"], - updateMembershipForAuthenticatedUser: ["PATCH /user/memberships/orgs/{org}"], - updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"], - updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"] - }, - packages: { - deletePackageForAuthenticatedUser: ["DELETE /user/packages/{package_type}/{package_name}"], - deletePackageForOrg: ["DELETE /orgs/{org}/packages/{package_type}/{package_name}"], - deletePackageForUser: ["DELETE /users/{username}/packages/{package_type}/{package_name}"], - deletePackageVersionForAuthenticatedUser: ["DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}"], - deletePackageVersionForOrg: ["DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"], - deletePackageVersionForUser: ["DELETE 
/users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"], - getAllPackageVersionsForAPackageOwnedByAnOrg: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions", {}, { - renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] - }], - getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions", {}, { - renamed: ["packages", "getAllPackageVersionsForPackageOwnedByAuthenticatedUser"] - }], - getAllPackageVersionsForPackageOwnedByAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions"], - getAllPackageVersionsForPackageOwnedByOrg: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions"], - getAllPackageVersionsForPackageOwnedByUser: ["GET /users/{username}/packages/{package_type}/{package_name}/versions"], - getPackageForAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}"], - getPackageForOrganization: ["GET /orgs/{org}/packages/{package_type}/{package_name}"], - getPackageForUser: ["GET /users/{username}/packages/{package_type}/{package_name}"], - getPackageVersionForAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}"], - getPackageVersionForOrganization: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"], - getPackageVersionForUser: ["GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"], - listPackagesForAuthenticatedUser: ["GET /user/packages"], - listPackagesForOrganization: ["GET /orgs/{org}/packages"], - listPackagesForUser: ["GET /users/{username}/packages"], - restorePackageForAuthenticatedUser: ["POST /user/packages/{package_type}/{package_name}/restore{?token}"], - restorePackageForOrg: ["POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}"], - restorePackageForUser: ["POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}"], - restorePackageVersionForAuthenticatedUser: ["POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"], - restorePackageVersionForOrg: ["POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"], - restorePackageVersionForUser: ["POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"] - }, - projects: { - addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}"], - createCard: ["POST /projects/columns/{column_id}/cards"], - createColumn: ["POST /projects/{project_id}/columns"], - createForAuthenticatedUser: ["POST /user/projects"], - createForOrg: ["POST /orgs/{org}/projects"], - createForRepo: ["POST /repos/{owner}/{repo}/projects"], - delete: ["DELETE /projects/{project_id}"], - deleteCard: ["DELETE /projects/columns/cards/{card_id}"], - deleteColumn: ["DELETE /projects/columns/{column_id}"], - get: ["GET /projects/{project_id}"], - getCard: ["GET /projects/columns/cards/{card_id}"], - getColumn: ["GET /projects/columns/{column_id}"], - getPermissionForUser: ["GET /projects/{project_id}/collaborators/{username}/permission"], - listCards: ["GET /projects/columns/{column_id}/cards"], - listCollaborators: ["GET /projects/{project_id}/collaborators"], - listColumns: ["GET /projects/{project_id}/columns"], - listForOrg: ["GET /orgs/{org}/projects"], - listForRepo: ["GET /repos/{owner}/{repo}/projects"], - listForUser: ["GET /users/{username}/projects"], - moveCard: ["POST 
/projects/columns/cards/{card_id}/moves"], - moveColumn: ["POST /projects/columns/{column_id}/moves"], - removeCollaborator: ["DELETE /projects/{project_id}/collaborators/{username}"], - update: ["PATCH /projects/{project_id}"], - updateCard: ["PATCH /projects/columns/cards/{card_id}"], - updateColumn: ["PATCH /projects/columns/{column_id}"] - }, - pulls: { - checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], - create: ["POST /repos/{owner}/{repo}/pulls"], - createReplyForReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies"], - createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], - createReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments"], - deletePendingReview: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], - deleteReviewComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}"], - dismissReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals"], - get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], - getReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], - getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], - list: ["GET /repos/{owner}/{repo}/pulls"], - listCommentsForReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"], - listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], - listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], - listRequestedReviewers: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], - listReviewComments: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"], - listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], - listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], - merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], - removeRequestedReviewers: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], - requestReviewers: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], - submitReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"], - update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], - updateBranch: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch"], - updateReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], - updateReviewComment: ["PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"] - }, - rateLimit: { - get: ["GET /rate_limit"] - }, - reactions: { - createForCommitComment: ["POST /repos/{owner}/{repo}/comments/{comment_id}/reactions"], - createForIssue: ["POST /repos/{owner}/{repo}/issues/{issue_number}/reactions"], - createForIssueComment: ["POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"], - createForPullRequestReviewComment: ["POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"], - createForRelease: ["POST /repos/{owner}/{repo}/releases/{release_id}/reactions"], - createForTeamDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"], - createForTeamDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"], - deleteForCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}"], - deleteForIssue: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}"], - 
deleteForIssueComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}"], - deleteForPullRequestComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}"], - deleteForRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}"], - deleteForTeamDiscussion: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}"], - deleteForTeamDiscussionComment: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}"], - listForCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions"], - listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"], - listForIssueComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"], - listForPullRequestReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"], - listForRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}/reactions"], - listForTeamDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"], - listForTeamDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"] - }, - repos: { - acceptInvitation: ["PATCH /user/repository_invitations/{invitation_id}", {}, { - renamed: ["repos", "acceptInvitationForAuthenticatedUser"] - }], - acceptInvitationForAuthenticatedUser: ["PATCH /user/repository_invitations/{invitation_id}"], - addAppAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps" - }], - addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], - addStatusCheckContexts: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts" - }], - addTeamAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams" - }], - addUserAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users" - }], - checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], - checkVulnerabilityAlerts: ["GET /repos/{owner}/{repo}/vulnerability-alerts"], - codeownersErrors: ["GET /repos/{owner}/{repo}/codeowners/errors"], - compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], - compareCommitsWithBasehead: ["GET /repos/{owner}/{repo}/compare/{basehead}"], - createAutolink: ["POST /repos/{owner}/{repo}/autolinks"], - createCommitComment: ["POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"], - createCommitSignatureProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"], - createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], - createDeployKey: ["POST /repos/{owner}/{repo}/keys"], - createDeployment: ["POST /repos/{owner}/{repo}/deployments"], - createDeploymentStatus: ["POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], - createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], - createForAuthenticatedUser: ["POST /user/repos"], - createFork: ["POST /repos/{owner}/{repo}/forks"], - createInOrg: ["POST /orgs/{org}/repos"], - createOrUpdateEnvironment: ["PUT /repos/{owner}/{repo}/environments/{environment_name}"], - createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], - createPagesSite: ["POST 
/repos/{owner}/{repo}/pages"], - createRelease: ["POST /repos/{owner}/{repo}/releases"], - createTagProtection: ["POST /repos/{owner}/{repo}/tags/protection"], - createUsingTemplate: ["POST /repos/{template_owner}/{template_repo}/generate"], - createWebhook: ["POST /repos/{owner}/{repo}/hooks"], - declineInvitation: ["DELETE /user/repository_invitations/{invitation_id}", {}, { - renamed: ["repos", "declineInvitationForAuthenticatedUser"] - }], - declineInvitationForAuthenticatedUser: ["DELETE /user/repository_invitations/{invitation_id}"], - delete: ["DELETE /repos/{owner}/{repo}"], - deleteAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], - deleteAdminBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], - deleteAnEnvironment: ["DELETE /repos/{owner}/{repo}/environments/{environment_name}"], - deleteAutolink: ["DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}"], - deleteBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection"], - deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], - deleteCommitSignatureProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"], - deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], - deleteDeployment: ["DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"], - deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], - deleteInvitation: ["DELETE /repos/{owner}/{repo}/invitations/{invitation_id}"], - deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages"], - deletePullRequestReviewProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], - deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], - deleteReleaseAsset: ["DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"], - deleteTagProtection: ["DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}"], - deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], - disableAutomatedSecurityFixes: ["DELETE /repos/{owner}/{repo}/automated-security-fixes"], - disableLfsForRepo: ["DELETE /repos/{owner}/{repo}/lfs"], - disableVulnerabilityAlerts: ["DELETE /repos/{owner}/{repo}/vulnerability-alerts"], - downloadArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}", {}, { - renamed: ["repos", "downloadZipballArchive"] - }], - downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"], - downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"], - enableAutomatedSecurityFixes: ["PUT /repos/{owner}/{repo}/automated-security-fixes"], - enableLfsForRepo: ["PUT /repos/{owner}/{repo}/lfs"], - enableVulnerabilityAlerts: ["PUT /repos/{owner}/{repo}/vulnerability-alerts"], - generateReleaseNotes: ["POST /repos/{owner}/{repo}/releases/generate-notes"], - get: ["GET /repos/{owner}/{repo}"], - getAccessRestrictions: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], - getAdminBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], - getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"], - getAllStatusCheckContexts: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"], - getAllTopics: ["GET /repos/{owner}/{repo}/topics"], - getAppsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"], - getAutolink: ["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"], - getBranch: ["GET 
/repos/{owner}/{repo}/branches/{branch}"], - getBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection"], - getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], - getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], - getCollaboratorPermissionLevel: ["GET /repos/{owner}/{repo}/collaborators/{username}/permission"], - getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], - getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], - getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], - getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], - getCommitSignatureProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"], - getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], - getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], - getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], - getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], - getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], - getDeploymentStatus: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}"], - getEnvironment: ["GET /repos/{owner}/{repo}/environments/{environment_name}"], - getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], - getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], - getPages: ["GET /repos/{owner}/{repo}/pages"], - getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], - getPagesHealthCheck: ["GET /repos/{owner}/{repo}/pages/health"], - getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], - getPullRequestReviewProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], - getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], - getReadme: ["GET /repos/{owner}/{repo}/readme"], - getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"], - getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], - getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], - getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], - getStatusChecksProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], - getTeamsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"], - getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], - getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], - getUsersWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"], - getViews: ["GET /repos/{owner}/{repo}/traffic/views"], - getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], - getWebhookConfigForRepo: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/config"], - getWebhookDelivery: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}"], - listAutolinks: ["GET /repos/{owner}/{repo}/autolinks"], - listBranches: ["GET /repos/{owner}/{repo}/branches"], - listBranchesForHeadCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head"], - listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], - listCommentsForCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"], - listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], - listCommitStatusesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/statuses"], - listCommits: ["GET 
/repos/{owner}/{repo}/commits"], - listContributors: ["GET /repos/{owner}/{repo}/contributors"], - listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], - listDeploymentStatuses: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], - listDeployments: ["GET /repos/{owner}/{repo}/deployments"], - listForAuthenticatedUser: ["GET /user/repos"], - listForOrg: ["GET /orgs/{org}/repos"], - listForUser: ["GET /users/{username}/repos"], - listForks: ["GET /repos/{owner}/{repo}/forks"], - listInvitations: ["GET /repos/{owner}/{repo}/invitations"], - listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"], - listLanguages: ["GET /repos/{owner}/{repo}/languages"], - listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], - listPublic: ["GET /repositories"], - listPullRequestsAssociatedWithCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls"], - listReleaseAssets: ["GET /repos/{owner}/{repo}/releases/{release_id}/assets"], - listReleases: ["GET /repos/{owner}/{repo}/releases"], - listTagProtection: ["GET /repos/{owner}/{repo}/tags/protection"], - listTags: ["GET /repos/{owner}/{repo}/tags"], - listTeams: ["GET /repos/{owner}/{repo}/teams"], - listWebhookDeliveries: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries"], - listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], - merge: ["POST /repos/{owner}/{repo}/merges"], - mergeUpstream: ["POST /repos/{owner}/{repo}/merge-upstream"], - pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], - redeliverWebhookDelivery: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"], - removeAppAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps" - }], - removeCollaborator: ["DELETE /repos/{owner}/{repo}/collaborators/{username}"], - removeStatusCheckContexts: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts" - }], - removeStatusCheckProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], - removeTeamAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams" - }], - removeUserAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users" - }], - renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"], - replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics"], - requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], - setAdminBranchProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], - setAppAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps" - }], - setStatusCheckContexts: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts" - }], - setTeamAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams" - }], - setUserAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users" - }], - testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], - transfer: ["POST /repos/{owner}/{repo}/transfer"], - update: ["PATCH /repos/{owner}/{repo}"], - updateBranchProtection: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection"], - 
updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], - updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], - updateInvitation: ["PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"], - updatePullRequestReviewProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], - updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], - updateReleaseAsset: ["PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"], - updateStatusCheckPotection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", {}, { - renamed: ["repos", "updateStatusCheckProtection"] - }], - updateStatusCheckProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], - updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], - updateWebhookConfigForRepo: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config"], - uploadReleaseAsset: ["POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", { - baseUrl: "https://uploads.github.com" - }] - }, - search: { - code: ["GET /search/code"], - commits: ["GET /search/commits"], - issuesAndPullRequests: ["GET /search/issues"], - labels: ["GET /search/labels"], - repos: ["GET /search/repositories"], - topics: ["GET /search/topics"], - users: ["GET /search/users"] - }, - secretScanning: { - getAlert: ["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"], - listAlertsForEnterprise: ["GET /enterprises/{enterprise}/secret-scanning/alerts"], - listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"], - listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"], - listLocationsForAlert: ["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations"], - updateAlert: ["PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"] - }, - teams: { - addOrUpdateMembershipForUserInOrg: ["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"], - addOrUpdateProjectPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}"], - addOrUpdateRepoPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], - checkPermissionsForProjectInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}"], - checkPermissionsForRepoInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], - create: ["POST /orgs/{org}/teams"], - createDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], - createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], - deleteDiscussionCommentInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], - deleteDiscussionInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], - deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], - getByName: ["GET /orgs/{org}/teams/{team_slug}"], - getDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], - getDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], - getMembershipForUserInOrg: ["GET /orgs/{org}/teams/{team_slug}/memberships/{username}"], - list: ["GET /orgs/{org}/teams"], - listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], - listDiscussionCommentsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], - listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], - 
listForAuthenticatedUser: ["GET /user/teams"], - listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], - listPendingInvitationsInOrg: ["GET /orgs/{org}/teams/{team_slug}/invitations"], - listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects"], - listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], - removeMembershipForUserInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"], - removeProjectInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}"], - removeRepoInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], - updateDiscussionCommentInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], - updateDiscussionInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], - updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"] - }, - users: { - addEmailForAuthenticated: ["POST /user/emails", {}, { - renamed: ["users", "addEmailForAuthenticatedUser"] - }], - addEmailForAuthenticatedUser: ["POST /user/emails"], - block: ["PUT /user/blocks/{username}"], - checkBlocked: ["GET /user/blocks/{username}"], - checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], - checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], - createGpgKeyForAuthenticated: ["POST /user/gpg_keys", {}, { - renamed: ["users", "createGpgKeyForAuthenticatedUser"] - }], - createGpgKeyForAuthenticatedUser: ["POST /user/gpg_keys"], - createPublicSshKeyForAuthenticated: ["POST /user/keys", {}, { - renamed: ["users", "createPublicSshKeyForAuthenticatedUser"] - }], - createPublicSshKeyForAuthenticatedUser: ["POST /user/keys"], - deleteEmailForAuthenticated: ["DELETE /user/emails", {}, { - renamed: ["users", "deleteEmailForAuthenticatedUser"] - }], - deleteEmailForAuthenticatedUser: ["DELETE /user/emails"], - deleteGpgKeyForAuthenticated: ["DELETE /user/gpg_keys/{gpg_key_id}", {}, { - renamed: ["users", "deleteGpgKeyForAuthenticatedUser"] - }], - deleteGpgKeyForAuthenticatedUser: ["DELETE /user/gpg_keys/{gpg_key_id}"], - deletePublicSshKeyForAuthenticated: ["DELETE /user/keys/{key_id}", {}, { - renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"] - }], - deletePublicSshKeyForAuthenticatedUser: ["DELETE /user/keys/{key_id}"], - follow: ["PUT /user/following/{username}"], - getAuthenticated: ["GET /user"], - getByUsername: ["GET /users/{username}"], - getContextForUser: ["GET /users/{username}/hovercard"], - getGpgKeyForAuthenticated: ["GET /user/gpg_keys/{gpg_key_id}", {}, { - renamed: ["users", "getGpgKeyForAuthenticatedUser"] - }], - getGpgKeyForAuthenticatedUser: ["GET /user/gpg_keys/{gpg_key_id}"], - getPublicSshKeyForAuthenticated: ["GET /user/keys/{key_id}", {}, { - renamed: ["users", "getPublicSshKeyForAuthenticatedUser"] - }], - getPublicSshKeyForAuthenticatedUser: ["GET /user/keys/{key_id}"], - list: ["GET /users"], - listBlockedByAuthenticated: ["GET /user/blocks", {}, { - renamed: ["users", "listBlockedByAuthenticatedUser"] - }], - listBlockedByAuthenticatedUser: ["GET /user/blocks"], - listEmailsForAuthenticated: ["GET /user/emails", {}, { - renamed: ["users", "listEmailsForAuthenticatedUser"] - }], - listEmailsForAuthenticatedUser: ["GET /user/emails"], - listFollowedByAuthenticated: ["GET /user/following", {}, { - renamed: ["users", "listFollowedByAuthenticatedUser"] - }], - listFollowedByAuthenticatedUser: ["GET /user/following"], - listFollowersForAuthenticatedUser: ["GET /user/followers"], - listFollowersForUser: ["GET 
/users/{username}/followers"], - listFollowingForUser: ["GET /users/{username}/following"], - listGpgKeysForAuthenticated: ["GET /user/gpg_keys", {}, { - renamed: ["users", "listGpgKeysForAuthenticatedUser"] - }], - listGpgKeysForAuthenticatedUser: ["GET /user/gpg_keys"], - listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], - listPublicEmailsForAuthenticated: ["GET /user/public_emails", {}, { - renamed: ["users", "listPublicEmailsForAuthenticatedUser"] - }], - listPublicEmailsForAuthenticatedUser: ["GET /user/public_emails"], - listPublicKeysForUser: ["GET /users/{username}/keys"], - listPublicSshKeysForAuthenticated: ["GET /user/keys", {}, { - renamed: ["users", "listPublicSshKeysForAuthenticatedUser"] - }], - listPublicSshKeysForAuthenticatedUser: ["GET /user/keys"], - setPrimaryEmailVisibilityForAuthenticated: ["PATCH /user/email/visibility", {}, { - renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"] - }], - setPrimaryEmailVisibilityForAuthenticatedUser: ["PATCH /user/email/visibility"], - unblock: ["DELETE /user/blocks/{username}"], - unfollow: ["DELETE /user/following/{username}"], - updateAuthenticated: ["PATCH /user"] - } -}; - -const VERSION = "5.16.2"; - -function endpointsToMethods(octokit, endpointsMap) { - const newMethods = {}; - - for (const [scope, endpoints] of Object.entries(endpointsMap)) { - for (const [methodName, endpoint] of Object.entries(endpoints)) { - const [route, defaults, decorations] = endpoint; - const [method, url] = route.split(/ /); - const endpointDefaults = Object.assign({ - method, - url - }, defaults); - - if (!newMethods[scope]) { - newMethods[scope] = {}; - } - - const scopeMethods = newMethods[scope]; - - if (decorations) { - scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations); - continue; - } - - scopeMethods[methodName] = octokit.request.defaults(endpointDefaults); - } - } - - return newMethods; -} - -function decorate(octokit, scope, methodName, defaults, decorations) { - const requestWithDefaults = octokit.request.defaults(defaults); - /* istanbul ignore next */ - - function withDecorations(...args) { - // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData` - - if (decorations.mapToData) { - options = Object.assign({}, options, { - data: options[decorations.mapToData], - [decorations.mapToData]: undefined - }); - return requestWithDefaults(options); - } - - if (decorations.renamed) { - const [newScope, newMethodName] = decorations.renamed; - octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`); - } - - if (decorations.deprecated) { - octokit.log.warn(decorations.deprecated); - } - - if (decorations.renamedParameters) { - // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - const options = requestWithDefaults.endpoint.merge(...args); - - for (const [name, alias] of Object.entries(decorations.renamedParameters)) { - if (name in options) { - octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". 
Use "${alias}" instead`); - - if (!(alias in options)) { - options[alias] = options[name]; - } - - delete options[name]; - } - } - - return requestWithDefaults(options); - } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - - - return requestWithDefaults(...args); - } - - return Object.assign(withDecorations, requestWithDefaults); -} - -function restEndpointMethods(octokit) { - const api = endpointsToMethods(octokit, Endpoints); - return { - rest: api - }; -} -restEndpointMethods.VERSION = VERSION; -function legacyRestEndpointMethods(octokit) { - const api = endpointsToMethods(octokit, Endpoints); - return _objectSpread2(_objectSpread2({}, api), {}, { - rest: api - }); -} -legacyRestEndpointMethods.VERSION = VERSION; - -exports.legacyRestEndpointMethods = legacyRestEndpointMethods; -exports.restEndpointMethods = restEndpointMethods; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 537: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } - -var deprecation = __nccwpck_require__(8932); -var once = _interopDefault(__nccwpck_require__(1223)); - -const logOnceCode = once(deprecation => console.warn(deprecation)); -const logOnceHeaders = once(deprecation => console.warn(deprecation)); -/** - * Error with extra properties to help with debugging - */ - -class RequestError extends Error { - constructor(message, statusCode, options) { - super(message); // Maintains proper stack trace (only available on V8) - - /* istanbul ignore next */ - - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } - - this.name = "HttpError"; - this.status = statusCode; - let headers; - - if ("headers" in options && typeof options.headers !== "undefined") { - headers = options.headers; - } - - if ("response" in options) { - this.response = options.response; - headers = options.response.headers; - } // redact request credentials without mutating original request options - - - const requestCopy = Object.assign({}, options.request); - - if (options.request.headers.authorization) { - requestCopy.headers = Object.assign({}, options.request.headers, { - authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]") - }); - } - - requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit - // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications - .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended - // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header - .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); - this.request = requestCopy; // deprecations - - Object.defineProperty(this, "code", { - get() { - logOnceCode(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); - return statusCode; - } - - }); - Object.defineProperty(this, "headers", { - get() { - logOnceHeaders(new deprecation.Deprecation("[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.")); - return headers || {}; - } - - }); - } - -} - -exports.RequestError = RequestError; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 6234: -/***/ 
((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } - -var endpoint = __nccwpck_require__(9440); -var universalUserAgent = __nccwpck_require__(5030); -var isPlainObject = __nccwpck_require__(3287); -var nodeFetch = _interopDefault(__nccwpck_require__(467)); -var requestError = __nccwpck_require__(537); - -const VERSION = "5.6.3"; - -function getBufferResponse(response) { - return response.arrayBuffer(); -} - -function fetchWrapper(requestOptions) { - const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console; - - if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { - requestOptions.body = JSON.stringify(requestOptions.body); - } - - let headers = {}; - let status; - let url; - const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch; - return fetch(requestOptions.url, Object.assign({ - method: requestOptions.method, - body: requestOptions.body, - headers: requestOptions.headers, - redirect: requestOptions.redirect - }, // `requestOptions.request.agent` type is incompatible - // see https://github.com/octokit/types.ts/pull/264 - requestOptions.request)).then(async response => { - url = response.url; - status = response.status; - - for (const keyAndValue of response.headers) { - headers[keyAndValue[0]] = keyAndValue[1]; - } - - if ("deprecation" in headers) { - const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/); - const deprecationLink = matches && matches.pop(); - log.warn(`[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. 
See ${deprecationLink}` : ""}`); - } - - if (status === 204 || status === 205) { - return; - } // GitHub API returns 200 for HEAD requests - - - if (requestOptions.method === "HEAD") { - if (status < 400) { - return; - } - - throw new requestError.RequestError(response.statusText, status, { - response: { - url, - status, - headers, - data: undefined - }, - request: requestOptions - }); - } - - if (status === 304) { - throw new requestError.RequestError("Not modified", status, { - response: { - url, - status, - headers, - data: await getResponseData(response) - }, - request: requestOptions - }); - } - - if (status >= 400) { - const data = await getResponseData(response); - const error = new requestError.RequestError(toErrorMessage(data), status, { - response: { - url, - status, - headers, - data - }, - request: requestOptions - }); - throw error; - } - - return getResponseData(response); - }).then(data => { - return { - status, - url, - headers, - data - }; - }).catch(error => { - if (error instanceof requestError.RequestError) throw error; - throw new requestError.RequestError(error.message, 500, { - request: requestOptions - }); - }); -} - -async function getResponseData(response) { - const contentType = response.headers.get("content-type"); - - if (/application\/json/.test(contentType)) { - return response.json(); - } - - if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { - return response.text(); - } - - return getBufferResponse(response); -} - -function toErrorMessage(data) { - if (typeof data === "string") return data; // istanbul ignore else - just in case - - if ("message" in data) { - if (Array.isArray(data.errors)) { - return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}`; - } - - return data.message; - } // istanbul ignore next - just in case - - - return `Unknown error: ${JSON.stringify(data)}`; -} - -function withDefaults(oldEndpoint, newDefaults) { - const endpoint = oldEndpoint.defaults(newDefaults); - - const newApi = function (route, parameters) { - const endpointOptions = endpoint.merge(route, parameters); - - if (!endpointOptions.request || !endpointOptions.request.hook) { - return fetchWrapper(endpoint.parse(endpointOptions)); - } - - const request = (route, parameters) => { - return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters))); - }; - - Object.assign(request, { - endpoint, - defaults: withDefaults.bind(null, endpoint) - }); - return endpointOptions.request.hook(request, endpointOptions); - }; - - return Object.assign(newApi, { - endpoint, - defaults: withDefaults.bind(null, endpoint) - }); -} - -const request = withDefaults(endpoint.endpoint, { - headers: { - "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}` - } -}); - -exports.request = request; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 3682: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var register = __nccwpck_require__(4670) -var addHook = __nccwpck_require__(5549) -var removeHook = __nccwpck_require__(6819) - -// bind with array of arguments: https://stackoverflow.com/a/21792913 -var bind = Function.bind -var bindable = bind.bind(bind) - -function bindApi (hook, state, name) { - var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state]) - hook.api = { remove: removeHookRef } - hook.remove = removeHookRef - - ;['before', 'error', 'after', 'wrap'].forEach(function (kind) { - var args = name ? 
[state, kind, name] : [state, kind] - hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args) - }) -} - -function HookSingular () { - var singularHookName = 'h' - var singularHookState = { - registry: {} - } - var singularHook = register.bind(null, singularHookState, singularHookName) - bindApi(singularHook, singularHookState, singularHookName) - return singularHook -} - -function HookCollection () { - var state = { - registry: {} - } - - var hook = register.bind(null, state) - bindApi(hook, state) - - return hook -} - -var collectionHookDeprecationMessageDisplayed = false -function Hook () { - if (!collectionHookDeprecationMessageDisplayed) { - console.warn('[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4') - collectionHookDeprecationMessageDisplayed = true - } - return HookCollection() -} - -Hook.Singular = HookSingular.bind() -Hook.Collection = HookCollection.bind() - -module.exports = Hook -// expose constructors as a named property for TypeScript -module.exports.Hook = Hook -module.exports.Singular = Hook.Singular -module.exports.Collection = Hook.Collection - - -/***/ }), - -/***/ 5549: -/***/ ((module) => { - -module.exports = addHook; - -function addHook(state, kind, name, hook) { - var orig = hook; - if (!state.registry[name]) { - state.registry[name] = []; - } - - if (kind === "before") { - hook = function (method, options) { - return Promise.resolve() - .then(orig.bind(null, options)) - .then(method.bind(null, options)); - }; - } - - if (kind === "after") { - hook = function (method, options) { - var result; - return Promise.resolve() - .then(method.bind(null, options)) - .then(function (result_) { - result = result_; - return orig(result, options); - }) - .then(function () { - return result; - }); - }; - } - - if (kind === "error") { - hook = function (method, options) { - return Promise.resolve() - .then(method.bind(null, options)) - .catch(function (error) { - return orig(error, options); - }); - }; - } - - state.registry[name].push({ - hook: hook, - orig: orig, - }); -} - - -/***/ }), - -/***/ 4670: -/***/ ((module) => { - -module.exports = register; - -function register(state, name, method, options) { - if (typeof method !== "function") { - throw new Error("method for before hook must be a function"); - } - - if (!options) { - options = {}; - } - - if (Array.isArray(name)) { - return name.reverse().reduce(function (callback, name) { - return register.bind(null, state, name, callback, options); - }, method)(); - } - - return Promise.resolve().then(function () { - if (!state.registry[name]) { - return method(options); - } - - return state.registry[name].reduce(function (method, registered) { - return registered.hook.bind(null, method, options); - }, method)(); - }); -} - - -/***/ }), - -/***/ 6819: -/***/ ((module) => { - -module.exports = removeHook; - -function removeHook(state, name, method) { - if (!state.registry[name]) { - return; - } - - var index = state.registry[name] - .map(function (registered) { - return registered.orig; - }) - .indexOf(method); - - if (index === -1) { - return; - } - - state.registry[name].splice(index, 1); -} - - -/***/ }), - -/***/ 8932: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -class Deprecation extends Error { - constructor(message) { - super(message); // Maintains proper stack trace (only available on V8) - - /* istanbul ignore next */ - - if 
(Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } - - this.name = 'Deprecation'; - } - -} - -exports.Deprecation = Deprecation; - - -/***/ }), - -/***/ 9618: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(7758) -const path = __nccwpck_require__(5622) -const mkdirsSync = __nccwpck_require__(2915).mkdirsSync -const utimesMillisSync = __nccwpck_require__(2548).utimesMillisSync -const stat = __nccwpck_require__(3901) - -function copySync (src, dest, opts) { - if (typeof opts === 'function') { - opts = { filter: opts } - } - - opts = opts || {} - opts.clobber = 'clobber' in opts ? !!opts.clobber : true // default to true for now - opts.overwrite = 'overwrite' in opts ? !!opts.overwrite : opts.clobber // overwrite falls back to clobber - - // Warn about using preserveTimestamps on 32-bit node - if (opts.preserveTimestamps && process.arch === 'ia32') { - process.emitWarning( - 'Using the preserveTimestamps option in 32-bit node is not recommended;\n\n' + - '\tsee https://github.com/jprichardson/node-fs-extra/issues/269', - 'Warning', 'fs-extra-WARN0002' - ) - } - - const { srcStat, destStat } = stat.checkPathsSync(src, dest, 'copy', opts) - stat.checkParentPathsSync(src, srcStat, dest, 'copy') - return handleFilterAndCopy(destStat, src, dest, opts) -} - -function handleFilterAndCopy (destStat, src, dest, opts) { - if (opts.filter && !opts.filter(src, dest)) return - const destParent = path.dirname(dest) - if (!fs.existsSync(destParent)) mkdirsSync(destParent) - return getStats(destStat, src, dest, opts) -} - -function startCopy (destStat, src, dest, opts) { - if (opts.filter && !opts.filter(src, dest)) return - return getStats(destStat, src, dest, opts) -} - -function getStats (destStat, src, dest, opts) { - const statSync = opts.dereference ? 
fs.statSync : fs.lstatSync - const srcStat = statSync(src) - - if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts) - else if (srcStat.isFile() || - srcStat.isCharacterDevice() || - srcStat.isBlockDevice()) return onFile(srcStat, destStat, src, dest, opts) - else if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts) - else if (srcStat.isSocket()) throw new Error(`Cannot copy a socket file: ${src}`) - else if (srcStat.isFIFO()) throw new Error(`Cannot copy a FIFO pipe: ${src}`) - throw new Error(`Unknown file: ${src}`) -} - -function onFile (srcStat, destStat, src, dest, opts) { - if (!destStat) return copyFile(srcStat, src, dest, opts) - return mayCopyFile(srcStat, src, dest, opts) -} - -function mayCopyFile (srcStat, src, dest, opts) { - if (opts.overwrite) { - fs.unlinkSync(dest) - return copyFile(srcStat, src, dest, opts) - } else if (opts.errorOnExist) { - throw new Error(`'${dest}' already exists`) - } -} - -function copyFile (srcStat, src, dest, opts) { - fs.copyFileSync(src, dest) - if (opts.preserveTimestamps) handleTimestamps(srcStat.mode, src, dest) - return setDestMode(dest, srcStat.mode) -} - -function handleTimestamps (srcMode, src, dest) { - // Make sure the file is writable before setting the timestamp - // otherwise open fails with EPERM when invoked with 'r+' - // (through utimes call) - if (fileIsNotWritable(srcMode)) makeFileWritable(dest, srcMode) - return setDestTimestamps(src, dest) -} - -function fileIsNotWritable (srcMode) { - return (srcMode & 0o200) === 0 -} - -function makeFileWritable (dest, srcMode) { - return setDestMode(dest, srcMode | 0o200) -} - -function setDestMode (dest, srcMode) { - return fs.chmodSync(dest, srcMode) -} - -function setDestTimestamps (src, dest) { - // The initial srcStat.atime cannot be trusted - // because it is modified by the read(2) system call - // (See https://nodejs.org/api/fs.html#fs_stat_time_values) - const updatedSrcStat = fs.statSync(src) - return utimesMillisSync(dest, updatedSrcStat.atime, updatedSrcStat.mtime) -} - -function onDir (srcStat, destStat, src, dest, opts) { - if (!destStat) return mkDirAndCopy(srcStat.mode, src, dest, opts) - return copyDir(src, dest, opts) -} - -function mkDirAndCopy (srcMode, src, dest, opts) { - fs.mkdirSync(dest) - copyDir(src, dest, opts) - return setDestMode(dest, srcMode) -} - -function copyDir (src, dest, opts) { - fs.readdirSync(src).forEach(item => copyDirItem(item, src, dest, opts)) -} - -function copyDirItem (item, src, dest, opts) { - const srcItem = path.join(src, item) - const destItem = path.join(dest, item) - const { destStat } = stat.checkPathsSync(srcItem, destItem, 'copy', opts) - return startCopy(destStat, srcItem, destItem, opts) -} - -function onLink (destStat, src, dest, opts) { - let resolvedSrc = fs.readlinkSync(src) - if (opts.dereference) { - resolvedSrc = path.resolve(process.cwd(), resolvedSrc) - } - - if (!destStat) { - return fs.symlinkSync(resolvedSrc, dest) - } else { - let resolvedDest - try { - resolvedDest = fs.readlinkSync(dest) - } catch (err) { - // dest exists and is a regular file or directory, - // Windows may throw UNKNOWN error. If dest already exists, - // fs throws error anyway, so no need to guard against it here. 
- if (err.code === 'EINVAL' || err.code === 'UNKNOWN') return fs.symlinkSync(resolvedSrc, dest) - throw err - } - if (opts.dereference) { - resolvedDest = path.resolve(process.cwd(), resolvedDest) - } - if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) { - throw new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`) - } - - // prevent copy if src is a subdir of dest since unlinking - // dest in this case would result in removing src contents - // and therefore a broken symlink would be created. - if (fs.statSync(dest).isDirectory() && stat.isSrcSubdir(resolvedDest, resolvedSrc)) { - throw new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`) - } - return copyLink(resolvedSrc, dest) - } -} - -function copyLink (resolvedSrc, dest) { - fs.unlinkSync(dest) - return fs.symlinkSync(resolvedSrc, dest) -} - -module.exports = copySync - - -/***/ }), - -/***/ 8834: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(7758) -const path = __nccwpck_require__(5622) -const mkdirs = __nccwpck_require__(2915).mkdirs -const pathExists = __nccwpck_require__(3835).pathExists -const utimesMillis = __nccwpck_require__(2548).utimesMillis -const stat = __nccwpck_require__(3901) - -function copy (src, dest, opts, cb) { - if (typeof opts === 'function' && !cb) { - cb = opts - opts = {} - } else if (typeof opts === 'function') { - opts = { filter: opts } - } - - cb = cb || function () {} - opts = opts || {} - - opts.clobber = 'clobber' in opts ? !!opts.clobber : true // default to true for now - opts.overwrite = 'overwrite' in opts ? !!opts.overwrite : opts.clobber // overwrite falls back to clobber - - // Warn about using preserveTimestamps on 32-bit node - if (opts.preserveTimestamps && process.arch === 'ia32') { - process.emitWarning( - 'Using the preserveTimestamps option in 32-bit node is not recommended;\n\n' + - '\tsee https://github.com/jprichardson/node-fs-extra/issues/269', - 'Warning', 'fs-extra-WARN0001' - ) - } - - stat.checkPaths(src, dest, 'copy', opts, (err, stats) => { - if (err) return cb(err) - const { srcStat, destStat } = stats - stat.checkParentPaths(src, srcStat, dest, 'copy', err => { - if (err) return cb(err) - if (opts.filter) return handleFilter(checkParentDir, destStat, src, dest, opts, cb) - return checkParentDir(destStat, src, dest, opts, cb) - }) - }) -} - -function checkParentDir (destStat, src, dest, opts, cb) { - const destParent = path.dirname(dest) - pathExists(destParent, (err, dirExists) => { - if (err) return cb(err) - if (dirExists) return getStats(destStat, src, dest, opts, cb) - mkdirs(destParent, err => { - if (err) return cb(err) - return getStats(destStat, src, dest, opts, cb) - }) - }) -} - -function handleFilter (onInclude, destStat, src, dest, opts, cb) { - Promise.resolve(opts.filter(src, dest)).then(include => { - if (include) return onInclude(destStat, src, dest, opts, cb) - return cb() - }, error => cb(error)) -} - -function startCopy (destStat, src, dest, opts, cb) { - if (opts.filter) return handleFilter(getStats, destStat, src, dest, opts, cb) - return getStats(destStat, src, dest, opts, cb) -} - -function getStats (destStat, src, dest, opts, cb) { - const stat = opts.dereference ? 
fs.stat : fs.lstat - stat(src, (err, srcStat) => { - if (err) return cb(err) - - if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts, cb) - else if (srcStat.isFile() || - srcStat.isCharacterDevice() || - srcStat.isBlockDevice()) return onFile(srcStat, destStat, src, dest, opts, cb) - else if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts, cb) - else if (srcStat.isSocket()) return cb(new Error(`Cannot copy a socket file: ${src}`)) - else if (srcStat.isFIFO()) return cb(new Error(`Cannot copy a FIFO pipe: ${src}`)) - return cb(new Error(`Unknown file: ${src}`)) - }) -} - -function onFile (srcStat, destStat, src, dest, opts, cb) { - if (!destStat) return copyFile(srcStat, src, dest, opts, cb) - return mayCopyFile(srcStat, src, dest, opts, cb) -} - -function mayCopyFile (srcStat, src, dest, opts, cb) { - if (opts.overwrite) { - fs.unlink(dest, err => { - if (err) return cb(err) - return copyFile(srcStat, src, dest, opts, cb) - }) - } else if (opts.errorOnExist) { - return cb(new Error(`'${dest}' already exists`)) - } else return cb() -} - -function copyFile (srcStat, src, dest, opts, cb) { - fs.copyFile(src, dest, err => { - if (err) return cb(err) - if (opts.preserveTimestamps) return handleTimestampsAndMode(srcStat.mode, src, dest, cb) - return setDestMode(dest, srcStat.mode, cb) - }) -} - -function handleTimestampsAndMode (srcMode, src, dest, cb) { - // Make sure the file is writable before setting the timestamp - // otherwise open fails with EPERM when invoked with 'r+' - // (through utimes call) - if (fileIsNotWritable(srcMode)) { - return makeFileWritable(dest, srcMode, err => { - if (err) return cb(err) - return setDestTimestampsAndMode(srcMode, src, dest, cb) - }) - } - return setDestTimestampsAndMode(srcMode, src, dest, cb) -} - -function fileIsNotWritable (srcMode) { - return (srcMode & 0o200) === 0 -} - -function makeFileWritable (dest, srcMode, cb) { - return setDestMode(dest, srcMode | 0o200, cb) -} - -function setDestTimestampsAndMode (srcMode, src, dest, cb) { - setDestTimestamps(src, dest, err => { - if (err) return cb(err) - return setDestMode(dest, srcMode, cb) - }) -} - -function setDestMode (dest, srcMode, cb) { - return fs.chmod(dest, srcMode, cb) -} - -function setDestTimestamps (src, dest, cb) { - // The initial srcStat.atime cannot be trusted - // because it is modified by the read(2) system call - // (See https://nodejs.org/api/fs.html#fs_stat_time_values) - fs.stat(src, (err, updatedSrcStat) => { - if (err) return cb(err) - return utimesMillis(dest, updatedSrcStat.atime, updatedSrcStat.mtime, cb) - }) -} - -function onDir (srcStat, destStat, src, dest, opts, cb) { - if (!destStat) return mkDirAndCopy(srcStat.mode, src, dest, opts, cb) - return copyDir(src, dest, opts, cb) -} - -function mkDirAndCopy (srcMode, src, dest, opts, cb) { - fs.mkdir(dest, err => { - if (err) return cb(err) - copyDir(src, dest, opts, err => { - if (err) return cb(err) - return setDestMode(dest, srcMode, cb) - }) - }) -} - -function copyDir (src, dest, opts, cb) { - fs.readdir(src, (err, items) => { - if (err) return cb(err) - return copyDirItems(items, src, dest, opts, cb) - }) -} - -function copyDirItems (items, src, dest, opts, cb) { - const item = items.pop() - if (!item) return cb() - return copyDirItem(items, item, src, dest, opts, cb) -} - -function copyDirItem (items, item, src, dest, opts, cb) { - const srcItem = path.join(src, item) - const destItem = path.join(dest, item) - stat.checkPaths(srcItem, destItem, 'copy', opts, (err, stats) => { 
- if (err) return cb(err) - const { destStat } = stats - startCopy(destStat, srcItem, destItem, opts, err => { - if (err) return cb(err) - return copyDirItems(items, src, dest, opts, cb) - }) - }) -} - -function onLink (destStat, src, dest, opts, cb) { - fs.readlink(src, (err, resolvedSrc) => { - if (err) return cb(err) - if (opts.dereference) { - resolvedSrc = path.resolve(process.cwd(), resolvedSrc) - } - - if (!destStat) { - return fs.symlink(resolvedSrc, dest, cb) - } else { - fs.readlink(dest, (err, resolvedDest) => { - if (err) { - // dest exists and is a regular file or directory, - // Windows may throw UNKNOWN error. If dest already exists, - // fs throws error anyway, so no need to guard against it here. - if (err.code === 'EINVAL' || err.code === 'UNKNOWN') return fs.symlink(resolvedSrc, dest, cb) - return cb(err) - } - if (opts.dereference) { - resolvedDest = path.resolve(process.cwd(), resolvedDest) - } - if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) { - return cb(new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`)) - } - - // do not copy if src is a subdir of dest since unlinking - // dest in this case would result in removing src contents - // and therefore a broken symlink would be created. - if (destStat.isDirectory() && stat.isSrcSubdir(resolvedDest, resolvedSrc)) { - return cb(new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`)) - } - return copyLink(resolvedSrc, dest, cb) - }) - } - }) -} - -function copyLink (resolvedSrc, dest, cb) { - fs.unlink(dest, err => { - if (err) return cb(err) - return fs.symlink(resolvedSrc, dest, cb) - }) -} - -module.exports = copy - - -/***/ }), - -/***/ 1335: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const u = __nccwpck_require__(9046).fromCallback -module.exports = { - copy: u(__nccwpck_require__(8834)), - copySync: __nccwpck_require__(9618) -} - - -/***/ }), - -/***/ 6970: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const u = __nccwpck_require__(9046).fromPromise -const fs = __nccwpck_require__(1176) -const path = __nccwpck_require__(5622) -const mkdir = __nccwpck_require__(2915) -const remove = __nccwpck_require__(7357) - -const emptyDir = u(async function emptyDir (dir) { - let items - try { - items = await fs.readdir(dir) - } catch { - return mkdir.mkdirs(dir) - } - - return Promise.all(items.map(item => remove.remove(path.join(dir, item)))) -}) - -function emptyDirSync (dir) { - let items - try { - items = fs.readdirSync(dir) - } catch { - return mkdir.mkdirsSync(dir) - } - - items.forEach(item => { - item = path.join(dir, item) - remove.removeSync(item) - }) -} - -module.exports = { - emptyDirSync, - emptydirSync: emptyDirSync, - emptyDir, - emptydir: emptyDir -} - - -/***/ }), - -/***/ 2164: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const u = __nccwpck_require__(9046).fromCallback -const path = __nccwpck_require__(5622) -const fs = __nccwpck_require__(7758) -const mkdir = __nccwpck_require__(2915) - -function createFile (file, callback) { - function makeFile () { - fs.writeFile(file, '', err => { - if (err) return callback(err) - callback() - }) - } - - fs.stat(file, (err, stats) => { // eslint-disable-line handle-callback-err - if (!err && stats.isFile()) return callback() - const dir = path.dirname(file) - fs.stat(dir, (err, stats) => { - if (err) { - // if the directory doesn't exist, make it - if (err.code === 'ENOENT') { - 
return mkdir.mkdirs(dir, err => { - if (err) return callback(err) - makeFile() - }) - } - return callback(err) - } - - if (stats.isDirectory()) makeFile() - else { - // parent is not a directory - // This is just to cause an internal ENOTDIR error to be thrown - fs.readdir(dir, err => { - if (err) return callback(err) - }) - } - }) - }) -} - -function createFileSync (file) { - let stats - try { - stats = fs.statSync(file) - } catch {} - if (stats && stats.isFile()) return - - const dir = path.dirname(file) - try { - if (!fs.statSync(dir).isDirectory()) { - // parent is not a directory - // This is just to cause an internal ENOTDIR error to be thrown - fs.readdirSync(dir) - } - } catch (err) { - // If the stat call above failed because the directory doesn't exist, create it - if (err && err.code === 'ENOENT') mkdir.mkdirsSync(dir) - else throw err - } - - fs.writeFileSync(file, '') -} - -module.exports = { - createFile: u(createFile), - createFileSync -} - - -/***/ }), - -/***/ 55: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const { createFile, createFileSync } = __nccwpck_require__(2164) -const { createLink, createLinkSync } = __nccwpck_require__(3797) -const { createSymlink, createSymlinkSync } = __nccwpck_require__(2549) - -module.exports = { - // file - createFile, - createFileSync, - ensureFile: createFile, - ensureFileSync: createFileSync, - // link - createLink, - createLinkSync, - ensureLink: createLink, - ensureLinkSync: createLinkSync, - // symlink - createSymlink, - createSymlinkSync, - ensureSymlink: createSymlink, - ensureSymlinkSync: createSymlinkSync -} - - -/***/ }), - -/***/ 3797: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const u = __nccwpck_require__(9046).fromCallback -const path = __nccwpck_require__(5622) -const fs = __nccwpck_require__(7758) -const mkdir = __nccwpck_require__(2915) -const pathExists = __nccwpck_require__(3835).pathExists -const { areIdentical } = __nccwpck_require__(3901) - -function createLink (srcpath, dstpath, callback) { - function makeLink (srcpath, dstpath) { - fs.link(srcpath, dstpath, err => { - if (err) return callback(err) - callback(null) - }) - } - - fs.lstat(dstpath, (_, dstStat) => { - fs.lstat(srcpath, (err, srcStat) => { - if (err) { - err.message = err.message.replace('lstat', 'ensureLink') - return callback(err) - } - if (dstStat && areIdentical(srcStat, dstStat)) return callback(null) - - const dir = path.dirname(dstpath) - pathExists(dir, (err, dirExists) => { - if (err) return callback(err) - if (dirExists) return makeLink(srcpath, dstpath) - mkdir.mkdirs(dir, err => { - if (err) return callback(err) - makeLink(srcpath, dstpath) - }) - }) - }) - }) -} - -function createLinkSync (srcpath, dstpath) { - let dstStat - try { - dstStat = fs.lstatSync(dstpath) - } catch {} - - try { - const srcStat = fs.lstatSync(srcpath) - if (dstStat && areIdentical(srcStat, dstStat)) return - } catch (err) { - err.message = err.message.replace('lstat', 'ensureLink') - throw err - } - - const dir = path.dirname(dstpath) - const dirExists = fs.existsSync(dir) - if (dirExists) return fs.linkSync(srcpath, dstpath) - mkdir.mkdirsSync(dir) - - return fs.linkSync(srcpath, dstpath) -} - -module.exports = { - createLink: u(createLink), - createLinkSync -} - - -/***/ }), - -/***/ 3727: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const path = __nccwpck_require__(5622) -const fs = __nccwpck_require__(7758) -const pathExists 
= __nccwpck_require__(3835).pathExists - -/** - * Function that returns two types of paths, one relative to symlink, and one - * relative to the current working directory. Checks if path is absolute or - * relative. If the path is relative, this function checks if the path is - * relative to symlink or relative to current working directory. This is an - * initiative to find a smarter `srcpath` to supply when building symlinks. - * This allows you to determine which path to use out of one of three possible - * types of source paths. The first is an absolute path. This is detected by - * `path.isAbsolute()`. When an absolute path is provided, it is checked to - * see if it exists. If it does it's used, if not an error is returned - * (callback)/ thrown (sync). The other two options for `srcpath` are a - * relative url. By default Node's `fs.symlink` works by creating a symlink - * using `dstpath` and expects the `srcpath` to be relative to the newly - * created symlink. If you provide a `srcpath` that does not exist on the file - * system it results in a broken symlink. To minimize this, the function - * checks to see if the 'relative to symlink' source file exists, and if it - * does it will use it. If it does not, it checks if there's a file that - * exists that is relative to the current working directory, if does its used. - * This preserves the expectations of the original fs.symlink spec and adds - * the ability to pass in `relative to current working direcotry` paths. - */ - -function symlinkPaths (srcpath, dstpath, callback) { - if (path.isAbsolute(srcpath)) { - return fs.lstat(srcpath, (err) => { - if (err) { - err.message = err.message.replace('lstat', 'ensureSymlink') - return callback(err) - } - return callback(null, { - toCwd: srcpath, - toDst: srcpath - }) - }) - } else { - const dstdir = path.dirname(dstpath) - const relativeToDst = path.join(dstdir, srcpath) - return pathExists(relativeToDst, (err, exists) => { - if (err) return callback(err) - if (exists) { - return callback(null, { - toCwd: relativeToDst, - toDst: srcpath - }) - } else { - return fs.lstat(srcpath, (err) => { - if (err) { - err.message = err.message.replace('lstat', 'ensureSymlink') - return callback(err) - } - return callback(null, { - toCwd: srcpath, - toDst: path.relative(dstdir, srcpath) - }) - }) - } - }) - } -} - -function symlinkPathsSync (srcpath, dstpath) { - let exists - if (path.isAbsolute(srcpath)) { - exists = fs.existsSync(srcpath) - if (!exists) throw new Error('absolute srcpath does not exist') - return { - toCwd: srcpath, - toDst: srcpath - } - } else { - const dstdir = path.dirname(dstpath) - const relativeToDst = path.join(dstdir, srcpath) - exists = fs.existsSync(relativeToDst) - if (exists) { - return { - toCwd: relativeToDst, - toDst: srcpath - } - } else { - exists = fs.existsSync(srcpath) - if (!exists) throw new Error('relative srcpath does not exist') - return { - toCwd: srcpath, - toDst: path.relative(dstdir, srcpath) - } - } - } -} - -module.exports = { - symlinkPaths, - symlinkPathsSync -} - - -/***/ }), - -/***/ 8254: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(7758) - -function symlinkType (srcpath, type, callback) { - callback = (typeof type === 'function') ? type : callback - type = (typeof type === 'function') ? false : type - if (type) return callback(null, type) - fs.lstat(srcpath, (err, stats) => { - if (err) return callback(null, 'file') - type = (stats && stats.isDirectory()) ? 
'dir' : 'file' - callback(null, type) - }) -} - -function symlinkTypeSync (srcpath, type) { - let stats - - if (type) return type - try { - stats = fs.lstatSync(srcpath) - } catch { - return 'file' - } - return (stats && stats.isDirectory()) ? 'dir' : 'file' -} - -module.exports = { - symlinkType, - symlinkTypeSync -} - - -/***/ }), - -/***/ 2549: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const u = __nccwpck_require__(9046).fromCallback -const path = __nccwpck_require__(5622) -const fs = __nccwpck_require__(1176) -const _mkdirs = __nccwpck_require__(2915) -const mkdirs = _mkdirs.mkdirs -const mkdirsSync = _mkdirs.mkdirsSync - -const _symlinkPaths = __nccwpck_require__(3727) -const symlinkPaths = _symlinkPaths.symlinkPaths -const symlinkPathsSync = _symlinkPaths.symlinkPathsSync - -const _symlinkType = __nccwpck_require__(8254) -const symlinkType = _symlinkType.symlinkType -const symlinkTypeSync = _symlinkType.symlinkTypeSync - -const pathExists = __nccwpck_require__(3835).pathExists - -const { areIdentical } = __nccwpck_require__(3901) - -function createSymlink (srcpath, dstpath, type, callback) { - callback = (typeof type === 'function') ? type : callback - type = (typeof type === 'function') ? false : type - - fs.lstat(dstpath, (err, stats) => { - if (!err && stats.isSymbolicLink()) { - Promise.all([ - fs.stat(srcpath), - fs.stat(dstpath) - ]).then(([srcStat, dstStat]) => { - if (areIdentical(srcStat, dstStat)) return callback(null) - _createSymlink(srcpath, dstpath, type, callback) - }) - } else _createSymlink(srcpath, dstpath, type, callback) - }) -} - -function _createSymlink (srcpath, dstpath, type, callback) { - symlinkPaths(srcpath, dstpath, (err, relative) => { - if (err) return callback(err) - srcpath = relative.toDst - symlinkType(relative.toCwd, type, (err, type) => { - if (err) return callback(err) - const dir = path.dirname(dstpath) - pathExists(dir, (err, dirExists) => { - if (err) return callback(err) - if (dirExists) return fs.symlink(srcpath, dstpath, type, callback) - mkdirs(dir, err => { - if (err) return callback(err) - fs.symlink(srcpath, dstpath, type, callback) - }) - }) - }) - }) -} - -function createSymlinkSync (srcpath, dstpath, type) { - let stats - try { - stats = fs.lstatSync(dstpath) - } catch {} - if (stats && stats.isSymbolicLink()) { - const srcStat = fs.statSync(srcpath) - const dstStat = fs.statSync(dstpath) - if (areIdentical(srcStat, dstStat)) return - } - - const relative = symlinkPathsSync(srcpath, dstpath) - srcpath = relative.toDst - type = symlinkTypeSync(relative.toCwd, type) - const dir = path.dirname(dstpath) - const exists = fs.existsSync(dir) - if (exists) return fs.symlinkSync(srcpath, dstpath, type) - mkdirsSync(dir) - return fs.symlinkSync(srcpath, dstpath, type) -} - -module.exports = { - createSymlink: u(createSymlink), - createSymlinkSync -} - - -/***/ }), - -/***/ 1176: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// This is adapted from https://github.com/normalize/mz -// Copyright (c) 2014-2016 Jonathan Ong me@jongleberry.com and Contributors -const u = __nccwpck_require__(9046).fromCallback -const fs = __nccwpck_require__(7758) - -const api = [ - 'access', - 'appendFile', - 'chmod', - 'chown', - 'close', - 'copyFile', - 'fchmod', - 'fchown', - 'fdatasync', - 'fstat', - 'fsync', - 'ftruncate', - 'futimes', - 'lchmod', - 'lchown', - 'link', - 'lstat', - 'mkdir', - 'mkdtemp', - 'open', - 'opendir', - 'readdir', - 'readFile', - 'readlink', - 
'realpath', - 'rename', - 'rm', - 'rmdir', - 'stat', - 'symlink', - 'truncate', - 'unlink', - 'utimes', - 'writeFile' -].filter(key => { - // Some commands are not available on some systems. Ex: - // fs.opendir was added in Node.js v12.12.0 - // fs.rm was added in Node.js v14.14.0 - // fs.lchown is not available on at least some Linux - return typeof fs[key] === 'function' -}) - -// Export cloned fs: -Object.assign(exports, fs) - -// Universalify async methods: -api.forEach(method => { - exports[method] = u(fs[method]) -}) - -// We differ from mz/fs in that we still ship the old, broken, fs.exists() -// since we are a drop-in replacement for the native module -exports.exists = function (filename, callback) { - if (typeof callback === 'function') { - return fs.exists(filename, callback) - } - return new Promise(resolve => { - return fs.exists(filename, resolve) - }) -} - -// fs.read(), fs.write(), & fs.writev() need special treatment due to multiple callback args - -exports.read = function (fd, buffer, offset, length, position, callback) { - if (typeof callback === 'function') { - return fs.read(fd, buffer, offset, length, position, callback) - } - return new Promise((resolve, reject) => { - fs.read(fd, buffer, offset, length, position, (err, bytesRead, buffer) => { - if (err) return reject(err) - resolve({ bytesRead, buffer }) - }) - }) -} - -// Function signature can be -// fs.write(fd, buffer[, offset[, length[, position]]], callback) -// OR -// fs.write(fd, string[, position[, encoding]], callback) -// We need to handle both cases, so we use ...args -exports.write = function (fd, buffer, ...args) { - if (typeof args[args.length - 1] === 'function') { - return fs.write(fd, buffer, ...args) - } - - return new Promise((resolve, reject) => { - fs.write(fd, buffer, ...args, (err, bytesWritten, buffer) => { - if (err) return reject(err) - resolve({ bytesWritten, buffer }) - }) - }) -} - -// fs.writev only available in Node v12.9.0+ -if (typeof fs.writev === 'function') { - // Function signature is - // s.writev(fd, buffers[, position], callback) - // We need to handle the optional arg, so we use ...args - exports.writev = function (fd, buffers, ...args) { - if (typeof args[args.length - 1] === 'function') { - return fs.writev(fd, buffers, ...args) - } - - return new Promise((resolve, reject) => { - fs.writev(fd, buffers, ...args, (err, bytesWritten, buffers) => { - if (err) return reject(err) - resolve({ bytesWritten, buffers }) - }) - }) - } -} - -// fs.realpath.native sometimes not available if fs is monkey-patched -if (typeof fs.realpath.native === 'function') { - exports.realpath.native = u(fs.realpath.native) -} else { - process.emitWarning( - 'fs.realpath.native is not a function. 
Is fs being monkey-patched?', - 'Warning', 'fs-extra-WARN0003' - ) -} - - -/***/ }), - -/***/ 5630: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -module.exports = { - // Export promiseified graceful-fs: - ...__nccwpck_require__(1176), - // Export extra methods: - ...__nccwpck_require__(1335), - ...__nccwpck_require__(6970), - ...__nccwpck_require__(55), - ...__nccwpck_require__(213), - ...__nccwpck_require__(2915), - ...__nccwpck_require__(1497), - ...__nccwpck_require__(1832), - ...__nccwpck_require__(3835), - ...__nccwpck_require__(7357) -} - - -/***/ }), - -/***/ 213: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const u = __nccwpck_require__(9046).fromPromise -const jsonFile = __nccwpck_require__(8970) - -jsonFile.outputJson = u(__nccwpck_require__(531)) -jsonFile.outputJsonSync = __nccwpck_require__(9421) -// aliases -jsonFile.outputJSON = jsonFile.outputJson -jsonFile.outputJSONSync = jsonFile.outputJsonSync -jsonFile.writeJSON = jsonFile.writeJson -jsonFile.writeJSONSync = jsonFile.writeJsonSync -jsonFile.readJSON = jsonFile.readJson -jsonFile.readJSONSync = jsonFile.readJsonSync - -module.exports = jsonFile - - -/***/ }), - -/***/ 8970: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const jsonFile = __nccwpck_require__(6160) - -module.exports = { - // jsonfile exports - readJson: jsonFile.readFile, - readJsonSync: jsonFile.readFileSync, - writeJson: jsonFile.writeFile, - writeJsonSync: jsonFile.writeFileSync -} - - -/***/ }), - -/***/ 9421: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const { stringify } = __nccwpck_require__(5902) -const { outputFileSync } = __nccwpck_require__(1832) - -function outputJsonSync (file, data, options) { - const str = stringify(data, options) - - outputFileSync(file, str, options) -} - -module.exports = outputJsonSync - - -/***/ }), - -/***/ 531: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const { stringify } = __nccwpck_require__(5902) -const { outputFile } = __nccwpck_require__(1832) - -async function outputJson (file, data, options = {}) { - const str = stringify(data, options) - - await outputFile(file, str, options) -} - -module.exports = outputJson - - -/***/ }), - -/***/ 2915: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - -const u = __nccwpck_require__(9046).fromPromise -const { makeDir: _makeDir, makeDirSync } = __nccwpck_require__(2751) -const makeDir = u(_makeDir) - -module.exports = { - mkdirs: makeDir, - mkdirsSync: makeDirSync, - // alias - mkdirp: makeDir, - mkdirpSync: makeDirSync, - ensureDir: makeDir, - ensureDirSync: makeDirSync -} - - -/***/ }), - -/***/ 2751: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - -const fs = __nccwpck_require__(1176) -const { checkPath } = __nccwpck_require__(9907) - -const getMode = options => { - const defaults = { mode: 0o777 } - if (typeof options === 'number') return options - return ({ ...defaults, ...options }).mode -} - -module.exports.makeDir = async (dir, options) => { - checkPath(dir) - - return fs.mkdir(dir, { - mode: getMode(options), - recursive: true - }) -} - -module.exports.makeDirSync = (dir, options) => { - checkPath(dir) - - return fs.mkdirSync(dir, { - mode: getMode(options), - recursive: true - }) -} - - -/***/ }), - -/***/ 9907: -/***/ ((module, __unused_webpack_exports, 
__nccwpck_require__) => { - -"use strict"; -// Adapted from https://github.com/sindresorhus/make-dir -// Copyright (c) Sindre Sorhus (sindresorhus.com) -// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: -// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -const path = __nccwpck_require__(5622) - -// https://github.com/nodejs/node/issues/8987 -// https://github.com/libuv/libuv/pull/1088 -module.exports.checkPath = function checkPath (pth) { - if (process.platform === 'win32') { - const pathHasInvalidWinCharacters = /[<>:"|?*]/.test(pth.replace(path.parse(pth).root, '')) - - if (pathHasInvalidWinCharacters) { - const error = new Error(`Path contains invalid characters: ${pth}`) - error.code = 'EINVAL' - throw error - } - } -} - - -/***/ }), - -/***/ 1497: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const u = __nccwpck_require__(9046).fromCallback -module.exports = { - move: u(__nccwpck_require__(2231)), - moveSync: __nccwpck_require__(2047) -} - - -/***/ }), - -/***/ 2047: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(7758) -const path = __nccwpck_require__(5622) -const copySync = __nccwpck_require__(1335).copySync -const removeSync = __nccwpck_require__(7357).removeSync -const mkdirpSync = __nccwpck_require__(2915).mkdirpSync -const stat = __nccwpck_require__(3901) - -function moveSync (src, dest, opts) { - opts = opts || {} - const overwrite = opts.overwrite || opts.clobber || false - - const { srcStat, isChangingCase = false } = stat.checkPathsSync(src, dest, 'move', opts) - stat.checkParentPathsSync(src, srcStat, dest, 'move') - if (!isParentRoot(dest)) mkdirpSync(path.dirname(dest)) - return doRename(src, dest, overwrite, isChangingCase) -} - -function isParentRoot (dest) { - const parent = path.dirname(dest) - const parsedPath = path.parse(parent) - return parsedPath.root === parent -} - -function doRename (src, dest, overwrite, isChangingCase) { - if (isChangingCase) return rename(src, dest, overwrite) - if (overwrite) { - removeSync(dest) - return rename(src, dest, overwrite) - } - if (fs.existsSync(dest)) throw new Error('dest already exists.') - return rename(src, dest, overwrite) -} - -function rename (src, dest, overwrite) { - try { - fs.renameSync(src, dest) - } catch (err) { - if (err.code !== 'EXDEV') throw err - return moveAcrossDevice(src, dest, overwrite) - } -} - -function moveAcrossDevice (src, dest, overwrite) { - const opts = { - overwrite, - errorOnExist: true - } - copySync(src, dest, opts) - return removeSync(src) -} - 
-module.exports = moveSync - - -/***/ }), - -/***/ 2231: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(7758) -const path = __nccwpck_require__(5622) -const copy = __nccwpck_require__(1335).copy -const remove = __nccwpck_require__(7357).remove -const mkdirp = __nccwpck_require__(2915).mkdirp -const pathExists = __nccwpck_require__(3835).pathExists -const stat = __nccwpck_require__(3901) - -function move (src, dest, opts, cb) { - if (typeof opts === 'function') { - cb = opts - opts = {} - } - - opts = opts || {} - - const overwrite = opts.overwrite || opts.clobber || false - - stat.checkPaths(src, dest, 'move', opts, (err, stats) => { - if (err) return cb(err) - const { srcStat, isChangingCase = false } = stats - stat.checkParentPaths(src, srcStat, dest, 'move', err => { - if (err) return cb(err) - if (isParentRoot(dest)) return doRename(src, dest, overwrite, isChangingCase, cb) - mkdirp(path.dirname(dest), err => { - if (err) return cb(err) - return doRename(src, dest, overwrite, isChangingCase, cb) - }) - }) - }) -} - -function isParentRoot (dest) { - const parent = path.dirname(dest) - const parsedPath = path.parse(parent) - return parsedPath.root === parent -} - -function doRename (src, dest, overwrite, isChangingCase, cb) { - if (isChangingCase) return rename(src, dest, overwrite, cb) - if (overwrite) { - return remove(dest, err => { - if (err) return cb(err) - return rename(src, dest, overwrite, cb) - }) - } - pathExists(dest, (err, destExists) => { - if (err) return cb(err) - if (destExists) return cb(new Error('dest already exists.')) - return rename(src, dest, overwrite, cb) - }) -} - -function rename (src, dest, overwrite, cb) { - fs.rename(src, dest, err => { - if (!err) return cb() - if (err.code !== 'EXDEV') return cb(err) - return moveAcrossDevice(src, dest, overwrite, cb) - }) -} - -function moveAcrossDevice (src, dest, overwrite, cb) { - const opts = { - overwrite, - errorOnExist: true - } - copy(src, dest, opts, err => { - if (err) return cb(err) - return remove(src, cb) - }) -} - -module.exports = move - - -/***/ }), - -/***/ 1832: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const u = __nccwpck_require__(9046).fromCallback -const fs = __nccwpck_require__(7758) -const path = __nccwpck_require__(5622) -const mkdir = __nccwpck_require__(2915) -const pathExists = __nccwpck_require__(3835).pathExists - -function outputFile (file, data, encoding, callback) { - if (typeof encoding === 'function') { - callback = encoding - encoding = 'utf8' - } - - const dir = path.dirname(file) - pathExists(dir, (err, itDoes) => { - if (err) return callback(err) - if (itDoes) return fs.writeFile(file, data, encoding, callback) - - mkdir.mkdirs(dir, err => { - if (err) return callback(err) - - fs.writeFile(file, data, encoding, callback) - }) - }) -} - -function outputFileSync (file, ...args) { - const dir = path.dirname(file) - if (fs.existsSync(dir)) { - return fs.writeFileSync(file, ...args) - } - mkdir.mkdirsSync(dir) - fs.writeFileSync(file, ...args) -} - -module.exports = { - outputFile: u(outputFile), - outputFileSync -} - - -/***/ }), - -/***/ 3835: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - -const u = __nccwpck_require__(9046).fromPromise -const fs = __nccwpck_require__(1176) - -function pathExists (path) { - return fs.access(path).then(() => true).catch(() => false) -} - -module.exports = { - pathExists: u(pathExists), - 
pathExistsSync: fs.existsSync -} - - -/***/ }), - -/***/ 7357: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(7758) -const u = __nccwpck_require__(9046).fromCallback -const rimraf = __nccwpck_require__(7247) - -function remove (path, callback) { - // Node 14.14.0+ - if (fs.rm) return fs.rm(path, { recursive: true, force: true }, callback) - rimraf(path, callback) -} - -function removeSync (path) { - // Node 14.14.0+ - if (fs.rmSync) return fs.rmSync(path, { recursive: true, force: true }) - rimraf.sync(path) -} - -module.exports = { - remove: u(remove), - removeSync -} - - -/***/ }), - -/***/ 7247: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(7758) -const path = __nccwpck_require__(5622) -const assert = __nccwpck_require__(2357) - -const isWindows = (process.platform === 'win32') - -function defaults (options) { - const methods = [ - 'unlink', - 'chmod', - 'stat', - 'lstat', - 'rmdir', - 'readdir' - ] - methods.forEach(m => { - options[m] = options[m] || fs[m] - m = m + 'Sync' - options[m] = options[m] || fs[m] - }) - - options.maxBusyTries = options.maxBusyTries || 3 -} - -function rimraf (p, options, cb) { - let busyTries = 0 - - if (typeof options === 'function') { - cb = options - options = {} - } - - assert(p, 'rimraf: missing path') - assert.strictEqual(typeof p, 'string', 'rimraf: path should be a string') - assert.strictEqual(typeof cb, 'function', 'rimraf: callback function required') - assert(options, 'rimraf: invalid options argument provided') - assert.strictEqual(typeof options, 'object', 'rimraf: options should be object') - - defaults(options) - - rimraf_(p, options, function CB (er) { - if (er) { - if ((er.code === 'EBUSY' || er.code === 'ENOTEMPTY' || er.code === 'EPERM') && - busyTries < options.maxBusyTries) { - busyTries++ - const time = busyTries * 100 - // try again, with the same exact callback as this one. - return setTimeout(() => rimraf_(p, options, CB), time) - } - - // already gone - if (er.code === 'ENOENT') er = null - } - - cb(er) - }) -} - -// Two possible strategies. -// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR -// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR -// -// Both result in an extra syscall when you guess wrong. However, there -// are likely far more normal files in the world than directories. This -// is based on the assumption that a the average number of files per -// directory is >= 1. -// -// If anyone ever complains about this, then I guess the strategy could -// be made configurable somehow. But until then, YAGNI. -function rimraf_ (p, options, cb) { - assert(p) - assert(options) - assert(typeof cb === 'function') - - // sunos lets the root user unlink directories, which is... weird. - // so we have to lstat here and make sure it's not a dir. - options.lstat(p, (er, st) => { - if (er && er.code === 'ENOENT') { - return cb(null) - } - - // Windows can EPERM on stat. Life is suffering. - if (er && er.code === 'EPERM' && isWindows) { - return fixWinEPERM(p, options, er, cb) - } - - if (st && st.isDirectory()) { - return rmdir(p, options, er, cb) - } - - options.unlink(p, er => { - if (er) { - if (er.code === 'ENOENT') { - return cb(null) - } - if (er.code === 'EPERM') { - return (isWindows) - ? 
fixWinEPERM(p, options, er, cb) - : rmdir(p, options, er, cb) - } - if (er.code === 'EISDIR') { - return rmdir(p, options, er, cb) - } - } - return cb(er) - }) - }) -} - -function fixWinEPERM (p, options, er, cb) { - assert(p) - assert(options) - assert(typeof cb === 'function') - - options.chmod(p, 0o666, er2 => { - if (er2) { - cb(er2.code === 'ENOENT' ? null : er) - } else { - options.stat(p, (er3, stats) => { - if (er3) { - cb(er3.code === 'ENOENT' ? null : er) - } else if (stats.isDirectory()) { - rmdir(p, options, er, cb) - } else { - options.unlink(p, cb) - } - }) - } - }) -} - -function fixWinEPERMSync (p, options, er) { - let stats - - assert(p) - assert(options) - - try { - options.chmodSync(p, 0o666) - } catch (er2) { - if (er2.code === 'ENOENT') { - return - } else { - throw er - } - } - - try { - stats = options.statSync(p) - } catch (er3) { - if (er3.code === 'ENOENT') { - return - } else { - throw er - } - } - - if (stats.isDirectory()) { - rmdirSync(p, options, er) - } else { - options.unlinkSync(p) - } -} - -function rmdir (p, options, originalEr, cb) { - assert(p) - assert(options) - assert(typeof cb === 'function') - - // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS) - // if we guessed wrong, and it's not a directory, then - // raise the original error. - options.rmdir(p, er => { - if (er && (er.code === 'ENOTEMPTY' || er.code === 'EEXIST' || er.code === 'EPERM')) { - rmkids(p, options, cb) - } else if (er && er.code === 'ENOTDIR') { - cb(originalEr) - } else { - cb(er) - } - }) -} - -function rmkids (p, options, cb) { - assert(p) - assert(options) - assert(typeof cb === 'function') - - options.readdir(p, (er, files) => { - if (er) return cb(er) - - let n = files.length - let errState - - if (n === 0) return options.rmdir(p, cb) - - files.forEach(f => { - rimraf(path.join(p, f), options, er => { - if (errState) { - return - } - if (er) return cb(errState = er) - if (--n === 0) { - options.rmdir(p, cb) - } - }) - }) - }) -} - -// this looks simpler, and is strictly *faster*, but will -// tie up the JavaScript thread and fail on excessively -// deep directory trees. -function rimrafSync (p, options) { - let st - - options = options || {} - defaults(options) - - assert(p, 'rimraf: missing path') - assert.strictEqual(typeof p, 'string', 'rimraf: path should be a string') - assert(options, 'rimraf: missing options') - assert.strictEqual(typeof options, 'object', 'rimraf: options should be object') - - try { - st = options.lstatSync(p) - } catch (er) { - if (er.code === 'ENOENT') { - return - } - - // Windows can EPERM on stat. Life is suffering. - if (er.code === 'EPERM' && isWindows) { - fixWinEPERMSync(p, options, er) - } - } - - try { - // sunos lets the root user unlink directories, which is... weird. - if (st && st.isDirectory()) { - rmdirSync(p, options, null) - } else { - options.unlinkSync(p) - } - } catch (er) { - if (er.code === 'ENOENT') { - return - } else if (er.code === 'EPERM') { - return isWindows ? 
fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er) - } else if (er.code !== 'EISDIR') { - throw er - } - rmdirSync(p, options, er) - } -} - -function rmdirSync (p, options, originalEr) { - assert(p) - assert(options) - - try { - options.rmdirSync(p) - } catch (er) { - if (er.code === 'ENOTDIR') { - throw originalEr - } else if (er.code === 'ENOTEMPTY' || er.code === 'EEXIST' || er.code === 'EPERM') { - rmkidsSync(p, options) - } else if (er.code !== 'ENOENT') { - throw er - } - } -} - -function rmkidsSync (p, options) { - assert(p) - assert(options) - options.readdirSync(p).forEach(f => rimrafSync(path.join(p, f), options)) - - if (isWindows) { - // We only end up here once we got ENOTEMPTY at least once, and - // at this point, we are guaranteed to have removed all the kids. - // So, we know that it won't be ENOENT or ENOTDIR or anything else. - // try really hard to delete stuff on windows, because it has a - // PROFOUNDLY annoying habit of not closing handles promptly when - // files are deleted, resulting in spurious ENOTEMPTY errors. - const startTime = Date.now() - do { - try { - const ret = options.rmdirSync(p, options) - return ret - } catch {} - } while (Date.now() - startTime < 500) // give up after 500ms - } else { - const ret = options.rmdirSync(p, options) - return ret - } -} - -module.exports = rimraf -rimraf.sync = rimrafSync - - -/***/ }), - -/***/ 3901: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(1176) -const path = __nccwpck_require__(5622) -const util = __nccwpck_require__(1669) - -function getStats (src, dest, opts) { - const statFunc = opts.dereference - ? (file) => fs.stat(file, { bigint: true }) - : (file) => fs.lstat(file, { bigint: true }) - return Promise.all([ - statFunc(src), - statFunc(dest).catch(err => { - if (err.code === 'ENOENT') return null - throw err - }) - ]).then(([srcStat, destStat]) => ({ srcStat, destStat })) -} - -function getStatsSync (src, dest, opts) { - let destStat - const statFunc = opts.dereference - ? 
(file) => fs.statSync(file, { bigint: true }) - : (file) => fs.lstatSync(file, { bigint: true }) - const srcStat = statFunc(src) - try { - destStat = statFunc(dest) - } catch (err) { - if (err.code === 'ENOENT') return { srcStat, destStat: null } - throw err - } - return { srcStat, destStat } -} - -function checkPaths (src, dest, funcName, opts, cb) { - util.callbackify(getStats)(src, dest, opts, (err, stats) => { - if (err) return cb(err) - const { srcStat, destStat } = stats - - if (destStat) { - if (areIdentical(srcStat, destStat)) { - const srcBaseName = path.basename(src) - const destBaseName = path.basename(dest) - if (funcName === 'move' && - srcBaseName !== destBaseName && - srcBaseName.toLowerCase() === destBaseName.toLowerCase()) { - return cb(null, { srcStat, destStat, isChangingCase: true }) - } - return cb(new Error('Source and destination must not be the same.')) - } - if (srcStat.isDirectory() && !destStat.isDirectory()) { - return cb(new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`)) - } - if (!srcStat.isDirectory() && destStat.isDirectory()) { - return cb(new Error(`Cannot overwrite directory '${dest}' with non-directory '${src}'.`)) - } - } - - if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { - return cb(new Error(errMsg(src, dest, funcName))) - } - return cb(null, { srcStat, destStat }) - }) -} - -function checkPathsSync (src, dest, funcName, opts) { - const { srcStat, destStat } = getStatsSync(src, dest, opts) - - if (destStat) { - if (areIdentical(srcStat, destStat)) { - const srcBaseName = path.basename(src) - const destBaseName = path.basename(dest) - if (funcName === 'move' && - srcBaseName !== destBaseName && - srcBaseName.toLowerCase() === destBaseName.toLowerCase()) { - return { srcStat, destStat, isChangingCase: true } - } - throw new Error('Source and destination must not be the same.') - } - if (srcStat.isDirectory() && !destStat.isDirectory()) { - throw new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`) - } - if (!srcStat.isDirectory() && destStat.isDirectory()) { - throw new Error(`Cannot overwrite directory '${dest}' with non-directory '${src}'.`) - } - } - - if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { - throw new Error(errMsg(src, dest, funcName)) - } - return { srcStat, destStat } -} - -// recursively check if dest parent is a subdirectory of src. -// It works for all file types including symlinks since it -// checks the src and dest inodes. It starts from the deepest -// parent and stops once it reaches the src parent or the root path. 
-function checkParentPaths (src, srcStat, dest, funcName, cb) { - const srcParent = path.resolve(path.dirname(src)) - const destParent = path.resolve(path.dirname(dest)) - if (destParent === srcParent || destParent === path.parse(destParent).root) return cb() - fs.stat(destParent, { bigint: true }, (err, destStat) => { - if (err) { - if (err.code === 'ENOENT') return cb() - return cb(err) - } - if (areIdentical(srcStat, destStat)) { - return cb(new Error(errMsg(src, dest, funcName))) - } - return checkParentPaths(src, srcStat, destParent, funcName, cb) - }) -} - -function checkParentPathsSync (src, srcStat, dest, funcName) { - const srcParent = path.resolve(path.dirname(src)) - const destParent = path.resolve(path.dirname(dest)) - if (destParent === srcParent || destParent === path.parse(destParent).root) return - let destStat - try { - destStat = fs.statSync(destParent, { bigint: true }) - } catch (err) { - if (err.code === 'ENOENT') return - throw err - } - if (areIdentical(srcStat, destStat)) { - throw new Error(errMsg(src, dest, funcName)) - } - return checkParentPathsSync(src, srcStat, destParent, funcName) -} - -function areIdentical (srcStat, destStat) { - return destStat.ino && destStat.dev && destStat.ino === srcStat.ino && destStat.dev === srcStat.dev -} - -// return true if dest is a subdir of src, otherwise false. -// It only checks the path strings. -function isSrcSubdir (src, dest) { - const srcArr = path.resolve(src).split(path.sep).filter(i => i) - const destArr = path.resolve(dest).split(path.sep).filter(i => i) - return srcArr.reduce((acc, cur, i) => acc && destArr[i] === cur, true) -} - -function errMsg (src, dest, funcName) { - return `Cannot ${funcName} '${src}' to a subdirectory of itself, '${dest}'.` -} - -module.exports = { - checkPaths, - checkPathsSync, - checkParentPaths, - checkParentPathsSync, - isSrcSubdir, - areIdentical -} - - -/***/ }), - -/***/ 2548: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(7758) - -function utimesMillis (path, atime, mtime, callback) { - // if (!HAS_MILLIS_RES) return fs.utimes(path, atime, mtime, callback) - fs.open(path, 'r+', (err, fd) => { - if (err) return callback(err) - fs.futimes(fd, atime, mtime, futimesErr => { - fs.close(fd, closeErr => { - if (callback) callback(futimesErr || closeErr) - }) - }) - }) -} - -function utimesMillisSync (path, atime, mtime) { - const fd = fs.openSync(path, 'r+') - fs.futimesSync(fd, atime, mtime) - return fs.closeSync(fd) -} - -module.exports = { - utimesMillis, - utimesMillisSync -} - - -/***/ }), - -/***/ 7356: -/***/ ((module) => { - -"use strict"; - - -module.exports = clone - -var getPrototypeOf = Object.getPrototypeOf || function (obj) { - return obj.__proto__ -} - -function clone (obj) { - if (obj === null || typeof obj !== 'object') - return obj - - if (obj instanceof Object) - var copy = { __proto__: getPrototypeOf(obj) } - else - var copy = Object.create(null) - - Object.getOwnPropertyNames(obj).forEach(function (key) { - Object.defineProperty(copy, key, Object.getOwnPropertyDescriptor(obj, key)) - }) - - return copy -} - - -/***/ }), - -/***/ 7758: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var fs = __nccwpck_require__(5747) -var polyfills = __nccwpck_require__(263) -var legacy = __nccwpck_require__(3086) -var clone = __nccwpck_require__(7356) - -var util = __nccwpck_require__(1669) - -/* istanbul ignore next - node 0.x polyfill */ -var gracefulQueue -var previousSymbol - -/* 
istanbul ignore else - node 0.x polyfill */ -if (typeof Symbol === 'function' && typeof Symbol.for === 'function') { - gracefulQueue = Symbol.for('graceful-fs.queue') - // This is used in testing by future versions - previousSymbol = Symbol.for('graceful-fs.previous') -} else { - gracefulQueue = '___graceful-fs.queue' - previousSymbol = '___graceful-fs.previous' -} - -function noop () {} - -function publishQueue(context, queue) { - Object.defineProperty(context, gracefulQueue, { - get: function() { - return queue - } - }) -} - -var debug = noop -if (util.debuglog) - debug = util.debuglog('gfs4') -else if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) - debug = function() { - var m = util.format.apply(util, arguments) - m = 'GFS4: ' + m.split(/\n/).join('\nGFS4: ') - console.error(m) - } - -// Once time initialization -if (!fs[gracefulQueue]) { - // This queue can be shared by multiple loaded instances - var queue = global[gracefulQueue] || [] - publishQueue(fs, queue) - - // Patch fs.close/closeSync to shared queue version, because we need - // to retry() whenever a close happens *anywhere* in the program. - // This is essential when multiple graceful-fs instances are - // in play at the same time. - fs.close = (function (fs$close) { - function close (fd, cb) { - return fs$close.call(fs, fd, function (err) { - // This function uses the graceful-fs shared queue - if (!err) { - resetQueue() - } - - if (typeof cb === 'function') - cb.apply(this, arguments) - }) - } - - Object.defineProperty(close, previousSymbol, { - value: fs$close - }) - return close - })(fs.close) - - fs.closeSync = (function (fs$closeSync) { - function closeSync (fd) { - // This function uses the graceful-fs shared queue - fs$closeSync.apply(fs, arguments) - resetQueue() - } - - Object.defineProperty(closeSync, previousSymbol, { - value: fs$closeSync - }) - return closeSync - })(fs.closeSync) - - if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) { - process.on('exit', function() { - debug(fs[gracefulQueue]) - __nccwpck_require__(2357).equal(fs[gracefulQueue].length, 0) - }) - } -} - -if (!global[gracefulQueue]) { - publishQueue(global, fs[gracefulQueue]); -} - -module.exports = patch(clone(fs)) -if (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !fs.__patched) { - module.exports = patch(fs) - fs.__patched = true; -} - -function patch (fs) { - // Everything that references the open() function needs to be in here - polyfills(fs) - fs.gracefulify = patch - - fs.createReadStream = createReadStream - fs.createWriteStream = createWriteStream - var fs$readFile = fs.readFile - fs.readFile = readFile - function readFile (path, options, cb) { - if (typeof options === 'function') - cb = options, options = null - - return go$readFile(path, options, cb) - - function go$readFile (path, options, cb, startTime) { - return fs$readFile(path, options, function (err) { - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([go$readFile, [path, options, cb], err, startTime || Date.now(), Date.now()]) - else { - if (typeof cb === 'function') - cb.apply(this, arguments) - } - }) - } - } - - var fs$writeFile = fs.writeFile - fs.writeFile = writeFile - function writeFile (path, data, options, cb) { - if (typeof options === 'function') - cb = options, options = null - - return go$writeFile(path, data, options, cb) - - function go$writeFile (path, data, options, cb, startTime) { - return fs$writeFile(path, data, options, function (err) { - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([go$writeFile, 
[path, data, options, cb], err, startTime || Date.now(), Date.now()]) - else { - if (typeof cb === 'function') - cb.apply(this, arguments) - } - }) - } - } - - var fs$appendFile = fs.appendFile - if (fs$appendFile) - fs.appendFile = appendFile - function appendFile (path, data, options, cb) { - if (typeof options === 'function') - cb = options, options = null - - return go$appendFile(path, data, options, cb) - - function go$appendFile (path, data, options, cb, startTime) { - return fs$appendFile(path, data, options, function (err) { - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([go$appendFile, [path, data, options, cb], err, startTime || Date.now(), Date.now()]) - else { - if (typeof cb === 'function') - cb.apply(this, arguments) - } - }) - } - } - - var fs$copyFile = fs.copyFile - if (fs$copyFile) - fs.copyFile = copyFile - function copyFile (src, dest, flags, cb) { - if (typeof flags === 'function') { - cb = flags - flags = 0 - } - return go$copyFile(src, dest, flags, cb) - - function go$copyFile (src, dest, flags, cb, startTime) { - return fs$copyFile(src, dest, flags, function (err) { - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([go$copyFile, [src, dest, flags, cb], err, startTime || Date.now(), Date.now()]) - else { - if (typeof cb === 'function') - cb.apply(this, arguments) - } - }) - } - } - - var fs$readdir = fs.readdir - fs.readdir = readdir - var noReaddirOptionVersions = /^v[0-5]\./ - function readdir (path, options, cb) { - if (typeof options === 'function') - cb = options, options = null - - var go$readdir = noReaddirOptionVersions.test(process.version) - ? function go$readdir (path, options, cb, startTime) { - return fs$readdir(path, fs$readdirCallback( - path, options, cb, startTime - )) - } - : function go$readdir (path, options, cb, startTime) { - return fs$readdir(path, options, fs$readdirCallback( - path, options, cb, startTime - )) - } - - return go$readdir(path, options, cb) - - function fs$readdirCallback (path, options, cb, startTime) { - return function (err, files) { - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([ - go$readdir, - [path, options, cb], - err, - startTime || Date.now(), - Date.now() - ]) - else { - if (files && files.sort) - files.sort() - - if (typeof cb === 'function') - cb.call(this, err, files) - } - } - } - } - - if (process.version.substr(0, 4) === 'v0.8') { - var legStreams = legacy(fs) - ReadStream = legStreams.ReadStream - WriteStream = legStreams.WriteStream - } - - var fs$ReadStream = fs.ReadStream - if (fs$ReadStream) { - ReadStream.prototype = Object.create(fs$ReadStream.prototype) - ReadStream.prototype.open = ReadStream$open - } - - var fs$WriteStream = fs.WriteStream - if (fs$WriteStream) { - WriteStream.prototype = Object.create(fs$WriteStream.prototype) - WriteStream.prototype.open = WriteStream$open - } - - Object.defineProperty(fs, 'ReadStream', { - get: function () { - return ReadStream - }, - set: function (val) { - ReadStream = val - }, - enumerable: true, - configurable: true - }) - Object.defineProperty(fs, 'WriteStream', { - get: function () { - return WriteStream - }, - set: function (val) { - WriteStream = val - }, - enumerable: true, - configurable: true - }) - - // legacy names - var FileReadStream = ReadStream - Object.defineProperty(fs, 'FileReadStream', { - get: function () { - return FileReadStream - }, - set: function (val) { - FileReadStream = val - }, - enumerable: true, - configurable: true - }) - var FileWriteStream = WriteStream - 
Object.defineProperty(fs, 'FileWriteStream', { - get: function () { - return FileWriteStream - }, - set: function (val) { - FileWriteStream = val - }, - enumerable: true, - configurable: true - }) - - function ReadStream (path, options) { - if (this instanceof ReadStream) - return fs$ReadStream.apply(this, arguments), this - else - return ReadStream.apply(Object.create(ReadStream.prototype), arguments) - } - - function ReadStream$open () { - var that = this - open(that.path, that.flags, that.mode, function (err, fd) { - if (err) { - if (that.autoClose) - that.destroy() - - that.emit('error', err) - } else { - that.fd = fd - that.emit('open', fd) - that.read() - } - }) - } - - function WriteStream (path, options) { - if (this instanceof WriteStream) - return fs$WriteStream.apply(this, arguments), this - else - return WriteStream.apply(Object.create(WriteStream.prototype), arguments) - } - - function WriteStream$open () { - var that = this - open(that.path, that.flags, that.mode, function (err, fd) { - if (err) { - that.destroy() - that.emit('error', err) - } else { - that.fd = fd - that.emit('open', fd) - } - }) - } - - function createReadStream (path, options) { - return new fs.ReadStream(path, options) - } - - function createWriteStream (path, options) { - return new fs.WriteStream(path, options) - } - - var fs$open = fs.open - fs.open = open - function open (path, flags, mode, cb) { - if (typeof mode === 'function') - cb = mode, mode = null - - return go$open(path, flags, mode, cb) - - function go$open (path, flags, mode, cb, startTime) { - return fs$open(path, flags, mode, function (err, fd) { - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([go$open, [path, flags, mode, cb], err, startTime || Date.now(), Date.now()]) - else { - if (typeof cb === 'function') - cb.apply(this, arguments) - } - }) - } - } - - return fs -} - -function enqueue (elem) { - debug('ENQUEUE', elem[0].name, elem[1]) - fs[gracefulQueue].push(elem) - retry() -} - -// keep track of the timeout between retry() calls -var retryTimer - -// reset the startTime and lastTime to now -// this resets the start of the 60 second overall timeout as well as the -// delay between attempts so that we'll retry these jobs sooner -function resetQueue () { - var now = Date.now() - for (var i = 0; i < fs[gracefulQueue].length; ++i) { - // entries that are only a length of 2 are from an older version, don't - // bother modifying those since they'll be retried anyway. 
- if (fs[gracefulQueue][i].length > 2) { - fs[gracefulQueue][i][3] = now // startTime - fs[gracefulQueue][i][4] = now // lastTime - } - } - // call retry to make sure we're actively processing the queue - retry() -} - -function retry () { - // clear the timer and remove it to help prevent unintended concurrency - clearTimeout(retryTimer) - retryTimer = undefined - - if (fs[gracefulQueue].length === 0) - return - - var elem = fs[gracefulQueue].shift() - var fn = elem[0] - var args = elem[1] - // these items may be unset if they were added by an older graceful-fs - var err = elem[2] - var startTime = elem[3] - var lastTime = elem[4] - - // if we don't have a startTime we have no way of knowing if we've waited - // long enough, so go ahead and retry this item now - if (startTime === undefined) { - debug('RETRY', fn.name, args) - fn.apply(null, args) - } else if (Date.now() - startTime >= 60000) { - // it's been more than 60 seconds total, bail now - debug('TIMEOUT', fn.name, args) - var cb = args.pop() - if (typeof cb === 'function') - cb.call(null, err) - } else { - // the amount of time between the last attempt and right now - var sinceAttempt = Date.now() - lastTime - // the amount of time between when we first tried, and when we last tried - // rounded up to at least 1 - var sinceStart = Math.max(lastTime - startTime, 1) - // backoff. wait longer than the total time we've been retrying, but only - // up to a maximum of 100ms - var desiredDelay = Math.min(sinceStart * 1.2, 100) - // it's been long enough since the last retry, do it again - if (sinceAttempt >= desiredDelay) { - debug('RETRY', fn.name, args) - fn.apply(null, args.concat([startTime])) - } else { - // if we can't do this job yet, push it to the end of the queue - // and let the next iteration check again - fs[gracefulQueue].push(elem) - } - } - - // schedule our next run if one isn't already scheduled - if (retryTimer === undefined) { - retryTimer = setTimeout(retry, 0) - } -} - - -/***/ }), - -/***/ 3086: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var Stream = __nccwpck_require__(2413).Stream - -module.exports = legacy - -function legacy (fs) { - return { - ReadStream: ReadStream, - WriteStream: WriteStream - } - - function ReadStream (path, options) { - if (!(this instanceof ReadStream)) return new ReadStream(path, options); - - Stream.call(this); - - var self = this; - - this.path = path; - this.fd = null; - this.readable = true; - this.paused = false; - - this.flags = 'r'; - this.mode = 438; /*=0666*/ - this.bufferSize = 64 * 1024; - - options = options || {}; - - // Mixin options into this - var keys = Object.keys(options); - for (var index = 0, length = keys.length; index < length; index++) { - var key = keys[index]; - this[key] = options[key]; - } - - if (this.encoding) this.setEncoding(this.encoding); - - if (this.start !== undefined) { - if ('number' !== typeof this.start) { - throw TypeError('start must be a Number'); - } - if (this.end === undefined) { - this.end = Infinity; - } else if ('number' !== typeof this.end) { - throw TypeError('end must be a Number'); - } - - if (this.start > this.end) { - throw new Error('start must be <= end'); - } - - this.pos = this.start; - } - - if (this.fd !== null) { - process.nextTick(function() { - self._read(); - }); - return; - } - - fs.open(this.path, this.flags, this.mode, function (err, fd) { - if (err) { - self.emit('error', err); - self.readable = false; - return; - } - - self.fd = fd; - self.emit('open', fd); - self._read(); - }) - } - - 
function WriteStream (path, options) { - if (!(this instanceof WriteStream)) return new WriteStream(path, options); - - Stream.call(this); - - this.path = path; - this.fd = null; - this.writable = true; - - this.flags = 'w'; - this.encoding = 'binary'; - this.mode = 438; /*=0666*/ - this.bytesWritten = 0; - - options = options || {}; - - // Mixin options into this - var keys = Object.keys(options); - for (var index = 0, length = keys.length; index < length; index++) { - var key = keys[index]; - this[key] = options[key]; - } - - if (this.start !== undefined) { - if ('number' !== typeof this.start) { - throw TypeError('start must be a Number'); - } - if (this.start < 0) { - throw new Error('start must be >= zero'); - } - - this.pos = this.start; - } - - this.busy = false; - this._queue = []; - - if (this.fd === null) { - this._open = fs.open; - this._queue.push([this._open, this.path, this.flags, this.mode, undefined]); - this.flush(); - } - } -} - - -/***/ }), - -/***/ 263: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var constants = __nccwpck_require__(7619) - -var origCwd = process.cwd -var cwd = null - -var platform = process.env.GRACEFUL_FS_PLATFORM || process.platform - -process.cwd = function() { - if (!cwd) - cwd = origCwd.call(process) - return cwd -} -try { - process.cwd() -} catch (er) {} - -// This check is needed until node.js 12 is required -if (typeof process.chdir === 'function') { - var chdir = process.chdir - process.chdir = function (d) { - cwd = null - chdir.call(process, d) - } - if (Object.setPrototypeOf) Object.setPrototypeOf(process.chdir, chdir) -} - -module.exports = patch - -function patch (fs) { - // (re-)implement some things that are known busted or missing. - - // lchmod, broken prior to 0.6.2 - // back-port the fix here. - if (constants.hasOwnProperty('O_SYMLINK') && - process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) { - patchLchmod(fs) - } - - // lutimes implementation, or no-op - if (!fs.lutimes) { - patchLutimes(fs) - } - - // https://github.com/isaacs/node-graceful-fs/issues/4 - // Chown should not fail on einval or eperm if non-root. - // It should not fail on enosys ever, as this just indicates - // that a fs doesn't support the intended operation. - - fs.chown = chownFix(fs.chown) - fs.fchown = chownFix(fs.fchown) - fs.lchown = chownFix(fs.lchown) - - fs.chmod = chmodFix(fs.chmod) - fs.fchmod = chmodFix(fs.fchmod) - fs.lchmod = chmodFix(fs.lchmod) - - fs.chownSync = chownFixSync(fs.chownSync) - fs.fchownSync = chownFixSync(fs.fchownSync) - fs.lchownSync = chownFixSync(fs.lchownSync) - - fs.chmodSync = chmodFixSync(fs.chmodSync) - fs.fchmodSync = chmodFixSync(fs.fchmodSync) - fs.lchmodSync = chmodFixSync(fs.lchmodSync) - - fs.stat = statFix(fs.stat) - fs.fstat = statFix(fs.fstat) - fs.lstat = statFix(fs.lstat) - - fs.statSync = statFixSync(fs.statSync) - fs.fstatSync = statFixSync(fs.fstatSync) - fs.lstatSync = statFixSync(fs.lstatSync) - - // if lchmod/lchown do not exist, then make them no-ops - if (fs.chmod && !fs.lchmod) { - fs.lchmod = function (path, mode, cb) { - if (cb) process.nextTick(cb) - } - fs.lchmodSync = function () {} - } - if (fs.chown && !fs.lchown) { - fs.lchown = function (path, uid, gid, cb) { - if (cb) process.nextTick(cb) - } - fs.lchownSync = function () {} - } - - // on Windows, A/V software can lock the directory, causing this - // to fail with an EACCES or EPERM if the directory contains newly - // created files. Try again on failure, for up to 60 seconds. 
- - // Set the timeout this long because some Windows Anti-Virus, such as Parity - // bit9, may lock files for up to a minute, causing npm package install - // failures. Also, take care to yield the scheduler. Windows scheduling gives - // CPU to a busy looping process, which can cause the program causing the lock - // contention to be starved of CPU by node, so the contention doesn't resolve. - if (platform === "win32") { - fs.rename = typeof fs.rename !== 'function' ? fs.rename - : (function (fs$rename) { - function rename (from, to, cb) { - var start = Date.now() - var backoff = 0; - fs$rename(from, to, function CB (er) { - if (er - && (er.code === "EACCES" || er.code === "EPERM") - && Date.now() - start < 60000) { - setTimeout(function() { - fs.stat(to, function (stater, st) { - if (stater && stater.code === "ENOENT") - fs$rename(from, to, CB); - else - cb(er) - }) - }, backoff) - if (backoff < 100) - backoff += 10; - return; - } - if (cb) cb(er) - }) - } - if (Object.setPrototypeOf) Object.setPrototypeOf(rename, fs$rename) - return rename - })(fs.rename) - } - - // if read() returns EAGAIN, then just try it again. - fs.read = typeof fs.read !== 'function' ? fs.read - : (function (fs$read) { - function read (fd, buffer, offset, length, position, callback_) { - var callback - if (callback_ && typeof callback_ === 'function') { - var eagCounter = 0 - callback = function (er, _, __) { - if (er && er.code === 'EAGAIN' && eagCounter < 10) { - eagCounter ++ - return fs$read.call(fs, fd, buffer, offset, length, position, callback) - } - callback_.apply(this, arguments) - } - } - return fs$read.call(fs, fd, buffer, offset, length, position, callback) - } - - // This ensures `util.promisify` works as it does for native `fs.read`. - if (Object.setPrototypeOf) Object.setPrototypeOf(read, fs$read) - return read - })(fs.read) - - fs.readSync = typeof fs.readSync !== 'function' ? fs.readSync - : (function (fs$readSync) { return function (fd, buffer, offset, length, position) { - var eagCounter = 0 - while (true) { - try { - return fs$readSync.call(fs, fd, buffer, offset, length, position) - } catch (er) { - if (er.code === 'EAGAIN' && eagCounter < 10) { - eagCounter ++ - continue - } - throw er - } - } - }})(fs.readSync) - - function patchLchmod (fs) { - fs.lchmod = function (path, mode, callback) { - fs.open( path - , constants.O_WRONLY | constants.O_SYMLINK - , mode - , function (err, fd) { - if (err) { - if (callback) callback(err) - return - } - // prefer to return the chmod error, if one occurs, - // but still try to close, and report closing errors if they occur. - fs.fchmod(fd, mode, function (err) { - fs.close(fd, function(err2) { - if (callback) callback(err || err2) - }) - }) - }) - } - - fs.lchmodSync = function (path, mode) { - var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode) - - // prefer to return the chmod error, if one occurs, - // but still try to close, and report closing errors if they occur. 
- var threw = true - var ret - try { - ret = fs.fchmodSync(fd, mode) - threw = false - } finally { - if (threw) { - try { - fs.closeSync(fd) - } catch (er) {} - } else { - fs.closeSync(fd) - } - } - return ret - } - } - - function patchLutimes (fs) { - if (constants.hasOwnProperty("O_SYMLINK") && fs.futimes) { - fs.lutimes = function (path, at, mt, cb) { - fs.open(path, constants.O_SYMLINK, function (er, fd) { - if (er) { - if (cb) cb(er) - return - } - fs.futimes(fd, at, mt, function (er) { - fs.close(fd, function (er2) { - if (cb) cb(er || er2) - }) - }) - }) - } - - fs.lutimesSync = function (path, at, mt) { - var fd = fs.openSync(path, constants.O_SYMLINK) - var ret - var threw = true - try { - ret = fs.futimesSync(fd, at, mt) - threw = false - } finally { - if (threw) { - try { - fs.closeSync(fd) - } catch (er) {} - } else { - fs.closeSync(fd) - } - } - return ret - } - - } else if (fs.futimes) { - fs.lutimes = function (_a, _b, _c, cb) { if (cb) process.nextTick(cb) } - fs.lutimesSync = function () {} - } - } - - function chmodFix (orig) { - if (!orig) return orig - return function (target, mode, cb) { - return orig.call(fs, target, mode, function (er) { - if (chownErOk(er)) er = null - if (cb) cb.apply(this, arguments) - }) - } - } - - function chmodFixSync (orig) { - if (!orig) return orig - return function (target, mode) { - try { - return orig.call(fs, target, mode) - } catch (er) { - if (!chownErOk(er)) throw er - } - } - } - - - function chownFix (orig) { - if (!orig) return orig - return function (target, uid, gid, cb) { - return orig.call(fs, target, uid, gid, function (er) { - if (chownErOk(er)) er = null - if (cb) cb.apply(this, arguments) - }) - } - } - - function chownFixSync (orig) { - if (!orig) return orig - return function (target, uid, gid) { - try { - return orig.call(fs, target, uid, gid) - } catch (er) { - if (!chownErOk(er)) throw er - } - } - } - - function statFix (orig) { - if (!orig) return orig - // Older versions of Node erroneously returned signed integers for - // uid + gid. - return function (target, options, cb) { - if (typeof options === 'function') { - cb = options - options = null - } - function callback (er, stats) { - if (stats) { - if (stats.uid < 0) stats.uid += 0x100000000 - if (stats.gid < 0) stats.gid += 0x100000000 - } - if (cb) cb.apply(this, arguments) - } - return options ? orig.call(fs, target, options, callback) - : orig.call(fs, target, callback) - } - } - - function statFixSync (orig) { - if (!orig) return orig - // Older versions of Node erroneously returned signed integers for - // uid + gid. - return function (target, options) { - var stats = options ? orig.call(fs, target, options) - : orig.call(fs, target) - if (stats) { - if (stats.uid < 0) stats.uid += 0x100000000 - if (stats.gid < 0) stats.gid += 0x100000000 - } - return stats; - } - } - - // ENOSYS means that the fs doesn't support the op. Just ignore - // that, because it doesn't matter. - // - // if there's no getuid, or if getuid() is something other - // than 0, and the error is EINVAL or EPERM, then just ignore - // it. - // - // This specific case is a silent failure in cp, install, tar, - // and most other unix tools that manage permissions. - // - // When running as root, or if other types of errors are - // encountered, then it's strict. 
- function chownErOk (er) { - if (!er) - return true - - if (er.code === "ENOSYS") - return true - - var nonroot = !process.getuid || process.getuid() !== 0 - if (nonroot) { - if (er.code === "EINVAL" || er.code === "EPERM") - return true - } - - return false - } -} - - -/***/ }), - -/***/ 3287: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -/*! - * is-plain-object - * - * Copyright (c) 2014-2017, Jon Schlinkert. - * Released under the MIT License. - */ - -function isObject(o) { - return Object.prototype.toString.call(o) === '[object Object]'; -} - -function isPlainObject(o) { - var ctor,prot; - - if (isObject(o) === false) return false; - - // If has modified constructor - ctor = o.constructor; - if (ctor === undefined) return true; - - // If has modified prototype - prot = ctor.prototype; - if (isObject(prot) === false) return false; - - // If constructor does not have an Object-specific method - if (prot.hasOwnProperty('isPrototypeOf') === false) { - return false; - } - - // Most likely a plain Object - return true; -} - -exports.isPlainObject = isPlainObject; - - -/***/ }), - -/***/ 6160: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -let _fs -try { - _fs = __nccwpck_require__(7758) -} catch (_) { - _fs = __nccwpck_require__(5747) -} -const universalify = __nccwpck_require__(9046) -const { stringify, stripBom } = __nccwpck_require__(5902) - -async function _readFile (file, options = {}) { - if (typeof options === 'string') { - options = { encoding: options } - } - - const fs = options.fs || _fs - - const shouldThrow = 'throws' in options ? options.throws : true - - let data = await universalify.fromCallback(fs.readFile)(file, options) - - data = stripBom(data) - - let obj - try { - obj = JSON.parse(data, options ? options.reviver : null) - } catch (err) { - if (shouldThrow) { - err.message = `${file}: ${err.message}` - throw err - } else { - return null - } - } - - return obj -} - -const readFile = universalify.fromPromise(_readFile) - -function readFileSync (file, options = {}) { - if (typeof options === 'string') { - options = { encoding: options } - } - - const fs = options.fs || _fs - - const shouldThrow = 'throws' in options ? options.throws : true - - try { - let content = fs.readFileSync(file, options) - content = stripBom(content) - return JSON.parse(content, options.reviver) - } catch (err) { - if (shouldThrow) { - err.message = `${file}: ${err.message}` - throw err - } else { - return null - } - } -} - -async function _writeFile (file, obj, options = {}) { - const fs = options.fs || _fs - - const str = stringify(obj, options) - - await universalify.fromCallback(fs.writeFile)(file, str, options) -} - -const writeFile = universalify.fromPromise(_writeFile) - -function writeFileSync (file, obj, options = {}) { - const fs = options.fs || _fs - - const str = stringify(obj, options) - // not sure if fs.writeFileSync returns anything, but just in case - return fs.writeFileSync(file, str, options) -} - -const jsonfile = { - readFile, - readFileSync, - writeFile, - writeFileSync -} - -module.exports = jsonfile - - -/***/ }), - -/***/ 5902: -/***/ ((module) => { - -function stringify (obj, { EOL = '\n', finalEOL = true, replacer = null, spaces } = {}) { - const EOF = finalEOL ? 
EOL : '' - const str = JSON.stringify(obj, replacer, spaces) - - return str.replace(/\n/g, EOL) + EOF -} - -function stripBom (content) { - // we do this because JSON.parse would convert it to a utf8 string if encoding wasn't specified - if (Buffer.isBuffer(content)) content = content.toString('utf8') - return content.replace(/^\uFEFF/, '') -} - -module.exports = { stringify, stripBom } - - -/***/ }), - -/***/ 467: -/***/ ((module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } - -var Stream = _interopDefault(__nccwpck_require__(2413)); -var http = _interopDefault(__nccwpck_require__(8605)); -var Url = _interopDefault(__nccwpck_require__(8835)); -var whatwgUrl = _interopDefault(__nccwpck_require__(8665)); -var https = _interopDefault(__nccwpck_require__(7211)); -var zlib = _interopDefault(__nccwpck_require__(8761)); - -// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js - -// fix for "Readable" isn't a named export issue -const Readable = Stream.Readable; - -const BUFFER = Symbol('buffer'); -const TYPE = Symbol('type'); - -class Blob { - constructor() { - this[TYPE] = ''; - - const blobParts = arguments[0]; - const options = arguments[1]; - - const buffers = []; - let size = 0; - - if (blobParts) { - const a = blobParts; - const length = Number(a.length); - for (let i = 0; i < length; i++) { - const element = a[i]; - let buffer; - if (element instanceof Buffer) { - buffer = element; - } else if (ArrayBuffer.isView(element)) { - buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); - } else if (element instanceof ArrayBuffer) { - buffer = Buffer.from(element); - } else if (element instanceof Blob) { - buffer = element[BUFFER]; - } else { - buffer = Buffer.from(typeof element === 'string' ? 
element : String(element)); - } - size += buffer.length; - buffers.push(buffer); - } - } - - this[BUFFER] = Buffer.concat(buffers); - - let type = options && options.type !== undefined && String(options.type).toLowerCase(); - if (type && !/[^\u0020-\u007E]/.test(type)) { - this[TYPE] = type; - } - } - get size() { - return this[BUFFER].length; - } - get type() { - return this[TYPE]; - } - text() { - return Promise.resolve(this[BUFFER].toString()); - } - arrayBuffer() { - const buf = this[BUFFER]; - const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); - return Promise.resolve(ab); - } - stream() { - const readable = new Readable(); - readable._read = function () {}; - readable.push(this[BUFFER]); - readable.push(null); - return readable; - } - toString() { - return '[object Blob]'; - } - slice() { - const size = this.size; - - const start = arguments[0]; - const end = arguments[1]; - let relativeStart, relativeEnd; - if (start === undefined) { - relativeStart = 0; - } else if (start < 0) { - relativeStart = Math.max(size + start, 0); - } else { - relativeStart = Math.min(start, size); - } - if (end === undefined) { - relativeEnd = size; - } else if (end < 0) { - relativeEnd = Math.max(size + end, 0); - } else { - relativeEnd = Math.min(end, size); - } - const span = Math.max(relativeEnd - relativeStart, 0); - - const buffer = this[BUFFER]; - const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); - const blob = new Blob([], { type: arguments[2] }); - blob[BUFFER] = slicedBuffer; - return blob; - } -} - -Object.defineProperties(Blob.prototype, { - size: { enumerable: true }, - type: { enumerable: true }, - slice: { enumerable: true } -}); - -Object.defineProperty(Blob.prototype, Symbol.toStringTag, { - value: 'Blob', - writable: false, - enumerable: false, - configurable: true -}); - -/** - * fetch-error.js - * - * FetchError interface for operational errors - */ - -/** - * Create FetchError instance - * - * @param String message Error message for human - * @param String type Error type for machine - * @param String systemError For Node.js system error - * @return FetchError - */ -function FetchError(message, type, systemError) { - Error.call(this, message); - - this.message = message; - this.type = type; - - // when err.type is `system`, err.code contains system error code - if (systemError) { - this.code = this.errno = systemError.code; - } - - // hide custom error implementation details from end-users - Error.captureStackTrace(this, this.constructor); -} - -FetchError.prototype = Object.create(Error.prototype); -FetchError.prototype.constructor = FetchError; -FetchError.prototype.name = 'FetchError'; - -let convert; -try { - convert = __nccwpck_require__(2877).convert; -} catch (e) {} - -const INTERNALS = Symbol('Body internals'); - -// fix an issue where "PassThrough" isn't a named export for node <10 -const PassThrough = Stream.PassThrough; - -/** - * Body mixin - * - * Ref: https://fetch.spec.whatwg.org/#body - * - * @param Stream body Readable stream - * @param Object opts Response options - * @return Void - */ -function Body(body) { - var _this = this; - - var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, - _ref$size = _ref.size; - - let size = _ref$size === undefined ? 0 : _ref$size; - var _ref$timeout = _ref.timeout; - let timeout = _ref$timeout === undefined ? 
0 : _ref$timeout; - - if (body == null) { - // body is undefined or null - body = null; - } else if (isURLSearchParams(body)) { - // body is a URLSearchParams - body = Buffer.from(body.toString()); - } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { - // body is ArrayBuffer - body = Buffer.from(body); - } else if (ArrayBuffer.isView(body)) { - // body is ArrayBufferView - body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); - } else if (body instanceof Stream) ; else { - // none of the above - // coerce to string then buffer - body = Buffer.from(String(body)); - } - this[INTERNALS] = { - body, - disturbed: false, - error: null - }; - this.size = size; - this.timeout = timeout; - - if (body instanceof Stream) { - body.on('error', function (err) { - const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); - _this[INTERNALS].error = error; - }); - } -} - -Body.prototype = { - get body() { - return this[INTERNALS].body; - }, - - get bodyUsed() { - return this[INTERNALS].disturbed; - }, - - /** - * Decode response as ArrayBuffer - * - * @return Promise - */ - arrayBuffer() { - return consumeBody.call(this).then(function (buf) { - return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); - }); - }, - - /** - * Return raw response as Blob - * - * @return Promise - */ - blob() { - let ct = this.headers && this.headers.get('content-type') || ''; - return consumeBody.call(this).then(function (buf) { - return Object.assign( - // Prevent copying - new Blob([], { - type: ct.toLowerCase() - }), { - [BUFFER]: buf - }); - }); - }, - - /** - * Decode response as json - * - * @return Promise - */ - json() { - var _this2 = this; - - return consumeBody.call(this).then(function (buffer) { - try { - return JSON.parse(buffer.toString()); - } catch (err) { - return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); - } - }); - }, - - /** - * Decode response as text - * - * @return Promise - */ - text() { - return consumeBody.call(this).then(function (buffer) { - return buffer.toString(); - }); - }, - - /** - * Decode response as buffer (non-spec api) - * - * @return Promise - */ - buffer() { - return consumeBody.call(this); - }, - - /** - * Decode response as text, while automatically detecting the encoding and - * trying to decode to UTF-8 (non-spec api) - * - * @return Promise - */ - textConverted() { - var _this3 = this; - - return consumeBody.call(this).then(function (buffer) { - return convertBody(buffer, _this3.headers); - }); - } -}; - -// In browsers, all properties are enumerable. -Object.defineProperties(Body.prototype, { - body: { enumerable: true }, - bodyUsed: { enumerable: true }, - arrayBuffer: { enumerable: true }, - blob: { enumerable: true }, - json: { enumerable: true }, - text: { enumerable: true } -}); - -Body.mixIn = function (proto) { - for (const name of Object.getOwnPropertyNames(Body.prototype)) { - // istanbul ignore else: future proof - if (!(name in proto)) { - const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); - Object.defineProperty(proto, name, desc); - } - } -}; - -/** - * Consume and convert an entire Body to a Buffer. 
- * - * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body - * - * @return Promise - */ -function consumeBody() { - var _this4 = this; - - if (this[INTERNALS].disturbed) { - return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); - } - - this[INTERNALS].disturbed = true; - - if (this[INTERNALS].error) { - return Body.Promise.reject(this[INTERNALS].error); - } - - let body = this.body; - - // body is null - if (body === null) { - return Body.Promise.resolve(Buffer.alloc(0)); - } - - // body is blob - if (isBlob(body)) { - body = body.stream(); - } - - // body is buffer - if (Buffer.isBuffer(body)) { - return Body.Promise.resolve(body); - } - - // istanbul ignore if: should never happen - if (!(body instanceof Stream)) { - return Body.Promise.resolve(Buffer.alloc(0)); - } - - // body is stream - // get ready to actually consume the body - let accum = []; - let accumBytes = 0; - let abort = false; - - return new Body.Promise(function (resolve, reject) { - let resTimeout; - - // allow timeout on slow response body - if (_this4.timeout) { - resTimeout = setTimeout(function () { - abort = true; - reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); - }, _this4.timeout); - } - - // handle stream errors - body.on('error', function (err) { - if (err.name === 'AbortError') { - // if the request was aborted, reject with this Error - abort = true; - reject(err); - } else { - // other errors, such as incorrect content-encoding - reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); - } - }); - - body.on('data', function (chunk) { - if (abort || chunk === null) { - return; - } - - if (_this4.size && accumBytes + chunk.length > _this4.size) { - abort = true; - reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); - return; - } - - accumBytes += chunk.length; - accum.push(chunk); - }); - - body.on('end', function () { - if (abort) { - return; - } - - clearTimeout(resTimeout); - - try { - resolve(Buffer.concat(accum, accumBytes)); - } catch (err) { - // handle streams that have accumulated too much data (issue #414) - reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); - } - }); - }); -} - -/** - * Detect buffer encoding and convert to target encoding - * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding - * - * @param Buffer buffer Incoming buffer - * @param String encoding Target encoding - * @return String - */ -function convertBody(buffer, headers) { - if (typeof convert !== 'function') { - throw new Error('The package `encoding` must be installed to use the textConverted() function'); - } - - const ct = headers.get('content-type'); - let charset = 'utf-8'; - let res, str; - - // header - if (ct) { - res = /charset=([^;]*)/i.exec(ct); - } - - // no charset in content type, peek at response body for at most 1024 bytes - str = buffer.slice(0, 1024).toString(); - - // html5 - if (!res && str) { - res = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined; - - this[MAP] = Object.create(null); - - if (init instanceof Headers) { - const rawHeaders = init.raw(); - const headerNames = Object.keys(rawHeaders); - - for (const headerName of headerNames) { - for (const value of rawHeaders[headerName]) { - this.append(headerName, value); - } - } - - return; - } - - // We don't worry about converting prop to ByteString here as append() - // will handle it. - if (init == null) ; else if (typeof init === 'object') { - const method = init[Symbol.iterator]; - if (method != null) { - if (typeof method !== 'function') { - throw new TypeError('Header pairs must be iterable'); - } - - // sequence> - // Note: per spec we have to first exhaust the lists then process them - const pairs = []; - for (const pair of init) { - if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { - throw new TypeError('Each header pair must be iterable'); - } - pairs.push(Array.from(pair)); - } - - for (const pair of pairs) { - if (pair.length !== 2) { - throw new TypeError('Each header pair must be a name/value tuple'); - } - this.append(pair[0], pair[1]); - } - } else { - // record - for (const key of Object.keys(init)) { - const value = init[key]; - this.append(key, value); - } - } - } else { - throw new TypeError('Provided initializer must be an object'); - } - } - - /** - * Return combined header value given name - * - * @param String name Header name - * @return Mixed - */ - get(name) { - name = `${name}`; - validateName(name); - const key = find(this[MAP], name); - if (key === undefined) { - return null; - } - - return this[MAP][key].join(', '); - } - - /** - * Iterate over all headers - * - * @param Function callback Executed for each item with parameters (value, name, thisArg) - * @param Boolean thisArg `this` context for callback function - * @return Void - */ - forEach(callback) { - let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; - - let pairs = getHeaders(this); - let i = 0; - while (i < pairs.length) { - var _pairs$i = pairs[i]; - const name = _pairs$i[0], - value = _pairs$i[1]; - - callback.call(thisArg, value, name, this); - pairs = getHeaders(this); - i++; - } - } - - /** - * Overwrite header values given name - * - * @param String name Header name - * @param String value Header value - * @return Void - */ - set(name, value) { - name = `${name}`; - value = `${value}`; - validateName(name); - validateValue(value); - const key = find(this[MAP], name); - this[MAP][key !== undefined ? 
key : name] = [value]; - } - - /** - * Append a value onto existing header - * - * @param String name Header name - * @param String value Header value - * @return Void - */ - append(name, value) { - name = `${name}`; - value = `${value}`; - validateName(name); - validateValue(value); - const key = find(this[MAP], name); - if (key !== undefined) { - this[MAP][key].push(value); - } else { - this[MAP][name] = [value]; - } - } - - /** - * Check for header name existence - * - * @param String name Header name - * @return Boolean - */ - has(name) { - name = `${name}`; - validateName(name); - return find(this[MAP], name) !== undefined; - } - - /** - * Delete all header values given name - * - * @param String name Header name - * @return Void - */ - delete(name) { - name = `${name}`; - validateName(name); - const key = find(this[MAP], name); - if (key !== undefined) { - delete this[MAP][key]; - } - } - - /** - * Return raw headers (non-spec api) - * - * @return Object - */ - raw() { - return this[MAP]; - } - - /** - * Get an iterator on keys. - * - * @return Iterator - */ - keys() { - return createHeadersIterator(this, 'key'); - } - - /** - * Get an iterator on values. - * - * @return Iterator - */ - values() { - return createHeadersIterator(this, 'value'); - } - - /** - * Get an iterator on entries. - * - * This is the default iterator of the Headers object. - * - * @return Iterator - */ - [Symbol.iterator]() { - return createHeadersIterator(this, 'key+value'); - } -} -Headers.prototype.entries = Headers.prototype[Symbol.iterator]; - -Object.defineProperty(Headers.prototype, Symbol.toStringTag, { - value: 'Headers', - writable: false, - enumerable: false, - configurable: true -}); - -Object.defineProperties(Headers.prototype, { - get: { enumerable: true }, - forEach: { enumerable: true }, - set: { enumerable: true }, - append: { enumerable: true }, - has: { enumerable: true }, - delete: { enumerable: true }, - keys: { enumerable: true }, - values: { enumerable: true }, - entries: { enumerable: true } -}); - -function getHeaders(headers) { - let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; - - const keys = Object.keys(headers[MAP]).sort(); - return keys.map(kind === 'key' ? function (k) { - return k.toLowerCase(); - } : kind === 'value' ? 
function (k) { - return headers[MAP][k].join(', '); - } : function (k) { - return [k.toLowerCase(), headers[MAP][k].join(', ')]; - }); -} - -const INTERNAL = Symbol('internal'); - -function createHeadersIterator(target, kind) { - const iterator = Object.create(HeadersIteratorPrototype); - iterator[INTERNAL] = { - target, - kind, - index: 0 - }; - return iterator; -} - -const HeadersIteratorPrototype = Object.setPrototypeOf({ - next() { - // istanbul ignore if - if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { - throw new TypeError('Value of `this` is not a HeadersIterator'); - } - - var _INTERNAL = this[INTERNAL]; - const target = _INTERNAL.target, - kind = _INTERNAL.kind, - index = _INTERNAL.index; - - const values = getHeaders(target, kind); - const len = values.length; - if (index >= len) { - return { - value: undefined, - done: true - }; - } - - this[INTERNAL].index = index + 1; - - return { - value: values[index], - done: false - }; - } -}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); - -Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { - value: 'HeadersIterator', - writable: false, - enumerable: false, - configurable: true -}); - -/** - * Export the Headers object in a form that Node.js can consume. - * - * @param Headers headers - * @return Object - */ -function exportNodeCompatibleHeaders(headers) { - const obj = Object.assign({ __proto__: null }, headers[MAP]); - - // http.request() only supports string as Host header. This hack makes - // specifying custom Host header possible. - const hostHeaderKey = find(headers[MAP], 'Host'); - if (hostHeaderKey !== undefined) { - obj[hostHeaderKey] = obj[hostHeaderKey][0]; - } - - return obj; -} - -/** - * Create a Headers object from an object of headers, ignoring those that do - * not conform to HTTP grammar productions. - * - * @param Object obj Object of headers - * @return Headers - */ -function createHeadersLenient(obj) { - const headers = new Headers(); - for (const name of Object.keys(obj)) { - if (invalidTokenRegex.test(name)) { - continue; - } - if (Array.isArray(obj[name])) { - for (const val of obj[name]) { - if (invalidHeaderCharRegex.test(val)) { - continue; - } - if (headers[MAP][name] === undefined) { - headers[MAP][name] = [val]; - } else { - headers[MAP][name].push(val); - } - } - } else if (!invalidHeaderCharRegex.test(obj[name])) { - headers[MAP][name] = [obj[name]]; - } - } - return headers; -} - -const INTERNALS$1 = Symbol('Response internals'); - -// fix an issue where "STATUS_CODES" aren't a named export for node <10 -const STATUS_CODES = http.STATUS_CODES; - -/** - * Response class - * - * @param Stream body Readable stream - * @param Object opts Response options - * @return Void - */ -class Response { - constructor() { - let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; - let opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; - - Body.call(this, body, opts); - - const status = opts.status || 200; - const headers = new Headers(opts.headers); - - if (body != null && !headers.has('Content-Type')) { - const contentType = extractContentType(body); - if (contentType) { - headers.append('Content-Type', contentType); - } - } - - this[INTERNALS$1] = { - url: opts.url, - status, - statusText: opts.statusText || STATUS_CODES[status], - headers, - counter: opts.counter - }; - } - - get url() { - return this[INTERNALS$1].url || ''; - } - - get status() { - return this[INTERNALS$1].status; - } - - /** - * Convenience property representing if the request ended normally - */ - get ok() { - return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; - } - - get redirected() { - return this[INTERNALS$1].counter > 0; - } - - get statusText() { - return this[INTERNALS$1].statusText; - } - - get headers() { - return this[INTERNALS$1].headers; - } - - /** - * Clone this response - * - * @return Response - */ - clone() { - return new Response(clone(this), { - url: this.url, - status: this.status, - statusText: this.statusText, - headers: this.headers, - ok: this.ok, - redirected: this.redirected - }); - } -} - -Body.mixIn(Response.prototype); - -Object.defineProperties(Response.prototype, { - url: { enumerable: true }, - status: { enumerable: true }, - ok: { enumerable: true }, - redirected: { enumerable: true }, - statusText: { enumerable: true }, - headers: { enumerable: true }, - clone: { enumerable: true } -}); - -Object.defineProperty(Response.prototype, Symbol.toStringTag, { - value: 'Response', - writable: false, - enumerable: false, - configurable: true -}); - -const INTERNALS$2 = Symbol('Request internals'); -const URL = Url.URL || whatwgUrl.URL; - -// fix an issue where "format", "parse" aren't a named export for node <10 -const parse_url = Url.parse; -const format_url = Url.format; - -/** - * Wrapper around `new URL` to handle arbitrary URLs - * - * @param {string} urlStr - * @return {void} - */ -function parseURL(urlStr) { - /* - Check whether the URL is absolute or not - Scheme: https://tools.ietf.org/html/rfc3986#section-3.1 - Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3 - */ - if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) { - urlStr = new URL(urlStr).toString(); - } - - // Fallback to old implementation for arbitrary URLs - return parse_url(urlStr); -} - -const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; - -/** - * Check if a value is an instance of Request. - * - * @param Mixed input - * @return Boolean - */ -function isRequest(input) { - return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; -} - -function isAbortSignal(signal) { - const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); - return !!(proto && proto.constructor.name === 'AbortSignal'); -} - -/** - * Request class - * - * @param Mixed input Url or Request instance - * @param Object init Custom options - * @return Void - */ -class Request { - constructor(input) { - let init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; - - let parsedURL; - - // normalize input - if (!isRequest(input)) { - if (input && input.href) { - // in order to support Node.js' Url objects; though WHATWG's URL objects - // will fall into this branch also (since their `toString()` will return - // `href` property anyway) - parsedURL = parseURL(input.href); - } else { - // coerce input to a string before attempting to parse - parsedURL = parseURL(`${input}`); - } - input = {}; - } else { - parsedURL = parseURL(input.url); - } - - let method = init.method || input.method || 'GET'; - method = method.toUpperCase(); - - if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { - throw new TypeError('Request with GET/HEAD method cannot have body'); - } - - let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null; - - Body.call(this, inputBody, { - timeout: init.timeout || input.timeout || 0, - size: init.size || input.size || 0 - }); - - const headers = new Headers(init.headers || input.headers || {}); - - if (inputBody != null && !headers.has('Content-Type')) { - const contentType = extractContentType(inputBody); - if (contentType) { - headers.append('Content-Type', contentType); - } - } - - let signal = isRequest(input) ? input.signal : null; - if ('signal' in init) signal = init.signal; - - if (signal != null && !isAbortSignal(signal)) { - throw new TypeError('Expected signal to be an instanceof AbortSignal'); - } - - this[INTERNALS$2] = { - method, - redirect: init.redirect || input.redirect || 'follow', - headers, - parsedURL, - signal - }; - - // node-fetch-only options - this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; - this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; - this.counter = init.counter || input.counter || 0; - this.agent = init.agent || input.agent; - } - - get method() { - return this[INTERNALS$2].method; - } - - get url() { - return format_url(this[INTERNALS$2].parsedURL); - } - - get headers() { - return this[INTERNALS$2].headers; - } - - get redirect() { - return this[INTERNALS$2].redirect; - } - - get signal() { - return this[INTERNALS$2].signal; - } - - /** - * Clone this request - * - * @return Request - */ - clone() { - return new Request(this); - } -} - -Body.mixIn(Request.prototype); - -Object.defineProperty(Request.prototype, Symbol.toStringTag, { - value: 'Request', - writable: false, - enumerable: false, - configurable: true -}); - -Object.defineProperties(Request.prototype, { - method: { enumerable: true }, - url: { enumerable: true }, - headers: { enumerable: true }, - redirect: { enumerable: true }, - clone: { enumerable: true }, - signal: { enumerable: true } -}); - -/** - * Convert a Request to Node.js http request options. 
- * - * @param Request A Request instance - * @return Object The options object to be passed to http.request - */ -function getNodeRequestOptions(request) { - const parsedURL = request[INTERNALS$2].parsedURL; - const headers = new Headers(request[INTERNALS$2].headers); - - // fetch step 1.3 - if (!headers.has('Accept')) { - headers.set('Accept', '*/*'); - } - - // Basic fetch - if (!parsedURL.protocol || !parsedURL.hostname) { - throw new TypeError('Only absolute URLs are supported'); - } - - if (!/^https?:$/.test(parsedURL.protocol)) { - throw new TypeError('Only HTTP(S) protocols are supported'); - } - - if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { - throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); - } - - // HTTP-network-or-cache fetch steps 2.4-2.7 - let contentLengthValue = null; - if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { - contentLengthValue = '0'; - } - if (request.body != null) { - const totalBytes = getTotalBytes(request); - if (typeof totalBytes === 'number') { - contentLengthValue = String(totalBytes); - } - } - if (contentLengthValue) { - headers.set('Content-Length', contentLengthValue); - } - - // HTTP-network-or-cache fetch step 2.11 - if (!headers.has('User-Agent')) { - headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); - } - - // HTTP-network-or-cache fetch step 2.15 - if (request.compress && !headers.has('Accept-Encoding')) { - headers.set('Accept-Encoding', 'gzip,deflate'); - } - - let agent = request.agent; - if (typeof agent === 'function') { - agent = agent(parsedURL); - } - - if (!headers.has('Connection') && !agent) { - headers.set('Connection', 'close'); - } - - // HTTP-network fetch step 4.2 - // chunked encoding is handled by Node.js - - return Object.assign({}, parsedURL, { - method: request.method, - headers: exportNodeCompatibleHeaders(headers), - agent - }); -} - -/** - * abort-error.js - * - * AbortError interface for cancelled requests - */ - -/** - * Create AbortError instance - * - * @param String message Error message for human - * @return AbortError - */ -function AbortError(message) { - Error.call(this, message); - - this.type = 'aborted'; - this.message = message; - - // hide custom error implementation details from end-users - Error.captureStackTrace(this, this.constructor); -} - -AbortError.prototype = Object.create(Error.prototype); -AbortError.prototype.constructor = AbortError; -AbortError.prototype.name = 'AbortError'; - -const URL$1 = Url.URL || whatwgUrl.URL; - -// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 -const PassThrough$1 = Stream.PassThrough; - -const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) { - const orig = new URL$1(original).hostname; - const dest = new URL$1(destination).hostname; - - return orig === dest || orig[orig.length - dest.length - 1] === '.' 
&& orig.endsWith(dest); -}; - -/** - * Fetch function - * - * @param Mixed url Absolute url or Request instance - * @param Object opts Fetch options - * @return Promise - */ -function fetch(url, opts) { - - // allow custom promise - if (!fetch.Promise) { - throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); - } - - Body.Promise = fetch.Promise; - - // wrap http.request into fetch - return new fetch.Promise(function (resolve, reject) { - // build request object - const request = new Request(url, opts); - const options = getNodeRequestOptions(request); - - const send = (options.protocol === 'https:' ? https : http).request; - const signal = request.signal; - - let response = null; - - const abort = function abort() { - let error = new AbortError('The user aborted a request.'); - reject(error); - if (request.body && request.body instanceof Stream.Readable) { - request.body.destroy(error); - } - if (!response || !response.body) return; - response.body.emit('error', error); - }; - - if (signal && signal.aborted) { - abort(); - return; - } - - const abortAndFinalize = function abortAndFinalize() { - abort(); - finalize(); - }; - - // send request - const req = send(options); - let reqTimeout; - - if (signal) { - signal.addEventListener('abort', abortAndFinalize); - } - - function finalize() { - req.abort(); - if (signal) signal.removeEventListener('abort', abortAndFinalize); - clearTimeout(reqTimeout); - } - - if (request.timeout) { - req.once('socket', function (socket) { - reqTimeout = setTimeout(function () { - reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); - finalize(); - }, request.timeout); - }); - } - - req.on('error', function (err) { - reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); - finalize(); - }); - - req.on('response', function (res) { - clearTimeout(reqTimeout); - - const headers = createHeadersLenient(res.headers); - - // HTTP fetch step 5 - if (fetch.isRedirect(res.statusCode)) { - // HTTP fetch step 5.2 - const location = headers.get('Location'); - - // HTTP fetch step 5.3 - let locationURL = null; - try { - locationURL = location === null ? null : new URL$1(location, request.url).toString(); - } catch (err) { - // error here can only be invalid URL in Location: header - // do not throw when options.redirect == manual - // let the user extract the errorneous redirect URL - if (request.redirect !== 'manual') { - reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')); - finalize(); - return; - } - } - - // HTTP fetch step 5.5 - switch (request.redirect) { - case 'error': - reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); - finalize(); - return; - case 'manual': - // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. 
- if (locationURL !== null) { - // handle corrupted header - try { - headers.set('Location', locationURL); - } catch (err) { - // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request - reject(err); - } - } - break; - case 'follow': - // HTTP-redirect fetch step 2 - if (locationURL === null) { - break; - } - - // HTTP-redirect fetch step 5 - if (request.counter >= request.follow) { - reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); - finalize(); - return; - } - - // HTTP-redirect fetch step 6 (counter increment) - // Create a new Request object. - const requestOpts = { - headers: new Headers(request.headers), - follow: request.follow, - counter: request.counter + 1, - agent: request.agent, - compress: request.compress, - method: request.method, - body: request.body, - signal: request.signal, - timeout: request.timeout, - size: request.size - }; - - if (!isDomainOrSubdomain(request.url, locationURL)) { - for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) { - requestOpts.headers.delete(name); - } - } - - // HTTP-redirect fetch step 9 - if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { - reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); - finalize(); - return; - } - - // HTTP-redirect fetch step 11 - if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { - requestOpts.method = 'GET'; - requestOpts.body = undefined; - requestOpts.headers.delete('content-length'); - } - - // HTTP-redirect fetch step 15 - resolve(fetch(new Request(locationURL, requestOpts))); - finalize(); - return; - } - } - - // prepare response - res.once('end', function () { - if (signal) signal.removeEventListener('abort', abortAndFinalize); - }); - let body = res.pipe(new PassThrough$1()); - - const response_options = { - url: request.url, - status: res.statusCode, - statusText: res.statusMessage, - headers: headers, - size: request.size, - timeout: request.timeout, - counter: request.counter - }; - - // HTTP-network fetch step 12.1.1.3 - const codings = headers.get('Content-Encoding'); - - // HTTP-network fetch step 12.1.1.4: handle content codings - - // in following scenarios we ignore compression support - // 1. compression support is disabled - // 2. HEAD request - // 3. no Content-Encoding header - // 4. no content response (204) - // 5. content not modified response (304) - if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { - response = new Response(body, response_options); - resolve(response); - return; - } - - // For Node v6+ - // Be less strict when decoding compressed responses, since sometimes - // servers send slightly invalid responses that are still accepted - // by common browsers. - // Always using Z_SYNC_FLUSH is what cURL does. 
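For context, the code that follows decodes gzip/deflate/brotli response bodies with lenient flush settings. A minimal, self-contained sketch of the same zlib options (the payload and variable names are illustrative, not taken from this patch):

    const zlib = require('zlib');

    // Compress a sample payload, then decode it through a gunzip stream that
    // uses Z_SYNC_FLUSH for both flush and finishFlush, as the deleted code
    // does; this lets the decoder emit data even for slightly out-of-spec or
    // truncated streams that some servers produce.
    const gzipped = zlib.gzipSync(Buffer.from('hello from the chart CI'));

    const gunzip = zlib.createGunzip({
      flush: zlib.constants.Z_SYNC_FLUSH,
      finishFlush: zlib.constants.Z_SYNC_FLUSH,
    });

    const chunks = [];
    gunzip.on('data', (chunk) => chunks.push(chunk));
    gunzip.on('end', () => console.log(Buffer.concat(chunks).toString()));
    gunzip.end(gzipped);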
- const zlibOptions = { - flush: zlib.Z_SYNC_FLUSH, - finishFlush: zlib.Z_SYNC_FLUSH - }; - - // for gzip - if (codings == 'gzip' || codings == 'x-gzip') { - body = body.pipe(zlib.createGunzip(zlibOptions)); - response = new Response(body, response_options); - resolve(response); - return; - } - - // for deflate - if (codings == 'deflate' || codings == 'x-deflate') { - // handle the infamous raw deflate response from old servers - // a hack for old IIS and Apache servers - const raw = res.pipe(new PassThrough$1()); - raw.once('data', function (chunk) { - // see http://stackoverflow.com/questions/37519828 - if ((chunk[0] & 0x0F) === 0x08) { - body = body.pipe(zlib.createInflate()); - } else { - body = body.pipe(zlib.createInflateRaw()); - } - response = new Response(body, response_options); - resolve(response); - }); - return; - } - - // for br - if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { - body = body.pipe(zlib.createBrotliDecompress()); - response = new Response(body, response_options); - resolve(response); - return; - } - - // otherwise, use response as-is - response = new Response(body, response_options); - resolve(response); - }); - - writeToStream(req, request); - }); -} -/** - * Redirect code matching - * - * @param Number code Status code - * @return Boolean - */ -fetch.isRedirect = function (code) { - return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; -}; - -// expose Promise -fetch.Promise = global.Promise; - -module.exports = exports = fetch; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.default = exports; -exports.Headers = Headers; -exports.Request = Request; -exports.Response = Response; -exports.FetchError = FetchError; - - -/***/ }), - -/***/ 1223: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var wrappy = __nccwpck_require__(2940) -module.exports = wrappy(once) -module.exports.strict = wrappy(onceStrict) - -once.proto = once(function () { - Object.defineProperty(Function.prototype, 'once', { - value: function () { - return once(this) - }, - configurable: true - }) - - Object.defineProperty(Function.prototype, 'onceStrict', { - value: function () { - return onceStrict(this) - }, - configurable: true - }) -}) - -function once (fn) { - var f = function () { - if (f.called) return f.value - f.called = true - return f.value = fn.apply(this, arguments) - } - f.called = false - return f -} - -function onceStrict (fn) { - var f = function () { - if (f.called) - throw new Error(f.onceError) - f.called = true - return f.value = fn.apply(this, arguments) - } - var name = fn.name || 'Function wrapped with `once`' - f.onceError = name + " shouldn't be called more than once" - f.called = false - return f -} - - -/***/ }), - -/***/ 4256: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -var punycode = __nccwpck_require__(4213); -var mappingTable = __nccwpck_require__(68); - -var PROCESSING_OPTIONS = { - TRANSITIONAL: 0, - NONTRANSITIONAL: 1 -}; - -function normalize(str) { // fix bug in v8 - return str.split('\u0000').map(function (s) { return s.normalize('NFC'); }).join('\u0000'); -} - -function findStatus(val) { - var start = 0; - var end = mappingTable.length - 1; - - while (start <= end) { - var mid = Math.floor((start + end) / 2); - - var target = mappingTable[mid]; - if (target[0][0] <= val && target[0][1] >= val) { - return target; - } else if (target[0][0] > val) { - end = mid - 1; - } else { - start = mid + 1; - } - } - 
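For reference, a short sketch of how the redirect handling implemented by the fetch() function deleted above is driven from a caller of node-fetch 2.x; the URLs are placeholders:

    const fetch = require('node-fetch');

    async function main() {
      // Follow redirects (the default), but give up after 5 hops.
      const res = await fetch('https://example.org/', { redirect: 'follow', follow: 5, compress: true });
      console.log(res.status, res.url, res.redirected);

      // 'manual' stops at the first redirect; the resolved target is exposed
      // through the Location header, as set in the 'manual' branch above.
      const manual = await fetch('https://example.org/old-path', { redirect: 'manual' });
      if (fetch.isRedirect(manual.status)) {
        console.log('would redirect to:', manual.headers.get('location'));
      }
    }

    main().catch(console.error);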
- return null; -} - -var regexAstralSymbols = /[\uD800-\uDBFF][\uDC00-\uDFFF]/g; - -function countSymbols(string) { - return string - // replace every surrogate pair with a BMP symbol - .replace(regexAstralSymbols, '_') - // then get the length - .length; -} - -function mapChars(domain_name, useSTD3, processing_option) { - var hasError = false; - var processed = ""; - - var len = countSymbols(domain_name); - for (var i = 0; i < len; ++i) { - var codePoint = domain_name.codePointAt(i); - var status = findStatus(codePoint); - - switch (status[1]) { - case "disallowed": - hasError = true; - processed += String.fromCodePoint(codePoint); - break; - case "ignored": - break; - case "mapped": - processed += String.fromCodePoint.apply(String, status[2]); - break; - case "deviation": - if (processing_option === PROCESSING_OPTIONS.TRANSITIONAL) { - processed += String.fromCodePoint.apply(String, status[2]); - } else { - processed += String.fromCodePoint(codePoint); - } - break; - case "valid": - processed += String.fromCodePoint(codePoint); - break; - case "disallowed_STD3_mapped": - if (useSTD3) { - hasError = true; - processed += String.fromCodePoint(codePoint); - } else { - processed += String.fromCodePoint.apply(String, status[2]); - } - break; - case "disallowed_STD3_valid": - if (useSTD3) { - hasError = true; - } - - processed += String.fromCodePoint(codePoint); - break; - } - } - - return { - string: processed, - error: hasError - }; -} - -var combiningMarksRegex = /[\u0300-\u036F\u0483-\u0489\u0591-\u05BD\u05BF\u05C1\u05C2\u05C4\u05C5\u05C7\u0610-\u061A\u064B-\u065F\u0670\u06D6-\u06DC\u06DF-\u06E4\u06E7\u06E8\u06EA-\u06ED\u0711\u0730-\u074A\u07A6-\u07B0\u07EB-\u07F3\u0816-\u0819\u081B-\u0823\u0825-\u0827\u0829-\u082D\u0859-\u085B\u08E4-\u0903\u093A-\u093C\u093E-\u094F\u0951-\u0957\u0962\u0963\u0981-\u0983\u09BC\u09BE-\u09C4\u09C7\u09C8\u09CB-\u09CD\u09D7\u09E2\u09E3\u0A01-\u0A03\u0A3C\u0A3E-\u0A42\u0A47\u0A48\u0A4B-\u0A4D\u0A51\u0A70\u0A71\u0A75\u0A81-\u0A83\u0ABC\u0ABE-\u0AC5\u0AC7-\u0AC9\u0ACB-\u0ACD\u0AE2\u0AE3\u0B01-\u0B03\u0B3C\u0B3E-\u0B44\u0B47\u0B48\u0B4B-\u0B4D\u0B56\u0B57\u0B62\u0B63\u0B82\u0BBE-\u0BC2\u0BC6-\u0BC8\u0BCA-\u0BCD\u0BD7\u0C00-\u0C03\u0C3E-\u0C44\u0C46-\u0C48\u0C4A-\u0C4D\u0C55\u0C56\u0C62\u0C63\u0C81-\u0C83\u0CBC\u0CBE-\u0CC4\u0CC6-\u0CC8\u0CCA-\u0CCD\u0CD5\u0CD6\u0CE2\u0CE3\u0D01-\u0D03\u0D3E-\u0D44\u0D46-\u0D48\u0D4A-\u0D4D\u0D57\u0D62\u0D63\u0D82\u0D83\u0DCA\u0DCF-\u0DD4\u0DD6\u0DD8-\u0DDF\u0DF2\u0DF3\u0E31\u0E34-\u0E3A\u0E47-\u0E4E\u0EB1\u0EB4-\u0EB9\u0EBB\u0EBC\u0EC8-\u0ECD\u0F18\u0F19\u0F35\u0F37\u0F39\u0F3E\u0F3F\u0F71-\u0F84\u0F86\u0F87\u0F8D-\u0F97\u0F99-\u0FBC\u0FC6\u102B-\u103E\u1056-\u1059\u105E-\u1060\u1062-\u1064\u1067-\u106D\u1071-\u1074\u1082-\u108D\u108F\u109A-\u109D\u135D-\u135F\u1712-\u1714\u1732-\u1734\u1752\u1753\u1772\u1773\u17B4-\u17D3\u17DD\u180B-\u180D\u18A9\u1920-\u192B\u1930-\u193B\u19B0-\u19C0\u19C8\u19C9\u1A17-\u1A1B\u1A55-\u1A5E\u1A60-\u1A7C\u1A7F\u1AB0-\u1ABE\u1B00-\u1B04\u1B34-\u1B44\u1B6B-\u1B73\u1B80-\u1B82\u1BA1-\u1BAD\u1BE6-\u1BF3\u1C24-\u1C37\u1CD0-\u1CD2\u1CD4-\u1CE8\u1CED\u1CF2-\u1CF4\u1CF8\u1CF9\u1DC0-\u1DF5\u1DFC-\u1DFF\u20D0-\u20F0\u2CEF-\u2CF1\u2D7F\u2DE0-\u2DFF\u302A-\u302F\u3099\u309A\uA66F-\uA672\uA674-\uA67D\uA69F\uA6F0\uA6F1\uA802\uA806\uA80B\uA823-\uA827\uA880\uA881\uA8B4-\uA8C4\uA8E0-\uA8F1\uA926-\uA92D\uA947-\uA953\uA980-\uA983\uA9B3-\uA9C0\uA9E5\uAA29-\uAA36\uAA43\uAA4C\uAA4D\uAA7B-\uAA7D\uAAB0\uAAB2-\uAAB4\uAAB7\uAAB8\uAABE\uAABF\uAAC1\uAAEB-\uAAEF\uAAF5\uAAF6\uABE3-\uABEA\uABEC\uABED\uFB1E\uFE00-\uFE0F\uFE20-\uFE2D]|
\uD800[\uDDFD\uDEE0\uDF76-\uDF7A]|\uD802[\uDE01-\uDE03\uDE05\uDE06\uDE0C-\uDE0F\uDE38-\uDE3A\uDE3F\uDEE5\uDEE6]|\uD804[\uDC00-\uDC02\uDC38-\uDC46\uDC7F-\uDC82\uDCB0-\uDCBA\uDD00-\uDD02\uDD27-\uDD34\uDD73\uDD80-\uDD82\uDDB3-\uDDC0\uDE2C-\uDE37\uDEDF-\uDEEA\uDF01-\uDF03\uDF3C\uDF3E-\uDF44\uDF47\uDF48\uDF4B-\uDF4D\uDF57\uDF62\uDF63\uDF66-\uDF6C\uDF70-\uDF74]|\uD805[\uDCB0-\uDCC3\uDDAF-\uDDB5\uDDB8-\uDDC0\uDE30-\uDE40\uDEAB-\uDEB7]|\uD81A[\uDEF0-\uDEF4\uDF30-\uDF36]|\uD81B[\uDF51-\uDF7E\uDF8F-\uDF92]|\uD82F[\uDC9D\uDC9E]|\uD834[\uDD65-\uDD69\uDD6D-\uDD72\uDD7B-\uDD82\uDD85-\uDD8B\uDDAA-\uDDAD\uDE42-\uDE44]|\uD83A[\uDCD0-\uDCD6]|\uDB40[\uDD00-\uDDEF]/; - -function validateLabel(label, processing_option) { - if (label.substr(0, 4) === "xn--") { - label = punycode.toUnicode(label); - processing_option = PROCESSING_OPTIONS.NONTRANSITIONAL; - } - - var error = false; - - if (normalize(label) !== label || - (label[3] === "-" && label[4] === "-") || - label[0] === "-" || label[label.length - 1] === "-" || - label.indexOf(".") !== -1 || - label.search(combiningMarksRegex) === 0) { - error = true; - } - - var len = countSymbols(label); - for (var i = 0; i < len; ++i) { - var status = findStatus(label.codePointAt(i)); - if ((processing === PROCESSING_OPTIONS.TRANSITIONAL && status[1] !== "valid") || - (processing === PROCESSING_OPTIONS.NONTRANSITIONAL && - status[1] !== "valid" && status[1] !== "deviation")) { - error = true; - break; - } - } - - return { - label: label, - error: error - }; -} - -function processing(domain_name, useSTD3, processing_option) { - var result = mapChars(domain_name, useSTD3, processing_option); - result.string = normalize(result.string); - - var labels = result.string.split("."); - for (var i = 0; i < labels.length; ++i) { - try { - var validation = validateLabel(labels[i]); - labels[i] = validation.label; - result.error = result.error || validation.error; - } catch(e) { - result.error = true; - } - } - - return { - string: labels.join("."), - error: result.error - }; -} - -module.exports.toASCII = function(domain_name, useSTD3, processing_option, verifyDnsLength) { - var result = processing(domain_name, useSTD3, processing_option); - var labels = result.string.split("."); - labels = labels.map(function(l) { - try { - return punycode.toASCII(l); - } catch(e) { - result.error = true; - return l; - } - }); - - if (verifyDnsLength) { - var total = labels.slice(0, labels.length - 1).join(".").length; - if (total.length > 253 || total.length === 0) { - result.error = true; - } - - for (var i=0; i < labels.length; ++i) { - if (labels.length > 63 || labels.length === 0) { - result.error = true; - break; - } - } - } - - if (result.error) return null; - return labels.join("."); -}; - -module.exports.toUnicode = function(domain_name, useSTD3) { - var result = processing(domain_name, useSTD3, PROCESSING_OPTIONS.NONTRANSITIONAL); - - return { - domain: result.string, - error: result.error - }; -}; - -module.exports.PROCESSING_OPTIONS = PROCESSING_OPTIONS; - - -/***/ }), - -/***/ 4294: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -module.exports = __nccwpck_require__(4219); - - -/***/ }), - -/***/ 4219: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var net = __nccwpck_require__(1631); -var tls = __nccwpck_require__(4016); -var http = __nccwpck_require__(8605); -var https = __nccwpck_require__(7211); -var events = __nccwpck_require__(8614); -var assert = __nccwpck_require__(2357); -var util = __nccwpck_require__(1669); 
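The tr46 module deleted above exposes toASCII/toUnicode for IDNA domain mapping. A minimal usage sketch, assuming the standalone tr46 0.0.x package that whatwg-url bundles (the domain is only an example):

    const tr46 = require('tr46');

    // Punycode-encode an internationalized domain: non-transitional processing,
    // no STD3 restrictions, no DNS length verification.
    const ascii = tr46.toASCII('bücher.example', false, tr46.PROCESSING_OPTIONS.NONTRANSITIONAL, false);
    console.log(ascii); // xn--bcher-kva.example

    // And back to Unicode; the result also carries an error flag.
    const unicode = tr46.toUnicode('xn--bcher-kva.example', false);
    console.log(unicode.domain, unicode.error); // bücher.example false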
- - -exports.httpOverHttp = httpOverHttp; -exports.httpsOverHttp = httpsOverHttp; -exports.httpOverHttps = httpOverHttps; -exports.httpsOverHttps = httpsOverHttps; - - -function httpOverHttp(options) { - var agent = new TunnelingAgent(options); - agent.request = http.request; - return agent; -} - -function httpsOverHttp(options) { - var agent = new TunnelingAgent(options); - agent.request = http.request; - agent.createSocket = createSecureSocket; - agent.defaultPort = 443; - return agent; -} - -function httpOverHttps(options) { - var agent = new TunnelingAgent(options); - agent.request = https.request; - return agent; -} - -function httpsOverHttps(options) { - var agent = new TunnelingAgent(options); - agent.request = https.request; - agent.createSocket = createSecureSocket; - agent.defaultPort = 443; - return agent; -} - - -function TunnelingAgent(options) { - var self = this; - self.options = options || {}; - self.proxyOptions = self.options.proxy || {}; - self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets; - self.requests = []; - self.sockets = []; - - self.on('free', function onFree(socket, host, port, localAddress) { - var options = toOptions(host, port, localAddress); - for (var i = 0, len = self.requests.length; i < len; ++i) { - var pending = self.requests[i]; - if (pending.host === options.host && pending.port === options.port) { - // Detect the request to connect same origin server, - // reuse the connection. - self.requests.splice(i, 1); - pending.request.onSocket(socket); - return; - } - } - socket.destroy(); - self.removeSocket(socket); - }); -} -util.inherits(TunnelingAgent, events.EventEmitter); - -TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) { - var self = this; - var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress)); - - if (self.sockets.length >= this.maxSockets) { - // We are over limit so we'll add it to the queue. - self.requests.push(options); - return; - } - - // If we are under maxSockets create a new one. 
- self.createSocket(options, function(socket) { - socket.on('free', onFree); - socket.on('close', onCloseOrRemove); - socket.on('agentRemove', onCloseOrRemove); - req.onSocket(socket); - - function onFree() { - self.emit('free', socket, options); - } - - function onCloseOrRemove(err) { - self.removeSocket(socket); - socket.removeListener('free', onFree); - socket.removeListener('close', onCloseOrRemove); - socket.removeListener('agentRemove', onCloseOrRemove); - } - }); -}; - -TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { - var self = this; - var placeholder = {}; - self.sockets.push(placeholder); - - var connectOptions = mergeOptions({}, self.proxyOptions, { - method: 'CONNECT', - path: options.host + ':' + options.port, - agent: false, - headers: { - host: options.host + ':' + options.port - } - }); - if (options.localAddress) { - connectOptions.localAddress = options.localAddress; - } - if (connectOptions.proxyAuth) { - connectOptions.headers = connectOptions.headers || {}; - connectOptions.headers['Proxy-Authorization'] = 'Basic ' + - new Buffer(connectOptions.proxyAuth).toString('base64'); - } - - debug('making CONNECT request'); - var connectReq = self.request(connectOptions); - connectReq.useChunkedEncodingByDefault = false; // for v0.6 - connectReq.once('response', onResponse); // for v0.6 - connectReq.once('upgrade', onUpgrade); // for v0.6 - connectReq.once('connect', onConnect); // for v0.7 or later - connectReq.once('error', onError); - connectReq.end(); - - function onResponse(res) { - // Very hacky. This is necessary to avoid http-parser leaks. - res.upgrade = true; - } - - function onUpgrade(res, socket, head) { - // Hacky. - process.nextTick(function() { - onConnect(res, socket, head); - }); - } - - function onConnect(res, socket, head) { - connectReq.removeAllListeners(); - socket.removeAllListeners(); - - if (res.statusCode !== 200) { - debug('tunneling socket could not be established, statusCode=%d', - res.statusCode); - socket.destroy(); - var error = new Error('tunneling socket could not be established, ' + - 'statusCode=' + res.statusCode); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - return; - } - if (head.length > 0) { - debug('got illegal response body from proxy'); - socket.destroy(); - var error = new Error('got illegal response body from proxy'); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - return; - } - debug('tunneling connection has established'); - self.sockets[self.sockets.indexOf(placeholder)] = socket; - return cb(socket); - } - - function onError(cause) { - connectReq.removeAllListeners(); - - debug('tunneling socket could not be established, cause=%s\n', - cause.message, cause.stack); - var error = new Error('tunneling socket could not be established, ' + - 'cause=' + cause.message); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - } -}; - -TunnelingAgent.prototype.removeSocket = function removeSocket(socket) { - var pos = this.sockets.indexOf(socket) - if (pos === -1) { - return; - } - this.sockets.splice(pos, 1); - - var pending = this.requests.shift(); - if (pending) { - // If we have pending requests and a socket gets closed a new one - // needs to be created to take over in the pool for the one that closed. 
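For context, the TunnelingAgent deleted here is the proxy agent from the `tunnel` package. A hedged sketch of its typical use, with a placeholder proxy host and port:

    const https = require('https');
    const tunnel = require('tunnel');

    // HTTPS requests tunneled through an HTTP proxy via CONNECT, matching the
    // httpsOverHttp/createSecureSocket path above.
    const agent = tunnel.httpsOverHttp({
      maxSockets: 5,
      proxy: {
        host: 'proxy.example.internal', // placeholder
        port: 3128,
        // proxyAuth: 'user:password',  // sent as a Basic Proxy-Authorization header
      },
    });

    https.get(
      { host: 'api.github.com', path: '/', agent, headers: { 'User-Agent': 'demo' } },
      (res) => console.log(res.statusCode)
    );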
- this.createSocket(pending, function(socket) { - pending.request.onSocket(socket); - }); - } -}; - -function createSecureSocket(options, cb) { - var self = this; - TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { - var hostHeader = options.request.getHeader('host'); - var tlsOptions = mergeOptions({}, self.options, { - socket: socket, - servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host - }); - - // 0 is dummy port for v0.6 - var secureSocket = tls.connect(0, tlsOptions); - self.sockets[self.sockets.indexOf(socket)] = secureSocket; - cb(secureSocket); - }); -} - - -function toOptions(host, port, localAddress) { - if (typeof host === 'string') { // since v0.10 - return { - host: host, - port: port, - localAddress: localAddress - }; - } - return host; // for v0.11 or later -} - -function mergeOptions(target) { - for (var i = 1, len = arguments.length; i < len; ++i) { - var overrides = arguments[i]; - if (typeof overrides === 'object') { - var keys = Object.keys(overrides); - for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { - var k = keys[j]; - if (overrides[k] !== undefined) { - target[k] = overrides[k]; - } - } - } - } - return target; -} - - -var debug; -if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { - debug = function() { - var args = Array.prototype.slice.call(arguments); - if (typeof args[0] === 'string') { - args[0] = 'TUNNEL: ' + args[0]; - } else { - args.unshift('TUNNEL:'); - } - console.error.apply(console, args); - } -} else { - debug = function() {}; -} -exports.debug = debug; // for test - - -/***/ }), - -/***/ 5030: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -function getUserAgent() { - if (typeof navigator === "object" && "userAgent" in navigator) { - return navigator.userAgent; - } - - if (typeof process === "object" && "version" in process) { - return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`; - } - - return ""; -} - -exports.getUserAgent = getUserAgent; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 9046: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -exports.fromCallback = function (fn) { - return Object.defineProperty(function (...args) { - if (typeof args[args.length - 1] === 'function') fn.apply(this, args) - else { - return new Promise((resolve, reject) => { - fn.call( - this, - ...args, - (err, res) => (err != null) ? reject(err) : resolve(res) - ) - }) - } - }, 'name', { value: fn.name }) -} - -exports.fromPromise = function (fn) { - return Object.defineProperty(function (...args) { - const cb = args[args.length - 1] - if (typeof cb !== 'function') return fn.apply(this, args) - else fn.apply(this, args.slice(0, -1)).then(r => cb(null, r), cb) - }, 'name', { value: fn.name }) -} - - -/***/ }), - -/***/ 4886: -/***/ ((module) => { - -"use strict"; - - -var conversions = {}; -module.exports = conversions; - -function sign(x) { - return x < 0 ? -1 : 1; -} - -function evenRound(x) { - // Round x to the nearest integer, choosing the even integer if it lies halfway between two. - if ((x % 1) === 0.5 && (x & 1) === 0) { // [even number].5; round down (i.e. floor) - return Math.floor(x); - } else { - return Math.round(x); - } -} - -function createNumberConversion(bitLength, typeOpts) { - if (!typeOpts.unsigned) { - --bitLength; - } - const lowerBound = typeOpts.unsigned ? 
0 : -Math.pow(2, bitLength); - const upperBound = Math.pow(2, bitLength) - 1; - - const moduloVal = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength) : Math.pow(2, bitLength); - const moduloBound = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength - 1) : Math.pow(2, bitLength - 1); - - return function(V, opts) { - if (!opts) opts = {}; - - let x = +V; - - if (opts.enforceRange) { - if (!Number.isFinite(x)) { - throw new TypeError("Argument is not a finite number"); - } - - x = sign(x) * Math.floor(Math.abs(x)); - if (x < lowerBound || x > upperBound) { - throw new TypeError("Argument is not in byte range"); - } - - return x; - } - - if (!isNaN(x) && opts.clamp) { - x = evenRound(x); - - if (x < lowerBound) x = lowerBound; - if (x > upperBound) x = upperBound; - return x; - } - - if (!Number.isFinite(x) || x === 0) { - return 0; - } - - x = sign(x) * Math.floor(Math.abs(x)); - x = x % moduloVal; - - if (!typeOpts.unsigned && x >= moduloBound) { - return x - moduloVal; - } else if (typeOpts.unsigned) { - if (x < 0) { - x += moduloVal; - } else if (x === -0) { // don't return negative zero - return 0; - } - } - - return x; - } -} - -conversions["void"] = function () { - return undefined; -}; - -conversions["boolean"] = function (val) { - return !!val; -}; - -conversions["byte"] = createNumberConversion(8, { unsigned: false }); -conversions["octet"] = createNumberConversion(8, { unsigned: true }); - -conversions["short"] = createNumberConversion(16, { unsigned: false }); -conversions["unsigned short"] = createNumberConversion(16, { unsigned: true }); - -conversions["long"] = createNumberConversion(32, { unsigned: false }); -conversions["unsigned long"] = createNumberConversion(32, { unsigned: true }); - -conversions["long long"] = createNumberConversion(32, { unsigned: false, moduloBitLength: 64 }); -conversions["unsigned long long"] = createNumberConversion(32, { unsigned: true, moduloBitLength: 64 }); - -conversions["double"] = function (V) { - const x = +V; - - if (!Number.isFinite(x)) { - throw new TypeError("Argument is not a finite floating-point value"); - } - - return x; -}; - -conversions["unrestricted double"] = function (V) { - const x = +V; - - if (isNaN(x)) { - throw new TypeError("Argument is NaN"); - } - - return x; -}; - -// not quite valid, but good enough for JS -conversions["float"] = conversions["double"]; -conversions["unrestricted float"] = conversions["unrestricted double"]; - -conversions["DOMString"] = function (V, opts) { - if (!opts) opts = {}; - - if (opts.treatNullAsEmptyString && V === null) { - return ""; - } - - return String(V); -}; - -conversions["ByteString"] = function (V, opts) { - const x = String(V); - let c = undefined; - for (let i = 0; (c = x.codePointAt(i)) !== undefined; ++i) { - if (c > 255) { - throw new TypeError("Argument is not a valid bytestring"); - } - } - - return x; -}; - -conversions["USVString"] = function (V) { - const S = String(V); - const n = S.length; - const U = []; - for (let i = 0; i < n; ++i) { - const c = S.charCodeAt(i); - if (c < 0xD800 || c > 0xDFFF) { - U.push(String.fromCodePoint(c)); - } else if (0xDC00 <= c && c <= 0xDFFF) { - U.push(String.fromCodePoint(0xFFFD)); - } else { - if (i === n - 1) { - U.push(String.fromCodePoint(0xFFFD)); - } else { - const d = S.charCodeAt(i + 1); - if (0xDC00 <= d && d <= 0xDFFF) { - const a = c & 0x3FF; - const b = d & 0x3FF; - U.push(String.fromCodePoint((2 << 15) + (2 << 9) * a + b)); - ++i; - } else { - U.push(String.fromCodePoint(0xFFFD)); - } - } - } - } - 
- return U.join(''); -}; - -conversions["Date"] = function (V, opts) { - if (!(V instanceof Date)) { - throw new TypeError("Argument is not a Date object"); - } - if (isNaN(V)) { - return undefined; - } - - return V; -}; - -conversions["RegExp"] = function (V, opts) { - if (!(V instanceof RegExp)) { - V = new RegExp(V); - } - - return V; -}; - - -/***/ }), - -/***/ 7537: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -const usm = __nccwpck_require__(2158); - -exports.implementation = class URLImpl { - constructor(constructorArgs) { - const url = constructorArgs[0]; - const base = constructorArgs[1]; - - let parsedBase = null; - if (base !== undefined) { - parsedBase = usm.basicURLParse(base); - if (parsedBase === "failure") { - throw new TypeError("Invalid base URL"); - } - } - - const parsedURL = usm.basicURLParse(url, { baseURL: parsedBase }); - if (parsedURL === "failure") { - throw new TypeError("Invalid URL"); - } - - this._url = parsedURL; - - // TODO: query stuff - } - - get href() { - return usm.serializeURL(this._url); - } - - set href(v) { - const parsedURL = usm.basicURLParse(v); - if (parsedURL === "failure") { - throw new TypeError("Invalid URL"); - } - - this._url = parsedURL; - } - - get origin() { - return usm.serializeURLOrigin(this._url); - } - - get protocol() { - return this._url.scheme + ":"; - } - - set protocol(v) { - usm.basicURLParse(v + ":", { url: this._url, stateOverride: "scheme start" }); - } - - get username() { - return this._url.username; - } - - set username(v) { - if (usm.cannotHaveAUsernamePasswordPort(this._url)) { - return; - } - - usm.setTheUsername(this._url, v); - } - - get password() { - return this._url.password; - } - - set password(v) { - if (usm.cannotHaveAUsernamePasswordPort(this._url)) { - return; - } - - usm.setThePassword(this._url, v); - } - - get host() { - const url = this._url; - - if (url.host === null) { - return ""; - } - - if (url.port === null) { - return usm.serializeHost(url.host); - } - - return usm.serializeHost(url.host) + ":" + usm.serializeInteger(url.port); - } - - set host(v) { - if (this._url.cannotBeABaseURL) { - return; - } - - usm.basicURLParse(v, { url: this._url, stateOverride: "host" }); - } - - get hostname() { - if (this._url.host === null) { - return ""; - } - - return usm.serializeHost(this._url.host); - } - - set hostname(v) { - if (this._url.cannotBeABaseURL) { - return; - } - - usm.basicURLParse(v, { url: this._url, stateOverride: "hostname" }); - } - - get port() { - if (this._url.port === null) { - return ""; - } - - return usm.serializeInteger(this._url.port); - } - - set port(v) { - if (usm.cannotHaveAUsernamePasswordPort(this._url)) { - return; - } - - if (v === "") { - this._url.port = null; - } else { - usm.basicURLParse(v, { url: this._url, stateOverride: "port" }); - } - } - - get pathname() { - if (this._url.cannotBeABaseURL) { - return this._url.path[0]; - } - - if (this._url.path.length === 0) { - return ""; - } - - return "/" + this._url.path.join("/"); - } - - set pathname(v) { - if (this._url.cannotBeABaseURL) { - return; - } - - this._url.path = []; - usm.basicURLParse(v, { url: this._url, stateOverride: "path start" }); - } - - get search() { - if (this._url.query === null || this._url.query === "") { - return ""; - } - - return "?" + this._url.query; - } - - set search(v) { - // TODO: query stuff - - const url = this._url; - - if (v === "") { - url.query = null; - return; - } - - const input = v[0] === "?" ? 
v.substring(1) : v; - url.query = ""; - usm.basicURLParse(input, { url, stateOverride: "query" }); - } - - get hash() { - if (this._url.fragment === null || this._url.fragment === "") { - return ""; - } - - return "#" + this._url.fragment; - } - - set hash(v) { - if (v === "") { - this._url.fragment = null; - return; - } - - const input = v[0] === "#" ? v.substring(1) : v; - this._url.fragment = ""; - usm.basicURLParse(input, { url: this._url, stateOverride: "fragment" }); - } - - toJSON() { - return this.href; - } -}; - - -/***/ }), - -/***/ 3394: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const conversions = __nccwpck_require__(4886); -const utils = __nccwpck_require__(3185); -const Impl = __nccwpck_require__(7537); - -const impl = utils.implSymbol; - -function URL(url) { - if (!this || this[impl] || !(this instanceof URL)) { - throw new TypeError("Failed to construct 'URL': Please use the 'new' operator, this DOM object constructor cannot be called as a function."); - } - if (arguments.length < 1) { - throw new TypeError("Failed to construct 'URL': 1 argument required, but only " + arguments.length + " present."); - } - const args = []; - for (let i = 0; i < arguments.length && i < 2; ++i) { - args[i] = arguments[i]; - } - args[0] = conversions["USVString"](args[0]); - if (args[1] !== undefined) { - args[1] = conversions["USVString"](args[1]); - } - - module.exports.setup(this, args); -} - -URL.prototype.toJSON = function toJSON() { - if (!this || !module.exports.is(this)) { - throw new TypeError("Illegal invocation"); - } - const args = []; - for (let i = 0; i < arguments.length && i < 0; ++i) { - args[i] = arguments[i]; - } - return this[impl].toJSON.apply(this[impl], args); -}; -Object.defineProperty(URL.prototype, "href", { - get() { - return this[impl].href; - }, - set(V) { - V = conversions["USVString"](V); - this[impl].href = V; - }, - enumerable: true, - configurable: true -}); - -URL.prototype.toString = function () { - if (!this || !module.exports.is(this)) { - throw new TypeError("Illegal invocation"); - } - return this.href; -}; - -Object.defineProperty(URL.prototype, "origin", { - get() { - return this[impl].origin; - }, - enumerable: true, - configurable: true -}); - -Object.defineProperty(URL.prototype, "protocol", { - get() { - return this[impl].protocol; - }, - set(V) { - V = conversions["USVString"](V); - this[impl].protocol = V; - }, - enumerable: true, - configurable: true -}); - -Object.defineProperty(URL.prototype, "username", { - get() { - return this[impl].username; - }, - set(V) { - V = conversions["USVString"](V); - this[impl].username = V; - }, - enumerable: true, - configurable: true -}); - -Object.defineProperty(URL.prototype, "password", { - get() { - return this[impl].password; - }, - set(V) { - V = conversions["USVString"](V); - this[impl].password = V; - }, - enumerable: true, - configurable: true -}); - -Object.defineProperty(URL.prototype, "host", { - get() { - return this[impl].host; - }, - set(V) { - V = conversions["USVString"](V); - this[impl].host = V; - }, - enumerable: true, - configurable: true -}); - -Object.defineProperty(URL.prototype, "hostname", { - get() { - return this[impl].hostname; - }, - set(V) { - V = conversions["USVString"](V); - this[impl].hostname = V; - }, - enumerable: true, - configurable: true -}); - -Object.defineProperty(URL.prototype, "port", { - get() { - return this[impl].port; - }, - set(V) { - V = conversions["USVString"](V); - this[impl].port = V; - }, - enumerable: 
true, - configurable: true -}); - -Object.defineProperty(URL.prototype, "pathname", { - get() { - return this[impl].pathname; - }, - set(V) { - V = conversions["USVString"](V); - this[impl].pathname = V; - }, - enumerable: true, - configurable: true -}); - -Object.defineProperty(URL.prototype, "search", { - get() { - return this[impl].search; - }, - set(V) { - V = conversions["USVString"](V); - this[impl].search = V; - }, - enumerable: true, - configurable: true -}); - -Object.defineProperty(URL.prototype, "hash", { - get() { - return this[impl].hash; - }, - set(V) { - V = conversions["USVString"](V); - this[impl].hash = V; - }, - enumerable: true, - configurable: true -}); - - -module.exports = { - is(obj) { - return !!obj && obj[impl] instanceof Impl.implementation; - }, - create(constructorArgs, privateData) { - let obj = Object.create(URL.prototype); - this.setup(obj, constructorArgs, privateData); - return obj; - }, - setup(obj, constructorArgs, privateData) { - if (!privateData) privateData = {}; - privateData.wrapper = obj; - - obj[impl] = new Impl.implementation(constructorArgs, privateData); - obj[impl][utils.wrapperSymbol] = obj; - }, - interface: URL, - expose: { - Window: { URL: URL }, - Worker: { URL: URL } - } -}; - - - -/***/ }), - -/***/ 8665: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -exports.URL = __nccwpck_require__(3394).interface; -exports.serializeURL = __nccwpck_require__(2158).serializeURL; -exports.serializeURLOrigin = __nccwpck_require__(2158).serializeURLOrigin; -exports.basicURLParse = __nccwpck_require__(2158).basicURLParse; -exports.setTheUsername = __nccwpck_require__(2158).setTheUsername; -exports.setThePassword = __nccwpck_require__(2158).setThePassword; -exports.serializeHost = __nccwpck_require__(2158).serializeHost; -exports.serializeInteger = __nccwpck_require__(2158).serializeInteger; -exports.parseURL = __nccwpck_require__(2158).parseURL; - - -/***/ }), - -/***/ 2158: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - -const punycode = __nccwpck_require__(4213); -const tr46 = __nccwpck_require__(4256); - -const specialSchemes = { - ftp: 21, - file: null, - gopher: 70, - http: 80, - https: 443, - ws: 80, - wss: 443 -}; - -const failure = Symbol("failure"); - -function countSymbols(str) { - return punycode.ucs2.decode(str).length; -} - -function at(input, idx) { - const c = input[idx]; - return isNaN(c) ? undefined : String.fromCodePoint(c); -} - -function isASCIIDigit(c) { - return c >= 0x30 && c <= 0x39; -} - -function isASCIIAlpha(c) { - return (c >= 0x41 && c <= 0x5A) || (c >= 0x61 && c <= 0x7A); -} - -function isASCIIAlphanumeric(c) { - return isASCIIAlpha(c) || isASCIIDigit(c); -} - -function isASCIIHex(c) { - return isASCIIDigit(c) || (c >= 0x41 && c <= 0x46) || (c >= 0x61 && c <= 0x66); -} - -function isSingleDot(buffer) { - return buffer === "." || buffer.toLowerCase() === "%2e"; -} - -function isDoubleDot(buffer) { - buffer = buffer.toLowerCase(); - return buffer === ".." || buffer === "%2e." 
|| buffer === ".%2e" || buffer === "%2e%2e"; -} - -function isWindowsDriveLetterCodePoints(cp1, cp2) { - return isASCIIAlpha(cp1) && (cp2 === 58 || cp2 === 124); -} - -function isWindowsDriveLetterString(string) { - return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && (string[1] === ":" || string[1] === "|"); -} - -function isNormalizedWindowsDriveLetterString(string) { - return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && string[1] === ":"; -} - -function containsForbiddenHostCodePoint(string) { - return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|%|\/|:|\?|@|\[|\\|\]/) !== -1; -} - -function containsForbiddenHostCodePointExcludingPercent(string) { - return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|\/|:|\?|@|\[|\\|\]/) !== -1; -} - -function isSpecialScheme(scheme) { - return specialSchemes[scheme] !== undefined; -} - -function isSpecial(url) { - return isSpecialScheme(url.scheme); -} - -function defaultPort(scheme) { - return specialSchemes[scheme]; -} - -function percentEncode(c) { - let hex = c.toString(16).toUpperCase(); - if (hex.length === 1) { - hex = "0" + hex; - } - - return "%" + hex; -} - -function utf8PercentEncode(c) { - const buf = new Buffer(c); - - let str = ""; - - for (let i = 0; i < buf.length; ++i) { - str += percentEncode(buf[i]); - } - - return str; -} - -function utf8PercentDecode(str) { - const input = new Buffer(str); - const output = []; - for (let i = 0; i < input.length; ++i) { - if (input[i] !== 37) { - output.push(input[i]); - } else if (input[i] === 37 && isASCIIHex(input[i + 1]) && isASCIIHex(input[i + 2])) { - output.push(parseInt(input.slice(i + 1, i + 3).toString(), 16)); - i += 2; - } else { - output.push(input[i]); - } - } - return new Buffer(output).toString(); -} - -function isC0ControlPercentEncode(c) { - return c <= 0x1F || c > 0x7E; -} - -const extraPathPercentEncodeSet = new Set([32, 34, 35, 60, 62, 63, 96, 123, 125]); -function isPathPercentEncode(c) { - return isC0ControlPercentEncode(c) || extraPathPercentEncodeSet.has(c); -} - -const extraUserinfoPercentEncodeSet = - new Set([47, 58, 59, 61, 64, 91, 92, 93, 94, 124]); -function isUserinfoPercentEncode(c) { - return isPathPercentEncode(c) || extraUserinfoPercentEncodeSet.has(c); -} - -function percentEncodeChar(c, encodeSetPredicate) { - const cStr = String.fromCodePoint(c); - - if (encodeSetPredicate(c)) { - return utf8PercentEncode(cStr); - } - - return cStr; -} - -function parseIPv4Number(input) { - let R = 10; - - if (input.length >= 2 && input.charAt(0) === "0" && input.charAt(1).toLowerCase() === "x") { - input = input.substring(2); - R = 16; - } else if (input.length >= 2 && input.charAt(0) === "0") { - input = input.substring(1); - R = 8; - } - - if (input === "") { - return 0; - } - - const regex = R === 10 ? /[^0-9]/ : (R === 16 ? 
/[^0-9A-Fa-f]/ : /[^0-7]/); - if (regex.test(input)) { - return failure; - } - - return parseInt(input, R); -} - -function parseIPv4(input) { - const parts = input.split("."); - if (parts[parts.length - 1] === "") { - if (parts.length > 1) { - parts.pop(); - } - } - - if (parts.length > 4) { - return input; - } - - const numbers = []; - for (const part of parts) { - if (part === "") { - return input; - } - const n = parseIPv4Number(part); - if (n === failure) { - return input; - } - - numbers.push(n); - } - - for (let i = 0; i < numbers.length - 1; ++i) { - if (numbers[i] > 255) { - return failure; - } - } - if (numbers[numbers.length - 1] >= Math.pow(256, 5 - numbers.length)) { - return failure; - } - - let ipv4 = numbers.pop(); - let counter = 0; - - for (const n of numbers) { - ipv4 += n * Math.pow(256, 3 - counter); - ++counter; - } - - return ipv4; -} - -function serializeIPv4(address) { - let output = ""; - let n = address; - - for (let i = 1; i <= 4; ++i) { - output = String(n % 256) + output; - if (i !== 4) { - output = "." + output; - } - n = Math.floor(n / 256); - } - - return output; -} - -function parseIPv6(input) { - const address = [0, 0, 0, 0, 0, 0, 0, 0]; - let pieceIndex = 0; - let compress = null; - let pointer = 0; - - input = punycode.ucs2.decode(input); - - if (input[pointer] === 58) { - if (input[pointer + 1] !== 58) { - return failure; - } - - pointer += 2; - ++pieceIndex; - compress = pieceIndex; - } - - while (pointer < input.length) { - if (pieceIndex === 8) { - return failure; - } - - if (input[pointer] === 58) { - if (compress !== null) { - return failure; - } - ++pointer; - ++pieceIndex; - compress = pieceIndex; - continue; - } - - let value = 0; - let length = 0; - - while (length < 4 && isASCIIHex(input[pointer])) { - value = value * 0x10 + parseInt(at(input, pointer), 16); - ++pointer; - ++length; - } - - if (input[pointer] === 46) { - if (length === 0) { - return failure; - } - - pointer -= length; - - if (pieceIndex > 6) { - return failure; - } - - let numbersSeen = 0; - - while (input[pointer] !== undefined) { - let ipv4Piece = null; - - if (numbersSeen > 0) { - if (input[pointer] === 46 && numbersSeen < 4) { - ++pointer; - } else { - return failure; - } - } - - if (!isASCIIDigit(input[pointer])) { - return failure; - } - - while (isASCIIDigit(input[pointer])) { - const number = parseInt(at(input, pointer)); - if (ipv4Piece === null) { - ipv4Piece = number; - } else if (ipv4Piece === 0) { - return failure; - } else { - ipv4Piece = ipv4Piece * 10 + number; - } - if (ipv4Piece > 255) { - return failure; - } - ++pointer; - } - - address[pieceIndex] = address[pieceIndex] * 0x100 + ipv4Piece; - - ++numbersSeen; - - if (numbersSeen === 2 || numbersSeen === 4) { - ++pieceIndex; - } - } - - if (numbersSeen !== 4) { - return failure; - } - - break; - } else if (input[pointer] === 58) { - ++pointer; - if (input[pointer] === undefined) { - return failure; - } - } else if (input[pointer] !== undefined) { - return failure; - } - - address[pieceIndex] = value; - ++pieceIndex; - } - - if (compress !== null) { - let swaps = pieceIndex - compress; - pieceIndex = 7; - while (pieceIndex !== 0 && swaps > 0) { - const temp = address[compress + swaps - 1]; - address[compress + swaps - 1] = address[pieceIndex]; - address[pieceIndex] = temp; - --pieceIndex; - --swaps; - } - } else if (compress === null && pieceIndex !== 8) { - return failure; - } - - return address; -} - -function serializeIPv6(address) { - let output = ""; - const seqResult = findLongestZeroSequence(address); - 
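The serializer in this block collapses the longest run of zero pieces (found by findLongestZeroSequence) into "::". The same behaviour can be observed through Node's built-in WHATWG URL class; a small illustration:

    const { URL } = require('url');

    // The fully written-out address is normalized by the IPv6 serializer.
    const u = new URL('http://[2001:0db8:0000:0000:0000:0000:0000:0001]:8080/index.html');
    console.log(u.hostname); // [2001:db8::1]
    console.log(u.host);     // [2001:db8::1]:8080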
const compress = seqResult.idx; - let ignore0 = false; - - for (let pieceIndex = 0; pieceIndex <= 7; ++pieceIndex) { - if (ignore0 && address[pieceIndex] === 0) { - continue; - } else if (ignore0) { - ignore0 = false; - } - - if (compress === pieceIndex) { - const separator = pieceIndex === 0 ? "::" : ":"; - output += separator; - ignore0 = true; - continue; - } - - output += address[pieceIndex].toString(16); - - if (pieceIndex !== 7) { - output += ":"; - } - } - - return output; -} - -function parseHost(input, isSpecialArg) { - if (input[0] === "[") { - if (input[input.length - 1] !== "]") { - return failure; - } - - return parseIPv6(input.substring(1, input.length - 1)); - } - - if (!isSpecialArg) { - return parseOpaqueHost(input); - } - - const domain = utf8PercentDecode(input); - const asciiDomain = tr46.toASCII(domain, false, tr46.PROCESSING_OPTIONS.NONTRANSITIONAL, false); - if (asciiDomain === null) { - return failure; - } - - if (containsForbiddenHostCodePoint(asciiDomain)) { - return failure; - } - - const ipv4Host = parseIPv4(asciiDomain); - if (typeof ipv4Host === "number" || ipv4Host === failure) { - return ipv4Host; - } - - return asciiDomain; -} - -function parseOpaqueHost(input) { - if (containsForbiddenHostCodePointExcludingPercent(input)) { - return failure; - } - - let output = ""; - const decoded = punycode.ucs2.decode(input); - for (let i = 0; i < decoded.length; ++i) { - output += percentEncodeChar(decoded[i], isC0ControlPercentEncode); - } - return output; -} - -function findLongestZeroSequence(arr) { - let maxIdx = null; - let maxLen = 1; // only find elements > 1 - let currStart = null; - let currLen = 0; - - for (let i = 0; i < arr.length; ++i) { - if (arr[i] !== 0) { - if (currLen > maxLen) { - maxIdx = currStart; - maxLen = currLen; - } - - currStart = null; - currLen = 0; - } else { - if (currStart === null) { - currStart = i; - } - ++currLen; - } - } - - // if trailing zeros - if (currLen > maxLen) { - maxIdx = currStart; - maxLen = currLen; - } - - return { - idx: maxIdx, - len: maxLen - }; -} - -function serializeHost(host) { - if (typeof host === "number") { - return serializeIPv4(host); - } - - // IPv6 serializer - if (host instanceof Array) { - return "[" + serializeIPv6(host) + "]"; - } - - return host; -} - -function trimControlChars(url) { - return url.replace(/^[\u0000-\u001F\u0020]+|[\u0000-\u001F\u0020]+$/g, ""); -} - -function trimTabAndNewline(url) { - return url.replace(/\u0009|\u000A|\u000D/g, ""); -} - -function shortenPath(url) { - const path = url.path; - if (path.length === 0) { - return; - } - if (url.scheme === "file" && path.length === 1 && isNormalizedWindowsDriveLetter(path[0])) { - return; - } - - path.pop(); -} - -function includesCredentials(url) { - return url.username !== "" || url.password !== ""; -} - -function cannotHaveAUsernamePasswordPort(url) { - return url.host === null || url.host === "" || url.cannotBeABaseURL || url.scheme === "file"; -} - -function isNormalizedWindowsDriveLetter(string) { - return /^[A-Za-z]:$/.test(string); -} - -function URLStateMachine(input, base, encodingOverride, url, stateOverride) { - this.pointer = 0; - this.input = input; - this.base = base || null; - this.encodingOverride = encodingOverride || "utf-8"; - this.stateOverride = stateOverride; - this.url = url; - this.failure = false; - this.parseError = false; - - if (!this.url) { - this.url = { - scheme: "", - username: "", - password: "", - host: null, - port: null, - path: [], - query: null, - fragment: null, - - cannotBeABaseURL: false - }; - 
- const res = trimControlChars(this.input); - if (res !== this.input) { - this.parseError = true; - } - this.input = res; - } - - const res = trimTabAndNewline(this.input); - if (res !== this.input) { - this.parseError = true; - } - this.input = res; - - this.state = stateOverride || "scheme start"; - - this.buffer = ""; - this.atFlag = false; - this.arrFlag = false; - this.passwordTokenSeenFlag = false; - - this.input = punycode.ucs2.decode(this.input); - - for (; this.pointer <= this.input.length; ++this.pointer) { - const c = this.input[this.pointer]; - const cStr = isNaN(c) ? undefined : String.fromCodePoint(c); - - // exec state machine - const ret = this["parse " + this.state](c, cStr); - if (!ret) { - break; // terminate algorithm - } else if (ret === failure) { - this.failure = true; - break; - } - } -} - -URLStateMachine.prototype["parse scheme start"] = function parseSchemeStart(c, cStr) { - if (isASCIIAlpha(c)) { - this.buffer += cStr.toLowerCase(); - this.state = "scheme"; - } else if (!this.stateOverride) { - this.state = "no scheme"; - --this.pointer; - } else { - this.parseError = true; - return failure; - } - - return true; -}; - -URLStateMachine.prototype["parse scheme"] = function parseScheme(c, cStr) { - if (isASCIIAlphanumeric(c) || c === 43 || c === 45 || c === 46) { - this.buffer += cStr.toLowerCase(); - } else if (c === 58) { - if (this.stateOverride) { - if (isSpecial(this.url) && !isSpecialScheme(this.buffer)) { - return false; - } - - if (!isSpecial(this.url) && isSpecialScheme(this.buffer)) { - return false; - } - - if ((includesCredentials(this.url) || this.url.port !== null) && this.buffer === "file") { - return false; - } - - if (this.url.scheme === "file" && (this.url.host === "" || this.url.host === null)) { - return false; - } - } - this.url.scheme = this.buffer; - this.buffer = ""; - if (this.stateOverride) { - return false; - } - if (this.url.scheme === "file") { - if (this.input[this.pointer + 1] !== 47 || this.input[this.pointer + 2] !== 47) { - this.parseError = true; - } - this.state = "file"; - } else if (isSpecial(this.url) && this.base !== null && this.base.scheme === this.url.scheme) { - this.state = "special relative or authority"; - } else if (isSpecial(this.url)) { - this.state = "special authority slashes"; - } else if (this.input[this.pointer + 1] === 47) { - this.state = "path or authority"; - ++this.pointer; - } else { - this.url.cannotBeABaseURL = true; - this.url.path.push(""); - this.state = "cannot-be-a-base-URL path"; - } - } else if (!this.stateOverride) { - this.buffer = ""; - this.state = "no scheme"; - this.pointer = -1; - } else { - this.parseError = true; - return failure; - } - - return true; -}; - -URLStateMachine.prototype["parse no scheme"] = function parseNoScheme(c) { - if (this.base === null || (this.base.cannotBeABaseURL && c !== 35)) { - return failure; - } else if (this.base.cannotBeABaseURL && c === 35) { - this.url.scheme = this.base.scheme; - this.url.path = this.base.path.slice(); - this.url.query = this.base.query; - this.url.fragment = ""; - this.url.cannotBeABaseURL = true; - this.state = "fragment"; - } else if (this.base.scheme === "file") { - this.state = "file"; - --this.pointer; - } else { - this.state = "relative"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse special relative or authority"] = function parseSpecialRelativeOrAuthority(c) { - if (c === 47 && this.input[this.pointer + 1] === 47) { - this.state = "special authority ignore slashes"; - ++this.pointer; - } else { - 
this.parseError = true; - this.state = "relative"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse path or authority"] = function parsePathOrAuthority(c) { - if (c === 47) { - this.state = "authority"; - } else { - this.state = "path"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse relative"] = function parseRelative(c) { - this.url.scheme = this.base.scheme; - if (isNaN(c)) { - this.url.username = this.base.username; - this.url.password = this.base.password; - this.url.host = this.base.host; - this.url.port = this.base.port; - this.url.path = this.base.path.slice(); - this.url.query = this.base.query; - } else if (c === 47) { - this.state = "relative slash"; - } else if (c === 63) { - this.url.username = this.base.username; - this.url.password = this.base.password; - this.url.host = this.base.host; - this.url.port = this.base.port; - this.url.path = this.base.path.slice(); - this.url.query = ""; - this.state = "query"; - } else if (c === 35) { - this.url.username = this.base.username; - this.url.password = this.base.password; - this.url.host = this.base.host; - this.url.port = this.base.port; - this.url.path = this.base.path.slice(); - this.url.query = this.base.query; - this.url.fragment = ""; - this.state = "fragment"; - } else if (isSpecial(this.url) && c === 92) { - this.parseError = true; - this.state = "relative slash"; - } else { - this.url.username = this.base.username; - this.url.password = this.base.password; - this.url.host = this.base.host; - this.url.port = this.base.port; - this.url.path = this.base.path.slice(0, this.base.path.length - 1); - - this.state = "path"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse relative slash"] = function parseRelativeSlash(c) { - if (isSpecial(this.url) && (c === 47 || c === 92)) { - if (c === 92) { - this.parseError = true; - } - this.state = "special authority ignore slashes"; - } else if (c === 47) { - this.state = "authority"; - } else { - this.url.username = this.base.username; - this.url.password = this.base.password; - this.url.host = this.base.host; - this.url.port = this.base.port; - this.state = "path"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse special authority slashes"] = function parseSpecialAuthoritySlashes(c) { - if (c === 47 && this.input[this.pointer + 1] === 47) { - this.state = "special authority ignore slashes"; - ++this.pointer; - } else { - this.parseError = true; - this.state = "special authority ignore slashes"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse special authority ignore slashes"] = function parseSpecialAuthorityIgnoreSlashes(c) { - if (c !== 47 && c !== 92) { - this.state = "authority"; - --this.pointer; - } else { - this.parseError = true; - } - - return true; -}; - -URLStateMachine.prototype["parse authority"] = function parseAuthority(c, cStr) { - if (c === 64) { - this.parseError = true; - if (this.atFlag) { - this.buffer = "%40" + this.buffer; - } - this.atFlag = true; - - // careful, this is based on buffer and has its own pointer (this.pointer != pointer) and inner chars - const len = countSymbols(this.buffer); - for (let pointer = 0; pointer < len; ++pointer) { - const codePoint = this.buffer.codePointAt(pointer); - - if (codePoint === 58 && !this.passwordTokenSeenFlag) { - this.passwordTokenSeenFlag = true; - continue; - } - const encodedCodePoints = percentEncodeChar(codePoint, isUserinfoPercentEncode); - if 
(this.passwordTokenSeenFlag) { - this.url.password += encodedCodePoints; - } else { - this.url.username += encodedCodePoints; - } - } - this.buffer = ""; - } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || - (isSpecial(this.url) && c === 92)) { - if (this.atFlag && this.buffer === "") { - this.parseError = true; - return failure; - } - this.pointer -= countSymbols(this.buffer) + 1; - this.buffer = ""; - this.state = "host"; - } else { - this.buffer += cStr; - } - - return true; -}; - -URLStateMachine.prototype["parse hostname"] = -URLStateMachine.prototype["parse host"] = function parseHostName(c, cStr) { - if (this.stateOverride && this.url.scheme === "file") { - --this.pointer; - this.state = "file host"; - } else if (c === 58 && !this.arrFlag) { - if (this.buffer === "") { - this.parseError = true; - return failure; - } - - const host = parseHost(this.buffer, isSpecial(this.url)); - if (host === failure) { - return failure; - } - - this.url.host = host; - this.buffer = ""; - this.state = "port"; - if (this.stateOverride === "hostname") { - return false; - } - } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || - (isSpecial(this.url) && c === 92)) { - --this.pointer; - if (isSpecial(this.url) && this.buffer === "") { - this.parseError = true; - return failure; - } else if (this.stateOverride && this.buffer === "" && - (includesCredentials(this.url) || this.url.port !== null)) { - this.parseError = true; - return false; - } - - const host = parseHost(this.buffer, isSpecial(this.url)); - if (host === failure) { - return failure; - } - - this.url.host = host; - this.buffer = ""; - this.state = "path start"; - if (this.stateOverride) { - return false; - } - } else { - if (c === 91) { - this.arrFlag = true; - } else if (c === 93) { - this.arrFlag = false; - } - this.buffer += cStr; - } - - return true; -}; - -URLStateMachine.prototype["parse port"] = function parsePort(c, cStr) { - if (isASCIIDigit(c)) { - this.buffer += cStr; - } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || - (isSpecial(this.url) && c === 92) || - this.stateOverride) { - if (this.buffer !== "") { - const port = parseInt(this.buffer); - if (port > Math.pow(2, 16) - 1) { - this.parseError = true; - return failure; - } - this.url.port = port === defaultPort(this.url.scheme) ? 
null : port; - this.buffer = ""; - } - if (this.stateOverride) { - return false; - } - this.state = "path start"; - --this.pointer; - } else { - this.parseError = true; - return failure; - } - - return true; -}; - -const fileOtherwiseCodePoints = new Set([47, 92, 63, 35]); - -URLStateMachine.prototype["parse file"] = function parseFile(c) { - this.url.scheme = "file"; - - if (c === 47 || c === 92) { - if (c === 92) { - this.parseError = true; - } - this.state = "file slash"; - } else if (this.base !== null && this.base.scheme === "file") { - if (isNaN(c)) { - this.url.host = this.base.host; - this.url.path = this.base.path.slice(); - this.url.query = this.base.query; - } else if (c === 63) { - this.url.host = this.base.host; - this.url.path = this.base.path.slice(); - this.url.query = ""; - this.state = "query"; - } else if (c === 35) { - this.url.host = this.base.host; - this.url.path = this.base.path.slice(); - this.url.query = this.base.query; - this.url.fragment = ""; - this.state = "fragment"; - } else { - if (this.input.length - this.pointer - 1 === 0 || // remaining consists of 0 code points - !isWindowsDriveLetterCodePoints(c, this.input[this.pointer + 1]) || - (this.input.length - this.pointer - 1 >= 2 && // remaining has at least 2 code points - !fileOtherwiseCodePoints.has(this.input[this.pointer + 2]))) { - this.url.host = this.base.host; - this.url.path = this.base.path.slice(); - shortenPath(this.url); - } else { - this.parseError = true; - } - - this.state = "path"; - --this.pointer; - } - } else { - this.state = "path"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse file slash"] = function parseFileSlash(c) { - if (c === 47 || c === 92) { - if (c === 92) { - this.parseError = true; - } - this.state = "file host"; - } else { - if (this.base !== null && this.base.scheme === "file") { - if (isNormalizedWindowsDriveLetterString(this.base.path[0])) { - this.url.path.push(this.base.path[0]); - } else { - this.url.host = this.base.host; - } - } - this.state = "path"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse file host"] = function parseFileHost(c, cStr) { - if (isNaN(c) || c === 47 || c === 92 || c === 63 || c === 35) { - --this.pointer; - if (!this.stateOverride && isWindowsDriveLetterString(this.buffer)) { - this.parseError = true; - this.state = "path"; - } else if (this.buffer === "") { - this.url.host = ""; - if (this.stateOverride) { - return false; - } - this.state = "path start"; - } else { - let host = parseHost(this.buffer, isSpecial(this.url)); - if (host === failure) { - return failure; - } - if (host === "localhost") { - host = ""; - } - this.url.host = host; - - if (this.stateOverride) { - return false; - } - - this.buffer = ""; - this.state = "path start"; - } - } else { - this.buffer += cStr; - } - - return true; -}; - -URLStateMachine.prototype["parse path start"] = function parsePathStart(c) { - if (isSpecial(this.url)) { - if (c === 92) { - this.parseError = true; - } - this.state = "path"; - - if (c !== 47 && c !== 92) { - --this.pointer; - } - } else if (!this.stateOverride && c === 63) { - this.url.query = ""; - this.state = "query"; - } else if (!this.stateOverride && c === 35) { - this.url.fragment = ""; - this.state = "fragment"; - } else if (c !== undefined) { - this.state = "path"; - if (c !== 47) { - --this.pointer; - } - } - - return true; -}; - -URLStateMachine.prototype["parse path"] = function parsePath(c) { - if (isNaN(c) || c === 47 || (isSpecial(this.url) && c === 92) || - 
(!this.stateOverride && (c === 63 || c === 35))) { - if (isSpecial(this.url) && c === 92) { - this.parseError = true; - } - - if (isDoubleDot(this.buffer)) { - shortenPath(this.url); - if (c !== 47 && !(isSpecial(this.url) && c === 92)) { - this.url.path.push(""); - } - } else if (isSingleDot(this.buffer) && c !== 47 && - !(isSpecial(this.url) && c === 92)) { - this.url.path.push(""); - } else if (!isSingleDot(this.buffer)) { - if (this.url.scheme === "file" && this.url.path.length === 0 && isWindowsDriveLetterString(this.buffer)) { - if (this.url.host !== "" && this.url.host !== null) { - this.parseError = true; - this.url.host = ""; - } - this.buffer = this.buffer[0] + ":"; - } - this.url.path.push(this.buffer); - } - this.buffer = ""; - if (this.url.scheme === "file" && (c === undefined || c === 63 || c === 35)) { - while (this.url.path.length > 1 && this.url.path[0] === "") { - this.parseError = true; - this.url.path.shift(); - } - } - if (c === 63) { - this.url.query = ""; - this.state = "query"; - } - if (c === 35) { - this.url.fragment = ""; - this.state = "fragment"; - } - } else { - // TODO: If c is not a URL code point and not "%", parse error. - - if (c === 37 && - (!isASCIIHex(this.input[this.pointer + 1]) || - !isASCIIHex(this.input[this.pointer + 2]))) { - this.parseError = true; - } - - this.buffer += percentEncodeChar(c, isPathPercentEncode); - } - - return true; -}; - -URLStateMachine.prototype["parse cannot-be-a-base-URL path"] = function parseCannotBeABaseURLPath(c) { - if (c === 63) { - this.url.query = ""; - this.state = "query"; - } else if (c === 35) { - this.url.fragment = ""; - this.state = "fragment"; - } else { - // TODO: Add: not a URL code point - if (!isNaN(c) && c !== 37) { - this.parseError = true; - } - - if (c === 37 && - (!isASCIIHex(this.input[this.pointer + 1]) || - !isASCIIHex(this.input[this.pointer + 2]))) { - this.parseError = true; - } - - if (!isNaN(c)) { - this.url.path[0] = this.url.path[0] + percentEncodeChar(c, isC0ControlPercentEncode); - } - } - - return true; -}; - -URLStateMachine.prototype["parse query"] = function parseQuery(c, cStr) { - if (isNaN(c) || (!this.stateOverride && c === 35)) { - if (!isSpecial(this.url) || this.url.scheme === "ws" || this.url.scheme === "wss") { - this.encodingOverride = "utf-8"; - } - - const buffer = new Buffer(this.buffer); // TODO: Use encoding override instead - for (let i = 0; i < buffer.length; ++i) { - if (buffer[i] < 0x21 || buffer[i] > 0x7E || buffer[i] === 0x22 || buffer[i] === 0x23 || - buffer[i] === 0x3C || buffer[i] === 0x3E) { - this.url.query += percentEncode(buffer[i]); - } else { - this.url.query += String.fromCodePoint(buffer[i]); - } - } - - this.buffer = ""; - if (c === 35) { - this.url.fragment = ""; - this.state = "fragment"; - } - } else { - // TODO: If c is not a URL code point and not "%", parse error. - if (c === 37 && - (!isASCIIHex(this.input[this.pointer + 1]) || - !isASCIIHex(this.input[this.pointer + 2]))) { - this.parseError = true; - } - - this.buffer += cStr; - } - - return true; -}; - -URLStateMachine.prototype["parse fragment"] = function parseFragment(c) { - if (isNaN(c)) { // do nothing - } else if (c === 0x0) { - this.parseError = true; - } else { - // TODO: If c is not a URL code point and not "%", parse error. 
- if (c === 37 && - (!isASCIIHex(this.input[this.pointer + 1]) || - !isASCIIHex(this.input[this.pointer + 2]))) { - this.parseError = true; - } - - this.url.fragment += percentEncodeChar(c, isC0ControlPercentEncode); - } - - return true; -}; - -function serializeURL(url, excludeFragment) { - let output = url.scheme + ":"; - if (url.host !== null) { - output += "//"; - - if (url.username !== "" || url.password !== "") { - output += url.username; - if (url.password !== "") { - output += ":" + url.password; - } - output += "@"; - } - - output += serializeHost(url.host); - - if (url.port !== null) { - output += ":" + url.port; - } - } else if (url.host === null && url.scheme === "file") { - output += "//"; - } - - if (url.cannotBeABaseURL) { - output += url.path[0]; - } else { - for (const string of url.path) { - output += "/" + string; - } - } - - if (url.query !== null) { - output += "?" + url.query; - } - - if (!excludeFragment && url.fragment !== null) { - output += "#" + url.fragment; - } - - return output; -} - -function serializeOrigin(tuple) { - let result = tuple.scheme + "://"; - result += serializeHost(tuple.host); - - if (tuple.port !== null) { - result += ":" + tuple.port; - } - - return result; -} - -module.exports.serializeURL = serializeURL; - -module.exports.serializeURLOrigin = function (url) { - // https://url.spec.whatwg.org/#concept-url-origin - switch (url.scheme) { - case "blob": - try { - return module.exports.serializeURLOrigin(module.exports.parseURL(url.path[0])); - } catch (e) { - // serializing an opaque origin returns "null" - return "null"; - } - case "ftp": - case "gopher": - case "http": - case "https": - case "ws": - case "wss": - return serializeOrigin({ - scheme: url.scheme, - host: url.host, - port: url.port - }); - case "file": - // spec says "exercise to the reader", chrome says "file://" - return "file://"; - default: - // serializing an opaque origin returns "null" - return "null"; - } -}; - -module.exports.basicURLParse = function (input, options) { - if (options === undefined) { - options = {}; - } - - const usm = new URLStateMachine(input, options.baseURL, options.encodingOverride, options.url, options.stateOverride); - if (usm.failure) { - return "failure"; - } - - return usm.url; -}; - -module.exports.setTheUsername = function (url, username) { - url.username = ""; - const decoded = punycode.ucs2.decode(username); - for (let i = 0; i < decoded.length; ++i) { - url.username += percentEncodeChar(decoded[i], isUserinfoPercentEncode); - } -}; - -module.exports.setThePassword = function (url, password) { - url.password = ""; - const decoded = punycode.ucs2.decode(password); - for (let i = 0; i < decoded.length; ++i) { - url.password += percentEncodeChar(decoded[i], isUserinfoPercentEncode); - } -}; - -module.exports.serializeHost = serializeHost; - -module.exports.cannotHaveAUsernamePasswordPort = cannotHaveAUsernamePasswordPort; - -module.exports.serializeInteger = function (integer) { - return String(integer); -}; - -module.exports.parseURL = function (input, options) { - if (options === undefined) { - options = {}; - } - - // We don't handle blobs, so this just delegates: - return module.exports.basicURLParse(input, { baseURL: options.baseURL, encodingOverride: options.encodingOverride }); -}; - - -/***/ }), - -/***/ 3185: -/***/ ((module) => { - -"use strict"; - - -module.exports.mixin = function mixin(target, source) { - const keys = Object.getOwnPropertyNames(source); - for (let i = 0; i < keys.length; ++i) { - Object.defineProperty(target, 
keys[i], Object.getOwnPropertyDescriptor(source, keys[i])); - } -}; - -module.exports.wrapperSymbol = Symbol("wrapper"); -module.exports.implSymbol = Symbol("impl"); - -module.exports.wrapperForImpl = function (impl) { - return impl[module.exports.wrapperSymbol]; -}; - -module.exports.implForWrapper = function (wrapper) { - return wrapper[module.exports.implSymbol]; -}; - - - -/***/ }), - -/***/ 2940: -/***/ ((module) => { - -// Returns a wrapper function that returns a wrapped callback -// The wrapper function should do some stuff, and return a -// presumably different callback function. -// This makes sure that own properties are retained, so that -// decorations and such are not lost along the way. -module.exports = wrappy -function wrappy (fn, cb) { - if (fn && cb) return wrappy(fn)(cb) - - if (typeof fn !== 'function') - throw new TypeError('need wrapper function') - - Object.keys(fn).forEach(function (k) { - wrapper[k] = fn[k] - }) - - return wrapper - - function wrapper() { - var args = new Array(arguments.length) - for (var i = 0; i < args.length; i++) { - args[i] = arguments[i] - } - var ret = fn.apply(this, args) - var cb = args[args.length-1] - if (typeof ret === 'function' && ret !== cb) { - Object.keys(cb).forEach(function (k) { - ret[k] = cb[k] - }) - } - return ret - } -} - - -/***/ }), - -/***/ 7786: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); -var Scalar = __nccwpck_require__(8160); -var resolveBlockMap = __nccwpck_require__(6638); -var resolveBlockSeq = __nccwpck_require__(2257); -var resolveFlowCollection = __nccwpck_require__(6085); - -function composeCollection(CN, ctx, token, tagToken, onError) { - let coll; - switch (token.type) { - case 'block-map': { - coll = resolveBlockMap.resolveBlockMap(CN, ctx, token, onError); - break; - } - case 'block-seq': { - coll = resolveBlockSeq.resolveBlockSeq(CN, ctx, token, onError); - break; - } - case 'flow-collection': { - coll = resolveFlowCollection.resolveFlowCollection(CN, ctx, token, onError); - break; - } - } - if (!tagToken) - return coll; - const tagName = ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg)); - if (!tagName) - return coll; - // Cast needed due to: https://github.com/Microsoft/TypeScript/issues/3841 - const Coll = coll.constructor; - if (tagName === '!' || tagName === Coll.tagName) { - coll.tag = Coll.tagName; - return coll; - } - const expType = Node.isMap(coll) ? 'map' : 'seq'; - let tag = ctx.schema.tags.find(t => t.collection === expType && t.tag === tagName); - if (!tag) { - const kt = ctx.schema.knownTags[tagName]; - if (kt && kt.collection === expType) { - ctx.schema.tags.push(Object.assign({}, kt, { default: false })); - tag = kt; - } - else { - onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, true); - coll.tag = tagName; - return coll; - } - } - const res = tag.resolve(coll, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg), ctx.options); - const node = Node.isNode(res) - ? 
res - : new Scalar.Scalar(res); - node.range = coll.range; - node.tag = tagName; - if (tag?.format) - node.format = tag.format; - return node; -} - -exports.composeCollection = composeCollection; - - -/***/ }), - -/***/ 9157: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Document = __nccwpck_require__(1785); -var composeNode = __nccwpck_require__(6572); -var resolveEnd = __nccwpck_require__(1789); -var resolveProps = __nccwpck_require__(9663); - -function composeDoc(options, directives, { offset, start, value, end }, onError) { - const opts = Object.assign({ _directives: directives }, options); - const doc = new Document.Document(undefined, opts); - const ctx = { - atRoot: true, - directives: doc.directives, - options: doc.options, - schema: doc.schema - }; - const props = resolveProps.resolveProps(start, { - indicator: 'doc-start', - next: value ?? end?.[0], - offset, - onError, - startOnNewline: true - }); - if (props.found) { - doc.directives.docStart = true; - if (value && - (value.type === 'block-map' || value.type === 'block-seq') && - !props.hasNewline) - onError(props.end, 'MISSING_CHAR', 'Block collection cannot start on same line with directives-end marker'); - } - doc.contents = value - ? composeNode.composeNode(ctx, value, props, onError) - : composeNode.composeEmptyNode(ctx, props.end, start, null, props, onError); - const contentEnd = doc.contents.range[2]; - const re = resolveEnd.resolveEnd(end, contentEnd, false, onError); - if (re.comment) - doc.comment = re.comment; - doc.range = [offset, contentEnd, re.offset]; - return doc; -} - -exports.composeDoc = composeDoc; - - -/***/ }), - -/***/ 6572: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Alias = __nccwpck_require__(9712); -var composeCollection = __nccwpck_require__(7786); -var composeScalar = __nccwpck_require__(1490); -var resolveEnd = __nccwpck_require__(1789); -var utilEmptyScalarPosition = __nccwpck_require__(8517); - -const CN = { composeNode, composeEmptyNode }; -function composeNode(ctx, token, props, onError) { - const { spaceBefore, comment, anchor, tag } = props; - let node; - let isSrcToken = true; - switch (token.type) { - case 'alias': - node = composeAlias(ctx, token, onError); - if (anchor || tag) - onError(token, 'ALIAS_PROPS', 'An alias node must not specify any properties'); - break; - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': - case 'block-scalar': - node = composeScalar.composeScalar(ctx, token, tag, onError); - if (anchor) - node.anchor = anchor.source.substring(1); - break; - case 'block-map': - case 'block-seq': - case 'flow-collection': - node = composeCollection.composeCollection(CN, ctx, token, tag, onError); - if (anchor) - node.anchor = anchor.source.substring(1); - break; - default: { - const message = token.type === 'error' - ? 
token.message - : `Unsupported token (type: ${token.type})`; - onError(token, 'UNEXPECTED_TOKEN', message); - node = composeEmptyNode(ctx, token.offset, undefined, null, props, onError); - isSrcToken = false; - } - } - if (anchor && node.anchor === '') - onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string'); - if (spaceBefore) - node.spaceBefore = true; - if (comment) { - if (token.type === 'scalar' && token.source === '') - node.comment = comment; - else - node.commentBefore = comment; - } - // @ts-expect-error Type checking misses meaning of isSrcToken - if (ctx.options.keepSourceTokens && isSrcToken) - node.srcToken = token; - return node; -} -function composeEmptyNode(ctx, offset, before, pos, { spaceBefore, comment, anchor, tag }, onError) { - const token = { - type: 'scalar', - offset: utilEmptyScalarPosition.emptyScalarPosition(offset, before, pos), - indent: -1, - source: '' - }; - const node = composeScalar.composeScalar(ctx, token, tag, onError); - if (anchor) { - node.anchor = anchor.source.substring(1); - if (node.anchor === '') - onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string'); - } - if (spaceBefore) - node.spaceBefore = true; - if (comment) - node.comment = comment; - return node; -} -function composeAlias({ options }, { offset, source, end }, onError) { - const alias = new Alias.Alias(source.substring(1)); - if (alias.source === '') - onError(offset, 'BAD_ALIAS', 'Alias cannot be an empty string'); - if (alias.source.endsWith(':')) - onError(offset + source.length - 1, 'BAD_ALIAS', 'Alias ending in : is ambiguous', true); - const valueEnd = offset + source.length; - const re = resolveEnd.resolveEnd(end, valueEnd, options.strict, onError); - alias.range = [offset, valueEnd, re.offset]; - if (re.comment) - alias.comment = re.comment; - return alias; -} - -exports.composeEmptyNode = composeEmptyNode; -exports.composeNode = composeNode; - - -/***/ }), - -/***/ 1490: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); -var Scalar = __nccwpck_require__(8160); -var resolveBlockScalar = __nccwpck_require__(8172); -var resolveFlowScalar = __nccwpck_require__(6390); - -function composeScalar(ctx, token, tagToken, onError) { - const { value, type, comment, range } = token.type === 'block-scalar' - ? resolveBlockScalar.resolveBlockScalar(token, ctx.options.strict, onError) - : resolveFlowScalar.resolveFlowScalar(token, ctx.options.strict, onError); - const tagName = tagToken - ? ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg)) - : null; - const tag = tagToken && tagName - ? findScalarTagByName(ctx.schema, value, tagName, tagToken, onError) - : token.type === 'scalar' - ? findScalarTagByTest(ctx, value, token, onError) - : ctx.schema[Node.SCALAR]; - let scalar; - try { - const res = tag.resolve(value, msg => onError(tagToken ?? token, 'TAG_RESOLVE_FAILED', msg), ctx.options); - scalar = Node.isScalar(res) ? res : new Scalar.Scalar(res); - } - catch (error) { - const msg = error instanceof Error ? error.message : String(error); - onError(tagToken ?? 
token, 'TAG_RESOLVE_FAILED', msg); - scalar = new Scalar.Scalar(value); - } - scalar.range = range; - scalar.source = value; - if (type) - scalar.type = type; - if (tagName) - scalar.tag = tagName; - if (tag.format) - scalar.format = tag.format; - if (comment) - scalar.comment = comment; - return scalar; -} -function findScalarTagByName(schema, value, tagName, tagToken, onError) { - if (tagName === '!') - return schema[Node.SCALAR]; // non-specific tag - const matchWithTest = []; - for (const tag of schema.tags) { - if (!tag.collection && tag.tag === tagName) { - if (tag.default && tag.test) - matchWithTest.push(tag); - else - return tag; - } - } - for (const tag of matchWithTest) - if (tag.test?.test(value)) - return tag; - const kt = schema.knownTags[tagName]; - if (kt && !kt.collection) { - // Ensure that the known tag is available for stringifying, - // but does not get used by default. - schema.tags.push(Object.assign({}, kt, { default: false, test: undefined })); - return kt; - } - onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, tagName !== 'tag:yaml.org,2002:str'); - return schema[Node.SCALAR]; -} -function findScalarTagByTest({ directives, schema }, value, token, onError) { - const tag = schema.tags.find(tag => tag.default && tag.test?.test(value)) || schema[Node.SCALAR]; - if (schema.compat) { - const compat = schema.compat.find(tag => tag.default && tag.test?.test(value)) ?? - schema[Node.SCALAR]; - if (tag.tag !== compat.tag) { - const ts = directives.tagString(tag.tag); - const cs = directives.tagString(compat.tag); - const msg = `Value may be parsed as either ${ts} or ${cs}`; - onError(token, 'TAG_RESOLVE_FAILED', msg, true); - } - } - return tag; -} - -exports.composeScalar = composeScalar; - - -/***/ }), - -/***/ 9386: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var directives = __nccwpck_require__(8729); -var Document = __nccwpck_require__(1785); -var errors = __nccwpck_require__(3305); -var Node = __nccwpck_require__(9752); -var composeDoc = __nccwpck_require__(9157); -var resolveEnd = __nccwpck_require__(1789); - -function getErrorPos(src) { - if (typeof src === 'number') - return [src, src + 1]; - if (Array.isArray(src)) - return src.length === 2 ? src : [src[0], src[1]]; - const { offset, source } = src; - return [offset, offset + (typeof source === 'string' ? source.length : 1)]; -} -function parsePrelude(prelude) { - let comment = ''; - let atComment = false; - let afterEmptyLine = false; - for (let i = 0; i < prelude.length; ++i) { - const source = prelude[i]; - switch (source[0]) { - case '#': - comment += - (comment === '' ? '' : afterEmptyLine ? '\n\n' : '\n') + - (source.substring(1) || ' '); - atComment = true; - afterEmptyLine = false; - break; - case '%': - if (prelude[i + 1]?.[0] !== '#') - i += 1; - atComment = false; - break; - default: - // This may be wrong after doc-end, but in that case it doesn't matter - if (!atComment) - afterEmptyLine = true; - atComment = false; - } - } - return { comment, afterEmptyLine }; -} -/** - * Compose a stream of CST nodes into a stream of YAML Documents. - * - * ```ts - * import { Composer, Parser } from 'yaml' - * - * const src: string = ... 
- * const tokens = new Parser().parse(src) - * const docs = new Composer().compose(tokens) - * ``` - */ -class Composer { - constructor(options = {}) { - this.doc = null; - this.atDirectives = false; - this.prelude = []; - this.errors = []; - this.warnings = []; - this.onError = (source, code, message, warning) => { - const pos = getErrorPos(source); - if (warning) - this.warnings.push(new errors.YAMLWarning(pos, code, message)); - else - this.errors.push(new errors.YAMLParseError(pos, code, message)); - }; - // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing - this.directives = new directives.Directives({ version: options.version || '1.2' }); - this.options = options; - } - decorate(doc, afterDoc) { - const { comment, afterEmptyLine } = parsePrelude(this.prelude); - //console.log({ dc: doc.comment, prelude, comment }) - if (comment) { - const dc = doc.contents; - if (afterDoc) { - doc.comment = doc.comment ? `${doc.comment}\n${comment}` : comment; - } - else if (afterEmptyLine || doc.directives.docStart || !dc) { - doc.commentBefore = comment; - } - else if (Node.isCollection(dc) && !dc.flow && dc.items.length > 0) { - let it = dc.items[0]; - if (Node.isPair(it)) - it = it.key; - const cb = it.commentBefore; - it.commentBefore = cb ? `${comment}\n${cb}` : comment; - } - else { - const cb = dc.commentBefore; - dc.commentBefore = cb ? `${comment}\n${cb}` : comment; - } - } - if (afterDoc) { - Array.prototype.push.apply(doc.errors, this.errors); - Array.prototype.push.apply(doc.warnings, this.warnings); - } - else { - doc.errors = this.errors; - doc.warnings = this.warnings; - } - this.prelude = []; - this.errors = []; - this.warnings = []; - } - /** - * Current stream status information. - * - * Mostly useful at the end of input for an empty stream. - */ - streamInfo() { - return { - comment: parsePrelude(this.prelude).comment, - directives: this.directives, - errors: this.errors, - warnings: this.warnings - }; - } - /** - * Compose tokens into documents. - * - * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document. - * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly. - */ - *compose(tokens, forceDoc = false, endOffset = -1) { - for (const token of tokens) - yield* this.next(token); - yield* this.end(forceDoc, endOffset); - } - /** Advance the composer by one CST token. */ - *next(token) { - if (process.env.LOG_STREAM) - console.dir(token, { depth: null }); - switch (token.type) { - case 'directive': - this.directives.add(token.source, (offset, message, warning) => { - const pos = getErrorPos(token); - pos[0] += offset; - this.onError(pos, 'BAD_DIRECTIVE', message, warning); - }); - this.prelude.push(token.source); - this.atDirectives = true; - break; - case 'document': { - const doc = composeDoc.composeDoc(this.options, this.directives, token, this.onError); - if (this.atDirectives && !doc.directives.docStart) - this.onError(token, 'MISSING_CHAR', 'Missing directives-end/doc-start indicator line'); - this.decorate(doc, false); - if (this.doc) - yield this.doc; - this.doc = doc; - this.atDirectives = false; - break; - } - case 'byte-order-mark': - case 'space': - break; - case 'comment': - case 'newline': - this.prelude.push(token.source); - break; - case 'error': { - const msg = token.source - ? 
`${token.message}: ${JSON.stringify(token.source)}` - : token.message; - const error = new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg); - if (this.atDirectives || !this.doc) - this.errors.push(error); - else - this.doc.errors.push(error); - break; - } - case 'doc-end': { - if (!this.doc) { - const msg = 'Unexpected doc-end without preceding document'; - this.errors.push(new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg)); - break; - } - this.doc.directives.docEnd = true; - const end = resolveEnd.resolveEnd(token.end, token.offset + token.source.length, this.doc.options.strict, this.onError); - this.decorate(this.doc, true); - if (end.comment) { - const dc = this.doc.comment; - this.doc.comment = dc ? `${dc}\n${end.comment}` : end.comment; - } - this.doc.range[2] = end.offset; - break; - } - default: - this.errors.push(new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', `Unsupported token ${token.type}`)); - } - } - /** - * Call at end of input to yield any remaining document. - * - * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document. - * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly. - */ - *end(forceDoc = false, endOffset = -1) { - if (this.doc) { - this.decorate(this.doc, true); - yield this.doc; - this.doc = null; - } - else if (forceDoc) { - const opts = Object.assign({ _directives: this.directives }, this.options); - const doc = new Document.Document(undefined, opts); - if (this.atDirectives) - this.onError(endOffset, 'MISSING_CHAR', 'Missing directives-end indicator line'); - doc.range = [0, endOffset, endOffset]; - this.decorate(doc, false); - yield doc; - } - } -} - -exports.Composer = Composer; - - -/***/ }), - -/***/ 6638: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Pair = __nccwpck_require__(3497); -var YAMLMap = __nccwpck_require__(6761); -var resolveProps = __nccwpck_require__(9663); -var utilContainsNewline = __nccwpck_require__(7801); -var utilFlowIndentCheck = __nccwpck_require__(7013); -var utilMapIncludes = __nccwpck_require__(379); - -const startColMsg = 'All mapping items must start at the same column'; -function resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError) { - const map = new YAMLMap.YAMLMap(ctx.schema); - if (ctx.atRoot) - ctx.atRoot = false; - let offset = bm.offset; - for (const collItem of bm.items) { - const { start, key, sep, value } = collItem; - // key properties - const keyProps = resolveProps.resolveProps(start, { - indicator: 'explicit-key-ind', - next: key ?? sep?.[0], - offset, - onError, - startOnNewline: true - }); - const implicitKey = !keyProps.found; - if (implicitKey) { - if (key) { - if (key.type === 'block-seq') - onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'A block sequence may not be used as an implicit map key'); - else if ('indent' in key && key.indent !== bm.indent) - onError(offset, 'BAD_INDENT', startColMsg); - } - if (!keyProps.anchor && !keyProps.tag && !sep) { - // TODO: assert being at last item? - if (keyProps.comment) { - if (map.comment) - map.comment += '\n' + keyProps.comment; - else - map.comment = keyProps.comment; - } - continue; - } - if (keyProps.hasNewlineAfterProp || utilContainsNewline.containsNewline(key)) { - onError(key ?? 
start[start.length - 1], 'MULTILINE_IMPLICIT_KEY', 'Implicit keys need to be on a single line'); - } - } - else if (keyProps.found?.indent !== bm.indent) { - onError(offset, 'BAD_INDENT', startColMsg); - } - // key value - const keyStart = keyProps.end; - const keyNode = key - ? composeNode(ctx, key, keyProps, onError) - : composeEmptyNode(ctx, keyStart, start, null, keyProps, onError); - if (ctx.schema.compat) - utilFlowIndentCheck.flowIndentCheck(bm.indent, key, onError); - if (utilMapIncludes.mapIncludes(ctx, map.items, keyNode)) - onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique'); - // value properties - const valueProps = resolveProps.resolveProps(sep ?? [], { - indicator: 'map-value-ind', - next: value, - offset: keyNode.range[2], - onError, - startOnNewline: !key || key.type === 'block-scalar' - }); - offset = valueProps.end; - if (valueProps.found) { - if (implicitKey) { - if (value?.type === 'block-map' && !valueProps.hasNewline) - onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'Nested mappings are not allowed in compact mappings'); - if (ctx.options.strict && - keyProps.start < valueProps.found.offset - 1024) - onError(keyNode.range, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit block mapping key'); - } - // value value - const valueNode = value - ? composeNode(ctx, value, valueProps, onError) - : composeEmptyNode(ctx, offset, sep, null, valueProps, onError); - if (ctx.schema.compat) - utilFlowIndentCheck.flowIndentCheck(bm.indent, value, onError); - offset = valueNode.range[2]; - const pair = new Pair.Pair(keyNode, valueNode); - if (ctx.options.keepSourceTokens) - pair.srcToken = collItem; - map.items.push(pair); - } - else { - // key with no value - if (implicitKey) - onError(keyNode.range, 'MISSING_CHAR', 'Implicit map keys need to be followed by map values'); - if (valueProps.comment) { - if (keyNode.comment) - keyNode.comment += '\n' + valueProps.comment; - else - keyNode.comment = valueProps.comment; - } - const pair = new Pair.Pair(keyNode); - if (ctx.options.keepSourceTokens) - pair.srcToken = collItem; - map.items.push(pair); - } - } - map.range = [bm.offset, offset, offset]; - return map; -} - -exports.resolveBlockMap = resolveBlockMap; - - -/***/ }), - -/***/ 8172: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Scalar = __nccwpck_require__(8160); - -function resolveBlockScalar(scalar, strict, onError) { - const start = scalar.offset; - const header = parseBlockScalarHeader(scalar, strict, onError); - if (!header) - return { value: '', type: null, comment: '', range: [start, start, start] }; - const type = header.mode === '>' ? Scalar.Scalar.BLOCK_FOLDED : Scalar.Scalar.BLOCK_LITERAL; - const lines = scalar.source ? splitLines(scalar.source) : []; - // determine the end of content & start of chomping - let chompStart = lines.length; - for (let i = lines.length - 1; i >= 0; --i) { - const content = lines[i][1]; - if (content === '' || content === '\r') - chompStart = i; - else - break; - } - // shortcut for empty contents - if (chompStart === 0) { - const value = header.chomp === '+' && lines.length > 0 - ? 
'\n'.repeat(Math.max(1, lines.length - 1)) - : ''; - let end = start + header.length; - if (scalar.source) - end += scalar.source.length; - return { value, type, comment: header.comment, range: [start, end, end] }; - } - // find the indentation level to trim from start - let trimIndent = scalar.indent + header.indent; - let offset = scalar.offset + header.length; - let contentStart = 0; - for (let i = 0; i < chompStart; ++i) { - const [indent, content] = lines[i]; - if (content === '' || content === '\r') { - if (header.indent === 0 && indent.length > trimIndent) - trimIndent = indent.length; - } - else { - if (indent.length < trimIndent) { - const message = 'Block scalars with more-indented leading empty lines must use an explicit indentation indicator'; - onError(offset + indent.length, 'MISSING_CHAR', message); - } - if (header.indent === 0) - trimIndent = indent.length; - contentStart = i; - break; - } - offset += indent.length + content.length + 1; - } - // include trailing more-indented empty lines in content - for (let i = lines.length - 1; i >= chompStart; --i) { - if (lines[i][0].length > trimIndent) - chompStart = i + 1; - } - let value = ''; - let sep = ''; - let prevMoreIndented = false; - // leading whitespace is kept intact - for (let i = 0; i < contentStart; ++i) - value += lines[i][0].slice(trimIndent) + '\n'; - for (let i = contentStart; i < chompStart; ++i) { - let [indent, content] = lines[i]; - offset += indent.length + content.length + 1; - const crlf = content[content.length - 1] === '\r'; - if (crlf) - content = content.slice(0, -1); - /* istanbul ignore if already caught in lexer */ - if (content && indent.length < trimIndent) { - const src = header.indent - ? 'explicit indentation indicator' - : 'first line'; - const message = `Block scalar lines must not be less indented than their ${src}`; - onError(offset - content.length - (crlf ? 
2 : 1), 'BAD_INDENT', message); - indent = ''; - } - if (type === Scalar.Scalar.BLOCK_LITERAL) { - value += sep + indent.slice(trimIndent) + content; - sep = '\n'; - } - else if (indent.length > trimIndent || content[0] === '\t') { - // more-indented content within a folded block - if (sep === ' ') - sep = '\n'; - else if (!prevMoreIndented && sep === '\n') - sep = '\n\n'; - value += sep + indent.slice(trimIndent) + content; - sep = '\n'; - prevMoreIndented = true; - } - else if (content === '') { - // empty line - if (sep === '\n') - value += '\n'; - else - sep = '\n'; - } - else { - value += sep + content; - sep = ' '; - prevMoreIndented = false; - } - } - switch (header.chomp) { - case '-': - break; - case '+': - for (let i = chompStart; i < lines.length; ++i) - value += '\n' + lines[i][0].slice(trimIndent); - if (value[value.length - 1] !== '\n') - value += '\n'; - break; - default: - value += '\n'; - } - const end = start + header.length + scalar.source.length; - return { value, type, comment: header.comment, range: [start, end, end] }; -} -function parseBlockScalarHeader({ offset, props }, strict, onError) { - /* istanbul ignore if should not happen */ - if (props[0].type !== 'block-scalar-header') { - onError(props[0], 'IMPOSSIBLE', 'Block scalar header not found'); - return null; - } - const { source } = props[0]; - const mode = source[0]; - let indent = 0; - let chomp = ''; - let error = -1; - for (let i = 1; i < source.length; ++i) { - const ch = source[i]; - if (!chomp && (ch === '-' || ch === '+')) - chomp = ch; - else { - const n = Number(ch); - if (!indent && n) - indent = n; - else if (error === -1) - error = offset + i; - } - } - if (error !== -1) - onError(error, 'UNEXPECTED_TOKEN', `Block scalar header includes extra characters: ${source}`); - let hasSpace = false; - let comment = ''; - let length = source.length; - for (let i = 1; i < props.length; ++i) { - const token = props[i]; - switch (token.type) { - case 'space': - hasSpace = true; - // fallthrough - case 'newline': - length += token.source.length; - break; - case 'comment': - if (strict && !hasSpace) { - const message = 'Comments must be separated from other tokens by white space characters'; - onError(token, 'MISSING_CHAR', message); - } - length += token.source.length; - comment = token.source.substring(1); - break; - case 'error': - onError(token, 'UNEXPECTED_TOKEN', token.message); - length += token.source.length; - break; - /* istanbul ignore next should not happen */ - default: { - const message = `Unexpected token in block scalar header: ${token.type}`; - onError(token, 'UNEXPECTED_TOKEN', message); - const ts = token.source; - if (ts && typeof ts === 'string') - length += ts.length; - } - } - } - return { mode, indent, chomp, comment, length }; -} -/** @returns Array of lines split up as `[indent, content]` */ -function splitLines(source) { - const split = source.split(/\n( *)/); - const first = split[0]; - const m = first.match(/^( *)/); - const line0 = m?.[1] - ? 
[m[1], first.slice(m[1].length)] - : ['', first]; - const lines = [line0]; - for (let i = 1; i < split.length; i += 2) - lines.push([split[i], split[i + 1]]); - return lines; -} - -exports.resolveBlockScalar = resolveBlockScalar; - - -/***/ }), - -/***/ 2257: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var YAMLSeq = __nccwpck_require__(3810); -var resolveProps = __nccwpck_require__(9663); -var utilFlowIndentCheck = __nccwpck_require__(7013); - -function resolveBlockSeq({ composeNode, composeEmptyNode }, ctx, bs, onError) { - const seq = new YAMLSeq.YAMLSeq(ctx.schema); - if (ctx.atRoot) - ctx.atRoot = false; - let offset = bs.offset; - for (const { start, value } of bs.items) { - const props = resolveProps.resolveProps(start, { - indicator: 'seq-item-ind', - next: value, - offset, - onError, - startOnNewline: true - }); - offset = props.end; - if (!props.found) { - if (props.anchor || props.tag || value) { - if (value && value.type === 'block-seq') - onError(offset, 'BAD_INDENT', 'All sequence items must start at the same column'); - else - onError(offset, 'MISSING_CHAR', 'Sequence item without - indicator'); - } - else { - // TODO: assert being at last item? - if (props.comment) - seq.comment = props.comment; - continue; - } - } - const node = value - ? composeNode(ctx, value, props, onError) - : composeEmptyNode(ctx, offset, start, null, props, onError); - if (ctx.schema.compat) - utilFlowIndentCheck.flowIndentCheck(bs.indent, value, onError); - offset = node.range[2]; - seq.items.push(node); - } - seq.range = [bs.offset, offset, offset]; - return seq; -} - -exports.resolveBlockSeq = resolveBlockSeq; - - -/***/ }), - -/***/ 1789: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -function resolveEnd(end, offset, reqSpace, onError) { - let comment = ''; - if (end) { - let hasSpace = false; - let sep = ''; - for (const token of end) { - const { source, type } = token; - switch (type) { - case 'space': - hasSpace = true; - break; - case 'comment': { - if (reqSpace && !hasSpace) - onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters'); - const cb = source.substring(1) || ' '; - if (!comment) - comment = cb; - else - comment += sep + cb; - sep = ''; - break; - } - case 'newline': - if (comment) - sep += source; - hasSpace = true; - break; - default: - onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${type} at node end`); - } - offset += source.length; - } - } - return { comment, offset }; -} - -exports.resolveEnd = resolveEnd; - - -/***/ }), - -/***/ 6085: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); -var Pair = __nccwpck_require__(3497); -var YAMLMap = __nccwpck_require__(6761); -var YAMLSeq = __nccwpck_require__(3810); -var resolveEnd = __nccwpck_require__(1789); -var resolveProps = __nccwpck_require__(9663); -var utilContainsNewline = __nccwpck_require__(7801); -var utilMapIncludes = __nccwpck_require__(379); - -const blockMsg = 'Block collections are not allowed within flow collections'; -const isBlock = (token) => token && (token.type === 'block-map' || token.type === 'block-seq'); -function resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onError) { - const isMap = fc.start.source === '{'; - const fcName = isMap ? 'flow map' : 'flow sequence'; - const coll = isMap - ? 
new YAMLMap.YAMLMap(ctx.schema) - : new YAMLSeq.YAMLSeq(ctx.schema); - coll.flow = true; - const atRoot = ctx.atRoot; - if (atRoot) - ctx.atRoot = false; - let offset = fc.offset + fc.start.source.length; - for (let i = 0; i < fc.items.length; ++i) { - const collItem = fc.items[i]; - const { start, key, sep, value } = collItem; - const props = resolveProps.resolveProps(start, { - flow: fcName, - indicator: 'explicit-key-ind', - next: key ?? sep?.[0], - offset, - onError, - startOnNewline: false - }); - if (!props.found) { - if (!props.anchor && !props.tag && !sep && !value) { - if (i === 0 && props.comma) - onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`); - else if (i < fc.items.length - 1) - onError(props.start, 'UNEXPECTED_TOKEN', `Unexpected empty item in ${fcName}`); - if (props.comment) { - if (coll.comment) - coll.comment += '\n' + props.comment; - else - coll.comment = props.comment; - } - offset = props.end; - continue; - } - if (!isMap && ctx.options.strict && utilContainsNewline.containsNewline(key)) - onError(key, // checked by containsNewline() - 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line'); - } - if (i === 0) { - if (props.comma) - onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`); - } - else { - if (!props.comma) - onError(props.start, 'MISSING_CHAR', `Missing , between ${fcName} items`); - if (props.comment) { - let prevItemComment = ''; - loop: for (const st of start) { - switch (st.type) { - case 'comma': - case 'space': - break; - case 'comment': - prevItemComment = st.source.substring(1); - break loop; - default: - break loop; - } - } - if (prevItemComment) { - let prev = coll.items[coll.items.length - 1]; - if (Node.isPair(prev)) - prev = prev.value ?? prev.key; - if (prev.comment) - prev.comment += '\n' + prevItemComment; - else - prev.comment = prevItemComment; - props.comment = props.comment.substring(prevItemComment.length + 1); - } - } - } - if (!isMap && !sep && !props.found) { - // item is a value in a seq - // → key & sep are empty, start does not include ? or : - const valueNode = value - ? composeNode(ctx, value, props, onError) - : composeEmptyNode(ctx, props.end, sep, null, props, onError); - coll.items.push(valueNode); - offset = valueNode.range[2]; - if (isBlock(value)) - onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg); - } - else { - // item is a key+value pair - // key value - const keyStart = props.end; - const keyNode = key - ? composeNode(ctx, key, props, onError) - : composeEmptyNode(ctx, keyStart, start, null, props, onError); - if (isBlock(key)) - onError(keyNode.range, 'BLOCK_IN_FLOW', blockMsg); - // value properties - const valueProps = resolveProps.resolveProps(sep ?? 
[], { - flow: fcName, - indicator: 'map-value-ind', - next: value, - offset: keyNode.range[2], - onError, - startOnNewline: false - }); - if (valueProps.found) { - if (!isMap && !props.found && ctx.options.strict) { - if (sep) - for (const st of sep) { - if (st === valueProps.found) - break; - if (st.type === 'newline') { - onError(st, 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line'); - break; - } - } - if (props.start < valueProps.found.offset - 1024) - onError(valueProps.found, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit flow sequence key'); - } - } - else if (value) { - if ('source' in value && value.source && value.source[0] === ':') - onError(value, 'MISSING_CHAR', `Missing space after : in ${fcName}`); - else - onError(valueProps.start, 'MISSING_CHAR', `Missing , or : between ${fcName} items`); - } - // value value - const valueNode = value - ? composeNode(ctx, value, valueProps, onError) - : valueProps.found - ? composeEmptyNode(ctx, valueProps.end, sep, null, valueProps, onError) - : null; - if (valueNode) { - if (isBlock(value)) - onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg); - } - else if (valueProps.comment) { - if (keyNode.comment) - keyNode.comment += '\n' + valueProps.comment; - else - keyNode.comment = valueProps.comment; - } - const pair = new Pair.Pair(keyNode, valueNode); - if (ctx.options.keepSourceTokens) - pair.srcToken = collItem; - if (isMap) { - const map = coll; - if (utilMapIncludes.mapIncludes(ctx, map.items, keyNode)) - onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique'); - map.items.push(pair); - } - else { - const map = new YAMLMap.YAMLMap(ctx.schema); - map.flow = true; - map.items.push(pair); - coll.items.push(map); - } - offset = valueNode ? valueNode.range[2] : valueProps.end; - } - } - const expectedEnd = isMap ? '}' : ']'; - const [ce, ...ee] = fc.end; - let cePos = offset; - if (ce && ce.source === expectedEnd) - cePos = ce.offset + ce.source.length; - else { - const name = fcName[0].toUpperCase() + fcName.substring(1); - const msg = atRoot - ? `${name} must end with a ${expectedEnd}` - : `${name} in block collection must be sufficiently indented and end with a ${expectedEnd}`; - onError(offset, atRoot ? 
'MISSING_CHAR' : 'BAD_INDENT', msg); - if (ce && ce.source.length !== 1) - ee.unshift(ce); - } - if (ee.length > 0) { - const end = resolveEnd.resolveEnd(ee, cePos, ctx.options.strict, onError); - if (end.comment) { - if (coll.comment) - coll.comment += '\n' + end.comment; - else - coll.comment = end.comment; - } - coll.range = [fc.offset, cePos, end.offset]; - } - else { - coll.range = [fc.offset, cePos, cePos]; - } - return coll; -} - -exports.resolveFlowCollection = resolveFlowCollection; - - -/***/ }), - -/***/ 6390: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Scalar = __nccwpck_require__(8160); -var resolveEnd = __nccwpck_require__(1789); - -function resolveFlowScalar(scalar, strict, onError) { - const { offset, type, source, end } = scalar; - let _type; - let value; - const _onError = (rel, code, msg) => onError(offset + rel, code, msg); - switch (type) { - case 'scalar': - _type = Scalar.Scalar.PLAIN; - value = plainValue(source, _onError); - break; - case 'single-quoted-scalar': - _type = Scalar.Scalar.QUOTE_SINGLE; - value = singleQuotedValue(source, _onError); - break; - case 'double-quoted-scalar': - _type = Scalar.Scalar.QUOTE_DOUBLE; - value = doubleQuotedValue(source, _onError); - break; - /* istanbul ignore next should not happen */ - default: - onError(scalar, 'UNEXPECTED_TOKEN', `Expected a flow scalar value, but found: ${type}`); - return { - value: '', - type: null, - comment: '', - range: [offset, offset + source.length, offset + source.length] - }; - } - const valueEnd = offset + source.length; - const re = resolveEnd.resolveEnd(end, valueEnd, strict, onError); - return { - value, - type: _type, - comment: re.comment, - range: [offset, valueEnd, re.offset] - }; -} -function plainValue(source, onError) { - let badChar = ''; - switch (source[0]) { - /* istanbul ignore next should not happen */ - case '\t': - badChar = 'a tab character'; - break; - case ',': - badChar = 'flow indicator character ,'; - break; - case '%': - badChar = 'directive indicator character %'; - break; - case '|': - case '>': { - badChar = `block scalar indicator ${source[0]}`; - break; - } - case '@': - case '`': { - badChar = `reserved character ${source[0]}`; - break; - } - } - if (badChar) - onError(0, 'BAD_SCALAR_START', `Plain value cannot start with ${badChar}`); - return foldLines(source); -} -function singleQuotedValue(source, onError) { - if (source[source.length - 1] !== "'" || source.length === 1) - onError(source.length, 'MISSING_CHAR', "Missing closing 'quote"); - return foldLines(source.slice(1, -1)).replace(/''/g, "'"); -} -function foldLines(source) { - /** - * The negative lookbehind here and in the `re` RegExp is to - * prevent causing a polynomial search time in certain cases. - * - * The try-catch is for Safari, which doesn't support this yet: - * https://caniuse.com/js-regexp-lookbehind - */ - let first, line; - try { - first = new RegExp('(.*?)(? wsStart ? source.slice(wsStart, i + 1) : ch; - } - else { - res += ch; - } - } - if (source[source.length - 1] !== '"' || source.length === 1) - onError(source.length, 'MISSING_CHAR', 'Missing closing "quote'); - return res; -} -/** - * Fold a single newline into a space, multiple newlines to N - 1 newlines. 
- * Presumes `source[offset] === '\n'` - */ -function foldNewline(source, offset) { - let fold = ''; - let ch = source[offset + 1]; - while (ch === ' ' || ch === '\t' || ch === '\n' || ch === '\r') { - if (ch === '\r' && source[offset + 2] !== '\n') - break; - if (ch === '\n') - fold += '\n'; - offset += 1; - ch = source[offset + 1]; - } - if (!fold) - fold = ' '; - return { fold, offset }; -} -const escapeCodes = { - '0': '\0', - a: '\x07', - b: '\b', - e: '\x1b', - f: '\f', - n: '\n', - r: '\r', - t: '\t', - v: '\v', - N: '\u0085', - _: '\u00a0', - L: '\u2028', - P: '\u2029', - ' ': ' ', - '"': '"', - '/': '/', - '\\': '\\', - '\t': '\t' -}; -function parseCharCode(source, offset, length, onError) { - const cc = source.substr(offset, length); - const ok = cc.length === length && /^[0-9a-fA-F]+$/.test(cc); - const code = ok ? parseInt(cc, 16) : NaN; - if (isNaN(code)) { - const raw = source.substr(offset - 2, length + 2); - onError(offset - 2, 'BAD_DQ_ESCAPE', `Invalid escape sequence ${raw}`); - return raw; - } - return String.fromCodePoint(code); -} - -exports.resolveFlowScalar = resolveFlowScalar; - - -/***/ }), - -/***/ 9663: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -function resolveProps(tokens, { flow, indicator, next, offset, onError, startOnNewline }) { - let spaceBefore = false; - let atNewline = startOnNewline; - let hasSpace = startOnNewline; - let comment = ''; - let commentSep = ''; - let hasNewline = false; - let hasNewlineAfterProp = false; - let reqSpace = false; - let anchor = null; - let tag = null; - let comma = null; - let found = null; - let start = null; - for (const token of tokens) { - if (reqSpace) { - if (token.type !== 'space' && - token.type !== 'newline' && - token.type !== 'comma') - onError(token.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space'); - reqSpace = false; - } - switch (token.type) { - case 'space': - // At the doc level, tabs at line start may be parsed - // as leading white space rather than indentation. - // In a flow collection, only the parser handles indent. 
- if (!flow && - atNewline && - indicator !== 'doc-start' && - token.source[0] === '\t') - onError(token, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation'); - hasSpace = true; - break; - case 'comment': { - if (!hasSpace) - onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters'); - const cb = token.source.substring(1) || ' '; - if (!comment) - comment = cb; - else - comment += commentSep + cb; - commentSep = ''; - atNewline = false; - break; - } - case 'newline': - if (atNewline) { - if (comment) - comment += token.source; - else - spaceBefore = true; - } - else - commentSep += token.source; - atNewline = true; - hasNewline = true; - if (anchor || tag) - hasNewlineAfterProp = true; - hasSpace = true; - break; - case 'anchor': - if (anchor) - onError(token, 'MULTIPLE_ANCHORS', 'A node can have at most one anchor'); - if (token.source.endsWith(':')) - onError(token.offset + token.source.length - 1, 'BAD_ALIAS', 'Anchor ending in : is ambiguous', true); - anchor = token; - if (start === null) - start = token.offset; - atNewline = false; - hasSpace = false; - reqSpace = true; - break; - case 'tag': { - if (tag) - onError(token, 'MULTIPLE_TAGS', 'A node can have at most one tag'); - tag = token; - if (start === null) - start = token.offset; - atNewline = false; - hasSpace = false; - reqSpace = true; - break; - } - case indicator: - // Could here handle preceding comments differently - if (anchor || tag) - onError(token, 'BAD_PROP_ORDER', `Anchors and tags must be after the ${token.source} indicator`); - if (found) - onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.source} in ${flow ?? 'collection'}`); - found = token; - atNewline = false; - hasSpace = false; - break; - case 'comma': - if (flow) { - if (comma) - onError(token, 'UNEXPECTED_TOKEN', `Unexpected , in ${flow}`); - comma = token; - atNewline = false; - hasSpace = false; - break; - } - // else fallthrough - default: - onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.type} token`); - atNewline = false; - hasSpace = false; - } - } - const last = tokens[tokens.length - 1]; - const end = last ? last.offset + last.source.length : offset; - if (reqSpace && - next && - next.type !== 'space' && - next.type !== 'newline' && - next.type !== 'comma' && - (next.type !== 'scalar' || next.source !== '')) - onError(next.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space'); - return { - comma, - found, - spaceBefore, - comment, - hasNewline, - hasNewlineAfterProp, - anchor, - tag, - end, - start: start ?? 
end - }; -} - -exports.resolveProps = resolveProps; - - -/***/ }), - -/***/ 7801: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -function containsNewline(key) { - if (!key) - return null; - switch (key.type) { - case 'alias': - case 'scalar': - case 'double-quoted-scalar': - case 'single-quoted-scalar': - if (key.source.includes('\n')) - return true; - if (key.end) - for (const st of key.end) - if (st.type === 'newline') - return true; - return false; - case 'flow-collection': - for (const it of key.items) { - for (const st of it.start) - if (st.type === 'newline') - return true; - if (it.sep) - for (const st of it.sep) - if (st.type === 'newline') - return true; - if (containsNewline(it.key) || containsNewline(it.value)) - return true; - } - return false; - default: - return true; - } -} - -exports.containsNewline = containsNewline; - - -/***/ }), - -/***/ 8517: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -function emptyScalarPosition(offset, before, pos) { - if (before) { - if (pos === null) - pos = before.length; - for (let i = pos - 1; i >= 0; --i) { - let st = before[i]; - switch (st.type) { - case 'space': - case 'comment': - case 'newline': - offset -= st.source.length; - continue; - } - // Technically, an empty scalar is immediately after the last non-empty - // node, but it's more useful to place it after any whitespace. - st = before[++i]; - while (st?.type === 'space') { - offset += st.source.length; - st = before[++i]; - } - break; - } - } - return offset; -} - -exports.emptyScalarPosition = emptyScalarPosition; - - -/***/ }), - -/***/ 7013: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var utilContainsNewline = __nccwpck_require__(7801); - -function flowIndentCheck(indent, fc, onError) { - if (fc?.type === 'flow-collection') { - const end = fc.end[0]; - if (end.indent === indent && - (end.source === ']' || end.source === '}') && - utilContainsNewline.containsNewline(fc)) { - const msg = 'Flow end indicator should be more indented than parent'; - onError(end, 'BAD_INDENT', msg, true); - } - } -} - -exports.flowIndentCheck = flowIndentCheck; - - -/***/ }), - -/***/ 379: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); - -function mapIncludes(ctx, items, search) { - const { uniqueKeys } = ctx.options; - if (uniqueKeys === false) - return false; - const isEqual = typeof uniqueKeys === 'function' - ? 
uniqueKeys - : (a, b) => a === b || - (Node.isScalar(a) && - Node.isScalar(b) && - a.value === b.value && - !(a.value === '<<' && ctx.schema.merge)); - return items.some(pair => isEqual(pair.key, search)); -} - -exports.mapIncludes = mapIncludes; - - -/***/ }), - -/***/ 1785: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Alias = __nccwpck_require__(9712); -var Collection = __nccwpck_require__(9797); -var Node = __nccwpck_require__(9752); -var Pair = __nccwpck_require__(3497); -var toJS = __nccwpck_require__(8842); -var Schema = __nccwpck_require__(7425); -var stringify = __nccwpck_require__(1922); -var stringifyDocument = __nccwpck_require__(9615); -var anchors = __nccwpck_require__(584); -var applyReviver = __nccwpck_require__(9833); -var createNode = __nccwpck_require__(9807); -var directives = __nccwpck_require__(8729); - -class Document { - constructor(value, replacer, options) { - /** A comment before this Document */ - this.commentBefore = null; - /** A comment immediately after this Document */ - this.comment = null; - /** Errors encountered during parsing. */ - this.errors = []; - /** Warnings encountered during parsing. */ - this.warnings = []; - Object.defineProperty(this, Node.NODE_TYPE, { value: Node.DOC }); - let _replacer = null; - if (typeof replacer === 'function' || Array.isArray(replacer)) { - _replacer = replacer; - } - else if (options === undefined && replacer) { - options = replacer; - replacer = undefined; - } - const opt = Object.assign({ - intAsBigInt: false, - keepSourceTokens: false, - logLevel: 'warn', - prettyErrors: true, - strict: true, - uniqueKeys: true, - version: '1.2' - }, options); - this.options = opt; - let { version } = opt; - if (options?._directives) { - this.directives = options._directives.atDocument(); - if (this.directives.yaml.explicit) - version = this.directives.yaml.version; - } - else - this.directives = new directives.Directives({ version }); - this.setSchema(version, options); - if (value === undefined) - this.contents = null; - else { - this.contents = this.createNode(value, _replacer, options); - } - } - /** - * Create a deep copy of this Document and its contents. - * - * Custom Node values that inherit from `Object` still refer to their original instances. - */ - clone() { - const copy = Object.create(Document.prototype, { - [Node.NODE_TYPE]: { value: Node.DOC } - }); - copy.commentBefore = this.commentBefore; - copy.comment = this.comment; - copy.errors = this.errors.slice(); - copy.warnings = this.warnings.slice(); - copy.options = Object.assign({}, this.options); - if (this.directives) - copy.directives = this.directives.clone(); - copy.schema = this.schema.clone(); - copy.contents = Node.isNode(this.contents) - ? this.contents.clone(copy.schema) - : this.contents; - if (this.range) - copy.range = this.range.slice(); - return copy; - } - /** Adds a value to the document. */ - add(value) { - if (assertCollection(this.contents)) - this.contents.add(value); - } - /** Adds a value to the document. */ - addIn(path, value) { - if (assertCollection(this.contents)) - this.contents.addIn(path, value); - } - /** - * Create a new `Alias` node, ensuring that the target `node` has the required anchor. - * - * If `node` already has an anchor, `name` is ignored. - * Otherwise, the `node.anchor` value will be set to `name`, - * or if an anchor with that name is already present in the document, - * `name` will be used as a prefix for a new unique anchor. 
- * If `name` is undefined, the generated anchor will use 'a' as a prefix. - */ - createAlias(node, name) { - if (!node.anchor) { - const prev = anchors.anchorNames(this); - node.anchor = - // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing - !name || prev.has(name) ? anchors.findNewAnchor(name || 'a', prev) : name; - } - return new Alias.Alias(node.anchor); - } - createNode(value, replacer, options) { - let _replacer = undefined; - if (typeof replacer === 'function') { - value = replacer.call({ '': value }, '', value); - _replacer = replacer; - } - else if (Array.isArray(replacer)) { - const keyToStr = (v) => typeof v === 'number' || v instanceof String || v instanceof Number; - const asStr = replacer.filter(keyToStr).map(String); - if (asStr.length > 0) - replacer = replacer.concat(asStr); - _replacer = replacer; - } - else if (options === undefined && replacer) { - options = replacer; - replacer = undefined; - } - const { aliasDuplicateObjects, anchorPrefix, flow, keepUndefined, onTagObj, tag } = options ?? {}; - const { onAnchor, setAnchors, sourceObjects } = anchors.createNodeAnchors(this, - // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing - anchorPrefix || 'a'); - const ctx = { - aliasDuplicateObjects: aliasDuplicateObjects ?? true, - keepUndefined: keepUndefined ?? false, - onAnchor, - onTagObj, - replacer: _replacer, - schema: this.schema, - sourceObjects - }; - const node = createNode.createNode(value, tag, ctx); - if (flow && Node.isCollection(node)) - node.flow = true; - setAnchors(); - return node; - } - /** - * Convert a key and a value into a `Pair` using the current schema, - * recursively wrapping all values as `Scalar` or `Collection` nodes. - */ - createPair(key, value, options = {}) { - const k = this.createNode(key, null, options); - const v = this.createNode(value, null, options); - return new Pair.Pair(k, v); - } - /** - * Removes a value from the document. - * @returns `true` if the item was found and removed. - */ - delete(key) { - return assertCollection(this.contents) ? this.contents.delete(key) : false; - } - /** - * Removes a value from the document. - * @returns `true` if the item was found and removed. - */ - deleteIn(path) { - if (Collection.isEmptyPath(path)) { - if (this.contents == null) - return false; - this.contents = null; - return true; - } - return assertCollection(this.contents) - ? this.contents.deleteIn(path) - : false; - } - /** - * Returns item at `key`, or `undefined` if not found. By default unwraps - * scalar values from their surrounding node; to disable set `keepScalar` to - * `true` (collections are always returned intact). - */ - get(key, keepScalar) { - return Node.isCollection(this.contents) - ? this.contents.get(key, keepScalar) - : undefined; - } - /** - * Returns item at `path`, or `undefined` if not found. By default unwraps - * scalar values from their surrounding node; to disable set `keepScalar` to - * `true` (collections are always returned intact). - */ - getIn(path, keepScalar) { - if (Collection.isEmptyPath(path)) - return !keepScalar && Node.isScalar(this.contents) - ? this.contents.value - : this.contents; - return Node.isCollection(this.contents) - ? this.contents.getIn(path, keepScalar) - : undefined; - } - /** - * Checks if the document includes a value with the key `key`. - */ - has(key) { - return Node.isCollection(this.contents) ? this.contents.has(key) : false; - } - /** - * Checks if the document includes a value at `path`. 
- */ - hasIn(path) { - if (Collection.isEmptyPath(path)) - return this.contents !== undefined; - return Node.isCollection(this.contents) ? this.contents.hasIn(path) : false; - } - /** - * Sets a value in this document. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. - */ - set(key, value) { - if (this.contents == null) { - this.contents = Collection.collectionFromPath(this.schema, [key], value); - } - else if (assertCollection(this.contents)) { - this.contents.set(key, value); - } - } - /** - * Sets a value in this document. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. - */ - setIn(path, value) { - if (Collection.isEmptyPath(path)) - this.contents = value; - else if (this.contents == null) { - this.contents = Collection.collectionFromPath(this.schema, Array.from(path), value); - } - else if (assertCollection(this.contents)) { - this.contents.setIn(path, value); - } - } - /** - * Change the YAML version and schema used by the document. - * A `null` version disables support for directives, explicit tags, anchors, and aliases. - * It also requires the `schema` option to be given as a `Schema` instance value. - * - * Overrides all previously set schema options. - */ - setSchema(version, options = {}) { - if (typeof version === 'number') - version = String(version); - let opt; - switch (version) { - case '1.1': - if (this.directives) - this.directives.yaml.version = '1.1'; - else - this.directives = new directives.Directives({ version: '1.1' }); - opt = { merge: true, resolveKnownTags: false, schema: 'yaml-1.1' }; - break; - case '1.2': - case 'next': - if (this.directives) - this.directives.yaml.version = version; - else - this.directives = new directives.Directives({ version }); - opt = { merge: false, resolveKnownTags: true, schema: 'core' }; - break; - case null: - if (this.directives) - delete this.directives; - opt = null; - break; - default: { - const sv = JSON.stringify(version); - throw new Error(`Expected '1.1', '1.2' or null as first argument, but found: ${sv}`); - } - } - // Not using `instanceof Schema` to allow for duck typing - if (options.schema instanceof Object) - this.schema = options.schema; - else if (opt) - this.schema = new Schema.Schema(Object.assign(opt, options)); - else - throw new Error(`With a null YAML version, the { schema: Schema } option is required`); - } - // json & jsonArg are only used from toJSON() - toJS({ json, jsonArg, mapAsMap, maxAliasCount, onAnchor, reviver } = {}) { - const ctx = { - anchors: new Map(), - doc: this, - keep: !json, - mapAsMap: mapAsMap === true, - mapKeyWarned: false, - maxAliasCount: typeof maxAliasCount === 'number' ? maxAliasCount : 100, - stringify: stringify.stringify - }; - const res = toJS.toJS(this.contents, jsonArg ?? '', ctx); - if (typeof onAnchor === 'function') - for (const { count, res } of ctx.anchors.values()) - onAnchor(res, count); - return typeof reviver === 'function' - ? applyReviver.applyReviver(reviver, { '': res }, '', res) - : res; - } - /** - * A JSON representation of the document `contents`. - * - * @param jsonArg Used by `JSON.stringify` to indicate the array index or - * property name. - */ - toJSON(jsonArg, onAnchor) { - return this.toJS({ json: true, jsonArg, mapAsMap: false, onAnchor }); - } - /** A YAML representation of the document. 
*/ - toString(options = {}) { - if (this.errors.length > 0) - throw new Error('Document with errors cannot be stringified'); - if ('indent' in options && - (!Number.isInteger(options.indent) || Number(options.indent) <= 0)) { - const s = JSON.stringify(options.indent); - throw new Error(`"indent" option must be a positive integer, not ${s}`); - } - return stringifyDocument.stringifyDocument(this, options); - } -} -function assertCollection(contents) { - if (Node.isCollection(contents)) - return true; - throw new Error('Expected a YAML collection as document contents'); -} - -exports.Document = Document; - - -/***/ }), - -/***/ 584: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); -var visit = __nccwpck_require__(8234); - -/** - * Verify that the input string is a valid anchor. - * - * Will throw on errors. - */ -function anchorIsValid(anchor) { - if (/[\x00-\x19\s,[\]{}]/.test(anchor)) { - const sa = JSON.stringify(anchor); - const msg = `Anchor must not contain whitespace or control characters: ${sa}`; - throw new Error(msg); - } - return true; -} -function anchorNames(root) { - const anchors = new Set(); - visit.visit(root, { - Value(_key, node) { - if (node.anchor) - anchors.add(node.anchor); - } - }); - return anchors; -} -/** Find a new anchor name with the given `prefix` and a one-indexed suffix. */ -function findNewAnchor(prefix, exclude) { - for (let i = 1; true; ++i) { - const name = `${prefix}${i}`; - if (!exclude.has(name)) - return name; - } -} -function createNodeAnchors(doc, prefix) { - const aliasObjects = []; - const sourceObjects = new Map(); - let prevAnchors = null; - return { - onAnchor: (source) => { - aliasObjects.push(source); - if (!prevAnchors) - prevAnchors = anchorNames(doc); - const anchor = findNewAnchor(prefix, prevAnchors); - prevAnchors.add(anchor); - return anchor; - }, - /** - * With circular references, the source node is only resolved after all - * of its child nodes are. This is why anchors are set only after all of - * the nodes have been created. - */ - setAnchors: () => { - for (const source of aliasObjects) { - const ref = sourceObjects.get(source); - if (typeof ref === 'object' && - ref.anchor && - (Node.isScalar(ref.node) || Node.isCollection(ref.node))) { - ref.node.anchor = ref.anchor; - } - else { - const error = new Error('Failed to resolve repeated object (this should not happen)'); - error.source = source; - throw error; - } - } - }, - sourceObjects - }; -} - -exports.anchorIsValid = anchorIsValid; -exports.anchorNames = anchorNames; -exports.createNodeAnchors = createNodeAnchors; -exports.findNewAnchor = findNewAnchor; - - -/***/ }), - -/***/ 9833: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -/** - * Applies the JSON.parse reviver algorithm as defined in the ECMA-262 spec, - * in section 24.5.1.1 "Runtime Semantics: InternalizeJSONProperty" of the - * 2021 edition: https://tc39.es/ecma262/#sec-json.parse - * - * Includes extensions for handling Map and Set objects. 
- */ -function applyReviver(reviver, obj, key, val) { - if (val && typeof val === 'object') { - if (Array.isArray(val)) { - for (let i = 0, len = val.length; i < len; ++i) { - const v0 = val[i]; - const v1 = applyReviver(reviver, val, String(i), v0); - if (v1 === undefined) - delete val[i]; - else if (v1 !== v0) - val[i] = v1; - } - } - else if (val instanceof Map) { - for (const k of Array.from(val.keys())) { - const v0 = val.get(k); - const v1 = applyReviver(reviver, val, k, v0); - if (v1 === undefined) - val.delete(k); - else if (v1 !== v0) - val.set(k, v1); - } - } - else if (val instanceof Set) { - for (const v0 of Array.from(val)) { - const v1 = applyReviver(reviver, val, v0, v0); - if (v1 === undefined) - val.delete(v0); - else if (v1 !== v0) { - val.delete(v0); - val.add(v1); - } - } - } - else { - for (const [k, v0] of Object.entries(val)) { - const v1 = applyReviver(reviver, val, k, v0); - if (v1 === undefined) - delete val[k]; - else if (v1 !== v0) - val[k] = v1; - } - } - } - return reviver.call(obj, key, val); -} - -exports.applyReviver = applyReviver; - - -/***/ }), - -/***/ 9807: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Alias = __nccwpck_require__(9712); -var Node = __nccwpck_require__(9752); -var Scalar = __nccwpck_require__(8160); - -const defaultTagPrefix = 'tag:yaml.org,2002:'; -function findTagObject(value, tagName, tags) { - if (tagName) { - const match = tags.filter(t => t.tag === tagName); - const tagObj = match.find(t => !t.format) ?? match[0]; - if (!tagObj) - throw new Error(`Tag ${tagName} not found`); - return tagObj; - } - return tags.find(t => t.identify?.(value) && !t.format); -} -function createNode(value, tagName, ctx) { - if (Node.isDocument(value)) - value = value.contents; - if (Node.isNode(value)) - return value; - if (Node.isPair(value)) { - const map = ctx.schema[Node.MAP].createNode?.(ctx.schema, null, ctx); - map.items.push(value); - return map; - } - if (value instanceof String || - value instanceof Number || - value instanceof Boolean || - (typeof BigInt === 'function' && value instanceof BigInt) // not supported everywhere - ) { - // https://tc39.es/ecma262/#sec-serializejsonproperty - value = value.valueOf(); - } - const { aliasDuplicateObjects, onAnchor, onTagObj, schema, sourceObjects } = ctx; - // Detect duplicate references to the same object & use Alias nodes for all - // after first. The `ref` wrapper allows for circular references to resolve. - let ref = undefined; - if (aliasDuplicateObjects && value && typeof value === 'object') { - ref = sourceObjects.get(value); - if (ref) { - if (!ref.anchor) - ref.anchor = onAnchor(value); - return new Alias.Alias(ref.anchor); - } - else { - ref = { anchor: null, node: null }; - sourceObjects.set(value, ref); - } - } - if (tagName?.startsWith('!!')) - tagName = defaultTagPrefix + tagName.slice(2); - let tagObj = findTagObject(value, tagName, schema.tags); - if (!tagObj) { - if (value && typeof value.toJSON === 'function') { - // eslint-disable-next-line @typescript-eslint/no-unsafe-call - value = value.toJSON(); - } - if (!value || typeof value !== 'object') { - const node = new Scalar.Scalar(value); - if (ref) - ref.node = node; - return node; - } - tagObj = - value instanceof Map - ? schema[Node.MAP] - : Symbol.iterator in Object(value) - ? schema[Node.SEQ] - : schema[Node.MAP]; - } - if (onTagObj) { - onTagObj(tagObj); - delete ctx.onTagObj; - } - const node = tagObj?.createNode - ? 
tagObj.createNode(ctx.schema, value, ctx) - : new Scalar.Scalar(value); - if (tagName) - node.tag = tagName; - if (ref) - ref.node = node; - return node; -} - -exports.createNode = createNode; - - -/***/ }), - -/***/ 8729: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); -var visit = __nccwpck_require__(8234); - -const escapeChars = { - '!': '%21', - ',': '%2C', - '[': '%5B', - ']': '%5D', - '{': '%7B', - '}': '%7D' -}; -const escapeTagName = (tn) => tn.replace(/[!,[\]{}]/g, ch => escapeChars[ch]); -class Directives { - constructor(yaml, tags) { - /** - * The directives-end/doc-start marker `---`. If `null`, a marker may still be - * included in the document's stringified representation. - */ - this.docStart = null; - /** The doc-end marker `...`. */ - this.docEnd = false; - this.yaml = Object.assign({}, Directives.defaultYaml, yaml); - this.tags = Object.assign({}, Directives.defaultTags, tags); - } - clone() { - const copy = new Directives(this.yaml, this.tags); - copy.docStart = this.docStart; - return copy; - } - /** - * During parsing, get a Directives instance for the current document and - * update the stream state according to the current version's spec. - */ - atDocument() { - const res = new Directives(this.yaml, this.tags); - switch (this.yaml.version) { - case '1.1': - this.atNextDocument = true; - break; - case '1.2': - this.atNextDocument = false; - this.yaml = { - explicit: Directives.defaultYaml.explicit, - version: '1.2' - }; - this.tags = Object.assign({}, Directives.defaultTags); - break; - } - return res; - } - /** - * @param onError - May be called even if the action was successful - * @returns `true` on success - */ - add(line, onError) { - if (this.atNextDocument) { - this.yaml = { explicit: Directives.defaultYaml.explicit, version: '1.1' }; - this.tags = Object.assign({}, Directives.defaultTags); - this.atNextDocument = false; - } - const parts = line.trim().split(/[ \t]+/); - const name = parts.shift(); - switch (name) { - case '%TAG': { - if (parts.length !== 2) { - onError(0, '%TAG directive should contain exactly two parts'); - if (parts.length < 2) - return false; - } - const [handle, prefix] = parts; - this.tags[handle] = prefix; - return true; - } - case '%YAML': { - this.yaml.explicit = true; - if (parts.length !== 1) { - onError(0, '%YAML directive should contain exactly one part'); - return false; - } - const [version] = parts; - if (version === '1.1' || version === '1.2') { - this.yaml.version = version; - return true; - } - else { - const isValid = /^\d+\.\d+$/.test(version); - onError(6, `Unsupported YAML version ${version}`, isValid); - return false; - } - } - default: - onError(0, `Unknown directive ${name}`, true); - return false; - } - } - /** - * Resolves a tag, matching handles to those defined in %TAG directives. - * - * @returns Resolved tag, which may also be the non-specific tag `'!'` or a - * `'!local'` tag, or `null` if unresolvable. - */ - tagName(source, onError) { - if (source === '!') - return '!'; // non-specific tag - if (source[0] !== '!') { - onError(`Not a valid tag: ${source}`); - return null; - } - if (source[1] === '<') { - const verbatim = source.slice(2, -1); - if (verbatim === '!' 
|| verbatim === '!!') { - onError(`Verbatim tags aren't resolved, so ${source} is invalid.`); - return null; - } - if (source[source.length - 1] !== '>') - onError('Verbatim tags must end with a >'); - return verbatim; - } - const [, handle, suffix] = source.match(/^(.*!)([^!]*)$/); - if (!suffix) - onError(`The ${source} tag has no suffix`); - const prefix = this.tags[handle]; - if (prefix) - return prefix + decodeURIComponent(suffix); - if (handle === '!') - return source; // local tag - onError(`Could not resolve tag: ${source}`); - return null; - } - /** - * Given a fully resolved tag, returns its printable string form, - * taking into account current tag prefixes and defaults. - */ - tagString(tag) { - for (const [handle, prefix] of Object.entries(this.tags)) { - if (tag.startsWith(prefix)) - return handle + escapeTagName(tag.substring(prefix.length)); - } - return tag[0] === '!' ? tag : `!<${tag}>`; - } - toString(doc) { - const lines = this.yaml.explicit - ? [`%YAML ${this.yaml.version || '1.2'}`] - : []; - const tagEntries = Object.entries(this.tags); - let tagNames; - if (doc && tagEntries.length > 0 && Node.isNode(doc.contents)) { - const tags = {}; - visit.visit(doc.contents, (_key, node) => { - if (Node.isNode(node) && node.tag) - tags[node.tag] = true; - }); - tagNames = Object.keys(tags); - } - else - tagNames = []; - for (const [handle, prefix] of tagEntries) { - if (handle === '!!' && prefix === 'tag:yaml.org,2002:') - continue; - if (!doc || tagNames.some(tn => tn.startsWith(prefix))) - lines.push(`%TAG ${handle} ${prefix}`); - } - return lines.join('\n'); - } -} -Directives.defaultYaml = { explicit: false, version: '1.2' }; -Directives.defaultTags = { '!!': 'tag:yaml.org,2002:' }; - -exports.Directives = Directives; - - -/***/ }), - -/***/ 3305: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -class YAMLError extends Error { - constructor(name, pos, code, message) { - super(); - this.name = name; - this.code = code; - this.message = message; - this.pos = pos; - } -} -class YAMLParseError extends YAMLError { - constructor(pos, code, message) { - super('YAMLParseError', pos, code, message); - } -} -class YAMLWarning extends YAMLError { - constructor(pos, code, message) { - super('YAMLWarning', pos, code, message); - } -} -const prettifyError = (src, lc) => (error) => { - if (error.pos[0] === -1) - return; - error.linePos = error.pos.map(pos => lc.linePos(pos)); - const { line, col } = error.linePos[0]; - error.message += ` at line ${line}, column ${col}`; - let ci = col - 1; - let lineStr = src - .substring(lc.lineStarts[line - 1], lc.lineStarts[line]) - .replace(/[\n\r]+$/, ''); - // Trim to max 80 chars, keeping col position near the middle - if (ci >= 60 && lineStr.length > 80) { - const trimStart = Math.min(ci - 39, lineStr.length - 79); - lineStr = '…' + lineStr.substring(trimStart); - ci -= trimStart - 1; - } - if (lineStr.length > 80) - lineStr = lineStr.substring(0, 79) + '…'; - // Include previous line in context if pointing at line start - if (line > 1 && /^ *$/.test(lineStr.substring(0, ci))) { - // Regexp won't match if start is trimmed - let prev = src.substring(lc.lineStarts[line - 2], lc.lineStarts[line - 1]); - if (prev.length > 80) - prev = prev.substring(0, 79) + '…\n'; - lineStr = prev + lineStr; - } - if (/[^ ]/.test(lineStr)) { - let count = 1; - const end = error.linePos[1]; - if (end && end.line === line && end.col > col) { - count = Math.min(end.col - col, 80 - ci); - } - const pointer = ' '.repeat(ci) + '^'.repeat(count); - 
error.message += `:\n\n${lineStr}\n${pointer}\n`; - } -}; - -exports.YAMLError = YAMLError; -exports.YAMLParseError = YAMLParseError; -exports.YAMLWarning = YAMLWarning; -exports.prettifyError = prettifyError; - - -/***/ }), - -/***/ 5065: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var composer = __nccwpck_require__(9386); -var Document = __nccwpck_require__(1785); -var Schema = __nccwpck_require__(7425); -var errors = __nccwpck_require__(3305); -var Alias = __nccwpck_require__(9712); -var Node = __nccwpck_require__(9752); -var Pair = __nccwpck_require__(3497); -var Scalar = __nccwpck_require__(8160); -var YAMLMap = __nccwpck_require__(6761); -var YAMLSeq = __nccwpck_require__(3810); -var cst = __nccwpck_require__(9726); -var lexer = __nccwpck_require__(7263); -var lineCounter = __nccwpck_require__(5748); -var parser = __nccwpck_require__(6466); -var publicApi = __nccwpck_require__(9191); -var visit = __nccwpck_require__(8234); - - - -exports.Composer = composer.Composer; -exports.Document = Document.Document; -exports.Schema = Schema.Schema; -exports.YAMLError = errors.YAMLError; -exports.YAMLParseError = errors.YAMLParseError; -exports.YAMLWarning = errors.YAMLWarning; -exports.Alias = Alias.Alias; -exports.isAlias = Node.isAlias; -exports.isCollection = Node.isCollection; -exports.isDocument = Node.isDocument; -exports.isMap = Node.isMap; -exports.isNode = Node.isNode; -exports.isPair = Node.isPair; -exports.isScalar = Node.isScalar; -exports.isSeq = Node.isSeq; -exports.Pair = Pair.Pair; -exports.Scalar = Scalar.Scalar; -exports.YAMLMap = YAMLMap.YAMLMap; -exports.YAMLSeq = YAMLSeq.YAMLSeq; -exports.CST = cst; -exports.Lexer = lexer.Lexer; -exports.LineCounter = lineCounter.LineCounter; -exports.Parser = parser.Parser; -exports.parse = publicApi.parse; -exports.parseAllDocuments = publicApi.parseAllDocuments; -exports.parseDocument = publicApi.parseDocument; -exports.stringify = publicApi.stringify; -exports.visit = visit.visit; -exports.visitAsync = visit.visitAsync; - - -/***/ }), - -/***/ 469: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -function debug(logLevel, ...messages) { - if (logLevel === 'debug') - console.log(...messages); -} -function warn(logLevel, warning) { - if (logLevel === 'debug' || logLevel === 'warn') { - if (typeof process !== 'undefined' && process.emitWarning) - process.emitWarning(warning); - else - console.warn(warning); - } -} - -exports.debug = debug; -exports.warn = warn; - - -/***/ }), - -/***/ 9712: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var anchors = __nccwpck_require__(584); -var visit = __nccwpck_require__(8234); -var Node = __nccwpck_require__(9752); - -class Alias extends Node.NodeBase { - constructor(source) { - super(Node.ALIAS); - this.source = source; - Object.defineProperty(this, 'tag', { - set() { - throw new Error('Alias nodes cannot have tags'); - } - }); - } - /** - * Resolve the value of this alias within `doc`, finding the last - * instance of the `source` anchor before this node. 
- */ - resolve(doc) { - let found = undefined; - visit.visit(doc, { - Node: (_key, node) => { - if (node === this) - return visit.visit.BREAK; - if (node.anchor === this.source) - found = node; - } - }); - return found; - } - toJSON(_arg, ctx) { - if (!ctx) - return { source: this.source }; - const { anchors, doc, maxAliasCount } = ctx; - const source = this.resolve(doc); - if (!source) { - const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`; - throw new ReferenceError(msg); - } - const data = anchors.get(source); - /* istanbul ignore if */ - if (!data || data.res === undefined) { - const msg = 'This should not happen: Alias anchor was not resolved?'; - throw new ReferenceError(msg); - } - if (maxAliasCount >= 0) { - data.count += 1; - if (data.aliasCount === 0) - data.aliasCount = getAliasCount(doc, source, anchors); - if (data.count * data.aliasCount > maxAliasCount) { - const msg = 'Excessive alias count indicates a resource exhaustion attack'; - throw new ReferenceError(msg); - } - } - return data.res; - } - toString(ctx, _onComment, _onChompKeep) { - const src = `*${this.source}`; - if (ctx) { - anchors.anchorIsValid(this.source); - if (ctx.options.verifyAliasOrder && !ctx.anchors.has(this.source)) { - const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`; - throw new Error(msg); - } - if (ctx.implicitKey) - return `${src} `; - } - return src; - } -} -function getAliasCount(doc, node, anchors) { - if (Node.isAlias(node)) { - const source = node.resolve(doc); - const anchor = anchors && source && anchors.get(source); - return anchor ? anchor.count * anchor.aliasCount : 0; - } - else if (Node.isCollection(node)) { - let count = 0; - for (const item of node.items) { - const c = getAliasCount(doc, item, anchors); - if (c > count) - count = c; - } - return count; - } - else if (Node.isPair(node)) { - const kc = getAliasCount(doc, node.key, anchors); - const vc = getAliasCount(doc, node.value, anchors); - return Math.max(kc, vc); - } - return 1; -} - -exports.Alias = Alias; - - -/***/ }), - -/***/ 9797: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var createNode = __nccwpck_require__(9807); -var Node = __nccwpck_require__(9752); - -function collectionFromPath(schema, path, value) { - let v = value; - for (let i = path.length - 1; i >= 0; --i) { - const k = path[i]; - if (typeof k === 'number' && Number.isInteger(k) && k >= 0) { - const a = []; - a[k] = v; - v = a; - } - else { - v = new Map([[k, v]]); - } - } - return createNode.createNode(v, undefined, { - aliasDuplicateObjects: false, - keepUndefined: false, - onAnchor: () => { - throw new Error('This should not happen, please report a bug.'); - }, - schema, - sourceObjects: new Map() - }); -} -// Type guard is intentionally a little wrong so as to be more useful, -// as it does not cover untypable empty non-string iterables (e.g. []). -const isEmptyPath = (path) => path == null || - (typeof path === 'object' && !!path[Symbol.iterator]().next().done); -class Collection extends Node.NodeBase { - constructor(type, schema) { - super(type); - Object.defineProperty(this, 'schema', { - value: schema, - configurable: true, - enumerable: false, - writable: true - }); - } - /** - * Create a copy of this collection. 
- * - * @param schema - If defined, overwrites the original's schema - */ - clone(schema) { - const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this)); - if (schema) - copy.schema = schema; - copy.items = copy.items.map(it => Node.isNode(it) || Node.isPair(it) ? it.clone(schema) : it); - if (this.range) - copy.range = this.range.slice(); - return copy; - } - /** - * Adds a value to the collection. For `!!map` and `!!omap` the value must - * be a Pair instance or a `{ key, value }` object, which may not have a key - * that already exists in the map. - */ - addIn(path, value) { - if (isEmptyPath(path)) - this.add(value); - else { - const [key, ...rest] = path; - const node = this.get(key, true); - if (Node.isCollection(node)) - node.addIn(rest, value); - else if (node === undefined && this.schema) - this.set(key, collectionFromPath(this.schema, rest, value)); - else - throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`); - } - } - /** - * Removes a value from the collection. - * @returns `true` if the item was found and removed. - */ - deleteIn(path) { - const [key, ...rest] = path; - if (rest.length === 0) - return this.delete(key); - const node = this.get(key, true); - if (Node.isCollection(node)) - return node.deleteIn(rest); - else - throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`); - } - /** - * Returns item at `key`, or `undefined` if not found. By default unwraps - * scalar values from their surrounding node; to disable set `keepScalar` to - * `true` (collections are always returned intact). - */ - getIn(path, keepScalar) { - const [key, ...rest] = path; - const node = this.get(key, true); - if (rest.length === 0) - return !keepScalar && Node.isScalar(node) ? node.value : node; - else - return Node.isCollection(node) ? node.getIn(rest, keepScalar) : undefined; - } - hasAllNullValues(allowScalar) { - return this.items.every(node => { - if (!Node.isPair(node)) - return false; - const n = node.value; - return (n == null || - (allowScalar && - Node.isScalar(n) && - n.value == null && - !n.commentBefore && - !n.comment && - !n.tag)); - }); - } - /** - * Checks if the collection includes a value with the key `key`. - */ - hasIn(path) { - const [key, ...rest] = path; - if (rest.length === 0) - return this.has(key); - const node = this.get(key, true); - return Node.isCollection(node) ? node.hasIn(rest) : false; - } - /** - * Sets a value in this collection. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. - */ - setIn(path, value) { - const [key, ...rest] = path; - if (rest.length === 0) { - this.set(key, value); - } - else { - const node = this.get(key, true); - if (Node.isCollection(node)) - node.setIn(rest, value); - else if (node === undefined && this.schema) - this.set(key, collectionFromPath(this.schema, rest, value)); - else - throw new Error(`Expected YAML collection at ${key}. 
Remaining path: ${rest}`); - } - } -} -Collection.maxFlowStringSingleLineLength = 60; - -exports.Collection = Collection; -exports.collectionFromPath = collectionFromPath; -exports.isEmptyPath = isEmptyPath; - - -/***/ }), - -/***/ 9752: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -const ALIAS = Symbol.for('yaml.alias'); -const DOC = Symbol.for('yaml.document'); -const MAP = Symbol.for('yaml.map'); -const PAIR = Symbol.for('yaml.pair'); -const SCALAR = Symbol.for('yaml.scalar'); -const SEQ = Symbol.for('yaml.seq'); -const NODE_TYPE = Symbol.for('yaml.node.type'); -const isAlias = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === ALIAS; -const isDocument = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === DOC; -const isMap = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === MAP; -const isPair = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === PAIR; -const isScalar = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SCALAR; -const isSeq = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SEQ; -function isCollection(node) { - if (node && typeof node === 'object') - switch (node[NODE_TYPE]) { - case MAP: - case SEQ: - return true; - } - return false; -} -function isNode(node) { - if (node && typeof node === 'object') - switch (node[NODE_TYPE]) { - case ALIAS: - case MAP: - case SCALAR: - case SEQ: - return true; - } - return false; -} -const hasAnchor = (node) => (isScalar(node) || isCollection(node)) && !!node.anchor; -class NodeBase { - constructor(type) { - Object.defineProperty(this, NODE_TYPE, { value: type }); - } - /** Create a copy of this node. */ - clone() { - const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this)); - if (this.range) - copy.range = this.range.slice(); - return copy; - } -} - -exports.ALIAS = ALIAS; -exports.DOC = DOC; -exports.MAP = MAP; -exports.NODE_TYPE = NODE_TYPE; -exports.NodeBase = NodeBase; -exports.PAIR = PAIR; -exports.SCALAR = SCALAR; -exports.SEQ = SEQ; -exports.hasAnchor = hasAnchor; -exports.isAlias = isAlias; -exports.isCollection = isCollection; -exports.isDocument = isDocument; -exports.isMap = isMap; -exports.isNode = isNode; -exports.isPair = isPair; -exports.isScalar = isScalar; -exports.isSeq = isSeq; - - -/***/ }), - -/***/ 3497: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var createNode = __nccwpck_require__(9807); -var stringifyPair = __nccwpck_require__(10); -var addPairToJSMap = __nccwpck_require__(5193); -var Node = __nccwpck_require__(9752); - -function createPair(key, value, ctx) { - const k = createNode.createNode(key, undefined, ctx); - const v = createNode.createNode(value, undefined, ctx); - return new Pair(k, v); -} -class Pair { - constructor(key, value = null) { - Object.defineProperty(this, Node.NODE_TYPE, { value: Node.PAIR }); - this.key = key; - this.value = value; - } - clone(schema) { - let { key, value } = this; - if (Node.isNode(key)) - key = key.clone(schema); - if (Node.isNode(value)) - value = value.clone(schema); - return new Pair(key, value); - } - toJSON(_, ctx) { - const pair = ctx?.mapAsMap ? new Map() : {}; - return addPairToJSMap.addPairToJSMap(ctx, pair, this); - } - toString(ctx, onComment, onChompKeep) { - return ctx?.doc - ? 
stringifyPair.stringifyPair(this, ctx, onComment, onChompKeep) - : JSON.stringify(this); - } -} - -exports.Pair = Pair; -exports.createPair = createPair; - - -/***/ }), - -/***/ 8160: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); -var toJS = __nccwpck_require__(8842); - -const isScalarValue = (value) => !value || (typeof value !== 'function' && typeof value !== 'object'); -class Scalar extends Node.NodeBase { - constructor(value) { - super(Node.SCALAR); - this.value = value; - } - toJSON(arg, ctx) { - return ctx?.keep ? this.value : toJS.toJS(this.value, arg, ctx); - } - toString() { - return String(this.value); - } -} -Scalar.BLOCK_FOLDED = 'BLOCK_FOLDED'; -Scalar.BLOCK_LITERAL = 'BLOCK_LITERAL'; -Scalar.PLAIN = 'PLAIN'; -Scalar.QUOTE_DOUBLE = 'QUOTE_DOUBLE'; -Scalar.QUOTE_SINGLE = 'QUOTE_SINGLE'; - -exports.Scalar = Scalar; -exports.isScalarValue = isScalarValue; - - -/***/ }), - -/***/ 6761: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var stringifyCollection = __nccwpck_require__(3541); -var addPairToJSMap = __nccwpck_require__(5193); -var Collection = __nccwpck_require__(9797); -var Node = __nccwpck_require__(9752); -var Pair = __nccwpck_require__(3497); -var Scalar = __nccwpck_require__(8160); - -function findPair(items, key) { - const k = Node.isScalar(key) ? key.value : key; - for (const it of items) { - if (Node.isPair(it)) { - if (it.key === key || it.key === k) - return it; - if (Node.isScalar(it.key) && it.key.value === k) - return it; - } - } - return undefined; -} -class YAMLMap extends Collection.Collection { - constructor(schema) { - super(Node.MAP, schema); - this.items = []; - } - static get tagName() { - return 'tag:yaml.org,2002:map'; - } - /** - * Adds a value to the collection. - * - * @param overwrite - If not set `true`, using a key that is already in the - * collection will throw. Otherwise, overwrites the previous value. - */ - add(pair, overwrite) { - let _pair; - if (Node.isPair(pair)) - _pair = pair; - else if (!pair || typeof pair !== 'object' || !('key' in pair)) { - // In TypeScript, this never happens. - _pair = new Pair.Pair(pair, pair?.value); - } - else - _pair = new Pair.Pair(pair.key, pair.value); - const prev = findPair(this.items, _pair.key); - const sortEntries = this.schema?.sortMapEntries; - if (prev) { - if (!overwrite) - throw new Error(`Key ${_pair.key} already set`); - // For scalars, keep the old node & its comments and anchors - if (Node.isScalar(prev.value) && Scalar.isScalarValue(_pair.value)) - prev.value.value = _pair.value; - else - prev.value = _pair.value; - } - else if (sortEntries) { - const i = this.items.findIndex(item => sortEntries(_pair, item) < 0); - if (i === -1) - this.items.push(_pair); - else - this.items.splice(i, 0, _pair); - } - else { - this.items.push(_pair); - } - } - delete(key) { - const it = findPair(this.items, key); - if (!it) - return false; - const del = this.items.splice(this.items.indexOf(it), 1); - return del.length > 0; - } - get(key, keepScalar) { - const it = findPair(this.items, key); - const node = it?.value; - return (!keepScalar && Node.isScalar(node) ? node.value : node) ?? 
undefined; - } - has(key) { - return !!findPair(this.items, key); - } - set(key, value) { - this.add(new Pair.Pair(key, value), true); - } - /** - * @param ctx - Conversion context, originally set in Document#toJS() - * @param {Class} Type - If set, forces the returned collection type - * @returns Instance of Type, Map, or Object - */ - toJSON(_, ctx, Type) { - const map = Type ? new Type() : ctx?.mapAsMap ? new Map() : {}; - if (ctx?.onCreate) - ctx.onCreate(map); - for (const item of this.items) - addPairToJSMap.addPairToJSMap(ctx, map, item); - return map; - } - toString(ctx, onComment, onChompKeep) { - if (!ctx) - return JSON.stringify(this); - for (const item of this.items) { - if (!Node.isPair(item)) - throw new Error(`Map items must all be pairs; found ${JSON.stringify(item)} instead`); - } - if (!ctx.allNullValues && this.hasAllNullValues(false)) - ctx = Object.assign({}, ctx, { allNullValues: true }); - return stringifyCollection.stringifyCollection(this, ctx, { - blockItemPrefix: '', - flowChars: { start: '{', end: '}' }, - itemIndent: ctx.indent || '', - onChompKeep, - onComment - }); - } -} - -exports.YAMLMap = YAMLMap; -exports.findPair = findPair; - - -/***/ }), - -/***/ 3810: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var stringifyCollection = __nccwpck_require__(3541); -var Collection = __nccwpck_require__(9797); -var Node = __nccwpck_require__(9752); -var Scalar = __nccwpck_require__(8160); -var toJS = __nccwpck_require__(8842); - -class YAMLSeq extends Collection.Collection { - constructor(schema) { - super(Node.SEQ, schema); - this.items = []; - } - static get tagName() { - return 'tag:yaml.org,2002:seq'; - } - add(value) { - this.items.push(value); - } - /** - * Removes a value from the collection. - * - * `key` must contain a representation of an integer for this to succeed. - * It may be wrapped in a `Scalar`. - * - * @returns `true` if the item was found and removed. - */ - delete(key) { - const idx = asItemIndex(key); - if (typeof idx !== 'number') - return false; - const del = this.items.splice(idx, 1); - return del.length > 0; - } - get(key, keepScalar) { - const idx = asItemIndex(key); - if (typeof idx !== 'number') - return undefined; - const it = this.items[idx]; - return !keepScalar && Node.isScalar(it) ? it.value : it; - } - /** - * Checks if the collection includes a value with the key `key`. - * - * `key` must contain a representation of an integer for this to succeed. - * It may be wrapped in a `Scalar`. - */ - has(key) { - const idx = asItemIndex(key); - return typeof idx === 'number' && idx < this.items.length; - } - /** - * Sets a value in this collection. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. - * - * If `key` does not contain a representation of an integer, this will throw. - * It may be wrapped in a `Scalar`. 
- */ - set(key, value) { - const idx = asItemIndex(key); - if (typeof idx !== 'number') - throw new Error(`Expected a valid index, not ${key}.`); - const prev = this.items[idx]; - if (Node.isScalar(prev) && Scalar.isScalarValue(value)) - prev.value = value; - else - this.items[idx] = value; - } - toJSON(_, ctx) { - const seq = []; - if (ctx?.onCreate) - ctx.onCreate(seq); - let i = 0; - for (const item of this.items) - seq.push(toJS.toJS(item, String(i++), ctx)); - return seq; - } - toString(ctx, onComment, onChompKeep) { - if (!ctx) - return JSON.stringify(this); - return stringifyCollection.stringifyCollection(this, ctx, { - blockItemPrefix: '- ', - flowChars: { start: '[', end: ']' }, - itemIndent: (ctx.indent || '') + ' ', - onChompKeep, - onComment - }); - } -} -function asItemIndex(key) { - let idx = Node.isScalar(key) ? key.value : key; - if (idx && typeof idx === 'string') - idx = Number(idx); - return typeof idx === 'number' && Number.isInteger(idx) && idx >= 0 - ? idx - : null; -} - -exports.YAMLSeq = YAMLSeq; - - -/***/ }), - -/***/ 5193: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var log = __nccwpck_require__(469); -var stringify = __nccwpck_require__(1922); -var Node = __nccwpck_require__(9752); -var Scalar = __nccwpck_require__(8160); -var toJS = __nccwpck_require__(8842); - -const MERGE_KEY = '<<'; -function addPairToJSMap(ctx, map, { key, value }) { - if (ctx?.doc.schema.merge && isMergeKey(key)) { - value = Node.isAlias(value) ? value.resolve(ctx.doc) : value; - if (Node.isSeq(value)) - for (const it of value.items) - mergeToJSMap(ctx, map, it); - else if (Array.isArray(value)) - for (const it of value) - mergeToJSMap(ctx, map, it); - else - mergeToJSMap(ctx, map, value); - } - else { - const jsKey = toJS.toJS(key, '', ctx); - if (map instanceof Map) { - map.set(jsKey, toJS.toJS(value, jsKey, ctx)); - } - else if (map instanceof Set) { - map.add(jsKey); - } - else { - const stringKey = stringifyKey(key, jsKey, ctx); - const jsValue = toJS.toJS(value, stringKey, ctx); - if (stringKey in map) - Object.defineProperty(map, stringKey, { - value: jsValue, - writable: true, - enumerable: true, - configurable: true - }); - else - map[stringKey] = jsValue; - } - } - return map; -} -const isMergeKey = (key) => key === MERGE_KEY || - (Node.isScalar(key) && - key.value === MERGE_KEY && - (!key.type || key.type === Scalar.Scalar.PLAIN)); -// If the value associated with a merge key is a single mapping node, each of -// its key/value pairs is inserted into the current mapping, unless the key -// already exists in it. If the value associated with the merge key is a -// sequence, then this sequence is expected to contain mapping nodes and each -// of these nodes is merged in turn according to its order in the sequence. -// Keys in mapping nodes earlier in the sequence override keys specified in -// later mapping nodes. -- http://yaml.org/type/merge.html -function mergeToJSMap(ctx, map, value) { - const source = ctx && Node.isAlias(value) ? 
value.resolve(ctx.doc) : value; - if (!Node.isMap(source)) - throw new Error('Merge sources must be maps or map aliases'); - const srcMap = source.toJSON(null, ctx, Map); - for (const [key, value] of srcMap) { - if (map instanceof Map) { - if (!map.has(key)) - map.set(key, value); - } - else if (map instanceof Set) { - map.add(key); - } - else if (!Object.prototype.hasOwnProperty.call(map, key)) { - Object.defineProperty(map, key, { - value, - writable: true, - enumerable: true, - configurable: true - }); - } - } - return map; -} -function stringifyKey(key, jsKey, ctx) { - if (jsKey === null) - return ''; - if (typeof jsKey !== 'object') - return String(jsKey); - if (Node.isNode(key) && ctx && ctx.doc) { - const strCtx = stringify.createStringifyContext(ctx.doc, {}); - strCtx.anchors = new Set(); - for (const node of ctx.anchors.keys()) - strCtx.anchors.add(node.anchor); - strCtx.inFlow = true; - strCtx.inStringifyKey = true; - const strKey = key.toString(strCtx); - if (!ctx.mapKeyWarned) { - let jsonStr = JSON.stringify(strKey); - if (jsonStr.length > 40) - jsonStr = jsonStr.substring(0, 36) + '..."'; - log.warn(ctx.doc.options.logLevel, `Keys with collection values will be stringified due to JS Object restrictions: ${jsonStr}. Set mapAsMap: true to use object keys.`); - ctx.mapKeyWarned = true; - } - return strKey; - } - return JSON.stringify(jsKey); -} - -exports.addPairToJSMap = addPairToJSMap; - - -/***/ }), - -/***/ 8842: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); - -/** - * Recursively convert any node or its contents to native JavaScript - * - * @param value - The input value - * @param arg - If `value` defines a `toJSON()` method, use this - * as its first argument - * @param ctx - Conversion context, originally set in Document#toJS(). If - * `{ keep: true }` is not set, output should be suitable for JSON - * stringification. - */ -function toJS(value, arg, ctx) { - // eslint-disable-next-line @typescript-eslint/no-unsafe-return - if (Array.isArray(value)) - return value.map((v, i) => toJS(v, String(i), ctx)); - if (value && typeof value.toJSON === 'function') { - // eslint-disable-next-line @typescript-eslint/no-unsafe-call - if (!ctx || !Node.hasAnchor(value)) - return value.toJSON(arg, ctx); - const data = { aliasCount: 0, count: 1, res: undefined }; - ctx.anchors.set(value, data); - ctx.onCreate = res => { - data.res = res; - delete ctx.onCreate; - }; - const res = value.toJSON(arg, ctx); - if (ctx.onCreate) - ctx.onCreate(res); - return res; - } - if (typeof value === 'bigint' && !ctx?.keep) - return Number(value); - return value; -} - -exports.toJS = toJS; - - -/***/ }), - -/***/ 4000: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var resolveBlockScalar = __nccwpck_require__(8172); -var resolveFlowScalar = __nccwpck_require__(6390); -var errors = __nccwpck_require__(3305); -var stringifyString = __nccwpck_require__(6084); - -function resolveAsScalar(token, strict = true, onError) { - if (token) { - const _onError = (pos, code, message) => { - const offset = typeof pos === 'number' ? pos : Array.isArray(pos) ? 
pos[0] : pos.offset; - if (onError) - onError(offset, code, message); - else - throw new errors.YAMLParseError([offset, offset + 1], code, message); - }; - switch (token.type) { - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': - return resolveFlowScalar.resolveFlowScalar(token, strict, _onError); - case 'block-scalar': - return resolveBlockScalar.resolveBlockScalar(token, strict, _onError); - } - } - return null; -} -/** - * Create a new scalar token with `value` - * - * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`, - * as this function does not support any schema operations and won't check for such conflicts. - * - * @param value The string representation of the value, which will have its content properly indented. - * @param context.end Comments and whitespace after the end of the value, or after the block scalar header. If undefined, a newline will be added. - * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value. - * @param context.indent The indent level of the token. - * @param context.inFlow Is this scalar within a flow collection? This may affect the resolved type of the token's value. - * @param context.offset The offset position of the token. - * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`. - */ -function createScalarToken(value, context) { - const { implicitKey = false, indent, inFlow = false, offset = -1, type = 'PLAIN' } = context; - const source = stringifyString.stringifyString({ type, value }, { - implicitKey, - indent: indent > 0 ? ' '.repeat(indent) : '', - inFlow, - options: { blockQuote: true, lineWidth: -1 } - }); - const end = context.end ?? [ - { type: 'newline', offset: -1, indent, source: '\n' } - ]; - switch (source[0]) { - case '|': - case '>': { - const he = source.indexOf('\n'); - const head = source.substring(0, he); - const body = source.substring(he + 1) + '\n'; - const props = [ - { type: 'block-scalar-header', offset, indent, source: head } - ]; - if (!addEndtoBlockProps(props, end)) - props.push({ type: 'newline', offset: -1, indent, source: '\n' }); - return { type: 'block-scalar', offset, indent, props, source: body }; - } - case '"': - return { type: 'double-quoted-scalar', offset, indent, source, end }; - case "'": - return { type: 'single-quoted-scalar', offset, indent, source, end }; - default: - return { type: 'scalar', offset, indent, source, end }; - } -} -/** - * Set the value of `token` to the given string `value`, overwriting any previous contents and type that it may have. - * - * Best efforts are made to retain any comments previously associated with the `token`, - * though all contents within a collection's `items` will be overwritten. - * - * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`, - * as this function does not support any schema operations and won't check for such conflicts. - * - * @param token Any token. If it does not include an `indent` value, the value will be stringified as if it were an implicit key. - * @param value The string representation of the value, which will have its content properly indented. - * @param context.afterKey In most cases, values after a key should have an additional level of indentation. 
- * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value. - * @param context.inFlow Being within a flow collection may affect the resolved type of the token's value. - * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`. - */ -function setScalarValue(token, value, context = {}) { - let { afterKey = false, implicitKey = false, inFlow = false, type } = context; - let indent = 'indent' in token ? token.indent : null; - if (afterKey && typeof indent === 'number') - indent += 2; - if (!type) - switch (token.type) { - case 'single-quoted-scalar': - type = 'QUOTE_SINGLE'; - break; - case 'double-quoted-scalar': - type = 'QUOTE_DOUBLE'; - break; - case 'block-scalar': { - const header = token.props[0]; - if (header.type !== 'block-scalar-header') - throw new Error('Invalid block scalar header'); - type = header.source[0] === '>' ? 'BLOCK_FOLDED' : 'BLOCK_LITERAL'; - break; - } - default: - type = 'PLAIN'; - } - const source = stringifyString.stringifyString({ type, value }, { - implicitKey: implicitKey || indent === null, - indent: indent !== null && indent > 0 ? ' '.repeat(indent) : '', - inFlow, - options: { blockQuote: true, lineWidth: -1 } - }); - switch (source[0]) { - case '|': - case '>': - setBlockScalarValue(token, source); - break; - case '"': - setFlowScalarValue(token, source, 'double-quoted-scalar'); - break; - case "'": - setFlowScalarValue(token, source, 'single-quoted-scalar'); - break; - default: - setFlowScalarValue(token, source, 'scalar'); - } -} -function setBlockScalarValue(token, source) { - const he = source.indexOf('\n'); - const head = source.substring(0, he); - const body = source.substring(he + 1) + '\n'; - if (token.type === 'block-scalar') { - const header = token.props[0]; - if (header.type !== 'block-scalar-header') - throw new Error('Invalid block scalar header'); - header.source = head; - token.source = body; - } - else { - const { offset } = token; - const indent = 'indent' in token ? token.indent : -1; - const props = [ - { type: 'block-scalar-header', offset, indent, source: head } - ]; - if (!addEndtoBlockProps(props, 'end' in token ? 
token.end : undefined)) - props.push({ type: 'newline', offset: -1, indent, source: '\n' }); - for (const key of Object.keys(token)) - if (key !== 'type' && key !== 'offset') - delete token[key]; - Object.assign(token, { type: 'block-scalar', indent, props, source: body }); - } -} -/** @returns `true` if last token is a newline */ -function addEndtoBlockProps(props, end) { - if (end) - for (const st of end) - switch (st.type) { - case 'space': - case 'comment': - props.push(st); - break; - case 'newline': - props.push(st); - return true; - } - return false; -} -function setFlowScalarValue(token, source, type) { - switch (token.type) { - case 'scalar': - case 'double-quoted-scalar': - case 'single-quoted-scalar': - token.type = type; - token.source = source; - break; - case 'block-scalar': { - const end = token.props.slice(1); - let oa = source.length; - if (token.props[0].type === 'block-scalar-header') - oa -= token.props[0].source.length; - for (const tok of end) - tok.offset += oa; - delete token.props; - Object.assign(token, { type, source, end }); - break; - } - case 'block-map': - case 'block-seq': { - const offset = token.offset + source.length; - const nl = { type: 'newline', offset, indent: token.indent, source: '\n' }; - delete token.items; - Object.assign(token, { type, source, end: [nl] }); - break; - } - default: { - const indent = 'indent' in token ? token.indent : -1; - const end = 'end' in token && Array.isArray(token.end) - ? token.end.filter(st => st.type === 'space' || - st.type === 'comment' || - st.type === 'newline') - : []; - for (const key of Object.keys(token)) - if (key !== 'type' && key !== 'offset') - delete token[key]; - Object.assign(token, { type, indent, source, end }); - } - } -} - -exports.createScalarToken = createScalarToken; -exports.resolveAsScalar = resolveAsScalar; -exports.setScalarValue = setScalarValue; - - -/***/ }), - -/***/ 28: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -/** - * Stringify a CST document, token, or collection item - * - * Fair warning: This applies no validation whatsoever, and - * simply concatenates the sources in their logical order. - */ -const stringify = (cst) => 'type' in cst ? 
stringifyToken(cst) : stringifyItem(cst); -function stringifyToken(token) { - switch (token.type) { - case 'block-scalar': { - let res = ''; - for (const tok of token.props) - res += stringifyToken(tok); - return res + token.source; - } - case 'block-map': - case 'block-seq': { - let res = ''; - for (const item of token.items) - res += stringifyItem(item); - return res; - } - case 'flow-collection': { - let res = token.start.source; - for (const item of token.items) - res += stringifyItem(item); - for (const st of token.end) - res += st.source; - return res; - } - case 'document': { - let res = stringifyItem(token); - if (token.end) - for (const st of token.end) - res += st.source; - return res; - } - default: { - let res = token.source; - if ('end' in token && token.end) - for (const st of token.end) - res += st.source; - return res; - } - } -} -function stringifyItem({ start, key, sep, value }) { - let res = ''; - for (const st of start) - res += st.source; - if (key) - res += stringifyToken(key); - if (sep) - for (const st of sep) - res += st.source; - if (value) - res += stringifyToken(value); - return res; -} - -exports.stringify = stringify; - - -/***/ }), - -/***/ 1621: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -const BREAK = Symbol('break visit'); -const SKIP = Symbol('skip children'); -const REMOVE = Symbol('remove item'); -/** - * Apply a visitor to a CST document or item. - * - * Walks through the tree (depth-first) starting from the root, calling a - * `visitor` function with two arguments when entering each item: - * - `item`: The current item, which included the following members: - * - `start: SourceToken[]` – Source tokens before the key or value, - * possibly including its anchor or tag. - * - `key?: Token | null` – Set for pair values. May then be `null`, if - * the key before the `:` separator is empty. - * - `sep?: SourceToken[]` – Source tokens between the key and the value, - * which should include the `:` map value indicator if `value` is set. - * - `value?: Token` – The value of a sequence item, or of a map pair. - * - `path`: The steps from the root to the current node, as an array of - * `['key' | 'value', number]` tuples. - * - * The return value of the visitor may be used to control the traversal: - * - `undefined` (default): Do nothing and continue - * - `visit.SKIP`: Do not visit the children of this token, continue with - * next sibling - * - `visit.BREAK`: Terminate traversal completely - * - `visit.REMOVE`: Remove the current item, then continue with the next one - * - `number`: Set the index of the next step. This is useful especially if - * the index of the current token has changed. - * - `function`: Define the next visitor for this item. After the original - * visitor is called on item entry, next visitors are called after handling - * a non-empty `key` and when exiting the item. - */ -function visit(cst, visitor) { - if ('type' in cst && cst.type === 'document') - cst = { start: cst.start, value: cst.value }; - _visit(Object.freeze([]), cst, visitor); -} -// Without the `as symbol` casts, TS declares these in the `visit` -// namespace using `var`, but then complains about that because -// `unique symbol` must be `const`. 
-/** Terminate visit traversal completely */ -visit.BREAK = BREAK; -/** Do not visit the children of the current item */ -visit.SKIP = SKIP; -/** Remove the current item */ -visit.REMOVE = REMOVE; -/** Find the item at `path` from `cst` as the root */ -visit.itemAtPath = (cst, path) => { - let item = cst; - for (const [field, index] of path) { - const tok = item?.[field]; - if (tok && 'items' in tok) { - item = tok.items[index]; - } - else - return undefined; - } - return item; -}; -/** - * Get the immediate parent collection of the item at `path` from `cst` as the root. - * - * Throws an error if the collection is not found, which should never happen if the item itself exists. - */ -visit.parentCollection = (cst, path) => { - const parent = visit.itemAtPath(cst, path.slice(0, -1)); - const field = path[path.length - 1][0]; - const coll = parent?.[field]; - if (coll && 'items' in coll) - return coll; - throw new Error('Parent collection not found'); -}; -function _visit(path, item, visitor) { - let ctrl = visitor(item, path); - if (typeof ctrl === 'symbol') - return ctrl; - for (const field of ['key', 'value']) { - const token = item[field]; - if (token && 'items' in token) { - for (let i = 0; i < token.items.length; ++i) { - const ci = _visit(Object.freeze(path.concat([[field, i]])), token.items[i], visitor); - if (typeof ci === 'number') - i = ci - 1; - else if (ci === BREAK) - return BREAK; - else if (ci === REMOVE) { - token.items.splice(i, 1); - i -= 1; - } - } - if (typeof ctrl === 'function' && field === 'key') - ctrl = ctrl(item, path); - } - } - return typeof ctrl === 'function' ? ctrl(item, path) : ctrl; -} - -exports.visit = visit; - - -/***/ }), - -/***/ 9726: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var cstScalar = __nccwpck_require__(4000); -var cstStringify = __nccwpck_require__(28); -var cstVisit = __nccwpck_require__(1621); - -/** The byte order mark */ -const BOM = '\u{FEFF}'; -/** Start of doc-mode */ -const DOCUMENT = '\x02'; // C0: Start of Text -/** Unexpected end of flow-mode */ -const FLOW_END = '\x18'; // C0: Cancel -/** Next token is a scalar value */ -const SCALAR = '\x1f'; // C0: Unit Separator -/** @returns `true` if `token` is a flow or block collection */ -const isCollection = (token) => !!token && 'items' in token; -/** @returns `true` if `token` is a flow or block scalar; not an alias */ -const isScalar = (token) => !!token && - (token.type === 'scalar' || - token.type === 'single-quoted-scalar' || - token.type === 'double-quoted-scalar' || - token.type === 'block-scalar'); -/* istanbul ignore next */ -/** Get a printable representation of a lexer token */ -function prettyToken(token) { - switch (token) { - case BOM: - return ''; - case DOCUMENT: - return ''; - case FLOW_END: - return ''; - case SCALAR: - return ''; - default: - return JSON.stringify(token); - } -} -/** Identify the type of a lexer token. May return `null` for unknown tokens. 
*/ -function tokenType(source) { - switch (source) { - case BOM: - return 'byte-order-mark'; - case DOCUMENT: - return 'doc-mode'; - case FLOW_END: - return 'flow-error-end'; - case SCALAR: - return 'scalar'; - case '---': - return 'doc-start'; - case '...': - return 'doc-end'; - case '': - case '\n': - case '\r\n': - return 'newline'; - case '-': - return 'seq-item-ind'; - case '?': - return 'explicit-key-ind'; - case ':': - return 'map-value-ind'; - case '{': - return 'flow-map-start'; - case '}': - return 'flow-map-end'; - case '[': - return 'flow-seq-start'; - case ']': - return 'flow-seq-end'; - case ',': - return 'comma'; - } - switch (source[0]) { - case ' ': - case '\t': - return 'space'; - case '#': - return 'comment'; - case '%': - return 'directive-line'; - case '*': - return 'alias'; - case '&': - return 'anchor'; - case '!': - return 'tag'; - case "'": - return 'single-quoted-scalar'; - case '"': - return 'double-quoted-scalar'; - case '|': - case '>': - return 'block-scalar-header'; - } - return null; -} - -exports.createScalarToken = cstScalar.createScalarToken; -exports.resolveAsScalar = cstScalar.resolveAsScalar; -exports.setScalarValue = cstScalar.setScalarValue; -exports.stringify = cstStringify.stringify; -exports.visit = cstVisit.visit; -exports.BOM = BOM; -exports.DOCUMENT = DOCUMENT; -exports.FLOW_END = FLOW_END; -exports.SCALAR = SCALAR; -exports.isCollection = isCollection; -exports.isScalar = isScalar; -exports.prettyToken = prettyToken; -exports.tokenType = tokenType; - - -/***/ }), - -/***/ 7263: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var cst = __nccwpck_require__(9726); - -/* -START -> stream - -stream - directive -> line-end -> stream - indent + line-end -> stream - [else] -> line-start - -line-end - comment -> line-end - newline -> . - input-end -> END - -line-start - doc-start -> doc - doc-end -> stream - [else] -> indent -> block-start - -block-start - seq-item-start -> block-start - explicit-key-start -> block-start - map-value-start -> block-start - [else] -> doc - -doc - line-end -> line-start - spaces -> doc - anchor -> doc - tag -> doc - flow-start -> flow -> doc - flow-end -> error -> doc - seq-item-start -> error -> doc - explicit-key-start -> error -> doc - map-value-start -> doc - alias -> doc - quote-start -> quoted-scalar -> doc - block-scalar-header -> line-end -> block-scalar(min) -> line-start - [else] -> plain-scalar(false, min) -> doc - -flow - line-end -> flow - spaces -> flow - anchor -> flow - tag -> flow - flow-start -> flow -> flow - flow-end -> . - seq-item-start -> error -> flow - explicit-key-start -> flow - map-value-start -> flow - alias -> flow - quote-start -> quoted-scalar -> flow - comma -> flow - [else] -> plain-scalar(true, 0) -> flow - -quoted-scalar - quote-end -> . - [else] -> quoted-scalar - -block-scalar(min) - newline + peek(indent < min) -> . - [else] -> block-scalar(min) - -plain-scalar(is-flow, min) - scalar-end(is-flow) -> . - peek(newline + (indent < min)) -> . 
- [else] -> plain-scalar(min) -*/ -function isEmpty(ch) { - switch (ch) { - case undefined: - case ' ': - case '\n': - case '\r': - case '\t': - return true; - default: - return false; - } -} -const hexDigits = '0123456789ABCDEFabcdef'.split(''); -const tagChars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-#;/?:@&=+$_.!~*'()".split(''); -const invalidFlowScalarChars = ',[]{}'.split(''); -const invalidAnchorChars = ' ,[]{}\n\r\t'.split(''); -const isNotAnchorChar = (ch) => !ch || invalidAnchorChars.includes(ch); -/** - * Splits an input string into lexical tokens, i.e. smaller strings that are - * easily identifiable by `tokens.tokenType()`. - * - * Lexing starts always in a "stream" context. Incomplete input may be buffered - * until a complete token can be emitted. - * - * In addition to slices of the original input, the following control characters - * may also be emitted: - * - * - `\x02` (Start of Text): A document starts with the next token - * - `\x18` (Cancel): Unexpected end of flow-mode (indicates an error) - * - `\x1f` (Unit Separator): Next token is a scalar value - * - `\u{FEFF}` (Byte order mark): Emitted separately outside documents - */ -class Lexer { - constructor() { - /** - * Flag indicating whether the end of the current buffer marks the end of - * all input - */ - this.atEnd = false; - /** - * Explicit indent set in block scalar header, as an offset from the current - * minimum indent, so e.g. set to 1 from a header `|2+`. Set to -1 if not - * explicitly set. - */ - this.blockScalarIndent = -1; - /** - * Block scalars that include a + (keep) chomping indicator in their header - * include trailing empty lines, which are otherwise excluded from the - * scalar's contents. - */ - this.blockScalarKeep = false; - /** Current input */ - this.buffer = ''; - /** - * Flag noting whether the map value indicator : can immediately follow this - * node within a flow context. - */ - this.flowKey = false; - /** Count of surrounding flow collection levels. */ - this.flowLevel = 0; - /** - * Minimum level of indentation required for next lines to be parsed as a - * part of the current scalar value. - */ - this.indentNext = 0; - /** Indentation level of the current line. */ - this.indentValue = 0; - /** Position of the next \n character. */ - this.lineEndPos = null; - /** Stores the state of the lexer if reaching the end of incpomplete input */ - this.next = null; - /** A pointer to `buffer`; the current position of the lexer. */ - this.pos = 0; - } - /** - * Generate YAML tokens from the `source` string. If `incomplete`, - * a part of the last line may be left as a buffer for the next call. - * - * @returns A generator of lexical tokens - */ - *lex(source, incomplete = false) { - if (source) { - this.buffer = this.buffer ? this.buffer + source : source; - this.lineEndPos = null; - } - this.atEnd = !incomplete; - let next = this.next ?? 
'stream'; - while (next && (incomplete || this.hasChars(1))) - next = yield* this.parseNext(next); - } - atLineEnd() { - let i = this.pos; - let ch = this.buffer[i]; - while (ch === ' ' || ch === '\t') - ch = this.buffer[++i]; - if (!ch || ch === '#' || ch === '\n') - return true; - if (ch === '\r') - return this.buffer[i + 1] === '\n'; - return false; - } - charAt(n) { - return this.buffer[this.pos + n]; - } - continueScalar(offset) { - let ch = this.buffer[offset]; - if (this.indentNext > 0) { - let indent = 0; - while (ch === ' ') - ch = this.buffer[++indent + offset]; - if (ch === '\r') { - const next = this.buffer[indent + offset + 1]; - if (next === '\n' || (!next && !this.atEnd)) - return offset + indent + 1; - } - return ch === '\n' || indent >= this.indentNext || (!ch && !this.atEnd) - ? offset + indent - : -1; - } - if (ch === '-' || ch === '.') { - const dt = this.buffer.substr(offset, 3); - if ((dt === '---' || dt === '...') && isEmpty(this.buffer[offset + 3])) - return -1; - } - return offset; - } - getLine() { - let end = this.lineEndPos; - if (typeof end !== 'number' || (end !== -1 && end < this.pos)) { - end = this.buffer.indexOf('\n', this.pos); - this.lineEndPos = end; - } - if (end === -1) - return this.atEnd ? this.buffer.substring(this.pos) : null; - if (this.buffer[end - 1] === '\r') - end -= 1; - return this.buffer.substring(this.pos, end); - } - hasChars(n) { - return this.pos + n <= this.buffer.length; - } - setNext(state) { - this.buffer = this.buffer.substring(this.pos); - this.pos = 0; - this.lineEndPos = null; - this.next = state; - return null; - } - peek(n) { - return this.buffer.substr(this.pos, n); - } - *parseNext(next) { - switch (next) { - case 'stream': - return yield* this.parseStream(); - case 'line-start': - return yield* this.parseLineStart(); - case 'block-start': - return yield* this.parseBlockStart(); - case 'doc': - return yield* this.parseDocument(); - case 'flow': - return yield* this.parseFlowCollection(); - case 'quoted-scalar': - return yield* this.parseQuotedScalar(); - case 'block-scalar': - return yield* this.parseBlockScalar(); - case 'plain-scalar': - return yield* this.parsePlainScalar(); - } - } - *parseStream() { - let line = this.getLine(); - if (line === null) - return this.setNext('stream'); - if (line[0] === cst.BOM) { - yield* this.pushCount(1); - line = line.substring(1); - } - if (line[0] === '%') { - let dirEnd = line.length; - const cs = line.indexOf('#'); - if (cs !== -1) { - const ch = line[cs - 1]; - if (ch === ' ' || ch === '\t') - dirEnd = cs - 1; - } - while (true) { - const ch = line[dirEnd - 1]; - if (ch === ' ' || ch === '\t') - dirEnd -= 1; - else - break; - } - const n = (yield* this.pushCount(dirEnd)) + (yield* this.pushSpaces(true)); - yield* this.pushCount(line.length - n); // possible comment - this.pushNewline(); - return 'stream'; - } - if (this.atLineEnd()) { - const sp = yield* this.pushSpaces(true); - yield* this.pushCount(line.length - sp); - yield* this.pushNewline(); - return 'stream'; - } - yield cst.DOCUMENT; - return yield* this.parseLineStart(); - } - *parseLineStart() { - const ch = this.charAt(0); - if (!ch && !this.atEnd) - return this.setNext('line-start'); - if (ch === '-' || ch === '.') { - if (!this.atEnd && !this.hasChars(4)) - return this.setNext('line-start'); - const s = this.peek(3); - if (s === '---' && isEmpty(this.charAt(3))) { - yield* this.pushCount(3); - this.indentValue = 0; - this.indentNext = 0; - return 'doc'; - } - else if (s === '...' 
&& isEmpty(this.charAt(3))) { - yield* this.pushCount(3); - return 'stream'; - } - } - this.indentValue = yield* this.pushSpaces(false); - if (this.indentNext > this.indentValue && !isEmpty(this.charAt(1))) - this.indentNext = this.indentValue; - return yield* this.parseBlockStart(); - } - *parseBlockStart() { - const [ch0, ch1] = this.peek(2); - if (!ch1 && !this.atEnd) - return this.setNext('block-start'); - if ((ch0 === '-' || ch0 === '?' || ch0 === ':') && isEmpty(ch1)) { - const n = (yield* this.pushCount(1)) + (yield* this.pushSpaces(true)); - this.indentNext = this.indentValue + 1; - this.indentValue += n; - return yield* this.parseBlockStart(); - } - return 'doc'; - } - *parseDocument() { - yield* this.pushSpaces(true); - const line = this.getLine(); - if (line === null) - return this.setNext('doc'); - let n = yield* this.pushIndicators(); - switch (line[n]) { - case '#': - yield* this.pushCount(line.length - n); - // fallthrough - case undefined: - yield* this.pushNewline(); - return yield* this.parseLineStart(); - case '{': - case '[': - yield* this.pushCount(1); - this.flowKey = false; - this.flowLevel = 1; - return 'flow'; - case '}': - case ']': - // this is an error - yield* this.pushCount(1); - return 'doc'; - case '*': - yield* this.pushUntil(isNotAnchorChar); - return 'doc'; - case '"': - case "'": - return yield* this.parseQuotedScalar(); - case '|': - case '>': - n += yield* this.parseBlockScalarHeader(); - n += yield* this.pushSpaces(true); - yield* this.pushCount(line.length - n); - yield* this.pushNewline(); - return yield* this.parseBlockScalar(); - default: - return yield* this.parsePlainScalar(); - } - } - *parseFlowCollection() { - let nl, sp; - let indent = -1; - do { - nl = yield* this.pushNewline(); - if (nl > 0) { - sp = yield* this.pushSpaces(false); - this.indentValue = indent = sp; - } - else { - sp = 0; - } - sp += yield* this.pushSpaces(true); - } while (nl + sp > 0); - const line = this.getLine(); - if (line === null) - return this.setNext('flow'); - if ((indent !== -1 && indent < this.indentNext && line[0] !== '#') || - (indent === 0 && - (line.startsWith('---') || line.startsWith('...')) && - isEmpty(line[3]))) { - // Allowing for the terminal ] or } at the same (rather than greater) - // indent level as the initial [ or { is technically invalid, but - // failing here would be surprising to users. - const atFlowEndMarker = indent === this.indentNext - 1 && - this.flowLevel === 1 && - (line[0] === ']' || line[0] === '}'); - if (!atFlowEndMarker) { - // this is an error - this.flowLevel = 0; - yield cst.FLOW_END; - return yield* this.parseLineStart(); - } - } - let n = 0; - while (line[n] === ',') { - n += yield* this.pushCount(1); - n += yield* this.pushSpaces(true); - this.flowKey = false; - } - n += yield* this.pushIndicators(); - switch (line[n]) { - case undefined: - return 'flow'; - case '#': - yield* this.pushCount(line.length - n); - return 'flow'; - case '{': - case '[': - yield* this.pushCount(1); - this.flowKey = false; - this.flowLevel += 1; - return 'flow'; - case '}': - case ']': - yield* this.pushCount(1); - this.flowKey = true; - this.flowLevel -= 1; - return this.flowLevel ? 
'flow' : 'doc'; - case '*': - yield* this.pushUntil(isNotAnchorChar); - return 'flow'; - case '"': - case "'": - this.flowKey = true; - return yield* this.parseQuotedScalar(); - case ':': { - const next = this.charAt(1); - if (this.flowKey || isEmpty(next) || next === ',') { - this.flowKey = false; - yield* this.pushCount(1); - yield* this.pushSpaces(true); - return 'flow'; - } - } - // fallthrough - default: - this.flowKey = false; - return yield* this.parsePlainScalar(); - } - } - *parseQuotedScalar() { - const quote = this.charAt(0); - let end = this.buffer.indexOf(quote, this.pos + 1); - if (quote === "'") { - while (end !== -1 && this.buffer[end + 1] === "'") - end = this.buffer.indexOf("'", end + 2); - } - else { - // double-quote - while (end !== -1) { - let n = 0; - while (this.buffer[end - 1 - n] === '\\') - n += 1; - if (n % 2 === 0) - break; - end = this.buffer.indexOf('"', end + 1); - } - } - // Only looking for newlines within the quotes - const qb = this.buffer.substring(0, end); - let nl = qb.indexOf('\n', this.pos); - if (nl !== -1) { - while (nl !== -1) { - const cs = this.continueScalar(nl + 1); - if (cs === -1) - break; - nl = qb.indexOf('\n', cs); - } - if (nl !== -1) { - // this is an error caused by an unexpected unindent - end = nl - (qb[nl - 1] === '\r' ? 2 : 1); - } - } - if (end === -1) { - if (!this.atEnd) - return this.setNext('quoted-scalar'); - end = this.buffer.length; - } - yield* this.pushToIndex(end + 1, false); - return this.flowLevel ? 'flow' : 'doc'; - } - *parseBlockScalarHeader() { - this.blockScalarIndent = -1; - this.blockScalarKeep = false; - let i = this.pos; - while (true) { - const ch = this.buffer[++i]; - if (ch === '+') - this.blockScalarKeep = true; - else if (ch > '0' && ch <= '9') - this.blockScalarIndent = Number(ch) - 1; - else if (ch !== '-') - break; - } - return yield* this.pushUntil(ch => isEmpty(ch) || ch === '#'); - } - *parseBlockScalar() { - let nl = this.pos - 1; // may be -1 if this.pos === 0 - let indent = 0; - let ch; - loop: for (let i = this.pos; (ch = this.buffer[i]); ++i) { - switch (ch) { - case ' ': - indent += 1; - break; - case '\n': - nl = i; - indent = 0; - break; - case '\r': { - const next = this.buffer[i + 1]; - if (!next && !this.atEnd) - return this.setNext('block-scalar'); - if (next === '\n') - break; - } // fallthrough - default: - break loop; - } - } - if (!ch && !this.atEnd) - return this.setNext('block-scalar'); - if (indent >= this.indentNext) { - if (this.blockScalarIndent === -1) - this.indentNext = indent; - else - this.indentNext += this.blockScalarIndent; - do { - const cs = this.continueScalar(nl + 1); - if (cs === -1) - break; - nl = this.buffer.indexOf('\n', cs); - } while (nl !== -1); - if (nl === -1) { - if (!this.atEnd) - return this.setNext('block-scalar'); - nl = this.buffer.length; - } - } - if (!this.blockScalarKeep) { - do { - let i = nl - 1; - let ch = this.buffer[i]; - if (ch === '\r') - ch = this.buffer[--i]; - const lastChar = i; // Drop the line if last char not more indented - while (ch === ' ' || ch === '\t') - ch = this.buffer[--i]; - if (ch === '\n' && i >= this.pos && i + 1 + indent > lastChar) - nl = i; - else - break; - } while (true); - } - yield cst.SCALAR; - yield* this.pushToIndex(nl + 1, true); - return yield* this.parseLineStart(); - } - *parsePlainScalar() { - const inFlow = this.flowLevel > 0; - let end = this.pos - 1; - let i = this.pos - 1; - let ch; - while ((ch = this.buffer[++i])) { - if (ch === ':') { - const next = this.buffer[i + 1]; - if (isEmpty(next) || 
(inFlow && next === ',')) - break; - end = i; - } - else if (isEmpty(ch)) { - let next = this.buffer[i + 1]; - if (ch === '\r') { - if (next === '\n') { - i += 1; - ch = '\n'; - next = this.buffer[i + 1]; - } - else - end = i; - } - if (next === '#' || (inFlow && invalidFlowScalarChars.includes(next))) - break; - if (ch === '\n') { - const cs = this.continueScalar(i + 1); - if (cs === -1) - break; - i = Math.max(i, cs - 2); // to advance, but still account for ' #' - } - } - else { - if (inFlow && invalidFlowScalarChars.includes(ch)) - break; - end = i; - } - } - if (!ch && !this.atEnd) - return this.setNext('plain-scalar'); - yield cst.SCALAR; - yield* this.pushToIndex(end + 1, true); - return inFlow ? 'flow' : 'doc'; - } - *pushCount(n) { - if (n > 0) { - yield this.buffer.substr(this.pos, n); - this.pos += n; - return n; - } - return 0; - } - *pushToIndex(i, allowEmpty) { - const s = this.buffer.slice(this.pos, i); - if (s) { - yield s; - this.pos += s.length; - return s.length; - } - else if (allowEmpty) - yield ''; - return 0; - } - *pushIndicators() { - switch (this.charAt(0)) { - case '!': - return ((yield* this.pushTag()) + - (yield* this.pushSpaces(true)) + - (yield* this.pushIndicators())); - case '&': - return ((yield* this.pushUntil(isNotAnchorChar)) + - (yield* this.pushSpaces(true)) + - (yield* this.pushIndicators())); - case '-': // this is an error - case '?': // this is an error outside flow collections - case ':': { - const inFlow = this.flowLevel > 0; - const ch1 = this.charAt(1); - if (isEmpty(ch1) || (inFlow && invalidFlowScalarChars.includes(ch1))) { - if (!inFlow) - this.indentNext = this.indentValue + 1; - else if (this.flowKey) - this.flowKey = false; - return ((yield* this.pushCount(1)) + - (yield* this.pushSpaces(true)) + - (yield* this.pushIndicators())); - } - } - } - return 0; - } - *pushTag() { - if (this.charAt(1) === '<') { - let i = this.pos + 2; - let ch = this.buffer[i]; - while (!isEmpty(ch) && ch !== '>') - ch = this.buffer[++i]; - return yield* this.pushToIndex(ch === '>' ? i + 1 : i, false); - } - else { - let i = this.pos + 1; - let ch = this.buffer[i]; - while (ch) { - if (tagChars.includes(ch)) - ch = this.buffer[++i]; - else if (ch === '%' && - hexDigits.includes(this.buffer[i + 1]) && - hexDigits.includes(this.buffer[i + 2])) { - ch = this.buffer[(i += 3)]; - } - else - break; - } - return yield* this.pushToIndex(i, false); - } - } - *pushNewline() { - const ch = this.buffer[this.pos]; - if (ch === '\n') - return yield* this.pushCount(1); - else if (ch === '\r' && this.charAt(1) === '\n') - return yield* this.pushCount(2); - else - return 0; - } - *pushSpaces(allowTabs) { - let i = this.pos - 1; - let ch; - do { - ch = this.buffer[++i]; - } while (ch === ' ' || (allowTabs && ch === '\t')); - const n = i - this.pos; - if (n > 0) { - yield this.buffer.substr(this.pos, n); - this.pos = i; - } - return n; - } - *pushUntil(test) { - let i = this.pos; - let ch = this.buffer[i]; - while (!test(ch)) - ch = this.buffer[++i]; - return yield* this.pushToIndex(i, false); - } -} - -exports.Lexer = Lexer; - - -/***/ }), - -/***/ 5748: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -/** - * Tracks newlines during parsing in order to provide an efficient API for - * determining the one-indexed `{ line, col }` position for any offset - * within the input. - */ -class LineCounter { - constructor() { - this.lineStarts = []; - /** - * Should be called in ascending order. 
Otherwise, call - * `lineCounter.lineStarts.sort()` before calling `linePos()`. - */ - this.addNewLine = (offset) => this.lineStarts.push(offset); - /** - * Performs a binary search and returns the 1-indexed { line, col } - * position of `offset`. If `line === 0`, `addNewLine` has never been - * called or `offset` is before the first known newline. - */ - this.linePos = (offset) => { - let low = 0; - let high = this.lineStarts.length; - while (low < high) { - const mid = (low + high) >> 1; // Math.floor((low + high) / 2) - if (this.lineStarts[mid] < offset) - low = mid + 1; - else - high = mid; - } - if (this.lineStarts[low] === offset) - return { line: low + 1, col: 1 }; - if (low === 0) - return { line: 0, col: offset }; - const start = this.lineStarts[low - 1]; - return { line: low, col: offset - start + 1 }; - }; - } -} - -exports.LineCounter = LineCounter; - - -/***/ }), - -/***/ 6466: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var cst = __nccwpck_require__(9726); -var lexer = __nccwpck_require__(7263); - -function includesToken(list, type) { - for (let i = 0; i < list.length; ++i) - if (list[i].type === type) - return true; - return false; -} -function findNonEmptyIndex(list) { - for (let i = 0; i < list.length; ++i) { - switch (list[i].type) { - case 'space': - case 'comment': - case 'newline': - break; - default: - return i; - } - } - return -1; -} -function isFlowToken(token) { - switch (token?.type) { - case 'alias': - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': - case 'flow-collection': - return true; - default: - return false; - } -} -function getPrevProps(parent) { - switch (parent.type) { - case 'document': - return parent.start; - case 'block-map': { - const it = parent.items[parent.items.length - 1]; - return it.sep ?? it.start; - } - case 'block-seq': - return parent.items[parent.items.length - 1].start; - /* istanbul ignore next should not happen */ - default: - return []; - } -} -/** Note: May modify input array */ -function getFirstKeyStartProps(prev) { - if (prev.length === 0) - return []; - let i = prev.length; - loop: while (--i >= 0) { - switch (prev[i].type) { - case 'doc-start': - case 'explicit-key-ind': - case 'map-value-ind': - case 'seq-item-ind': - case 'newline': - break loop; - } - } - while (prev[++i]?.type === 'space') { - /* loop */ - } - return prev.splice(i, prev.length); -} -function fixFlowSeqItems(fc) { - if (fc.start.type === 'flow-seq-start') { - for (const it of fc.items) { - if (it.sep && - !it.value && - !includesToken(it.start, 'explicit-key-ind') && - !includesToken(it.sep, 'map-value-ind')) { - if (it.key) - it.value = it.key; - delete it.key; - if (isFlowToken(it.value)) { - if (it.value.end) - Array.prototype.push.apply(it.value.end, it.sep); - else - it.value.end = it.sep; - } - else - Array.prototype.push.apply(it.start, it.sep); - delete it.sep; - } - } - } -} -/** - * A YAML concrete syntax tree (CST) parser - * - * ```ts - * const src: string = ... - * for (const token of new Parser().parse(src)) { - * // token: Token - * } - * ``` - * - * To use the parser with a user-provided lexer: - * - * ```ts - * function* parse(source: string, lexer: Lexer) { - * const parser = new Parser() - * for (const lexeme of lexer.lex(source)) - * yield* parser.next(lexeme) - * yield* parser.end() - * } - * - * const src: string = ... 
- * const lexer = new Lexer() - * for (const token of parse(src, lexer)) { - * // token: Token - * } - * ``` - */ -class Parser { - /** - * @param onNewLine - If defined, called separately with the start position of - * each new line (in `parse()`, including the start of input). - */ - constructor(onNewLine) { - /** If true, space and sequence indicators count as indentation */ - this.atNewLine = true; - /** If true, next token is a scalar value */ - this.atScalar = false; - /** Current indentation level */ - this.indent = 0; - /** Current offset since the start of parsing */ - this.offset = 0; - /** On the same line with a block map key */ - this.onKeyLine = false; - /** Top indicates the node that's currently being built */ - this.stack = []; - /** The source of the current token, set in parse() */ - this.source = ''; - /** The type of the current token, set in parse() */ - this.type = ''; - // Must be defined after `next()` - this.lexer = new lexer.Lexer(); - this.onNewLine = onNewLine; - } - /** - * Parse `source` as a YAML stream. - * If `incomplete`, a part of the last line may be left as a buffer for the next call. - * - * Errors are not thrown, but yielded as `{ type: 'error', message }` tokens. - * - * @returns A generator of tokens representing each directive, document, and other structure. - */ - *parse(source, incomplete = false) { - if (this.onNewLine && this.offset === 0) - this.onNewLine(0); - for (const lexeme of this.lexer.lex(source, incomplete)) - yield* this.next(lexeme); - if (!incomplete) - yield* this.end(); - } - /** - * Advance the parser by the `source` of one lexical token. - */ - *next(source) { - this.source = source; - if (process.env.LOG_TOKENS) - console.log('|', cst.prettyToken(source)); - if (this.atScalar) { - this.atScalar = false; - yield* this.step(); - this.offset += source.length; - return; - } - const type = cst.tokenType(source); - if (!type) { - const message = `Not a YAML token: ${source}`; - yield* this.pop({ type: 'error', offset: this.offset, message, source }); - this.offset += source.length; - } - else if (type === 'scalar') { - this.atNewLine = false; - this.atScalar = true; - this.type = 'scalar'; - } - else { - this.type = type; - yield* this.step(); - switch (type) { - case 'newline': - this.atNewLine = true; - this.indent = 0; - if (this.onNewLine) - this.onNewLine(this.offset + source.length); - break; - case 'space': - if (this.atNewLine && source[0] === ' ') - this.indent += source.length; - break; - case 'explicit-key-ind': - case 'map-value-ind': - case 'seq-item-ind': - if (this.atNewLine) - this.indent += source.length; - break; - case 'doc-mode': - case 'flow-error-end': - return; - default: - this.atNewLine = false; - } - this.offset += source.length; - } - } - /** Call at end of input to push out any remaining constructions */ - *end() { - while (this.stack.length > 0) - yield* this.pop(); - } - get sourceToken() { - const st = { - type: this.type, - offset: this.offset, - indent: this.indent, - source: this.source - }; - return st; - } - *step() { - const top = this.peek(1); - if (this.type === 'doc-end' && (!top || top.type !== 'doc-end')) { - while (this.stack.length > 0) - yield* this.pop(); - this.stack.push({ - type: 'doc-end', - offset: this.offset, - source: this.source - }); - return; - } - if (!top) - return yield* this.stream(); - switch (top.type) { - case 'document': - return yield* this.document(top); - case 'alias': - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': - return yield* 
this.scalar(top); - case 'block-scalar': - return yield* this.blockScalar(top); - case 'block-map': - return yield* this.blockMap(top); - case 'block-seq': - return yield* this.blockSequence(top); - case 'flow-collection': - return yield* this.flowCollection(top); - case 'doc-end': - return yield* this.documentEnd(top); - } - /* istanbul ignore next should not happen */ - yield* this.pop(); - } - peek(n) { - return this.stack[this.stack.length - n]; - } - *pop(error) { - const token = error ?? this.stack.pop(); - /* istanbul ignore if should not happen */ - if (!token) { - const message = 'Tried to pop an empty stack'; - yield { type: 'error', offset: this.offset, source: '', message }; - } - else if (this.stack.length === 0) { - yield token; - } - else { - const top = this.peek(1); - if (token.type === 'block-scalar') { - // Block scalars use their parent rather than header indent - token.indent = 'indent' in top ? top.indent : 0; - } - else if (token.type === 'flow-collection' && top.type === 'document') { - // Ignore all indent for top-level flow collections - token.indent = 0; - } - if (token.type === 'flow-collection') - fixFlowSeqItems(token); - switch (top.type) { - case 'document': - top.value = token; - break; - case 'block-scalar': - top.props.push(token); // error - break; - case 'block-map': { - const it = top.items[top.items.length - 1]; - if (it.value) { - top.items.push({ start: [], key: token, sep: [] }); - this.onKeyLine = true; - return; - } - else if (it.sep) { - it.value = token; - } - else { - Object.assign(it, { key: token, sep: [] }); - this.onKeyLine = !includesToken(it.start, 'explicit-key-ind'); - return; - } - break; - } - case 'block-seq': { - const it = top.items[top.items.length - 1]; - if (it.value) - top.items.push({ start: [], value: token }); - else - it.value = token; - break; - } - case 'flow-collection': { - const it = top.items[top.items.length - 1]; - if (!it || it.value) - top.items.push({ start: [], key: token, sep: [] }); - else if (it.sep) - it.value = token; - else - Object.assign(it, { key: token, sep: [] }); - return; - } - /* istanbul ignore next should not happen */ - default: - yield* this.pop(); - yield* this.pop(token); - } - if ((top.type === 'document' || - top.type === 'block-map' || - top.type === 'block-seq') && - (token.type === 'block-map' || token.type === 'block-seq')) { - const last = token.items[token.items.length - 1]; - if (last && - !last.sep && - !last.value && - last.start.length > 0 && - findNonEmptyIndex(last.start) === -1 && - (token.indent === 0 || - last.start.every(st => st.type !== 'comment' || st.indent < token.indent))) { - if (top.type === 'document') - top.end = last.start; - else - top.items.push({ start: last.start }); - token.items.splice(-1, 1); - } - } - } - } - *stream() { - switch (this.type) { - case 'directive-line': - yield { type: 'directive', offset: this.offset, source: this.source }; - return; - case 'byte-order-mark': - case 'space': - case 'comment': - case 'newline': - yield this.sourceToken; - return; - case 'doc-mode': - case 'doc-start': { - const doc = { - type: 'document', - offset: this.offset, - start: [] - }; - if (this.type === 'doc-start') - doc.start.push(this.sourceToken); - this.stack.push(doc); - return; - } - } - yield { - type: 'error', - offset: this.offset, - message: `Unexpected ${this.type} token in YAML stream`, - source: this.source - }; - } - *document(doc) { - if (doc.value) - return yield* this.lineEnd(doc); - switch (this.type) { - case 'doc-start': { - if 
(findNonEmptyIndex(doc.start) !== -1) { - yield* this.pop(); - yield* this.step(); - } - else - doc.start.push(this.sourceToken); - return; - } - case 'anchor': - case 'tag': - case 'space': - case 'comment': - case 'newline': - doc.start.push(this.sourceToken); - return; - } - const bv = this.startBlockValue(doc); - if (bv) - this.stack.push(bv); - else { - yield { - type: 'error', - offset: this.offset, - message: `Unexpected ${this.type} token in YAML document`, - source: this.source - }; - } - } - *scalar(scalar) { - if (this.type === 'map-value-ind') { - const prev = getPrevProps(this.peek(2)); - const start = getFirstKeyStartProps(prev); - let sep; - if (scalar.end) { - sep = scalar.end; - sep.push(this.sourceToken); - delete scalar.end; - } - else - sep = [this.sourceToken]; - const map = { - type: 'block-map', - offset: scalar.offset, - indent: scalar.indent, - items: [{ start, key: scalar, sep }] - }; - this.onKeyLine = true; - this.stack[this.stack.length - 1] = map; - } - else - yield* this.lineEnd(scalar); - } - *blockScalar(scalar) { - switch (this.type) { - case 'space': - case 'comment': - case 'newline': - scalar.props.push(this.sourceToken); - return; - case 'scalar': - scalar.source = this.source; - // block-scalar source includes trailing newline - this.atNewLine = true; - this.indent = 0; - if (this.onNewLine) { - let nl = this.source.indexOf('\n') + 1; - while (nl !== 0) { - this.onNewLine(this.offset + nl); - nl = this.source.indexOf('\n', nl) + 1; - } - } - yield* this.pop(); - break; - /* istanbul ignore next should not happen */ - default: - yield* this.pop(); - yield* this.step(); - } - } - *blockMap(map) { - const it = map.items[map.items.length - 1]; - // it.sep is true-ish if pair already has key or : separator - switch (this.type) { - case 'newline': - this.onKeyLine = false; - if (it.value) { - const end = 'end' in it.value ? it.value.end : undefined; - const last = Array.isArray(end) ? 
end[end.length - 1] : undefined; - if (last?.type === 'comment') - end?.push(this.sourceToken); - else - map.items.push({ start: [this.sourceToken] }); - } - else if (it.sep) { - it.sep.push(this.sourceToken); - } - else { - it.start.push(this.sourceToken); - } - return; - case 'space': - case 'comment': - if (it.value) { - map.items.push({ start: [this.sourceToken] }); - } - else if (it.sep) { - it.sep.push(this.sourceToken); - } - else { - if (this.atIndentedComment(it.start, map.indent)) { - const prev = map.items[map.items.length - 2]; - const end = prev?.value?.end; - if (Array.isArray(end)) { - Array.prototype.push.apply(end, it.start); - end.push(this.sourceToken); - map.items.pop(); - return; - } - } - it.start.push(this.sourceToken); - } - return; - } - if (this.indent >= map.indent) { - const atNextItem = !this.onKeyLine && this.indent === map.indent && it.sep; - // For empty nodes, assign newline-separated not indented empty tokens to following node - let start = []; - if (atNextItem && it.sep && !it.value) { - const nl = []; - for (let i = 0; i < it.sep.length; ++i) { - const st = it.sep[i]; - switch (st.type) { - case 'newline': - nl.push(i); - break; - case 'space': - break; - case 'comment': - if (st.indent > map.indent) - nl.length = 0; - break; - default: - nl.length = 0; - } - } - if (nl.length >= 2) - start = it.sep.splice(nl[1]); - } - switch (this.type) { - case 'anchor': - case 'tag': - if (atNextItem || it.value) { - start.push(this.sourceToken); - map.items.push({ start }); - this.onKeyLine = true; - } - else if (it.sep) { - it.sep.push(this.sourceToken); - } - else { - it.start.push(this.sourceToken); - } - return; - case 'explicit-key-ind': - if (!it.sep && !includesToken(it.start, 'explicit-key-ind')) { - it.start.push(this.sourceToken); - } - else if (atNextItem || it.value) { - start.push(this.sourceToken); - map.items.push({ start }); - } - else { - this.stack.push({ - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start: [this.sourceToken] }] - }); - } - this.onKeyLine = true; - return; - case 'map-value-ind': - if (includesToken(it.start, 'explicit-key-ind')) { - if (!it.sep) { - if (includesToken(it.start, 'newline')) { - Object.assign(it, { key: null, sep: [this.sourceToken] }); - } - else { - const start = getFirstKeyStartProps(it.start); - this.stack.push({ - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start, key: null, sep: [this.sourceToken] }] - }); - } - } - else if (it.value) { - map.items.push({ start: [], key: null, sep: [this.sourceToken] }); - } - else if (includesToken(it.sep, 'map-value-ind')) { - this.stack.push({ - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start, key: null, sep: [this.sourceToken] }] - }); - } - else if (isFlowToken(it.key) && - !includesToken(it.sep, 'newline')) { - const start = getFirstKeyStartProps(it.start); - const key = it.key; - const sep = it.sep; - sep.push(this.sourceToken); - // @ts-expect-error type guard is wrong here - delete it.key, delete it.sep; - this.stack.push({ - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start, key, sep }] - }); - } - else if (start.length > 0) { - // Not actually at next item - it.sep = it.sep.concat(start, this.sourceToken); - } - else { - it.sep.push(this.sourceToken); - } - } - else { - if (!it.sep) { - Object.assign(it, { key: null, sep: [this.sourceToken] }); - } - else if (it.value || atNextItem) { - map.items.push({ start, key: null, sep: 
[this.sourceToken] }); - } - else if (includesToken(it.sep, 'map-value-ind')) { - this.stack.push({ - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start: [], key: null, sep: [this.sourceToken] }] - }); - } - else { - it.sep.push(this.sourceToken); - } - } - this.onKeyLine = true; - return; - case 'alias': - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': { - const fs = this.flowScalar(this.type); - if (atNextItem || it.value) { - map.items.push({ start, key: fs, sep: [] }); - this.onKeyLine = true; - } - else if (it.sep) { - this.stack.push(fs); - } - else { - Object.assign(it, { key: fs, sep: [] }); - this.onKeyLine = true; - } - return; - } - default: { - const bv = this.startBlockValue(map); - if (bv) { - if (atNextItem && - bv.type !== 'block-seq' && - includesToken(it.start, 'explicit-key-ind')) { - map.items.push({ start }); - } - this.stack.push(bv); - return; - } - } - } - } - yield* this.pop(); - yield* this.step(); - } - *blockSequence(seq) { - const it = seq.items[seq.items.length - 1]; - switch (this.type) { - case 'newline': - if (it.value) { - const end = 'end' in it.value ? it.value.end : undefined; - const last = Array.isArray(end) ? end[end.length - 1] : undefined; - if (last?.type === 'comment') - end?.push(this.sourceToken); - else - seq.items.push({ start: [this.sourceToken] }); - } - else - it.start.push(this.sourceToken); - return; - case 'space': - case 'comment': - if (it.value) - seq.items.push({ start: [this.sourceToken] }); - else { - if (this.atIndentedComment(it.start, seq.indent)) { - const prev = seq.items[seq.items.length - 2]; - const end = prev?.value?.end; - if (Array.isArray(end)) { - Array.prototype.push.apply(end, it.start); - end.push(this.sourceToken); - seq.items.pop(); - return; - } - } - it.start.push(this.sourceToken); - } - return; - case 'anchor': - case 'tag': - if (it.value || this.indent <= seq.indent) - break; - it.start.push(this.sourceToken); - return; - case 'seq-item-ind': - if (this.indent !== seq.indent) - break; - if (it.value || includesToken(it.start, 'seq-item-ind')) - seq.items.push({ start: [this.sourceToken] }); - else - it.start.push(this.sourceToken); - return; - } - if (this.indent > seq.indent) { - const bv = this.startBlockValue(seq); - if (bv) { - this.stack.push(bv); - return; - } - } - yield* this.pop(); - yield* this.step(); - } - *flowCollection(fc) { - const it = fc.items[fc.items.length - 1]; - if (this.type === 'flow-error-end') { - let top; - do { - yield* this.pop(); - top = this.peek(1); - } while (top && top.type === 'flow-collection'); - } - else if (fc.end.length === 0) { - switch (this.type) { - case 'comma': - case 'explicit-key-ind': - if (!it || it.sep) - fc.items.push({ start: [this.sourceToken] }); - else - it.start.push(this.sourceToken); - return; - case 'map-value-ind': - if (!it || it.value) - fc.items.push({ start: [], key: null, sep: [this.sourceToken] }); - else if (it.sep) - it.sep.push(this.sourceToken); - else - Object.assign(it, { key: null, sep: [this.sourceToken] }); - return; - case 'space': - case 'comment': - case 'newline': - case 'anchor': - case 'tag': - if (!it || it.value) - fc.items.push({ start: [this.sourceToken] }); - else if (it.sep) - it.sep.push(this.sourceToken); - else - it.start.push(this.sourceToken); - return; - case 'alias': - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': { - const fs = this.flowScalar(this.type); - if (!it || it.value) - fc.items.push({ start: [], key: fs, sep: 
[] }); - else if (it.sep) - this.stack.push(fs); - else - Object.assign(it, { key: fs, sep: [] }); - return; - } - case 'flow-map-end': - case 'flow-seq-end': - fc.end.push(this.sourceToken); - return; - } - const bv = this.startBlockValue(fc); - /* istanbul ignore else should not happen */ - if (bv) - this.stack.push(bv); - else { - yield* this.pop(); - yield* this.step(); - } - } - else { - const parent = this.peek(2); - if (parent.type === 'block-map' && - ((this.type === 'map-value-ind' && parent.indent === fc.indent) || - (this.type === 'newline' && - !parent.items[parent.items.length - 1].sep))) { - yield* this.pop(); - yield* this.step(); - } - else if (this.type === 'map-value-ind' && - parent.type !== 'flow-collection') { - const prev = getPrevProps(parent); - const start = getFirstKeyStartProps(prev); - fixFlowSeqItems(fc); - const sep = fc.end.splice(1, fc.end.length); - sep.push(this.sourceToken); - const map = { - type: 'block-map', - offset: fc.offset, - indent: fc.indent, - items: [{ start, key: fc, sep }] - }; - this.onKeyLine = true; - this.stack[this.stack.length - 1] = map; - } - else { - yield* this.lineEnd(fc); - } - } - } - flowScalar(type) { - if (this.onNewLine) { - let nl = this.source.indexOf('\n') + 1; - while (nl !== 0) { - this.onNewLine(this.offset + nl); - nl = this.source.indexOf('\n', nl) + 1; - } - } - return { - type, - offset: this.offset, - indent: this.indent, - source: this.source - }; - } - startBlockValue(parent) { - switch (this.type) { - case 'alias': - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': - return this.flowScalar(this.type); - case 'block-scalar-header': - return { - type: 'block-scalar', - offset: this.offset, - indent: this.indent, - props: [this.sourceToken], - source: '' - }; - case 'flow-map-start': - case 'flow-seq-start': - return { - type: 'flow-collection', - offset: this.offset, - indent: this.indent, - start: this.sourceToken, - items: [], - end: [] - }; - case 'seq-item-ind': - return { - type: 'block-seq', - offset: this.offset, - indent: this.indent, - items: [{ start: [this.sourceToken] }] - }; - case 'explicit-key-ind': { - this.onKeyLine = true; - const prev = getPrevProps(parent); - const start = getFirstKeyStartProps(prev); - start.push(this.sourceToken); - return { - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start }] - }; - } - case 'map-value-ind': { - this.onKeyLine = true; - const prev = getPrevProps(parent); - const start = getFirstKeyStartProps(prev); - return { - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start, key: null, sep: [this.sourceToken] }] - }; - } - } - return null; - } - atIndentedComment(start, indent) { - if (this.type !== 'comment') - return false; - if (this.indent <= indent) - return false; - return start.every(st => st.type === 'newline' || st.type === 'space'); - } - *documentEnd(docEnd) { - if (this.type !== 'doc-mode') { - if (docEnd.end) - docEnd.end.push(this.sourceToken); - else - docEnd.end = [this.sourceToken]; - if (this.type === 'newline') - yield* this.pop(); - } - } - *lineEnd(token) { - switch (this.type) { - case 'comma': - case 'doc-start': - case 'doc-end': - case 'flow-seq-end': - case 'flow-map-end': - case 'map-value-ind': - yield* this.pop(); - yield* this.step(); - break; - case 'newline': - this.onKeyLine = false; - // fallthrough - case 'space': - case 'comment': - default: - // all other values are errors - if (token.end) - token.end.push(this.sourceToken); - else - 
token.end = [this.sourceToken]; - if (this.type === 'newline') - yield* this.pop(); - } - } -} - -exports.Parser = Parser; - - -/***/ }), - -/***/ 9191: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var composer = __nccwpck_require__(9386); -var Document = __nccwpck_require__(1785); -var errors = __nccwpck_require__(3305); -var log = __nccwpck_require__(469); -var lineCounter = __nccwpck_require__(5748); -var parser = __nccwpck_require__(6466); - -function parseOptions(options) { - const prettyErrors = options.prettyErrors !== false; - const lineCounter$1 = options.lineCounter || (prettyErrors && new lineCounter.LineCounter()) || null; - return { lineCounter: lineCounter$1, prettyErrors }; -} -/** - * Parse the input as a stream of YAML documents. - * - * Documents should be separated from each other by `...` or `---` marker lines. - * - * @returns If an empty `docs` array is returned, it will be of type - * EmptyStream and contain additional stream information. In - * TypeScript, you should use `'empty' in docs` as a type guard for it. - */ -function parseAllDocuments(source, options = {}) { - const { lineCounter, prettyErrors } = parseOptions(options); - const parser$1 = new parser.Parser(lineCounter?.addNewLine); - const composer$1 = new composer.Composer(options); - const docs = Array.from(composer$1.compose(parser$1.parse(source))); - if (prettyErrors && lineCounter) - for (const doc of docs) { - doc.errors.forEach(errors.prettifyError(source, lineCounter)); - doc.warnings.forEach(errors.prettifyError(source, lineCounter)); - } - if (docs.length > 0) - return docs; - return Object.assign([], { empty: true }, composer$1.streamInfo()); -} -/** Parse an input string into a single YAML.Document */ -function parseDocument(source, options = {}) { - const { lineCounter, prettyErrors } = parseOptions(options); - const parser$1 = new parser.Parser(lineCounter?.addNewLine); - const composer$1 = new composer.Composer(options); - // `doc` is always set by compose.end(true) at the very latest - let doc = null; - for (const _doc of composer$1.compose(parser$1.parse(source), true, source.length)) { - if (!doc) - doc = _doc; - else if (doc.options.logLevel !== 'silent') { - doc.errors.push(new errors.YAMLParseError(_doc.range.slice(0, 2), 'MULTIPLE_DOCS', 'Source contains multiple documents; please use YAML.parseAllDocuments()')); - break; - } - } - if (prettyErrors && lineCounter) { - doc.errors.forEach(errors.prettifyError(source, lineCounter)); - doc.warnings.forEach(errors.prettifyError(source, lineCounter)); - } - return doc; -} -function parse(src, reviver, options) { - let _reviver = undefined; - if (typeof reviver === 'function') { - _reviver = reviver; - } - else if (options === undefined && reviver && typeof reviver === 'object') { - options = reviver; - } - const doc = parseDocument(src, options); - if (!doc) - return null; - doc.warnings.forEach(warning => log.warn(doc.options.logLevel, warning)); - if (doc.errors.length > 0) { - if (doc.options.logLevel !== 'silent') - throw doc.errors[0]; - else - doc.errors = []; - } - return doc.toJS(Object.assign({ reviver: _reviver }, options)); -} -function stringify(value, replacer, options) { - let _replacer = null; - if (typeof replacer === 'function' || Array.isArray(replacer)) { - _replacer = replacer; - } - else if (options === undefined && replacer) { - options = replacer; - } - if (typeof options === 'string') - options = options.length; - if (typeof options === 'number') { - const indent = 
Math.round(options); - options = indent < 1 ? undefined : indent > 8 ? { indent: 8 } : { indent }; - } - if (value === undefined) { - const { keepUndefined } = options ?? replacer ?? {}; - if (!keepUndefined) - return undefined; - } - return new Document.Document(value, _replacer, options).toString(options); -} - -exports.parse = parse; -exports.parseAllDocuments = parseAllDocuments; -exports.parseDocument = parseDocument; -exports.stringify = stringify; - - -/***/ }), - -/***/ 7425: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); -var map = __nccwpck_require__(1400); -var seq = __nccwpck_require__(6698); -var string = __nccwpck_require__(1576); -var tags = __nccwpck_require__(2102); - -const sortMapEntriesByKey = (a, b) => a.key < b.key ? -1 : a.key > b.key ? 1 : 0; -class Schema { - constructor({ compat, customTags, merge, resolveKnownTags, schema, sortMapEntries, toStringDefaults }) { - this.compat = Array.isArray(compat) - ? tags.getTags(compat, 'compat') - : compat - ? tags.getTags(null, compat) - : null; - this.merge = !!merge; - this.name = (typeof schema === 'string' && schema) || 'core'; - this.knownTags = resolveKnownTags ? tags.coreKnownTags : {}; - this.tags = tags.getTags(customTags, this.name); - this.toStringOptions = toStringDefaults ?? null; - Object.defineProperty(this, Node.MAP, { value: map.map }); - Object.defineProperty(this, Node.SCALAR, { value: string.string }); - Object.defineProperty(this, Node.SEQ, { value: seq.seq }); - // Used by createMap() - this.sortMapEntries = - typeof sortMapEntries === 'function' - ? sortMapEntries - : sortMapEntries === true - ? sortMapEntriesByKey - : null; - } - clone() { - const copy = Object.create(Schema.prototype, Object.getOwnPropertyDescriptors(this)); - copy.tags = this.tags.slice(); - return copy; - } -} - -exports.Schema = Schema; - - -/***/ }), - -/***/ 1400: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); -var Pair = __nccwpck_require__(3497); -var YAMLMap = __nccwpck_require__(6761); - -function createMap(schema, obj, ctx) { - const { keepUndefined, replacer } = ctx; - const map = new YAMLMap.YAMLMap(schema); - const add = (key, value) => { - if (typeof replacer === 'function') - value = replacer.call(obj, key, value); - else if (Array.isArray(replacer) && !replacer.includes(key)) - return; - if (value !== undefined || keepUndefined) - map.items.push(Pair.createPair(key, value, ctx)); - }; - if (obj instanceof Map) { - for (const [key, value] of obj) - add(key, value); - } - else if (obj && typeof obj === 'object') { - for (const key of Object.keys(obj)) - add(key, obj[key]); - } - if (typeof schema.sortMapEntries === 'function') { - map.items.sort(schema.sortMapEntries); - } - return map; -} -const map = { - collection: 'map', - createNode: createMap, - default: true, - nodeClass: YAMLMap.YAMLMap, - tag: 'tag:yaml.org,2002:map', - resolve(map, onError) { - if (!Node.isMap(map)) - onError('Expected a mapping for this tag'); - return map; - } -}; - -exports.map = map; - - -/***/ }), - -/***/ 7556: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Scalar = __nccwpck_require__(8160); - -const nullTag = { - identify: value => value == null, - createNode: () => new Scalar.Scalar(null), - default: true, - tag: 'tag:yaml.org,2002:null', - test: /^(?:~|[Nn]ull|NULL)?$/, - resolve: () => new Scalar.Scalar(null), - stringify: ({ 
source }, ctx) => typeof source === 'string' && nullTag.test.test(source) - ? source - : ctx.options.nullStr -}; - -exports.nullTag = nullTag; - - -/***/ }), - -/***/ 6698: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var createNode = __nccwpck_require__(9807); -var Node = __nccwpck_require__(9752); -var YAMLSeq = __nccwpck_require__(3810); - -function createSeq(schema, obj, ctx) { - const { replacer } = ctx; - const seq = new YAMLSeq.YAMLSeq(schema); - if (obj && Symbol.iterator in Object(obj)) { - let i = 0; - for (let it of obj) { - if (typeof replacer === 'function') { - const key = obj instanceof Set ? it : String(i++); - it = replacer.call(obj, key, it); - } - seq.items.push(createNode.createNode(it, undefined, ctx)); - } - } - return seq; -} -const seq = { - collection: 'seq', - createNode: createSeq, - default: true, - nodeClass: YAMLSeq.YAMLSeq, - tag: 'tag:yaml.org,2002:seq', - resolve(seq, onError) { - if (!Node.isSeq(seq)) - onError('Expected a sequence for this tag'); - return seq; - } -}; - -exports.seq = seq; - - -/***/ }), - -/***/ 1576: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var stringifyString = __nccwpck_require__(6084); - -const string = { - identify: value => typeof value === 'string', - default: true, - tag: 'tag:yaml.org,2002:str', - resolve: str => str, - stringify(item, ctx, onComment, onChompKeep) { - ctx = Object.assign({ actualString: true }, ctx); - return stringifyString.stringifyString(item, ctx, onComment, onChompKeep); - } -}; - -exports.string = string; - - -/***/ }), - -/***/ 7369: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Scalar = __nccwpck_require__(8160); - -const boolTag = { - identify: value => typeof value === 'boolean', - default: true, - tag: 'tag:yaml.org,2002:bool', - test: /^(?:[Tt]rue|TRUE|[Ff]alse|FALSE)$/, - resolve: str => new Scalar.Scalar(str[0] === 't' || str[0] === 'T'), - stringify({ source, value }, ctx) { - if (source && boolTag.test.test(source)) { - const sv = source[0] === 't' || source[0] === 'T'; - if (value === sv) - return source; - } - return value ? ctx.options.trueStr : ctx.options.falseStr; - } -}; - -exports.boolTag = boolTag; - - -/***/ }), - -/***/ 616: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Scalar = __nccwpck_require__(8160); -var stringifyNumber = __nccwpck_require__(780); - -const floatNaN = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - test: /^(?:[-+]?\.(?:inf|Inf|INF|nan|NaN|NAN))$/, - resolve: str => str.slice(-3).toLowerCase() === 'nan' - ? NaN - : str[0] === '-' - ? Number.NEGATIVE_INFINITY - : Number.POSITIVE_INFINITY, - stringify: stringifyNumber.stringifyNumber -}; -const floatExp = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - format: 'EXP', - test: /^[-+]?(?:\.[0-9]+|[0-9]+(?:\.[0-9]*)?)[eE][-+]?[0-9]+$/, - resolve: str => parseFloat(str), - stringify(node) { - const num = Number(node.value); - return isFinite(num) ? 
num.toExponential() : stringifyNumber.stringifyNumber(node); - } -}; -const float = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - test: /^[-+]?(?:\.[0-9]+|[0-9]+\.[0-9]*)$/, - resolve(str) { - const node = new Scalar.Scalar(parseFloat(str)); - const dot = str.indexOf('.'); - if (dot !== -1 && str[str.length - 1] === '0') - node.minFractionDigits = str.length - dot - 1; - return node; - }, - stringify: stringifyNumber.stringifyNumber -}; - -exports.float = float; -exports.floatExp = floatExp; -exports.floatNaN = floatNaN; - - -/***/ }), - -/***/ 2521: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var stringifyNumber = __nccwpck_require__(780); - -const intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value); -const intResolve = (str, offset, radix, { intAsBigInt }) => (intAsBigInt ? BigInt(str) : parseInt(str.substring(offset), radix)); -function intStringify(node, radix, prefix) { - const { value } = node; - if (intIdentify(value) && value >= 0) - return prefix + value.toString(radix); - return stringifyNumber.stringifyNumber(node); -} -const intOct = { - identify: value => intIdentify(value) && value >= 0, - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'OCT', - test: /^0o[0-7]+$/, - resolve: (str, _onError, opt) => intResolve(str, 2, 8, opt), - stringify: node => intStringify(node, 8, '0o') -}; -const int = { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - test: /^[-+]?[0-9]+$/, - resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt), - stringify: stringifyNumber.stringifyNumber -}; -const intHex = { - identify: value => intIdentify(value) && value >= 0, - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'HEX', - test: /^0x[0-9a-fA-F]+$/, - resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt), - stringify: node => intStringify(node, 16, '0x') -}; - -exports.int = int; -exports.intHex = intHex; -exports.intOct = intOct; - - -/***/ }), - -/***/ 7504: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var map = __nccwpck_require__(1400); -var _null = __nccwpck_require__(7556); -var seq = __nccwpck_require__(6698); -var string = __nccwpck_require__(1576); -var bool = __nccwpck_require__(7369); -var float = __nccwpck_require__(616); -var int = __nccwpck_require__(2521); - -const schema = [ - map.map, - seq.seq, - string.string, - _null.nullTag, - bool.boolTag, - int.intOct, - int.int, - int.intHex, - float.floatNaN, - float.floatExp, - float.float -]; - -exports.schema = schema; - - -/***/ }), - -/***/ 8553: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Scalar = __nccwpck_require__(8160); -var map = __nccwpck_require__(1400); -var seq = __nccwpck_require__(6698); - -function intIdentify(value) { - return typeof value === 'bigint' || Number.isInteger(value); -} -const stringifyJSON = ({ value }) => JSON.stringify(value); -const jsonScalars = [ - { - identify: value => typeof value === 'string', - default: true, - tag: 'tag:yaml.org,2002:str', - resolve: str => str, - stringify: stringifyJSON - }, - { - identify: value => value == null, - createNode: () => new Scalar.Scalar(null), - default: true, - tag: 'tag:yaml.org,2002:null', - test: /^null$/, - resolve: () => null, - stringify: stringifyJSON - }, - { - identify: value => typeof value === 'boolean', - default: true, - tag: 'tag:yaml.org,2002:bool', - test: 
/^true|false$/, - resolve: str => str === 'true', - stringify: stringifyJSON - }, - { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - test: /^-?(?:0|[1-9][0-9]*)$/, - resolve: (str, _onError, { intAsBigInt }) => intAsBigInt ? BigInt(str) : parseInt(str, 10), - stringify: ({ value }) => intIdentify(value) ? value.toString() : JSON.stringify(value) - }, - { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - test: /^-?(?:0|[1-9][0-9]*)(?:\.[0-9]*)?(?:[eE][-+]?[0-9]+)?$/, - resolve: str => parseFloat(str), - stringify: stringifyJSON - } -]; -const jsonError = { - default: true, - tag: '', - test: /^/, - resolve(str, onError) { - onError(`Unresolved plain scalar ${JSON.stringify(str)}`); - return str; - } -}; -const schema = [map.map, seq.seq].concat(jsonScalars, jsonError); - -exports.schema = schema; - - -/***/ }), - -/***/ 2102: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var map = __nccwpck_require__(1400); -var _null = __nccwpck_require__(7556); -var seq = __nccwpck_require__(6698); -var string = __nccwpck_require__(1576); -var bool = __nccwpck_require__(7369); -var float = __nccwpck_require__(616); -var int = __nccwpck_require__(2521); -var schema = __nccwpck_require__(7504); -var schema$1 = __nccwpck_require__(8553); -var binary = __nccwpck_require__(5758); -var omap = __nccwpck_require__(4486); -var pairs = __nccwpck_require__(5169); -var schema$2 = __nccwpck_require__(8536); -var set = __nccwpck_require__(9549); -var timestamp = __nccwpck_require__(7717); - -const schemas = new Map([ - ['core', schema.schema], - ['failsafe', [map.map, seq.seq, string.string]], - ['json', schema$1.schema], - ['yaml11', schema$2.schema], - ['yaml-1.1', schema$2.schema] -]); -const tagsByName = { - binary: binary.binary, - bool: bool.boolTag, - float: float.float, - floatExp: float.floatExp, - floatNaN: float.floatNaN, - floatTime: timestamp.floatTime, - int: int.int, - intHex: int.intHex, - intOct: int.intOct, - intTime: timestamp.intTime, - map: map.map, - null: _null.nullTag, - omap: omap.omap, - pairs: pairs.pairs, - seq: seq.seq, - set: set.set, - timestamp: timestamp.timestamp -}; -const coreKnownTags = { - 'tag:yaml.org,2002:binary': binary.binary, - 'tag:yaml.org,2002:omap': omap.omap, - 'tag:yaml.org,2002:pairs': pairs.pairs, - 'tag:yaml.org,2002:set': set.set, - 'tag:yaml.org,2002:timestamp': timestamp.timestamp -}; -function getTags(customTags, schemaName) { - let tags = schemas.get(schemaName); - if (!tags) { - if (Array.isArray(customTags)) - tags = []; - else { - const keys = Array.from(schemas.keys()) - .filter(key => key !== 'yaml11') - .map(key => JSON.stringify(key)) - .join(', '); - throw new Error(`Unknown schema "${schemaName}"; use one of ${keys} or define customTags array`); - } - } - if (Array.isArray(customTags)) { - for (const tag of customTags) - tags = tags.concat(tag); - } - else if (typeof customTags === 'function') { - tags = customTags(tags.slice()); - } - return tags.map(tag => { - if (typeof tag !== 'string') - return tag; - const tagObj = tagsByName[tag]; - if (tagObj) - return tagObj; - const keys = Object.keys(tagsByName) - .map(key => JSON.stringify(key)) - .join(', '); - throw new Error(`Unknown custom tag "${tag}"; use one of ${keys}`); - }); -} - -exports.coreKnownTags = coreKnownTags; -exports.getTags = getTags; - - -/***/ }), - -/***/ 5758: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Scalar 
= __nccwpck_require__(8160); -var stringifyString = __nccwpck_require__(6084); - -const binary = { - identify: value => value instanceof Uint8Array, - default: false, - tag: 'tag:yaml.org,2002:binary', - /** - * Returns a Buffer in node and an Uint8Array in browsers - * - * To use the resulting buffer as an image, you'll want to do something like: - * - * const blob = new Blob([buffer], { type: 'image/jpeg' }) - * document.querySelector('#photo').src = URL.createObjectURL(blob) - */ - resolve(src, onError) { - if (typeof Buffer === 'function') { - return Buffer.from(src, 'base64'); - } - else if (typeof atob === 'function') { - // On IE 11, atob() can't handle newlines - const str = atob(src.replace(/[\n\r]/g, '')); - const buffer = new Uint8Array(str.length); - for (let i = 0; i < str.length; ++i) - buffer[i] = str.charCodeAt(i); - return buffer; - } - else { - onError('This environment does not support reading binary tags; either Buffer or atob is required'); - return src; - } - }, - stringify({ comment, type, value }, ctx, onComment, onChompKeep) { - const buf = value; // checked earlier by binary.identify() - let str; - if (typeof Buffer === 'function') { - str = - buf instanceof Buffer - ? buf.toString('base64') - : Buffer.from(buf.buffer).toString('base64'); - } - else if (typeof btoa === 'function') { - let s = ''; - for (let i = 0; i < buf.length; ++i) - s += String.fromCharCode(buf[i]); - str = btoa(s); - } - else { - throw new Error('This environment does not support writing binary tags; either Buffer or btoa is required'); - } - if (!type) - type = Scalar.Scalar.BLOCK_LITERAL; - if (type !== Scalar.Scalar.QUOTE_DOUBLE) { - const lineWidth = Math.max(ctx.options.lineWidth - ctx.indent.length, ctx.options.minContentWidth); - const n = Math.ceil(str.length / lineWidth); - const lines = new Array(n); - for (let i = 0, o = 0; i < n; ++i, o += lineWidth) { - lines[i] = str.substr(o, lineWidth); - } - str = lines.join(type === Scalar.Scalar.BLOCK_LITERAL ? '\n' : ' '); - } - return stringifyString.stringifyString({ comment, type, value: str }, ctx, onComment, onChompKeep); - } -}; - -exports.binary = binary; - - -/***/ }), - -/***/ 2684: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Scalar = __nccwpck_require__(8160); - -function boolStringify({ value, source }, ctx) { - const boolObj = value ? trueTag : falseTag; - if (source && boolObj.test.test(source)) - return source; - return value ? ctx.options.trueStr : ctx.options.falseStr; -} -const trueTag = { - identify: value => value === true, - default: true, - tag: 'tag:yaml.org,2002:bool', - test: /^(?:Y|y|[Yy]es|YES|[Tt]rue|TRUE|[Oo]n|ON)$/, - resolve: () => new Scalar.Scalar(true), - stringify: boolStringify -}; -const falseTag = { - identify: value => value === false, - default: true, - tag: 'tag:yaml.org,2002:bool', - test: /^(?:N|n|[Nn]o|NO|[Ff]alse|FALSE|[Oo]ff|OFF)$/i, - resolve: () => new Scalar.Scalar(false), - stringify: boolStringify -}; - -exports.falseTag = falseTag; -exports.trueTag = trueTag; - - -/***/ }), - -/***/ 8090: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Scalar = __nccwpck_require__(8160); -var stringifyNumber = __nccwpck_require__(780); - -const floatNaN = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - test: /^[-+]?\.(?:inf|Inf|INF|nan|NaN|NAN)$/, - resolve: (str) => str.slice(-3).toLowerCase() === 'nan' - ? NaN - : str[0] === '-' - ? 
Number.NEGATIVE_INFINITY - : Number.POSITIVE_INFINITY, - stringify: stringifyNumber.stringifyNumber -}; -const floatExp = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - format: 'EXP', - test: /^[-+]?(?:[0-9][0-9_]*)?(?:\.[0-9_]*)?[eE][-+]?[0-9]+$/, - resolve: (str) => parseFloat(str.replace(/_/g, '')), - stringify(node) { - const num = Number(node.value); - return isFinite(num) ? num.toExponential() : stringifyNumber.stringifyNumber(node); - } -}; -const float = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - test: /^[-+]?(?:[0-9][0-9_]*)?\.[0-9_]*$/, - resolve(str) { - const node = new Scalar.Scalar(parseFloat(str.replace(/_/g, ''))); - const dot = str.indexOf('.'); - if (dot !== -1) { - const f = str.substring(dot + 1).replace(/_/g, ''); - if (f[f.length - 1] === '0') - node.minFractionDigits = f.length; - } - return node; - }, - stringify: stringifyNumber.stringifyNumber -}; - -exports.float = float; -exports.floatExp = floatExp; -exports.floatNaN = floatNaN; - - -/***/ }), - -/***/ 7254: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var stringifyNumber = __nccwpck_require__(780); - -const intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value); -function intResolve(str, offset, radix, { intAsBigInt }) { - const sign = str[0]; - if (sign === '-' || sign === '+') - offset += 1; - str = str.substring(offset).replace(/_/g, ''); - if (intAsBigInt) { - switch (radix) { - case 2: - str = `0b${str}`; - break; - case 8: - str = `0o${str}`; - break; - case 16: - str = `0x${str}`; - break; - } - const n = BigInt(str); - return sign === '-' ? BigInt(-1) * n : n; - } - const n = parseInt(str, radix); - return sign === '-' ? -1 * n : n; -} -function intStringify(node, radix, prefix) { - const { value } = node; - if (intIdentify(value)) { - const str = value.toString(radix); - return value < 0 ? 
'-' + prefix + str.substr(1) : prefix + str; - } - return stringifyNumber.stringifyNumber(node); -} -const intBin = { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'BIN', - test: /^[-+]?0b[0-1_]+$/, - resolve: (str, _onError, opt) => intResolve(str, 2, 2, opt), - stringify: node => intStringify(node, 2, '0b') -}; -const intOct = { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'OCT', - test: /^[-+]?0[0-7_]+$/, - resolve: (str, _onError, opt) => intResolve(str, 1, 8, opt), - stringify: node => intStringify(node, 8, '0') -}; -const int = { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - test: /^[-+]?[0-9][0-9_]*$/, - resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt), - stringify: stringifyNumber.stringifyNumber -}; -const intHex = { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'HEX', - test: /^[-+]?0x[0-9a-fA-F_]+$/, - resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt), - stringify: node => intStringify(node, 16, '0x') -}; - -exports.int = int; -exports.intBin = intBin; -exports.intHex = intHex; -exports.intOct = intOct; - - -/***/ }), - -/***/ 4486: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var YAMLSeq = __nccwpck_require__(3810); -var toJS = __nccwpck_require__(8842); -var Node = __nccwpck_require__(9752); -var YAMLMap = __nccwpck_require__(6761); -var pairs = __nccwpck_require__(5169); - -class YAMLOMap extends YAMLSeq.YAMLSeq { - constructor() { - super(); - this.add = YAMLMap.YAMLMap.prototype.add.bind(this); - this.delete = YAMLMap.YAMLMap.prototype.delete.bind(this); - this.get = YAMLMap.YAMLMap.prototype.get.bind(this); - this.has = YAMLMap.YAMLMap.prototype.has.bind(this); - this.set = YAMLMap.YAMLMap.prototype.set.bind(this); - this.tag = YAMLOMap.tag; - } - /** - * If `ctx` is given, the return type is actually `Map`, - * but TypeScript won't allow widening the signature of a child method. 
- */ - toJSON(_, ctx) { - if (!ctx) - return super.toJSON(_); - const map = new Map(); - if (ctx?.onCreate) - ctx.onCreate(map); - for (const pair of this.items) { - let key, value; - if (Node.isPair(pair)) { - key = toJS.toJS(pair.key, '', ctx); - value = toJS.toJS(pair.value, key, ctx); - } - else { - key = toJS.toJS(pair, '', ctx); - } - if (map.has(key)) - throw new Error('Ordered maps must not include duplicate keys'); - map.set(key, value); - } - return map; - } -} -YAMLOMap.tag = 'tag:yaml.org,2002:omap'; -const omap = { - collection: 'seq', - identify: value => value instanceof Map, - nodeClass: YAMLOMap, - default: false, - tag: 'tag:yaml.org,2002:omap', - resolve(seq, onError) { - const pairs$1 = pairs.resolvePairs(seq, onError); - const seenKeys = []; - for (const { key } of pairs$1.items) { - if (Node.isScalar(key)) { - if (seenKeys.includes(key.value)) { - onError(`Ordered maps must not include duplicate keys: ${key.value}`); - } - else { - seenKeys.push(key.value); - } - } - } - return Object.assign(new YAMLOMap(), pairs$1); - }, - createNode(schema, iterable, ctx) { - const pairs$1 = pairs.createPairs(schema, iterable, ctx); - const omap = new YAMLOMap(); - omap.items = pairs$1.items; - return omap; - } -}; - -exports.YAMLOMap = YAMLOMap; -exports.omap = omap; - - -/***/ }), - -/***/ 5169: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); -var Pair = __nccwpck_require__(3497); -var Scalar = __nccwpck_require__(8160); -var YAMLSeq = __nccwpck_require__(3810); - -function resolvePairs(seq, onError) { - if (Node.isSeq(seq)) { - for (let i = 0; i < seq.items.length; ++i) { - let item = seq.items[i]; - if (Node.isPair(item)) - continue; - else if (Node.isMap(item)) { - if (item.items.length > 1) - onError('Each pair must have its own sequence indicator'); - const pair = item.items[0] || new Pair.Pair(new Scalar.Scalar(null)); - if (item.commentBefore) - pair.key.commentBefore = pair.key.commentBefore - ? `${item.commentBefore}\n${pair.key.commentBefore}` - : item.commentBefore; - if (item.comment) { - const cn = pair.value ?? pair.key; - cn.comment = cn.comment - ? `${item.comment}\n${cn.comment}` - : item.comment; - } - item = pair; - } - seq.items[i] = Node.isPair(item) ? 
item : new Pair.Pair(item); - } - } - else - onError('Expected a sequence for this tag'); - return seq; -} -function createPairs(schema, iterable, ctx) { - const { replacer } = ctx; - const pairs = new YAMLSeq.YAMLSeq(schema); - pairs.tag = 'tag:yaml.org,2002:pairs'; - let i = 0; - if (iterable && Symbol.iterator in Object(iterable)) - for (let it of iterable) { - if (typeof replacer === 'function') - it = replacer.call(iterable, String(i++), it); - let key, value; - if (Array.isArray(it)) { - if (it.length === 2) { - key = it[0]; - value = it[1]; - } - else - throw new TypeError(`Expected [key, value] tuple: ${it}`); - } - else if (it && it instanceof Object) { - const keys = Object.keys(it); - if (keys.length === 1) { - key = keys[0]; - value = it[key]; - } - else - throw new TypeError(`Expected { key: value } tuple: ${it}`); - } - else { - key = it; - } - pairs.items.push(Pair.createPair(key, value, ctx)); - } - return pairs; -} -const pairs = { - collection: 'seq', - default: false, - tag: 'tag:yaml.org,2002:pairs', - resolve: resolvePairs, - createNode: createPairs -}; - -exports.createPairs = createPairs; -exports.pairs = pairs; -exports.resolvePairs = resolvePairs; - - -/***/ }), - -/***/ 8536: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var map = __nccwpck_require__(1400); -var _null = __nccwpck_require__(7556); -var seq = __nccwpck_require__(6698); -var string = __nccwpck_require__(1576); -var binary = __nccwpck_require__(5758); -var bool = __nccwpck_require__(2684); -var float = __nccwpck_require__(8090); -var int = __nccwpck_require__(7254); -var omap = __nccwpck_require__(4486); -var pairs = __nccwpck_require__(5169); -var set = __nccwpck_require__(9549); -var timestamp = __nccwpck_require__(7717); - -const schema = [ - map.map, - seq.seq, - string.string, - _null.nullTag, - bool.trueTag, - bool.falseTag, - int.intBin, - int.intOct, - int.int, - int.intHex, - float.floatNaN, - float.floatExp, - float.float, - binary.binary, - omap.omap, - pairs.pairs, - set.set, - timestamp.intTime, - timestamp.floatTime, - timestamp.timestamp -]; - -exports.schema = schema; - - -/***/ }), - -/***/ 9549: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); -var Pair = __nccwpck_require__(3497); -var YAMLMap = __nccwpck_require__(6761); - -class YAMLSet extends YAMLMap.YAMLMap { - constructor(schema) { - super(schema); - this.tag = YAMLSet.tag; - } - add(key) { - let pair; - if (Node.isPair(key)) - pair = key; - else if (typeof key === 'object' && - 'key' in key && - 'value' in key && - key.value === null) - pair = new Pair.Pair(key.key, null); - else - pair = new Pair.Pair(key, null); - const prev = YAMLMap.findPair(this.items, pair.key); - if (!prev) - this.items.push(pair); - } - /** - * If `keepPair` is `true`, returns the Pair matching `key`. - * Otherwise, returns the value of that Pair's key. - */ - get(key, keepPair) { - const pair = YAMLMap.findPair(this.items, key); - return !keepPair && Node.isPair(pair) - ? Node.isScalar(pair.key) - ? 
pair.key.value - : pair.key - : pair; - } - set(key, value) { - if (typeof value !== 'boolean') - throw new Error(`Expected boolean value for set(key, value) in a YAML set, not ${typeof value}`); - const prev = YAMLMap.findPair(this.items, key); - if (prev && !value) { - this.items.splice(this.items.indexOf(prev), 1); - } - else if (!prev && value) { - this.items.push(new Pair.Pair(key)); - } - } - toJSON(_, ctx) { - return super.toJSON(_, ctx, Set); - } - toString(ctx, onComment, onChompKeep) { - if (!ctx) - return JSON.stringify(this); - if (this.hasAllNullValues(true)) - return super.toString(Object.assign({}, ctx, { allNullValues: true }), onComment, onChompKeep); - else - throw new Error('Set items must all have null values'); - } -} -YAMLSet.tag = 'tag:yaml.org,2002:set'; -const set = { - collection: 'map', - identify: value => value instanceof Set, - nodeClass: YAMLSet, - default: false, - tag: 'tag:yaml.org,2002:set', - resolve(map, onError) { - if (Node.isMap(map)) { - if (map.hasAllNullValues(true)) - return Object.assign(new YAMLSet(), map); - else - onError('Set items must all have null values'); - } - else - onError('Expected a mapping for this tag'); - return map; - }, - createNode(schema, iterable, ctx) { - const { replacer } = ctx; - const set = new YAMLSet(schema); - if (iterable && Symbol.iterator in Object(iterable)) - for (let value of iterable) { - if (typeof replacer === 'function') - value = replacer.call(iterable, value, value); - set.items.push(Pair.createPair(value, null, ctx)); - } - return set; - } -}; - -exports.YAMLSet = YAMLSet; -exports.set = set; - - -/***/ }), - -/***/ 7717: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var stringifyNumber = __nccwpck_require__(780); - -/** Internal types handle bigint as number, because TS can't figure it out. */ -function parseSexagesimal(str, asBigInt) { - const sign = str[0]; - const parts = sign === '-' || sign === '+' ? str.substring(1) : str; - const num = (n) => asBigInt ? BigInt(n) : Number(n); - const res = parts - .replace(/_/g, '') - .split(':') - .reduce((res, p) => res * num(60) + num(p), num(0)); - return (sign === '-' ? num(-1) * res : res); -} -/** - * hhhh:mm:ss.sss - * - * Internal types handle bigint as number, because TS can't figure it out. - */ -function stringifySexagesimal(node) { - let { value } = node; - let num = (n) => n; - if (typeof value === 'bigint') - num = n => BigInt(n); - else if (isNaN(value) || !isFinite(value)) - return stringifyNumber.stringifyNumber(node); - let sign = ''; - if (value < 0) { - sign = '-'; - value *= num(-1); - } - const _60 = num(60); - const parts = [value % _60]; // seconds, including ms - if (value < 60) { - parts.unshift(0); // at least one : is required - } - else { - value = (value - parts[0]) / _60; - parts.unshift(value % _60); // minutes - if (value >= 60) { - value = (value - parts[0]) / _60; - parts.unshift(value); // hours - } - } - return (sign + - parts - .map(n => (n < 10 ? 
'0' + String(n) : String(n))) - .join(':') - .replace(/000000\d*$/, '') // % 60 may introduce error - ); -} -const intTime = { - identify: value => typeof value === 'bigint' || Number.isInteger(value), - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'TIME', - test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+$/, - resolve: (str, _onError, { intAsBigInt }) => parseSexagesimal(str, intAsBigInt), - stringify: stringifySexagesimal -}; -const floatTime = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - format: 'TIME', - test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]*$/, - resolve: str => parseSexagesimal(str, false), - stringify: stringifySexagesimal -}; -const timestamp = { - identify: value => value instanceof Date, - default: true, - tag: 'tag:yaml.org,2002:timestamp', - // If the time zone is omitted, the timestamp is assumed to be specified in UTC. The time part - // may be omitted altogether, resulting in a date format. In such a case, the time part is - // assumed to be 00:00:00Z (start of day, UTC). - test: RegExp('^([0-9]{4})-([0-9]{1,2})-([0-9]{1,2})' + // YYYY-Mm-Dd - '(?:' + // time is optional - '(?:t|T|[ \\t]+)' + // t | T | whitespace - '([0-9]{1,2}):([0-9]{1,2}):([0-9]{1,2}(\\.[0-9]+)?)' + // Hh:Mm:Ss(.ss)? - '(?:[ \\t]*(Z|[-+][012]?[0-9](?::[0-9]{2})?))?' + // Z | +5 | -03:30 - ')?$'), - resolve(str) { - const match = str.match(timestamp.test); - if (!match) - throw new Error('!!timestamp expects a date, starting with yyyy-mm-dd'); - const [, year, month, day, hour, minute, second] = match.map(Number); - const millisec = match[7] ? Number((match[7] + '00').substr(1, 3)) : 0; - let date = Date.UTC(year, month - 1, day, hour || 0, minute || 0, second || 0, millisec); - const tz = match[8]; - if (tz && tz !== 'Z') { - let d = parseSexagesimal(tz, false); - if (Math.abs(d) < 30) - d *= 60; - date -= 60000 * d; - } - return new Date(date); - }, - stringify: ({ value }) => value.toISOString().replace(/((T00:00)?:00)?\.000Z$/, '') -}; - -exports.floatTime = floatTime; -exports.intTime = intTime; -exports.timestamp = timestamp; - - -/***/ }), - -/***/ 1883: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -const FOLD_FLOW = 'flow'; -const FOLD_BLOCK = 'block'; -const FOLD_QUOTED = 'quoted'; -/** - * Tries to keep input at up to `lineWidth` characters, splitting only on spaces - * not followed by newlines or spaces unless `mode` is `'quoted'`. Lines are - * terminated with `\n` and started with `indent`. 
- */ -function foldFlowLines(text, indent, mode = 'flow', { indentAtStart, lineWidth = 80, minContentWidth = 20, onFold, onOverflow } = {}) { - if (!lineWidth || lineWidth < 0) - return text; - const endStep = Math.max(1 + minContentWidth, 1 + lineWidth - indent.length); - if (text.length <= endStep) - return text; - const folds = []; - const escapedFolds = {}; - let end = lineWidth - indent.length; - if (typeof indentAtStart === 'number') { - if (indentAtStart > lineWidth - Math.max(2, minContentWidth)) - folds.push(0); - else - end = lineWidth - indentAtStart; - } - let split = undefined; - let prev = undefined; - let overflow = false; - let i = -1; - let escStart = -1; - let escEnd = -1; - if (mode === FOLD_BLOCK) { - i = consumeMoreIndentedLines(text, i); - if (i !== -1) - end = i + endStep; - } - for (let ch; (ch = text[(i += 1)]);) { - if (mode === FOLD_QUOTED && ch === '\\') { - escStart = i; - switch (text[i + 1]) { - case 'x': - i += 3; - break; - case 'u': - i += 5; - break; - case 'U': - i += 9; - break; - default: - i += 1; - } - escEnd = i; - } - if (ch === '\n') { - if (mode === FOLD_BLOCK) - i = consumeMoreIndentedLines(text, i); - end = i + endStep; - split = undefined; - } - else { - if (ch === ' ' && - prev && - prev !== ' ' && - prev !== '\n' && - prev !== '\t') { - // space surrounded by non-space can be replaced with newline + indent - const next = text[i + 1]; - if (next && next !== ' ' && next !== '\n' && next !== '\t') - split = i; - } - if (i >= end) { - if (split) { - folds.push(split); - end = split + endStep; - split = undefined; - } - else if (mode === FOLD_QUOTED) { - // white-space collected at end may stretch past lineWidth - while (prev === ' ' || prev === '\t') { - prev = ch; - ch = text[(i += 1)]; - overflow = true; - } - // Account for newline escape, but don't break preceding escape - const j = i > escEnd + 1 ? 
i - 2 : escStart - 1; - // Bail out if lineWidth & minContentWidth are shorter than an escape string - if (escapedFolds[j]) - return text; - folds.push(j); - escapedFolds[j] = true; - end = j + endStep; - split = undefined; - } - else { - overflow = true; - } - } - } - prev = ch; - } - if (overflow && onOverflow) - onOverflow(); - if (folds.length === 0) - return text; - if (onFold) - onFold(); - let res = text.slice(0, folds[0]); - for (let i = 0; i < folds.length; ++i) { - const fold = folds[i]; - const end = folds[i + 1] || text.length; - if (fold === 0) - res = `\n${indent}${text.slice(0, end)}`; - else { - if (mode === FOLD_QUOTED && escapedFolds[fold]) - res += `${text[fold]}\\`; - res += `\n${indent}${text.slice(fold + 1, end)}`; - } - } - return res; -} -/** - * Presumes `i + 1` is at the start of a line - * @returns index of last newline in more-indented block - */ -function consumeMoreIndentedLines(text, i) { - let ch = text[i + 1]; - while (ch === ' ' || ch === '\t') { - do { - ch = text[(i += 1)]; - } while (ch && ch !== '\n'); - ch = text[i + 1]; - } - return i; -} - -exports.FOLD_BLOCK = FOLD_BLOCK; -exports.FOLD_FLOW = FOLD_FLOW; -exports.FOLD_QUOTED = FOLD_QUOTED; -exports.foldFlowLines = foldFlowLines; - - -/***/ }), - -/***/ 1922: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var anchors = __nccwpck_require__(584); -var Node = __nccwpck_require__(9752); -var stringifyComment = __nccwpck_require__(5577); -var stringifyString = __nccwpck_require__(6084); - -function createStringifyContext(doc, options) { - const opt = Object.assign({ - blockQuote: true, - commentString: stringifyComment.stringifyComment, - defaultKeyType: null, - defaultStringType: 'PLAIN', - directives: null, - doubleQuotedAsJSON: false, - doubleQuotedMinMultiLineLength: 40, - falseStr: 'false', - indentSeq: true, - lineWidth: 80, - minContentWidth: 20, - nullStr: 'null', - simpleKeys: false, - singleQuote: null, - trueStr: 'true', - verifyAliasOrder: true - }, doc.schema.toStringOptions, options); - let inFlow; - switch (opt.collectionStyle) { - case 'block': - inFlow = false; - break; - case 'flow': - inFlow = true; - break; - default: - inFlow = null; - } - return { - anchors: new Set(), - doc, - indent: '', - indentStep: typeof opt.indent === 'number' ? ' '.repeat(opt.indent) : ' ', - inFlow, - options: opt - }; -} -function getTagObject(tags, item) { - if (item.tag) { - const match = tags.filter(t => t.tag === item.tag); - if (match.length > 0) - return match.find(t => t.format === item.format) ?? match[0]; - } - let tagObj = undefined; - let obj; - if (Node.isScalar(item)) { - obj = item.value; - const match = tags.filter(t => t.identify?.(obj)); - tagObj = - match.find(t => t.format === item.format) ?? match.find(t => !t.format); - } - else { - obj = item; - tagObj = tags.find(t => t.nodeClass && obj instanceof t.nodeClass); - } - if (!tagObj) { - const name = obj?.constructor?.name ?? typeof obj; - throw new Error(`Tag not resolved for ${name} value`); - } - return tagObj; -} -// needs to be called before value stringifier to allow for circular anchor refs -function stringifyProps(node, tagObj, { anchors: anchors$1, doc }) { - if (!doc.directives) - return ''; - const props = []; - const anchor = (Node.isScalar(node) || Node.isCollection(node)) && node.anchor; - if (anchor && anchors.anchorIsValid(anchor)) { - anchors$1.add(anchor); - props.push(`&${anchor}`); - } - const tag = node.tag ? node.tag : tagObj.default ? 
null : tagObj.tag; - if (tag) - props.push(doc.directives.tagString(tag)); - return props.join(' '); -} -function stringify(item, ctx, onComment, onChompKeep) { - if (Node.isPair(item)) - return item.toString(ctx, onComment, onChompKeep); - if (Node.isAlias(item)) { - if (ctx.doc.directives) - return item.toString(ctx); - if (ctx.resolvedAliases?.has(item)) { - throw new TypeError(`Cannot stringify circular structure without alias nodes`); - } - else { - if (ctx.resolvedAliases) - ctx.resolvedAliases.add(item); - else - ctx.resolvedAliases = new Set([item]); - item = item.resolve(ctx.doc); - } - } - let tagObj = undefined; - const node = Node.isNode(item) - ? item - : ctx.doc.createNode(item, { onTagObj: o => (tagObj = o) }); - if (!tagObj) - tagObj = getTagObject(ctx.doc.schema.tags, node); - const props = stringifyProps(node, tagObj, ctx); - if (props.length > 0) - ctx.indentAtStart = (ctx.indentAtStart ?? 0) + props.length + 1; - const str = typeof tagObj.stringify === 'function' - ? tagObj.stringify(node, ctx, onComment, onChompKeep) - : Node.isScalar(node) - ? stringifyString.stringifyString(node, ctx, onComment, onChompKeep) - : node.toString(ctx, onComment, onChompKeep); - if (!props) - return str; - return Node.isScalar(node) || str[0] === '{' || str[0] === '[' - ? `${props} ${str}` - : `${props}\n${ctx.indent}${str}`; -} - -exports.createStringifyContext = createStringifyContext; -exports.stringify = stringify; - - -/***/ }), - -/***/ 3541: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Collection = __nccwpck_require__(9797); -var Node = __nccwpck_require__(9752); -var stringify = __nccwpck_require__(1922); -var stringifyComment = __nccwpck_require__(5577); - -function stringifyCollection(collection, ctx, options) { - const flow = ctx.inFlow ?? collection.flow; - const stringify = flow ? stringifyFlowCollection : stringifyBlockCollection; - return stringify(collection, ctx, options); -} -function stringifyBlockCollection({ comment, items }, ctx, { blockItemPrefix, flowChars, itemIndent, onChompKeep, onComment }) { - const { indent, options: { commentString } } = ctx; - const itemCtx = Object.assign({}, ctx, { indent: itemIndent, type: null }); - let chompKeep = false; // flag for the preceding node's status - const lines = []; - for (let i = 0; i < items.length; ++i) { - const item = items[i]; - let comment = null; - if (Node.isNode(item)) { - if (!chompKeep && item.spaceBefore) - lines.push(''); - addCommentBefore(ctx, lines, item.commentBefore, chompKeep); - if (item.comment) - comment = item.comment; - } - else if (Node.isPair(item)) { - const ik = Node.isNode(item.key) ? item.key : null; - if (ik) { - if (!chompKeep && ik.spaceBefore) - lines.push(''); - addCommentBefore(ctx, lines, ik.commentBefore, chompKeep); - } - } - chompKeep = false; - let str = stringify.stringify(item, itemCtx, () => (comment = null), () => (chompKeep = true)); - if (comment) - str += stringifyComment.lineComment(str, itemIndent, commentString(comment)); - if (chompKeep && comment) - chompKeep = false; - lines.push(blockItemPrefix + str); - } - let str; - if (lines.length === 0) { - str = flowChars.start + flowChars.end; - } - else { - str = lines[0]; - for (let i = 1; i < lines.length; ++i) { - const line = lines[i]; - str += line ? 
`\n${indent}${line}` : '\n'; - } - } - if (comment) { - str += '\n' + stringifyComment.indentComment(commentString(comment), indent); - if (onComment) - onComment(); - } - else if (chompKeep && onChompKeep) - onChompKeep(); - return str; -} -function stringifyFlowCollection({ comment, items }, ctx, { flowChars, itemIndent, onComment }) { - const { indent, indentStep, options: { commentString } } = ctx; - itemIndent += indentStep; - const itemCtx = Object.assign({}, ctx, { - indent: itemIndent, - inFlow: true, - type: null - }); - let reqNewline = false; - let linesAtValue = 0; - const lines = []; - for (let i = 0; i < items.length; ++i) { - const item = items[i]; - let comment = null; - if (Node.isNode(item)) { - if (item.spaceBefore) - lines.push(''); - addCommentBefore(ctx, lines, item.commentBefore, false); - if (item.comment) - comment = item.comment; - } - else if (Node.isPair(item)) { - const ik = Node.isNode(item.key) ? item.key : null; - if (ik) { - if (ik.spaceBefore) - lines.push(''); - addCommentBefore(ctx, lines, ik.commentBefore, false); - if (ik.comment) - reqNewline = true; - } - const iv = Node.isNode(item.value) ? item.value : null; - if (iv) { - if (iv.comment) - comment = iv.comment; - if (iv.commentBefore) - reqNewline = true; - } - else if (item.value == null && ik && ik.comment) { - comment = ik.comment; - } - } - if (comment) - reqNewline = true; - let str = stringify.stringify(item, itemCtx, () => (comment = null)); - if (i < items.length - 1) - str += ','; - if (comment) - str += stringifyComment.lineComment(str, itemIndent, commentString(comment)); - if (!reqNewline && (lines.length > linesAtValue || str.includes('\n'))) - reqNewline = true; - lines.push(str); - linesAtValue = lines.length; - } - let str; - const { start, end } = flowChars; - if (lines.length === 0) { - str = start + end; - } - else { - if (!reqNewline) { - const len = lines.reduce((sum, line) => sum + line.length + 2, 2); - reqNewline = len > Collection.Collection.maxFlowStringSingleLineLength; - } - if (reqNewline) { - str = start; - for (const line of lines) - str += line ? `\n${indentStep}${indent}${line}` : '\n'; - str += `\n${indent}${end}`; - } - else { - str = `${start} ${lines.join(' ')} ${end}`; - } - } - if (comment) { - str += stringifyComment.lineComment(str, commentString(comment), indent); - if (onComment) - onComment(); - } - return str; -} -function addCommentBefore({ indent, options: { commentString } }, lines, comment, chompKeep) { - if (comment && chompKeep) - comment = comment.replace(/^\n+/, ''); - if (comment) { - const ic = stringifyComment.indentComment(commentString(comment), indent); - lines.push(ic.trimStart()); // Avoid double indent on first line - } -} - -exports.stringifyCollection = stringifyCollection; - - -/***/ }), - -/***/ 5577: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -/** - * Stringifies a comment. - * - * Empty comment lines are left empty, - * lines consisting of a single space are replaced by `#`, - * and all other lines are prefixed with a `#`. - */ -const stringifyComment = (str) => str.replace(/^(?!$)(?: $)?/gm, '#'); -function indentComment(comment, indent) { - if (/^\n+$/.test(comment)) - return comment.substring(1); - return indent ? comment.replace(/^(?! *$)/gm, indent) : comment; -} -const lineComment = (str, indent, comment) => str.endsWith('\n') - ? indentComment(comment, indent) - : comment.includes('\n') - ? '\n' + indentComment(comment, indent) - : (str.endsWith(' ') ? 
'' : ' ') + comment; - -exports.indentComment = indentComment; -exports.lineComment = lineComment; -exports.stringifyComment = stringifyComment; - - -/***/ }), - -/***/ 9615: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); -var stringify = __nccwpck_require__(1922); -var stringifyComment = __nccwpck_require__(5577); - -function stringifyDocument(doc, options) { - const lines = []; - let hasDirectives = options.directives === true; - if (options.directives !== false && doc.directives) { - const dir = doc.directives.toString(doc); - if (dir) { - lines.push(dir); - hasDirectives = true; - } - else if (doc.directives.docStart) - hasDirectives = true; - } - if (hasDirectives) - lines.push('---'); - const ctx = stringify.createStringifyContext(doc, options); - const { commentString } = ctx.options; - if (doc.commentBefore) { - if (lines.length !== 1) - lines.unshift(''); - const cs = commentString(doc.commentBefore); - lines.unshift(stringifyComment.indentComment(cs, '')); - } - let chompKeep = false; - let contentComment = null; - if (doc.contents) { - if (Node.isNode(doc.contents)) { - if (doc.contents.spaceBefore && hasDirectives) - lines.push(''); - if (doc.contents.commentBefore) { - const cs = commentString(doc.contents.commentBefore); - lines.push(stringifyComment.indentComment(cs, '')); - } - // top-level block scalars need to be indented if followed by a comment - ctx.forceBlockIndent = !!doc.comment; - contentComment = doc.contents.comment; - } - const onChompKeep = contentComment ? undefined : () => (chompKeep = true); - let body = stringify.stringify(doc.contents, ctx, () => (contentComment = null), onChompKeep); - if (contentComment) - body += stringifyComment.lineComment(body, '', commentString(contentComment)); - if ((body[0] === '|' || body[0] === '>') && - lines[lines.length - 1] === '---') { - // Top-level block scalars with a preceding doc marker ought to use the - // same line for their header. - lines[lines.length - 1] = `--- ${body}`; - } - else - lines.push(body); - } - else { - lines.push(stringify.stringify(doc.contents, ctx)); - } - if (doc.directives?.docEnd) { - if (doc.comment) { - const cs = commentString(doc.comment); - if (cs.includes('\n')) { - lines.push('...'); - lines.push(stringifyComment.indentComment(cs, '')); - } - else { - lines.push(`... ${cs}`); - } - } - else { - lines.push('...'); - } - } - else { - let dc = doc.comment; - if (dc && chompKeep) - dc = dc.replace(/^\n+/, ''); - if (dc) { - if ((!chompKeep || contentComment) && lines[lines.length - 1] !== '') - lines.push(''); - lines.push(stringifyComment.indentComment(commentString(dc), '')); - } - } - return lines.join('\n') + '\n'; -} - -exports.stringifyDocument = stringifyDocument; - - -/***/ }), - -/***/ 780: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -function stringifyNumber({ format, minFractionDigits, tag, value }) { - if (typeof value === 'bigint') - return String(value); - const num = typeof value === 'number' ? value : Number(value); - if (!isFinite(num)) - return isNaN(num) ? '.nan' : num < 0 ? 
'-.inf' : '.inf'; - let n = JSON.stringify(value); - if (!format && - minFractionDigits && - (!tag || tag === 'tag:yaml.org,2002:float') && - /^\d/.test(n)) { - let i = n.indexOf('.'); - if (i < 0) { - i = n.length; - n += '.'; - } - let d = minFractionDigits - (n.length - i - 1); - while (d-- > 0) - n += '0'; - } - return n; -} - -exports.stringifyNumber = stringifyNumber; - - -/***/ }), - -/***/ 10: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); -var Scalar = __nccwpck_require__(8160); -var stringify = __nccwpck_require__(1922); -var stringifyComment = __nccwpck_require__(5577); - -function stringifyPair({ key, value }, ctx, onComment, onChompKeep) { - const { allNullValues, doc, indent, indentStep, options: { commentString, indentSeq, simpleKeys } } = ctx; - let keyComment = (Node.isNode(key) && key.comment) || null; - if (simpleKeys) { - if (keyComment) { - throw new Error('With simple keys, key nodes cannot have comments'); - } - if (Node.isCollection(key)) { - const msg = 'With simple keys, collection cannot be used as a key value'; - throw new Error(msg); - } - } - let explicitKey = !simpleKeys && - (!key || - (keyComment && value == null && !ctx.inFlow) || - Node.isCollection(key) || - (Node.isScalar(key) - ? key.type === Scalar.Scalar.BLOCK_FOLDED || key.type === Scalar.Scalar.BLOCK_LITERAL - : typeof key === 'object')); - ctx = Object.assign({}, ctx, { - allNullValues: false, - implicitKey: !explicitKey && (simpleKeys || !allNullValues), - indent: indent + indentStep - }); - let keyCommentDone = false; - let chompKeep = false; - let str = stringify.stringify(key, ctx, () => (keyCommentDone = true), () => (chompKeep = true)); - if (!explicitKey && !ctx.inFlow && str.length > 1024) { - if (simpleKeys) - throw new Error('With simple keys, single line scalar must not span more than 1024 characters'); - explicitKey = true; - } - if (ctx.inFlow) { - if (allNullValues || value == null) { - if (keyCommentDone && onComment) - onComment(); - return str === '' ? '?' : explicitKey ? `? ${str}` : str; - } - } - else if ((allNullValues && !simpleKeys) || (value == null && explicitKey)) { - str = `? ${str}`; - if (keyComment && !keyCommentDone) { - str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment)); - } - else if (chompKeep && onChompKeep) - onChompKeep(); - return str; - } - if (keyCommentDone) - keyComment = null; - if (explicitKey) { - if (keyComment) - str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment)); - str = `? 
${str}\n${indent}:`; - } - else { - str = `${str}:`; - if (keyComment) - str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment)); - } - let vcb = ''; - let valueComment = null; - if (Node.isNode(value)) { - if (value.spaceBefore) - vcb = '\n'; - if (value.commentBefore) { - const cs = commentString(value.commentBefore); - vcb += `\n${stringifyComment.indentComment(cs, ctx.indent)}`; - } - valueComment = value.comment; - } - else if (value && typeof value === 'object') { - value = doc.createNode(value); - } - ctx.implicitKey = false; - if (!explicitKey && !keyComment && Node.isScalar(value)) - ctx.indentAtStart = str.length + 1; - chompKeep = false; - if (!indentSeq && - indentStep.length >= 2 && - !ctx.inFlow && - !explicitKey && - Node.isSeq(value) && - !value.flow && - !value.tag && - !value.anchor) { - // If indentSeq === false, consider '- ' as part of indentation where possible - ctx.indent = ctx.indent.substr(2); - } - let valueCommentDone = false; - const valueStr = stringify.stringify(value, ctx, () => (valueCommentDone = true), () => (chompKeep = true)); - let ws = ' '; - if (vcb || keyComment) { - if (valueStr === '' && !ctx.inFlow) - ws = vcb === '\n' ? '\n\n' : vcb; - else - ws = `${vcb}\n${ctx.indent}`; - } - else if (!explicitKey && Node.isCollection(value)) { - const flow = valueStr[0] === '[' || valueStr[0] === '{'; - if (!flow || valueStr.includes('\n')) - ws = `\n${ctx.indent}`; - } - else if (valueStr === '' || valueStr[0] === '\n') - ws = ''; - str += ws + valueStr; - if (ctx.inFlow) { - if (valueCommentDone && onComment) - onComment(); - } - else if (valueComment && !valueCommentDone) { - str += stringifyComment.lineComment(str, ctx.indent, commentString(valueComment)); - } - else if (chompKeep && onChompKeep) { - onChompKeep(); - } - return str; -} - -exports.stringifyPair = stringifyPair; - - -/***/ }), - -/***/ 6084: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Scalar = __nccwpck_require__(8160); -var foldFlowLines = __nccwpck_require__(1883); - -const getFoldOptions = (ctx) => ({ - indentAtStart: ctx.indentAtStart, - lineWidth: ctx.options.lineWidth, - minContentWidth: ctx.options.minContentWidth -}); -// Also checks for lines starting with %, as parsing the output as YAML 1.1 will -// presume that's starting a new document. -const containsDocumentMarker = (str) => /^(%|---|\.\.\.)/m.test(str); -function lineLengthOverLimit(str, lineWidth, indentLength) { - if (!lineWidth || lineWidth < 0) - return false; - const limit = lineWidth - indentLength; - const strLen = str.length; - if (strLen <= limit) - return false; - for (let i = 0, start = 0; i < strLen; ++i) { - if (str[i] === '\n') { - if (i - start > limit) - return true; - start = i + 1; - if (strLen - start <= limit) - return false; - } - } - return true; -} -function doubleQuotedString(value, ctx) { - const json = JSON.stringify(value); - if (ctx.options.doubleQuotedAsJSON) - return json; - const { implicitKey } = ctx; - const minMultiLineLength = ctx.options.doubleQuotedMinMultiLineLength; - const indent = ctx.indent || (containsDocumentMarker(value) ? 
' ' : ''); - let str = ''; - let start = 0; - for (let i = 0, ch = json[i]; ch; ch = json[++i]) { - if (ch === ' ' && json[i + 1] === '\\' && json[i + 2] === 'n') { - // space before newline needs to be escaped to not be folded - str += json.slice(start, i) + '\\ '; - i += 1; - start = i; - ch = '\\'; - } - if (ch === '\\') - switch (json[i + 1]) { - case 'u': - { - str += json.slice(start, i); - const code = json.substr(i + 2, 4); - switch (code) { - case '0000': - str += '\\0'; - break; - case '0007': - str += '\\a'; - break; - case '000b': - str += '\\v'; - break; - case '001b': - str += '\\e'; - break; - case '0085': - str += '\\N'; - break; - case '00a0': - str += '\\_'; - break; - case '2028': - str += '\\L'; - break; - case '2029': - str += '\\P'; - break; - default: - if (code.substr(0, 2) === '00') - str += '\\x' + code.substr(2); - else - str += json.substr(i, 6); - } - i += 5; - start = i + 1; - } - break; - case 'n': - if (implicitKey || - json[i + 2] === '"' || - json.length < minMultiLineLength) { - i += 1; - } - else { - // folding will eat first newline - str += json.slice(start, i) + '\n\n'; - while (json[i + 2] === '\\' && - json[i + 3] === 'n' && - json[i + 4] !== '"') { - str += '\n'; - i += 2; - } - str += indent; - // space after newline needs to be escaped to not be folded - if (json[i + 2] === ' ') - str += '\\'; - i += 1; - start = i + 1; - } - break; - default: - i += 1; - } - } - str = start ? str + json.slice(start) : json; - return implicitKey - ? str - : foldFlowLines.foldFlowLines(str, indent, foldFlowLines.FOLD_QUOTED, getFoldOptions(ctx)); -} -function singleQuotedString(value, ctx) { - if (ctx.options.singleQuote === false || - (ctx.implicitKey && value.includes('\n')) || - /[ \t]\n|\n[ \t]/.test(value) // single quoted string can't have leading or trailing whitespace around newline - ) - return doubleQuotedString(value, ctx); - const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : ''); - const res = "'" + value.replace(/'/g, "''").replace(/\n+/g, `$&\n${indent}`) + "'"; - return ctx.implicitKey - ? res - : foldFlowLines.foldFlowLines(res, indent, foldFlowLines.FOLD_FLOW, getFoldOptions(ctx)); -} -function quotedString(value, ctx) { - const { singleQuote } = ctx.options; - let qs; - if (singleQuote === false) - qs = doubleQuotedString; - else { - const hasDouble = value.includes('"'); - const hasSingle = value.includes("'"); - if (hasDouble && !hasSingle) - qs = singleQuotedString; - else if (hasSingle && !hasDouble) - qs = doubleQuotedString; - else - qs = singleQuote ? singleQuotedString : doubleQuotedString; - } - return qs(value, ctx); -} -function blockString({ comment, type, value }, ctx, onComment, onChompKeep) { - const { blockQuote, commentString, lineWidth } = ctx.options; - // 1. Block can't end in whitespace unless the last line is non-empty. - // 2. Strings consisting of only whitespace are best rendered explicitly. - if (!blockQuote || /\n[\t ]+$/.test(value) || /^\s*$/.test(value)) { - return quotedString(value, ctx); - } - const indent = ctx.indent || - (ctx.forceBlockIndent || containsDocumentMarker(value) ? ' ' : ''); - const literal = blockQuote === 'literal' - ? true - : blockQuote === 'folded' || type === Scalar.Scalar.BLOCK_FOLDED - ? false - : type === Scalar.Scalar.BLOCK_LITERAL - ? true - : !lineLengthOverLimit(value, lineWidth, indent.length); - if (!value) - return literal ? 
'|\n' : '>\n'; - // determine chomping from whitespace at value end - let chomp; - let endStart; - for (endStart = value.length; endStart > 0; --endStart) { - const ch = value[endStart - 1]; - if (ch !== '\n' && ch !== '\t' && ch !== ' ') - break; - } - let end = value.substring(endStart); - const endNlPos = end.indexOf('\n'); - if (endNlPos === -1) { - chomp = '-'; // strip - } - else if (value === end || endNlPos !== end.length - 1) { - chomp = '+'; // keep - if (onChompKeep) - onChompKeep(); - } - else { - chomp = ''; // clip - } - if (end) { - value = value.slice(0, -end.length); - if (end[end.length - 1] === '\n') - end = end.slice(0, -1); - end = end.replace(/\n+(?!\n|$)/g, `$&${indent}`); - } - // determine indent indicator from whitespace at value start - let startWithSpace = false; - let startEnd; - let startNlPos = -1; - for (startEnd = 0; startEnd < value.length; ++startEnd) { - const ch = value[startEnd]; - if (ch === ' ') - startWithSpace = true; - else if (ch === '\n') - startNlPos = startEnd; - else - break; - } - let start = value.substring(0, startNlPos < startEnd ? startNlPos + 1 : startEnd); - if (start) { - value = value.substring(start.length); - start = start.replace(/\n+/g, `$&${indent}`); - } - const indentSize = indent ? '2' : '1'; // root is at -1 - let header = (literal ? '|' : '>') + (startWithSpace ? indentSize : '') + chomp; - if (comment) { - header += ' ' + commentString(comment.replace(/ ?[\r\n]+/g, ' ')); - if (onComment) - onComment(); - } - if (literal) { - value = value.replace(/\n+/g, `$&${indent}`); - return `${header}\n${indent}${start}${value}${end}`; - } - value = value - .replace(/\n+/g, '\n$&') - .replace(/(?:^|\n)([\t ].*)(?:([\n\t ]*)\n(?![\n\t ]))?/g, '$1$2') // more-indented lines aren't folded - // ^ more-ind. ^ empty ^ capture next empty lines only at end of indent - .replace(/\n+/g, `$&${indent}`); - const body = foldFlowLines.foldFlowLines(`${start}${value}${end}`, indent, foldFlowLines.FOLD_BLOCK, getFoldOptions(ctx)); - return `${header}\n${indent}${body}`; -} -function plainString(item, ctx, onComment, onChompKeep) { - const { type, value } = item; - const { actualString, implicitKey, indent, inFlow } = ctx; - if ((implicitKey && /[\n[\]{},]/.test(value)) || - (inFlow && /[[\]{},]/.test(value))) { - return quotedString(value, ctx); - } - if (!value || - /^[\n\t ,[\]{}#&*!|>'"%@`]|^[?-]$|^[?-][ \t]|[\n:][ \t]|[ \t]\n|[\n\t ]#|[\n\t :]$/.test(value)) { - // not allowed: - // - empty string, '-' or '?' - // - start with an indicator character (except [?:-]) or /[?-] / - // - '\n ', ': ' or ' \n' anywhere - // - '#' not preceded by a non-space char - // - end with ' ' or ':' - return implicitKey || inFlow || !value.includes('\n') - ? quotedString(value, ctx) - : blockString(item, ctx, onComment, onChompKeep); - } - if (!implicitKey && - !inFlow && - type !== Scalar.Scalar.PLAIN && - value.includes('\n')) { - // Where allowed & type not set explicitly, prefer block style for multiline strings - return blockString(item, ctx, onComment, onChompKeep); - } - if (indent === '' && containsDocumentMarker(value)) { - ctx.forceBlockIndent = true; - return blockString(item, ctx, onComment, onChompKeep); - } - const str = value.replace(/\n+/g, `$&\n${indent}`); - // Verify that output will be parsed as a string, as e.g. plain numbers and - // booleans get parsed with those types in v1.2 (e.g. '42', 'true' & '0.9e-3'), - // and others in v1.1. 
- if (actualString) { - const test = (tag) => tag.default && tag.tag !== 'tag:yaml.org,2002:str' && tag.test?.test(str); - const { compat, tags } = ctx.doc.schema; - if (tags.some(test) || compat?.some(test)) - return quotedString(value, ctx); - } - return implicitKey - ? str - : foldFlowLines.foldFlowLines(str, indent, foldFlowLines.FOLD_FLOW, getFoldOptions(ctx)); -} -function stringifyString(item, ctx, onComment, onChompKeep) { - const { implicitKey, inFlow } = ctx; - const ss = typeof item.value === 'string' - ? item - : Object.assign({}, item, { value: String(item.value) }); - let { type } = item; - if (type !== Scalar.Scalar.QUOTE_DOUBLE) { - // force double quotes on control characters & unpaired surrogates - if (/[\x00-\x08\x0b-\x1f\x7f-\x9f\u{D800}-\u{DFFF}]/u.test(ss.value)) - type = Scalar.Scalar.QUOTE_DOUBLE; - } - const _stringify = (_type) => { - switch (_type) { - case Scalar.Scalar.BLOCK_FOLDED: - case Scalar.Scalar.BLOCK_LITERAL: - return implicitKey || inFlow - ? quotedString(ss.value, ctx) // blocks are not valid inside flow containers - : blockString(ss, ctx, onComment, onChompKeep); - case Scalar.Scalar.QUOTE_DOUBLE: - return doubleQuotedString(ss.value, ctx); - case Scalar.Scalar.QUOTE_SINGLE: - return singleQuotedString(ss.value, ctx); - case Scalar.Scalar.PLAIN: - return plainString(ss, ctx, onComment, onChompKeep); - default: - return null; - } - }; - let res = _stringify(type); - if (res === null) { - const { defaultKeyType, defaultStringType } = ctx.options; - const t = (implicitKey && defaultKeyType) || defaultStringType; - res = _stringify(t); - if (res === null) - throw new Error(`Unsupported default string type ${t}`); - } - return res; -} - -exports.stringifyString = stringifyString; - - -/***/ }), - -/***/ 8234: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); - -const BREAK = Symbol('break visit'); -const SKIP = Symbol('skip children'); -const REMOVE = Symbol('remove node'); -/** - * Apply a visitor to an AST node or document. - * - * Walks through the tree (depth-first) starting from `node`, calling a - * `visitor` function with three arguments: - * - `key`: For sequence values and map `Pair`, the node's index in the - * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly. - * `null` for the root node. - * - `node`: The current node. - * - `path`: The ancestry of the current node. - * - * The return value of the visitor may be used to control the traversal: - * - `undefined` (default): Do nothing and continue - * - `visit.SKIP`: Do not visit the children of this node, continue with next - * sibling - * - `visit.BREAK`: Terminate traversal completely - * - `visit.REMOVE`: Remove the current node, then continue with the next one - * - `Node`: Replace the current node, then continue by visiting it - * - `number`: While iterating the items of a sequence or map, set the index - * of the next step. This is useful especially if the index of the current - * node has changed. - * - * If `visitor` is a single function, it will be called with all values - * encountered in the tree, including e.g. `null` values. Alternatively, - * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`, - * `Alias` and `Scalar` node. To define the same visitor function for more than - * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar) - * and `Node` (alias, map, seq & scalar) targets. 
Of all these, only the most - * specific defined one will be used for each node. - */ -function visit(node, visitor) { - const visitor_ = initVisitor(visitor); - if (Node.isDocument(node)) { - const cd = visit_(null, node.contents, visitor_, Object.freeze([node])); - if (cd === REMOVE) - node.contents = null; - } - else - visit_(null, node, visitor_, Object.freeze([])); -} -// Without the `as symbol` casts, TS declares these in the `visit` -// namespace using `var`, but then complains about that because -// `unique symbol` must be `const`. -/** Terminate visit traversal completely */ -visit.BREAK = BREAK; -/** Do not visit the children of the current node */ -visit.SKIP = SKIP; -/** Remove the current node */ -visit.REMOVE = REMOVE; -function visit_(key, node, visitor, path) { - const ctrl = callVisitor(key, node, visitor, path); - if (Node.isNode(ctrl) || Node.isPair(ctrl)) { - replaceNode(key, path, ctrl); - return visit_(key, ctrl, visitor, path); - } - if (typeof ctrl !== 'symbol') { - if (Node.isCollection(node)) { - path = Object.freeze(path.concat(node)); - for (let i = 0; i < node.items.length; ++i) { - const ci = visit_(i, node.items[i], visitor, path); - if (typeof ci === 'number') - i = ci - 1; - else if (ci === BREAK) - return BREAK; - else if (ci === REMOVE) { - node.items.splice(i, 1); - i -= 1; - } - } - } - else if (Node.isPair(node)) { - path = Object.freeze(path.concat(node)); - const ck = visit_('key', node.key, visitor, path); - if (ck === BREAK) - return BREAK; - else if (ck === REMOVE) - node.key = null; - const cv = visit_('value', node.value, visitor, path); - if (cv === BREAK) - return BREAK; - else if (cv === REMOVE) - node.value = null; - } - } - return ctrl; -} -/** - * Apply an async visitor to an AST node or document. - * - * Walks through the tree (depth-first) starting from `node`, calling a - * `visitor` function with three arguments: - * - `key`: For sequence values and map `Pair`, the node's index in the - * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly. - * `null` for the root node. - * - `node`: The current node. - * - `path`: The ancestry of the current node. - * - * The return value of the visitor may be used to control the traversal: - * - `Promise`: Must resolve to one of the following values - * - `undefined` (default): Do nothing and continue - * - `visit.SKIP`: Do not visit the children of this node, continue with next - * sibling - * - `visit.BREAK`: Terminate traversal completely - * - `visit.REMOVE`: Remove the current node, then continue with the next one - * - `Node`: Replace the current node, then continue by visiting it - * - `number`: While iterating the items of a sequence or map, set the index - * of the next step. This is useful especially if the index of the current - * node has changed. - * - * If `visitor` is a single function, it will be called with all values - * encountered in the tree, including e.g. `null` values. Alternatively, - * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`, - * `Alias` and `Scalar` node. To define the same visitor function for more than - * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar) - * and `Node` (alias, map, seq & scalar) targets. Of all these, only the most - * specific defined one will be used for each node. 
- */ -async function visitAsync(node, visitor) { - const visitor_ = initVisitor(visitor); - if (Node.isDocument(node)) { - const cd = await visitAsync_(null, node.contents, visitor_, Object.freeze([node])); - if (cd === REMOVE) - node.contents = null; - } - else - await visitAsync_(null, node, visitor_, Object.freeze([])); -} -// Without the `as symbol` casts, TS declares these in the `visit` -// namespace using `var`, but then complains about that because -// `unique symbol` must be `const`. -/** Terminate visit traversal completely */ -visitAsync.BREAK = BREAK; -/** Do not visit the children of the current node */ -visitAsync.SKIP = SKIP; -/** Remove the current node */ -visitAsync.REMOVE = REMOVE; -async function visitAsync_(key, node, visitor, path) { - const ctrl = await callVisitor(key, node, visitor, path); - if (Node.isNode(ctrl) || Node.isPair(ctrl)) { - replaceNode(key, path, ctrl); - return visitAsync_(key, ctrl, visitor, path); - } - if (typeof ctrl !== 'symbol') { - if (Node.isCollection(node)) { - path = Object.freeze(path.concat(node)); - for (let i = 0; i < node.items.length; ++i) { - const ci = await visitAsync_(i, node.items[i], visitor, path); - if (typeof ci === 'number') - i = ci - 1; - else if (ci === BREAK) - return BREAK; - else if (ci === REMOVE) { - node.items.splice(i, 1); - i -= 1; - } - } - } - else if (Node.isPair(node)) { - path = Object.freeze(path.concat(node)); - const ck = await visitAsync_('key', node.key, visitor, path); - if (ck === BREAK) - return BREAK; - else if (ck === REMOVE) - node.key = null; - const cv = await visitAsync_('value', node.value, visitor, path); - if (cv === BREAK) - return BREAK; - else if (cv === REMOVE) - node.value = null; - } - } - return ctrl; -} -function initVisitor(visitor) { - if (typeof visitor === 'object' && - (visitor.Collection || visitor.Node || visitor.Value)) { - return Object.assign({ - Alias: visitor.Node, - Map: visitor.Node, - Scalar: visitor.Node, - Seq: visitor.Node - }, visitor.Value && { - Map: visitor.Value, - Scalar: visitor.Value, - Seq: visitor.Value - }, visitor.Collection && { - Map: visitor.Collection, - Seq: visitor.Collection - }, visitor); - } - return visitor; -} -function callVisitor(key, node, visitor, path) { - if (typeof visitor === 'function') - return visitor(key, node, path); - if (Node.isMap(node)) - return visitor.Map?.(key, node, path); - if (Node.isSeq(node)) - return visitor.Seq?.(key, node, path); - if (Node.isPair(node)) - return visitor.Pair?.(key, node, path); - if (Node.isScalar(node)) - return visitor.Scalar?.(key, node, path); - if (Node.isAlias(node)) - return visitor.Alias?.(key, node, path); - return undefined; -} -function replaceNode(key, path, node) { - const parent = path[path.length - 1]; - if (Node.isCollection(parent)) { - parent.items[key] = node; - } - else if (Node.isPair(parent)) { - if (key === 'key') - parent.key = node; - else - parent.value = node; - } - else if (Node.isDocument(parent)) { - parent.contents = node; - } - else { - const pt = Node.isAlias(parent) ? 
'alias' : 'scalar'; - throw new Error(`Cannot replace node with ${pt} parent`); - } -} - -exports.visit = visit; -exports.visitAsync = visitAsync; - - -/***/ }), - -/***/ 2877: -/***/ ((module) => { - -module.exports = eval("require")("encoding"); - - -/***/ }), - -/***/ 68: -/***/ ((module) => { - -"use strict"; -module.exports = JSON.parse('[[[0,44],"disallowed_STD3_valid"],[[45,46],"valid"],[[47,47],"disallowed_STD3_valid"],[[48,57],"valid"],[[58,64],"disallowed_STD3_valid"],[[65,65],"mapped",[97]],[[66,66],"mapped",[98]],[[67,67],"mapped",[99]],[[68,68],"mapped",[100]],[[69,69],"mapped",[101]],[[70,70],"mapped",[102]],[[71,71],"mapped",[103]],[[72,72],"mapped",[104]],[[73,73],"mapped",[105]],[[74,74],"mapped",[106]],[[75,75],"mapped",[107]],[[76,76],"mapped",[108]],[[77,77],"mapped",[109]],[[78,78],"mapped",[110]],[[79,79],"mapped",[111]],[[80,80],"mapped",[112]],[[81,81],"mapped",[113]],[[82,82],"mapped",[114]],[[83,83],"mapped",[115]],[[84,84],"mapped",[116]],[[85,85],"mapped",[117]],[[86,86],"mapped",[118]],[[87,87],"mapped",[119]],[[88,88],"mapped",[120]],[[89,89],"mapped",[121]],[[90,90],"mapped",[122]],[[91,96],"disallowed_STD3_valid"],[[97,122],"valid"],[[123,127],"disallowed_STD3_valid"],[[128,159],"disallowed"],[[160,160],"disallowed_STD3_mapped",[32]],[[161,167],"valid",[],"NV8"],[[168,168],"disallowed_STD3_mapped",[32,776]],[[169,169],"valid",[],"NV8"],[[170,170],"mapped",[97]],[[171,172],"valid",[],"NV8"],[[173,173],"ignored"],[[174,174],"valid",[],"NV8"],[[175,175],"disallowed_STD3_mapped",[32,772]],[[176,177],"valid",[],"NV8"],[[178,178],"mapped",[50]],[[179,179],"mapped",[51]],[[180,180],"disallowed_STD3_mapped",[32,769]],[[181,181],"mapped",[956]],[[182,182],"valid",[],"NV8"],[[183,183],"valid"],[[184,184],"disallowed_STD3_mapped",[32,807]],[[185,185],"mapped",[49]],[[186,186],"mapped",[111]],[[187,187],"valid",[],"NV8"],[[188,188],"mapped",[49,8260,52]],[[189,189],"mapped",[49,8260,50]],[[190,190],"mapped",[51,8260,52]],[[191,191],"valid",[],"NV8"],[[192,192],"mapped",[224]],[[193,193],"mapped",[225]],[[194,194],"mapped",[226]],[[195,195],"mapped",[227]],[[196,196],"mapped",[228]],[[197,197],"mapped",[229]],[[198,198],"mapped",[230]],[[199,199],"mapped",[231]],[[200,200],"mapped",[232]],[[201,201],"mapped",[233]],[[202,202],"mapped",[234]],[[203,203],"mapped",[235]],[[204,204],"mapped",[236]],[[205,205],"mapped",[237]],[[206,206],"mapped",[238]],[[207,207],"mapped",[239]],[[208,208],"mapped",[240]],[[209,209],"mapped",[241]],[[210,210],"mapped",[242]],[[211,211],"mapped",[243]],[[212,212],"mapped",[244]],[[213,213],"mapped",[245]],[[214,214],"mapped",[246]],[[215,215],"valid",[],"NV8"],[[216,216],"mapped",[248]],[[217,217],"mapped",[249]],[[218,218],"mapped",[250]],[[219,219],"mapped",[251]],[[220,220],"mapped",[252]],[[221,221],"mapped",[253]],[[222,222],"mapped",[254]],[[223,223],"deviation",[115,115]],[[224,246],"valid"],[[247,247],"valid",[],"NV8"],[[248,255],"valid"],[[256,256],"mapped",[257]],[[257,257],"valid"],[[258,258],"mapped",[259]],[[259,259],"valid"],[[260,260],"mapped",[261]],[[261,261],"valid"],[[262,262],"mapped",[263]],[[263,263],"valid"],[[264,264],"mapped",[265]],[[265,265],"valid"],[[266,266],"mapped",[267]],[[267,267],"valid"],[[268,268],"mapped",[269]],[[269,269],"valid"],[[270,270],"mapped",[271]],[[271,271],"valid"],[[272,272],"mapped",[273]],[[273,273],"valid"],[[274,274],"mapped",[275]],[[275,275],"valid"],[[276,276],"mapped",[277]],[[277,277],"valid"],[[278,278],"mapped",[279]],[[279,279],"valid"],[[280,280],"mapped",[281]],[[281,281],"valid"
],[[282,282],"mapped",[283]],[[283,283],"valid"],[[284,284],"mapped",[285]],[[285,285],"valid"],[[286,286],"mapped",[287]],[[287,287],"valid"],[[288,288],"mapped",[289]],[[289,289],"valid"],[[290,290],"mapped",[291]],[[291,291],"valid"],[[292,292],"mapped",[293]],[[293,293],"valid"],[[294,294],"mapped",[295]],[[295,295],"valid"],[[296,296],"mapped",[297]],[[297,297],"valid"],[[298,298],"mapped",[299]],[[299,299],"valid"],[[300,300],"mapped",[301]],[[301,301],"valid"],[[302,302],"mapped",[303]],[[303,303],"valid"],[[304,304],"mapped",[105,775]],[[305,305],"valid"],[[306,307],"mapped",[105,106]],[[308,308],"mapped",[309]],[[309,309],"valid"],[[310,310],"mapped",[311]],[[311,312],"valid"],[[313,313],"mapped",[314]],[[314,314],"valid"],[[315,315],"mapped",[316]],[[316,316],"valid"],[[317,317],"mapped",[318]],[[318,318],"valid"],[[319,320],"mapped",[108,183]],[[321,321],"mapped",[322]],[[322,322],"valid"],[[323,323],"mapped",[324]],[[324,324],"valid"],[[325,325],"mapped",[326]],[[326,326],"valid"],[[327,327],"mapped",[328]],[[328,328],"valid"],[[329,329],"mapped",[700,110]],[[330,330],"mapped",[331]],[[331,331],"valid"],[[332,332],"mapped",[333]],[[333,333],"valid"],[[334,334],"mapped",[335]],[[335,335],"valid"],[[336,336],"mapped",[337]],[[337,337],"valid"],[[338,338],"mapped",[339]],[[339,339],"valid"],[[340,340],"mapped",[341]],[[341,341],"valid"],[[342,342],"mapped",[343]],[[343,343],"valid"],[[344,344],"mapped",[345]],[[345,345],"valid"],[[346,346],"mapped",[347]],[[347,347],"valid"],[[348,348],"mapped",[349]],[[349,349],"valid"],[[350,350],"mapped",[351]],[[351,351],"valid"],[[352,352],"mapped",[353]],[[353,353],"valid"],[[354,354],"mapped",[355]],[[355,355],"valid"],[[356,356],"mapped",[357]],[[357,357],"valid"],[[358,358],"mapped",[359]],[[359,359],"valid"],[[360,360],"mapped",[361]],[[361,361],"valid"],[[362,362],"mapped",[363]],[[363,363],"valid"],[[364,364],"mapped",[365]],[[365,365],"valid"],[[366,366],"mapped",[367]],[[367,367],"valid"],[[368,368],"mapped",[369]],[[369,369],"valid"],[[370,370],"mapped",[371]],[[371,371],"valid"],[[372,372],"mapped",[373]],[[373,373],"valid"],[[374,374],"mapped",[375]],[[375,375],"valid"],[[376,376],"mapped",[255]],[[377,377],"mapped",[378]],[[378,378],"valid"],[[379,379],"mapped",[380]],[[380,380],"valid"],[[381,381],"mapped",[382]],[[382,382],"valid"],[[383,383],"mapped",[115]],[[384,384],"valid"],[[385,385],"mapped",[595]],[[386,386],"mapped",[387]],[[387,387],"valid"],[[388,388],"mapped",[389]],[[389,389],"valid"],[[390,390],"mapped",[596]],[[391,391],"mapped",[392]],[[392,392],"valid"],[[393,393],"mapped",[598]],[[394,394],"mapped",[599]],[[395,395],"mapped",[396]],[[396,397],"valid"],[[398,398],"mapped",[477]],[[399,399],"mapped",[601]],[[400,400],"mapped",[603]],[[401,401],"mapped",[402]],[[402,402],"valid"],[[403,403],"mapped",[608]],[[404,404],"mapped",[611]],[[405,405],"valid"],[[406,406],"mapped",[617]],[[407,407],"mapped",[616]],[[408,408],"mapped",[409]],[[409,411],"valid"],[[412,412],"mapped",[623]],[[413,413],"mapped",[626]],[[414,414],"valid"],[[415,415],"mapped",[629]],[[416,416],"mapped",[417]],[[417,417],"valid"],[[418,418],"mapped",[419]],[[419,419],"valid"],[[420,420],"mapped",[421]],[[421,421],"valid"],[[422,422],"mapped",[640]],[[423,423],"mapped",[424]],[[424,424],"valid"],[[425,425],"mapped",[643]],[[426,427],"valid"],[[428,428],"mapped",[429]],[[429,429],"valid"],[[430,430],"mapped",[648]],[[431,431],"mapped",[432]],[[432,432],"valid"],[[433,433],"mapped",[650]],[[434,434],"mapped",[651]],[[435,435],"mapped",[436]],[[436,436
],"valid"],[[437,437],"mapped",[438]],[[438,438],"valid"],[[439,439],"mapped",[658]],[[440,440],"mapped",[441]],[[441,443],"valid"],[[444,444],"mapped",[445]],[[445,451],"valid"],[[452,454],"mapped",[100,382]],[[455,457],"mapped",[108,106]],[[458,460],"mapped",[110,106]],[[461,461],"mapped",[462]],[[462,462],"valid"],[[463,463],"mapped",[464]],[[464,464],"valid"],[[465,465],"mapped",[466]],[[466,466],"valid"],[[467,467],"mapped",[468]],[[468,468],"valid"],[[469,469],"mapped",[470]],[[470,470],"valid"],[[471,471],"mapped",[472]],[[472,472],"valid"],[[473,473],"mapped",[474]],[[474,474],"valid"],[[475,475],"mapped",[476]],[[476,477],"valid"],[[478,478],"mapped",[479]],[[479,479],"valid"],[[480,480],"mapped",[481]],[[481,481],"valid"],[[482,482],"mapped",[483]],[[483,483],"valid"],[[484,484],"mapped",[485]],[[485,485],"valid"],[[486,486],"mapped",[487]],[[487,487],"valid"],[[488,488],"mapped",[489]],[[489,489],"valid"],[[490,490],"mapped",[491]],[[491,491],"valid"],[[492,492],"mapped",[493]],[[493,493],"valid"],[[494,494],"mapped",[495]],[[495,496],"valid"],[[497,499],"mapped",[100,122]],[[500,500],"mapped",[501]],[[501,501],"valid"],[[502,502],"mapped",[405]],[[503,503],"mapped",[447]],[[504,504],"mapped",[505]],[[505,505],"valid"],[[506,506],"mapped",[507]],[[507,507],"valid"],[[508,508],"mapped",[509]],[[509,509],"valid"],[[510,510],"mapped",[511]],[[511,511],"valid"],[[512,512],"mapped",[513]],[[513,513],"valid"],[[514,514],"mapped",[515]],[[515,515],"valid"],[[516,516],"mapped",[517]],[[517,517],"valid"],[[518,518],"mapped",[519]],[[519,519],"valid"],[[520,520],"mapped",[521]],[[521,521],"valid"],[[522,522],"mapped",[523]],[[523,523],"valid"],[[524,524],"mapped",[525]],[[525,525],"valid"],[[526,526],"mapped",[527]],[[527,527],"valid"],[[528,528],"mapped",[529]],[[529,529],"valid"],[[530,530],"mapped",[531]],[[531,531],"valid"],[[532,532],"mapped",[533]],[[533,533],"valid"],[[534,534],"mapped",[535]],[[535,535],"valid"],[[536,536],"mapped",[537]],[[537,537],"valid"],[[538,538],"mapped",[539]],[[539,539],"valid"],[[540,540],"mapped",[541]],[[541,541],"valid"],[[542,542],"mapped",[543]],[[543,543],"valid"],[[544,544],"mapped",[414]],[[545,545],"valid"],[[546,546],"mapped",[547]],[[547,547],"valid"],[[548,548],"mapped",[549]],[[549,549],"valid"],[[550,550],"mapped",[551]],[[551,551],"valid"],[[552,552],"mapped",[553]],[[553,553],"valid"],[[554,554],"mapped",[555]],[[555,555],"valid"],[[556,556],"mapped",[557]],[[557,557],"valid"],[[558,558],"mapped",[559]],[[559,559],"valid"],[[560,560],"mapped",[561]],[[561,561],"valid"],[[562,562],"mapped",[563]],[[563,563],"valid"],[[564,566],"valid"],[[567,569],"valid"],[[570,570],"mapped",[11365]],[[571,571],"mapped",[572]],[[572,572],"valid"],[[573,573],"mapped",[410]],[[574,574],"mapped",[11366]],[[575,576],"valid"],[[577,577],"mapped",[578]],[[578,578],"valid"],[[579,579],"mapped",[384]],[[580,580],"mapped",[649]],[[581,581],"mapped",[652]],[[582,582],"mapped",[583]],[[583,583],"valid"],[[584,584],"mapped",[585]],[[585,585],"valid"],[[586,586],"mapped",[587]],[[587,587],"valid"],[[588,588],"mapped",[589]],[[589,589],"valid"],[[590,590],"mapped",[591]],[[591,591],"valid"],[[592,680],"valid"],[[681,685],"valid"],[[686,687],"valid"],[[688,688],"mapped",[104]],[[689,689],"mapped",[614]],[[690,690],"mapped",[106]],[[691,691],"mapped",[114]],[[692,692],"mapped",[633]],[[693,693],"mapped",[635]],[[694,694],"mapped",[641]],[[695,695],"mapped",[119]],[[696,696],"mapped",[121]],[[697,705],"valid"],[[706,709],"valid",[],"NV8"],[[710,721],"valid"],[[722,727],"val
id",[],"NV8"],[[728,728],"disallowed_STD3_mapped",[32,774]],[[729,729],"disallowed_STD3_mapped",[32,775]],[[730,730],"disallowed_STD3_mapped",[32,778]],[[731,731],"disallowed_STD3_mapped",[32,808]],[[732,732],"disallowed_STD3_mapped",[32,771]],[[733,733],"disallowed_STD3_mapped",[32,779]],[[734,734],"valid",[],"NV8"],[[735,735],"valid",[],"NV8"],[[736,736],"mapped",[611]],[[737,737],"mapped",[108]],[[738,738],"mapped",[115]],[[739,739],"mapped",[120]],[[740,740],"mapped",[661]],[[741,745],"valid",[],"NV8"],[[746,747],"valid",[],"NV8"],[[748,748],"valid"],[[749,749],"valid",[],"NV8"],[[750,750],"valid"],[[751,767],"valid",[],"NV8"],[[768,831],"valid"],[[832,832],"mapped",[768]],[[833,833],"mapped",[769]],[[834,834],"valid"],[[835,835],"mapped",[787]],[[836,836],"mapped",[776,769]],[[837,837],"mapped",[953]],[[838,846],"valid"],[[847,847],"ignored"],[[848,855],"valid"],[[856,860],"valid"],[[861,863],"valid"],[[864,865],"valid"],[[866,866],"valid"],[[867,879],"valid"],[[880,880],"mapped",[881]],[[881,881],"valid"],[[882,882],"mapped",[883]],[[883,883],"valid"],[[884,884],"mapped",[697]],[[885,885],"valid"],[[886,886],"mapped",[887]],[[887,887],"valid"],[[888,889],"disallowed"],[[890,890],"disallowed_STD3_mapped",[32,953]],[[891,893],"valid"],[[894,894],"disallowed_STD3_mapped",[59]],[[895,895],"mapped",[1011]],[[896,899],"disallowed"],[[900,900],"disallowed_STD3_mapped",[32,769]],[[901,901],"disallowed_STD3_mapped",[32,776,769]],[[902,902],"mapped",[940]],[[903,903],"mapped",[183]],[[904,904],"mapped",[941]],[[905,905],"mapped",[942]],[[906,906],"mapped",[943]],[[907,907],"disallowed"],[[908,908],"mapped",[972]],[[909,909],"disallowed"],[[910,910],"mapped",[973]],[[911,911],"mapped",[974]],[[912,912],"valid"],[[913,913],"mapped",[945]],[[914,914],"mapped",[946]],[[915,915],"mapped",[947]],[[916,916],"mapped",[948]],[[917,917],"mapped",[949]],[[918,918],"mapped",[950]],[[919,919],"mapped",[951]],[[920,920],"mapped",[952]],[[921,921],"mapped",[953]],[[922,922],"mapped",[954]],[[923,923],"mapped",[955]],[[924,924],"mapped",[956]],[[925,925],"mapped",[957]],[[926,926],"mapped",[958]],[[927,927],"mapped",[959]],[[928,928],"mapped",[960]],[[929,929],"mapped",[961]],[[930,930],"disallowed"],[[931,931],"mapped",[963]],[[932,932],"mapped",[964]],[[933,933],"mapped",[965]],[[934,934],"mapped",[966]],[[935,935],"mapped",[967]],[[936,936],"mapped",[968]],[[937,937],"mapped",[969]],[[938,938],"mapped",[970]],[[939,939],"mapped",[971]],[[940,961],"valid"],[[962,962],"deviation",[963]],[[963,974],"valid"],[[975,975],"mapped",[983]],[[976,976],"mapped",[946]],[[977,977],"mapped",[952]],[[978,978],"mapped",[965]],[[979,979],"mapped",[973]],[[980,980],"mapped",[971]],[[981,981],"mapped",[966]],[[982,982],"mapped",[960]],[[983,983],"valid"],[[984,984],"mapped",[985]],[[985,985],"valid"],[[986,986],"mapped",[987]],[[987,987],"valid"],[[988,988],"mapped",[989]],[[989,989],"valid"],[[990,990],"mapped",[991]],[[991,991],"valid"],[[992,992],"mapped",[993]],[[993,993],"valid"],[[994,994],"mapped",[995]],[[995,995],"valid"],[[996,996],"mapped",[997]],[[997,997],"valid"],[[998,998],"mapped",[999]],[[999,999],"valid"],[[1000,1000],"mapped",[1001]],[[1001,1001],"valid"],[[1002,1002],"mapped",[1003]],[[1003,1003],"valid"],[[1004,1004],"mapped",[1005]],[[1005,1005],"valid"],[[1006,1006],"mapped",[1007]],[[1007,1007],"valid"],[[1008,1008],"mapped",[954]],[[1009,1009],"mapped",[961]],[[1010,1010],"mapped",[963]],[[1011,1011],"valid"],[[1012,1012],"mapped",[952]],[[1013,1013],"mapped",[949]],[[1014,1014],"valid",[],"NV8"],[[10
15,1015],"mapped",[1016]],[[1016,1016],"valid"],[[1017,1017],"mapped",[963]],[[1018,1018],"mapped",[1019]],[[1019,1019],"valid"],[[1020,1020],"valid"],[[1021,1021],"mapped",[891]],[[1022,1022],"mapped",[892]],[[1023,1023],"mapped",[893]],[[1024,1024],"mapped",[1104]],[[1025,1025],"mapped",[1105]],[[1026,1026],"mapped",[1106]],[[1027,1027],"mapped",[1107]],[[1028,1028],"mapped",[1108]],[[1029,1029],"mapped",[1109]],[[1030,1030],"mapped",[1110]],[[1031,1031],"mapped",[1111]],[[1032,1032],"mapped",[1112]],[[1033,1033],"mapped",[1113]],[[1034,1034],"mapped",[1114]],[[1035,1035],"mapped",[1115]],[[1036,1036],"mapped",[1116]],[[1037,1037],"mapped",[1117]],[[1038,1038],"mapped",[1118]],[[1039,1039],"mapped",[1119]],[[1040,1040],"mapped",[1072]],[[1041,1041],"mapped",[1073]],[[1042,1042],"mapped",[1074]],[[1043,1043],"mapped",[1075]],[[1044,1044],"mapped",[1076]],[[1045,1045],"mapped",[1077]],[[1046,1046],"mapped",[1078]],[[1047,1047],"mapped",[1079]],[[1048,1048],"mapped",[1080]],[[1049,1049],"mapped",[1081]],[[1050,1050],"mapped",[1082]],[[1051,1051],"mapped",[1083]],[[1052,1052],"mapped",[1084]],[[1053,1053],"mapped",[1085]],[[1054,1054],"mapped",[1086]],[[1055,1055],"mapped",[1087]],[[1056,1056],"mapped",[1088]],[[1057,1057],"mapped",[1089]],[[1058,1058],"mapped",[1090]],[[1059,1059],"mapped",[1091]],[[1060,1060],"mapped",[1092]],[[1061,1061],"mapped",[1093]],[[1062,1062],"mapped",[1094]],[[1063,1063],"mapped",[1095]],[[1064,1064],"mapped",[1096]],[[1065,1065],"mapped",[1097]],[[1066,1066],"mapped",[1098]],[[1067,1067],"mapped",[1099]],[[1068,1068],"mapped",[1100]],[[1069,1069],"mapped",[1101]],[[1070,1070],"mapped",[1102]],[[1071,1071],"mapped",[1103]],[[1072,1103],"valid"],[[1104,1104],"valid"],[[1105,1116],"valid"],[[1117,1117],"valid"],[[1118,1119],"valid"],[[1120,1120],"mapped",[1121]],[[1121,1121],"valid"],[[1122,1122],"mapped",[1123]],[[1123,1123],"valid"],[[1124,1124],"mapped",[1125]],[[1125,1125],"valid"],[[1126,1126],"mapped",[1127]],[[1127,1127],"valid"],[[1128,1128],"mapped",[1129]],[[1129,1129],"valid"],[[1130,1130],"mapped",[1131]],[[1131,1131],"valid"],[[1132,1132],"mapped",[1133]],[[1133,1133],"valid"],[[1134,1134],"mapped",[1135]],[[1135,1135],"valid"],[[1136,1136],"mapped",[1137]],[[1137,1137],"valid"],[[1138,1138],"mapped",[1139]],[[1139,1139],"valid"],[[1140,1140],"mapped",[1141]],[[1141,1141],"valid"],[[1142,1142],"mapped",[1143]],[[1143,1143],"valid"],[[1144,1144],"mapped",[1145]],[[1145,1145],"valid"],[[1146,1146],"mapped",[1147]],[[1147,1147],"valid"],[[1148,1148],"mapped",[1149]],[[1149,1149],"valid"],[[1150,1150],"mapped",[1151]],[[1151,1151],"valid"],[[1152,1152],"mapped",[1153]],[[1153,1153],"valid"],[[1154,1154],"valid",[],"NV8"],[[1155,1158],"valid"],[[1159,1159],"valid"],[[1160,1161],"valid",[],"NV8"],[[1162,1162],"mapped",[1163]],[[1163,1163],"valid"],[[1164,1164],"mapped",[1165]],[[1165,1165],"valid"],[[1166,1166],"mapped",[1167]],[[1167,1167],"valid"],[[1168,1168],"mapped",[1169]],[[1169,1169],"valid"],[[1170,1170],"mapped",[1171]],[[1171,1171],"valid"],[[1172,1172],"mapped",[1173]],[[1173,1173],"valid"],[[1174,1174],"mapped",[1175]],[[1175,1175],"valid"],[[1176,1176],"mapped",[1177]],[[1177,1177],"valid"],[[1178,1178],"mapped",[1179]],[[1179,1179],"valid"],[[1180,1180],"mapped",[1181]],[[1181,1181],"valid"],[[1182,1182],"mapped",[1183]],[[1183,1183],"valid"],[[1184,1184],"mapped",[1185]],[[1185,1185],"valid"],[[1186,1186],"mapped",[1187]],[[1187,1187],"valid"],[[1188,1188],"mapped",[1189]],[[1189,1189],"valid"],[[1190,1190],"mapped",[1191]],[[1191,1191],"valid
"],[[1192,1192],"mapped",[1193]],[[1193,1193],"valid"],[[1194,1194],"mapped",[1195]],[[1195,1195],"valid"],[[1196,1196],"mapped",[1197]],[[1197,1197],"valid"],[[1198,1198],"mapped",[1199]],[[1199,1199],"valid"],[[1200,1200],"mapped",[1201]],[[1201,1201],"valid"],[[1202,1202],"mapped",[1203]],[[1203,1203],"valid"],[[1204,1204],"mapped",[1205]],[[1205,1205],"valid"],[[1206,1206],"mapped",[1207]],[[1207,1207],"valid"],[[1208,1208],"mapped",[1209]],[[1209,1209],"valid"],[[1210,1210],"mapped",[1211]],[[1211,1211],"valid"],[[1212,1212],"mapped",[1213]],[[1213,1213],"valid"],[[1214,1214],"mapped",[1215]],[[1215,1215],"valid"],[[1216,1216],"disallowed"],[[1217,1217],"mapped",[1218]],[[1218,1218],"valid"],[[1219,1219],"mapped",[1220]],[[1220,1220],"valid"],[[1221,1221],"mapped",[1222]],[[1222,1222],"valid"],[[1223,1223],"mapped",[1224]],[[1224,1224],"valid"],[[1225,1225],"mapped",[1226]],[[1226,1226],"valid"],[[1227,1227],"mapped",[1228]],[[1228,1228],"valid"],[[1229,1229],"mapped",[1230]],[[1230,1230],"valid"],[[1231,1231],"valid"],[[1232,1232],"mapped",[1233]],[[1233,1233],"valid"],[[1234,1234],"mapped",[1235]],[[1235,1235],"valid"],[[1236,1236],"mapped",[1237]],[[1237,1237],"valid"],[[1238,1238],"mapped",[1239]],[[1239,1239],"valid"],[[1240,1240],"mapped",[1241]],[[1241,1241],"valid"],[[1242,1242],"mapped",[1243]],[[1243,1243],"valid"],[[1244,1244],"mapped",[1245]],[[1245,1245],"valid"],[[1246,1246],"mapped",[1247]],[[1247,1247],"valid"],[[1248,1248],"mapped",[1249]],[[1249,1249],"valid"],[[1250,1250],"mapped",[1251]],[[1251,1251],"valid"],[[1252,1252],"mapped",[1253]],[[1253,1253],"valid"],[[1254,1254],"mapped",[1255]],[[1255,1255],"valid"],[[1256,1256],"mapped",[1257]],[[1257,1257],"valid"],[[1258,1258],"mapped",[1259]],[[1259,1259],"valid"],[[1260,1260],"mapped",[1261]],[[1261,1261],"valid"],[[1262,1262],"mapped",[1263]],[[1263,1263],"valid"],[[1264,1264],"mapped",[1265]],[[1265,1265],"valid"],[[1266,1266],"mapped",[1267]],[[1267,1267],"valid"],[[1268,1268],"mapped",[1269]],[[1269,1269],"valid"],[[1270,1270],"mapped",[1271]],[[1271,1271],"valid"],[[1272,1272],"mapped",[1273]],[[1273,1273],"valid"],[[1274,1274],"mapped",[1275]],[[1275,1275],"valid"],[[1276,1276],"mapped",[1277]],[[1277,1277],"valid"],[[1278,1278],"mapped",[1279]],[[1279,1279],"valid"],[[1280,1280],"mapped",[1281]],[[1281,1281],"valid"],[[1282,1282],"mapped",[1283]],[[1283,1283],"valid"],[[1284,1284],"mapped",[1285]],[[1285,1285],"valid"],[[1286,1286],"mapped",[1287]],[[1287,1287],"valid"],[[1288,1288],"mapped",[1289]],[[1289,1289],"valid"],[[1290,1290],"mapped",[1291]],[[1291,1291],"valid"],[[1292,1292],"mapped",[1293]],[[1293,1293],"valid"],[[1294,1294],"mapped",[1295]],[[1295,1295],"valid"],[[1296,1296],"mapped",[1297]],[[1297,1297],"valid"],[[1298,1298],"mapped",[1299]],[[1299,1299],"valid"],[[1300,1300],"mapped",[1301]],[[1301,1301],"valid"],[[1302,1302],"mapped",[1303]],[[1303,1303],"valid"],[[1304,1304],"mapped",[1305]],[[1305,1305],"valid"],[[1306,1306],"mapped",[1307]],[[1307,1307],"valid"],[[1308,1308],"mapped",[1309]],[[1309,1309],"valid"],[[1310,1310],"mapped",[1311]],[[1311,1311],"valid"],[[1312,1312],"mapped",[1313]],[[1313,1313],"valid"],[[1314,1314],"mapped",[1315]],[[1315,1315],"valid"],[[1316,1316],"mapped",[1317]],[[1317,1317],"valid"],[[1318,1318],"mapped",[1319]],[[1319,1319],"valid"],[[1320,1320],"mapped",[1321]],[[1321,1321],"valid"],[[1322,1322],"mapped",[1323]],[[1323,1323],"valid"],[[1324,1324],"mapped",[1325]],[[1325,1325],"valid"],[[1326,1326],"mapped",[1327]],[[1327,1327],"valid"],[[1328,1328],"disal
lowed"],[[1329,1329],"mapped",[1377]],[[1330,1330],"mapped",[1378]],[[1331,1331],"mapped",[1379]],[[1332,1332],"mapped",[1380]],[[1333,1333],"mapped",[1381]],[[1334,1334],"mapped",[1382]],[[1335,1335],"mapped",[1383]],[[1336,1336],"mapped",[1384]],[[1337,1337],"mapped",[1385]],[[1338,1338],"mapped",[1386]],[[1339,1339],"mapped",[1387]],[[1340,1340],"mapped",[1388]],[[1341,1341],"mapped",[1389]],[[1342,1342],"mapped",[1390]],[[1343,1343],"mapped",[1391]],[[1344,1344],"mapped",[1392]],[[1345,1345],"mapped",[1393]],[[1346,1346],"mapped",[1394]],[[1347,1347],"mapped",[1395]],[[1348,1348],"mapped",[1396]],[[1349,1349],"mapped",[1397]],[[1350,1350],"mapped",[1398]],[[1351,1351],"mapped",[1399]],[[1352,1352],"mapped",[1400]],[[1353,1353],"mapped",[1401]],[[1354,1354],"mapped",[1402]],[[1355,1355],"mapped",[1403]],[[1356,1356],"mapped",[1404]],[[1357,1357],"mapped",[1405]],[[1358,1358],"mapped",[1406]],[[1359,1359],"mapped",[1407]],[[1360,1360],"mapped",[1408]],[[1361,1361],"mapped",[1409]],[[1362,1362],"mapped",[1410]],[[1363,1363],"mapped",[1411]],[[1364,1364],"mapped",[1412]],[[1365,1365],"mapped",[1413]],[[1366,1366],"mapped",[1414]],[[1367,1368],"disallowed"],[[1369,1369],"valid"],[[1370,1375],"valid",[],"NV8"],[[1376,1376],"disallowed"],[[1377,1414],"valid"],[[1415,1415],"mapped",[1381,1410]],[[1416,1416],"disallowed"],[[1417,1417],"valid",[],"NV8"],[[1418,1418],"valid",[],"NV8"],[[1419,1420],"disallowed"],[[1421,1422],"valid",[],"NV8"],[[1423,1423],"valid",[],"NV8"],[[1424,1424],"disallowed"],[[1425,1441],"valid"],[[1442,1442],"valid"],[[1443,1455],"valid"],[[1456,1465],"valid"],[[1466,1466],"valid"],[[1467,1469],"valid"],[[1470,1470],"valid",[],"NV8"],[[1471,1471],"valid"],[[1472,1472],"valid",[],"NV8"],[[1473,1474],"valid"],[[1475,1475],"valid",[],"NV8"],[[1476,1476],"valid"],[[1477,1477],"valid"],[[1478,1478],"valid",[],"NV8"],[[1479,1479],"valid"],[[1480,1487],"disallowed"],[[1488,1514],"valid"],[[1515,1519],"disallowed"],[[1520,1524],"valid"],[[1525,1535],"disallowed"],[[1536,1539],"disallowed"],[[1540,1540],"disallowed"],[[1541,1541],"disallowed"],[[1542,1546],"valid",[],"NV8"],[[1547,1547],"valid",[],"NV8"],[[1548,1548],"valid",[],"NV8"],[[1549,1551],"valid",[],"NV8"],[[1552,1557],"valid"],[[1558,1562],"valid"],[[1563,1563],"valid",[],"NV8"],[[1564,1564],"disallowed"],[[1565,1565],"disallowed"],[[1566,1566],"valid",[],"NV8"],[[1567,1567],"valid",[],"NV8"],[[1568,1568],"valid"],[[1569,1594],"valid"],[[1595,1599],"valid"],[[1600,1600],"valid",[],"NV8"],[[1601,1618],"valid"],[[1619,1621],"valid"],[[1622,1624],"valid"],[[1625,1630],"valid"],[[1631,1631],"valid"],[[1632,1641],"valid"],[[1642,1645],"valid",[],"NV8"],[[1646,1647],"valid"],[[1648,1652],"valid"],[[1653,1653],"mapped",[1575,1652]],[[1654,1654],"mapped",[1608,1652]],[[1655,1655],"mapped",[1735,1652]],[[1656,1656],"mapped",[1610,1652]],[[1657,1719],"valid"],[[1720,1721],"valid"],[[1722,1726],"valid"],[[1727,1727],"valid"],[[1728,1742],"valid"],[[1743,1743],"valid"],[[1744,1747],"valid"],[[1748,1748],"valid",[],"NV8"],[[1749,1756],"valid"],[[1757,1757],"disallowed"],[[1758,1758],"valid",[],"NV8"],[[1759,1768],"valid"],[[1769,1769],"valid",[],"NV8"],[[1770,1773],"valid"],[[1774,1775],"valid"],[[1776,1785],"valid"],[[1786,1790],"valid"],[[1791,1791],"valid"],[[1792,1805],"valid",[],"NV8"],[[1806,1806],"disallowed"],[[1807,1807],"disallowed"],[[1808,1836],"valid"],[[1837,1839],"valid"],[[1840,1866],"valid"],[[1867,1868],"disallowed"],[[1869,1871],"valid"],[[1872,1901],"valid"],[[1902,1919],"valid"],[[1920,1968],"valid"],[[1969,1969],
"valid"],[[1970,1983],"disallowed"],[[1984,2037],"valid"],[[2038,2042],"valid",[],"NV8"],[[2043,2047],"disallowed"],[[2048,2093],"valid"],[[2094,2095],"disallowed"],[[2096,2110],"valid",[],"NV8"],[[2111,2111],"disallowed"],[[2112,2139],"valid"],[[2140,2141],"disallowed"],[[2142,2142],"valid",[],"NV8"],[[2143,2207],"disallowed"],[[2208,2208],"valid"],[[2209,2209],"valid"],[[2210,2220],"valid"],[[2221,2226],"valid"],[[2227,2228],"valid"],[[2229,2274],"disallowed"],[[2275,2275],"valid"],[[2276,2302],"valid"],[[2303,2303],"valid"],[[2304,2304],"valid"],[[2305,2307],"valid"],[[2308,2308],"valid"],[[2309,2361],"valid"],[[2362,2363],"valid"],[[2364,2381],"valid"],[[2382,2382],"valid"],[[2383,2383],"valid"],[[2384,2388],"valid"],[[2389,2389],"valid"],[[2390,2391],"valid"],[[2392,2392],"mapped",[2325,2364]],[[2393,2393],"mapped",[2326,2364]],[[2394,2394],"mapped",[2327,2364]],[[2395,2395],"mapped",[2332,2364]],[[2396,2396],"mapped",[2337,2364]],[[2397,2397],"mapped",[2338,2364]],[[2398,2398],"mapped",[2347,2364]],[[2399,2399],"mapped",[2351,2364]],[[2400,2403],"valid"],[[2404,2405],"valid",[],"NV8"],[[2406,2415],"valid"],[[2416,2416],"valid",[],"NV8"],[[2417,2418],"valid"],[[2419,2423],"valid"],[[2424,2424],"valid"],[[2425,2426],"valid"],[[2427,2428],"valid"],[[2429,2429],"valid"],[[2430,2431],"valid"],[[2432,2432],"valid"],[[2433,2435],"valid"],[[2436,2436],"disallowed"],[[2437,2444],"valid"],[[2445,2446],"disallowed"],[[2447,2448],"valid"],[[2449,2450],"disallowed"],[[2451,2472],"valid"],[[2473,2473],"disallowed"],[[2474,2480],"valid"],[[2481,2481],"disallowed"],[[2482,2482],"valid"],[[2483,2485],"disallowed"],[[2486,2489],"valid"],[[2490,2491],"disallowed"],[[2492,2492],"valid"],[[2493,2493],"valid"],[[2494,2500],"valid"],[[2501,2502],"disallowed"],[[2503,2504],"valid"],[[2505,2506],"disallowed"],[[2507,2509],"valid"],[[2510,2510],"valid"],[[2511,2518],"disallowed"],[[2519,2519],"valid"],[[2520,2523],"disallowed"],[[2524,2524],"mapped",[2465,2492]],[[2525,2525],"mapped",[2466,2492]],[[2526,2526],"disallowed"],[[2527,2527],"mapped",[2479,2492]],[[2528,2531],"valid"],[[2532,2533],"disallowed"],[[2534,2545],"valid"],[[2546,2554],"valid",[],"NV8"],[[2555,2555],"valid",[],"NV8"],[[2556,2560],"disallowed"],[[2561,2561],"valid"],[[2562,2562],"valid"],[[2563,2563],"valid"],[[2564,2564],"disallowed"],[[2565,2570],"valid"],[[2571,2574],"disallowed"],[[2575,2576],"valid"],[[2577,2578],"disallowed"],[[2579,2600],"valid"],[[2601,2601],"disallowed"],[[2602,2608],"valid"],[[2609,2609],"disallowed"],[[2610,2610],"valid"],[[2611,2611],"mapped",[2610,2620]],[[2612,2612],"disallowed"],[[2613,2613],"valid"],[[2614,2614],"mapped",[2616,2620]],[[2615,2615],"disallowed"],[[2616,2617],"valid"],[[2618,2619],"disallowed"],[[2620,2620],"valid"],[[2621,2621],"disallowed"],[[2622,2626],"valid"],[[2627,2630],"disallowed"],[[2631,2632],"valid"],[[2633,2634],"disallowed"],[[2635,2637],"valid"],[[2638,2640],"disallowed"],[[2641,2641],"valid"],[[2642,2648],"disallowed"],[[2649,2649],"mapped",[2582,2620]],[[2650,2650],"mapped",[2583,2620]],[[2651,2651],"mapped",[2588,2620]],[[2652,2652],"valid"],[[2653,2653],"disallowed"],[[2654,2654],"mapped",[2603,2620]],[[2655,2661],"disallowed"],[[2662,2676],"valid"],[[2677,2677],"valid"],[[2678,2688],"disallowed"],[[2689,2691],"valid"],[[2692,2692],"disallowed"],[[2693,2699],"valid"],[[2700,2700],"valid"],[[2701,2701],"valid"],[[2702,2702],"disallowed"],[[2703,2705],"valid"],[[2706,2706],"disallowed"],[[2707,2728],"valid"],[[2729,2729],"disallowed"],[[2730,2736],"valid"],[[2737,2737],"disallo
wed"],[[2738,2739],"valid"],[[2740,2740],"disallowed"],[[2741,2745],"valid"],[[2746,2747],"disallowed"],[[2748,2757],"valid"],[[2758,2758],"disallowed"],[[2759,2761],"valid"],[[2762,2762],"disallowed"],[[2763,2765],"valid"],[[2766,2767],"disallowed"],[[2768,2768],"valid"],[[2769,2783],"disallowed"],[[2784,2784],"valid"],[[2785,2787],"valid"],[[2788,2789],"disallowed"],[[2790,2799],"valid"],[[2800,2800],"valid",[],"NV8"],[[2801,2801],"valid",[],"NV8"],[[2802,2808],"disallowed"],[[2809,2809],"valid"],[[2810,2816],"disallowed"],[[2817,2819],"valid"],[[2820,2820],"disallowed"],[[2821,2828],"valid"],[[2829,2830],"disallowed"],[[2831,2832],"valid"],[[2833,2834],"disallowed"],[[2835,2856],"valid"],[[2857,2857],"disallowed"],[[2858,2864],"valid"],[[2865,2865],"disallowed"],[[2866,2867],"valid"],[[2868,2868],"disallowed"],[[2869,2869],"valid"],[[2870,2873],"valid"],[[2874,2875],"disallowed"],[[2876,2883],"valid"],[[2884,2884],"valid"],[[2885,2886],"disallowed"],[[2887,2888],"valid"],[[2889,2890],"disallowed"],[[2891,2893],"valid"],[[2894,2901],"disallowed"],[[2902,2903],"valid"],[[2904,2907],"disallowed"],[[2908,2908],"mapped",[2849,2876]],[[2909,2909],"mapped",[2850,2876]],[[2910,2910],"disallowed"],[[2911,2913],"valid"],[[2914,2915],"valid"],[[2916,2917],"disallowed"],[[2918,2927],"valid"],[[2928,2928],"valid",[],"NV8"],[[2929,2929],"valid"],[[2930,2935],"valid",[],"NV8"],[[2936,2945],"disallowed"],[[2946,2947],"valid"],[[2948,2948],"disallowed"],[[2949,2954],"valid"],[[2955,2957],"disallowed"],[[2958,2960],"valid"],[[2961,2961],"disallowed"],[[2962,2965],"valid"],[[2966,2968],"disallowed"],[[2969,2970],"valid"],[[2971,2971],"disallowed"],[[2972,2972],"valid"],[[2973,2973],"disallowed"],[[2974,2975],"valid"],[[2976,2978],"disallowed"],[[2979,2980],"valid"],[[2981,2983],"disallowed"],[[2984,2986],"valid"],[[2987,2989],"disallowed"],[[2990,2997],"valid"],[[2998,2998],"valid"],[[2999,3001],"valid"],[[3002,3005],"disallowed"],[[3006,3010],"valid"],[[3011,3013],"disallowed"],[[3014,3016],"valid"],[[3017,3017],"disallowed"],[[3018,3021],"valid"],[[3022,3023],"disallowed"],[[3024,3024],"valid"],[[3025,3030],"disallowed"],[[3031,3031],"valid"],[[3032,3045],"disallowed"],[[3046,3046],"valid"],[[3047,3055],"valid"],[[3056,3058],"valid",[],"NV8"],[[3059,3066],"valid",[],"NV8"],[[3067,3071],"disallowed"],[[3072,3072],"valid"],[[3073,3075],"valid"],[[3076,3076],"disallowed"],[[3077,3084],"valid"],[[3085,3085],"disallowed"],[[3086,3088],"valid"],[[3089,3089],"disallowed"],[[3090,3112],"valid"],[[3113,3113],"disallowed"],[[3114,3123],"valid"],[[3124,3124],"valid"],[[3125,3129],"valid"],[[3130,3132],"disallowed"],[[3133,3133],"valid"],[[3134,3140],"valid"],[[3141,3141],"disallowed"],[[3142,3144],"valid"],[[3145,3145],"disallowed"],[[3146,3149],"valid"],[[3150,3156],"disallowed"],[[3157,3158],"valid"],[[3159,3159],"disallowed"],[[3160,3161],"valid"],[[3162,3162],"valid"],[[3163,3167],"disallowed"],[[3168,3169],"valid"],[[3170,3171],"valid"],[[3172,3173],"disallowed"],[[3174,3183],"valid"],[[3184,3191],"disallowed"],[[3192,3199],"valid",[],"NV8"],[[3200,3200],"disallowed"],[[3201,3201],"valid"],[[3202,3203],"valid"],[[3204,3204],"disallowed"],[[3205,3212],"valid"],[[3213,3213],"disallowed"],[[3214,3216],"valid"],[[3217,3217],"disallowed"],[[3218,3240],"valid"],[[3241,3241],"disallowed"],[[3242,3251],"valid"],[[3252,3252],"disallowed"],[[3253,3257],"valid"],[[3258,3259],"disallowed"],[[3260,3261],"valid"],[[3262,3268],"valid"],[[3269,3269],"disallowed"],[[3270,3272],"valid"],[[3273,3273],"disallowed"],[[3274,3277],"
valid"],[[3278,3284],"disallowed"],[[3285,3286],"valid"],[[3287,3293],"disallowed"],[[3294,3294],"valid"],[[3295,3295],"disallowed"],[[3296,3297],"valid"],[[3298,3299],"valid"],[[3300,3301],"disallowed"],[[3302,3311],"valid"],[[3312,3312],"disallowed"],[[3313,3314],"valid"],[[3315,3328],"disallowed"],[[3329,3329],"valid"],[[3330,3331],"valid"],[[3332,3332],"disallowed"],[[3333,3340],"valid"],[[3341,3341],"disallowed"],[[3342,3344],"valid"],[[3345,3345],"disallowed"],[[3346,3368],"valid"],[[3369,3369],"valid"],[[3370,3385],"valid"],[[3386,3386],"valid"],[[3387,3388],"disallowed"],[[3389,3389],"valid"],[[3390,3395],"valid"],[[3396,3396],"valid"],[[3397,3397],"disallowed"],[[3398,3400],"valid"],[[3401,3401],"disallowed"],[[3402,3405],"valid"],[[3406,3406],"valid"],[[3407,3414],"disallowed"],[[3415,3415],"valid"],[[3416,3422],"disallowed"],[[3423,3423],"valid"],[[3424,3425],"valid"],[[3426,3427],"valid"],[[3428,3429],"disallowed"],[[3430,3439],"valid"],[[3440,3445],"valid",[],"NV8"],[[3446,3448],"disallowed"],[[3449,3449],"valid",[],"NV8"],[[3450,3455],"valid"],[[3456,3457],"disallowed"],[[3458,3459],"valid"],[[3460,3460],"disallowed"],[[3461,3478],"valid"],[[3479,3481],"disallowed"],[[3482,3505],"valid"],[[3506,3506],"disallowed"],[[3507,3515],"valid"],[[3516,3516],"disallowed"],[[3517,3517],"valid"],[[3518,3519],"disallowed"],[[3520,3526],"valid"],[[3527,3529],"disallowed"],[[3530,3530],"valid"],[[3531,3534],"disallowed"],[[3535,3540],"valid"],[[3541,3541],"disallowed"],[[3542,3542],"valid"],[[3543,3543],"disallowed"],[[3544,3551],"valid"],[[3552,3557],"disallowed"],[[3558,3567],"valid"],[[3568,3569],"disallowed"],[[3570,3571],"valid"],[[3572,3572],"valid",[],"NV8"],[[3573,3584],"disallowed"],[[3585,3634],"valid"],[[3635,3635],"mapped",[3661,3634]],[[3636,3642],"valid"],[[3643,3646],"disallowed"],[[3647,3647],"valid",[],"NV8"],[[3648,3662],"valid"],[[3663,3663],"valid",[],"NV8"],[[3664,3673],"valid"],[[3674,3675],"valid",[],"NV8"],[[3676,3712],"disallowed"],[[3713,3714],"valid"],[[3715,3715],"disallowed"],[[3716,3716],"valid"],[[3717,3718],"disallowed"],[[3719,3720],"valid"],[[3721,3721],"disallowed"],[[3722,3722],"valid"],[[3723,3724],"disallowed"],[[3725,3725],"valid"],[[3726,3731],"disallowed"],[[3732,3735],"valid"],[[3736,3736],"disallowed"],[[3737,3743],"valid"],[[3744,3744],"disallowed"],[[3745,3747],"valid"],[[3748,3748],"disallowed"],[[3749,3749],"valid"],[[3750,3750],"disallowed"],[[3751,3751],"valid"],[[3752,3753],"disallowed"],[[3754,3755],"valid"],[[3756,3756],"disallowed"],[[3757,3762],"valid"],[[3763,3763],"mapped",[3789,3762]],[[3764,3769],"valid"],[[3770,3770],"disallowed"],[[3771,3773],"valid"],[[3774,3775],"disallowed"],[[3776,3780],"valid"],[[3781,3781],"disallowed"],[[3782,3782],"valid"],[[3783,3783],"disallowed"],[[3784,3789],"valid"],[[3790,3791],"disallowed"],[[3792,3801],"valid"],[[3802,3803],"disallowed"],[[3804,3804],"mapped",[3755,3737]],[[3805,3805],"mapped",[3755,3745]],[[3806,3807],"valid"],[[3808,3839],"disallowed"],[[3840,3840],"valid"],[[3841,3850],"valid",[],"NV8"],[[3851,3851],"valid"],[[3852,3852],"mapped",[3851]],[[3853,3863],"valid",[],"NV8"],[[3864,3865],"valid"],[[3866,3871],"valid",[],"NV8"],[[3872,3881],"valid"],[[3882,3892],"valid",[],"NV8"],[[3893,3893],"valid"],[[3894,3894],"valid",[],"NV8"],[[3895,3895],"valid"],[[3896,3896],"valid",[],"NV8"],[[3897,3897],"valid"],[[3898,3901],"valid",[],"NV8"],[[3902,3906],"valid"],[[3907,3907],"mapped",[3906,4023]],[[3908,3911],"valid"],[[3912,3912],"disallowed"],[[3913,3916],"valid"],[[3917,3917],"mapped",[3916
,4023]],[[3918,3921],"valid"],[[3922,3922],"mapped",[3921,4023]],[[3923,3926],"valid"],[[3927,3927],"mapped",[3926,4023]],[[3928,3931],"valid"],[[3932,3932],"mapped",[3931,4023]],[[3933,3944],"valid"],[[3945,3945],"mapped",[3904,4021]],[[3946,3946],"valid"],[[3947,3948],"valid"],[[3949,3952],"disallowed"],[[3953,3954],"valid"],[[3955,3955],"mapped",[3953,3954]],[[3956,3956],"valid"],[[3957,3957],"mapped",[3953,3956]],[[3958,3958],"mapped",[4018,3968]],[[3959,3959],"mapped",[4018,3953,3968]],[[3960,3960],"mapped",[4019,3968]],[[3961,3961],"mapped",[4019,3953,3968]],[[3962,3968],"valid"],[[3969,3969],"mapped",[3953,3968]],[[3970,3972],"valid"],[[3973,3973],"valid",[],"NV8"],[[3974,3979],"valid"],[[3980,3983],"valid"],[[3984,3986],"valid"],[[3987,3987],"mapped",[3986,4023]],[[3988,3989],"valid"],[[3990,3990],"valid"],[[3991,3991],"valid"],[[3992,3992],"disallowed"],[[3993,3996],"valid"],[[3997,3997],"mapped",[3996,4023]],[[3998,4001],"valid"],[[4002,4002],"mapped",[4001,4023]],[[4003,4006],"valid"],[[4007,4007],"mapped",[4006,4023]],[[4008,4011],"valid"],[[4012,4012],"mapped",[4011,4023]],[[4013,4013],"valid"],[[4014,4016],"valid"],[[4017,4023],"valid"],[[4024,4024],"valid"],[[4025,4025],"mapped",[3984,4021]],[[4026,4028],"valid"],[[4029,4029],"disallowed"],[[4030,4037],"valid",[],"NV8"],[[4038,4038],"valid"],[[4039,4044],"valid",[],"NV8"],[[4045,4045],"disallowed"],[[4046,4046],"valid",[],"NV8"],[[4047,4047],"valid",[],"NV8"],[[4048,4049],"valid",[],"NV8"],[[4050,4052],"valid",[],"NV8"],[[4053,4056],"valid",[],"NV8"],[[4057,4058],"valid",[],"NV8"],[[4059,4095],"disallowed"],[[4096,4129],"valid"],[[4130,4130],"valid"],[[4131,4135],"valid"],[[4136,4136],"valid"],[[4137,4138],"valid"],[[4139,4139],"valid"],[[4140,4146],"valid"],[[4147,4149],"valid"],[[4150,4153],"valid"],[[4154,4159],"valid"],[[4160,4169],"valid"],[[4170,4175],"valid",[],"NV8"],[[4176,4185],"valid"],[[4186,4249],"valid"],[[4250,4253],"valid"],[[4254,4255],"valid",[],"NV8"],[[4256,4293],"disallowed"],[[4294,4294],"disallowed"],[[4295,4295],"mapped",[11559]],[[4296,4300],"disallowed"],[[4301,4301],"mapped",[11565]],[[4302,4303],"disallowed"],[[4304,4342],"valid"],[[4343,4344],"valid"],[[4345,4346],"valid"],[[4347,4347],"valid",[],"NV8"],[[4348,4348],"mapped",[4316]],[[4349,4351],"valid"],[[4352,4441],"valid",[],"NV8"],[[4442,4446],"valid",[],"NV8"],[[4447,4448],"disallowed"],[[4449,4514],"valid",[],"NV8"],[[4515,4519],"valid",[],"NV8"],[[4520,4601],"valid",[],"NV8"],[[4602,4607],"valid",[],"NV8"],[[4608,4614],"valid"],[[4615,4615],"valid"],[[4616,4678],"valid"],[[4679,4679],"valid"],[[4680,4680],"valid"],[[4681,4681],"disallowed"],[[4682,4685],"valid"],[[4686,4687],"disallowed"],[[4688,4694],"valid"],[[4695,4695],"disallowed"],[[4696,4696],"valid"],[[4697,4697],"disallowed"],[[4698,4701],"valid"],[[4702,4703],"disallowed"],[[4704,4742],"valid"],[[4743,4743],"valid"],[[4744,4744],"valid"],[[4745,4745],"disallowed"],[[4746,4749],"valid"],[[4750,4751],"disallowed"],[[4752,4782],"valid"],[[4783,4783],"valid"],[[4784,4784],"valid"],[[4785,4785],"disallowed"],[[4786,4789],"valid"],[[4790,4791],"disallowed"],[[4792,4798],"valid"],[[4799,4799],"disallowed"],[[4800,4800],"valid"],[[4801,4801],"disallowed"],[[4802,4805],"valid"],[[4806,4807],"disallowed"],[[4808,4814],"valid"],[[4815,4815],"valid"],[[4816,4822],"valid"],[[4823,4823],"disallowed"],[[4824,4846],"valid"],[[4847,4847],"valid"],[[4848,4878],"valid"],[[4879,4879],"valid"],[[4880,4880],"valid"],[[4881,4881],"disallowed"],[[4882,4885],"valid"],[[4886,4887],"disallowed"],[[4888,4894
],"valid"],[[4895,4895],"valid"],[[4896,4934],"valid"],[[4935,4935],"valid"],[[4936,4954],"valid"],[[4955,4956],"disallowed"],[[4957,4958],"valid"],[[4959,4959],"valid"],[[4960,4960],"valid",[],"NV8"],[[4961,4988],"valid",[],"NV8"],[[4989,4991],"disallowed"],[[4992,5007],"valid"],[[5008,5017],"valid",[],"NV8"],[[5018,5023],"disallowed"],[[5024,5108],"valid"],[[5109,5109],"valid"],[[5110,5111],"disallowed"],[[5112,5112],"mapped",[5104]],[[5113,5113],"mapped",[5105]],[[5114,5114],"mapped",[5106]],[[5115,5115],"mapped",[5107]],[[5116,5116],"mapped",[5108]],[[5117,5117],"mapped",[5109]],[[5118,5119],"disallowed"],[[5120,5120],"valid",[],"NV8"],[[5121,5740],"valid"],[[5741,5742],"valid",[],"NV8"],[[5743,5750],"valid"],[[5751,5759],"valid"],[[5760,5760],"disallowed"],[[5761,5786],"valid"],[[5787,5788],"valid",[],"NV8"],[[5789,5791],"disallowed"],[[5792,5866],"valid"],[[5867,5872],"valid",[],"NV8"],[[5873,5880],"valid"],[[5881,5887],"disallowed"],[[5888,5900],"valid"],[[5901,5901],"disallowed"],[[5902,5908],"valid"],[[5909,5919],"disallowed"],[[5920,5940],"valid"],[[5941,5942],"valid",[],"NV8"],[[5943,5951],"disallowed"],[[5952,5971],"valid"],[[5972,5983],"disallowed"],[[5984,5996],"valid"],[[5997,5997],"disallowed"],[[5998,6000],"valid"],[[6001,6001],"disallowed"],[[6002,6003],"valid"],[[6004,6015],"disallowed"],[[6016,6067],"valid"],[[6068,6069],"disallowed"],[[6070,6099],"valid"],[[6100,6102],"valid",[],"NV8"],[[6103,6103],"valid"],[[6104,6107],"valid",[],"NV8"],[[6108,6108],"valid"],[[6109,6109],"valid"],[[6110,6111],"disallowed"],[[6112,6121],"valid"],[[6122,6127],"disallowed"],[[6128,6137],"valid",[],"NV8"],[[6138,6143],"disallowed"],[[6144,6149],"valid",[],"NV8"],[[6150,6150],"disallowed"],[[6151,6154],"valid",[],"NV8"],[[6155,6157],"ignored"],[[6158,6158],"disallowed"],[[6159,6159],"disallowed"],[[6160,6169],"valid"],[[6170,6175],"disallowed"],[[6176,6263],"valid"],[[6264,6271],"disallowed"],[[6272,6313],"valid"],[[6314,6314],"valid"],[[6315,6319],"disallowed"],[[6320,6389],"valid"],[[6390,6399],"disallowed"],[[6400,6428],"valid"],[[6429,6430],"valid"],[[6431,6431],"disallowed"],[[6432,6443],"valid"],[[6444,6447],"disallowed"],[[6448,6459],"valid"],[[6460,6463],"disallowed"],[[6464,6464],"valid",[],"NV8"],[[6465,6467],"disallowed"],[[6468,6469],"valid",[],"NV8"],[[6470,6509],"valid"],[[6510,6511],"disallowed"],[[6512,6516],"valid"],[[6517,6527],"disallowed"],[[6528,6569],"valid"],[[6570,6571],"valid"],[[6572,6575],"disallowed"],[[6576,6601],"valid"],[[6602,6607],"disallowed"],[[6608,6617],"valid"],[[6618,6618],"valid",[],"XV8"],[[6619,6621],"disallowed"],[[6622,6623],"valid",[],"NV8"],[[6624,6655],"valid",[],"NV8"],[[6656,6683],"valid"],[[6684,6685],"disallowed"],[[6686,6687],"valid",[],"NV8"],[[6688,6750],"valid"],[[6751,6751],"disallowed"],[[6752,6780],"valid"],[[6781,6782],"disallowed"],[[6783,6793],"valid"],[[6794,6799],"disallowed"],[[6800,6809],"valid"],[[6810,6815],"disallowed"],[[6816,6822],"valid",[],"NV8"],[[6823,6823],"valid"],[[6824,6829],"valid",[],"NV8"],[[6830,6831],"disallowed"],[[6832,6845],"valid"],[[6846,6846],"valid",[],"NV8"],[[6847,6911],"disallowed"],[[6912,6987],"valid"],[[6988,6991],"disallowed"],[[6992,7001],"valid"],[[7002,7018],"valid",[],"NV8"],[[7019,7027],"valid"],[[7028,7036],"valid",[],"NV8"],[[7037,7039],"disallowed"],[[7040,7082],"valid"],[[7083,7085],"valid"],[[7086,7097],"valid"],[[7098,7103],"valid"],[[7104,7155],"valid"],[[7156,7163],"disallowed"],[[7164,7167],"valid",[],"NV8"],[[7168,7223],"valid"],[[7224,7226],"disallowed"],[[7227,7231],"valid",[],"
NV8"],[[7232,7241],"valid"],[[7242,7244],"disallowed"],[[7245,7293],"valid"],[[7294,7295],"valid",[],"NV8"],[[7296,7359],"disallowed"],[[7360,7367],"valid",[],"NV8"],[[7368,7375],"disallowed"],[[7376,7378],"valid"],[[7379,7379],"valid",[],"NV8"],[[7380,7410],"valid"],[[7411,7414],"valid"],[[7415,7415],"disallowed"],[[7416,7417],"valid"],[[7418,7423],"disallowed"],[[7424,7467],"valid"],[[7468,7468],"mapped",[97]],[[7469,7469],"mapped",[230]],[[7470,7470],"mapped",[98]],[[7471,7471],"valid"],[[7472,7472],"mapped",[100]],[[7473,7473],"mapped",[101]],[[7474,7474],"mapped",[477]],[[7475,7475],"mapped",[103]],[[7476,7476],"mapped",[104]],[[7477,7477],"mapped",[105]],[[7478,7478],"mapped",[106]],[[7479,7479],"mapped",[107]],[[7480,7480],"mapped",[108]],[[7481,7481],"mapped",[109]],[[7482,7482],"mapped",[110]],[[7483,7483],"valid"],[[7484,7484],"mapped",[111]],[[7485,7485],"mapped",[547]],[[7486,7486],"mapped",[112]],[[7487,7487],"mapped",[114]],[[7488,7488],"mapped",[116]],[[7489,7489],"mapped",[117]],[[7490,7490],"mapped",[119]],[[7491,7491],"mapped",[97]],[[7492,7492],"mapped",[592]],[[7493,7493],"mapped",[593]],[[7494,7494],"mapped",[7426]],[[7495,7495],"mapped",[98]],[[7496,7496],"mapped",[100]],[[7497,7497],"mapped",[101]],[[7498,7498],"mapped",[601]],[[7499,7499],"mapped",[603]],[[7500,7500],"mapped",[604]],[[7501,7501],"mapped",[103]],[[7502,7502],"valid"],[[7503,7503],"mapped",[107]],[[7504,7504],"mapped",[109]],[[7505,7505],"mapped",[331]],[[7506,7506],"mapped",[111]],[[7507,7507],"mapped",[596]],[[7508,7508],"mapped",[7446]],[[7509,7509],"mapped",[7447]],[[7510,7510],"mapped",[112]],[[7511,7511],"mapped",[116]],[[7512,7512],"mapped",[117]],[[7513,7513],"mapped",[7453]],[[7514,7514],"mapped",[623]],[[7515,7515],"mapped",[118]],[[7516,7516],"mapped",[7461]],[[7517,7517],"mapped",[946]],[[7518,7518],"mapped",[947]],[[7519,7519],"mapped",[948]],[[7520,7520],"mapped",[966]],[[7521,7521],"mapped",[967]],[[7522,7522],"mapped",[105]],[[7523,7523],"mapped",[114]],[[7524,7524],"mapped",[117]],[[7525,7525],"mapped",[118]],[[7526,7526],"mapped",[946]],[[7527,7527],"mapped",[947]],[[7528,7528],"mapped",[961]],[[7529,7529],"mapped",[966]],[[7530,7530],"mapped",[967]],[[7531,7531],"valid"],[[7532,7543],"valid"],[[7544,7544],"mapped",[1085]],[[7545,7578],"valid"],[[7579,7579],"mapped",[594]],[[7580,7580],"mapped",[99]],[[7581,7581],"mapped",[597]],[[7582,7582],"mapped",[240]],[[7583,7583],"mapped",[604]],[[7584,7584],"mapped",[102]],[[7585,7585],"mapped",[607]],[[7586,7586],"mapped",[609]],[[7587,7587],"mapped",[613]],[[7588,7588],"mapped",[616]],[[7589,7589],"mapped",[617]],[[7590,7590],"mapped",[618]],[[7591,7591],"mapped",[7547]],[[7592,7592],"mapped",[669]],[[7593,7593],"mapped",[621]],[[7594,7594],"mapped",[7557]],[[7595,7595],"mapped",[671]],[[7596,7596],"mapped",[625]],[[7597,7597],"mapped",[624]],[[7598,7598],"mapped",[626]],[[7599,7599],"mapped",[627]],[[7600,7600],"mapped",[628]],[[7601,7601],"mapped",[629]],[[7602,7602],"mapped",[632]],[[7603,7603],"mapped",[642]],[[7604,7604],"mapped",[643]],[[7605,7605],"mapped",[427]],[[7606,7606],"mapped",[649]],[[7607,7607],"mapped",[650]],[[7608,7608],"mapped",[7452]],[[7609,7609],"mapped",[651]],[[7610,7610],"mapped",[652]],[[7611,7611],"mapped",[122]],[[7612,7612],"mapped",[656]],[[7613,7613],"mapped",[657]],[[7614,7614],"mapped",[658]],[[7615,7615],"mapped",[952]],[[7616,7619],"valid"],[[7620,7626],"valid"],[[7627,7654],"valid"],[[7655,7669],"valid"],[[7670,7675],"disallowed"],[[7676,7676],"valid"],[[7677,7677],"valid"],[[7678,7679],"valid"],[[7680,7
680],"mapped",[7681]],[[7681,7681],"valid"],[[7682,7682],"mapped",[7683]],[[7683,7683],"valid"],[[7684,7684],"mapped",[7685]],[[7685,7685],"valid"],[[7686,7686],"mapped",[7687]],[[7687,7687],"valid"],[[7688,7688],"mapped",[7689]],[[7689,7689],"valid"],[[7690,7690],"mapped",[7691]],[[7691,7691],"valid"],[[7692,7692],"mapped",[7693]],[[7693,7693],"valid"],[[7694,7694],"mapped",[7695]],[[7695,7695],"valid"],[[7696,7696],"mapped",[7697]],[[7697,7697],"valid"],[[7698,7698],"mapped",[7699]],[[7699,7699],"valid"],[[7700,7700],"mapped",[7701]],[[7701,7701],"valid"],[[7702,7702],"mapped",[7703]],[[7703,7703],"valid"],[[7704,7704],"mapped",[7705]],[[7705,7705],"valid"],[[7706,7706],"mapped",[7707]],[[7707,7707],"valid"],[[7708,7708],"mapped",[7709]],[[7709,7709],"valid"],[[7710,7710],"mapped",[7711]],[[7711,7711],"valid"],[[7712,7712],"mapped",[7713]],[[7713,7713],"valid"],[[7714,7714],"mapped",[7715]],[[7715,7715],"valid"],[[7716,7716],"mapped",[7717]],[[7717,7717],"valid"],[[7718,7718],"mapped",[7719]],[[7719,7719],"valid"],[[7720,7720],"mapped",[7721]],[[7721,7721],"valid"],[[7722,7722],"mapped",[7723]],[[7723,7723],"valid"],[[7724,7724],"mapped",[7725]],[[7725,7725],"valid"],[[7726,7726],"mapped",[7727]],[[7727,7727],"valid"],[[7728,7728],"mapped",[7729]],[[7729,7729],"valid"],[[7730,7730],"mapped",[7731]],[[7731,7731],"valid"],[[7732,7732],"mapped",[7733]],[[7733,7733],"valid"],[[7734,7734],"mapped",[7735]],[[7735,7735],"valid"],[[7736,7736],"mapped",[7737]],[[7737,7737],"valid"],[[7738,7738],"mapped",[7739]],[[7739,7739],"valid"],[[7740,7740],"mapped",[7741]],[[7741,7741],"valid"],[[7742,7742],"mapped",[7743]],[[7743,7743],"valid"],[[7744,7744],"mapped",[7745]],[[7745,7745],"valid"],[[7746,7746],"mapped",[7747]],[[7747,7747],"valid"],[[7748,7748],"mapped",[7749]],[[7749,7749],"valid"],[[7750,7750],"mapped",[7751]],[[7751,7751],"valid"],[[7752,7752],"mapped",[7753]],[[7753,7753],"valid"],[[7754,7754],"mapped",[7755]],[[7755,7755],"valid"],[[7756,7756],"mapped",[7757]],[[7757,7757],"valid"],[[7758,7758],"mapped",[7759]],[[7759,7759],"valid"],[[7760,7760],"mapped",[7761]],[[7761,7761],"valid"],[[7762,7762],"mapped",[7763]],[[7763,7763],"valid"],[[7764,7764],"mapped",[7765]],[[7765,7765],"valid"],[[7766,7766],"mapped",[7767]],[[7767,7767],"valid"],[[7768,7768],"mapped",[7769]],[[7769,7769],"valid"],[[7770,7770],"mapped",[7771]],[[7771,7771],"valid"],[[7772,7772],"mapped",[7773]],[[7773,7773],"valid"],[[7774,7774],"mapped",[7775]],[[7775,7775],"valid"],[[7776,7776],"mapped",[7777]],[[7777,7777],"valid"],[[7778,7778],"mapped",[7779]],[[7779,7779],"valid"],[[7780,7780],"mapped",[7781]],[[7781,7781],"valid"],[[7782,7782],"mapped",[7783]],[[7783,7783],"valid"],[[7784,7784],"mapped",[7785]],[[7785,7785],"valid"],[[7786,7786],"mapped",[7787]],[[7787,7787],"valid"],[[7788,7788],"mapped",[7789]],[[7789,7789],"valid"],[[7790,7790],"mapped",[7791]],[[7791,7791],"valid"],[[7792,7792],"mapped",[7793]],[[7793,7793],"valid"],[[7794,7794],"mapped",[7795]],[[7795,7795],"valid"],[[7796,7796],"mapped",[7797]],[[7797,7797],"valid"],[[7798,7798],"mapped",[7799]],[[7799,7799],"valid"],[[7800,7800],"mapped",[7801]],[[7801,7801],"valid"],[[7802,7802],"mapped",[7803]],[[7803,7803],"valid"],[[7804,7804],"mapped",[7805]],[[7805,7805],"valid"],[[7806,7806],"mapped",[7807]],[[7807,7807],"valid"],[[7808,7808],"mapped",[7809]],[[7809,7809],"valid"],[[7810,7810],"mapped",[7811]],[[7811,7811],"valid"],[[7812,7812],"mapped",[7813]],[[7813,7813],"valid"],[[7814,7814],"mapped",[7815]],[[7815,7815],"valid"],[[7816,7816],"mapped",[7817
]],[[7817,7817],"valid"],[[7818,7818],"mapped",[7819]],[[7819,7819],"valid"],[[7820,7820],"mapped",[7821]],[[7821,7821],"valid"],[[7822,7822],"mapped",[7823]],[[7823,7823],"valid"],[[7824,7824],"mapped",[7825]],[[7825,7825],"valid"],[[7826,7826],"mapped",[7827]],[[7827,7827],"valid"],[[7828,7828],"mapped",[7829]],[[7829,7833],"valid"],[[7834,7834],"mapped",[97,702]],[[7835,7835],"mapped",[7777]],[[7836,7837],"valid"],[[7838,7838],"mapped",[115,115]],[[7839,7839],"valid"],[[7840,7840],"mapped",[7841]],[[7841,7841],"valid"],[[7842,7842],"mapped",[7843]],[[7843,7843],"valid"],[[7844,7844],"mapped",[7845]],[[7845,7845],"valid"],[[7846,7846],"mapped",[7847]],[[7847,7847],"valid"],[[7848,7848],"mapped",[7849]],[[7849,7849],"valid"],[[7850,7850],"mapped",[7851]],[[7851,7851],"valid"],[[7852,7852],"mapped",[7853]],[[7853,7853],"valid"],[[7854,7854],"mapped",[7855]],[[7855,7855],"valid"],[[7856,7856],"mapped",[7857]],[[7857,7857],"valid"],[[7858,7858],"mapped",[7859]],[[7859,7859],"valid"],[[7860,7860],"mapped",[7861]],[[7861,7861],"valid"],[[7862,7862],"mapped",[7863]],[[7863,7863],"valid"],[[7864,7864],"mapped",[7865]],[[7865,7865],"valid"],[[7866,7866],"mapped",[7867]],[[7867,7867],"valid"],[[7868,7868],"mapped",[7869]],[[7869,7869],"valid"],[[7870,7870],"mapped",[7871]],[[7871,7871],"valid"],[[7872,7872],"mapped",[7873]],[[7873,7873],"valid"],[[7874,7874],"mapped",[7875]],[[7875,7875],"valid"],[[7876,7876],"mapped",[7877]],[[7877,7877],"valid"],[[7878,7878],"mapped",[7879]],[[7879,7879],"valid"],[[7880,7880],"mapped",[7881]],[[7881,7881],"valid"],[[7882,7882],"mapped",[7883]],[[7883,7883],"valid"],[[7884,7884],"mapped",[7885]],[[7885,7885],"valid"],[[7886,7886],"mapped",[7887]],[[7887,7887],"valid"],[[7888,7888],"mapped",[7889]],[[7889,7889],"valid"],[[7890,7890],"mapped",[7891]],[[7891,7891],"valid"],[[7892,7892],"mapped",[7893]],[[7893,7893],"valid"],[[7894,7894],"mapped",[7895]],[[7895,7895],"valid"],[[7896,7896],"mapped",[7897]],[[7897,7897],"valid"],[[7898,7898],"mapped",[7899]],[[7899,7899],"valid"],[[7900,7900],"mapped",[7901]],[[7901,7901],"valid"],[[7902,7902],"mapped",[7903]],[[7903,7903],"valid"],[[7904,7904],"mapped",[7905]],[[7905,7905],"valid"],[[7906,7906],"mapped",[7907]],[[7907,7907],"valid"],[[7908,7908],"mapped",[7909]],[[7909,7909],"valid"],[[7910,7910],"mapped",[7911]],[[7911,7911],"valid"],[[7912,7912],"mapped",[7913]],[[7913,7913],"valid"],[[7914,7914],"mapped",[7915]],[[7915,7915],"valid"],[[7916,7916],"mapped",[7917]],[[7917,7917],"valid"],[[7918,7918],"mapped",[7919]],[[7919,7919],"valid"],[[7920,7920],"mapped",[7921]],[[7921,7921],"valid"],[[7922,7922],"mapped",[7923]],[[7923,7923],"valid"],[[7924,7924],"mapped",[7925]],[[7925,7925],"valid"],[[7926,7926],"mapped",[7927]],[[7927,7927],"valid"],[[7928,7928],"mapped",[7929]],[[7929,7929],"valid"],[[7930,7930],"mapped",[7931]],[[7931,7931],"valid"],[[7932,7932],"mapped",[7933]],[[7933,7933],"valid"],[[7934,7934],"mapped",[7935]],[[7935,7935],"valid"],[[7936,7943],"valid"],[[7944,7944],"mapped",[7936]],[[7945,7945],"mapped",[7937]],[[7946,7946],"mapped",[7938]],[[7947,7947],"mapped",[7939]],[[7948,7948],"mapped",[7940]],[[7949,7949],"mapped",[7941]],[[7950,7950],"mapped",[7942]],[[7951,7951],"mapped",[7943]],[[7952,7957],"valid"],[[7958,7959],"disallowed"],[[7960,7960],"mapped",[7952]],[[7961,7961],"mapped",[7953]],[[7962,7962],"mapped",[7954]],[[7963,7963],"mapped",[7955]],[[7964,7964],"mapped",[7956]],[[7965,7965],"mapped",[7957]],[[7966,7967],"disallowed"],[[7968,7975],"valid"],[[7976,7976],"mapped",[7968]],[[7977,7977],
"mapped",[7969]],[[7978,7978],"mapped",[7970]],[[7979,7979],"mapped",[7971]],[[7980,7980],"mapped",[7972]],[[7981,7981],"mapped",[7973]],[[7982,7982],"mapped",[7974]],[[7983,7983],"mapped",[7975]],[[7984,7991],"valid"],[[7992,7992],"mapped",[7984]],[[7993,7993],"mapped",[7985]],[[7994,7994],"mapped",[7986]],[[7995,7995],"mapped",[7987]],[[7996,7996],"mapped",[7988]],[[7997,7997],"mapped",[7989]],[[7998,7998],"mapped",[7990]],[[7999,7999],"mapped",[7991]],[[8000,8005],"valid"],[[8006,8007],"disallowed"],[[8008,8008],"mapped",[8000]],[[8009,8009],"mapped",[8001]],[[8010,8010],"mapped",[8002]],[[8011,8011],"mapped",[8003]],[[8012,8012],"mapped",[8004]],[[8013,8013],"mapped",[8005]],[[8014,8015],"disallowed"],[[8016,8023],"valid"],[[8024,8024],"disallowed"],[[8025,8025],"mapped",[8017]],[[8026,8026],"disallowed"],[[8027,8027],"mapped",[8019]],[[8028,8028],"disallowed"],[[8029,8029],"mapped",[8021]],[[8030,8030],"disallowed"],[[8031,8031],"mapped",[8023]],[[8032,8039],"valid"],[[8040,8040],"mapped",[8032]],[[8041,8041],"mapped",[8033]],[[8042,8042],"mapped",[8034]],[[8043,8043],"mapped",[8035]],[[8044,8044],"mapped",[8036]],[[8045,8045],"mapped",[8037]],[[8046,8046],"mapped",[8038]],[[8047,8047],"mapped",[8039]],[[8048,8048],"valid"],[[8049,8049],"mapped",[940]],[[8050,8050],"valid"],[[8051,8051],"mapped",[941]],[[8052,8052],"valid"],[[8053,8053],"mapped",[942]],[[8054,8054],"valid"],[[8055,8055],"mapped",[943]],[[8056,8056],"valid"],[[8057,8057],"mapped",[972]],[[8058,8058],"valid"],[[8059,8059],"mapped",[973]],[[8060,8060],"valid"],[[8061,8061],"mapped",[974]],[[8062,8063],"disallowed"],[[8064,8064],"mapped",[7936,953]],[[8065,8065],"mapped",[7937,953]],[[8066,8066],"mapped",[7938,953]],[[8067,8067],"mapped",[7939,953]],[[8068,8068],"mapped",[7940,953]],[[8069,8069],"mapped",[7941,953]],[[8070,8070],"mapped",[7942,953]],[[8071,8071],"mapped",[7943,953]],[[8072,8072],"mapped",[7936,953]],[[8073,8073],"mapped",[7937,953]],[[8074,8074],"mapped",[7938,953]],[[8075,8075],"mapped",[7939,953]],[[8076,8076],"mapped",[7940,953]],[[8077,8077],"mapped",[7941,953]],[[8078,8078],"mapped",[7942,953]],[[8079,8079],"mapped",[7943,953]],[[8080,8080],"mapped",[7968,953]],[[8081,8081],"mapped",[7969,953]],[[8082,8082],"mapped",[7970,953]],[[8083,8083],"mapped",[7971,953]],[[8084,8084],"mapped",[7972,953]],[[8085,8085],"mapped",[7973,953]],[[8086,8086],"mapped",[7974,953]],[[8087,8087],"mapped",[7975,953]],[[8088,8088],"mapped",[7968,953]],[[8089,8089],"mapped",[7969,953]],[[8090,8090],"mapped",[7970,953]],[[8091,8091],"mapped",[7971,953]],[[8092,8092],"mapped",[7972,953]],[[8093,8093],"mapped",[7973,953]],[[8094,8094],"mapped",[7974,953]],[[8095,8095],"mapped",[7975,953]],[[8096,8096],"mapped",[8032,953]],[[8097,8097],"mapped",[8033,953]],[[8098,8098],"mapped",[8034,953]],[[8099,8099],"mapped",[8035,953]],[[8100,8100],"mapped",[8036,953]],[[8101,8101],"mapped",[8037,953]],[[8102,8102],"mapped",[8038,953]],[[8103,8103],"mapped",[8039,953]],[[8104,8104],"mapped",[8032,953]],[[8105,8105],"mapped",[8033,953]],[[8106,8106],"mapped",[8034,953]],[[8107,8107],"mapped",[8035,953]],[[8108,8108],"mapped",[8036,953]],[[8109,8109],"mapped",[8037,953]],[[8110,8110],"mapped",[8038,953]],[[8111,8111],"mapped",[8039,953]],[[8112,8113],"valid"],[[8114,8114],"mapped",[8048,953]],[[8115,8115],"mapped",[945,953]],[[8116,8116],"mapped",[940,953]],[[8117,8117],"disallowed"],[[8118,8118],"valid"],[[8119,8119],"mapped",[8118,953]],[[8120,8120],"mapped",[8112]],[[8121,8121],"mapped",[8113]],[[8122,8122],"mapped",[8048]],[[8123,8123],"map
ped",[940]],[[8124,8124],"mapped",[945,953]],[[8125,8125],"disallowed_STD3_mapped",[32,787]],[[8126,8126],"mapped",[953]],[[8127,8127],"disallowed_STD3_mapped",[32,787]],[[8128,8128],"disallowed_STD3_mapped",[32,834]],[[8129,8129],"disallowed_STD3_mapped",[32,776,834]],[[8130,8130],"mapped",[8052,953]],[[8131,8131],"mapped",[951,953]],[[8132,8132],"mapped",[942,953]],[[8133,8133],"disallowed"],[[8134,8134],"valid"],[[8135,8135],"mapped",[8134,953]],[[8136,8136],"mapped",[8050]],[[8137,8137],"mapped",[941]],[[8138,8138],"mapped",[8052]],[[8139,8139],"mapped",[942]],[[8140,8140],"mapped",[951,953]],[[8141,8141],"disallowed_STD3_mapped",[32,787,768]],[[8142,8142],"disallowed_STD3_mapped",[32,787,769]],[[8143,8143],"disallowed_STD3_mapped",[32,787,834]],[[8144,8146],"valid"],[[8147,8147],"mapped",[912]],[[8148,8149],"disallowed"],[[8150,8151],"valid"],[[8152,8152],"mapped",[8144]],[[8153,8153],"mapped",[8145]],[[8154,8154],"mapped",[8054]],[[8155,8155],"mapped",[943]],[[8156,8156],"disallowed"],[[8157,8157],"disallowed_STD3_mapped",[32,788,768]],[[8158,8158],"disallowed_STD3_mapped",[32,788,769]],[[8159,8159],"disallowed_STD3_mapped",[32,788,834]],[[8160,8162],"valid"],[[8163,8163],"mapped",[944]],[[8164,8167],"valid"],[[8168,8168],"mapped",[8160]],[[8169,8169],"mapped",[8161]],[[8170,8170],"mapped",[8058]],[[8171,8171],"mapped",[973]],[[8172,8172],"mapped",[8165]],[[8173,8173],"disallowed_STD3_mapped",[32,776,768]],[[8174,8174],"disallowed_STD3_mapped",[32,776,769]],[[8175,8175],"disallowed_STD3_mapped",[96]],[[8176,8177],"disallowed"],[[8178,8178],"mapped",[8060,953]],[[8179,8179],"mapped",[969,953]],[[8180,8180],"mapped",[974,953]],[[8181,8181],"disallowed"],[[8182,8182],"valid"],[[8183,8183],"mapped",[8182,953]],[[8184,8184],"mapped",[8056]],[[8185,8185],"mapped",[972]],[[8186,8186],"mapped",[8060]],[[8187,8187],"mapped",[974]],[[8188,8188],"mapped",[969,953]],[[8189,8189],"disallowed_STD3_mapped",[32,769]],[[8190,8190],"disallowed_STD3_mapped",[32,788]],[[8191,8191],"disallowed"],[[8192,8202],"disallowed_STD3_mapped",[32]],[[8203,8203],"ignored"],[[8204,8205],"deviation",[]],[[8206,8207],"disallowed"],[[8208,8208],"valid",[],"NV8"],[[8209,8209],"mapped",[8208]],[[8210,8214],"valid",[],"NV8"],[[8215,8215],"disallowed_STD3_mapped",[32,819]],[[8216,8227],"valid",[],"NV8"],[[8228,8230],"disallowed"],[[8231,8231],"valid",[],"NV8"],[[8232,8238],"disallowed"],[[8239,8239],"disallowed_STD3_mapped",[32]],[[8240,8242],"valid",[],"NV8"],[[8243,8243],"mapped",[8242,8242]],[[8244,8244],"mapped",[8242,8242,8242]],[[8245,8245],"valid",[],"NV8"],[[8246,8246],"mapped",[8245,8245]],[[8247,8247],"mapped",[8245,8245,8245]],[[8248,8251],"valid",[],"NV8"],[[8252,8252],"disallowed_STD3_mapped",[33,33]],[[8253,8253],"valid",[],"NV8"],[[8254,8254],"disallowed_STD3_mapped",[32,773]],[[8255,8262],"valid",[],"NV8"],[[8263,8263],"disallowed_STD3_mapped",[63,63]],[[8264,8264],"disallowed_STD3_mapped",[63,33]],[[8265,8265],"disallowed_STD3_mapped",[33,63]],[[8266,8269],"valid",[],"NV8"],[[8270,8274],"valid",[],"NV8"],[[8275,8276],"valid",[],"NV8"],[[8277,8278],"valid",[],"NV8"],[[8279,8279],"mapped",[8242,8242,8242,8242]],[[8280,8286],"valid",[],"NV8"],[[8287,8287],"disallowed_STD3_mapped",[32]],[[8288,8288],"ignored"],[[8289,8291],"disallowed"],[[8292,8292],"ignored"],[[8293,8293],"disallowed"],[[8294,8297],"disallowed"],[[8298,8303],"disallowed"],[[8304,8304],"mapped",[48]],[[8305,8305],"mapped",[105]],[[8306,8307],"disallowed"],[[8308,8308],"mapped",[52]],[[8309,8309],"mapped",[53]],[[8310,8310],"mapped",[54]],[[8311
,8311],"mapped",[55]],[[8312,8312],"mapped",[56]],[[8313,8313],"mapped",[57]],[[8314,8314],"disallowed_STD3_mapped",[43]],[[8315,8315],"mapped",[8722]],[[8316,8316],"disallowed_STD3_mapped",[61]],[[8317,8317],"disallowed_STD3_mapped",[40]],[[8318,8318],"disallowed_STD3_mapped",[41]],[[8319,8319],"mapped",[110]],[[8320,8320],"mapped",[48]],[[8321,8321],"mapped",[49]],[[8322,8322],"mapped",[50]],[[8323,8323],"mapped",[51]],[[8324,8324],"mapped",[52]],[[8325,8325],"mapped",[53]],[[8326,8326],"mapped",[54]],[[8327,8327],"mapped",[55]],[[8328,8328],"mapped",[56]],[[8329,8329],"mapped",[57]],[[8330,8330],"disallowed_STD3_mapped",[43]],[[8331,8331],"mapped",[8722]],[[8332,8332],"disallowed_STD3_mapped",[61]],[[8333,8333],"disallowed_STD3_mapped",[40]],[[8334,8334],"disallowed_STD3_mapped",[41]],[[8335,8335],"disallowed"],[[8336,8336],"mapped",[97]],[[8337,8337],"mapped",[101]],[[8338,8338],"mapped",[111]],[[8339,8339],"mapped",[120]],[[8340,8340],"mapped",[601]],[[8341,8341],"mapped",[104]],[[8342,8342],"mapped",[107]],[[8343,8343],"mapped",[108]],[[8344,8344],"mapped",[109]],[[8345,8345],"mapped",[110]],[[8346,8346],"mapped",[112]],[[8347,8347],"mapped",[115]],[[8348,8348],"mapped",[116]],[[8349,8351],"disallowed"],[[8352,8359],"valid",[],"NV8"],[[8360,8360],"mapped",[114,115]],[[8361,8362],"valid",[],"NV8"],[[8363,8363],"valid",[],"NV8"],[[8364,8364],"valid",[],"NV8"],[[8365,8367],"valid",[],"NV8"],[[8368,8369],"valid",[],"NV8"],[[8370,8373],"valid",[],"NV8"],[[8374,8376],"valid",[],"NV8"],[[8377,8377],"valid",[],"NV8"],[[8378,8378],"valid",[],"NV8"],[[8379,8381],"valid",[],"NV8"],[[8382,8382],"valid",[],"NV8"],[[8383,8399],"disallowed"],[[8400,8417],"valid",[],"NV8"],[[8418,8419],"valid",[],"NV8"],[[8420,8426],"valid",[],"NV8"],[[8427,8427],"valid",[],"NV8"],[[8428,8431],"valid",[],"NV8"],[[8432,8432],"valid",[],"NV8"],[[8433,8447],"disallowed"],[[8448,8448],"disallowed_STD3_mapped",[97,47,99]],[[8449,8449],"disallowed_STD3_mapped",[97,47,115]],[[8450,8450],"mapped",[99]],[[8451,8451],"mapped",[176,99]],[[8452,8452],"valid",[],"NV8"],[[8453,8453],"disallowed_STD3_mapped",[99,47,111]],[[8454,8454],"disallowed_STD3_mapped",[99,47,117]],[[8455,8455],"mapped",[603]],[[8456,8456],"valid",[],"NV8"],[[8457,8457],"mapped",[176,102]],[[8458,8458],"mapped",[103]],[[8459,8462],"mapped",[104]],[[8463,8463],"mapped",[295]],[[8464,8465],"mapped",[105]],[[8466,8467],"mapped",[108]],[[8468,8468],"valid",[],"NV8"],[[8469,8469],"mapped",[110]],[[8470,8470],"mapped",[110,111]],[[8471,8472],"valid",[],"NV8"],[[8473,8473],"mapped",[112]],[[8474,8474],"mapped",[113]],[[8475,8477],"mapped",[114]],[[8478,8479],"valid",[],"NV8"],[[8480,8480],"mapped",[115,109]],[[8481,8481],"mapped",[116,101,108]],[[8482,8482],"mapped",[116,109]],[[8483,8483],"valid",[],"NV8"],[[8484,8484],"mapped",[122]],[[8485,8485],"valid",[],"NV8"],[[8486,8486],"mapped",[969]],[[8487,8487],"valid",[],"NV8"],[[8488,8488],"mapped",[122]],[[8489,8489],"valid",[],"NV8"],[[8490,8490],"mapped",[107]],[[8491,8491],"mapped",[229]],[[8492,8492],"mapped",[98]],[[8493,8493],"mapped",[99]],[[8494,8494],"valid",[],"NV8"],[[8495,8496],"mapped",[101]],[[8497,8497],"mapped",[102]],[[8498,8498],"disallowed"],[[8499,8499],"mapped",[109]],[[8500,8500],"mapped",[111]],[[8501,8501],"mapped",[1488]],[[8502,8502],"mapped",[1489]],[[8503,8503],"mapped",[1490]],[[8504,8504],"mapped",[1491]],[[8505,8505],"mapped",[105]],[[8506,8506],"valid",[],"NV8"],[[8507,8507],"mapped",[102,97,120]],[[8508,8508],"mapped",[960]],[[8509,8510],"mapped",[947]],[[8511,8511],"mapped",[960]],[[
8512,8512],"mapped",[8721]],[[8513,8516],"valid",[],"NV8"],[[8517,8518],"mapped",[100]],[[8519,8519],"mapped",[101]],[[8520,8520],"mapped",[105]],[[8521,8521],"mapped",[106]],[[8522,8523],"valid",[],"NV8"],[[8524,8524],"valid",[],"NV8"],[[8525,8525],"valid",[],"NV8"],[[8526,8526],"valid"],[[8527,8527],"valid",[],"NV8"],[[8528,8528],"mapped",[49,8260,55]],[[8529,8529],"mapped",[49,8260,57]],[[8530,8530],"mapped",[49,8260,49,48]],[[8531,8531],"mapped",[49,8260,51]],[[8532,8532],"mapped",[50,8260,51]],[[8533,8533],"mapped",[49,8260,53]],[[8534,8534],"mapped",[50,8260,53]],[[8535,8535],"mapped",[51,8260,53]],[[8536,8536],"mapped",[52,8260,53]],[[8537,8537],"mapped",[49,8260,54]],[[8538,8538],"mapped",[53,8260,54]],[[8539,8539],"mapped",[49,8260,56]],[[8540,8540],"mapped",[51,8260,56]],[[8541,8541],"mapped",[53,8260,56]],[[8542,8542],"mapped",[55,8260,56]],[[8543,8543],"mapped",[49,8260]],[[8544,8544],"mapped",[105]],[[8545,8545],"mapped",[105,105]],[[8546,8546],"mapped",[105,105,105]],[[8547,8547],"mapped",[105,118]],[[8548,8548],"mapped",[118]],[[8549,8549],"mapped",[118,105]],[[8550,8550],"mapped",[118,105,105]],[[8551,8551],"mapped",[118,105,105,105]],[[8552,8552],"mapped",[105,120]],[[8553,8553],"mapped",[120]],[[8554,8554],"mapped",[120,105]],[[8555,8555],"mapped",[120,105,105]],[[8556,8556],"mapped",[108]],[[8557,8557],"mapped",[99]],[[8558,8558],"mapped",[100]],[[8559,8559],"mapped",[109]],[[8560,8560],"mapped",[105]],[[8561,8561],"mapped",[105,105]],[[8562,8562],"mapped",[105,105,105]],[[8563,8563],"mapped",[105,118]],[[8564,8564],"mapped",[118]],[[8565,8565],"mapped",[118,105]],[[8566,8566],"mapped",[118,105,105]],[[8567,8567],"mapped",[118,105,105,105]],[[8568,8568],"mapped",[105,120]],[[8569,8569],"mapped",[120]],[[8570,8570],"mapped",[120,105]],[[8571,8571],"mapped",[120,105,105]],[[8572,8572],"mapped",[108]],[[8573,8573],"mapped",[99]],[[8574,8574],"mapped",[100]],[[8575,8575],"mapped",[109]],[[8576,8578],"valid",[],"NV8"],[[8579,8579],"disallowed"],[[8580,8580],"valid"],[[8581,8584],"valid",[],"NV8"],[[8585,8585],"mapped",[48,8260,51]],[[8586,8587],"valid",[],"NV8"],[[8588,8591],"disallowed"],[[8592,8682],"valid",[],"NV8"],[[8683,8691],"valid",[],"NV8"],[[8692,8703],"valid",[],"NV8"],[[8704,8747],"valid",[],"NV8"],[[8748,8748],"mapped",[8747,8747]],[[8749,8749],"mapped",[8747,8747,8747]],[[8750,8750],"valid",[],"NV8"],[[8751,8751],"mapped",[8750,8750]],[[8752,8752],"mapped",[8750,8750,8750]],[[8753,8799],"valid",[],"NV8"],[[8800,8800],"disallowed_STD3_valid"],[[8801,8813],"valid",[],"NV8"],[[8814,8815],"disallowed_STD3_valid"],[[8816,8945],"valid",[],"NV8"],[[8946,8959],"valid",[],"NV8"],[[8960,8960],"valid",[],"NV8"],[[8961,8961],"valid",[],"NV8"],[[8962,9000],"valid",[],"NV8"],[[9001,9001],"mapped",[12296]],[[9002,9002],"mapped",[12297]],[[9003,9082],"valid",[],"NV8"],[[9083,9083],"valid",[],"NV8"],[[9084,9084],"valid",[],"NV8"],[[9085,9114],"valid",[],"NV8"],[[9115,9166],"valid",[],"NV8"],[[9167,9168],"valid",[],"NV8"],[[9169,9179],"valid",[],"NV8"],[[9180,9191],"valid",[],"NV8"],[[9192,9192],"valid",[],"NV8"],[[9193,9203],"valid",[],"NV8"],[[9204,9210],"valid",[],"NV8"],[[9211,9215],"disallowed"],[[9216,9252],"valid",[],"NV8"],[[9253,9254],"valid",[],"NV8"],[[9255,9279],"disallowed"],[[9280,9290],"valid",[],"NV8"],[[9291,9311],"disallowed"],[[9312,9312],"mapped",[49]],[[9313,9313],"mapped",[50]],[[9314,9314],"mapped",[51]],[[9315,9315],"mapped",[52]],[[9316,9316],"mapped",[53]],[[9317,9317],"mapped",[54]],[[9318,9318],"mapped",[55]],[[9319,9319],"mapped",[56]],[[9320,9320],"ma
pped",[57]],[[9321,9321],"mapped",[49,48]],[[9322,9322],"mapped",[49,49]],[[9323,9323],"mapped",[49,50]],[[9324,9324],"mapped",[49,51]],[[9325,9325],"mapped",[49,52]],[[9326,9326],"mapped",[49,53]],[[9327,9327],"mapped",[49,54]],[[9328,9328],"mapped",[49,55]],[[9329,9329],"mapped",[49,56]],[[9330,9330],"mapped",[49,57]],[[9331,9331],"mapped",[50,48]],[[9332,9332],"disallowed_STD3_mapped",[40,49,41]],[[9333,9333],"disallowed_STD3_mapped",[40,50,41]],[[9334,9334],"disallowed_STD3_mapped",[40,51,41]],[[9335,9335],"disallowed_STD3_mapped",[40,52,41]],[[9336,9336],"disallowed_STD3_mapped",[40,53,41]],[[9337,9337],"disallowed_STD3_mapped",[40,54,41]],[[9338,9338],"disallowed_STD3_mapped",[40,55,41]],[[9339,9339],"disallowed_STD3_mapped",[40,56,41]],[[9340,9340],"disallowed_STD3_mapped",[40,57,41]],[[9341,9341],"disallowed_STD3_mapped",[40,49,48,41]],[[9342,9342],"disallowed_STD3_mapped",[40,49,49,41]],[[9343,9343],"disallowed_STD3_mapped",[40,49,50,41]],[[9344,9344],"disallowed_STD3_mapped",[40,49,51,41]],[[9345,9345],"disallowed_STD3_mapped",[40,49,52,41]],[[9346,9346],"disallowed_STD3_mapped",[40,49,53,41]],[[9347,9347],"disallowed_STD3_mapped",[40,49,54,41]],[[9348,9348],"disallowed_STD3_mapped",[40,49,55,41]],[[9349,9349],"disallowed_STD3_mapped",[40,49,56,41]],[[9350,9350],"disallowed_STD3_mapped",[40,49,57,41]],[[9351,9351],"disallowed_STD3_mapped",[40,50,48,41]],[[9352,9371],"disallowed"],[[9372,9372],"disallowed_STD3_mapped",[40,97,41]],[[9373,9373],"disallowed_STD3_mapped",[40,98,41]],[[9374,9374],"disallowed_STD3_mapped",[40,99,41]],[[9375,9375],"disallowed_STD3_mapped",[40,100,41]],[[9376,9376],"disallowed_STD3_mapped",[40,101,41]],[[9377,9377],"disallowed_STD3_mapped",[40,102,41]],[[9378,9378],"disallowed_STD3_mapped",[40,103,41]],[[9379,9379],"disallowed_STD3_mapped",[40,104,41]],[[9380,9380],"disallowed_STD3_mapped",[40,105,41]],[[9381,9381],"disallowed_STD3_mapped",[40,106,41]],[[9382,9382],"disallowed_STD3_mapped",[40,107,41]],[[9383,9383],"disallowed_STD3_mapped",[40,108,41]],[[9384,9384],"disallowed_STD3_mapped",[40,109,41]],[[9385,9385],"disallowed_STD3_mapped",[40,110,41]],[[9386,9386],"disallowed_STD3_mapped",[40,111,41]],[[9387,9387],"disallowed_STD3_mapped",[40,112,41]],[[9388,9388],"disallowed_STD3_mapped",[40,113,41]],[[9389,9389],"disallowed_STD3_mapped",[40,114,41]],[[9390,9390],"disallowed_STD3_mapped",[40,115,41]],[[9391,9391],"disallowed_STD3_mapped",[40,116,41]],[[9392,9392],"disallowed_STD3_mapped",[40,117,41]],[[9393,9393],"disallowed_STD3_mapped",[40,118,41]],[[9394,9394],"disallowed_STD3_mapped",[40,119,41]],[[9395,9395],"disallowed_STD3_mapped",[40,120,41]],[[9396,9396],"disallowed_STD3_mapped",[40,121,41]],[[9397,9397],"disallowed_STD3_mapped",[40,122,41]],[[9398,9398],"mapped",[97]],[[9399,9399],"mapped",[98]],[[9400,9400],"mapped",[99]],[[9401,9401],"mapped",[100]],[[9402,9402],"mapped",[101]],[[9403,9403],"mapped",[102]],[[9404,9404],"mapped",[103]],[[9405,9405],"mapped",[104]],[[9406,9406],"mapped",[105]],[[9407,9407],"mapped",[106]],[[9408,9408],"mapped",[107]],[[9409,9409],"mapped",[108]],[[9410,9410],"mapped",[109]],[[9411,9411],"mapped",[110]],[[9412,9412],"mapped",[111]],[[9413,9413],"mapped",[112]],[[9414,9414],"mapped",[113]],[[9415,9415],"mapped",[114]],[[9416,9416],"mapped",[115]],[[9417,9417],"mapped",[116]],[[9418,9418],"mapped",[117]],[[9419,9419],"mapped",[118]],[[9420,9420],"mapped",[119]],[[9421,9421],"mapped",[120]],[[9422,9422],"mapped",[121]],[[9423,9423],"mapped",[122]],[[9424,9424],"mapped",[97]],[[9425,9425],"mapped",[98]],[[9426,9426]
,"mapped",[99]],[[9427,9427],"mapped",[100]],[[9428,9428],"mapped",[101]],[[9429,9429],"mapped",[102]],[[9430,9430],"mapped",[103]],[[9431,9431],"mapped",[104]],[[9432,9432],"mapped",[105]],[[9433,9433],"mapped",[106]],[[9434,9434],"mapped",[107]],[[9435,9435],"mapped",[108]],[[9436,9436],"mapped",[109]],[[9437,9437],"mapped",[110]],[[9438,9438],"mapped",[111]],[[9439,9439],"mapped",[112]],[[9440,9440],"mapped",[113]],[[9441,9441],"mapped",[114]],[[9442,9442],"mapped",[115]],[[9443,9443],"mapped",[116]],[[9444,9444],"mapped",[117]],[[9445,9445],"mapped",[118]],[[9446,9446],"mapped",[119]],[[9447,9447],"mapped",[120]],[[9448,9448],"mapped",[121]],[[9449,9449],"mapped",[122]],[[9450,9450],"mapped",[48]],[[9451,9470],"valid",[],"NV8"],[[9471,9471],"valid",[],"NV8"],[[9472,9621],"valid",[],"NV8"],[[9622,9631],"valid",[],"NV8"],[[9632,9711],"valid",[],"NV8"],[[9712,9719],"valid",[],"NV8"],[[9720,9727],"valid",[],"NV8"],[[9728,9747],"valid",[],"NV8"],[[9748,9749],"valid",[],"NV8"],[[9750,9751],"valid",[],"NV8"],[[9752,9752],"valid",[],"NV8"],[[9753,9753],"valid",[],"NV8"],[[9754,9839],"valid",[],"NV8"],[[9840,9841],"valid",[],"NV8"],[[9842,9853],"valid",[],"NV8"],[[9854,9855],"valid",[],"NV8"],[[9856,9865],"valid",[],"NV8"],[[9866,9873],"valid",[],"NV8"],[[9874,9884],"valid",[],"NV8"],[[9885,9885],"valid",[],"NV8"],[[9886,9887],"valid",[],"NV8"],[[9888,9889],"valid",[],"NV8"],[[9890,9905],"valid",[],"NV8"],[[9906,9906],"valid",[],"NV8"],[[9907,9916],"valid",[],"NV8"],[[9917,9919],"valid",[],"NV8"],[[9920,9923],"valid",[],"NV8"],[[9924,9933],"valid",[],"NV8"],[[9934,9934],"valid",[],"NV8"],[[9935,9953],"valid",[],"NV8"],[[9954,9954],"valid",[],"NV8"],[[9955,9955],"valid",[],"NV8"],[[9956,9959],"valid",[],"NV8"],[[9960,9983],"valid",[],"NV8"],[[9984,9984],"valid",[],"NV8"],[[9985,9988],"valid",[],"NV8"],[[9989,9989],"valid",[],"NV8"],[[9990,9993],"valid",[],"NV8"],[[9994,9995],"valid",[],"NV8"],[[9996,10023],"valid",[],"NV8"],[[10024,10024],"valid",[],"NV8"],[[10025,10059],"valid",[],"NV8"],[[10060,10060],"valid",[],"NV8"],[[10061,10061],"valid",[],"NV8"],[[10062,10062],"valid",[],"NV8"],[[10063,10066],"valid",[],"NV8"],[[10067,10069],"valid",[],"NV8"],[[10070,10070],"valid",[],"NV8"],[[10071,10071],"valid",[],"NV8"],[[10072,10078],"valid",[],"NV8"],[[10079,10080],"valid",[],"NV8"],[[10081,10087],"valid",[],"NV8"],[[10088,10101],"valid",[],"NV8"],[[10102,10132],"valid",[],"NV8"],[[10133,10135],"valid",[],"NV8"],[[10136,10159],"valid",[],"NV8"],[[10160,10160],"valid",[],"NV8"],[[10161,10174],"valid",[],"NV8"],[[10175,10175],"valid",[],"NV8"],[[10176,10182],"valid",[],"NV8"],[[10183,10186],"valid",[],"NV8"],[[10187,10187],"valid",[],"NV8"],[[10188,10188],"valid",[],"NV8"],[[10189,10189],"valid",[],"NV8"],[[10190,10191],"valid",[],"NV8"],[[10192,10219],"valid",[],"NV8"],[[10220,10223],"valid",[],"NV8"],[[10224,10239],"valid",[],"NV8"],[[10240,10495],"valid",[],"NV8"],[[10496,10763],"valid",[],"NV8"],[[10764,10764],"mapped",[8747,8747,8747,8747]],[[10765,10867],"valid",[],"NV8"],[[10868,10868],"disallowed_STD3_mapped",[58,58,61]],[[10869,10869],"disallowed_STD3_mapped",[61,61]],[[10870,10870],"disallowed_STD3_mapped",[61,61,61]],[[10871,10971],"valid",[],"NV8"],[[10972,10972],"mapped",[10973,824]],[[10973,11007],"valid",[],"NV8"],[[11008,11021],"valid",[],"NV8"],[[11022,11027],"valid",[],"NV8"],[[11028,11034],"valid",[],"NV8"],[[11035,11039],"valid",[],"NV8"],[[11040,11043],"valid",[],"NV8"],[[11044,11084],"valid",[],"NV8"],[[11085,11087],"valid",[],"NV8"],[[11088,11092],"valid",[],"NV8"],[[11093,11097
],"valid",[],"NV8"],[[11098,11123],"valid",[],"NV8"],[[11124,11125],"disallowed"],[[11126,11157],"valid",[],"NV8"],[[11158,11159],"disallowed"],[[11160,11193],"valid",[],"NV8"],[[11194,11196],"disallowed"],[[11197,11208],"valid",[],"NV8"],[[11209,11209],"disallowed"],[[11210,11217],"valid",[],"NV8"],[[11218,11243],"disallowed"],[[11244,11247],"valid",[],"NV8"],[[11248,11263],"disallowed"],[[11264,11264],"mapped",[11312]],[[11265,11265],"mapped",[11313]],[[11266,11266],"mapped",[11314]],[[11267,11267],"mapped",[11315]],[[11268,11268],"mapped",[11316]],[[11269,11269],"mapped",[11317]],[[11270,11270],"mapped",[11318]],[[11271,11271],"mapped",[11319]],[[11272,11272],"mapped",[11320]],[[11273,11273],"mapped",[11321]],[[11274,11274],"mapped",[11322]],[[11275,11275],"mapped",[11323]],[[11276,11276],"mapped",[11324]],[[11277,11277],"mapped",[11325]],[[11278,11278],"mapped",[11326]],[[11279,11279],"mapped",[11327]],[[11280,11280],"mapped",[11328]],[[11281,11281],"mapped",[11329]],[[11282,11282],"mapped",[11330]],[[11283,11283],"mapped",[11331]],[[11284,11284],"mapped",[11332]],[[11285,11285],"mapped",[11333]],[[11286,11286],"mapped",[11334]],[[11287,11287],"mapped",[11335]],[[11288,11288],"mapped",[11336]],[[11289,11289],"mapped",[11337]],[[11290,11290],"mapped",[11338]],[[11291,11291],"mapped",[11339]],[[11292,11292],"mapped",[11340]],[[11293,11293],"mapped",[11341]],[[11294,11294],"mapped",[11342]],[[11295,11295],"mapped",[11343]],[[11296,11296],"mapped",[11344]],[[11297,11297],"mapped",[11345]],[[11298,11298],"mapped",[11346]],[[11299,11299],"mapped",[11347]],[[11300,11300],"mapped",[11348]],[[11301,11301],"mapped",[11349]],[[11302,11302],"mapped",[11350]],[[11303,11303],"mapped",[11351]],[[11304,11304],"mapped",[11352]],[[11305,11305],"mapped",[11353]],[[11306,11306],"mapped",[11354]],[[11307,11307],"mapped",[11355]],[[11308,11308],"mapped",[11356]],[[11309,11309],"mapped",[11357]],[[11310,11310],"mapped",[11358]],[[11311,11311],"disallowed"],[[11312,11358],"valid"],[[11359,11359],"disallowed"],[[11360,11360],"mapped",[11361]],[[11361,11361],"valid"],[[11362,11362],"mapped",[619]],[[11363,11363],"mapped",[7549]],[[11364,11364],"mapped",[637]],[[11365,11366],"valid"],[[11367,11367],"mapped",[11368]],[[11368,11368],"valid"],[[11369,11369],"mapped",[11370]],[[11370,11370],"valid"],[[11371,11371],"mapped",[11372]],[[11372,11372],"valid"],[[11373,11373],"mapped",[593]],[[11374,11374],"mapped",[625]],[[11375,11375],"mapped",[592]],[[11376,11376],"mapped",[594]],[[11377,11377],"valid"],[[11378,11378],"mapped",[11379]],[[11379,11379],"valid"],[[11380,11380],"valid"],[[11381,11381],"mapped",[11382]],[[11382,11383],"valid"],[[11384,11387],"valid"],[[11388,11388],"mapped",[106]],[[11389,11389],"mapped",[118]],[[11390,11390],"mapped",[575]],[[11391,11391],"mapped",[576]],[[11392,11392],"mapped",[11393]],[[11393,11393],"valid"],[[11394,11394],"mapped",[11395]],[[11395,11395],"valid"],[[11396,11396],"mapped",[11397]],[[11397,11397],"valid"],[[11398,11398],"mapped",[11399]],[[11399,11399],"valid"],[[11400,11400],"mapped",[11401]],[[11401,11401],"valid"],[[11402,11402],"mapped",[11403]],[[11403,11403],"valid"],[[11404,11404],"mapped",[11405]],[[11405,11405],"valid"],[[11406,11406],"mapped",[11407]],[[11407,11407],"valid"],[[11408,11408],"mapped",[11409]],[[11409,11409],"valid"],[[11410,11410],"mapped",[11411]],[[11411,11411],"valid"],[[11412,11412],"mapped",[11413]],[[11413,11413],"valid"],[[11414,11414],"mapped",[11415]],[[11415,11415],"valid"],[[11416,11416],"mapped",[11417]],[[11417,11417],"valid"],[[11418,1
1418],"mapped",[11419]],[[11419,11419],"valid"],[[11420,11420],"mapped",[11421]],[[11421,11421],"valid"],[[11422,11422],"mapped",[11423]],[[11423,11423],"valid"],[[11424,11424],"mapped",[11425]],[[11425,11425],"valid"],[[11426,11426],"mapped",[11427]],[[11427,11427],"valid"],[[11428,11428],"mapped",[11429]],[[11429,11429],"valid"],[[11430,11430],"mapped",[11431]],[[11431,11431],"valid"],[[11432,11432],"mapped",[11433]],[[11433,11433],"valid"],[[11434,11434],"mapped",[11435]],[[11435,11435],"valid"],[[11436,11436],"mapped",[11437]],[[11437,11437],"valid"],[[11438,11438],"mapped",[11439]],[[11439,11439],"valid"],[[11440,11440],"mapped",[11441]],[[11441,11441],"valid"],[[11442,11442],"mapped",[11443]],[[11443,11443],"valid"],[[11444,11444],"mapped",[11445]],[[11445,11445],"valid"],[[11446,11446],"mapped",[11447]],[[11447,11447],"valid"],[[11448,11448],"mapped",[11449]],[[11449,11449],"valid"],[[11450,11450],"mapped",[11451]],[[11451,11451],"valid"],[[11452,11452],"mapped",[11453]],[[11453,11453],"valid"],[[11454,11454],"mapped",[11455]],[[11455,11455],"valid"],[[11456,11456],"mapped",[11457]],[[11457,11457],"valid"],[[11458,11458],"mapped",[11459]],[[11459,11459],"valid"],[[11460,11460],"mapped",[11461]],[[11461,11461],"valid"],[[11462,11462],"mapped",[11463]],[[11463,11463],"valid"],[[11464,11464],"mapped",[11465]],[[11465,11465],"valid"],[[11466,11466],"mapped",[11467]],[[11467,11467],"valid"],[[11468,11468],"mapped",[11469]],[[11469,11469],"valid"],[[11470,11470],"mapped",[11471]],[[11471,11471],"valid"],[[11472,11472],"mapped",[11473]],[[11473,11473],"valid"],[[11474,11474],"mapped",[11475]],[[11475,11475],"valid"],[[11476,11476],"mapped",[11477]],[[11477,11477],"valid"],[[11478,11478],"mapped",[11479]],[[11479,11479],"valid"],[[11480,11480],"mapped",[11481]],[[11481,11481],"valid"],[[11482,11482],"mapped",[11483]],[[11483,11483],"valid"],[[11484,11484],"mapped",[11485]],[[11485,11485],"valid"],[[11486,11486],"mapped",[11487]],[[11487,11487],"valid"],[[11488,11488],"mapped",[11489]],[[11489,11489],"valid"],[[11490,11490],"mapped",[11491]],[[11491,11492],"valid"],[[11493,11498],"valid",[],"NV8"],[[11499,11499],"mapped",[11500]],[[11500,11500],"valid"],[[11501,11501],"mapped",[11502]],[[11502,11505],"valid"],[[11506,11506],"mapped",[11507]],[[11507,11507],"valid"],[[11508,11512],"disallowed"],[[11513,11519],"valid",[],"NV8"],[[11520,11557],"valid"],[[11558,11558],"disallowed"],[[11559,11559],"valid"],[[11560,11564],"disallowed"],[[11565,11565],"valid"],[[11566,11567],"disallowed"],[[11568,11621],"valid"],[[11622,11623],"valid"],[[11624,11630],"disallowed"],[[11631,11631],"mapped",[11617]],[[11632,11632],"valid",[],"NV8"],[[11633,11646],"disallowed"],[[11647,11647],"valid"],[[11648,11670],"valid"],[[11671,11679],"disallowed"],[[11680,11686],"valid"],[[11687,11687],"disallowed"],[[11688,11694],"valid"],[[11695,11695],"disallowed"],[[11696,11702],"valid"],[[11703,11703],"disallowed"],[[11704,11710],"valid"],[[11711,11711],"disallowed"],[[11712,11718],"valid"],[[11719,11719],"disallowed"],[[11720,11726],"valid"],[[11727,11727],"disallowed"],[[11728,11734],"valid"],[[11735,11735],"disallowed"],[[11736,11742],"valid"],[[11743,11743],"disallowed"],[[11744,11775],"valid"],[[11776,11799],"valid",[],"NV8"],[[11800,11803],"valid",[],"NV8"],[[11804,11805],"valid",[],"NV8"],[[11806,11822],"valid",[],"NV8"],[[11823,11823],"valid"],[[11824,11824],"valid",[],"NV8"],[[11825,11825],"valid",[],"NV8"],[[11826,11835],"valid",[],"NV8"],[[11836,11842],"valid",[],"NV8"],[[11843,11903],"disallowed"],[[11904,11929],"
valid",[],"NV8"],[[11930,11930],"disallowed"],[[11931,11934],"valid",[],"NV8"],[[11935,11935],"mapped",[27597]],[[11936,12018],"valid",[],"NV8"],[[12019,12019],"mapped",[40863]],[[12020,12031],"disallowed"],[[12032,12032],"mapped",[19968]],[[12033,12033],"mapped",[20008]],[[12034,12034],"mapped",[20022]],[[12035,12035],"mapped",[20031]],[[12036,12036],"mapped",[20057]],[[12037,12037],"mapped",[20101]],[[12038,12038],"mapped",[20108]],[[12039,12039],"mapped",[20128]],[[12040,12040],"mapped",[20154]],[[12041,12041],"mapped",[20799]],[[12042,12042],"mapped",[20837]],[[12043,12043],"mapped",[20843]],[[12044,12044],"mapped",[20866]],[[12045,12045],"mapped",[20886]],[[12046,12046],"mapped",[20907]],[[12047,12047],"mapped",[20960]],[[12048,12048],"mapped",[20981]],[[12049,12049],"mapped",[20992]],[[12050,12050],"mapped",[21147]],[[12051,12051],"mapped",[21241]],[[12052,12052],"mapped",[21269]],[[12053,12053],"mapped",[21274]],[[12054,12054],"mapped",[21304]],[[12055,12055],"mapped",[21313]],[[12056,12056],"mapped",[21340]],[[12057,12057],"mapped",[21353]],[[12058,12058],"mapped",[21378]],[[12059,12059],"mapped",[21430]],[[12060,12060],"mapped",[21448]],[[12061,12061],"mapped",[21475]],[[12062,12062],"mapped",[22231]],[[12063,12063],"mapped",[22303]],[[12064,12064],"mapped",[22763]],[[12065,12065],"mapped",[22786]],[[12066,12066],"mapped",[22794]],[[12067,12067],"mapped",[22805]],[[12068,12068],"mapped",[22823]],[[12069,12069],"mapped",[22899]],[[12070,12070],"mapped",[23376]],[[12071,12071],"mapped",[23424]],[[12072,12072],"mapped",[23544]],[[12073,12073],"mapped",[23567]],[[12074,12074],"mapped",[23586]],[[12075,12075],"mapped",[23608]],[[12076,12076],"mapped",[23662]],[[12077,12077],"mapped",[23665]],[[12078,12078],"mapped",[24027]],[[12079,12079],"mapped",[24037]],[[12080,12080],"mapped",[24049]],[[12081,12081],"mapped",[24062]],[[12082,12082],"mapped",[24178]],[[12083,12083],"mapped",[24186]],[[12084,12084],"mapped",[24191]],[[12085,12085],"mapped",[24308]],[[12086,12086],"mapped",[24318]],[[12087,12087],"mapped",[24331]],[[12088,12088],"mapped",[24339]],[[12089,12089],"mapped",[24400]],[[12090,12090],"mapped",[24417]],[[12091,12091],"mapped",[24435]],[[12092,12092],"mapped",[24515]],[[12093,12093],"mapped",[25096]],[[12094,12094],"mapped",[25142]],[[12095,12095],"mapped",[25163]],[[12096,12096],"mapped",[25903]],[[12097,12097],"mapped",[25908]],[[12098,12098],"mapped",[25991]],[[12099,12099],"mapped",[26007]],[[12100,12100],"mapped",[26020]],[[12101,12101],"mapped",[26041]],[[12102,12102],"mapped",[26080]],[[12103,12103],"mapped",[26085]],[[12104,12104],"mapped",[26352]],[[12105,12105],"mapped",[26376]],[[12106,12106],"mapped",[26408]],[[12107,12107],"mapped",[27424]],[[12108,12108],"mapped",[27490]],[[12109,12109],"mapped",[27513]],[[12110,12110],"mapped",[27571]],[[12111,12111],"mapped",[27595]],[[12112,12112],"mapped",[27604]],[[12113,12113],"mapped",[27611]],[[12114,12114],"mapped",[27663]],[[12115,12115],"mapped",[27668]],[[12116,12116],"mapped",[27700]],[[12117,12117],"mapped",[28779]],[[12118,12118],"mapped",[29226]],[[12119,12119],"mapped",[29238]],[[12120,12120],"mapped",[29243]],[[12121,12121],"mapped",[29247]],[[12122,12122],"mapped",[29255]],[[12123,12123],"mapped",[29273]],[[12124,12124],"mapped",[29275]],[[12125,12125],"mapped",[29356]],[[12126,12126],"mapped",[29572]],[[12127,12127],"mapped",[29577]],[[12128,12128],"mapped",[29916]],[[12129,12129],"mapped",[29926]],[[12130,12130],"mapped",[29976]],[[12131,12131],"mapped",[29983]],[[12132,12132],"mapped",[29992]],[[12133,12133],
"mapped",[30000]],[[12134,12134],"mapped",[30091]],[[12135,12135],"mapped",[30098]],[[12136,12136],"mapped",[30326]],[[12137,12137],"mapped",[30333]],[[12138,12138],"mapped",[30382]],[[12139,12139],"mapped",[30399]],[[12140,12140],"mapped",[30446]],[[12141,12141],"mapped",[30683]],[[12142,12142],"mapped",[30690]],[[12143,12143],"mapped",[30707]],[[12144,12144],"mapped",[31034]],[[12145,12145],"mapped",[31160]],[[12146,12146],"mapped",[31166]],[[12147,12147],"mapped",[31348]],[[12148,12148],"mapped",[31435]],[[12149,12149],"mapped",[31481]],[[12150,12150],"mapped",[31859]],[[12151,12151],"mapped",[31992]],[[12152,12152],"mapped",[32566]],[[12153,12153],"mapped",[32593]],[[12154,12154],"mapped",[32650]],[[12155,12155],"mapped",[32701]],[[12156,12156],"mapped",[32769]],[[12157,12157],"mapped",[32780]],[[12158,12158],"mapped",[32786]],[[12159,12159],"mapped",[32819]],[[12160,12160],"mapped",[32895]],[[12161,12161],"mapped",[32905]],[[12162,12162],"mapped",[33251]],[[12163,12163],"mapped",[33258]],[[12164,12164],"mapped",[33267]],[[12165,12165],"mapped",[33276]],[[12166,12166],"mapped",[33292]],[[12167,12167],"mapped",[33307]],[[12168,12168],"mapped",[33311]],[[12169,12169],"mapped",[33390]],[[12170,12170],"mapped",[33394]],[[12171,12171],"mapped",[33400]],[[12172,12172],"mapped",[34381]],[[12173,12173],"mapped",[34411]],[[12174,12174],"mapped",[34880]],[[12175,12175],"mapped",[34892]],[[12176,12176],"mapped",[34915]],[[12177,12177],"mapped",[35198]],[[12178,12178],"mapped",[35211]],[[12179,12179],"mapped",[35282]],[[12180,12180],"mapped",[35328]],[[12181,12181],"mapped",[35895]],[[12182,12182],"mapped",[35910]],[[12183,12183],"mapped",[35925]],[[12184,12184],"mapped",[35960]],[[12185,12185],"mapped",[35997]],[[12186,12186],"mapped",[36196]],[[12187,12187],"mapped",[36208]],[[12188,12188],"mapped",[36275]],[[12189,12189],"mapped",[36523]],[[12190,12190],"mapped",[36554]],[[12191,12191],"mapped",[36763]],[[12192,12192],"mapped",[36784]],[[12193,12193],"mapped",[36789]],[[12194,12194],"mapped",[37009]],[[12195,12195],"mapped",[37193]],[[12196,12196],"mapped",[37318]],[[12197,12197],"mapped",[37324]],[[12198,12198],"mapped",[37329]],[[12199,12199],"mapped",[38263]],[[12200,12200],"mapped",[38272]],[[12201,12201],"mapped",[38428]],[[12202,12202],"mapped",[38582]],[[12203,12203],"mapped",[38585]],[[12204,12204],"mapped",[38632]],[[12205,12205],"mapped",[38737]],[[12206,12206],"mapped",[38750]],[[12207,12207],"mapped",[38754]],[[12208,12208],"mapped",[38761]],[[12209,12209],"mapped",[38859]],[[12210,12210],"mapped",[38893]],[[12211,12211],"mapped",[38899]],[[12212,12212],"mapped",[38913]],[[12213,12213],"mapped",[39080]],[[12214,12214],"mapped",[39131]],[[12215,12215],"mapped",[39135]],[[12216,12216],"mapped",[39318]],[[12217,12217],"mapped",[39321]],[[12218,12218],"mapped",[39340]],[[12219,12219],"mapped",[39592]],[[12220,12220],"mapped",[39640]],[[12221,12221],"mapped",[39647]],[[12222,12222],"mapped",[39717]],[[12223,12223],"mapped",[39727]],[[12224,12224],"mapped",[39730]],[[12225,12225],"mapped",[39740]],[[12226,12226],"mapped",[39770]],[[12227,12227],"mapped",[40165]],[[12228,12228],"mapped",[40565]],[[12229,12229],"mapped",[40575]],[[12230,12230],"mapped",[40613]],[[12231,12231],"mapped",[40635]],[[12232,12232],"mapped",[40643]],[[12233,12233],"mapped",[40653]],[[12234,12234],"mapped",[40657]],[[12235,12235],"mapped",[40697]],[[12236,12236],"mapped",[40701]],[[12237,12237],"mapped",[40718]],[[12238,12238],"mapped",[40723]],[[12239,12239],"mapped",[40736]],[[12240,12240],"mapped",[40763]],[[1224
1,12241],"mapped",[40778]],[[12242,12242],"mapped",[40786]],[[12243,12243],"mapped",[40845]],[[12244,12244],"mapped",[40860]],[[12245,12245],"mapped",[40864]],[[12246,12271],"disallowed"],[[12272,12283],"disallowed"],[[12284,12287],"disallowed"],[[12288,12288],"disallowed_STD3_mapped",[32]],[[12289,12289],"valid",[],"NV8"],[[12290,12290],"mapped",[46]],[[12291,12292],"valid",[],"NV8"],[[12293,12295],"valid"],[[12296,12329],"valid",[],"NV8"],[[12330,12333],"valid"],[[12334,12341],"valid",[],"NV8"],[[12342,12342],"mapped",[12306]],[[12343,12343],"valid",[],"NV8"],[[12344,12344],"mapped",[21313]],[[12345,12345],"mapped",[21316]],[[12346,12346],"mapped",[21317]],[[12347,12347],"valid",[],"NV8"],[[12348,12348],"valid"],[[12349,12349],"valid",[],"NV8"],[[12350,12350],"valid",[],"NV8"],[[12351,12351],"valid",[],"NV8"],[[12352,12352],"disallowed"],[[12353,12436],"valid"],[[12437,12438],"valid"],[[12439,12440],"disallowed"],[[12441,12442],"valid"],[[12443,12443],"disallowed_STD3_mapped",[32,12441]],[[12444,12444],"disallowed_STD3_mapped",[32,12442]],[[12445,12446],"valid"],[[12447,12447],"mapped",[12424,12426]],[[12448,12448],"valid",[],"NV8"],[[12449,12542],"valid"],[[12543,12543],"mapped",[12467,12488]],[[12544,12548],"disallowed"],[[12549,12588],"valid"],[[12589,12589],"valid"],[[12590,12592],"disallowed"],[[12593,12593],"mapped",[4352]],[[12594,12594],"mapped",[4353]],[[12595,12595],"mapped",[4522]],[[12596,12596],"mapped",[4354]],[[12597,12597],"mapped",[4524]],[[12598,12598],"mapped",[4525]],[[12599,12599],"mapped",[4355]],[[12600,12600],"mapped",[4356]],[[12601,12601],"mapped",[4357]],[[12602,12602],"mapped",[4528]],[[12603,12603],"mapped",[4529]],[[12604,12604],"mapped",[4530]],[[12605,12605],"mapped",[4531]],[[12606,12606],"mapped",[4532]],[[12607,12607],"mapped",[4533]],[[12608,12608],"mapped",[4378]],[[12609,12609],"mapped",[4358]],[[12610,12610],"mapped",[4359]],[[12611,12611],"mapped",[4360]],[[12612,12612],"mapped",[4385]],[[12613,12613],"mapped",[4361]],[[12614,12614],"mapped",[4362]],[[12615,12615],"mapped",[4363]],[[12616,12616],"mapped",[4364]],[[12617,12617],"mapped",[4365]],[[12618,12618],"mapped",[4366]],[[12619,12619],"mapped",[4367]],[[12620,12620],"mapped",[4368]],[[12621,12621],"mapped",[4369]],[[12622,12622],"mapped",[4370]],[[12623,12623],"mapped",[4449]],[[12624,12624],"mapped",[4450]],[[12625,12625],"mapped",[4451]],[[12626,12626],"mapped",[4452]],[[12627,12627],"mapped",[4453]],[[12628,12628],"mapped",[4454]],[[12629,12629],"mapped",[4455]],[[12630,12630],"mapped",[4456]],[[12631,12631],"mapped",[4457]],[[12632,12632],"mapped",[4458]],[[12633,12633],"mapped",[4459]],[[12634,12634],"mapped",[4460]],[[12635,12635],"mapped",[4461]],[[12636,12636],"mapped",[4462]],[[12637,12637],"mapped",[4463]],[[12638,12638],"mapped",[4464]],[[12639,12639],"mapped",[4465]],[[12640,12640],"mapped",[4466]],[[12641,12641],"mapped",[4467]],[[12642,12642],"mapped",[4468]],[[12643,12643],"mapped",[4469]],[[12644,12644],"disallowed"],[[12645,12645],"mapped",[4372]],[[12646,12646],"mapped",[4373]],[[12647,12647],"mapped",[4551]],[[12648,12648],"mapped",[4552]],[[12649,12649],"mapped",[4556]],[[12650,12650],"mapped",[4558]],[[12651,12651],"mapped",[4563]],[[12652,12652],"mapped",[4567]],[[12653,12653],"mapped",[4569]],[[12654,12654],"mapped",[4380]],[[12655,12655],"mapped",[4573]],[[12656,12656],"mapped",[4575]],[[12657,12657],"mapped",[4381]],[[12658,12658],"mapped",[4382]],[[12659,12659],"mapped",[4384]],[[12660,12660],"mapped",[4386]],[[12661,12661],"mapped",[4387]],[[12662,12662],"mapped",[439
1]],[[12663,12663],"mapped",[4393]],[[12664,12664],"mapped",[4395]],[[12665,12665],"mapped",[4396]],[[12666,12666],"mapped",[4397]],[[12667,12667],"mapped",[4398]],[[12668,12668],"mapped",[4399]],[[12669,12669],"mapped",[4402]],[[12670,12670],"mapped",[4406]],[[12671,12671],"mapped",[4416]],[[12672,12672],"mapped",[4423]],[[12673,12673],"mapped",[4428]],[[12674,12674],"mapped",[4593]],[[12675,12675],"mapped",[4594]],[[12676,12676],"mapped",[4439]],[[12677,12677],"mapped",[4440]],[[12678,12678],"mapped",[4441]],[[12679,12679],"mapped",[4484]],[[12680,12680],"mapped",[4485]],[[12681,12681],"mapped",[4488]],[[12682,12682],"mapped",[4497]],[[12683,12683],"mapped",[4498]],[[12684,12684],"mapped",[4500]],[[12685,12685],"mapped",[4510]],[[12686,12686],"mapped",[4513]],[[12687,12687],"disallowed"],[[12688,12689],"valid",[],"NV8"],[[12690,12690],"mapped",[19968]],[[12691,12691],"mapped",[20108]],[[12692,12692],"mapped",[19977]],[[12693,12693],"mapped",[22235]],[[12694,12694],"mapped",[19978]],[[12695,12695],"mapped",[20013]],[[12696,12696],"mapped",[19979]],[[12697,12697],"mapped",[30002]],[[12698,12698],"mapped",[20057]],[[12699,12699],"mapped",[19993]],[[12700,12700],"mapped",[19969]],[[12701,12701],"mapped",[22825]],[[12702,12702],"mapped",[22320]],[[12703,12703],"mapped",[20154]],[[12704,12727],"valid"],[[12728,12730],"valid"],[[12731,12735],"disallowed"],[[12736,12751],"valid",[],"NV8"],[[12752,12771],"valid",[],"NV8"],[[12772,12783],"disallowed"],[[12784,12799],"valid"],[[12800,12800],"disallowed_STD3_mapped",[40,4352,41]],[[12801,12801],"disallowed_STD3_mapped",[40,4354,41]],[[12802,12802],"disallowed_STD3_mapped",[40,4355,41]],[[12803,12803],"disallowed_STD3_mapped",[40,4357,41]],[[12804,12804],"disallowed_STD3_mapped",[40,4358,41]],[[12805,12805],"disallowed_STD3_mapped",[40,4359,41]],[[12806,12806],"disallowed_STD3_mapped",[40,4361,41]],[[12807,12807],"disallowed_STD3_mapped",[40,4363,41]],[[12808,12808],"disallowed_STD3_mapped",[40,4364,41]],[[12809,12809],"disallowed_STD3_mapped",[40,4366,41]],[[12810,12810],"disallowed_STD3_mapped",[40,4367,41]],[[12811,12811],"disallowed_STD3_mapped",[40,4368,41]],[[12812,12812],"disallowed_STD3_mapped",[40,4369,41]],[[12813,12813],"disallowed_STD3_mapped",[40,4370,41]],[[12814,12814],"disallowed_STD3_mapped",[40,44032,41]],[[12815,12815],"disallowed_STD3_mapped",[40,45208,41]],[[12816,12816],"disallowed_STD3_mapped",[40,45796,41]],[[12817,12817],"disallowed_STD3_mapped",[40,46972,41]],[[12818,12818],"disallowed_STD3_mapped",[40,47560,41]],[[12819,12819],"disallowed_STD3_mapped",[40,48148,41]],[[12820,12820],"disallowed_STD3_mapped",[40,49324,41]],[[12821,12821],"disallowed_STD3_mapped",[40,50500,41]],[[12822,12822],"disallowed_STD3_mapped",[40,51088,41]],[[12823,12823],"disallowed_STD3_mapped",[40,52264,41]],[[12824,12824],"disallowed_STD3_mapped",[40,52852,41]],[[12825,12825],"disallowed_STD3_mapped",[40,53440,41]],[[12826,12826],"disallowed_STD3_mapped",[40,54028,41]],[[12827,12827],"disallowed_STD3_mapped",[40,54616,41]],[[12828,12828],"disallowed_STD3_mapped",[40,51452,41]],[[12829,12829],"disallowed_STD3_mapped",[40,50724,51204,41]],[[12830,12830],"disallowed_STD3_mapped",[40,50724,54980,41]],[[12831,12831],"disallowed"],[[12832,12832],"disallowed_STD3_mapped",[40,19968,41]],[[12833,12833],"disallowed_STD3_mapped",[40,20108,41]],[[12834,12834],"disallowed_STD3_mapped",[40,19977,41]],[[12835,12835],"disallowed_STD3_mapped",[40,22235,41]],[[12836,12836],"disallowed_STD3_mapped",[40,20116,41]],[[12837,12837],"disallowed_STD3_mapped",[40,20845,41]],[
[12838,12838],"disallowed_STD3_mapped",[40,19971,41]],[[12839,12839],"disallowed_STD3_mapped",[40,20843,41]],[[12840,12840],"disallowed_STD3_mapped",[40,20061,41]],[[12841,12841],"disallowed_STD3_mapped",[40,21313,41]],[[12842,12842],"disallowed_STD3_mapped",[40,26376,41]],[[12843,12843],"disallowed_STD3_mapped",[40,28779,41]],[[12844,12844],"disallowed_STD3_mapped",[40,27700,41]],[[12845,12845],"disallowed_STD3_mapped",[40,26408,41]],[[12846,12846],"disallowed_STD3_mapped",[40,37329,41]],[[12847,12847],"disallowed_STD3_mapped",[40,22303,41]],[[12848,12848],"disallowed_STD3_mapped",[40,26085,41]],[[12849,12849],"disallowed_STD3_mapped",[40,26666,41]],[[12850,12850],"disallowed_STD3_mapped",[40,26377,41]],[[12851,12851],"disallowed_STD3_mapped",[40,31038,41]],[[12852,12852],"disallowed_STD3_mapped",[40,21517,41]],[[12853,12853],"disallowed_STD3_mapped",[40,29305,41]],[[12854,12854],"disallowed_STD3_mapped",[40,36001,41]],[[12855,12855],"disallowed_STD3_mapped",[40,31069,41]],[[12856,12856],"disallowed_STD3_mapped",[40,21172,41]],[[12857,12857],"disallowed_STD3_mapped",[40,20195,41]],[[12858,12858],"disallowed_STD3_mapped",[40,21628,41]],[[12859,12859],"disallowed_STD3_mapped",[40,23398,41]],[[12860,12860],"disallowed_STD3_mapped",[40,30435,41]],[[12861,12861],"disallowed_STD3_mapped",[40,20225,41]],[[12862,12862],"disallowed_STD3_mapped",[40,36039,41]],[[12863,12863],"disallowed_STD3_mapped",[40,21332,41]],[[12864,12864],"disallowed_STD3_mapped",[40,31085,41]],[[12865,12865],"disallowed_STD3_mapped",[40,20241,41]],[[12866,12866],"disallowed_STD3_mapped",[40,33258,41]],[[12867,12867],"disallowed_STD3_mapped",[40,33267,41]],[[12868,12868],"mapped",[21839]],[[12869,12869],"mapped",[24188]],[[12870,12870],"mapped",[25991]],[[12871,12871],"mapped",[31631]],[[12872,12879],"valid",[],"NV8"],[[12880,12880],"mapped",[112,116,101]],[[12881,12881],"mapped",[50,49]],[[12882,12882],"mapped",[50,50]],[[12883,12883],"mapped",[50,51]],[[12884,12884],"mapped",[50,52]],[[12885,12885],"mapped",[50,53]],[[12886,12886],"mapped",[50,54]],[[12887,12887],"mapped",[50,55]],[[12888,12888],"mapped",[50,56]],[[12889,12889],"mapped",[50,57]],[[12890,12890],"mapped",[51,48]],[[12891,12891],"mapped",[51,49]],[[12892,12892],"mapped",[51,50]],[[12893,12893],"mapped",[51,51]],[[12894,12894],"mapped",[51,52]],[[12895,12895],"mapped",[51,53]],[[12896,12896],"mapped",[4352]],[[12897,12897],"mapped",[4354]],[[12898,12898],"mapped",[4355]],[[12899,12899],"mapped",[4357]],[[12900,12900],"mapped",[4358]],[[12901,12901],"mapped",[4359]],[[12902,12902],"mapped",[4361]],[[12903,12903],"mapped",[4363]],[[12904,12904],"mapped",[4364]],[[12905,12905],"mapped",[4366]],[[12906,12906],"mapped",[4367]],[[12907,12907],"mapped",[4368]],[[12908,12908],"mapped",[4369]],[[12909,12909],"mapped",[4370]],[[12910,12910],"mapped",[44032]],[[12911,12911],"mapped",[45208]],[[12912,12912],"mapped",[45796]],[[12913,12913],"mapped",[46972]],[[12914,12914],"mapped",[47560]],[[12915,12915],"mapped",[48148]],[[12916,12916],"mapped",[49324]],[[12917,12917],"mapped",[50500]],[[12918,12918],"mapped",[51088]],[[12919,12919],"mapped",[52264]],[[12920,12920],"mapped",[52852]],[[12921,12921],"mapped",[53440]],[[12922,12922],"mapped",[54028]],[[12923,12923],"mapped",[54616]],[[12924,12924],"mapped",[52280,44256]],[[12925,12925],"mapped",[51452,51032]],[[12926,12926],"mapped",[50864]],[[12927,12927],"valid",[],"NV8"],[[12928,12928],"mapped",[19968]],[[12929,12929],"mapped",[20108]],[[12930,12930],"mapped",[19977]],[[12931,12931],"mapped",[22235]],[[12932,12932],"mappe
d",[20116]],[[12933,12933],"mapped",[20845]],[[12934,12934],"mapped",[19971]],[[12935,12935],"mapped",[20843]],[[12936,12936],"mapped",[20061]],[[12937,12937],"mapped",[21313]],[[12938,12938],"mapped",[26376]],[[12939,12939],"mapped",[28779]],[[12940,12940],"mapped",[27700]],[[12941,12941],"mapped",[26408]],[[12942,12942],"mapped",[37329]],[[12943,12943],"mapped",[22303]],[[12944,12944],"mapped",[26085]],[[12945,12945],"mapped",[26666]],[[12946,12946],"mapped",[26377]],[[12947,12947],"mapped",[31038]],[[12948,12948],"mapped",[21517]],[[12949,12949],"mapped",[29305]],[[12950,12950],"mapped",[36001]],[[12951,12951],"mapped",[31069]],[[12952,12952],"mapped",[21172]],[[12953,12953],"mapped",[31192]],[[12954,12954],"mapped",[30007]],[[12955,12955],"mapped",[22899]],[[12956,12956],"mapped",[36969]],[[12957,12957],"mapped",[20778]],[[12958,12958],"mapped",[21360]],[[12959,12959],"mapped",[27880]],[[12960,12960],"mapped",[38917]],[[12961,12961],"mapped",[20241]],[[12962,12962],"mapped",[20889]],[[12963,12963],"mapped",[27491]],[[12964,12964],"mapped",[19978]],[[12965,12965],"mapped",[20013]],[[12966,12966],"mapped",[19979]],[[12967,12967],"mapped",[24038]],[[12968,12968],"mapped",[21491]],[[12969,12969],"mapped",[21307]],[[12970,12970],"mapped",[23447]],[[12971,12971],"mapped",[23398]],[[12972,12972],"mapped",[30435]],[[12973,12973],"mapped",[20225]],[[12974,12974],"mapped",[36039]],[[12975,12975],"mapped",[21332]],[[12976,12976],"mapped",[22812]],[[12977,12977],"mapped",[51,54]],[[12978,12978],"mapped",[51,55]],[[12979,12979],"mapped",[51,56]],[[12980,12980],"mapped",[51,57]],[[12981,12981],"mapped",[52,48]],[[12982,12982],"mapped",[52,49]],[[12983,12983],"mapped",[52,50]],[[12984,12984],"mapped",[52,51]],[[12985,12985],"mapped",[52,52]],[[12986,12986],"mapped",[52,53]],[[12987,12987],"mapped",[52,54]],[[12988,12988],"mapped",[52,55]],[[12989,12989],"mapped",[52,56]],[[12990,12990],"mapped",[52,57]],[[12991,12991],"mapped",[53,48]],[[12992,12992],"mapped",[49,26376]],[[12993,12993],"mapped",[50,26376]],[[12994,12994],"mapped",[51,26376]],[[12995,12995],"mapped",[52,26376]],[[12996,12996],"mapped",[53,26376]],[[12997,12997],"mapped",[54,26376]],[[12998,12998],"mapped",[55,26376]],[[12999,12999],"mapped",[56,26376]],[[13000,13000],"mapped",[57,26376]],[[13001,13001],"mapped",[49,48,26376]],[[13002,13002],"mapped",[49,49,26376]],[[13003,13003],"mapped",[49,50,26376]],[[13004,13004],"mapped",[104,103]],[[13005,13005],"mapped",[101,114,103]],[[13006,13006],"mapped",[101,118]],[[13007,13007],"mapped",[108,116,100]],[[13008,13008],"mapped",[12450]],[[13009,13009],"mapped",[12452]],[[13010,13010],"mapped",[12454]],[[13011,13011],"mapped",[12456]],[[13012,13012],"mapped",[12458]],[[13013,13013],"mapped",[12459]],[[13014,13014],"mapped",[12461]],[[13015,13015],"mapped",[12463]],[[13016,13016],"mapped",[12465]],[[13017,13017],"mapped",[12467]],[[13018,13018],"mapped",[12469]],[[13019,13019],"mapped",[12471]],[[13020,13020],"mapped",[12473]],[[13021,13021],"mapped",[12475]],[[13022,13022],"mapped",[12477]],[[13023,13023],"mapped",[12479]],[[13024,13024],"mapped",[12481]],[[13025,13025],"mapped",[12484]],[[13026,13026],"mapped",[12486]],[[13027,13027],"mapped",[12488]],[[13028,13028],"mapped",[12490]],[[13029,13029],"mapped",[12491]],[[13030,13030],"mapped",[12492]],[[13031,13031],"mapped",[12493]],[[13032,13032],"mapped",[12494]],[[13033,13033],"mapped",[12495]],[[13034,13034],"mapped",[12498]],[[13035,13035],"mapped",[12501]],[[13036,13036],"mapped",[12504]],[[13037,13037],"mapped",[12507]],[[13038,13038],"m
apped",[12510]],[[13039,13039],"mapped",[12511]],[[13040,13040],"mapped",[12512]],[[13041,13041],"mapped",[12513]],[[13042,13042],"mapped",[12514]],[[13043,13043],"mapped",[12516]],[[13044,13044],"mapped",[12518]],[[13045,13045],"mapped",[12520]],[[13046,13046],"mapped",[12521]],[[13047,13047],"mapped",[12522]],[[13048,13048],"mapped",[12523]],[[13049,13049],"mapped",[12524]],[[13050,13050],"mapped",[12525]],[[13051,13051],"mapped",[12527]],[[13052,13052],"mapped",[12528]],[[13053,13053],"mapped",[12529]],[[13054,13054],"mapped",[12530]],[[13055,13055],"disallowed"],[[13056,13056],"mapped",[12450,12497,12540,12488]],[[13057,13057],"mapped",[12450,12523,12501,12449]],[[13058,13058],"mapped",[12450,12531,12506,12450]],[[13059,13059],"mapped",[12450,12540,12523]],[[13060,13060],"mapped",[12452,12491,12531,12464]],[[13061,13061],"mapped",[12452,12531,12481]],[[13062,13062],"mapped",[12454,12457,12531]],[[13063,13063],"mapped",[12456,12473,12463,12540,12489]],[[13064,13064],"mapped",[12456,12540,12459,12540]],[[13065,13065],"mapped",[12458,12531,12473]],[[13066,13066],"mapped",[12458,12540,12512]],[[13067,13067],"mapped",[12459,12452,12522]],[[13068,13068],"mapped",[12459,12521,12483,12488]],[[13069,13069],"mapped",[12459,12525,12522,12540]],[[13070,13070],"mapped",[12460,12525,12531]],[[13071,13071],"mapped",[12460,12531,12510]],[[13072,13072],"mapped",[12462,12460]],[[13073,13073],"mapped",[12462,12491,12540]],[[13074,13074],"mapped",[12461,12517,12522,12540]],[[13075,13075],"mapped",[12462,12523,12480,12540]],[[13076,13076],"mapped",[12461,12525]],[[13077,13077],"mapped",[12461,12525,12464,12521,12512]],[[13078,13078],"mapped",[12461,12525,12513,12540,12488,12523]],[[13079,13079],"mapped",[12461,12525,12527,12483,12488]],[[13080,13080],"mapped",[12464,12521,12512]],[[13081,13081],"mapped",[12464,12521,12512,12488,12531]],[[13082,13082],"mapped",[12463,12523,12476,12452,12525]],[[13083,13083],"mapped",[12463,12525,12540,12493]],[[13084,13084],"mapped",[12465,12540,12473]],[[13085,13085],"mapped",[12467,12523,12490]],[[13086,13086],"mapped",[12467,12540,12509]],[[13087,13087],"mapped",[12469,12452,12463,12523]],[[13088,13088],"mapped",[12469,12531,12481,12540,12512]],[[13089,13089],"mapped",[12471,12522,12531,12464]],[[13090,13090],"mapped",[12475,12531,12481]],[[13091,13091],"mapped",[12475,12531,12488]],[[13092,13092],"mapped",[12480,12540,12473]],[[13093,13093],"mapped",[12487,12471]],[[13094,13094],"mapped",[12489,12523]],[[13095,13095],"mapped",[12488,12531]],[[13096,13096],"mapped",[12490,12494]],[[13097,13097],"mapped",[12494,12483,12488]],[[13098,13098],"mapped",[12495,12452,12484]],[[13099,13099],"mapped",[12497,12540,12475,12531,12488]],[[13100,13100],"mapped",[12497,12540,12484]],[[13101,13101],"mapped",[12496,12540,12524,12523]],[[13102,13102],"mapped",[12500,12450,12473,12488,12523]],[[13103,13103],"mapped",[12500,12463,12523]],[[13104,13104],"mapped",[12500,12467]],[[13105,13105],"mapped",[12499,12523]],[[13106,13106],"mapped",[12501,12449,12521,12483,12489]],[[13107,13107],"mapped",[12501,12451,12540,12488]],[[13108,13108],"mapped",[12502,12483,12471,12455,12523]],[[13109,13109],"mapped",[12501,12521,12531]],[[13110,13110],"mapped",[12504,12463,12479,12540,12523]],[[13111,13111],"mapped",[12506,12477]],[[13112,13112],"mapped",[12506,12491,12498]],[[13113,13113],"mapped",[12504,12523,12484]],[[13114,13114],"mapped",[12506,12531,12473]],[[13115,13115],"mapped",[12506,12540,12472]],[[13116,13116],"mapped",[12505,12540,12479]],[[13117,13117],"mapped",[12509,12452,12531,12488]],[[1311
8,13118],"mapped",[12508,12523,12488]],[[13119,13119],"mapped",[12507,12531]],[[13120,13120],"mapped",[12509,12531,12489]],[[13121,13121],"mapped",[12507,12540,12523]],[[13122,13122],"mapped",[12507,12540,12531]],[[13123,13123],"mapped",[12510,12452,12463,12525]],[[13124,13124],"mapped",[12510,12452,12523]],[[13125,13125],"mapped",[12510,12483,12495]],[[13126,13126],"mapped",[12510,12523,12463]],[[13127,13127],"mapped",[12510,12531,12471,12519,12531]],[[13128,13128],"mapped",[12511,12463,12525,12531]],[[13129,13129],"mapped",[12511,12522]],[[13130,13130],"mapped",[12511,12522,12496,12540,12523]],[[13131,13131],"mapped",[12513,12460]],[[13132,13132],"mapped",[12513,12460,12488,12531]],[[13133,13133],"mapped",[12513,12540,12488,12523]],[[13134,13134],"mapped",[12516,12540,12489]],[[13135,13135],"mapped",[12516,12540,12523]],[[13136,13136],"mapped",[12518,12450,12531]],[[13137,13137],"mapped",[12522,12483,12488,12523]],[[13138,13138],"mapped",[12522,12521]],[[13139,13139],"mapped",[12523,12500,12540]],[[13140,13140],"mapped",[12523,12540,12502,12523]],[[13141,13141],"mapped",[12524,12512]],[[13142,13142],"mapped",[12524,12531,12488,12466,12531]],[[13143,13143],"mapped",[12527,12483,12488]],[[13144,13144],"mapped",[48,28857]],[[13145,13145],"mapped",[49,28857]],[[13146,13146],"mapped",[50,28857]],[[13147,13147],"mapped",[51,28857]],[[13148,13148],"mapped",[52,28857]],[[13149,13149],"mapped",[53,28857]],[[13150,13150],"mapped",[54,28857]],[[13151,13151],"mapped",[55,28857]],[[13152,13152],"mapped",[56,28857]],[[13153,13153],"mapped",[57,28857]],[[13154,13154],"mapped",[49,48,28857]],[[13155,13155],"mapped",[49,49,28857]],[[13156,13156],"mapped",[49,50,28857]],[[13157,13157],"mapped",[49,51,28857]],[[13158,13158],"mapped",[49,52,28857]],[[13159,13159],"mapped",[49,53,28857]],[[13160,13160],"mapped",[49,54,28857]],[[13161,13161],"mapped",[49,55,28857]],[[13162,13162],"mapped",[49,56,28857]],[[13163,13163],"mapped",[49,57,28857]],[[13164,13164],"mapped",[50,48,28857]],[[13165,13165],"mapped",[50,49,28857]],[[13166,13166],"mapped",[50,50,28857]],[[13167,13167],"mapped",[50,51,28857]],[[13168,13168],"mapped",[50,52,28857]],[[13169,13169],"mapped",[104,112,97]],[[13170,13170],"mapped",[100,97]],[[13171,13171],"mapped",[97,117]],[[13172,13172],"mapped",[98,97,114]],[[13173,13173],"mapped",[111,118]],[[13174,13174],"mapped",[112,99]],[[13175,13175],"mapped",[100,109]],[[13176,13176],"mapped",[100,109,50]],[[13177,13177],"mapped",[100,109,51]],[[13178,13178],"mapped",[105,117]],[[13179,13179],"mapped",[24179,25104]],[[13180,13180],"mapped",[26157,21644]],[[13181,13181],"mapped",[22823,27491]],[[13182,13182],"mapped",[26126,27835]],[[13183,13183],"mapped",[26666,24335,20250,31038]],[[13184,13184],"mapped",[112,97]],[[13185,13185],"mapped",[110,97]],[[13186,13186],"mapped",[956,97]],[[13187,13187],"mapped",[109,97]],[[13188,13188],"mapped",[107,97]],[[13189,13189],"mapped",[107,98]],[[13190,13190],"mapped",[109,98]],[[13191,13191],"mapped",[103,98]],[[13192,13192],"mapped",[99,97,108]],[[13193,13193],"mapped",[107,99,97,108]],[[13194,13194],"mapped",[112,102]],[[13195,13195],"mapped",[110,102]],[[13196,13196],"mapped",[956,102]],[[13197,13197],"mapped",[956,103]],[[13198,13198],"mapped",[109,103]],[[13199,13199],"mapped",[107,103]],[[13200,13200],"mapped",[104,122]],[[13201,13201],"mapped",[107,104,122]],[[13202,13202],"mapped",[109,104,122]],[[13203,13203],"mapped",[103,104,122]],[[13204,13204],"mapped",[116,104,122]],[[13205,13205],"mapped",[956,108]],[[13206,13206],"mapped",[109,108]],[[13207,13207],"ma
pped",[100,108]],[[13208,13208],"mapped",[107,108]],[[13209,13209],"mapped",[102,109]],[[13210,13210],"mapped",[110,109]],[[13211,13211],"mapped",[956,109]],[[13212,13212],"mapped",[109,109]],[[13213,13213],"mapped",[99,109]],[[13214,13214],"mapped",[107,109]],[[13215,13215],"mapped",[109,109,50]],[[13216,13216],"mapped",[99,109,50]],[[13217,13217],"mapped",[109,50]],[[13218,13218],"mapped",[107,109,50]],[[13219,13219],"mapped",[109,109,51]],[[13220,13220],"mapped",[99,109,51]],[[13221,13221],"mapped",[109,51]],[[13222,13222],"mapped",[107,109,51]],[[13223,13223],"mapped",[109,8725,115]],[[13224,13224],"mapped",[109,8725,115,50]],[[13225,13225],"mapped",[112,97]],[[13226,13226],"mapped",[107,112,97]],[[13227,13227],"mapped",[109,112,97]],[[13228,13228],"mapped",[103,112,97]],[[13229,13229],"mapped",[114,97,100]],[[13230,13230],"mapped",[114,97,100,8725,115]],[[13231,13231],"mapped",[114,97,100,8725,115,50]],[[13232,13232],"mapped",[112,115]],[[13233,13233],"mapped",[110,115]],[[13234,13234],"mapped",[956,115]],[[13235,13235],"mapped",[109,115]],[[13236,13236],"mapped",[112,118]],[[13237,13237],"mapped",[110,118]],[[13238,13238],"mapped",[956,118]],[[13239,13239],"mapped",[109,118]],[[13240,13240],"mapped",[107,118]],[[13241,13241],"mapped",[109,118]],[[13242,13242],"mapped",[112,119]],[[13243,13243],"mapped",[110,119]],[[13244,13244],"mapped",[956,119]],[[13245,13245],"mapped",[109,119]],[[13246,13246],"mapped",[107,119]],[[13247,13247],"mapped",[109,119]],[[13248,13248],"mapped",[107,969]],[[13249,13249],"mapped",[109,969]],[[13250,13250],"disallowed"],[[13251,13251],"mapped",[98,113]],[[13252,13252],"mapped",[99,99]],[[13253,13253],"mapped",[99,100]],[[13254,13254],"mapped",[99,8725,107,103]],[[13255,13255],"disallowed"],[[13256,13256],"mapped",[100,98]],[[13257,13257],"mapped",[103,121]],[[13258,13258],"mapped",[104,97]],[[13259,13259],"mapped",[104,112]],[[13260,13260],"mapped",[105,110]],[[13261,13261],"mapped",[107,107]],[[13262,13262],"mapped",[107,109]],[[13263,13263],"mapped",[107,116]],[[13264,13264],"mapped",[108,109]],[[13265,13265],"mapped",[108,110]],[[13266,13266],"mapped",[108,111,103]],[[13267,13267],"mapped",[108,120]],[[13268,13268],"mapped",[109,98]],[[13269,13269],"mapped",[109,105,108]],[[13270,13270],"mapped",[109,111,108]],[[13271,13271],"mapped",[112,104]],[[13272,13272],"disallowed"],[[13273,13273],"mapped",[112,112,109]],[[13274,13274],"mapped",[112,114]],[[13275,13275],"mapped",[115,114]],[[13276,13276],"mapped",[115,118]],[[13277,13277],"mapped",[119,98]],[[13278,13278],"mapped",[118,8725,109]],[[13279,13279],"mapped",[97,8725,109]],[[13280,13280],"mapped",[49,26085]],[[13281,13281],"mapped",[50,26085]],[[13282,13282],"mapped",[51,26085]],[[13283,13283],"mapped",[52,26085]],[[13284,13284],"mapped",[53,26085]],[[13285,13285],"mapped",[54,26085]],[[13286,13286],"mapped",[55,26085]],[[13287,13287],"mapped",[56,26085]],[[13288,13288],"mapped",[57,26085]],[[13289,13289],"mapped",[49,48,26085]],[[13290,13290],"mapped",[49,49,26085]],[[13291,13291],"mapped",[49,50,26085]],[[13292,13292],"mapped",[49,51,26085]],[[13293,13293],"mapped",[49,52,26085]],[[13294,13294],"mapped",[49,53,26085]],[[13295,13295],"mapped",[49,54,26085]],[[13296,13296],"mapped",[49,55,26085]],[[13297,13297],"mapped",[49,56,26085]],[[13298,13298],"mapped",[49,57,26085]],[[13299,13299],"mapped",[50,48,26085]],[[13300,13300],"mapped",[50,49,26085]],[[13301,13301],"mapped",[50,50,26085]],[[13302,13302],"mapped",[50,51,26085]],[[13303,13303],"mapped",[50,52,26085]],[[13304,13304],"mapped",[50,53,26085]]
,[[13305,13305],"mapped",[50,54,26085]],[[13306,13306],"mapped",[50,55,26085]],[[13307,13307],"mapped",[50,56,26085]],[[13308,13308],"mapped",[50,57,26085]],[[13309,13309],"mapped",[51,48,26085]],[[13310,13310],"mapped",[51,49,26085]],[[13311,13311],"mapped",[103,97,108]],[[13312,19893],"valid"],[[19894,19903],"disallowed"],[[19904,19967],"valid",[],"NV8"],[[19968,40869],"valid"],[[40870,40891],"valid"],[[40892,40899],"valid"],[[40900,40907],"valid"],[[40908,40908],"valid"],[[40909,40917],"valid"],[[40918,40959],"disallowed"],[[40960,42124],"valid"],[[42125,42127],"disallowed"],[[42128,42145],"valid",[],"NV8"],[[42146,42147],"valid",[],"NV8"],[[42148,42163],"valid",[],"NV8"],[[42164,42164],"valid",[],"NV8"],[[42165,42176],"valid",[],"NV8"],[[42177,42177],"valid",[],"NV8"],[[42178,42180],"valid",[],"NV8"],[[42181,42181],"valid",[],"NV8"],[[42182,42182],"valid",[],"NV8"],[[42183,42191],"disallowed"],[[42192,42237],"valid"],[[42238,42239],"valid",[],"NV8"],[[42240,42508],"valid"],[[42509,42511],"valid",[],"NV8"],[[42512,42539],"valid"],[[42540,42559],"disallowed"],[[42560,42560],"mapped",[42561]],[[42561,42561],"valid"],[[42562,42562],"mapped",[42563]],[[42563,42563],"valid"],[[42564,42564],"mapped",[42565]],[[42565,42565],"valid"],[[42566,42566],"mapped",[42567]],[[42567,42567],"valid"],[[42568,42568],"mapped",[42569]],[[42569,42569],"valid"],[[42570,42570],"mapped",[42571]],[[42571,42571],"valid"],[[42572,42572],"mapped",[42573]],[[42573,42573],"valid"],[[42574,42574],"mapped",[42575]],[[42575,42575],"valid"],[[42576,42576],"mapped",[42577]],[[42577,42577],"valid"],[[42578,42578],"mapped",[42579]],[[42579,42579],"valid"],[[42580,42580],"mapped",[42581]],[[42581,42581],"valid"],[[42582,42582],"mapped",[42583]],[[42583,42583],"valid"],[[42584,42584],"mapped",[42585]],[[42585,42585],"valid"],[[42586,42586],"mapped",[42587]],[[42587,42587],"valid"],[[42588,42588],"mapped",[42589]],[[42589,42589],"valid"],[[42590,42590],"mapped",[42591]],[[42591,42591],"valid"],[[42592,42592],"mapped",[42593]],[[42593,42593],"valid"],[[42594,42594],"mapped",[42595]],[[42595,42595],"valid"],[[42596,42596],"mapped",[42597]],[[42597,42597],"valid"],[[42598,42598],"mapped",[42599]],[[42599,42599],"valid"],[[42600,42600],"mapped",[42601]],[[42601,42601],"valid"],[[42602,42602],"mapped",[42603]],[[42603,42603],"valid"],[[42604,42604],"mapped",[42605]],[[42605,42607],"valid"],[[42608,42611],"valid",[],"NV8"],[[42612,42619],"valid"],[[42620,42621],"valid"],[[42622,42622],"valid",[],"NV8"],[[42623,42623],"valid"],[[42624,42624],"mapped",[42625]],[[42625,42625],"valid"],[[42626,42626],"mapped",[42627]],[[42627,42627],"valid"],[[42628,42628],"mapped",[42629]],[[42629,42629],"valid"],[[42630,42630],"mapped",[42631]],[[42631,42631],"valid"],[[42632,42632],"mapped",[42633]],[[42633,42633],"valid"],[[42634,42634],"mapped",[42635]],[[42635,42635],"valid"],[[42636,42636],"mapped",[42637]],[[42637,42637],"valid"],[[42638,42638],"mapped",[42639]],[[42639,42639],"valid"],[[42640,42640],"mapped",[42641]],[[42641,42641],"valid"],[[42642,42642],"mapped",[42643]],[[42643,42643],"valid"],[[42644,42644],"mapped",[42645]],[[42645,42645],"valid"],[[42646,42646],"mapped",[42647]],[[42647,42647],"valid"],[[42648,42648],"mapped",[42649]],[[42649,42649],"valid"],[[42650,42650],"mapped",[42651]],[[42651,42651],"valid"],[[42652,42652],"mapped",[1098]],[[42653,42653],"mapped",[1100]],[[42654,42654],"valid"],[[42655,42655],"valid"],[[42656,42725],"valid"],[[42726,42735],"valid",[],"NV8"],[[42736,42737],"valid"],[[42738,42743],"valid",[],"NV8"],[[42
744,42751],"disallowed"],[[42752,42774],"valid",[],"NV8"],[[42775,42778],"valid"],[[42779,42783],"valid"],[[42784,42785],"valid",[],"NV8"],[[42786,42786],"mapped",[42787]],[[42787,42787],"valid"],[[42788,42788],"mapped",[42789]],[[42789,42789],"valid"],[[42790,42790],"mapped",[42791]],[[42791,42791],"valid"],[[42792,42792],"mapped",[42793]],[[42793,42793],"valid"],[[42794,42794],"mapped",[42795]],[[42795,42795],"valid"],[[42796,42796],"mapped",[42797]],[[42797,42797],"valid"],[[42798,42798],"mapped",[42799]],[[42799,42801],"valid"],[[42802,42802],"mapped",[42803]],[[42803,42803],"valid"],[[42804,42804],"mapped",[42805]],[[42805,42805],"valid"],[[42806,42806],"mapped",[42807]],[[42807,42807],"valid"],[[42808,42808],"mapped",[42809]],[[42809,42809],"valid"],[[42810,42810],"mapped",[42811]],[[42811,42811],"valid"],[[42812,42812],"mapped",[42813]],[[42813,42813],"valid"],[[42814,42814],"mapped",[42815]],[[42815,42815],"valid"],[[42816,42816],"mapped",[42817]],[[42817,42817],"valid"],[[42818,42818],"mapped",[42819]],[[42819,42819],"valid"],[[42820,42820],"mapped",[42821]],[[42821,42821],"valid"],[[42822,42822],"mapped",[42823]],[[42823,42823],"valid"],[[42824,42824],"mapped",[42825]],[[42825,42825],"valid"],[[42826,42826],"mapped",[42827]],[[42827,42827],"valid"],[[42828,42828],"mapped",[42829]],[[42829,42829],"valid"],[[42830,42830],"mapped",[42831]],[[42831,42831],"valid"],[[42832,42832],"mapped",[42833]],[[42833,42833],"valid"],[[42834,42834],"mapped",[42835]],[[42835,42835],"valid"],[[42836,42836],"mapped",[42837]],[[42837,42837],"valid"],[[42838,42838],"mapped",[42839]],[[42839,42839],"valid"],[[42840,42840],"mapped",[42841]],[[42841,42841],"valid"],[[42842,42842],"mapped",[42843]],[[42843,42843],"valid"],[[42844,42844],"mapped",[42845]],[[42845,42845],"valid"],[[42846,42846],"mapped",[42847]],[[42847,42847],"valid"],[[42848,42848],"mapped",[42849]],[[42849,42849],"valid"],[[42850,42850],"mapped",[42851]],[[42851,42851],"valid"],[[42852,42852],"mapped",[42853]],[[42853,42853],"valid"],[[42854,42854],"mapped",[42855]],[[42855,42855],"valid"],[[42856,42856],"mapped",[42857]],[[42857,42857],"valid"],[[42858,42858],"mapped",[42859]],[[42859,42859],"valid"],[[42860,42860],"mapped",[42861]],[[42861,42861],"valid"],[[42862,42862],"mapped",[42863]],[[42863,42863],"valid"],[[42864,42864],"mapped",[42863]],[[42865,42872],"valid"],[[42873,42873],"mapped",[42874]],[[42874,42874],"valid"],[[42875,42875],"mapped",[42876]],[[42876,42876],"valid"],[[42877,42877],"mapped",[7545]],[[42878,42878],"mapped",[42879]],[[42879,42879],"valid"],[[42880,42880],"mapped",[42881]],[[42881,42881],"valid"],[[42882,42882],"mapped",[42883]],[[42883,42883],"valid"],[[42884,42884],"mapped",[42885]],[[42885,42885],"valid"],[[42886,42886],"mapped",[42887]],[[42887,42888],"valid"],[[42889,42890],"valid",[],"NV8"],[[42891,42891],"mapped",[42892]],[[42892,42892],"valid"],[[42893,42893],"mapped",[613]],[[42894,42894],"valid"],[[42895,42895],"valid"],[[42896,42896],"mapped",[42897]],[[42897,42897],"valid"],[[42898,42898],"mapped",[42899]],[[42899,42899],"valid"],[[42900,42901],"valid"],[[42902,42902],"mapped",[42903]],[[42903,42903],"valid"],[[42904,42904],"mapped",[42905]],[[42905,42905],"valid"],[[42906,42906],"mapped",[42907]],[[42907,42907],"valid"],[[42908,42908],"mapped",[42909]],[[42909,42909],"valid"],[[42910,42910],"mapped",[42911]],[[42911,42911],"valid"],[[42912,42912],"mapped",[42913]],[[42913,42913],"valid"],[[42914,42914],"mapped",[42915]],[[42915,42915],"valid"],[[42916,42916],"mapped",[42917]],[[42917,42917],"valid"]
,[[42918,42918],"mapped",[42919]],[[42919,42919],"valid"],[[42920,42920],"mapped",[42921]],[[42921,42921],"valid"],[[42922,42922],"mapped",[614]],[[42923,42923],"mapped",[604]],[[42924,42924],"mapped",[609]],[[42925,42925],"mapped",[620]],[[42926,42927],"disallowed"],[[42928,42928],"mapped",[670]],[[42929,42929],"mapped",[647]],[[42930,42930],"mapped",[669]],[[42931,42931],"mapped",[43859]],[[42932,42932],"mapped",[42933]],[[42933,42933],"valid"],[[42934,42934],"mapped",[42935]],[[42935,42935],"valid"],[[42936,42998],"disallowed"],[[42999,42999],"valid"],[[43000,43000],"mapped",[295]],[[43001,43001],"mapped",[339]],[[43002,43002],"valid"],[[43003,43007],"valid"],[[43008,43047],"valid"],[[43048,43051],"valid",[],"NV8"],[[43052,43055],"disallowed"],[[43056,43065],"valid",[],"NV8"],[[43066,43071],"disallowed"],[[43072,43123],"valid"],[[43124,43127],"valid",[],"NV8"],[[43128,43135],"disallowed"],[[43136,43204],"valid"],[[43205,43213],"disallowed"],[[43214,43215],"valid",[],"NV8"],[[43216,43225],"valid"],[[43226,43231],"disallowed"],[[43232,43255],"valid"],[[43256,43258],"valid",[],"NV8"],[[43259,43259],"valid"],[[43260,43260],"valid",[],"NV8"],[[43261,43261],"valid"],[[43262,43263],"disallowed"],[[43264,43309],"valid"],[[43310,43311],"valid",[],"NV8"],[[43312,43347],"valid"],[[43348,43358],"disallowed"],[[43359,43359],"valid",[],"NV8"],[[43360,43388],"valid",[],"NV8"],[[43389,43391],"disallowed"],[[43392,43456],"valid"],[[43457,43469],"valid",[],"NV8"],[[43470,43470],"disallowed"],[[43471,43481],"valid"],[[43482,43485],"disallowed"],[[43486,43487],"valid",[],"NV8"],[[43488,43518],"valid"],[[43519,43519],"disallowed"],[[43520,43574],"valid"],[[43575,43583],"disallowed"],[[43584,43597],"valid"],[[43598,43599],"disallowed"],[[43600,43609],"valid"],[[43610,43611],"disallowed"],[[43612,43615],"valid",[],"NV8"],[[43616,43638],"valid"],[[43639,43641],"valid",[],"NV8"],[[43642,43643],"valid"],[[43644,43647],"valid"],[[43648,43714],"valid"],[[43715,43738],"disallowed"],[[43739,43741],"valid"],[[43742,43743],"valid",[],"NV8"],[[43744,43759],"valid"],[[43760,43761],"valid",[],"NV8"],[[43762,43766],"valid"],[[43767,43776],"disallowed"],[[43777,43782],"valid"],[[43783,43784],"disallowed"],[[43785,43790],"valid"],[[43791,43792],"disallowed"],[[43793,43798],"valid"],[[43799,43807],"disallowed"],[[43808,43814],"valid"],[[43815,43815],"disallowed"],[[43816,43822],"valid"],[[43823,43823],"disallowed"],[[43824,43866],"valid"],[[43867,43867],"valid",[],"NV8"],[[43868,43868],"mapped",[42791]],[[43869,43869],"mapped",[43831]],[[43870,43870],"mapped",[619]],[[43871,43871],"mapped",[43858]],[[43872,43875],"valid"],[[43876,43877],"valid"],[[43878,43887],"disallowed"],[[43888,43888],"mapped",[5024]],[[43889,43889],"mapped",[5025]],[[43890,43890],"mapped",[5026]],[[43891,43891],"mapped",[5027]],[[43892,43892],"mapped",[5028]],[[43893,43893],"mapped",[5029]],[[43894,43894],"mapped",[5030]],[[43895,43895],"mapped",[5031]],[[43896,43896],"mapped",[5032]],[[43897,43897],"mapped",[5033]],[[43898,43898],"mapped",[5034]],[[43899,43899],"mapped",[5035]],[[43900,43900],"mapped",[5036]],[[43901,43901],"mapped",[5037]],[[43902,43902],"mapped",[5038]],[[43903,43903],"mapped",[5039]],[[43904,43904],"mapped",[5040]],[[43905,43905],"mapped",[5041]],[[43906,43906],"mapped",[5042]],[[43907,43907],"mapped",[5043]],[[43908,43908],"mapped",[5044]],[[43909,43909],"mapped",[5045]],[[43910,43910],"mapped",[5046]],[[43911,43911],"mapped",[5047]],[[43912,43912],"mapped",[5048]],[[43913,43913],"mapped",[5049]],[[43914,43914],"mapped",[5050]],[[43
915,43915],"mapped",[5051]],[[43916,43916],"mapped",[5052]],[[43917,43917],"mapped",[5053]],[[43918,43918],"mapped",[5054]],[[43919,43919],"mapped",[5055]],[[43920,43920],"mapped",[5056]],[[43921,43921],"mapped",[5057]],[[43922,43922],"mapped",[5058]],[[43923,43923],"mapped",[5059]],[[43924,43924],"mapped",[5060]],[[43925,43925],"mapped",[5061]],[[43926,43926],"mapped",[5062]],[[43927,43927],"mapped",[5063]],[[43928,43928],"mapped",[5064]],[[43929,43929],"mapped",[5065]],[[43930,43930],"mapped",[5066]],[[43931,43931],"mapped",[5067]],[[43932,43932],"mapped",[5068]],[[43933,43933],"mapped",[5069]],[[43934,43934],"mapped",[5070]],[[43935,43935],"mapped",[5071]],[[43936,43936],"mapped",[5072]],[[43937,43937],"mapped",[5073]],[[43938,43938],"mapped",[5074]],[[43939,43939],"mapped",[5075]],[[43940,43940],"mapped",[5076]],[[43941,43941],"mapped",[5077]],[[43942,43942],"mapped",[5078]],[[43943,43943],"mapped",[5079]],[[43944,43944],"mapped",[5080]],[[43945,43945],"mapped",[5081]],[[43946,43946],"mapped",[5082]],[[43947,43947],"mapped",[5083]],[[43948,43948],"mapped",[5084]],[[43949,43949],"mapped",[5085]],[[43950,43950],"mapped",[5086]],[[43951,43951],"mapped",[5087]],[[43952,43952],"mapped",[5088]],[[43953,43953],"mapped",[5089]],[[43954,43954],"mapped",[5090]],[[43955,43955],"mapped",[5091]],[[43956,43956],"mapped",[5092]],[[43957,43957],"mapped",[5093]],[[43958,43958],"mapped",[5094]],[[43959,43959],"mapped",[5095]],[[43960,43960],"mapped",[5096]],[[43961,43961],"mapped",[5097]],[[43962,43962],"mapped",[5098]],[[43963,43963],"mapped",[5099]],[[43964,43964],"mapped",[5100]],[[43965,43965],"mapped",[5101]],[[43966,43966],"mapped",[5102]],[[43967,43967],"mapped",[5103]],[[43968,44010],"valid"],[[44011,44011],"valid",[],"NV8"],[[44012,44013],"valid"],[[44014,44015],"disallowed"],[[44016,44025],"valid"],[[44026,44031],"disallowed"],[[44032,55203],"valid"],[[55204,55215],"disallowed"],[[55216,55238],"valid",[],"NV8"],[[55239,55242],"disallowed"],[[55243,55291],"valid",[],"NV8"],[[55292,55295],"disallowed"],[[55296,57343],"disallowed"],[[57344,63743],"disallowed"],[[63744,63744],"mapped",[35912]],[[63745,63745],"mapped",[26356]],[[63746,63746],"mapped",[36554]],[[63747,63747],"mapped",[36040]],[[63748,63748],"mapped",[28369]],[[63749,63749],"mapped",[20018]],[[63750,63750],"mapped",[21477]],[[63751,63752],"mapped",[40860]],[[63753,63753],"mapped",[22865]],[[63754,63754],"mapped",[37329]],[[63755,63755],"mapped",[21895]],[[63756,63756],"mapped",[22856]],[[63757,63757],"mapped",[25078]],[[63758,63758],"mapped",[30313]],[[63759,63759],"mapped",[32645]],[[63760,63760],"mapped",[34367]],[[63761,63761],"mapped",[34746]],[[63762,63762],"mapped",[35064]],[[63763,63763],"mapped",[37007]],[[63764,63764],"mapped",[27138]],[[63765,63765],"mapped",[27931]],[[63766,63766],"mapped",[28889]],[[63767,63767],"mapped",[29662]],[[63768,63768],"mapped",[33853]],[[63769,63769],"mapped",[37226]],[[63770,63770],"mapped",[39409]],[[63771,63771],"mapped",[20098]],[[63772,63772],"mapped",[21365]],[[63773,63773],"mapped",[27396]],[[63774,63774],"mapped",[29211]],[[63775,63775],"mapped",[34349]],[[63776,63776],"mapped",[40478]],[[63777,63777],"mapped",[23888]],[[63778,63778],"mapped",[28651]],[[63779,63779],"mapped",[34253]],[[63780,63780],"mapped",[35172]],[[63781,63781],"mapped",[25289]],[[63782,63782],"mapped",[33240]],[[63783,63783],"mapped",[34847]],[[63784,63784],"mapped",[24266]],[[63785,63785],"mapped",[26391]],[[63786,63786],"mapped",[28010]],[[63787,63787],"mapped",[29436]],[[63788,63788],"mapped",[37070]],[[63789,63789
],"mapped",[20358]],[[63790,63790],"mapped",[20919]],[[63791,63791],"mapped",[21214]],[[63792,63792],"mapped",[25796]],[[63793,63793],"mapped",[27347]],[[63794,63794],"mapped",[29200]],[[63795,63795],"mapped",[30439]],[[63796,63796],"mapped",[32769]],[[63797,63797],"mapped",[34310]],[[63798,63798],"mapped",[34396]],[[63799,63799],"mapped",[36335]],[[63800,63800],"mapped",[38706]],[[63801,63801],"mapped",[39791]],[[63802,63802],"mapped",[40442]],[[63803,63803],"mapped",[30860]],[[63804,63804],"mapped",[31103]],[[63805,63805],"mapped",[32160]],[[63806,63806],"mapped",[33737]],[[63807,63807],"mapped",[37636]],[[63808,63808],"mapped",[40575]],[[63809,63809],"mapped",[35542]],[[63810,63810],"mapped",[22751]],[[63811,63811],"mapped",[24324]],[[63812,63812],"mapped",[31840]],[[63813,63813],"mapped",[32894]],[[63814,63814],"mapped",[29282]],[[63815,63815],"mapped",[30922]],[[63816,63816],"mapped",[36034]],[[63817,63817],"mapped",[38647]],[[63818,63818],"mapped",[22744]],[[63819,63819],"mapped",[23650]],[[63820,63820],"mapped",[27155]],[[63821,63821],"mapped",[28122]],[[63822,63822],"mapped",[28431]],[[63823,63823],"mapped",[32047]],[[63824,63824],"mapped",[32311]],[[63825,63825],"mapped",[38475]],[[63826,63826],"mapped",[21202]],[[63827,63827],"mapped",[32907]],[[63828,63828],"mapped",[20956]],[[63829,63829],"mapped",[20940]],[[63830,63830],"mapped",[31260]],[[63831,63831],"mapped",[32190]],[[63832,63832],"mapped",[33777]],[[63833,63833],"mapped",[38517]],[[63834,63834],"mapped",[35712]],[[63835,63835],"mapped",[25295]],[[63836,63836],"mapped",[27138]],[[63837,63837],"mapped",[35582]],[[63838,63838],"mapped",[20025]],[[63839,63839],"mapped",[23527]],[[63840,63840],"mapped",[24594]],[[63841,63841],"mapped",[29575]],[[63842,63842],"mapped",[30064]],[[63843,63843],"mapped",[21271]],[[63844,63844],"mapped",[30971]],[[63845,63845],"mapped",[20415]],[[63846,63846],"mapped",[24489]],[[63847,63847],"mapped",[19981]],[[63848,63848],"mapped",[27852]],[[63849,63849],"mapped",[25976]],[[63850,63850],"mapped",[32034]],[[63851,63851],"mapped",[21443]],[[63852,63852],"mapped",[22622]],[[63853,63853],"mapped",[30465]],[[63854,63854],"mapped",[33865]],[[63855,63855],"mapped",[35498]],[[63856,63856],"mapped",[27578]],[[63857,63857],"mapped",[36784]],[[63858,63858],"mapped",[27784]],[[63859,63859],"mapped",[25342]],[[63860,63860],"mapped",[33509]],[[63861,63861],"mapped",[25504]],[[63862,63862],"mapped",[30053]],[[63863,63863],"mapped",[20142]],[[63864,63864],"mapped",[20841]],[[63865,63865],"mapped",[20937]],[[63866,63866],"mapped",[26753]],[[63867,63867],"mapped",[31975]],[[63868,63868],"mapped",[33391]],[[63869,63869],"mapped",[35538]],[[63870,63870],"mapped",[37327]],[[63871,63871],"mapped",[21237]],[[63872,63872],"mapped",[21570]],[[63873,63873],"mapped",[22899]],[[63874,63874],"mapped",[24300]],[[63875,63875],"mapped",[26053]],[[63876,63876],"mapped",[28670]],[[63877,63877],"mapped",[31018]],[[63878,63878],"mapped",[38317]],[[63879,63879],"mapped",[39530]],[[63880,63880],"mapped",[40599]],[[63881,63881],"mapped",[40654]],[[63882,63882],"mapped",[21147]],[[63883,63883],"mapped",[26310]],[[63884,63884],"mapped",[27511]],[[63885,63885],"mapped",[36706]],[[63886,63886],"mapped",[24180]],[[63887,63887],"mapped",[24976]],[[63888,63888],"mapped",[25088]],[[63889,63889],"mapped",[25754]],[[63890,63890],"mapped",[28451]],[[63891,63891],"mapped",[29001]],[[63892,63892],"mapped",[29833]],[[63893,63893],"mapped",[31178]],[[63894,63894],"mapped",[32244]],[[63895,63895],"mapped",[32879]],[[63896,63896],"mapped",[36646]],[[63
897,63897],"mapped",[34030]],[[63898,63898],"mapped",[36899]],[[63899,63899],"mapped",[37706]],[[63900,63900],"mapped",[21015]],[[63901,63901],"mapped",[21155]],[[63902,63902],"mapped",[21693]],[[63903,63903],"mapped",[28872]],[[63904,63904],"mapped",[35010]],[[63905,63905],"mapped",[35498]],[[63906,63906],"mapped",[24265]],[[63907,63907],"mapped",[24565]],[[63908,63908],"mapped",[25467]],[[63909,63909],"mapped",[27566]],[[63910,63910],"mapped",[31806]],[[63911,63911],"mapped",[29557]],[[63912,63912],"mapped",[20196]],[[63913,63913],"mapped",[22265]],[[63914,63914],"mapped",[23527]],[[63915,63915],"mapped",[23994]],[[63916,63916],"mapped",[24604]],[[63917,63917],"mapped",[29618]],[[63918,63918],"mapped",[29801]],[[63919,63919],"mapped",[32666]],[[63920,63920],"mapped",[32838]],[[63921,63921],"mapped",[37428]],[[63922,63922],"mapped",[38646]],[[63923,63923],"mapped",[38728]],[[63924,63924],"mapped",[38936]],[[63925,63925],"mapped",[20363]],[[63926,63926],"mapped",[31150]],[[63927,63927],"mapped",[37300]],[[63928,63928],"mapped",[38584]],[[63929,63929],"mapped",[24801]],[[63930,63930],"mapped",[20102]],[[63931,63931],"mapped",[20698]],[[63932,63932],"mapped",[23534]],[[63933,63933],"mapped",[23615]],[[63934,63934],"mapped",[26009]],[[63935,63935],"mapped",[27138]],[[63936,63936],"mapped",[29134]],[[63937,63937],"mapped",[30274]],[[63938,63938],"mapped",[34044]],[[63939,63939],"mapped",[36988]],[[63940,63940],"mapped",[40845]],[[63941,63941],"mapped",[26248]],[[63942,63942],"mapped",[38446]],[[63943,63943],"mapped",[21129]],[[63944,63944],"mapped",[26491]],[[63945,63945],"mapped",[26611]],[[63946,63946],"mapped",[27969]],[[63947,63947],"mapped",[28316]],[[63948,63948],"mapped",[29705]],[[63949,63949],"mapped",[30041]],[[63950,63950],"mapped",[30827]],[[63951,63951],"mapped",[32016]],[[63952,63952],"mapped",[39006]],[[63953,63953],"mapped",[20845]],[[63954,63954],"mapped",[25134]],[[63955,63955],"mapped",[38520]],[[63956,63956],"mapped",[20523]],[[63957,63957],"mapped",[23833]],[[63958,63958],"mapped",[28138]],[[63959,63959],"mapped",[36650]],[[63960,63960],"mapped",[24459]],[[63961,63961],"mapped",[24900]],[[63962,63962],"mapped",[26647]],[[63963,63963],"mapped",[29575]],[[63964,63964],"mapped",[38534]],[[63965,63965],"mapped",[21033]],[[63966,63966],"mapped",[21519]],[[63967,63967],"mapped",[23653]],[[63968,63968],"mapped",[26131]],[[63969,63969],"mapped",[26446]],[[63970,63970],"mapped",[26792]],[[63971,63971],"mapped",[27877]],[[63972,63972],"mapped",[29702]],[[63973,63973],"mapped",[30178]],[[63974,63974],"mapped",[32633]],[[63975,63975],"mapped",[35023]],[[63976,63976],"mapped",[35041]],[[63977,63977],"mapped",[37324]],[[63978,63978],"mapped",[38626]],[[63979,63979],"mapped",[21311]],[[63980,63980],"mapped",[28346]],[[63981,63981],"mapped",[21533]],[[63982,63982],"mapped",[29136]],[[63983,63983],"mapped",[29848]],[[63984,63984],"mapped",[34298]],[[63985,63985],"mapped",[38563]],[[63986,63986],"mapped",[40023]],[[63987,63987],"mapped",[40607]],[[63988,63988],"mapped",[26519]],[[63989,63989],"mapped",[28107]],[[63990,63990],"mapped",[33256]],[[63991,63991],"mapped",[31435]],[[63992,63992],"mapped",[31520]],[[63993,63993],"mapped",[31890]],[[63994,63994],"mapped",[29376]],[[63995,63995],"mapped",[28825]],[[63996,63996],"mapped",[35672]],[[63997,63997],"mapped",[20160]],[[63998,63998],"mapped",[33590]],[[63999,63999],"mapped",[21050]],[[64000,64000],"mapped",[20999]],[[64001,64001],"mapped",[24230]],[[64002,64002],"mapped",[25299]],[[64003,64003],"mapped",[31958]],[[64004,64004],"mapped",[234
29]],[[64005,64005],"mapped",[27934]],[[64006,64006],"mapped",[26292]],[[64007,64007],"mapped",[36667]],[[64008,64008],"mapped",[34892]],[[64009,64009],"mapped",[38477]],[[64010,64010],"mapped",[35211]],[[64011,64011],"mapped",[24275]],[[64012,64012],"mapped",[20800]],[[64013,64013],"mapped",[21952]],[[64014,64015],"valid"],[[64016,64016],"mapped",[22618]],[[64017,64017],"valid"],[[64018,64018],"mapped",[26228]],[[64019,64020],"valid"],[[64021,64021],"mapped",[20958]],[[64022,64022],"mapped",[29482]],[[64023,64023],"mapped",[30410]],[[64024,64024],"mapped",[31036]],[[64025,64025],"mapped",[31070]],[[64026,64026],"mapped",[31077]],[[64027,64027],"mapped",[31119]],[[64028,64028],"mapped",[38742]],[[64029,64029],"mapped",[31934]],[[64030,64030],"mapped",[32701]],[[64031,64031],"valid"],[[64032,64032],"mapped",[34322]],[[64033,64033],"valid"],[[64034,64034],"mapped",[35576]],[[64035,64036],"valid"],[[64037,64037],"mapped",[36920]],[[64038,64038],"mapped",[37117]],[[64039,64041],"valid"],[[64042,64042],"mapped",[39151]],[[64043,64043],"mapped",[39164]],[[64044,64044],"mapped",[39208]],[[64045,64045],"mapped",[40372]],[[64046,64046],"mapped",[37086]],[[64047,64047],"mapped",[38583]],[[64048,64048],"mapped",[20398]],[[64049,64049],"mapped",[20711]],[[64050,64050],"mapped",[20813]],[[64051,64051],"mapped",[21193]],[[64052,64052],"mapped",[21220]],[[64053,64053],"mapped",[21329]],[[64054,64054],"mapped",[21917]],[[64055,64055],"mapped",[22022]],[[64056,64056],"mapped",[22120]],[[64057,64057],"mapped",[22592]],[[64058,64058],"mapped",[22696]],[[64059,64059],"mapped",[23652]],[[64060,64060],"mapped",[23662]],[[64061,64061],"mapped",[24724]],[[64062,64062],"mapped",[24936]],[[64063,64063],"mapped",[24974]],[[64064,64064],"mapped",[25074]],[[64065,64065],"mapped",[25935]],[[64066,64066],"mapped",[26082]],[[64067,64067],"mapped",[26257]],[[64068,64068],"mapped",[26757]],[[64069,64069],"mapped",[28023]],[[64070,64070],"mapped",[28186]],[[64071,64071],"mapped",[28450]],[[64072,64072],"mapped",[29038]],[[64073,64073],"mapped",[29227]],[[64074,64074],"mapped",[29730]],[[64075,64075],"mapped",[30865]],[[64076,64076],"mapped",[31038]],[[64077,64077],"mapped",[31049]],[[64078,64078],"mapped",[31048]],[[64079,64079],"mapped",[31056]],[[64080,64080],"mapped",[31062]],[[64081,64081],"mapped",[31069]],[[64082,64082],"mapped",[31117]],[[64083,64083],"mapped",[31118]],[[64084,64084],"mapped",[31296]],[[64085,64085],"mapped",[31361]],[[64086,64086],"mapped",[31680]],[[64087,64087],"mapped",[32244]],[[64088,64088],"mapped",[32265]],[[64089,64089],"mapped",[32321]],[[64090,64090],"mapped",[32626]],[[64091,64091],"mapped",[32773]],[[64092,64092],"mapped",[33261]],[[64093,64094],"mapped",[33401]],[[64095,64095],"mapped",[33879]],[[64096,64096],"mapped",[35088]],[[64097,64097],"mapped",[35222]],[[64098,64098],"mapped",[35585]],[[64099,64099],"mapped",[35641]],[[64100,64100],"mapped",[36051]],[[64101,64101],"mapped",[36104]],[[64102,64102],"mapped",[36790]],[[64103,64103],"mapped",[36920]],[[64104,64104],"mapped",[38627]],[[64105,64105],"mapped",[38911]],[[64106,64106],"mapped",[38971]],[[64107,64107],"mapped",[24693]],[[64108,64108],"mapped",[148206]],[[64109,64109],"mapped",[33304]],[[64110,64111],"disallowed"],[[64112,64112],"mapped",[20006]],[[64113,64113],"mapped",[20917]],[[64114,64114],"mapped",[20840]],[[64115,64115],"mapped",[20352]],[[64116,64116],"mapped",[20805]],[[64117,64117],"mapped",[20864]],[[64118,64118],"mapped",[21191]],[[64119,64119],"mapped",[21242]],[[64120,64120],"mapped",[21917]],[[64121,64121],"map
ped",[21845]],[[64122,64122],"mapped",[21913]],[[64123,64123],"mapped",[21986]],[[64124,64124],"mapped",[22618]],[[64125,64125],"mapped",[22707]],[[64126,64126],"mapped",[22852]],[[64127,64127],"mapped",[22868]],[[64128,64128],"mapped",[23138]],[[64129,64129],"mapped",[23336]],[[64130,64130],"mapped",[24274]],[[64131,64131],"mapped",[24281]],[[64132,64132],"mapped",[24425]],[[64133,64133],"mapped",[24493]],[[64134,64134],"mapped",[24792]],[[64135,64135],"mapped",[24910]],[[64136,64136],"mapped",[24840]],[[64137,64137],"mapped",[24974]],[[64138,64138],"mapped",[24928]],[[64139,64139],"mapped",[25074]],[[64140,64140],"mapped",[25140]],[[64141,64141],"mapped",[25540]],[[64142,64142],"mapped",[25628]],[[64143,64143],"mapped",[25682]],[[64144,64144],"mapped",[25942]],[[64145,64145],"mapped",[26228]],[[64146,64146],"mapped",[26391]],[[64147,64147],"mapped",[26395]],[[64148,64148],"mapped",[26454]],[[64149,64149],"mapped",[27513]],[[64150,64150],"mapped",[27578]],[[64151,64151],"mapped",[27969]],[[64152,64152],"mapped",[28379]],[[64153,64153],"mapped",[28363]],[[64154,64154],"mapped",[28450]],[[64155,64155],"mapped",[28702]],[[64156,64156],"mapped",[29038]],[[64157,64157],"mapped",[30631]],[[64158,64158],"mapped",[29237]],[[64159,64159],"mapped",[29359]],[[64160,64160],"mapped",[29482]],[[64161,64161],"mapped",[29809]],[[64162,64162],"mapped",[29958]],[[64163,64163],"mapped",[30011]],[[64164,64164],"mapped",[30237]],[[64165,64165],"mapped",[30239]],[[64166,64166],"mapped",[30410]],[[64167,64167],"mapped",[30427]],[[64168,64168],"mapped",[30452]],[[64169,64169],"mapped",[30538]],[[64170,64170],"mapped",[30528]],[[64171,64171],"mapped",[30924]],[[64172,64172],"mapped",[31409]],[[64173,64173],"mapped",[31680]],[[64174,64174],"mapped",[31867]],[[64175,64175],"mapped",[32091]],[[64176,64176],"mapped",[32244]],[[64177,64177],"mapped",[32574]],[[64178,64178],"mapped",[32773]],[[64179,64179],"mapped",[33618]],[[64180,64180],"mapped",[33775]],[[64181,64181],"mapped",[34681]],[[64182,64182],"mapped",[35137]],[[64183,64183],"mapped",[35206]],[[64184,64184],"mapped",[35222]],[[64185,64185],"mapped",[35519]],[[64186,64186],"mapped",[35576]],[[64187,64187],"mapped",[35531]],[[64188,64188],"mapped",[35585]],[[64189,64189],"mapped",[35582]],[[64190,64190],"mapped",[35565]],[[64191,64191],"mapped",[35641]],[[64192,64192],"mapped",[35722]],[[64193,64193],"mapped",[36104]],[[64194,64194],"mapped",[36664]],[[64195,64195],"mapped",[36978]],[[64196,64196],"mapped",[37273]],[[64197,64197],"mapped",[37494]],[[64198,64198],"mapped",[38524]],[[64199,64199],"mapped",[38627]],[[64200,64200],"mapped",[38742]],[[64201,64201],"mapped",[38875]],[[64202,64202],"mapped",[38911]],[[64203,64203],"mapped",[38923]],[[64204,64204],"mapped",[38971]],[[64205,64205],"mapped",[39698]],[[64206,64206],"mapped",[40860]],[[64207,64207],"mapped",[141386]],[[64208,64208],"mapped",[141380]],[[64209,64209],"mapped",[144341]],[[64210,64210],"mapped",[15261]],[[64211,64211],"mapped",[16408]],[[64212,64212],"mapped",[16441]],[[64213,64213],"mapped",[152137]],[[64214,64214],"mapped",[154832]],[[64215,64215],"mapped",[163539]],[[64216,64216],"mapped",[40771]],[[64217,64217],"mapped",[40846]],[[64218,64255],"disallowed"],[[64256,64256],"mapped",[102,102]],[[64257,64257],"mapped",[102,105]],[[64258,64258],"mapped",[102,108]],[[64259,64259],"mapped",[102,102,105]],[[64260,64260],"mapped",[102,102,108]],[[64261,64262],"mapped",[115,116]],[[64263,64274],"disallowed"],[[64275,64275],"mapped",[1396,1398]],[[64276,64276],"mapped",[1396,1381]],[[64277,64277],"m
apped",[1396,1387]],[[64278,64278],"mapped",[1406,1398]],[[64279,64279],"mapped",[1396,1389]],[[64280,64284],"disallowed"],[[64285,64285],"mapped",[1497,1460]],[[64286,64286],"valid"],[[64287,64287],"mapped",[1522,1463]],[[64288,64288],"mapped",[1506]],[[64289,64289],"mapped",[1488]],[[64290,64290],"mapped",[1491]],[[64291,64291],"mapped",[1492]],[[64292,64292],"mapped",[1499]],[[64293,64293],"mapped",[1500]],[[64294,64294],"mapped",[1501]],[[64295,64295],"mapped",[1512]],[[64296,64296],"mapped",[1514]],[[64297,64297],"disallowed_STD3_mapped",[43]],[[64298,64298],"mapped",[1513,1473]],[[64299,64299],"mapped",[1513,1474]],[[64300,64300],"mapped",[1513,1468,1473]],[[64301,64301],"mapped",[1513,1468,1474]],[[64302,64302],"mapped",[1488,1463]],[[64303,64303],"mapped",[1488,1464]],[[64304,64304],"mapped",[1488,1468]],[[64305,64305],"mapped",[1489,1468]],[[64306,64306],"mapped",[1490,1468]],[[64307,64307],"mapped",[1491,1468]],[[64308,64308],"mapped",[1492,1468]],[[64309,64309],"mapped",[1493,1468]],[[64310,64310],"mapped",[1494,1468]],[[64311,64311],"disallowed"],[[64312,64312],"mapped",[1496,1468]],[[64313,64313],"mapped",[1497,1468]],[[64314,64314],"mapped",[1498,1468]],[[64315,64315],"mapped",[1499,1468]],[[64316,64316],"mapped",[1500,1468]],[[64317,64317],"disallowed"],[[64318,64318],"mapped",[1502,1468]],[[64319,64319],"disallowed"],[[64320,64320],"mapped",[1504,1468]],[[64321,64321],"mapped",[1505,1468]],[[64322,64322],"disallowed"],[[64323,64323],"mapped",[1507,1468]],[[64324,64324],"mapped",[1508,1468]],[[64325,64325],"disallowed"],[[64326,64326],"mapped",[1510,1468]],[[64327,64327],"mapped",[1511,1468]],[[64328,64328],"mapped",[1512,1468]],[[64329,64329],"mapped",[1513,1468]],[[64330,64330],"mapped",[1514,1468]],[[64331,64331],"mapped",[1493,1465]],[[64332,64332],"mapped",[1489,1471]],[[64333,64333],"mapped",[1499,1471]],[[64334,64334],"mapped",[1508,1471]],[[64335,64335],"mapped",[1488,1500]],[[64336,64337],"mapped",[1649]],[[64338,64341],"mapped",[1659]],[[64342,64345],"mapped",[1662]],[[64346,64349],"mapped",[1664]],[[64350,64353],"mapped",[1658]],[[64354,64357],"mapped",[1663]],[[64358,64361],"mapped",[1657]],[[64362,64365],"mapped",[1700]],[[64366,64369],"mapped",[1702]],[[64370,64373],"mapped",[1668]],[[64374,64377],"mapped",[1667]],[[64378,64381],"mapped",[1670]],[[64382,64385],"mapped",[1671]],[[64386,64387],"mapped",[1677]],[[64388,64389],"mapped",[1676]],[[64390,64391],"mapped",[1678]],[[64392,64393],"mapped",[1672]],[[64394,64395],"mapped",[1688]],[[64396,64397],"mapped",[1681]],[[64398,64401],"mapped",[1705]],[[64402,64405],"mapped",[1711]],[[64406,64409],"mapped",[1715]],[[64410,64413],"mapped",[1713]],[[64414,64415],"mapped",[1722]],[[64416,64419],"mapped",[1723]],[[64420,64421],"mapped",[1728]],[[64422,64425],"mapped",[1729]],[[64426,64429],"mapped",[1726]],[[64430,64431],"mapped",[1746]],[[64432,64433],"mapped",[1747]],[[64434,64449],"valid",[],"NV8"],[[64450,64466],"disallowed"],[[64467,64470],"mapped",[1709]],[[64471,64472],"mapped",[1735]],[[64473,64474],"mapped",[1734]],[[64475,64476],"mapped",[1736]],[[64477,64477],"mapped",[1735,1652]],[[64478,64479],"mapped",[1739]],[[64480,64481],"mapped",[1733]],[[64482,64483],"mapped",[1737]],[[64484,64487],"mapped",[1744]],[[64488,64489],"mapped",[1609]],[[64490,64491],"mapped",[1574,1575]],[[64492,64493],"mapped",[1574,1749]],[[64494,64495],"mapped",[1574,1608]],[[64496,64497],"mapped",[1574,1735]],[[64498,64499],"mapped",[1574,1734]],[[64500,64501],"mapped",[1574,1736]],[[64502,64504],"mapped",[1574,1744]],[[64505,64507],"ma
pped",[1574,1609]],[[64508,64511],"mapped",[1740]],[[64512,64512],"mapped",[1574,1580]],[[64513,64513],"mapped",[1574,1581]],[[64514,64514],"mapped",[1574,1605]],[[64515,64515],"mapped",[1574,1609]],[[64516,64516],"mapped",[1574,1610]],[[64517,64517],"mapped",[1576,1580]],[[64518,64518],"mapped",[1576,1581]],[[64519,64519],"mapped",[1576,1582]],[[64520,64520],"mapped",[1576,1605]],[[64521,64521],"mapped",[1576,1609]],[[64522,64522],"mapped",[1576,1610]],[[64523,64523],"mapped",[1578,1580]],[[64524,64524],"mapped",[1578,1581]],[[64525,64525],"mapped",[1578,1582]],[[64526,64526],"mapped",[1578,1605]],[[64527,64527],"mapped",[1578,1609]],[[64528,64528],"mapped",[1578,1610]],[[64529,64529],"mapped",[1579,1580]],[[64530,64530],"mapped",[1579,1605]],[[64531,64531],"mapped",[1579,1609]],[[64532,64532],"mapped",[1579,1610]],[[64533,64533],"mapped",[1580,1581]],[[64534,64534],"mapped",[1580,1605]],[[64535,64535],"mapped",[1581,1580]],[[64536,64536],"mapped",[1581,1605]],[[64537,64537],"mapped",[1582,1580]],[[64538,64538],"mapped",[1582,1581]],[[64539,64539],"mapped",[1582,1605]],[[64540,64540],"mapped",[1587,1580]],[[64541,64541],"mapped",[1587,1581]],[[64542,64542],"mapped",[1587,1582]],[[64543,64543],"mapped",[1587,1605]],[[64544,64544],"mapped",[1589,1581]],[[64545,64545],"mapped",[1589,1605]],[[64546,64546],"mapped",[1590,1580]],[[64547,64547],"mapped",[1590,1581]],[[64548,64548],"mapped",[1590,1582]],[[64549,64549],"mapped",[1590,1605]],[[64550,64550],"mapped",[1591,1581]],[[64551,64551],"mapped",[1591,1605]],[[64552,64552],"mapped",[1592,1605]],[[64553,64553],"mapped",[1593,1580]],[[64554,64554],"mapped",[1593,1605]],[[64555,64555],"mapped",[1594,1580]],[[64556,64556],"mapped",[1594,1605]],[[64557,64557],"mapped",[1601,1580]],[[64558,64558],"mapped",[1601,1581]],[[64559,64559],"mapped",[1601,1582]],[[64560,64560],"mapped",[1601,1605]],[[64561,64561],"mapped",[1601,1609]],[[64562,64562],"mapped",[1601,1610]],[[64563,64563],"mapped",[1602,1581]],[[64564,64564],"mapped",[1602,1605]],[[64565,64565],"mapped",[1602,1609]],[[64566,64566],"mapped",[1602,1610]],[[64567,64567],"mapped",[1603,1575]],[[64568,64568],"mapped",[1603,1580]],[[64569,64569],"mapped",[1603,1581]],[[64570,64570],"mapped",[1603,1582]],[[64571,64571],"mapped",[1603,1604]],[[64572,64572],"mapped",[1603,1605]],[[64573,64573],"mapped",[1603,1609]],[[64574,64574],"mapped",[1603,1610]],[[64575,64575],"mapped",[1604,1580]],[[64576,64576],"mapped",[1604,1581]],[[64577,64577],"mapped",[1604,1582]],[[64578,64578],"mapped",[1604,1605]],[[64579,64579],"mapped",[1604,1609]],[[64580,64580],"mapped",[1604,1610]],[[64581,64581],"mapped",[1605,1580]],[[64582,64582],"mapped",[1605,1581]],[[64583,64583],"mapped",[1605,1582]],[[64584,64584],"mapped",[1605,1605]],[[64585,64585],"mapped",[1605,1609]],[[64586,64586],"mapped",[1605,1610]],[[64587,64587],"mapped",[1606,1580]],[[64588,64588],"mapped",[1606,1581]],[[64589,64589],"mapped",[1606,1582]],[[64590,64590],"mapped",[1606,1605]],[[64591,64591],"mapped",[1606,1609]],[[64592,64592],"mapped",[1606,1610]],[[64593,64593],"mapped",[1607,1580]],[[64594,64594],"mapped",[1607,1605]],[[64595,64595],"mapped",[1607,1609]],[[64596,64596],"mapped",[1607,1610]],[[64597,64597],"mapped",[1610,1580]],[[64598,64598],"mapped",[1610,1581]],[[64599,64599],"mapped",[1610,1582]],[[64600,64600],"mapped",[1610,1605]],[[64601,64601],"mapped",[1610,1609]],[[64602,64602],"mapped",[1610,1610]],[[64603,64603],"mapped",[1584,1648]],[[64604,64604],"mapped",[1585,1648]],[[64605,64605],"mapped",[1609,1648]],[[64606,64606],"disallowed
_STD3_mapped",[32,1612,1617]],[[64607,64607],"disallowed_STD3_mapped",[32,1613,1617]],[[64608,64608],"disallowed_STD3_mapped",[32,1614,1617]],[[64609,64609],"disallowed_STD3_mapped",[32,1615,1617]],[[64610,64610],"disallowed_STD3_mapped",[32,1616,1617]],[[64611,64611],"disallowed_STD3_mapped",[32,1617,1648]],[[64612,64612],"mapped",[1574,1585]],[[64613,64613],"mapped",[1574,1586]],[[64614,64614],"mapped",[1574,1605]],[[64615,64615],"mapped",[1574,1606]],[[64616,64616],"mapped",[1574,1609]],[[64617,64617],"mapped",[1574,1610]],[[64618,64618],"mapped",[1576,1585]],[[64619,64619],"mapped",[1576,1586]],[[64620,64620],"mapped",[1576,1605]],[[64621,64621],"mapped",[1576,1606]],[[64622,64622],"mapped",[1576,1609]],[[64623,64623],"mapped",[1576,1610]],[[64624,64624],"mapped",[1578,1585]],[[64625,64625],"mapped",[1578,1586]],[[64626,64626],"mapped",[1578,1605]],[[64627,64627],"mapped",[1578,1606]],[[64628,64628],"mapped",[1578,1609]],[[64629,64629],"mapped",[1578,1610]],[[64630,64630],"mapped",[1579,1585]],[[64631,64631],"mapped",[1579,1586]],[[64632,64632],"mapped",[1579,1605]],[[64633,64633],"mapped",[1579,1606]],[[64634,64634],"mapped",[1579,1609]],[[64635,64635],"mapped",[1579,1610]],[[64636,64636],"mapped",[1601,1609]],[[64637,64637],"mapped",[1601,1610]],[[64638,64638],"mapped",[1602,1609]],[[64639,64639],"mapped",[1602,1610]],[[64640,64640],"mapped",[1603,1575]],[[64641,64641],"mapped",[1603,1604]],[[64642,64642],"mapped",[1603,1605]],[[64643,64643],"mapped",[1603,1609]],[[64644,64644],"mapped",[1603,1610]],[[64645,64645],"mapped",[1604,1605]],[[64646,64646],"mapped",[1604,1609]],[[64647,64647],"mapped",[1604,1610]],[[64648,64648],"mapped",[1605,1575]],[[64649,64649],"mapped",[1605,1605]],[[64650,64650],"mapped",[1606,1585]],[[64651,64651],"mapped",[1606,1586]],[[64652,64652],"mapped",[1606,1605]],[[64653,64653],"mapped",[1606,1606]],[[64654,64654],"mapped",[1606,1609]],[[64655,64655],"mapped",[1606,1610]],[[64656,64656],"mapped",[1609,1648]],[[64657,64657],"mapped",[1610,1585]],[[64658,64658],"mapped",[1610,1586]],[[64659,64659],"mapped",[1610,1605]],[[64660,64660],"mapped",[1610,1606]],[[64661,64661],"mapped",[1610,1609]],[[64662,64662],"mapped",[1610,1610]],[[64663,64663],"mapped",[1574,1580]],[[64664,64664],"mapped",[1574,1581]],[[64665,64665],"mapped",[1574,1582]],[[64666,64666],"mapped",[1574,1605]],[[64667,64667],"mapped",[1574,1607]],[[64668,64668],"mapped",[1576,1580]],[[64669,64669],"mapped",[1576,1581]],[[64670,64670],"mapped",[1576,1582]],[[64671,64671],"mapped",[1576,1605]],[[64672,64672],"mapped",[1576,1607]],[[64673,64673],"mapped",[1578,1580]],[[64674,64674],"mapped",[1578,1581]],[[64675,64675],"mapped",[1578,1582]],[[64676,64676],"mapped",[1578,1605]],[[64677,64677],"mapped",[1578,1607]],[[64678,64678],"mapped",[1579,1605]],[[64679,64679],"mapped",[1580,1581]],[[64680,64680],"mapped",[1580,1605]],[[64681,64681],"mapped",[1581,1580]],[[64682,64682],"mapped",[1581,1605]],[[64683,64683],"mapped",[1582,1580]],[[64684,64684],"mapped",[1582,1605]],[[64685,64685],"mapped",[1587,1580]],[[64686,64686],"mapped",[1587,1581]],[[64687,64687],"mapped",[1587,1582]],[[64688,64688],"mapped",[1587,1605]],[[64689,64689],"mapped",[1589,1581]],[[64690,64690],"mapped",[1589,1582]],[[64691,64691],"mapped",[1589,1605]],[[64692,64692],"mapped",[1590,1580]],[[64693,64693],"mapped",[1590,1581]],[[64694,64694],"mapped",[1590,1582]],[[64695,64695],"mapped",[1590,1605]],[[64696,64696],"mapped",[1591,1581]],[[64697,64697],"mapped",[1592,1605]],[[64698,64698],"mapped",[1593,1580]],[[64699,64699],"mapped",[1
593,1605]],[[64700,64700],"mapped",[1594,1580]],[[64701,64701],"mapped",[1594,1605]],[[64702,64702],"mapped",[1601,1580]],[[64703,64703],"mapped",[1601,1581]],[[64704,64704],"mapped",[1601,1582]],[[64705,64705],"mapped",[1601,1605]],[[64706,64706],"mapped",[1602,1581]],[[64707,64707],"mapped",[1602,1605]],[[64708,64708],"mapped",[1603,1580]],[[64709,64709],"mapped",[1603,1581]],[[64710,64710],"mapped",[1603,1582]],[[64711,64711],"mapped",[1603,1604]],[[64712,64712],"mapped",[1603,1605]],[[64713,64713],"mapped",[1604,1580]],[[64714,64714],"mapped",[1604,1581]],[[64715,64715],"mapped",[1604,1582]],[[64716,64716],"mapped",[1604,1605]],[[64717,64717],"mapped",[1604,1607]],[[64718,64718],"mapped",[1605,1580]],[[64719,64719],"mapped",[1605,1581]],[[64720,64720],"mapped",[1605,1582]],[[64721,64721],"mapped",[1605,1605]],[[64722,64722],"mapped",[1606,1580]],[[64723,64723],"mapped",[1606,1581]],[[64724,64724],"mapped",[1606,1582]],[[64725,64725],"mapped",[1606,1605]],[[64726,64726],"mapped",[1606,1607]],[[64727,64727],"mapped",[1607,1580]],[[64728,64728],"mapped",[1607,1605]],[[64729,64729],"mapped",[1607,1648]],[[64730,64730],"mapped",[1610,1580]],[[64731,64731],"mapped",[1610,1581]],[[64732,64732],"mapped",[1610,1582]],[[64733,64733],"mapped",[1610,1605]],[[64734,64734],"mapped",[1610,1607]],[[64735,64735],"mapped",[1574,1605]],[[64736,64736],"mapped",[1574,1607]],[[64737,64737],"mapped",[1576,1605]],[[64738,64738],"mapped",[1576,1607]],[[64739,64739],"mapped",[1578,1605]],[[64740,64740],"mapped",[1578,1607]],[[64741,64741],"mapped",[1579,1605]],[[64742,64742],"mapped",[1579,1607]],[[64743,64743],"mapped",[1587,1605]],[[64744,64744],"mapped",[1587,1607]],[[64745,64745],"mapped",[1588,1605]],[[64746,64746],"mapped",[1588,1607]],[[64747,64747],"mapped",[1603,1604]],[[64748,64748],"mapped",[1603,1605]],[[64749,64749],"mapped",[1604,1605]],[[64750,64750],"mapped",[1606,1605]],[[64751,64751],"mapped",[1606,1607]],[[64752,64752],"mapped",[1610,1605]],[[64753,64753],"mapped",[1610,1607]],[[64754,64754],"mapped",[1600,1614,1617]],[[64755,64755],"mapped",[1600,1615,1617]],[[64756,64756],"mapped",[1600,1616,1617]],[[64757,64757],"mapped",[1591,1609]],[[64758,64758],"mapped",[1591,1610]],[[64759,64759],"mapped",[1593,1609]],[[64760,64760],"mapped",[1593,1610]],[[64761,64761],"mapped",[1594,1609]],[[64762,64762],"mapped",[1594,1610]],[[64763,64763],"mapped",[1587,1609]],[[64764,64764],"mapped",[1587,1610]],[[64765,64765],"mapped",[1588,1609]],[[64766,64766],"mapped",[1588,1610]],[[64767,64767],"mapped",[1581,1609]],[[64768,64768],"mapped",[1581,1610]],[[64769,64769],"mapped",[1580,1609]],[[64770,64770],"mapped",[1580,1610]],[[64771,64771],"mapped",[1582,1609]],[[64772,64772],"mapped",[1582,1610]],[[64773,64773],"mapped",[1589,1609]],[[64774,64774],"mapped",[1589,1610]],[[64775,64775],"mapped",[1590,1609]],[[64776,64776],"mapped",[1590,1610]],[[64777,64777],"mapped",[1588,1580]],[[64778,64778],"mapped",[1588,1581]],[[64779,64779],"mapped",[1588,1582]],[[64780,64780],"mapped",[1588,1605]],[[64781,64781],"mapped",[1588,1585]],[[64782,64782],"mapped",[1587,1585]],[[64783,64783],"mapped",[1589,1585]],[[64784,64784],"mapped",[1590,1585]],[[64785,64785],"mapped",[1591,1609]],[[64786,64786],"mapped",[1591,1610]],[[64787,64787],"mapped",[1593,1609]],[[64788,64788],"mapped",[1593,1610]],[[64789,64789],"mapped",[1594,1609]],[[64790,64790],"mapped",[1594,1610]],[[64791,64791],"mapped",[1587,1609]],[[64792,64792],"mapped",[1587,1610]],[[64793,64793],"mapped",[1588,1609]],[[64794,64794],"mapped",[1588,1610]],[[64795,64795]
,"mapped",[1581,1609]],[[64796,64796],"mapped",[1581,1610]],[[64797,64797],"mapped",[1580,1609]],[[64798,64798],"mapped",[1580,1610]],[[64799,64799],"mapped",[1582,1609]],[[64800,64800],"mapped",[1582,1610]],[[64801,64801],"mapped",[1589,1609]],[[64802,64802],"mapped",[1589,1610]],[[64803,64803],"mapped",[1590,1609]],[[64804,64804],"mapped",[1590,1610]],[[64805,64805],"mapped",[1588,1580]],[[64806,64806],"mapped",[1588,1581]],[[64807,64807],"mapped",[1588,1582]],[[64808,64808],"mapped",[1588,1605]],[[64809,64809],"mapped",[1588,1585]],[[64810,64810],"mapped",[1587,1585]],[[64811,64811],"mapped",[1589,1585]],[[64812,64812],"mapped",[1590,1585]],[[64813,64813],"mapped",[1588,1580]],[[64814,64814],"mapped",[1588,1581]],[[64815,64815],"mapped",[1588,1582]],[[64816,64816],"mapped",[1588,1605]],[[64817,64817],"mapped",[1587,1607]],[[64818,64818],"mapped",[1588,1607]],[[64819,64819],"mapped",[1591,1605]],[[64820,64820],"mapped",[1587,1580]],[[64821,64821],"mapped",[1587,1581]],[[64822,64822],"mapped",[1587,1582]],[[64823,64823],"mapped",[1588,1580]],[[64824,64824],"mapped",[1588,1581]],[[64825,64825],"mapped",[1588,1582]],[[64826,64826],"mapped",[1591,1605]],[[64827,64827],"mapped",[1592,1605]],[[64828,64829],"mapped",[1575,1611]],[[64830,64831],"valid",[],"NV8"],[[64832,64847],"disallowed"],[[64848,64848],"mapped",[1578,1580,1605]],[[64849,64850],"mapped",[1578,1581,1580]],[[64851,64851],"mapped",[1578,1581,1605]],[[64852,64852],"mapped",[1578,1582,1605]],[[64853,64853],"mapped",[1578,1605,1580]],[[64854,64854],"mapped",[1578,1605,1581]],[[64855,64855],"mapped",[1578,1605,1582]],[[64856,64857],"mapped",[1580,1605,1581]],[[64858,64858],"mapped",[1581,1605,1610]],[[64859,64859],"mapped",[1581,1605,1609]],[[64860,64860],"mapped",[1587,1581,1580]],[[64861,64861],"mapped",[1587,1580,1581]],[[64862,64862],"mapped",[1587,1580,1609]],[[64863,64864],"mapped",[1587,1605,1581]],[[64865,64865],"mapped",[1587,1605,1580]],[[64866,64867],"mapped",[1587,1605,1605]],[[64868,64869],"mapped",[1589,1581,1581]],[[64870,64870],"mapped",[1589,1605,1605]],[[64871,64872],"mapped",[1588,1581,1605]],[[64873,64873],"mapped",[1588,1580,1610]],[[64874,64875],"mapped",[1588,1605,1582]],[[64876,64877],"mapped",[1588,1605,1605]],[[64878,64878],"mapped",[1590,1581,1609]],[[64879,64880],"mapped",[1590,1582,1605]],[[64881,64882],"mapped",[1591,1605,1581]],[[64883,64883],"mapped",[1591,1605,1605]],[[64884,64884],"mapped",[1591,1605,1610]],[[64885,64885],"mapped",[1593,1580,1605]],[[64886,64887],"mapped",[1593,1605,1605]],[[64888,64888],"mapped",[1593,1605,1609]],[[64889,64889],"mapped",[1594,1605,1605]],[[64890,64890],"mapped",[1594,1605,1610]],[[64891,64891],"mapped",[1594,1605,1609]],[[64892,64893],"mapped",[1601,1582,1605]],[[64894,64894],"mapped",[1602,1605,1581]],[[64895,64895],"mapped",[1602,1605,1605]],[[64896,64896],"mapped",[1604,1581,1605]],[[64897,64897],"mapped",[1604,1581,1610]],[[64898,64898],"mapped",[1604,1581,1609]],[[64899,64900],"mapped",[1604,1580,1580]],[[64901,64902],"mapped",[1604,1582,1605]],[[64903,64904],"mapped",[1604,1605,1581]],[[64905,64905],"mapped",[1605,1581,1580]],[[64906,64906],"mapped",[1605,1581,1605]],[[64907,64907],"mapped",[1605,1581,1610]],[[64908,64908],"mapped",[1605,1580,1581]],[[64909,64909],"mapped",[1605,1580,1605]],[[64910,64910],"mapped",[1605,1582,1580]],[[64911,64911],"mapped",[1605,1582,1605]],[[64912,64913],"disallowed"],[[64914,64914],"mapped",[1605,1580,1582]],[[64915,64915],"mapped",[1607,1605,1580]],[[64916,64916],"mapped",[1607,1605,1605]],[[64917,64917],"mapped",[1606,1581,1
605]],[[64918,64918],"mapped",[1606,1581,1609]],[[64919,64920],"mapped",[1606,1580,1605]],[[64921,64921],"mapped",[1606,1580,1609]],[[64922,64922],"mapped",[1606,1605,1610]],[[64923,64923],"mapped",[1606,1605,1609]],[[64924,64925],"mapped",[1610,1605,1605]],[[64926,64926],"mapped",[1576,1582,1610]],[[64927,64927],"mapped",[1578,1580,1610]],[[64928,64928],"mapped",[1578,1580,1609]],[[64929,64929],"mapped",[1578,1582,1610]],[[64930,64930],"mapped",[1578,1582,1609]],[[64931,64931],"mapped",[1578,1605,1610]],[[64932,64932],"mapped",[1578,1605,1609]],[[64933,64933],"mapped",[1580,1605,1610]],[[64934,64934],"mapped",[1580,1581,1609]],[[64935,64935],"mapped",[1580,1605,1609]],[[64936,64936],"mapped",[1587,1582,1609]],[[64937,64937],"mapped",[1589,1581,1610]],[[64938,64938],"mapped",[1588,1581,1610]],[[64939,64939],"mapped",[1590,1581,1610]],[[64940,64940],"mapped",[1604,1580,1610]],[[64941,64941],"mapped",[1604,1605,1610]],[[64942,64942],"mapped",[1610,1581,1610]],[[64943,64943],"mapped",[1610,1580,1610]],[[64944,64944],"mapped",[1610,1605,1610]],[[64945,64945],"mapped",[1605,1605,1610]],[[64946,64946],"mapped",[1602,1605,1610]],[[64947,64947],"mapped",[1606,1581,1610]],[[64948,64948],"mapped",[1602,1605,1581]],[[64949,64949],"mapped",[1604,1581,1605]],[[64950,64950],"mapped",[1593,1605,1610]],[[64951,64951],"mapped",[1603,1605,1610]],[[64952,64952],"mapped",[1606,1580,1581]],[[64953,64953],"mapped",[1605,1582,1610]],[[64954,64954],"mapped",[1604,1580,1605]],[[64955,64955],"mapped",[1603,1605,1605]],[[64956,64956],"mapped",[1604,1580,1605]],[[64957,64957],"mapped",[1606,1580,1581]],[[64958,64958],"mapped",[1580,1581,1610]],[[64959,64959],"mapped",[1581,1580,1610]],[[64960,64960],"mapped",[1605,1580,1610]],[[64961,64961],"mapped",[1601,1605,1610]],[[64962,64962],"mapped",[1576,1581,1610]],[[64963,64963],"mapped",[1603,1605,1605]],[[64964,64964],"mapped",[1593,1580,1605]],[[64965,64965],"mapped",[1589,1605,1605]],[[64966,64966],"mapped",[1587,1582,1610]],[[64967,64967],"mapped",[1606,1580,1610]],[[64968,64975],"disallowed"],[[64976,65007],"disallowed"],[[65008,65008],"mapped",[1589,1604,1746]],[[65009,65009],"mapped",[1602,1604,1746]],[[65010,65010],"mapped",[1575,1604,1604,1607]],[[65011,65011],"mapped",[1575,1603,1576,1585]],[[65012,65012],"mapped",[1605,1581,1605,1583]],[[65013,65013],"mapped",[1589,1604,1593,1605]],[[65014,65014],"mapped",[1585,1587,1608,1604]],[[65015,65015],"mapped",[1593,1604,1610,1607]],[[65016,65016],"mapped",[1608,1587,1604,1605]],[[65017,65017],"mapped",[1589,1604,1609]],[[65018,65018],"disallowed_STD3_mapped",[1589,1604,1609,32,1575,1604,1604,1607,32,1593,1604,1610,1607,32,1608,1587,1604,1605]],[[65019,65019],"disallowed_STD3_mapped",[1580,1604,32,1580,1604,1575,1604,1607]],[[65020,65020],"mapped",[1585,1740,1575,1604]],[[65021,65021],"valid",[],"NV8"],[[65022,65023],"disallowed"],[[65024,65039],"ignored"],[[65040,65040],"disallowed_STD3_mapped",[44]],[[65041,65041],"mapped",[12289]],[[65042,65042],"disallowed"],[[65043,65043],"disallowed_STD3_mapped",[58]],[[65044,65044],"disallowed_STD3_mapped",[59]],[[65045,65045],"disallowed_STD3_mapped",[33]],[[65046,65046],"disallowed_STD3_mapped",[63]],[[65047,65047],"mapped",[12310]],[[65048,65048],"mapped",[12311]],[[65049,65049],"disallowed"],[[65050,65055],"disallowed"],[[65056,65059],"valid"],[[65060,65062],"valid"],[[65063,65069],"valid"],[[65070,65071],"valid"],[[65072,65072],"disallowed"],[[65073,65073],"mapped",[8212]],[[65074,65074],"mapped",[8211]],[[65075,65076],"disallowed_STD3_mapped",[95]],[[65077,65077],"disallowed
_STD3_mapped",[40]],[[65078,65078],"disallowed_STD3_mapped",[41]],[[65079,65079],"disallowed_STD3_mapped",[123]],[[65080,65080],"disallowed_STD3_mapped",[125]],[[65081,65081],"mapped",[12308]],[[65082,65082],"mapped",[12309]],[[65083,65083],"mapped",[12304]],[[65084,65084],"mapped",[12305]],[[65085,65085],"mapped",[12298]],[[65086,65086],"mapped",[12299]],[[65087,65087],"mapped",[12296]],[[65088,65088],"mapped",[12297]],[[65089,65089],"mapped",[12300]],[[65090,65090],"mapped",[12301]],[[65091,65091],"mapped",[12302]],[[65092,65092],"mapped",[12303]],[[65093,65094],"valid",[],"NV8"],[[65095,65095],"disallowed_STD3_mapped",[91]],[[65096,65096],"disallowed_STD3_mapped",[93]],[[65097,65100],"disallowed_STD3_mapped",[32,773]],[[65101,65103],"disallowed_STD3_mapped",[95]],[[65104,65104],"disallowed_STD3_mapped",[44]],[[65105,65105],"mapped",[12289]],[[65106,65106],"disallowed"],[[65107,65107],"disallowed"],[[65108,65108],"disallowed_STD3_mapped",[59]],[[65109,65109],"disallowed_STD3_mapped",[58]],[[65110,65110],"disallowed_STD3_mapped",[63]],[[65111,65111],"disallowed_STD3_mapped",[33]],[[65112,65112],"mapped",[8212]],[[65113,65113],"disallowed_STD3_mapped",[40]],[[65114,65114],"disallowed_STD3_mapped",[41]],[[65115,65115],"disallowed_STD3_mapped",[123]],[[65116,65116],"disallowed_STD3_mapped",[125]],[[65117,65117],"mapped",[12308]],[[65118,65118],"mapped",[12309]],[[65119,65119],"disallowed_STD3_mapped",[35]],[[65120,65120],"disallowed_STD3_mapped",[38]],[[65121,65121],"disallowed_STD3_mapped",[42]],[[65122,65122],"disallowed_STD3_mapped",[43]],[[65123,65123],"mapped",[45]],[[65124,65124],"disallowed_STD3_mapped",[60]],[[65125,65125],"disallowed_STD3_mapped",[62]],[[65126,65126],"disallowed_STD3_mapped",[61]],[[65127,65127],"disallowed"],[[65128,65128],"disallowed_STD3_mapped",[92]],[[65129,65129],"disallowed_STD3_mapped",[36]],[[65130,65130],"disallowed_STD3_mapped",[37]],[[65131,65131],"disallowed_STD3_mapped",[64]],[[65132,65135],"disallowed"],[[65136,65136],"disallowed_STD3_mapped",[32,1611]],[[65137,65137],"mapped",[1600,1611]],[[65138,65138],"disallowed_STD3_mapped",[32,1612]],[[65139,65139],"valid"],[[65140,65140],"disallowed_STD3_mapped",[32,1613]],[[65141,65141],"disallowed"],[[65142,65142],"disallowed_STD3_mapped",[32,1614]],[[65143,65143],"mapped",[1600,1614]],[[65144,65144],"disallowed_STD3_mapped",[32,1615]],[[65145,65145],"mapped",[1600,1615]],[[65146,65146],"disallowed_STD3_mapped",[32,1616]],[[65147,65147],"mapped",[1600,1616]],[[65148,65148],"disallowed_STD3_mapped",[32,1617]],[[65149,65149],"mapped",[1600,1617]],[[65150,65150],"disallowed_STD3_mapped",[32,1618]],[[65151,65151],"mapped",[1600,1618]],[[65152,65152],"mapped",[1569]],[[65153,65154],"mapped",[1570]],[[65155,65156],"mapped",[1571]],[[65157,65158],"mapped",[1572]],[[65159,65160],"mapped",[1573]],[[65161,65164],"mapped",[1574]],[[65165,65166],"mapped",[1575]],[[65167,65170],"mapped",[1576]],[[65171,65172],"mapped",[1577]],[[65173,65176],"mapped",[1578]],[[65177,65180],"mapped",[1579]],[[65181,65184],"mapped",[1580]],[[65185,65188],"mapped",[1581]],[[65189,65192],"mapped",[1582]],[[65193,65194],"mapped",[1583]],[[65195,65196],"mapped",[1584]],[[65197,65198],"mapped",[1585]],[[65199,65200],"mapped",[1586]],[[65201,65204],"mapped",[1587]],[[65205,65208],"mapped",[1588]],[[65209,65212],"mapped",[1589]],[[65213,65216],"mapped",[1590]],[[65217,65220],"mapped",[1591]],[[65221,65224],"mapped",[1592]],[[65225,65228],"mapped",[1593]],[[65229,65232],"mapped",[1594]],[[65233,65236],"mapped",[1601]],[[65237,65240],"mapped",[1602]],
[[65241,65244],"mapped",[1603]],[[65245,65248],"mapped",[1604]],[[65249,65252],"mapped",[1605]],[[65253,65256],"mapped",[1606]],[[65257,65260],"mapped",[1607]],[[65261,65262],"mapped",[1608]],[[65263,65264],"mapped",[1609]],[[65265,65268],"mapped",[1610]],[[65269,65270],"mapped",[1604,1570]],[[65271,65272],"mapped",[1604,1571]],[[65273,65274],"mapped",[1604,1573]],[[65275,65276],"mapped",[1604,1575]],[[65277,65278],"disallowed"],[[65279,65279],"ignored"],[[65280,65280],"disallowed"],[[65281,65281],"disallowed_STD3_mapped",[33]],[[65282,65282],"disallowed_STD3_mapped",[34]],[[65283,65283],"disallowed_STD3_mapped",[35]],[[65284,65284],"disallowed_STD3_mapped",[36]],[[65285,65285],"disallowed_STD3_mapped",[37]],[[65286,65286],"disallowed_STD3_mapped",[38]],[[65287,65287],"disallowed_STD3_mapped",[39]],[[65288,65288],"disallowed_STD3_mapped",[40]],[[65289,65289],"disallowed_STD3_mapped",[41]],[[65290,65290],"disallowed_STD3_mapped",[42]],[[65291,65291],"disallowed_STD3_mapped",[43]],[[65292,65292],"disallowed_STD3_mapped",[44]],[[65293,65293],"mapped",[45]],[[65294,65294],"mapped",[46]],[[65295,65295],"disallowed_STD3_mapped",[47]],[[65296,65296],"mapped",[48]],[[65297,65297],"mapped",[49]],[[65298,65298],"mapped",[50]],[[65299,65299],"mapped",[51]],[[65300,65300],"mapped",[52]],[[65301,65301],"mapped",[53]],[[65302,65302],"mapped",[54]],[[65303,65303],"mapped",[55]],[[65304,65304],"mapped",[56]],[[65305,65305],"mapped",[57]],[[65306,65306],"disallowed_STD3_mapped",[58]],[[65307,65307],"disallowed_STD3_mapped",[59]],[[65308,65308],"disallowed_STD3_mapped",[60]],[[65309,65309],"disallowed_STD3_mapped",[61]],[[65310,65310],"disallowed_STD3_mapped",[62]],[[65311,65311],"disallowed_STD3_mapped",[63]],[[65312,65312],"disallowed_STD3_mapped",[64]],[[65313,65313],"mapped",[97]],[[65314,65314],"mapped",[98]],[[65315,65315],"mapped",[99]],[[65316,65316],"mapped",[100]],[[65317,65317],"mapped",[101]],[[65318,65318],"mapped",[102]],[[65319,65319],"mapped",[103]],[[65320,65320],"mapped",[104]],[[65321,65321],"mapped",[105]],[[65322,65322],"mapped",[106]],[[65323,65323],"mapped",[107]],[[65324,65324],"mapped",[108]],[[65325,65325],"mapped",[109]],[[65326,65326],"mapped",[110]],[[65327,65327],"mapped",[111]],[[65328,65328],"mapped",[112]],[[65329,65329],"mapped",[113]],[[65330,65330],"mapped",[114]],[[65331,65331],"mapped",[115]],[[65332,65332],"mapped",[116]],[[65333,65333],"mapped",[117]],[[65334,65334],"mapped",[118]],[[65335,65335],"mapped",[119]],[[65336,65336],"mapped",[120]],[[65337,65337],"mapped",[121]],[[65338,65338],"mapped",[122]],[[65339,65339],"disallowed_STD3_mapped",[91]],[[65340,65340],"disallowed_STD3_mapped",[92]],[[65341,65341],"disallowed_STD3_mapped",[93]],[[65342,65342],"disallowed_STD3_mapped",[94]],[[65343,65343],"disallowed_STD3_mapped",[95]],[[65344,65344],"disallowed_STD3_mapped",[96]],[[65345,65345],"mapped",[97]],[[65346,65346],"mapped",[98]],[[65347,65347],"mapped",[99]],[[65348,65348],"mapped",[100]],[[65349,65349],"mapped",[101]],[[65350,65350],"mapped",[102]],[[65351,65351],"mapped",[103]],[[65352,65352],"mapped",[104]],[[65353,65353],"mapped",[105]],[[65354,65354],"mapped",[106]],[[65355,65355],"mapped",[107]],[[65356,65356],"mapped",[108]],[[65357,65357],"mapped",[109]],[[65358,65358],"mapped",[110]],[[65359,65359],"mapped",[111]],[[65360,65360],"mapped",[112]],[[65361,65361],"mapped",[113]],[[65362,65362],"mapped",[114]],[[65363,65363],"mapped",[115]],[[65364,65364],"mapped",[116]],[[65365,65365],"mapped",[117]],[[65366,65366],"mapped",[118]],[[65367,65367],"mapped",[119]
],[[65368,65368],"mapped",[120]],[[65369,65369],"mapped",[121]],[[65370,65370],"mapped",[122]],[[65371,65371],"disallowed_STD3_mapped",[123]],[[65372,65372],"disallowed_STD3_mapped",[124]],[[65373,65373],"disallowed_STD3_mapped",[125]],[[65374,65374],"disallowed_STD3_mapped",[126]],[[65375,65375],"mapped",[10629]],[[65376,65376],"mapped",[10630]],[[65377,65377],"mapped",[46]],[[65378,65378],"mapped",[12300]],[[65379,65379],"mapped",[12301]],[[65380,65380],"mapped",[12289]],[[65381,65381],"mapped",[12539]],[[65382,65382],"mapped",[12530]],[[65383,65383],"mapped",[12449]],[[65384,65384],"mapped",[12451]],[[65385,65385],"mapped",[12453]],[[65386,65386],"mapped",[12455]],[[65387,65387],"mapped",[12457]],[[65388,65388],"mapped",[12515]],[[65389,65389],"mapped",[12517]],[[65390,65390],"mapped",[12519]],[[65391,65391],"mapped",[12483]],[[65392,65392],"mapped",[12540]],[[65393,65393],"mapped",[12450]],[[65394,65394],"mapped",[12452]],[[65395,65395],"mapped",[12454]],[[65396,65396],"mapped",[12456]],[[65397,65397],"mapped",[12458]],[[65398,65398],"mapped",[12459]],[[65399,65399],"mapped",[12461]],[[65400,65400],"mapped",[12463]],[[65401,65401],"mapped",[12465]],[[65402,65402],"mapped",[12467]],[[65403,65403],"mapped",[12469]],[[65404,65404],"mapped",[12471]],[[65405,65405],"mapped",[12473]],[[65406,65406],"mapped",[12475]],[[65407,65407],"mapped",[12477]],[[65408,65408],"mapped",[12479]],[[65409,65409],"mapped",[12481]],[[65410,65410],"mapped",[12484]],[[65411,65411],"mapped",[12486]],[[65412,65412],"mapped",[12488]],[[65413,65413],"mapped",[12490]],[[65414,65414],"mapped",[12491]],[[65415,65415],"mapped",[12492]],[[65416,65416],"mapped",[12493]],[[65417,65417],"mapped",[12494]],[[65418,65418],"mapped",[12495]],[[65419,65419],"mapped",[12498]],[[65420,65420],"mapped",[12501]],[[65421,65421],"mapped",[12504]],[[65422,65422],"mapped",[12507]],[[65423,65423],"mapped",[12510]],[[65424,65424],"mapped",[12511]],[[65425,65425],"mapped",[12512]],[[65426,65426],"mapped",[12513]],[[65427,65427],"mapped",[12514]],[[65428,65428],"mapped",[12516]],[[65429,65429],"mapped",[12518]],[[65430,65430],"mapped",[12520]],[[65431,65431],"mapped",[12521]],[[65432,65432],"mapped",[12522]],[[65433,65433],"mapped",[12523]],[[65434,65434],"mapped",[12524]],[[65435,65435],"mapped",[12525]],[[65436,65436],"mapped",[12527]],[[65437,65437],"mapped",[12531]],[[65438,65438],"mapped",[12441]],[[65439,65439],"mapped",[12442]],[[65440,65440],"disallowed"],[[65441,65441],"mapped",[4352]],[[65442,65442],"mapped",[4353]],[[65443,65443],"mapped",[4522]],[[65444,65444],"mapped",[4354]],[[65445,65445],"mapped",[4524]],[[65446,65446],"mapped",[4525]],[[65447,65447],"mapped",[4355]],[[65448,65448],"mapped",[4356]],[[65449,65449],"mapped",[4357]],[[65450,65450],"mapped",[4528]],[[65451,65451],"mapped",[4529]],[[65452,65452],"mapped",[4530]],[[65453,65453],"mapped",[4531]],[[65454,65454],"mapped",[4532]],[[65455,65455],"mapped",[4533]],[[65456,65456],"mapped",[4378]],[[65457,65457],"mapped",[4358]],[[65458,65458],"mapped",[4359]],[[65459,65459],"mapped",[4360]],[[65460,65460],"mapped",[4385]],[[65461,65461],"mapped",[4361]],[[65462,65462],"mapped",[4362]],[[65463,65463],"mapped",[4363]],[[65464,65464],"mapped",[4364]],[[65465,65465],"mapped",[4365]],[[65466,65466],"mapped",[4366]],[[65467,65467],"mapped",[4367]],[[65468,65468],"mapped",[4368]],[[65469,65469],"mapped",[4369]],[[65470,65470],"mapped",[4370]],[[65471,65473],"disallowed"],[[65474,65474],"mapped",[4449]],[[65475,65475],"mapped",[4450]],[[65476,65476],"mapped",[4451]],[[65477,65477],"
mapped",[4452]],[[65478,65478],"mapped",[4453]],[[65479,65479],"mapped",[4454]],[[65480,65481],"disallowed"],[[65482,65482],"mapped",[4455]],[[65483,65483],"mapped",[4456]],[[65484,65484],"mapped",[4457]],[[65485,65485],"mapped",[4458]],[[65486,65486],"mapped",[4459]],[[65487,65487],"mapped",[4460]],[[65488,65489],"disallowed"],[[65490,65490],"mapped",[4461]],[[65491,65491],"mapped",[4462]],[[65492,65492],"mapped",[4463]],[[65493,65493],"mapped",[4464]],[[65494,65494],"mapped",[4465]],[[65495,65495],"mapped",[4466]],[[65496,65497],"disallowed"],[[65498,65498],"mapped",[4467]],[[65499,65499],"mapped",[4468]],[[65500,65500],"mapped",[4469]],[[65501,65503],"disallowed"],[[65504,65504],"mapped",[162]],[[65505,65505],"mapped",[163]],[[65506,65506],"mapped",[172]],[[65507,65507],"disallowed_STD3_mapped",[32,772]],[[65508,65508],"mapped",[166]],[[65509,65509],"mapped",[165]],[[65510,65510],"mapped",[8361]],[[65511,65511],"disallowed"],[[65512,65512],"mapped",[9474]],[[65513,65513],"mapped",[8592]],[[65514,65514],"mapped",[8593]],[[65515,65515],"mapped",[8594]],[[65516,65516],"mapped",[8595]],[[65517,65517],"mapped",[9632]],[[65518,65518],"mapped",[9675]],[[65519,65528],"disallowed"],[[65529,65531],"disallowed"],[[65532,65532],"disallowed"],[[65533,65533],"disallowed"],[[65534,65535],"disallowed"],[[65536,65547],"valid"],[[65548,65548],"disallowed"],[[65549,65574],"valid"],[[65575,65575],"disallowed"],[[65576,65594],"valid"],[[65595,65595],"disallowed"],[[65596,65597],"valid"],[[65598,65598],"disallowed"],[[65599,65613],"valid"],[[65614,65615],"disallowed"],[[65616,65629],"valid"],[[65630,65663],"disallowed"],[[65664,65786],"valid"],[[65787,65791],"disallowed"],[[65792,65794],"valid",[],"NV8"],[[65795,65798],"disallowed"],[[65799,65843],"valid",[],"NV8"],[[65844,65846],"disallowed"],[[65847,65855],"valid",[],"NV8"],[[65856,65930],"valid",[],"NV8"],[[65931,65932],"valid",[],"NV8"],[[65933,65935],"disallowed"],[[65936,65947],"valid",[],"NV8"],[[65948,65951],"disallowed"],[[65952,65952],"valid",[],"NV8"],[[65953,65999],"disallowed"],[[66000,66044],"valid",[],"NV8"],[[66045,66045],"valid"],[[66046,66175],"disallowed"],[[66176,66204],"valid"],[[66205,66207],"disallowed"],[[66208,66256],"valid"],[[66257,66271],"disallowed"],[[66272,66272],"valid"],[[66273,66299],"valid",[],"NV8"],[[66300,66303],"disallowed"],[[66304,66334],"valid"],[[66335,66335],"valid"],[[66336,66339],"valid",[],"NV8"],[[66340,66351],"disallowed"],[[66352,66368],"valid"],[[66369,66369],"valid",[],"NV8"],[[66370,66377],"valid"],[[66378,66378],"valid",[],"NV8"],[[66379,66383],"disallowed"],[[66384,66426],"valid"],[[66427,66431],"disallowed"],[[66432,66461],"valid"],[[66462,66462],"disallowed"],[[66463,66463],"valid",[],"NV8"],[[66464,66499],"valid"],[[66500,66503],"disallowed"],[[66504,66511],"valid"],[[66512,66517],"valid",[],"NV8"],[[66518,66559],"disallowed"],[[66560,66560],"mapped",[66600]],[[66561,66561],"mapped",[66601]],[[66562,66562],"mapped",[66602]],[[66563,66563],"mapped",[66603]],[[66564,66564],"mapped",[66604]],[[66565,66565],"mapped",[66605]],[[66566,66566],"mapped",[66606]],[[66567,66567],"mapped",[66607]],[[66568,66568],"mapped",[66608]],[[66569,66569],"mapped",[66609]],[[66570,66570],"mapped",[66610]],[[66571,66571],"mapped",[66611]],[[66572,66572],"mapped",[66612]],[[66573,66573],"mapped",[66613]],[[66574,66574],"mapped",[66614]],[[66575,66575],"mapped",[66615]],[[66576,66576],"mapped",[66616]],[[66577,66577],"mapped",[66617]],[[66578,66578],"mapped",[66618]],[[66579,66579],"mapped",[66619]],[[66580,66580],"mapped",[666
20]],[[66581,66581],"mapped",[66621]],[[66582,66582],"mapped",[66622]],[[66583,66583],"mapped",[66623]],[[66584,66584],"mapped",[66624]],[[66585,66585],"mapped",[66625]],[[66586,66586],"mapped",[66626]],[[66587,66587],"mapped",[66627]],[[66588,66588],"mapped",[66628]],[[66589,66589],"mapped",[66629]],[[66590,66590],"mapped",[66630]],[[66591,66591],"mapped",[66631]],[[66592,66592],"mapped",[66632]],[[66593,66593],"mapped",[66633]],[[66594,66594],"mapped",[66634]],[[66595,66595],"mapped",[66635]],[[66596,66596],"mapped",[66636]],[[66597,66597],"mapped",[66637]],[[66598,66598],"mapped",[66638]],[[66599,66599],"mapped",[66639]],[[66600,66637],"valid"],[[66638,66717],"valid"],[[66718,66719],"disallowed"],[[66720,66729],"valid"],[[66730,66815],"disallowed"],[[66816,66855],"valid"],[[66856,66863],"disallowed"],[[66864,66915],"valid"],[[66916,66926],"disallowed"],[[66927,66927],"valid",[],"NV8"],[[66928,67071],"disallowed"],[[67072,67382],"valid"],[[67383,67391],"disallowed"],[[67392,67413],"valid"],[[67414,67423],"disallowed"],[[67424,67431],"valid"],[[67432,67583],"disallowed"],[[67584,67589],"valid"],[[67590,67591],"disallowed"],[[67592,67592],"valid"],[[67593,67593],"disallowed"],[[67594,67637],"valid"],[[67638,67638],"disallowed"],[[67639,67640],"valid"],[[67641,67643],"disallowed"],[[67644,67644],"valid"],[[67645,67646],"disallowed"],[[67647,67647],"valid"],[[67648,67669],"valid"],[[67670,67670],"disallowed"],[[67671,67679],"valid",[],"NV8"],[[67680,67702],"valid"],[[67703,67711],"valid",[],"NV8"],[[67712,67742],"valid"],[[67743,67750],"disallowed"],[[67751,67759],"valid",[],"NV8"],[[67760,67807],"disallowed"],[[67808,67826],"valid"],[[67827,67827],"disallowed"],[[67828,67829],"valid"],[[67830,67834],"disallowed"],[[67835,67839],"valid",[],"NV8"],[[67840,67861],"valid"],[[67862,67865],"valid",[],"NV8"],[[67866,67867],"valid",[],"NV8"],[[67868,67870],"disallowed"],[[67871,67871],"valid",[],"NV8"],[[67872,67897],"valid"],[[67898,67902],"disallowed"],[[67903,67903],"valid",[],"NV8"],[[67904,67967],"disallowed"],[[67968,68023],"valid"],[[68024,68027],"disallowed"],[[68028,68029],"valid",[],"NV8"],[[68030,68031],"valid"],[[68032,68047],"valid",[],"NV8"],[[68048,68049],"disallowed"],[[68050,68095],"valid",[],"NV8"],[[68096,68099],"valid"],[[68100,68100],"disallowed"],[[68101,68102],"valid"],[[68103,68107],"disallowed"],[[68108,68115],"valid"],[[68116,68116],"disallowed"],[[68117,68119],"valid"],[[68120,68120],"disallowed"],[[68121,68147],"valid"],[[68148,68151],"disallowed"],[[68152,68154],"valid"],[[68155,68158],"disallowed"],[[68159,68159],"valid"],[[68160,68167],"valid",[],"NV8"],[[68168,68175],"disallowed"],[[68176,68184],"valid",[],"NV8"],[[68185,68191],"disallowed"],[[68192,68220],"valid"],[[68221,68223],"valid",[],"NV8"],[[68224,68252],"valid"],[[68253,68255],"valid",[],"NV8"],[[68256,68287],"disallowed"],[[68288,68295],"valid"],[[68296,68296],"valid",[],"NV8"],[[68297,68326],"valid"],[[68327,68330],"disallowed"],[[68331,68342],"valid",[],"NV8"],[[68343,68351],"disallowed"],[[68352,68405],"valid"],[[68406,68408],"disallowed"],[[68409,68415],"valid",[],"NV8"],[[68416,68437],"valid"],[[68438,68439],"disallowed"],[[68440,68447],"valid",[],"NV8"],[[68448,68466],"valid"],[[68467,68471],"disallowed"],[[68472,68479],"valid",[],"NV8"],[[68480,68497],"valid"],[[68498,68504],"disallowed"],[[68505,68508],"valid",[],"NV8"],[[68509,68520],"disallowed"],[[68521,68527],"valid",[],"NV8"],[[68528,68607],"disallowed"],[[68608,68680],"valid"],[[68681,68735],"disallowed"],[[68736,68736],"mapped",[68800]],[[6873
7,68737],"mapped",[68801]],[[68738,68738],"mapped",[68802]],[[68739,68739],"mapped",[68803]],[[68740,68740],"mapped",[68804]],[[68741,68741],"mapped",[68805]],[[68742,68742],"mapped",[68806]],[[68743,68743],"mapped",[68807]],[[68744,68744],"mapped",[68808]],[[68745,68745],"mapped",[68809]],[[68746,68746],"mapped",[68810]],[[68747,68747],"mapped",[68811]],[[68748,68748],"mapped",[68812]],[[68749,68749],"mapped",[68813]],[[68750,68750],"mapped",[68814]],[[68751,68751],"mapped",[68815]],[[68752,68752],"mapped",[68816]],[[68753,68753],"mapped",[68817]],[[68754,68754],"mapped",[68818]],[[68755,68755],"mapped",[68819]],[[68756,68756],"mapped",[68820]],[[68757,68757],"mapped",[68821]],[[68758,68758],"mapped",[68822]],[[68759,68759],"mapped",[68823]],[[68760,68760],"mapped",[68824]],[[68761,68761],"mapped",[68825]],[[68762,68762],"mapped",[68826]],[[68763,68763],"mapped",[68827]],[[68764,68764],"mapped",[68828]],[[68765,68765],"mapped",[68829]],[[68766,68766],"mapped",[68830]],[[68767,68767],"mapped",[68831]],[[68768,68768],"mapped",[68832]],[[68769,68769],"mapped",[68833]],[[68770,68770],"mapped",[68834]],[[68771,68771],"mapped",[68835]],[[68772,68772],"mapped",[68836]],[[68773,68773],"mapped",[68837]],[[68774,68774],"mapped",[68838]],[[68775,68775],"mapped",[68839]],[[68776,68776],"mapped",[68840]],[[68777,68777],"mapped",[68841]],[[68778,68778],"mapped",[68842]],[[68779,68779],"mapped",[68843]],[[68780,68780],"mapped",[68844]],[[68781,68781],"mapped",[68845]],[[68782,68782],"mapped",[68846]],[[68783,68783],"mapped",[68847]],[[68784,68784],"mapped",[68848]],[[68785,68785],"mapped",[68849]],[[68786,68786],"mapped",[68850]],[[68787,68799],"disallowed"],[[68800,68850],"valid"],[[68851,68857],"disallowed"],[[68858,68863],"valid",[],"NV8"],[[68864,69215],"disallowed"],[[69216,69246],"valid",[],"NV8"],[[69247,69631],"disallowed"],[[69632,69702],"valid"],[[69703,69709],"valid",[],"NV8"],[[69710,69713],"disallowed"],[[69714,69733],"valid",[],"NV8"],[[69734,69743],"valid"],[[69744,69758],"disallowed"],[[69759,69759],"valid"],[[69760,69818],"valid"],[[69819,69820],"valid",[],"NV8"],[[69821,69821],"disallowed"],[[69822,69825],"valid",[],"NV8"],[[69826,69839],"disallowed"],[[69840,69864],"valid"],[[69865,69871],"disallowed"],[[69872,69881],"valid"],[[69882,69887],"disallowed"],[[69888,69940],"valid"],[[69941,69941],"disallowed"],[[69942,69951],"valid"],[[69952,69955],"valid",[],"NV8"],[[69956,69967],"disallowed"],[[69968,70003],"valid"],[[70004,70005],"valid",[],"NV8"],[[70006,70006],"valid"],[[70007,70015],"disallowed"],[[70016,70084],"valid"],[[70085,70088],"valid",[],"NV8"],[[70089,70089],"valid",[],"NV8"],[[70090,70092],"valid"],[[70093,70093],"valid",[],"NV8"],[[70094,70095],"disallowed"],[[70096,70105],"valid"],[[70106,70106],"valid"],[[70107,70107],"valid",[],"NV8"],[[70108,70108],"valid"],[[70109,70111],"valid",[],"NV8"],[[70112,70112],"disallowed"],[[70113,70132],"valid",[],"NV8"],[[70133,70143],"disallowed"],[[70144,70161],"valid"],[[70162,70162],"disallowed"],[[70163,70199],"valid"],[[70200,70205],"valid",[],"NV8"],[[70206,70271],"disallowed"],[[70272,70278],"valid"],[[70279,70279],"disallowed"],[[70280,70280],"valid"],[[70281,70281],"disallowed"],[[70282,70285],"valid"],[[70286,70286],"disallowed"],[[70287,70301],"valid"],[[70302,70302],"disallowed"],[[70303,70312],"valid"],[[70313,70313],"valid",[],"NV8"],[[70314,70319],"disallowed"],[[70320,70378],"valid"],[[70379,70383],"disallowed"],[[70384,70393],"valid"],[[70394,70399],"disallowed"],[[70400,70400],"valid"],[[70401,70403],"valid"],[[70404,70
404],"disallowed"],[[70405,70412],"valid"],[[70413,70414],"disallowed"],[[70415,70416],"valid"],[[70417,70418],"disallowed"],[[70419,70440],"valid"],[[70441,70441],"disallowed"],[[70442,70448],"valid"],[[70449,70449],"disallowed"],[[70450,70451],"valid"],[[70452,70452],"disallowed"],[[70453,70457],"valid"],[[70458,70459],"disallowed"],[[70460,70468],"valid"],[[70469,70470],"disallowed"],[[70471,70472],"valid"],[[70473,70474],"disallowed"],[[70475,70477],"valid"],[[70478,70479],"disallowed"],[[70480,70480],"valid"],[[70481,70486],"disallowed"],[[70487,70487],"valid"],[[70488,70492],"disallowed"],[[70493,70499],"valid"],[[70500,70501],"disallowed"],[[70502,70508],"valid"],[[70509,70511],"disallowed"],[[70512,70516],"valid"],[[70517,70783],"disallowed"],[[70784,70853],"valid"],[[70854,70854],"valid",[],"NV8"],[[70855,70855],"valid"],[[70856,70863],"disallowed"],[[70864,70873],"valid"],[[70874,71039],"disallowed"],[[71040,71093],"valid"],[[71094,71095],"disallowed"],[[71096,71104],"valid"],[[71105,71113],"valid",[],"NV8"],[[71114,71127],"valid",[],"NV8"],[[71128,71133],"valid"],[[71134,71167],"disallowed"],[[71168,71232],"valid"],[[71233,71235],"valid",[],"NV8"],[[71236,71236],"valid"],[[71237,71247],"disallowed"],[[71248,71257],"valid"],[[71258,71295],"disallowed"],[[71296,71351],"valid"],[[71352,71359],"disallowed"],[[71360,71369],"valid"],[[71370,71423],"disallowed"],[[71424,71449],"valid"],[[71450,71452],"disallowed"],[[71453,71467],"valid"],[[71468,71471],"disallowed"],[[71472,71481],"valid"],[[71482,71487],"valid",[],"NV8"],[[71488,71839],"disallowed"],[[71840,71840],"mapped",[71872]],[[71841,71841],"mapped",[71873]],[[71842,71842],"mapped",[71874]],[[71843,71843],"mapped",[71875]],[[71844,71844],"mapped",[71876]],[[71845,71845],"mapped",[71877]],[[71846,71846],"mapped",[71878]],[[71847,71847],"mapped",[71879]],[[71848,71848],"mapped",[71880]],[[71849,71849],"mapped",[71881]],[[71850,71850],"mapped",[71882]],[[71851,71851],"mapped",[71883]],[[71852,71852],"mapped",[71884]],[[71853,71853],"mapped",[71885]],[[71854,71854],"mapped",[71886]],[[71855,71855],"mapped",[71887]],[[71856,71856],"mapped",[71888]],[[71857,71857],"mapped",[71889]],[[71858,71858],"mapped",[71890]],[[71859,71859],"mapped",[71891]],[[71860,71860],"mapped",[71892]],[[71861,71861],"mapped",[71893]],[[71862,71862],"mapped",[71894]],[[71863,71863],"mapped",[71895]],[[71864,71864],"mapped",[71896]],[[71865,71865],"mapped",[71897]],[[71866,71866],"mapped",[71898]],[[71867,71867],"mapped",[71899]],[[71868,71868],"mapped",[71900]],[[71869,71869],"mapped",[71901]],[[71870,71870],"mapped",[71902]],[[71871,71871],"mapped",[71903]],[[71872,71913],"valid"],[[71914,71922],"valid",[],"NV8"],[[71923,71934],"disallowed"],[[71935,71935],"valid"],[[71936,72383],"disallowed"],[[72384,72440],"valid"],[[72441,73727],"disallowed"],[[73728,74606],"valid"],[[74607,74648],"valid"],[[74649,74649],"valid"],[[74650,74751],"disallowed"],[[74752,74850],"valid",[],"NV8"],[[74851,74862],"valid",[],"NV8"],[[74863,74863],"disallowed"],[[74864,74867],"valid",[],"NV8"],[[74868,74868],"valid",[],"NV8"],[[74869,74879],"disallowed"],[[74880,75075],"valid"],[[75076,77823],"disallowed"],[[77824,78894],"valid"],[[78895,82943],"disallowed"],[[82944,83526],"valid"],[[83527,92159],"disallowed"],[[92160,92728],"valid"],[[92729,92735],"disallowed"],[[92736,92766],"valid"],[[92767,92767],"disallowed"],[[92768,92777],"valid"],[[92778,92781],"disallowed"],[[92782,92783],"valid",[],"NV8"],[[92784,92879],"disallowed"],[[92880,92909],"valid"],[[92910,92911],"disallowed"],[[
92912,92916],"valid"],[[92917,92917],"valid",[],"NV8"],[[92918,92927],"disallowed"],[[92928,92982],"valid"],[[92983,92991],"valid",[],"NV8"],[[92992,92995],"valid"],[[92996,92997],"valid",[],"NV8"],[[92998,93007],"disallowed"],[[93008,93017],"valid"],[[93018,93018],"disallowed"],[[93019,93025],"valid",[],"NV8"],[[93026,93026],"disallowed"],[[93027,93047],"valid"],[[93048,93052],"disallowed"],[[93053,93071],"valid"],[[93072,93951],"disallowed"],[[93952,94020],"valid"],[[94021,94031],"disallowed"],[[94032,94078],"valid"],[[94079,94094],"disallowed"],[[94095,94111],"valid"],[[94112,110591],"disallowed"],[[110592,110593],"valid"],[[110594,113663],"disallowed"],[[113664,113770],"valid"],[[113771,113775],"disallowed"],[[113776,113788],"valid"],[[113789,113791],"disallowed"],[[113792,113800],"valid"],[[113801,113807],"disallowed"],[[113808,113817],"valid"],[[113818,113819],"disallowed"],[[113820,113820],"valid",[],"NV8"],[[113821,113822],"valid"],[[113823,113823],"valid",[],"NV8"],[[113824,113827],"ignored"],[[113828,118783],"disallowed"],[[118784,119029],"valid",[],"NV8"],[[119030,119039],"disallowed"],[[119040,119078],"valid",[],"NV8"],[[119079,119080],"disallowed"],[[119081,119081],"valid",[],"NV8"],[[119082,119133],"valid",[],"NV8"],[[119134,119134],"mapped",[119127,119141]],[[119135,119135],"mapped",[119128,119141]],[[119136,119136],"mapped",[119128,119141,119150]],[[119137,119137],"mapped",[119128,119141,119151]],[[119138,119138],"mapped",[119128,119141,119152]],[[119139,119139],"mapped",[119128,119141,119153]],[[119140,119140],"mapped",[119128,119141,119154]],[[119141,119154],"valid",[],"NV8"],[[119155,119162],"disallowed"],[[119163,119226],"valid",[],"NV8"],[[119227,119227],"mapped",[119225,119141]],[[119228,119228],"mapped",[119226,119141]],[[119229,119229],"mapped",[119225,119141,119150]],[[119230,119230],"mapped",[119226,119141,119150]],[[119231,119231],"mapped",[119225,119141,119151]],[[119232,119232],"mapped",[119226,119141,119151]],[[119233,119261],"valid",[],"NV8"],[[119262,119272],"valid",[],"NV8"],[[119273,119295],"disallowed"],[[119296,119365],"valid",[],"NV8"],[[119366,119551],"disallowed"],[[119552,119638],"valid",[],"NV8"],[[119639,119647],"disallowed"],[[119648,119665],"valid",[],"NV8"],[[119666,119807],"disallowed"],[[119808,119808],"mapped",[97]],[[119809,119809],"mapped",[98]],[[119810,119810],"mapped",[99]],[[119811,119811],"mapped",[100]],[[119812,119812],"mapped",[101]],[[119813,119813],"mapped",[102]],[[119814,119814],"mapped",[103]],[[119815,119815],"mapped",[104]],[[119816,119816],"mapped",[105]],[[119817,119817],"mapped",[106]],[[119818,119818],"mapped",[107]],[[119819,119819],"mapped",[108]],[[119820,119820],"mapped",[109]],[[119821,119821],"mapped",[110]],[[119822,119822],"mapped",[111]],[[119823,119823],"mapped",[112]],[[119824,119824],"mapped",[113]],[[119825,119825],"mapped",[114]],[[119826,119826],"mapped",[115]],[[119827,119827],"mapped",[116]],[[119828,119828],"mapped",[117]],[[119829,119829],"mapped",[118]],[[119830,119830],"mapped",[119]],[[119831,119831],"mapped",[120]],[[119832,119832],"mapped",[121]],[[119833,119833],"mapped",[122]],[[119834,119834],"mapped",[97]],[[119835,119835],"mapped",[98]],[[119836,119836],"mapped",[99]],[[119837,119837],"mapped",[100]],[[119838,119838],"mapped",[101]],[[119839,119839],"mapped",[102]],[[119840,119840],"mapped",[103]],[[119841,119841],"mapped",[104]],[[119842,119842],"mapped",[105]],[[119843,119843],"mapped",[106]],[[119844,119844],"mapped",[107]],[[119845,119845],"mapped",[108]],[[119846,119846],"mapped",[109]],[[
119847,119847],"mapped",[110]],[[119848,119848],"mapped",[111]],[[119849,119849],"mapped",[112]],[[119850,119850],"mapped",[113]],[[119851,119851],"mapped",[114]],[[119852,119852],"mapped",[115]],[[119853,119853],"mapped",[116]],[[119854,119854],"mapped",[117]],[[119855,119855],"mapped",[118]],[[119856,119856],"mapped",[119]],[[119857,119857],"mapped",[120]],[[119858,119858],"mapped",[121]],[[119859,119859],"mapped",[122]],[[119860,119860],"mapped",[97]],[[119861,119861],"mapped",[98]],[[119862,119862],"mapped",[99]],[[119863,119863],"mapped",[100]],[[119864,119864],"mapped",[101]],[[119865,119865],"mapped",[102]],[[119866,119866],"mapped",[103]],[[119867,119867],"mapped",[104]],[[119868,119868],"mapped",[105]],[[119869,119869],"mapped",[106]],[[119870,119870],"mapped",[107]],[[119871,119871],"mapped",[108]],[[119872,119872],"mapped",[109]],[[119873,119873],"mapped",[110]],[[119874,119874],"mapped",[111]],[[119875,119875],"mapped",[112]],[[119876,119876],"mapped",[113]],[[119877,119877],"mapped",[114]],[[119878,119878],"mapped",[115]],[[119879,119879],"mapped",[116]],[[119880,119880],"mapped",[117]],[[119881,119881],"mapped",[118]],[[119882,119882],"mapped",[119]],[[119883,119883],"mapped",[120]],[[119884,119884],"mapped",[121]],[[119885,119885],"mapped",[122]],[[119886,119886],"mapped",[97]],[[119887,119887],"mapped",[98]],[[119888,119888],"mapped",[99]],[[119889,119889],"mapped",[100]],[[119890,119890],"mapped",[101]],[[119891,119891],"mapped",[102]],[[119892,119892],"mapped",[103]],[[119893,119893],"disallowed"],[[119894,119894],"mapped",[105]],[[119895,119895],"mapped",[106]],[[119896,119896],"mapped",[107]],[[119897,119897],"mapped",[108]],[[119898,119898],"mapped",[109]],[[119899,119899],"mapped",[110]],[[119900,119900],"mapped",[111]],[[119901,119901],"mapped",[112]],[[119902,119902],"mapped",[113]],[[119903,119903],"mapped",[114]],[[119904,119904],"mapped",[115]],[[119905,119905],"mapped",[116]],[[119906,119906],"mapped",[117]],[[119907,119907],"mapped",[118]],[[119908,119908],"mapped",[119]],[[119909,119909],"mapped",[120]],[[119910,119910],"mapped",[121]],[[119911,119911],"mapped",[122]],[[119912,119912],"mapped",[97]],[[119913,119913],"mapped",[98]],[[119914,119914],"mapped",[99]],[[119915,119915],"mapped",[100]],[[119916,119916],"mapped",[101]],[[119917,119917],"mapped",[102]],[[119918,119918],"mapped",[103]],[[119919,119919],"mapped",[104]],[[119920,119920],"mapped",[105]],[[119921,119921],"mapped",[106]],[[119922,119922],"mapped",[107]],[[119923,119923],"mapped",[108]],[[119924,119924],"mapped",[109]],[[119925,119925],"mapped",[110]],[[119926,119926],"mapped",[111]],[[119927,119927],"mapped",[112]],[[119928,119928],"mapped",[113]],[[119929,119929],"mapped",[114]],[[119930,119930],"mapped",[115]],[[119931,119931],"mapped",[116]],[[119932,119932],"mapped",[117]],[[119933,119933],"mapped",[118]],[[119934,119934],"mapped",[119]],[[119935,119935],"mapped",[120]],[[119936,119936],"mapped",[121]],[[119937,119937],"mapped",[122]],[[119938,119938],"mapped",[97]],[[119939,119939],"mapped",[98]],[[119940,119940],"mapped",[99]],[[119941,119941],"mapped",[100]],[[119942,119942],"mapped",[101]],[[119943,119943],"mapped",[102]],[[119944,119944],"mapped",[103]],[[119945,119945],"mapped",[104]],[[119946,119946],"mapped",[105]],[[119947,119947],"mapped",[106]],[[119948,119948],"mapped",[107]],[[119949,119949],"mapped",[108]],[[119950,119950],"mapped",[109]],[[119951,119951],"mapped",[110]],[[119952,119952],"mapped",[111]],[[119953,119953],"mapped",[112]],[[119954,119954],"mapped",[113]],[[11995
5,119955],"mapped",[114]],[[119956,119956],"mapped",[115]],[[119957,119957],"mapped",[116]],[[119958,119958],"mapped",[117]],[[119959,119959],"mapped",[118]],[[119960,119960],"mapped",[119]],[[119961,119961],"mapped",[120]],[[119962,119962],"mapped",[121]],[[119963,119963],"mapped",[122]],[[119964,119964],"mapped",[97]],[[119965,119965],"disallowed"],[[119966,119966],"mapped",[99]],[[119967,119967],"mapped",[100]],[[119968,119969],"disallowed"],[[119970,119970],"mapped",[103]],[[119971,119972],"disallowed"],[[119973,119973],"mapped",[106]],[[119974,119974],"mapped",[107]],[[119975,119976],"disallowed"],[[119977,119977],"mapped",[110]],[[119978,119978],"mapped",[111]],[[119979,119979],"mapped",[112]],[[119980,119980],"mapped",[113]],[[119981,119981],"disallowed"],[[119982,119982],"mapped",[115]],[[119983,119983],"mapped",[116]],[[119984,119984],"mapped",[117]],[[119985,119985],"mapped",[118]],[[119986,119986],"mapped",[119]],[[119987,119987],"mapped",[120]],[[119988,119988],"mapped",[121]],[[119989,119989],"mapped",[122]],[[119990,119990],"mapped",[97]],[[119991,119991],"mapped",[98]],[[119992,119992],"mapped",[99]],[[119993,119993],"mapped",[100]],[[119994,119994],"disallowed"],[[119995,119995],"mapped",[102]],[[119996,119996],"disallowed"],[[119997,119997],"mapped",[104]],[[119998,119998],"mapped",[105]],[[119999,119999],"mapped",[106]],[[120000,120000],"mapped",[107]],[[120001,120001],"mapped",[108]],[[120002,120002],"mapped",[109]],[[120003,120003],"mapped",[110]],[[120004,120004],"disallowed"],[[120005,120005],"mapped",[112]],[[120006,120006],"mapped",[113]],[[120007,120007],"mapped",[114]],[[120008,120008],"mapped",[115]],[[120009,120009],"mapped",[116]],[[120010,120010],"mapped",[117]],[[120011,120011],"mapped",[118]],[[120012,120012],"mapped",[119]],[[120013,120013],"mapped",[120]],[[120014,120014],"mapped",[121]],[[120015,120015],"mapped",[122]],[[120016,120016],"mapped",[97]],[[120017,120017],"mapped",[98]],[[120018,120018],"mapped",[99]],[[120019,120019],"mapped",[100]],[[120020,120020],"mapped",[101]],[[120021,120021],"mapped",[102]],[[120022,120022],"mapped",[103]],[[120023,120023],"mapped",[104]],[[120024,120024],"mapped",[105]],[[120025,120025],"mapped",[106]],[[120026,120026],"mapped",[107]],[[120027,120027],"mapped",[108]],[[120028,120028],"mapped",[109]],[[120029,120029],"mapped",[110]],[[120030,120030],"mapped",[111]],[[120031,120031],"mapped",[112]],[[120032,120032],"mapped",[113]],[[120033,120033],"mapped",[114]],[[120034,120034],"mapped",[115]],[[120035,120035],"mapped",[116]],[[120036,120036],"mapped",[117]],[[120037,120037],"mapped",[118]],[[120038,120038],"mapped",[119]],[[120039,120039],"mapped",[120]],[[120040,120040],"mapped",[121]],[[120041,120041],"mapped",[122]],[[120042,120042],"mapped",[97]],[[120043,120043],"mapped",[98]],[[120044,120044],"mapped",[99]],[[120045,120045],"mapped",[100]],[[120046,120046],"mapped",[101]],[[120047,120047],"mapped",[102]],[[120048,120048],"mapped",[103]],[[120049,120049],"mapped",[104]],[[120050,120050],"mapped",[105]],[[120051,120051],"mapped",[106]],[[120052,120052],"mapped",[107]],[[120053,120053],"mapped",[108]],[[120054,120054],"mapped",[109]],[[120055,120055],"mapped",[110]],[[120056,120056],"mapped",[111]],[[120057,120057],"mapped",[112]],[[120058,120058],"mapped",[113]],[[120059,120059],"mapped",[114]],[[120060,120060],"mapped",[115]],[[120061,120061],"mapped",[116]],[[120062,120062],"mapped",[117]],[[120063,120063],"mapped",[118]],[[120064,120064],"mapped",[119]],[[120065,120065],"mapped",[120]],[[120066,120066],"mapped"
,[121]],[[120067,120067],"mapped",[122]],[[120068,120068],"mapped",[97]],[[120069,120069],"mapped",[98]],[[120070,120070],"disallowed"],[[120071,120071],"mapped",[100]],[[120072,120072],"mapped",[101]],[[120073,120073],"mapped",[102]],[[120074,120074],"mapped",[103]],[[120075,120076],"disallowed"],[[120077,120077],"mapped",[106]],[[120078,120078],"mapped",[107]],[[120079,120079],"mapped",[108]],[[120080,120080],"mapped",[109]],[[120081,120081],"mapped",[110]],[[120082,120082],"mapped",[111]],[[120083,120083],"mapped",[112]],[[120084,120084],"mapped",[113]],[[120085,120085],"disallowed"],[[120086,120086],"mapped",[115]],[[120087,120087],"mapped",[116]],[[120088,120088],"mapped",[117]],[[120089,120089],"mapped",[118]],[[120090,120090],"mapped",[119]],[[120091,120091],"mapped",[120]],[[120092,120092],"mapped",[121]],[[120093,120093],"disallowed"],[[120094,120094],"mapped",[97]],[[120095,120095],"mapped",[98]],[[120096,120096],"mapped",[99]],[[120097,120097],"mapped",[100]],[[120098,120098],"mapped",[101]],[[120099,120099],"mapped",[102]],[[120100,120100],"mapped",[103]],[[120101,120101],"mapped",[104]],[[120102,120102],"mapped",[105]],[[120103,120103],"mapped",[106]],[[120104,120104],"mapped",[107]],[[120105,120105],"mapped",[108]],[[120106,120106],"mapped",[109]],[[120107,120107],"mapped",[110]],[[120108,120108],"mapped",[111]],[[120109,120109],"mapped",[112]],[[120110,120110],"mapped",[113]],[[120111,120111],"mapped",[114]],[[120112,120112],"mapped",[115]],[[120113,120113],"mapped",[116]],[[120114,120114],"mapped",[117]],[[120115,120115],"mapped",[118]],[[120116,120116],"mapped",[119]],[[120117,120117],"mapped",[120]],[[120118,120118],"mapped",[121]],[[120119,120119],"mapped",[122]],[[120120,120120],"mapped",[97]],[[120121,120121],"mapped",[98]],[[120122,120122],"disallowed"],[[120123,120123],"mapped",[100]],[[120124,120124],"mapped",[101]],[[120125,120125],"mapped",[102]],[[120126,120126],"mapped",[103]],[[120127,120127],"disallowed"],[[120128,120128],"mapped",[105]],[[120129,120129],"mapped",[106]],[[120130,120130],"mapped",[107]],[[120131,120131],"mapped",[108]],[[120132,120132],"mapped",[109]],[[120133,120133],"disallowed"],[[120134,120134],"mapped",[111]],[[120135,120137],"disallowed"],[[120138,120138],"mapped",[115]],[[120139,120139],"mapped",[116]],[[120140,120140],"mapped",[117]],[[120141,120141],"mapped",[118]],[[120142,120142],"mapped",[119]],[[120143,120143],"mapped",[120]],[[120144,120144],"mapped",[121]],[[120145,120145],"disallowed"],[[120146,120146],"mapped",[97]],[[120147,120147],"mapped",[98]],[[120148,120148],"mapped",[99]],[[120149,120149],"mapped",[100]],[[120150,120150],"mapped",[101]],[[120151,120151],"mapped",[102]],[[120152,120152],"mapped",[103]],[[120153,120153],"mapped",[104]],[[120154,120154],"mapped",[105]],[[120155,120155],"mapped",[106]],[[120156,120156],"mapped",[107]],[[120157,120157],"mapped",[108]],[[120158,120158],"mapped",[109]],[[120159,120159],"mapped",[110]],[[120160,120160],"mapped",[111]],[[120161,120161],"mapped",[112]],[[120162,120162],"mapped",[113]],[[120163,120163],"mapped",[114]],[[120164,120164],"mapped",[115]],[[120165,120165],"mapped",[116]],[[120166,120166],"mapped",[117]],[[120167,120167],"mapped",[118]],[[120168,120168],"mapped",[119]],[[120169,120169],"mapped",[120]],[[120170,120170],"mapped",[121]],[[120171,120171],"mapped",[122]],[[120172,120172],"mapped",[97]],[[120173,120173],"mapped",[98]],[[120174,120174],"mapped",[99]],[[120175,120175],"mapped",[100]],[[120176,120176],"mapped",[101]],[[120177,120177],"mapped",[102]],[[120178,12017
8],"mapped",[103]],[[120179,120179],"mapped",[104]],[[120180,120180],"mapped",[105]],[[120181,120181],"mapped",[106]],[[120182,120182],"mapped",[107]],[[120183,120183],"mapped",[108]],[[120184,120184],"mapped",[109]],[[120185,120185],"mapped",[110]],[[120186,120186],"mapped",[111]],[[120187,120187],"mapped",[112]],[[120188,120188],"mapped",[113]],[[120189,120189],"mapped",[114]],[[120190,120190],"mapped",[115]],[[120191,120191],"mapped",[116]],[[120192,120192],"mapped",[117]],[[120193,120193],"mapped",[118]],[[120194,120194],"mapped",[119]],[[120195,120195],"mapped",[120]],[[120196,120196],"mapped",[121]],[[120197,120197],"mapped",[122]],[[120198,120198],"mapped",[97]],[[120199,120199],"mapped",[98]],[[120200,120200],"mapped",[99]],[[120201,120201],"mapped",[100]],[[120202,120202],"mapped",[101]],[[120203,120203],"mapped",[102]],[[120204,120204],"mapped",[103]],[[120205,120205],"mapped",[104]],[[120206,120206],"mapped",[105]],[[120207,120207],"mapped",[106]],[[120208,120208],"mapped",[107]],[[120209,120209],"mapped",[108]],[[120210,120210],"mapped",[109]],[[120211,120211],"mapped",[110]],[[120212,120212],"mapped",[111]],[[120213,120213],"mapped",[112]],[[120214,120214],"mapped",[113]],[[120215,120215],"mapped",[114]],[[120216,120216],"mapped",[115]],[[120217,120217],"mapped",[116]],[[120218,120218],"mapped",[117]],[[120219,120219],"mapped",[118]],[[120220,120220],"mapped",[119]],[[120221,120221],"mapped",[120]],[[120222,120222],"mapped",[121]],[[120223,120223],"mapped",[122]],[[120224,120224],"mapped",[97]],[[120225,120225],"mapped",[98]],[[120226,120226],"mapped",[99]],[[120227,120227],"mapped",[100]],[[120228,120228],"mapped",[101]],[[120229,120229],"mapped",[102]],[[120230,120230],"mapped",[103]],[[120231,120231],"mapped",[104]],[[120232,120232],"mapped",[105]],[[120233,120233],"mapped",[106]],[[120234,120234],"mapped",[107]],[[120235,120235],"mapped",[108]],[[120236,120236],"mapped",[109]],[[120237,120237],"mapped",[110]],[[120238,120238],"mapped",[111]],[[120239,120239],"mapped",[112]],[[120240,120240],"mapped",[113]],[[120241,120241],"mapped",[114]],[[120242,120242],"mapped",[115]],[[120243,120243],"mapped",[116]],[[120244,120244],"mapped",[117]],[[120245,120245],"mapped",[118]],[[120246,120246],"mapped",[119]],[[120247,120247],"mapped",[120]],[[120248,120248],"mapped",[121]],[[120249,120249],"mapped",[122]],[[120250,120250],"mapped",[97]],[[120251,120251],"mapped",[98]],[[120252,120252],"mapped",[99]],[[120253,120253],"mapped",[100]],[[120254,120254],"mapped",[101]],[[120255,120255],"mapped",[102]],[[120256,120256],"mapped",[103]],[[120257,120257],"mapped",[104]],[[120258,120258],"mapped",[105]],[[120259,120259],"mapped",[106]],[[120260,120260],"mapped",[107]],[[120261,120261],"mapped",[108]],[[120262,120262],"mapped",[109]],[[120263,120263],"mapped",[110]],[[120264,120264],"mapped",[111]],[[120265,120265],"mapped",[112]],[[120266,120266],"mapped",[113]],[[120267,120267],"mapped",[114]],[[120268,120268],"mapped",[115]],[[120269,120269],"mapped",[116]],[[120270,120270],"mapped",[117]],[[120271,120271],"mapped",[118]],[[120272,120272],"mapped",[119]],[[120273,120273],"mapped",[120]],[[120274,120274],"mapped",[121]],[[120275,120275],"mapped",[122]],[[120276,120276],"mapped",[97]],[[120277,120277],"mapped",[98]],[[120278,120278],"mapped",[99]],[[120279,120279],"mapped",[100]],[[120280,120280],"mapped",[101]],[[120281,120281],"mapped",[102]],[[120282,120282],"mapped",[103]],[[120283,120283],"mapped",[104]],[[120284,120284],"mapped",[105]],[[120285,120285],"mapped",[106]],[[120286,120286],
"mapped",[107]],[[120287,120287],"mapped",[108]],[[120288,120288],"mapped",[109]],[[120289,120289],"mapped",[110]],[[120290,120290],"mapped",[111]],[[120291,120291],"mapped",[112]],[[120292,120292],"mapped",[113]],[[120293,120293],"mapped",[114]],[[120294,120294],"mapped",[115]],[[120295,120295],"mapped",[116]],[[120296,120296],"mapped",[117]],[[120297,120297],"mapped",[118]],[[120298,120298],"mapped",[119]],[[120299,120299],"mapped",[120]],[[120300,120300],"mapped",[121]],[[120301,120301],"mapped",[122]],[[120302,120302],"mapped",[97]],[[120303,120303],"mapped",[98]],[[120304,120304],"mapped",[99]],[[120305,120305],"mapped",[100]],[[120306,120306],"mapped",[101]],[[120307,120307],"mapped",[102]],[[120308,120308],"mapped",[103]],[[120309,120309],"mapped",[104]],[[120310,120310],"mapped",[105]],[[120311,120311],"mapped",[106]],[[120312,120312],"mapped",[107]],[[120313,120313],"mapped",[108]],[[120314,120314],"mapped",[109]],[[120315,120315],"mapped",[110]],[[120316,120316],"mapped",[111]],[[120317,120317],"mapped",[112]],[[120318,120318],"mapped",[113]],[[120319,120319],"mapped",[114]],[[120320,120320],"mapped",[115]],[[120321,120321],"mapped",[116]],[[120322,120322],"mapped",[117]],[[120323,120323],"mapped",[118]],[[120324,120324],"mapped",[119]],[[120325,120325],"mapped",[120]],[[120326,120326],"mapped",[121]],[[120327,120327],"mapped",[122]],[[120328,120328],"mapped",[97]],[[120329,120329],"mapped",[98]],[[120330,120330],"mapped",[99]],[[120331,120331],"mapped",[100]],[[120332,120332],"mapped",[101]],[[120333,120333],"mapped",[102]],[[120334,120334],"mapped",[103]],[[120335,120335],"mapped",[104]],[[120336,120336],"mapped",[105]],[[120337,120337],"mapped",[106]],[[120338,120338],"mapped",[107]],[[120339,120339],"mapped",[108]],[[120340,120340],"mapped",[109]],[[120341,120341],"mapped",[110]],[[120342,120342],"mapped",[111]],[[120343,120343],"mapped",[112]],[[120344,120344],"mapped",[113]],[[120345,120345],"mapped",[114]],[[120346,120346],"mapped",[115]],[[120347,120347],"mapped",[116]],[[120348,120348],"mapped",[117]],[[120349,120349],"mapped",[118]],[[120350,120350],"mapped",[119]],[[120351,120351],"mapped",[120]],[[120352,120352],"mapped",[121]],[[120353,120353],"mapped",[122]],[[120354,120354],"mapped",[97]],[[120355,120355],"mapped",[98]],[[120356,120356],"mapped",[99]],[[120357,120357],"mapped",[100]],[[120358,120358],"mapped",[101]],[[120359,120359],"mapped",[102]],[[120360,120360],"mapped",[103]],[[120361,120361],"mapped",[104]],[[120362,120362],"mapped",[105]],[[120363,120363],"mapped",[106]],[[120364,120364],"mapped",[107]],[[120365,120365],"mapped",[108]],[[120366,120366],"mapped",[109]],[[120367,120367],"mapped",[110]],[[120368,120368],"mapped",[111]],[[120369,120369],"mapped",[112]],[[120370,120370],"mapped",[113]],[[120371,120371],"mapped",[114]],[[120372,120372],"mapped",[115]],[[120373,120373],"mapped",[116]],[[120374,120374],"mapped",[117]],[[120375,120375],"mapped",[118]],[[120376,120376],"mapped",[119]],[[120377,120377],"mapped",[120]],[[120378,120378],"mapped",[121]],[[120379,120379],"mapped",[122]],[[120380,120380],"mapped",[97]],[[120381,120381],"mapped",[98]],[[120382,120382],"mapped",[99]],[[120383,120383],"mapped",[100]],[[120384,120384],"mapped",[101]],[[120385,120385],"mapped",[102]],[[120386,120386],"mapped",[103]],[[120387,120387],"mapped",[104]],[[120388,120388],"mapped",[105]],[[120389,120389],"mapped",[106]],[[120390,120390],"mapped",[107]],[[120391,120391],"mapped",[108]],[[120392,120392],"mapped",[109]],[[120393,120393],"mapped",[110]],[[120394,120394],"ma
pped",[111]],[[120395,120395],"mapped",[112]],[[120396,120396],"mapped",[113]],[[120397,120397],"mapped",[114]],[[120398,120398],"mapped",[115]],[[120399,120399],"mapped",[116]],[[120400,120400],"mapped",[117]],[[120401,120401],"mapped",[118]],[[120402,120402],"mapped",[119]],[[120403,120403],"mapped",[120]],[[120404,120404],"mapped",[121]],[[120405,120405],"mapped",[122]],[[120406,120406],"mapped",[97]],[[120407,120407],"mapped",[98]],[[120408,120408],"mapped",[99]],[[120409,120409],"mapped",[100]],[[120410,120410],"mapped",[101]],[[120411,120411],"mapped",[102]],[[120412,120412],"mapped",[103]],[[120413,120413],"mapped",[104]],[[120414,120414],"mapped",[105]],[[120415,120415],"mapped",[106]],[[120416,120416],"mapped",[107]],[[120417,120417],"mapped",[108]],[[120418,120418],"mapped",[109]],[[120419,120419],"mapped",[110]],[[120420,120420],"mapped",[111]],[[120421,120421],"mapped",[112]],[[120422,120422],"mapped",[113]],[[120423,120423],"mapped",[114]],[[120424,120424],"mapped",[115]],[[120425,120425],"mapped",[116]],[[120426,120426],"mapped",[117]],[[120427,120427],"mapped",[118]],[[120428,120428],"mapped",[119]],[[120429,120429],"mapped",[120]],[[120430,120430],"mapped",[121]],[[120431,120431],"mapped",[122]],[[120432,120432],"mapped",[97]],[[120433,120433],"mapped",[98]],[[120434,120434],"mapped",[99]],[[120435,120435],"mapped",[100]],[[120436,120436],"mapped",[101]],[[120437,120437],"mapped",[102]],[[120438,120438],"mapped",[103]],[[120439,120439],"mapped",[104]],[[120440,120440],"mapped",[105]],[[120441,120441],"mapped",[106]],[[120442,120442],"mapped",[107]],[[120443,120443],"mapped",[108]],[[120444,120444],"mapped",[109]],[[120445,120445],"mapped",[110]],[[120446,120446],"mapped",[111]],[[120447,120447],"mapped",[112]],[[120448,120448],"mapped",[113]],[[120449,120449],"mapped",[114]],[[120450,120450],"mapped",[115]],[[120451,120451],"mapped",[116]],[[120452,120452],"mapped",[117]],[[120453,120453],"mapped",[118]],[[120454,120454],"mapped",[119]],[[120455,120455],"mapped",[120]],[[120456,120456],"mapped",[121]],[[120457,120457],"mapped",[122]],[[120458,120458],"mapped",[97]],[[120459,120459],"mapped",[98]],[[120460,120460],"mapped",[99]],[[120461,120461],"mapped",[100]],[[120462,120462],"mapped",[101]],[[120463,120463],"mapped",[102]],[[120464,120464],"mapped",[103]],[[120465,120465],"mapped",[104]],[[120466,120466],"mapped",[105]],[[120467,120467],"mapped",[106]],[[120468,120468],"mapped",[107]],[[120469,120469],"mapped",[108]],[[120470,120470],"mapped",[109]],[[120471,120471],"mapped",[110]],[[120472,120472],"mapped",[111]],[[120473,120473],"mapped",[112]],[[120474,120474],"mapped",[113]],[[120475,120475],"mapped",[114]],[[120476,120476],"mapped",[115]],[[120477,120477],"mapped",[116]],[[120478,120478],"mapped",[117]],[[120479,120479],"mapped",[118]],[[120480,120480],"mapped",[119]],[[120481,120481],"mapped",[120]],[[120482,120482],"mapped",[121]],[[120483,120483],"mapped",[122]],[[120484,120484],"mapped",[305]],[[120485,120485],"mapped",[567]],[[120486,120487],"disallowed"],[[120488,120488],"mapped",[945]],[[120489,120489],"mapped",[946]],[[120490,120490],"mapped",[947]],[[120491,120491],"mapped",[948]],[[120492,120492],"mapped",[949]],[[120493,120493],"mapped",[950]],[[120494,120494],"mapped",[951]],[[120495,120495],"mapped",[952]],[[120496,120496],"mapped",[953]],[[120497,120497],"mapped",[954]],[[120498,120498],"mapped",[955]],[[120499,120499],"mapped",[956]],[[120500,120500],"mapped",[957]],[[120501,120501],"mapped",[958]],[[120502,120502],"mapped",[959]],[[120503,120503],"mapp
ed",[960]],[[120504,120504],"mapped",[961]],[[120505,120505],"mapped",[952]],[[120506,120506],"mapped",[963]],[[120507,120507],"mapped",[964]],[[120508,120508],"mapped",[965]],[[120509,120509],"mapped",[966]],[[120510,120510],"mapped",[967]],[[120511,120511],"mapped",[968]],[[120512,120512],"mapped",[969]],[[120513,120513],"mapped",[8711]],[[120514,120514],"mapped",[945]],[[120515,120515],"mapped",[946]],[[120516,120516],"mapped",[947]],[[120517,120517],"mapped",[948]],[[120518,120518],"mapped",[949]],[[120519,120519],"mapped",[950]],[[120520,120520],"mapped",[951]],[[120521,120521],"mapped",[952]],[[120522,120522],"mapped",[953]],[[120523,120523],"mapped",[954]],[[120524,120524],"mapped",[955]],[[120525,120525],"mapped",[956]],[[120526,120526],"mapped",[957]],[[120527,120527],"mapped",[958]],[[120528,120528],"mapped",[959]],[[120529,120529],"mapped",[960]],[[120530,120530],"mapped",[961]],[[120531,120532],"mapped",[963]],[[120533,120533],"mapped",[964]],[[120534,120534],"mapped",[965]],[[120535,120535],"mapped",[966]],[[120536,120536],"mapped",[967]],[[120537,120537],"mapped",[968]],[[120538,120538],"mapped",[969]],[[120539,120539],"mapped",[8706]],[[120540,120540],"mapped",[949]],[[120541,120541],"mapped",[952]],[[120542,120542],"mapped",[954]],[[120543,120543],"mapped",[966]],[[120544,120544],"mapped",[961]],[[120545,120545],"mapped",[960]],[[120546,120546],"mapped",[945]],[[120547,120547],"mapped",[946]],[[120548,120548],"mapped",[947]],[[120549,120549],"mapped",[948]],[[120550,120550],"mapped",[949]],[[120551,120551],"mapped",[950]],[[120552,120552],"mapped",[951]],[[120553,120553],"mapped",[952]],[[120554,120554],"mapped",[953]],[[120555,120555],"mapped",[954]],[[120556,120556],"mapped",[955]],[[120557,120557],"mapped",[956]],[[120558,120558],"mapped",[957]],[[120559,120559],"mapped",[958]],[[120560,120560],"mapped",[959]],[[120561,120561],"mapped",[960]],[[120562,120562],"mapped",[961]],[[120563,120563],"mapped",[952]],[[120564,120564],"mapped",[963]],[[120565,120565],"mapped",[964]],[[120566,120566],"mapped",[965]],[[120567,120567],"mapped",[966]],[[120568,120568],"mapped",[967]],[[120569,120569],"mapped",[968]],[[120570,120570],"mapped",[969]],[[120571,120571],"mapped",[8711]],[[120572,120572],"mapped",[945]],[[120573,120573],"mapped",[946]],[[120574,120574],"mapped",[947]],[[120575,120575],"mapped",[948]],[[120576,120576],"mapped",[949]],[[120577,120577],"mapped",[950]],[[120578,120578],"mapped",[951]],[[120579,120579],"mapped",[952]],[[120580,120580],"mapped",[953]],[[120581,120581],"mapped",[954]],[[120582,120582],"mapped",[955]],[[120583,120583],"mapped",[956]],[[120584,120584],"mapped",[957]],[[120585,120585],"mapped",[958]],[[120586,120586],"mapped",[959]],[[120587,120587],"mapped",[960]],[[120588,120588],"mapped",[961]],[[120589,120590],"mapped",[963]],[[120591,120591],"mapped",[964]],[[120592,120592],"mapped",[965]],[[120593,120593],"mapped",[966]],[[120594,120594],"mapped",[967]],[[120595,120595],"mapped",[968]],[[120596,120596],"mapped",[969]],[[120597,120597],"mapped",[8706]],[[120598,120598],"mapped",[949]],[[120599,120599],"mapped",[952]],[[120600,120600],"mapped",[954]],[[120601,120601],"mapped",[966]],[[120602,120602],"mapped",[961]],[[120603,120603],"mapped",[960]],[[120604,120604],"mapped",[945]],[[120605,120605],"mapped",[946]],[[120606,120606],"mapped",[947]],[[120607,120607],"mapped",[948]],[[120608,120608],"mapped",[949]],[[120609,120609],"mapped",[950]],[[120610,120610],"mapped",[951]],[[120611,120611],"mapped",[952]],[[120612,120612],"mapped",[953]],[[120613,
120613],"mapped",[954]],[[120614,120614],"mapped",[955]],[[120615,120615],"mapped",[956]],[[120616,120616],"mapped",[957]],[[120617,120617],"mapped",[958]],[[120618,120618],"mapped",[959]],[[120619,120619],"mapped",[960]],[[120620,120620],"mapped",[961]],[[120621,120621],"mapped",[952]],[[120622,120622],"mapped",[963]],[[120623,120623],"mapped",[964]],[[120624,120624],"mapped",[965]],[[120625,120625],"mapped",[966]],[[120626,120626],"mapped",[967]],[[120627,120627],"mapped",[968]],[[120628,120628],"mapped",[969]],[[120629,120629],"mapped",[8711]],[[120630,120630],"mapped",[945]],[[120631,120631],"mapped",[946]],[[120632,120632],"mapped",[947]],[[120633,120633],"mapped",[948]],[[120634,120634],"mapped",[949]],[[120635,120635],"mapped",[950]],[[120636,120636],"mapped",[951]],[[120637,120637],"mapped",[952]],[[120638,120638],"mapped",[953]],[[120639,120639],"mapped",[954]],[[120640,120640],"mapped",[955]],[[120641,120641],"mapped",[956]],[[120642,120642],"mapped",[957]],[[120643,120643],"mapped",[958]],[[120644,120644],"mapped",[959]],[[120645,120645],"mapped",[960]],[[120646,120646],"mapped",[961]],[[120647,120648],"mapped",[963]],[[120649,120649],"mapped",[964]],[[120650,120650],"mapped",[965]],[[120651,120651],"mapped",[966]],[[120652,120652],"mapped",[967]],[[120653,120653],"mapped",[968]],[[120654,120654],"mapped",[969]],[[120655,120655],"mapped",[8706]],[[120656,120656],"mapped",[949]],[[120657,120657],"mapped",[952]],[[120658,120658],"mapped",[954]],[[120659,120659],"mapped",[966]],[[120660,120660],"mapped",[961]],[[120661,120661],"mapped",[960]],[[120662,120662],"mapped",[945]],[[120663,120663],"mapped",[946]],[[120664,120664],"mapped",[947]],[[120665,120665],"mapped",[948]],[[120666,120666],"mapped",[949]],[[120667,120667],"mapped",[950]],[[120668,120668],"mapped",[951]],[[120669,120669],"mapped",[952]],[[120670,120670],"mapped",[953]],[[120671,120671],"mapped",[954]],[[120672,120672],"mapped",[955]],[[120673,120673],"mapped",[956]],[[120674,120674],"mapped",[957]],[[120675,120675],"mapped",[958]],[[120676,120676],"mapped",[959]],[[120677,120677],"mapped",[960]],[[120678,120678],"mapped",[961]],[[120679,120679],"mapped",[952]],[[120680,120680],"mapped",[963]],[[120681,120681],"mapped",[964]],[[120682,120682],"mapped",[965]],[[120683,120683],"mapped",[966]],[[120684,120684],"mapped",[967]],[[120685,120685],"mapped",[968]],[[120686,120686],"mapped",[969]],[[120687,120687],"mapped",[8711]],[[120688,120688],"mapped",[945]],[[120689,120689],"mapped",[946]],[[120690,120690],"mapped",[947]],[[120691,120691],"mapped",[948]],[[120692,120692],"mapped",[949]],[[120693,120693],"mapped",[950]],[[120694,120694],"mapped",[951]],[[120695,120695],"mapped",[952]],[[120696,120696],"mapped",[953]],[[120697,120697],"mapped",[954]],[[120698,120698],"mapped",[955]],[[120699,120699],"mapped",[956]],[[120700,120700],"mapped",[957]],[[120701,120701],"mapped",[958]],[[120702,120702],"mapped",[959]],[[120703,120703],"mapped",[960]],[[120704,120704],"mapped",[961]],[[120705,120706],"mapped",[963]],[[120707,120707],"mapped",[964]],[[120708,120708],"mapped",[965]],[[120709,120709],"mapped",[966]],[[120710,120710],"mapped",[967]],[[120711,120711],"mapped",[968]],[[120712,120712],"mapped",[969]],[[120713,120713],"mapped",[8706]],[[120714,120714],"mapped",[949]],[[120715,120715],"mapped",[952]],[[120716,120716],"mapped",[954]],[[120717,120717],"mapped",[966]],[[120718,120718],"mapped",[961]],[[120719,120719],"mapped",[960]],[[120720,120720],"mapped",[945]],[[120721,120721],"mapped",[946]],[[120722,120722],"mapped",[94
7]],[[120723,120723],"mapped",[948]],[[120724,120724],"mapped",[949]],[[120725,120725],"mapped",[950]],[[120726,120726],"mapped",[951]],[[120727,120727],"mapped",[952]],[[120728,120728],"mapped",[953]],[[120729,120729],"mapped",[954]],[[120730,120730],"mapped",[955]],[[120731,120731],"mapped",[956]],[[120732,120732],"mapped",[957]],[[120733,120733],"mapped",[958]],[[120734,120734],"mapped",[959]],[[120735,120735],"mapped",[960]],[[120736,120736],"mapped",[961]],[[120737,120737],"mapped",[952]],[[120738,120738],"mapped",[963]],[[120739,120739],"mapped",[964]],[[120740,120740],"mapped",[965]],[[120741,120741],"mapped",[966]],[[120742,120742],"mapped",[967]],[[120743,120743],"mapped",[968]],[[120744,120744],"mapped",[969]],[[120745,120745],"mapped",[8711]],[[120746,120746],"mapped",[945]],[[120747,120747],"mapped",[946]],[[120748,120748],"mapped",[947]],[[120749,120749],"mapped",[948]],[[120750,120750],"mapped",[949]],[[120751,120751],"mapped",[950]],[[120752,120752],"mapped",[951]],[[120753,120753],"mapped",[952]],[[120754,120754],"mapped",[953]],[[120755,120755],"mapped",[954]],[[120756,120756],"mapped",[955]],[[120757,120757],"mapped",[956]],[[120758,120758],"mapped",[957]],[[120759,120759],"mapped",[958]],[[120760,120760],"mapped",[959]],[[120761,120761],"mapped",[960]],[[120762,120762],"mapped",[961]],[[120763,120764],"mapped",[963]],[[120765,120765],"mapped",[964]],[[120766,120766],"mapped",[965]],[[120767,120767],"mapped",[966]],[[120768,120768],"mapped",[967]],[[120769,120769],"mapped",[968]],[[120770,120770],"mapped",[969]],[[120771,120771],"mapped",[8706]],[[120772,120772],"mapped",[949]],[[120773,120773],"mapped",[952]],[[120774,120774],"mapped",[954]],[[120775,120775],"mapped",[966]],[[120776,120776],"mapped",[961]],[[120777,120777],"mapped",[960]],[[120778,120779],"mapped",[989]],[[120780,120781],"disallowed"],[[120782,120782],"mapped",[48]],[[120783,120783],"mapped",[49]],[[120784,120784],"mapped",[50]],[[120785,120785],"mapped",[51]],[[120786,120786],"mapped",[52]],[[120787,120787],"mapped",[53]],[[120788,120788],"mapped",[54]],[[120789,120789],"mapped",[55]],[[120790,120790],"mapped",[56]],[[120791,120791],"mapped",[57]],[[120792,120792],"mapped",[48]],[[120793,120793],"mapped",[49]],[[120794,120794],"mapped",[50]],[[120795,120795],"mapped",[51]],[[120796,120796],"mapped",[52]],[[120797,120797],"mapped",[53]],[[120798,120798],"mapped",[54]],[[120799,120799],"mapped",[55]],[[120800,120800],"mapped",[56]],[[120801,120801],"mapped",[57]],[[120802,120802],"mapped",[48]],[[120803,120803],"mapped",[49]],[[120804,120804],"mapped",[50]],[[120805,120805],"mapped",[51]],[[120806,120806],"mapped",[52]],[[120807,120807],"mapped",[53]],[[120808,120808],"mapped",[54]],[[120809,120809],"mapped",[55]],[[120810,120810],"mapped",[56]],[[120811,120811],"mapped",[57]],[[120812,120812],"mapped",[48]],[[120813,120813],"mapped",[49]],[[120814,120814],"mapped",[50]],[[120815,120815],"mapped",[51]],[[120816,120816],"mapped",[52]],[[120817,120817],"mapped",[53]],[[120818,120818],"mapped",[54]],[[120819,120819],"mapped",[55]],[[120820,120820],"mapped",[56]],[[120821,120821],"mapped",[57]],[[120822,120822],"mapped",[48]],[[120823,120823],"mapped",[49]],[[120824,120824],"mapped",[50]],[[120825,120825],"mapped",[51]],[[120826,120826],"mapped",[52]],[[120827,120827],"mapped",[53]],[[120828,120828],"mapped",[54]],[[120829,120829],"mapped",[55]],[[120830,120830],"mapped",[56]],[[120831,120831],"mapped",[57]],[[120832,121343],"valid",[],"NV8"],[[121344,121398],"valid"],[[121399,121402],"valid",[],"NV8"],[[12140
3,121452],"valid"],[[121453,121460],"valid",[],"NV8"],[[121461,121461],"valid"],[[121462,121475],"valid",[],"NV8"],[[121476,121476],"valid"],[[121477,121483],"valid",[],"NV8"],[[121484,121498],"disallowed"],[[121499,121503],"valid"],[[121504,121504],"disallowed"],[[121505,121519],"valid"],[[121520,124927],"disallowed"],[[124928,125124],"valid"],[[125125,125126],"disallowed"],[[125127,125135],"valid",[],"NV8"],[[125136,125142],"valid"],[[125143,126463],"disallowed"],[[126464,126464],"mapped",[1575]],[[126465,126465],"mapped",[1576]],[[126466,126466],"mapped",[1580]],[[126467,126467],"mapped",[1583]],[[126468,126468],"disallowed"],[[126469,126469],"mapped",[1608]],[[126470,126470],"mapped",[1586]],[[126471,126471],"mapped",[1581]],[[126472,126472],"mapped",[1591]],[[126473,126473],"mapped",[1610]],[[126474,126474],"mapped",[1603]],[[126475,126475],"mapped",[1604]],[[126476,126476],"mapped",[1605]],[[126477,126477],"mapped",[1606]],[[126478,126478],"mapped",[1587]],[[126479,126479],"mapped",[1593]],[[126480,126480],"mapped",[1601]],[[126481,126481],"mapped",[1589]],[[126482,126482],"mapped",[1602]],[[126483,126483],"mapped",[1585]],[[126484,126484],"mapped",[1588]],[[126485,126485],"mapped",[1578]],[[126486,126486],"mapped",[1579]],[[126487,126487],"mapped",[1582]],[[126488,126488],"mapped",[1584]],[[126489,126489],"mapped",[1590]],[[126490,126490],"mapped",[1592]],[[126491,126491],"mapped",[1594]],[[126492,126492],"mapped",[1646]],[[126493,126493],"mapped",[1722]],[[126494,126494],"mapped",[1697]],[[126495,126495],"mapped",[1647]],[[126496,126496],"disallowed"],[[126497,126497],"mapped",[1576]],[[126498,126498],"mapped",[1580]],[[126499,126499],"disallowed"],[[126500,126500],"mapped",[1607]],[[126501,126502],"disallowed"],[[126503,126503],"mapped",[1581]],[[126504,126504],"disallowed"],[[126505,126505],"mapped",[1610]],[[126506,126506],"mapped",[1603]],[[126507,126507],"mapped",[1604]],[[126508,126508],"mapped",[1605]],[[126509,126509],"mapped",[1606]],[[126510,126510],"mapped",[1587]],[[126511,126511],"mapped",[1593]],[[126512,126512],"mapped",[1601]],[[126513,126513],"mapped",[1589]],[[126514,126514],"mapped",[1602]],[[126515,126515],"disallowed"],[[126516,126516],"mapped",[1588]],[[126517,126517],"mapped",[1578]],[[126518,126518],"mapped",[1579]],[[126519,126519],"mapped",[1582]],[[126520,126520],"disallowed"],[[126521,126521],"mapped",[1590]],[[126522,126522],"disallowed"],[[126523,126523],"mapped",[1594]],[[126524,126529],"disallowed"],[[126530,126530],"mapped",[1580]],[[126531,126534],"disallowed"],[[126535,126535],"mapped",[1581]],[[126536,126536],"disallowed"],[[126537,126537],"mapped",[1610]],[[126538,126538],"disallowed"],[[126539,126539],"mapped",[1604]],[[126540,126540],"disallowed"],[[126541,126541],"mapped",[1606]],[[126542,126542],"mapped",[1587]],[[126543,126543],"mapped",[1593]],[[126544,126544],"disallowed"],[[126545,126545],"mapped",[1589]],[[126546,126546],"mapped",[1602]],[[126547,126547],"disallowed"],[[126548,126548],"mapped",[1588]],[[126549,126550],"disallowed"],[[126551,126551],"mapped",[1582]],[[126552,126552],"disallowed"],[[126553,126553],"mapped",[1590]],[[126554,126554],"disallowed"],[[126555,126555],"mapped",[1594]],[[126556,126556],"disallowed"],[[126557,126557],"mapped",[1722]],[[126558,126558],"disallowed"],[[126559,126559],"mapped",[1647]],[[126560,126560],"disallowed"],[[126561,126561],"mapped",[1576]],[[126562,126562],"mapped",[1580]],[[126563,126563],"disallowed"],[[126564,126564],"mapped",[1607]],[[126565,126566],"disallowed"],[[126567,126567],"mapped",
[1581]],[[126568,126568],"mapped",[1591]],[[126569,126569],"mapped",[1610]],[[126570,126570],"mapped",[1603]],[[126571,126571],"disallowed"],[[126572,126572],"mapped",[1605]],[[126573,126573],"mapped",[1606]],[[126574,126574],"mapped",[1587]],[[126575,126575],"mapped",[1593]],[[126576,126576],"mapped",[1601]],[[126577,126577],"mapped",[1589]],[[126578,126578],"mapped",[1602]],[[126579,126579],"disallowed"],[[126580,126580],"mapped",[1588]],[[126581,126581],"mapped",[1578]],[[126582,126582],"mapped",[1579]],[[126583,126583],"mapped",[1582]],[[126584,126584],"disallowed"],[[126585,126585],"mapped",[1590]],[[126586,126586],"mapped",[1592]],[[126587,126587],"mapped",[1594]],[[126588,126588],"mapped",[1646]],[[126589,126589],"disallowed"],[[126590,126590],"mapped",[1697]],[[126591,126591],"disallowed"],[[126592,126592],"mapped",[1575]],[[126593,126593],"mapped",[1576]],[[126594,126594],"mapped",[1580]],[[126595,126595],"mapped",[1583]],[[126596,126596],"mapped",[1607]],[[126597,126597],"mapped",[1608]],[[126598,126598],"mapped",[1586]],[[126599,126599],"mapped",[1581]],[[126600,126600],"mapped",[1591]],[[126601,126601],"mapped",[1610]],[[126602,126602],"disallowed"],[[126603,126603],"mapped",[1604]],[[126604,126604],"mapped",[1605]],[[126605,126605],"mapped",[1606]],[[126606,126606],"mapped",[1587]],[[126607,126607],"mapped",[1593]],[[126608,126608],"mapped",[1601]],[[126609,126609],"mapped",[1589]],[[126610,126610],"mapped",[1602]],[[126611,126611],"mapped",[1585]],[[126612,126612],"mapped",[1588]],[[126613,126613],"mapped",[1578]],[[126614,126614],"mapped",[1579]],[[126615,126615],"mapped",[1582]],[[126616,126616],"mapped",[1584]],[[126617,126617],"mapped",[1590]],[[126618,126618],"mapped",[1592]],[[126619,126619],"mapped",[1594]],[[126620,126624],"disallowed"],[[126625,126625],"mapped",[1576]],[[126626,126626],"mapped",[1580]],[[126627,126627],"mapped",[1583]],[[126628,126628],"disallowed"],[[126629,126629],"mapped",[1608]],[[126630,126630],"mapped",[1586]],[[126631,126631],"mapped",[1581]],[[126632,126632],"mapped",[1591]],[[126633,126633],"mapped",[1610]],[[126634,126634],"disallowed"],[[126635,126635],"mapped",[1604]],[[126636,126636],"mapped",[1605]],[[126637,126637],"mapped",[1606]],[[126638,126638],"mapped",[1587]],[[126639,126639],"mapped",[1593]],[[126640,126640],"mapped",[1601]],[[126641,126641],"mapped",[1589]],[[126642,126642],"mapped",[1602]],[[126643,126643],"mapped",[1585]],[[126644,126644],"mapped",[1588]],[[126645,126645],"mapped",[1578]],[[126646,126646],"mapped",[1579]],[[126647,126647],"mapped",[1582]],[[126648,126648],"mapped",[1584]],[[126649,126649],"mapped",[1590]],[[126650,126650],"mapped",[1592]],[[126651,126651],"mapped",[1594]],[[126652,126703],"disallowed"],[[126704,126705],"valid",[],"NV8"],[[126706,126975],"disallowed"],[[126976,127019],"valid",[],"NV8"],[[127020,127023],"disallowed"],[[127024,127123],"valid",[],"NV8"],[[127124,127135],"disallowed"],[[127136,127150],"valid",[],"NV8"],[[127151,127152],"disallowed"],[[127153,127166],"valid",[],"NV8"],[[127167,127167],"valid",[],"NV8"],[[127168,127168],"disallowed"],[[127169,127183],"valid",[],"NV8"],[[127184,127184],"disallowed"],[[127185,127199],"valid",[],"NV8"],[[127200,127221],"valid",[],"NV8"],[[127222,127231],"disallowed"],[[127232,127232],"disallowed"],[[127233,127233],"disallowed_STD3_mapped",[48,44]],[[127234,127234],"disallowed_STD3_mapped",[49,44]],[[127235,127235],"disallowed_STD3_mapped",[50,44]],[[127236,127236],"disallowed_STD3_mapped",[51,44]],[[127237,127237],"disallowed_STD3_mapped",[52,44]],[[127
238,127238],"disallowed_STD3_mapped",[53,44]],[[127239,127239],"disallowed_STD3_mapped",[54,44]],[[127240,127240],"disallowed_STD3_mapped",[55,44]],[[127241,127241],"disallowed_STD3_mapped",[56,44]],[[127242,127242],"disallowed_STD3_mapped",[57,44]],[[127243,127244],"valid",[],"NV8"],[[127245,127247],"disallowed"],[[127248,127248],"disallowed_STD3_mapped",[40,97,41]],[[127249,127249],"disallowed_STD3_mapped",[40,98,41]],[[127250,127250],"disallowed_STD3_mapped",[40,99,41]],[[127251,127251],"disallowed_STD3_mapped",[40,100,41]],[[127252,127252],"disallowed_STD3_mapped",[40,101,41]],[[127253,127253],"disallowed_STD3_mapped",[40,102,41]],[[127254,127254],"disallowed_STD3_mapped",[40,103,41]],[[127255,127255],"disallowed_STD3_mapped",[40,104,41]],[[127256,127256],"disallowed_STD3_mapped",[40,105,41]],[[127257,127257],"disallowed_STD3_mapped",[40,106,41]],[[127258,127258],"disallowed_STD3_mapped",[40,107,41]],[[127259,127259],"disallowed_STD3_mapped",[40,108,41]],[[127260,127260],"disallowed_STD3_mapped",[40,109,41]],[[127261,127261],"disallowed_STD3_mapped",[40,110,41]],[[127262,127262],"disallowed_STD3_mapped",[40,111,41]],[[127263,127263],"disallowed_STD3_mapped",[40,112,41]],[[127264,127264],"disallowed_STD3_mapped",[40,113,41]],[[127265,127265],"disallowed_STD3_mapped",[40,114,41]],[[127266,127266],"disallowed_STD3_mapped",[40,115,41]],[[127267,127267],"disallowed_STD3_mapped",[40,116,41]],[[127268,127268],"disallowed_STD3_mapped",[40,117,41]],[[127269,127269],"disallowed_STD3_mapped",[40,118,41]],[[127270,127270],"disallowed_STD3_mapped",[40,119,41]],[[127271,127271],"disallowed_STD3_mapped",[40,120,41]],[[127272,127272],"disallowed_STD3_mapped",[40,121,41]],[[127273,127273],"disallowed_STD3_mapped",[40,122,41]],[[127274,127274],"mapped",[12308,115,12309]],[[127275,127275],"mapped",[99]],[[127276,127276],"mapped",[114]],[[127277,127277],"mapped",[99,100]],[[127278,127278],"mapped",[119,122]],[[127279,127279],"disallowed"],[[127280,127280],"mapped",[97]],[[127281,127281],"mapped",[98]],[[127282,127282],"mapped",[99]],[[127283,127283],"mapped",[100]],[[127284,127284],"mapped",[101]],[[127285,127285],"mapped",[102]],[[127286,127286],"mapped",[103]],[[127287,127287],"mapped",[104]],[[127288,127288],"mapped",[105]],[[127289,127289],"mapped",[106]],[[127290,127290],"mapped",[107]],[[127291,127291],"mapped",[108]],[[127292,127292],"mapped",[109]],[[127293,127293],"mapped",[110]],[[127294,127294],"mapped",[111]],[[127295,127295],"mapped",[112]],[[127296,127296],"mapped",[113]],[[127297,127297],"mapped",[114]],[[127298,127298],"mapped",[115]],[[127299,127299],"mapped",[116]],[[127300,127300],"mapped",[117]],[[127301,127301],"mapped",[118]],[[127302,127302],"mapped",[119]],[[127303,127303],"mapped",[120]],[[127304,127304],"mapped",[121]],[[127305,127305],"mapped",[122]],[[127306,127306],"mapped",[104,118]],[[127307,127307],"mapped",[109,118]],[[127308,127308],"mapped",[115,100]],[[127309,127309],"mapped",[115,115]],[[127310,127310],"mapped",[112,112,118]],[[127311,127311],"mapped",[119,99]],[[127312,127318],"valid",[],"NV8"],[[127319,127319],"valid",[],"NV8"],[[127320,127326],"valid",[],"NV8"],[[127327,127327],"valid",[],"NV8"],[[127328,127337],"valid",[],"NV8"],[[127338,127338],"mapped",[109,99]],[[127339,127339],"mapped",[109,100]],[[127340,127343],"disallowed"],[[127344,127352],"valid",[],"NV8"],[[127353,127353],"valid",[],"NV8"],[[127354,127354],"valid",[],"NV8"],[[127355,127356],"valid",[],"NV8"],[[127357,127358],"valid",[],"NV8"],[[127359,127359],"valid",[],"NV8"],[[127360,127369],"valid",[],"
NV8"],[[127370,127373],"valid",[],"NV8"],[[127374,127375],"valid",[],"NV8"],[[127376,127376],"mapped",[100,106]],[[127377,127386],"valid",[],"NV8"],[[127387,127461],"disallowed"],[[127462,127487],"valid",[],"NV8"],[[127488,127488],"mapped",[12411,12363]],[[127489,127489],"mapped",[12467,12467]],[[127490,127490],"mapped",[12469]],[[127491,127503],"disallowed"],[[127504,127504],"mapped",[25163]],[[127505,127505],"mapped",[23383]],[[127506,127506],"mapped",[21452]],[[127507,127507],"mapped",[12487]],[[127508,127508],"mapped",[20108]],[[127509,127509],"mapped",[22810]],[[127510,127510],"mapped",[35299]],[[127511,127511],"mapped",[22825]],[[127512,127512],"mapped",[20132]],[[127513,127513],"mapped",[26144]],[[127514,127514],"mapped",[28961]],[[127515,127515],"mapped",[26009]],[[127516,127516],"mapped",[21069]],[[127517,127517],"mapped",[24460]],[[127518,127518],"mapped",[20877]],[[127519,127519],"mapped",[26032]],[[127520,127520],"mapped",[21021]],[[127521,127521],"mapped",[32066]],[[127522,127522],"mapped",[29983]],[[127523,127523],"mapped",[36009]],[[127524,127524],"mapped",[22768]],[[127525,127525],"mapped",[21561]],[[127526,127526],"mapped",[28436]],[[127527,127527],"mapped",[25237]],[[127528,127528],"mapped",[25429]],[[127529,127529],"mapped",[19968]],[[127530,127530],"mapped",[19977]],[[127531,127531],"mapped",[36938]],[[127532,127532],"mapped",[24038]],[[127533,127533],"mapped",[20013]],[[127534,127534],"mapped",[21491]],[[127535,127535],"mapped",[25351]],[[127536,127536],"mapped",[36208]],[[127537,127537],"mapped",[25171]],[[127538,127538],"mapped",[31105]],[[127539,127539],"mapped",[31354]],[[127540,127540],"mapped",[21512]],[[127541,127541],"mapped",[28288]],[[127542,127542],"mapped",[26377]],[[127543,127543],"mapped",[26376]],[[127544,127544],"mapped",[30003]],[[127545,127545],"mapped",[21106]],[[127546,127546],"mapped",[21942]],[[127547,127551],"disallowed"],[[127552,127552],"mapped",[12308,26412,12309]],[[127553,127553],"mapped",[12308,19977,12309]],[[127554,127554],"mapped",[12308,20108,12309]],[[127555,127555],"mapped",[12308,23433,12309]],[[127556,127556],"mapped",[12308,28857,12309]],[[127557,127557],"mapped",[12308,25171,12309]],[[127558,127558],"mapped",[12308,30423,12309]],[[127559,127559],"mapped",[12308,21213,12309]],[[127560,127560],"mapped",[12308,25943,12309]],[[127561,127567],"disallowed"],[[127568,127568],"mapped",[24471]],[[127569,127569],"mapped",[21487]],[[127570,127743],"disallowed"],[[127744,127776],"valid",[],"NV8"],[[127777,127788],"valid",[],"NV8"],[[127789,127791],"valid",[],"NV8"],[[127792,127797],"valid",[],"NV8"],[[127798,127798],"valid",[],"NV8"],[[127799,127868],"valid",[],"NV8"],[[127869,127869],"valid",[],"NV8"],[[127870,127871],"valid",[],"NV8"],[[127872,127891],"valid",[],"NV8"],[[127892,127903],"valid",[],"NV8"],[[127904,127940],"valid",[],"NV8"],[[127941,127941],"valid",[],"NV8"],[[127942,127946],"valid",[],"NV8"],[[127947,127950],"valid",[],"NV8"],[[127951,127955],"valid",[],"NV8"],[[127956,127967],"valid",[],"NV8"],[[127968,127984],"valid",[],"NV8"],[[127985,127991],"valid",[],"NV8"],[[127992,127999],"valid",[],"NV8"],[[128000,128062],"valid",[],"NV8"],[[128063,128063],"valid",[],"NV8"],[[128064,128064],"valid",[],"NV8"],[[128065,128065],"valid",[],"NV8"],[[128066,128247],"valid",[],"NV8"],[[128248,128248],"valid",[],"NV8"],[[128249,128252],"valid",[],"NV8"],[[128253,128254],"valid",[],"NV8"],[[128255,128255],"valid",[],"NV8"],[[128256,128317],"valid",[],"NV8"],[[128318,128319],"valid",[],"NV8"],[[128320,128323],"valid",[],"NV8"],[[128324,128330],
"valid",[],"NV8"],[[128331,128335],"valid",[],"NV8"],[[128336,128359],"valid",[],"NV8"],[[128360,128377],"valid",[],"NV8"],[[128378,128378],"disallowed"],[[128379,128419],"valid",[],"NV8"],[[128420,128420],"disallowed"],[[128421,128506],"valid",[],"NV8"],[[128507,128511],"valid",[],"NV8"],[[128512,128512],"valid",[],"NV8"],[[128513,128528],"valid",[],"NV8"],[[128529,128529],"valid",[],"NV8"],[[128530,128532],"valid",[],"NV8"],[[128533,128533],"valid",[],"NV8"],[[128534,128534],"valid",[],"NV8"],[[128535,128535],"valid",[],"NV8"],[[128536,128536],"valid",[],"NV8"],[[128537,128537],"valid",[],"NV8"],[[128538,128538],"valid",[],"NV8"],[[128539,128539],"valid",[],"NV8"],[[128540,128542],"valid",[],"NV8"],[[128543,128543],"valid",[],"NV8"],[[128544,128549],"valid",[],"NV8"],[[128550,128551],"valid",[],"NV8"],[[128552,128555],"valid",[],"NV8"],[[128556,128556],"valid",[],"NV8"],[[128557,128557],"valid",[],"NV8"],[[128558,128559],"valid",[],"NV8"],[[128560,128563],"valid",[],"NV8"],[[128564,128564],"valid",[],"NV8"],[[128565,128576],"valid",[],"NV8"],[[128577,128578],"valid",[],"NV8"],[[128579,128580],"valid",[],"NV8"],[[128581,128591],"valid",[],"NV8"],[[128592,128639],"valid",[],"NV8"],[[128640,128709],"valid",[],"NV8"],[[128710,128719],"valid",[],"NV8"],[[128720,128720],"valid",[],"NV8"],[[128721,128735],"disallowed"],[[128736,128748],"valid",[],"NV8"],[[128749,128751],"disallowed"],[[128752,128755],"valid",[],"NV8"],[[128756,128767],"disallowed"],[[128768,128883],"valid",[],"NV8"],[[128884,128895],"disallowed"],[[128896,128980],"valid",[],"NV8"],[[128981,129023],"disallowed"],[[129024,129035],"valid",[],"NV8"],[[129036,129039],"disallowed"],[[129040,129095],"valid",[],"NV8"],[[129096,129103],"disallowed"],[[129104,129113],"valid",[],"NV8"],[[129114,129119],"disallowed"],[[129120,129159],"valid",[],"NV8"],[[129160,129167],"disallowed"],[[129168,129197],"valid",[],"NV8"],[[129198,129295],"disallowed"],[[129296,129304],"valid",[],"NV8"],[[129305,129407],"disallowed"],[[129408,129412],"valid",[],"NV8"],[[129413,129471],"disallowed"],[[129472,129472],"valid",[],"NV8"],[[129473,131069],"disallowed"],[[131070,131071],"disallowed"],[[131072,173782],"valid"],[[173783,173823],"disallowed"],[[173824,177972],"valid"],[[177973,177983],"disallowed"],[[177984,178205],"valid"],[[178206,178207],"disallowed"],[[178208,183969],"valid"],[[183970,194559],"disallowed"],[[194560,194560],"mapped",[20029]],[[194561,194561],"mapped",[20024]],[[194562,194562],"mapped",[20033]],[[194563,194563],"mapped",[131362]],[[194564,194564],"mapped",[20320]],[[194565,194565],"mapped",[20398]],[[194566,194566],"mapped",[20411]],[[194567,194567],"mapped",[20482]],[[194568,194568],"mapped",[20602]],[[194569,194569],"mapped",[20633]],[[194570,194570],"mapped",[20711]],[[194571,194571],"mapped",[20687]],[[194572,194572],"mapped",[13470]],[[194573,194573],"mapped",[132666]],[[194574,194574],"mapped",[20813]],[[194575,194575],"mapped",[20820]],[[194576,194576],"mapped",[20836]],[[194577,194577],"mapped",[20855]],[[194578,194578],"mapped",[132380]],[[194579,194579],"mapped",[13497]],[[194580,194580],"mapped",[20839]],[[194581,194581],"mapped",[20877]],[[194582,194582],"mapped",[132427]],[[194583,194583],"mapped",[20887]],[[194584,194584],"mapped",[20900]],[[194585,194585],"mapped",[20172]],[[194586,194586],"mapped",[20908]],[[194587,194587],"mapped",[20917]],[[194588,194588],"mapped",[168415]],[[194589,194589],"mapped",[20981]],[[194590,194590],"mapped",[20995]],[[194591,194591],"mapped",[13535]],[[194592,194592],"mapped",[21051]],[[194593
,194593],"mapped",[21062]],[[194594,194594],"mapped",[21106]],[[194595,194595],"mapped",[21111]],[[194596,194596],"mapped",[13589]],[[194597,194597],"mapped",[21191]],[[194598,194598],"mapped",[21193]],[[194599,194599],"mapped",[21220]],[[194600,194600],"mapped",[21242]],[[194601,194601],"mapped",[21253]],[[194602,194602],"mapped",[21254]],[[194603,194603],"mapped",[21271]],[[194604,194604],"mapped",[21321]],[[194605,194605],"mapped",[21329]],[[194606,194606],"mapped",[21338]],[[194607,194607],"mapped",[21363]],[[194608,194608],"mapped",[21373]],[[194609,194611],"mapped",[21375]],[[194612,194612],"mapped",[133676]],[[194613,194613],"mapped",[28784]],[[194614,194614],"mapped",[21450]],[[194615,194615],"mapped",[21471]],[[194616,194616],"mapped",[133987]],[[194617,194617],"mapped",[21483]],[[194618,194618],"mapped",[21489]],[[194619,194619],"mapped",[21510]],[[194620,194620],"mapped",[21662]],[[194621,194621],"mapped",[21560]],[[194622,194622],"mapped",[21576]],[[194623,194623],"mapped",[21608]],[[194624,194624],"mapped",[21666]],[[194625,194625],"mapped",[21750]],[[194626,194626],"mapped",[21776]],[[194627,194627],"mapped",[21843]],[[194628,194628],"mapped",[21859]],[[194629,194630],"mapped",[21892]],[[194631,194631],"mapped",[21913]],[[194632,194632],"mapped",[21931]],[[194633,194633],"mapped",[21939]],[[194634,194634],"mapped",[21954]],[[194635,194635],"mapped",[22294]],[[194636,194636],"mapped",[22022]],[[194637,194637],"mapped",[22295]],[[194638,194638],"mapped",[22097]],[[194639,194639],"mapped",[22132]],[[194640,194640],"mapped",[20999]],[[194641,194641],"mapped",[22766]],[[194642,194642],"mapped",[22478]],[[194643,194643],"mapped",[22516]],[[194644,194644],"mapped",[22541]],[[194645,194645],"mapped",[22411]],[[194646,194646],"mapped",[22578]],[[194647,194647],"mapped",[22577]],[[194648,194648],"mapped",[22700]],[[194649,194649],"mapped",[136420]],[[194650,194650],"mapped",[22770]],[[194651,194651],"mapped",[22775]],[[194652,194652],"mapped",[22790]],[[194653,194653],"mapped",[22810]],[[194654,194654],"mapped",[22818]],[[194655,194655],"mapped",[22882]],[[194656,194656],"mapped",[136872]],[[194657,194657],"mapped",[136938]],[[194658,194658],"mapped",[23020]],[[194659,194659],"mapped",[23067]],[[194660,194660],"mapped",[23079]],[[194661,194661],"mapped",[23000]],[[194662,194662],"mapped",[23142]],[[194663,194663],"mapped",[14062]],[[194664,194664],"disallowed"],[[194665,194665],"mapped",[23304]],[[194666,194667],"mapped",[23358]],[[194668,194668],"mapped",[137672]],[[194669,194669],"mapped",[23491]],[[194670,194670],"mapped",[23512]],[[194671,194671],"mapped",[23527]],[[194672,194672],"mapped",[23539]],[[194673,194673],"mapped",[138008]],[[194674,194674],"mapped",[23551]],[[194675,194675],"mapped",[23558]],[[194676,194676],"disallowed"],[[194677,194677],"mapped",[23586]],[[194678,194678],"mapped",[14209]],[[194679,194679],"mapped",[23648]],[[194680,194680],"mapped",[23662]],[[194681,194681],"mapped",[23744]],[[194682,194682],"mapped",[23693]],[[194683,194683],"mapped",[138724]],[[194684,194684],"mapped",[23875]],[[194685,194685],"mapped",[138726]],[[194686,194686],"mapped",[23918]],[[194687,194687],"mapped",[23915]],[[194688,194688],"mapped",[23932]],[[194689,194689],"mapped",[24033]],[[194690,194690],"mapped",[24034]],[[194691,194691],"mapped",[14383]],[[194692,194692],"mapped",[24061]],[[194693,194693],"mapped",[24104]],[[194694,194694],"mapped",[24125]],[[194695,194695],"mapped",[24169]],[[194696,194696],"mapped",[14434]],[[194697,194697],"mapped",[139651]],[[194698,194698],"mapped",
[14460]],[[194699,194699],"mapped",[24240]],[[194700,194700],"mapped",[24243]],[[194701,194701],"mapped",[24246]],[[194702,194702],"mapped",[24266]],[[194703,194703],"mapped",[172946]],[[194704,194704],"mapped",[24318]],[[194705,194706],"mapped",[140081]],[[194707,194707],"mapped",[33281]],[[194708,194709],"mapped",[24354]],[[194710,194710],"mapped",[14535]],[[194711,194711],"mapped",[144056]],[[194712,194712],"mapped",[156122]],[[194713,194713],"mapped",[24418]],[[194714,194714],"mapped",[24427]],[[194715,194715],"mapped",[14563]],[[194716,194716],"mapped",[24474]],[[194717,194717],"mapped",[24525]],[[194718,194718],"mapped",[24535]],[[194719,194719],"mapped",[24569]],[[194720,194720],"mapped",[24705]],[[194721,194721],"mapped",[14650]],[[194722,194722],"mapped",[14620]],[[194723,194723],"mapped",[24724]],[[194724,194724],"mapped",[141012]],[[194725,194725],"mapped",[24775]],[[194726,194726],"mapped",[24904]],[[194727,194727],"mapped",[24908]],[[194728,194728],"mapped",[24910]],[[194729,194729],"mapped",[24908]],[[194730,194730],"mapped",[24954]],[[194731,194731],"mapped",[24974]],[[194732,194732],"mapped",[25010]],[[194733,194733],"mapped",[24996]],[[194734,194734],"mapped",[25007]],[[194735,194735],"mapped",[25054]],[[194736,194736],"mapped",[25074]],[[194737,194737],"mapped",[25078]],[[194738,194738],"mapped",[25104]],[[194739,194739],"mapped",[25115]],[[194740,194740],"mapped",[25181]],[[194741,194741],"mapped",[25265]],[[194742,194742],"mapped",[25300]],[[194743,194743],"mapped",[25424]],[[194744,194744],"mapped",[142092]],[[194745,194745],"mapped",[25405]],[[194746,194746],"mapped",[25340]],[[194747,194747],"mapped",[25448]],[[194748,194748],"mapped",[25475]],[[194749,194749],"mapped",[25572]],[[194750,194750],"mapped",[142321]],[[194751,194751],"mapped",[25634]],[[194752,194752],"mapped",[25541]],[[194753,194753],"mapped",[25513]],[[194754,194754],"mapped",[14894]],[[194755,194755],"mapped",[25705]],[[194756,194756],"mapped",[25726]],[[194757,194757],"mapped",[25757]],[[194758,194758],"mapped",[25719]],[[194759,194759],"mapped",[14956]],[[194760,194760],"mapped",[25935]],[[194761,194761],"mapped",[25964]],[[194762,194762],"mapped",[143370]],[[194763,194763],"mapped",[26083]],[[194764,194764],"mapped",[26360]],[[194765,194765],"mapped",[26185]],[[194766,194766],"mapped",[15129]],[[194767,194767],"mapped",[26257]],[[194768,194768],"mapped",[15112]],[[194769,194769],"mapped",[15076]],[[194770,194770],"mapped",[20882]],[[194771,194771],"mapped",[20885]],[[194772,194772],"mapped",[26368]],[[194773,194773],"mapped",[26268]],[[194774,194774],"mapped",[32941]],[[194775,194775],"mapped",[17369]],[[194776,194776],"mapped",[26391]],[[194777,194777],"mapped",[26395]],[[194778,194778],"mapped",[26401]],[[194779,194779],"mapped",[26462]],[[194780,194780],"mapped",[26451]],[[194781,194781],"mapped",[144323]],[[194782,194782],"mapped",[15177]],[[194783,194783],"mapped",[26618]],[[194784,194784],"mapped",[26501]],[[194785,194785],"mapped",[26706]],[[194786,194786],"mapped",[26757]],[[194787,194787],"mapped",[144493]],[[194788,194788],"mapped",[26766]],[[194789,194789],"mapped",[26655]],[[194790,194790],"mapped",[26900]],[[194791,194791],"mapped",[15261]],[[194792,194792],"mapped",[26946]],[[194793,194793],"mapped",[27043]],[[194794,194794],"mapped",[27114]],[[194795,194795],"mapped",[27304]],[[194796,194796],"mapped",[145059]],[[194797,194797],"mapped",[27355]],[[194798,194798],"mapped",[15384]],[[194799,194799],"mapped",[27425]],[[194800,194800],"mapped",[145575]],[[194801,194801],"mapped",[27476]]
,[[194802,194802],"mapped",[15438]],[[194803,194803],"mapped",[27506]],[[194804,194804],"mapped",[27551]],[[194805,194805],"mapped",[27578]],[[194806,194806],"mapped",[27579]],[[194807,194807],"mapped",[146061]],[[194808,194808],"mapped",[138507]],[[194809,194809],"mapped",[146170]],[[194810,194810],"mapped",[27726]],[[194811,194811],"mapped",[146620]],[[194812,194812],"mapped",[27839]],[[194813,194813],"mapped",[27853]],[[194814,194814],"mapped",[27751]],[[194815,194815],"mapped",[27926]],[[194816,194816],"mapped",[27966]],[[194817,194817],"mapped",[28023]],[[194818,194818],"mapped",[27969]],[[194819,194819],"mapped",[28009]],[[194820,194820],"mapped",[28024]],[[194821,194821],"mapped",[28037]],[[194822,194822],"mapped",[146718]],[[194823,194823],"mapped",[27956]],[[194824,194824],"mapped",[28207]],[[194825,194825],"mapped",[28270]],[[194826,194826],"mapped",[15667]],[[194827,194827],"mapped",[28363]],[[194828,194828],"mapped",[28359]],[[194829,194829],"mapped",[147153]],[[194830,194830],"mapped",[28153]],[[194831,194831],"mapped",[28526]],[[194832,194832],"mapped",[147294]],[[194833,194833],"mapped",[147342]],[[194834,194834],"mapped",[28614]],[[194835,194835],"mapped",[28729]],[[194836,194836],"mapped",[28702]],[[194837,194837],"mapped",[28699]],[[194838,194838],"mapped",[15766]],[[194839,194839],"mapped",[28746]],[[194840,194840],"mapped",[28797]],[[194841,194841],"mapped",[28791]],[[194842,194842],"mapped",[28845]],[[194843,194843],"mapped",[132389]],[[194844,194844],"mapped",[28997]],[[194845,194845],"mapped",[148067]],[[194846,194846],"mapped",[29084]],[[194847,194847],"disallowed"],[[194848,194848],"mapped",[29224]],[[194849,194849],"mapped",[29237]],[[194850,194850],"mapped",[29264]],[[194851,194851],"mapped",[149000]],[[194852,194852],"mapped",[29312]],[[194853,194853],"mapped",[29333]],[[194854,194854],"mapped",[149301]],[[194855,194855],"mapped",[149524]],[[194856,194856],"mapped",[29562]],[[194857,194857],"mapped",[29579]],[[194858,194858],"mapped",[16044]],[[194859,194859],"mapped",[29605]],[[194860,194861],"mapped",[16056]],[[194862,194862],"mapped",[29767]],[[194863,194863],"mapped",[29788]],[[194864,194864],"mapped",[29809]],[[194865,194865],"mapped",[29829]],[[194866,194866],"mapped",[29898]],[[194867,194867],"mapped",[16155]],[[194868,194868],"mapped",[29988]],[[194869,194869],"mapped",[150582]],[[194870,194870],"mapped",[30014]],[[194871,194871],"mapped",[150674]],[[194872,194872],"mapped",[30064]],[[194873,194873],"mapped",[139679]],[[194874,194874],"mapped",[30224]],[[194875,194875],"mapped",[151457]],[[194876,194876],"mapped",[151480]],[[194877,194877],"mapped",[151620]],[[194878,194878],"mapped",[16380]],[[194879,194879],"mapped",[16392]],[[194880,194880],"mapped",[30452]],[[194881,194881],"mapped",[151795]],[[194882,194882],"mapped",[151794]],[[194883,194883],"mapped",[151833]],[[194884,194884],"mapped",[151859]],[[194885,194885],"mapped",[30494]],[[194886,194887],"mapped",[30495]],[[194888,194888],"mapped",[30538]],[[194889,194889],"mapped",[16441]],[[194890,194890],"mapped",[30603]],[[194891,194891],"mapped",[16454]],[[194892,194892],"mapped",[16534]],[[194893,194893],"mapped",[152605]],[[194894,194894],"mapped",[30798]],[[194895,194895],"mapped",[30860]],[[194896,194896],"mapped",[30924]],[[194897,194897],"mapped",[16611]],[[194898,194898],"mapped",[153126]],[[194899,194899],"mapped",[31062]],[[194900,194900],"mapped",[153242]],[[194901,194901],"mapped",[153285]],[[194902,194902],"mapped",[31119]],[[194903,194903],"mapped",[31211]],[[194904,194904],"mapped",[1668
7]],[[194905,194905],"mapped",[31296]],[[194906,194906],"mapped",[31306]],[[194907,194907],"mapped",[31311]],[[194908,194908],"mapped",[153980]],[[194909,194910],"mapped",[154279]],[[194911,194911],"disallowed"],[[194912,194912],"mapped",[16898]],[[194913,194913],"mapped",[154539]],[[194914,194914],"mapped",[31686]],[[194915,194915],"mapped",[31689]],[[194916,194916],"mapped",[16935]],[[194917,194917],"mapped",[154752]],[[194918,194918],"mapped",[31954]],[[194919,194919],"mapped",[17056]],[[194920,194920],"mapped",[31976]],[[194921,194921],"mapped",[31971]],[[194922,194922],"mapped",[32000]],[[194923,194923],"mapped",[155526]],[[194924,194924],"mapped",[32099]],[[194925,194925],"mapped",[17153]],[[194926,194926],"mapped",[32199]],[[194927,194927],"mapped",[32258]],[[194928,194928],"mapped",[32325]],[[194929,194929],"mapped",[17204]],[[194930,194930],"mapped",[156200]],[[194931,194931],"mapped",[156231]],[[194932,194932],"mapped",[17241]],[[194933,194933],"mapped",[156377]],[[194934,194934],"mapped",[32634]],[[194935,194935],"mapped",[156478]],[[194936,194936],"mapped",[32661]],[[194937,194937],"mapped",[32762]],[[194938,194938],"mapped",[32773]],[[194939,194939],"mapped",[156890]],[[194940,194940],"mapped",[156963]],[[194941,194941],"mapped",[32864]],[[194942,194942],"mapped",[157096]],[[194943,194943],"mapped",[32880]],[[194944,194944],"mapped",[144223]],[[194945,194945],"mapped",[17365]],[[194946,194946],"mapped",[32946]],[[194947,194947],"mapped",[33027]],[[194948,194948],"mapped",[17419]],[[194949,194949],"mapped",[33086]],[[194950,194950],"mapped",[23221]],[[194951,194951],"mapped",[157607]],[[194952,194952],"mapped",[157621]],[[194953,194953],"mapped",[144275]],[[194954,194954],"mapped",[144284]],[[194955,194955],"mapped",[33281]],[[194956,194956],"mapped",[33284]],[[194957,194957],"mapped",[36766]],[[194958,194958],"mapped",[17515]],[[194959,194959],"mapped",[33425]],[[194960,194960],"mapped",[33419]],[[194961,194961],"mapped",[33437]],[[194962,194962],"mapped",[21171]],[[194963,194963],"mapped",[33457]],[[194964,194964],"mapped",[33459]],[[194965,194965],"mapped",[33469]],[[194966,194966],"mapped",[33510]],[[194967,194967],"mapped",[158524]],[[194968,194968],"mapped",[33509]],[[194969,194969],"mapped",[33565]],[[194970,194970],"mapped",[33635]],[[194971,194971],"mapped",[33709]],[[194972,194972],"mapped",[33571]],[[194973,194973],"mapped",[33725]],[[194974,194974],"mapped",[33767]],[[194975,194975],"mapped",[33879]],[[194976,194976],"mapped",[33619]],[[194977,194977],"mapped",[33738]],[[194978,194978],"mapped",[33740]],[[194979,194979],"mapped",[33756]],[[194980,194980],"mapped",[158774]],[[194981,194981],"mapped",[159083]],[[194982,194982],"mapped",[158933]],[[194983,194983],"mapped",[17707]],[[194984,194984],"mapped",[34033]],[[194985,194985],"mapped",[34035]],[[194986,194986],"mapped",[34070]],[[194987,194987],"mapped",[160714]],[[194988,194988],"mapped",[34148]],[[194989,194989],"mapped",[159532]],[[194990,194990],"mapped",[17757]],[[194991,194991],"mapped",[17761]],[[194992,194992],"mapped",[159665]],[[194993,194993],"mapped",[159954]],[[194994,194994],"mapped",[17771]],[[194995,194995],"mapped",[34384]],[[194996,194996],"mapped",[34396]],[[194997,194997],"mapped",[34407]],[[194998,194998],"mapped",[34409]],[[194999,194999],"mapped",[34473]],[[195000,195000],"mapped",[34440]],[[195001,195001],"mapped",[34574]],[[195002,195002],"mapped",[34530]],[[195003,195003],"mapped",[34681]],[[195004,195004],"mapped",[34600]],[[195005,195005],"mapped",[34667]],[[195006,195006],"mapped",[346
94]],[[195007,195007],"disallowed"],[[195008,195008],"mapped",[34785]],[[195009,195009],"mapped",[34817]],[[195010,195010],"mapped",[17913]],[[195011,195011],"mapped",[34912]],[[195012,195012],"mapped",[34915]],[[195013,195013],"mapped",[161383]],[[195014,195014],"mapped",[35031]],[[195015,195015],"mapped",[35038]],[[195016,195016],"mapped",[17973]],[[195017,195017],"mapped",[35066]],[[195018,195018],"mapped",[13499]],[[195019,195019],"mapped",[161966]],[[195020,195020],"mapped",[162150]],[[195021,195021],"mapped",[18110]],[[195022,195022],"mapped",[18119]],[[195023,195023],"mapped",[35488]],[[195024,195024],"mapped",[35565]],[[195025,195025],"mapped",[35722]],[[195026,195026],"mapped",[35925]],[[195027,195027],"mapped",[162984]],[[195028,195028],"mapped",[36011]],[[195029,195029],"mapped",[36033]],[[195030,195030],"mapped",[36123]],[[195031,195031],"mapped",[36215]],[[195032,195032],"mapped",[163631]],[[195033,195033],"mapped",[133124]],[[195034,195034],"mapped",[36299]],[[195035,195035],"mapped",[36284]],[[195036,195036],"mapped",[36336]],[[195037,195037],"mapped",[133342]],[[195038,195038],"mapped",[36564]],[[195039,195039],"mapped",[36664]],[[195040,195040],"mapped",[165330]],[[195041,195041],"mapped",[165357]],[[195042,195042],"mapped",[37012]],[[195043,195043],"mapped",[37105]],[[195044,195044],"mapped",[37137]],[[195045,195045],"mapped",[165678]],[[195046,195046],"mapped",[37147]],[[195047,195047],"mapped",[37432]],[[195048,195048],"mapped",[37591]],[[195049,195049],"mapped",[37592]],[[195050,195050],"mapped",[37500]],[[195051,195051],"mapped",[37881]],[[195052,195052],"mapped",[37909]],[[195053,195053],"mapped",[166906]],[[195054,195054],"mapped",[38283]],[[195055,195055],"mapped",[18837]],[[195056,195056],"mapped",[38327]],[[195057,195057],"mapped",[167287]],[[195058,195058],"mapped",[18918]],[[195059,195059],"mapped",[38595]],[[195060,195060],"mapped",[23986]],[[195061,195061],"mapped",[38691]],[[195062,195062],"mapped",[168261]],[[195063,195063],"mapped",[168474]],[[195064,195064],"mapped",[19054]],[[195065,195065],"mapped",[19062]],[[195066,195066],"mapped",[38880]],[[195067,195067],"mapped",[168970]],[[195068,195068],"mapped",[19122]],[[195069,195069],"mapped",[169110]],[[195070,195071],"mapped",[38923]],[[195072,195072],"mapped",[38953]],[[195073,195073],"mapped",[169398]],[[195074,195074],"mapped",[39138]],[[195075,195075],"mapped",[19251]],[[195076,195076],"mapped",[39209]],[[195077,195077],"mapped",[39335]],[[195078,195078],"mapped",[39362]],[[195079,195079],"mapped",[39422]],[[195080,195080],"mapped",[19406]],[[195081,195081],"mapped",[170800]],[[195082,195082],"mapped",[39698]],[[195083,195083],"mapped",[40000]],[[195084,195084],"mapped",[40189]],[[195085,195085],"mapped",[19662]],[[195086,195086],"mapped",[19693]],[[195087,195087],"mapped",[40295]],[[195088,195088],"mapped",[172238]],[[195089,195089],"mapped",[19704]],[[195090,195090],"mapped",[172293]],[[195091,195091],"mapped",[172558]],[[195092,195092],"mapped",[172689]],[[195093,195093],"mapped",[40635]],[[195094,195094],"mapped",[19798]],[[195095,195095],"mapped",[40697]],[[195096,195096],"mapped",[40702]],[[195097,195097],"mapped",[40709]],[[195098,195098],"mapped",[40719]],[[195099,195099],"mapped",[40726]],[[195100,195100],"mapped",[40763]],[[195101,195101],"mapped",[173568]],[[195102,196605],"disallowed"],[[196606,196607],"disallowed"],[[196608,262141],"disallowed"],[[262142,262143],"disallowed"],[[262144,327677],"disallowed"],[[327678,327679],"disallowed"],[[327680,393213],"disallowed"],[[393214,393215],"disall
owed"],[[393216,458749],"disallowed"],[[458750,458751],"disallowed"],[[458752,524285],"disallowed"],[[524286,524287],"disallowed"],[[524288,589821],"disallowed"],[[589822,589823],"disallowed"],[[589824,655357],"disallowed"],[[655358,655359],"disallowed"],[[655360,720893],"disallowed"],[[720894,720895],"disallowed"],[[720896,786429],"disallowed"],[[786430,786431],"disallowed"],[[786432,851965],"disallowed"],[[851966,851967],"disallowed"],[[851968,917501],"disallowed"],[[917502,917503],"disallowed"],[[917504,917504],"disallowed"],[[917505,917505],"disallowed"],[[917506,917535],"disallowed"],[[917536,917631],"disallowed"],[[917632,917759],"disallowed"],[[917760,917999],"ignored"],[[918000,983037],"disallowed"],[[983038,983039],"disallowed"],[[983040,1048573],"disallowed"],[[1048574,1048575],"disallowed"],[[1048576,1114109],"disallowed"],[[1114110,1114111],"disallowed"]]'); - -/***/ }), - -/***/ 2357: -/***/ ((module) => { - -"use strict"; -module.exports = require("assert"); - -/***/ }), - -/***/ 7619: -/***/ ((module) => { - -"use strict"; -module.exports = require("constants"); - -/***/ }), - -/***/ 8614: -/***/ ((module) => { - -"use strict"; -module.exports = require("events"); - -/***/ }), - -/***/ 5747: -/***/ ((module) => { - -"use strict"; -module.exports = require("fs"); - -/***/ }), - -/***/ 8605: -/***/ ((module) => { - -"use strict"; -module.exports = require("http"); - -/***/ }), - -/***/ 7211: -/***/ ((module) => { - -"use strict"; -module.exports = require("https"); - -/***/ }), - -/***/ 1631: -/***/ ((module) => { - -"use strict"; -module.exports = require("net"); - -/***/ }), - -/***/ 2087: -/***/ ((module) => { - -"use strict"; -module.exports = require("os"); - -/***/ }), - -/***/ 5622: -/***/ ((module) => { - -"use strict"; -module.exports = require("path"); - -/***/ }), - -/***/ 4213: -/***/ ((module) => { - -"use strict"; -module.exports = require("punycode"); - -/***/ }), - -/***/ 2413: -/***/ ((module) => { - -"use strict"; -module.exports = require("stream"); - -/***/ }), - -/***/ 4016: -/***/ ((module) => { - -"use strict"; -module.exports = require("tls"); - -/***/ }), - -/***/ 8835: -/***/ ((module) => { - -"use strict"; -module.exports = require("url"); - -/***/ }), - -/***/ 1669: -/***/ ((module) => { - -"use strict"; -module.exports = require("util"); - -/***/ }), - -/***/ 8761: -/***/ ((module) => { - -"use strict"; -module.exports = require("zlib"); - -/***/ }) - -/******/ }); -/************************************************************************/ -/******/ // The module cache -/******/ var __webpack_module_cache__ = {}; -/******/ -/******/ // The require function -/******/ function __nccwpck_require__(moduleId) { -/******/ // Check if module is in cache -/******/ var cachedModule = __webpack_module_cache__[moduleId]; -/******/ if (cachedModule !== undefined) { -/******/ return cachedModule.exports; -/******/ } -/******/ // Create a new module (and put it into the cache) -/******/ var module = __webpack_module_cache__[moduleId] = { -/******/ // no module.id needed -/******/ // no module.loaded needed -/******/ exports: {} -/******/ }; -/******/ -/******/ // Execute the module function -/******/ var threw = true; -/******/ try { -/******/ __webpack_modules__[moduleId].call(module.exports, module, module.exports, __nccwpck_require__); -/******/ threw = false; -/******/ } finally { -/******/ if(threw) delete __webpack_module_cache__[moduleId]; -/******/ } -/******/ -/******/ // Return the exports of the module -/******/ return module.exports; -/******/ } 
-/******/ -/************************************************************************/ -/******/ /* webpack/runtime/compat */ -/******/ -/******/ if (typeof __nccwpck_require__ !== 'undefined') __nccwpck_require__.ab = __dirname + "/"; -/******/ -/************************************************************************/ -/******/ -/******/ // startup -/******/ // Load entry module and return exports -/******/ // This entry module is referenced by other modules so it can't be inlined -/******/ var __webpack_exports__ = __nccwpck_require__(3109); -/******/ module.exports = __webpack_exports__; -/******/ -/******/ })() -; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/.github/actions/collect-charts/dist/index.js.map b/.github/actions/collect-charts/dist/index.js.map deleted file mode 100644 index 5b2e3c01..00000000 --- a/.github/actions/collect-charts/dist/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","sources":["../webpack://verify-chart-version-bump/./lib/main.js","../webpack://verify-chart-version-bump/./node_modules/@actions/core/lib/command.js","../webpack://verify-chart-version-bump/./node_modules/@actions/core/lib/core.js","../webpack://verify-chart-version-bump/./node_modules/@actions/core/lib/file-command.js","../webpack://verify-chart-version-bump/./node_modules/@actions/core/lib/oidc-utils.js","../webpack://verify-chart-version-bump/./node_modules/@actions/core/lib/path-utils.js","../webpack://verify-chart-version-bump/./node_modules/@actions/core/lib/summary.js","../webpack://verify-chart-version-bump/./node_modules/@actions/core/lib/utils.js","../webpack://verify-chart-version-bump/./node_modules/@actions/github/lib/context.js","../webpack://verify-chart-version-bump/./node_modules/@actions/github/lib/github.js","../webpack://verify-chart-version-bump/./node_modules/@actions/github/lib/internal/utils.js","../webpack://verify-chart-version-bump/./node_modules/@actions/github/lib/utils.js","../webpack://verify-chart-version-bump/./node_modules/@actions/http-client/lib/auth.js","../webpack://verify-chart-version-bump/./node_modules/@actions/http-client/lib/index.js","../webpack://verify-chart-version-bump/./node_modules/@actions/http-client/lib/proxy.js","../webpack://verify-chart-version-bump/./node_modules/@octokit/auth-token/dist-node/index.js","../webpack://verify-chart-version-bump/./node_modules/@octokit/core/dist-node/index.js","../webpack://verify-chart-version-bump/./node_modules/@octokit/endpoint/dist-node/index.js","../webpack://verify-chart-version-bump/./node_modules/@octokit/graphql/dist-node/index.js","../webpack://verify-chart-version-bump/./node_modules/@octokit/plugin-paginate-rest/dist-node/index.js","../webpack://verify-chart-version-bump/./node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js","../webpack://verify-chart-version-bump/./node_modules/@octokit/request-error/dist-node/index.js","../webpack://verify-chart-version-bump/./node_modules/@octokit/request/dist-node/index.js","../webpack://verify-chart-version-bump/./node_modules/before-after-hook/index.js","../webpack://verify-chart-version-bump/./node_modules/before-after-hook/lib/add.js","../webpack://verify-chart-version-bump/./node_modules/before-after-hook/lib/register.js","../webpack://verify-chart-version-bump/./node_modules/before-after-hook/lib/remove.js","../webpack://verify-chart-version-bump/./node_modules/deprecation/dist-node/index.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/copy/copy-sync.js","..
/webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/copy/copy.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/copy/index.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/empty/index.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/ensure/file.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/ensure/index.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/ensure/link.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/ensure/symlink-paths.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/ensure/symlink-type.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/ensure/symlink.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/fs/index.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/index.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/json/index.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/json/jsonfile.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/json/output-json-sync.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/json/output-json.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/mkdirs/index.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/mkdirs/make-dir.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/mkdirs/utils.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/move/index.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/move/move-sync.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/move/move.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/output-file/index.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/path-exists/index.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/remove/index.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/remove/rimraf.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/util/stat.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/util/utimes.js","../webpack://verify-chart-version-bump/./node_modules/graceful-fs/clone.js","../webpack://verify-chart-version-bump/./node_modules/graceful-fs/graceful-fs.js","../webpack://verify-chart-version-bump/./node_modules/graceful-fs/legacy-streams.js","../webpack://verify-chart-version-bump/./node_modules/graceful-fs/polyfills.js","../webpack://verify-chart-version-bump/./node_modules/is-plain-object/dist/is-plain-object.js","../webpack://verify-chart-version-bump/./node_modules/jsonfile/index.js","../webpack://verify-chart-version-bump/./node_modules/jsonfile/utils.js","../webpack://verify-chart-version-bump/./node_modules/node-fetch/lib/index.js","../webpack://verify-chart-version-bump/./node_modules/once/once.js","../webpack://verify-chart-version-bump/./node_modules/tr46/index.js","../webpack://verify-chart-version-bump/./node_modules/tunnel/index.js","../webpack://verify-chart-version-bump/./node_modules/tunnel/lib/tunnel.js","../webpack://verify-chart-version-bump/./node_modules/universal-user-agent/dist-node/index.js","../webpack://verify-chart-version-bump/./node_modules/universalify/index.js","../webpack://verify-chart-version-bump/./node_modules/webidl-conversions/lib/index.js","../webpack://verify-chart-version-bump/./nod
e_modules/whatwg-url/lib/URL-impl.js","../webpack://verify-chart-version-bump/./node_modules/whatwg-url/lib/URL.js","../webpack://verify-chart-version-bump/./node_modules/whatwg-url/lib/public-api.js","../webpack://verify-chart-version-bump/./node_modules/whatwg-url/lib/url-state-machine.js","../webpack://verify-chart-version-bump/./node_modules/whatwg-url/lib/utils.js","../webpack://verify-chart-version-bump/./node_modules/wrappy/wrappy.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/compose-collection.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/compose-doc.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/compose-node.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/compose-scalar.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/composer.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/resolve-block-map.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/resolve-block-scalar.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/resolve-block-seq.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/resolve-end.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/resolve-flow-collection.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/resolve-flow-scalar.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/resolve-props.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/util-contains-newline.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/util-empty-scalar-position.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/util-flow-indent-check.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/util-map-includes.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/doc/Document.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/doc/anchors.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/doc/applyReviver.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/doc/createNode.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/doc/directives.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/errors.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/index.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/log.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/nodes/Alias.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/nodes/Collection.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/nodes/Node.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/nodes/Pair.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/nodes/Scalar.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/nodes/YAMLMap.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/nodes/YAMLSeq.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/nodes/addPairToJSMap.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/nodes/toJS.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/parse/cst-scalar.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/parse/cst-stringify.js","../webpack://verify-chart-vers
ion-bump/./node_modules/yaml/dist/parse/cst-visit.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/parse/cst.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/parse/lexer.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/parse/line-counter.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/parse/parser.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/public-api.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/Schema.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/common/map.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/common/null.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/common/seq.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/common/string.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/core/bool.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/core/float.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/core/int.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/core/schema.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/json/schema.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/tags.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/yaml-1.1/binary.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/yaml-1.1/bool.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/yaml-1.1/float.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/yaml-1.1/int.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/yaml-1.1/omap.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/yaml-1.1/pairs.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/yaml-1.1/schema.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/yaml-1.1/set.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/yaml-1.1/timestamp.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/stringify/foldFlowLines.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/stringify/stringify.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/stringify/stringifyCollection.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/stringify/stringifyComment.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/stringify/stringifyDocument.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/stringify/stringifyNumber.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/stringify/stringifyPair.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/stringify/stringifyString.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/visit.js","../webpack://verify-chart-version-bump/./node_modules/@vercel/ncc/dist/ncc/@@notfound.js","../webpack://verify-chart-version-bump/external \"assert\"","../webpack://verify-chart-version-bump/external \"constants\"","../webpack://verify-chart-version-bump/external \"events\"","../webpack://verify-chart-version-bump/external \"fs\"","../webpack://verify-chart-version-bump/external \"http\"","../webpack://verify-chart-version-bump/external 
\"https\"","../webpack://verify-chart-version-bump/external \"net\"","../webpack://verify-chart-version-bump/external \"os\"","../webpack://verify-chart-version-bump/external \"path\"","../webpack://verify-chart-version-bump/external \"punycode\"","../webpack://verify-chart-version-bump/external \"stream\"","../webpack://verify-chart-version-bump/external \"tls\"","../webpack://verify-chart-version-bump/external \"url\"","../webpack://verify-chart-version-bump/external \"util\"","../webpack://verify-chart-version-bump/external \"zlib\"","../webpack://verify-chart-version-bump/webpack/bootstrap","../webpack://verify-chart-version-bump/webpack/runtime/compat","../webpack://verify-chart-version-bump/webpack/startup"],"sourcesContent":["\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst core = __importStar(require(\"@actions/core\"));\nconst github = __importStar(require(\"@actions/github\"));\nconst path = __importStar(require(\"path\"));\nconst YAML = __importStar(require(\"yaml\"));\nconst fs = __importStar(require(\"fs-extra\"));\nfunction getErrorMessage(error) {\n if (error instanceof Error)\n return error.message;\n return String(error);\n}\nfunction requestAddedModifiedFiles(baseCommit, headCommit, githubToken) {\n return __awaiter(this, void 0, void 0, function* () {\n let result = [];\n const octokit = github.getOctokit(githubToken);\n core.info(`Base commit: ${baseCommit}`);\n core.info(`Head commit: ${headCommit}`);\n // Use GitHub's compare two commits API.\n const response = yield octokit.rest.repos.compareCommits({\n base: baseCommit,\n head: headCommit,\n owner: github.context.repo.owner,\n repo: github.context.repo.repo,\n });\n // Ensure that the request was successful.\n if (response.status !== 200) {\n throw new Error(`The GitHub API returned ${response.status}, expected 200.`);\n }\n // Ensure that the head commit is ahead of the base commit.\n if (response.data.status !== \"ahead\") {\n throw new Error(`The head commit for this ${github.context.eventName} event is not ahead of the base commit.`);\n }\n const responseFiles = response.data.files || [];\n responseFiles.forEach((file) => {\n const filestatus = file.status;\n if (filestatus == \"added\" || filestatus == \"modified\") {\n result.push(file.filename);\n }\n });\n return result;\n });\n}\nfunction requestAllFiles(commit, githubToken) {\n return __awaiter(this, void 0, void 0, function* () {\n let result = [];\n const octokit = github.getOctokit(githubToken);\n core.info(`Commit SHA: ${commit}`);\n const response = yield octokit.rest.git.getTree({\n tree_sha: commit,\n owner: github.context.repo.owner,\n repo: github.context.repo.repo,\n recursive: \"true\",\n });\n // Ensure that the request was successful.\n if (response.status !== 200) {\n throw new Error(`The GitHub API returned ${response.status}, expected 200.`);\n }\n const responseTreeItems = response.data.tree || [];\n responseTreeItems.forEach((item) => {\n if (item.type == \"blob\" && item.path) {\n result.push(item.path);\n }\n });\n return result;\n });\n}\nfunction getRepoConfig(configPath) {\n return __awaiter(this, void 0, void 0, function* () {\n // Ensure that the repo config file exists.\n if (!(yield fs.pathExists(configPath))) {\n throw new Error(`${configPath} Does not exist!`);\n }\n const repoConfigRaw = yield fs.readFile(configPath, \"utf8\");\n const repoConfig = yield YAML.parse(repoConfigRaw);\n return repoConfig;\n });\n}\nfunction filterChangedCharts(files, parentFolder) {\n const filteredChartFiles = files.filter((file) => {\n const rel = path.relative(parentFolder, file);\n return !rel.startsWith(\"../\") && rel !== \"..\";\n });\n let changedCharts = [];\n filteredChartFiles.forEach((file) => {\n const absoluteParentFolder = path.resolve(parentFolder);\n const absoluteFileDirname = path.resolve(path.dirname(file));\n const relativeFileDirname = absoluteFileDirname.slice(absoluteParentFolder.length + 1);\n const chartPathParts = relativeFileDirname.split(\"/\");\n const chartType = chartPathParts[0];\n const chartName = chartPathParts[1];\n if (chartType && chartName) {\n 
changedCharts.push(`${chartType}/${chartName}`);\n }\n });\n // Return only unique items\n return changedCharts.filter((item, index) => changedCharts.indexOf(item) === index);\n}\nfunction run() {\n var _a, _b, _c, _d;\n return __awaiter(this, void 0, void 0, function* () {\n try {\n const githubToken = core.getInput(\"token\", { required: true });\n const chartsFolder = core.getInput(\"chartsFolder\", { required: true });\n const repoConfigFilePath = core.getInput(\"repoConfigFile\", {\n required: true,\n });\n let getAllCharts = core.getInput(\"getAllCharts\", { required: false });\n const overrideCharts = core.getInput(\"overrideCharts\", { required: false });\n const repoConfig = yield getRepoConfig(repoConfigFilePath);\n core.info(`Repo configuration: ${JSON.stringify(repoConfig, undefined, 2)}`);\n if (overrideCharts && overrideCharts != \"[]\") {\n const responseCharts = YAML.parse(overrideCharts);\n core.info(`Charts: ${JSON.stringify(responseCharts, undefined, 2)}`);\n core.setOutput(\"charts\", responseCharts);\n return;\n }\n const eventName = github.context.eventName;\n let baseCommit;\n let headCommit;\n switch (eventName) {\n case \"pull_request\":\n baseCommit = (_b = (_a = github.context.payload.pull_request) === null || _a === void 0 ? void 0 : _a.base) === null || _b === void 0 ? void 0 : _b.sha;\n headCommit = (_d = (_c = github.context.payload.pull_request) === null || _c === void 0 ? void 0 : _c.head) === null || _d === void 0 ? void 0 : _d.sha;\n break;\n case \"push\":\n baseCommit = github.context.payload.before;\n headCommit = github.context.payload.after;\n break;\n case \"workflow_dispatch\":\n getAllCharts = \"true\";\n baseCommit = \"\";\n headCommit = github.context.sha;\n break;\n default:\n throw new Error(`This action only supports pull requests, pushes and workflow_dispatch,` +\n `${github.context.eventName} events are not supported.`);\n }\n let responseFiles;\n if (getAllCharts === \"true\") {\n responseFiles = yield requestAllFiles(headCommit, githubToken);\n }\n else {\n responseFiles = yield requestAddedModifiedFiles(baseCommit, headCommit, githubToken);\n }\n const changedCharts = filterChangedCharts(responseFiles, chartsFolder);\n const chartsToInstall = changedCharts.filter((x) => !repoConfig[\"excluded-charts-install\"].includes(x));\n const chartsToLint = changedCharts.filter((x) => !repoConfig[\"excluded-charts-lint\"].includes(x));\n core.info(`Charts: ${JSON.stringify(changedCharts, undefined, 2)}`);\n core.info(`Charts to lint: ${JSON.stringify(chartsToLint, undefined, 2)}`);\n core.info(`Charts to install: ${JSON.stringify(chartsToInstall, undefined, 2)}`);\n core.setOutput(\"charts\", changedCharts);\n core.setOutput(\"chartsToInstall\", chartsToInstall);\n core.setOutput(\"chartsToLint\", chartsToLint);\n }\n catch (error) {\n core.setFailed(getErrorMessage(error));\n }\n });\n}\nrun();\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.issue = exports.issueCommand = void 0;\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\n/**\n * Commands\n *\n * Command Format:\n * ::name key=value,key=value::message\n *\n * Examples:\n * ::warning::This is the message\n * ::set-env name=MY_VAR::some value\n */\nfunction issueCommand(command, properties, message) {\n const cmd = new Command(command, properties, message);\n process.stdout.write(cmd.toString() + os.EOL);\n}\nexports.issueCommand = issueCommand;\nfunction issue(name, message = '') {\n issueCommand(name, {}, message);\n}\nexports.issue = issue;\nconst CMD_STRING = '::';\nclass Command {\n constructor(command, properties, message) {\n if (!command) {\n command = 'missing.command';\n }\n this.command = command;\n this.properties = properties;\n this.message = message;\n }\n toString() {\n let cmdStr = CMD_STRING + this.command;\n if (this.properties && Object.keys(this.properties).length > 0) {\n cmdStr += ' ';\n let first = true;\n for (const key in this.properties) {\n if (this.properties.hasOwnProperty(key)) {\n const val = this.properties[key];\n if (val) {\n if (first) {\n first = false;\n }\n else {\n cmdStr += ',';\n }\n cmdStr += `${key}=${escapeProperty(val)}`;\n }\n }\n }\n }\n cmdStr += `${CMD_STRING}${escapeData(this.message)}`;\n return cmdStr;\n }\n}\nfunction escapeData(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A');\n}\nfunction escapeProperty(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A')\n .replace(/:/g, '%3A')\n .replace(/,/g, '%2C');\n}\n//# sourceMappingURL=command.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;\nconst command_1 = require(\"./command\");\nconst file_command_1 = require(\"./file-command\");\nconst utils_1 = require(\"./utils\");\nconst os = __importStar(require(\"os\"));\nconst path = __importStar(require(\"path\"));\nconst oidc_utils_1 = require(\"./oidc-utils\");\n/**\n * The code to exit an action\n */\nvar ExitCode;\n(function (ExitCode) {\n /**\n * A code indicating that the action was successful\n */\n ExitCode[ExitCode[\"Success\"] = 0] = \"Success\";\n /**\n * A code indicating that the action was a failure\n */\n ExitCode[ExitCode[\"Failure\"] = 1] = \"Failure\";\n})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));\n//-----------------------------------------------------------------------\n// Variables\n//-----------------------------------------------------------------------\n/**\n * Sets env variable for this action and future actions in the job\n * @param name the name of the variable to set\n * @param val the value of the variable. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction exportVariable(name, val) {\n const convertedVal = utils_1.toCommandValue(val);\n process.env[name] = convertedVal;\n const filePath = process.env['GITHUB_ENV'] || '';\n if (filePath) {\n const delimiter = '_GitHubActionsFileCommandDelimeter_';\n const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`;\n file_command_1.issueCommand('ENV', commandValue);\n }\n else {\n command_1.issueCommand('set-env', { name }, convertedVal);\n }\n}\nexports.exportVariable = exportVariable;\n/**\n * Registers a secret which will get masked from logs\n * @param secret value of the secret\n */\nfunction setSecret(secret) {\n command_1.issueCommand('add-mask', {}, secret);\n}\nexports.setSecret = setSecret;\n/**\n * Prepends inputPath to the PATH (for this action and future actions)\n * @param inputPath\n */\nfunction addPath(inputPath) {\n const filePath = process.env['GITHUB_PATH'] || '';\n if (filePath) {\n file_command_1.issueCommand('PATH', inputPath);\n }\n else {\n command_1.issueCommand('add-path', {}, inputPath);\n }\n process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;\n}\nexports.addPath = addPath;\n/**\n * Gets the value of an input.\n * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed.\n * Returns an empty string if the value is not defined.\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns string\n */\nfunction getInput(name, options) {\n const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';\n if (options && options.required && !val) {\n throw new Error(`Input required and not supplied: ${name}`);\n }\n if (options && options.trimWhitespace === false) {\n return val;\n }\n return val.trim();\n}\nexports.getInput = getInput;\n/**\n * Gets the values of an multiline input. Each value is also trimmed.\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns string[]\n *\n */\nfunction getMultilineInput(name, options) {\n const inputs = getInput(name, options)\n .split('\\n')\n .filter(x => x !== '');\n return inputs;\n}\nexports.getMultilineInput = getMultilineInput;\n/**\n * Gets the input value of the boolean type in the YAML 1.2 \"core schema\" specification.\n * Support boolean input list: `true | True | TRUE | false | False | FALSE` .\n * The return value is also in boolean type.\n * ref: https://yaml.org/spec/1.2/spec.html#id2804923\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns boolean\n */\nfunction getBooleanInput(name, options) {\n const trueValue = ['true', 'True', 'TRUE'];\n const falseValue = ['false', 'False', 'FALSE'];\n const val = getInput(name, options);\n if (trueValue.includes(val))\n return true;\n if (falseValue.includes(val))\n return false;\n throw new TypeError(`Input does not meet YAML 1.2 \"Core Schema\" specification: ${name}\\n` +\n `Support boolean input list: \\`true | True | TRUE | false | False | FALSE\\``);\n}\nexports.getBooleanInput = getBooleanInput;\n/**\n * Sets the value of an output.\n *\n * @param name name of the output to set\n * @param value value to store. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction setOutput(name, value) {\n process.stdout.write(os.EOL);\n command_1.issueCommand('set-output', { name }, value);\n}\nexports.setOutput = setOutput;\n/**\n * Enables or disables the echoing of commands into stdout for the rest of the step.\n * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.\n *\n */\nfunction setCommandEcho(enabled) {\n command_1.issue('echo', enabled ? 'on' : 'off');\n}\nexports.setCommandEcho = setCommandEcho;\n//-----------------------------------------------------------------------\n// Results\n//-----------------------------------------------------------------------\n/**\n * Sets the action status to failed.\n * When the action exits it will be with an exit code of 1\n * @param message add error issue message\n */\nfunction setFailed(message) {\n process.exitCode = ExitCode.Failure;\n error(message);\n}\nexports.setFailed = setFailed;\n//-----------------------------------------------------------------------\n// Logging Commands\n//-----------------------------------------------------------------------\n/**\n * Gets whether Actions Step Debug is on or not\n */\nfunction isDebug() {\n return process.env['RUNNER_DEBUG'] === '1';\n}\nexports.isDebug = isDebug;\n/**\n * Writes debug message to user log\n * @param message debug message\n */\nfunction debug(message) {\n command_1.issueCommand('debug', {}, message);\n}\nexports.debug = debug;\n/**\n * Adds an error issue\n * @param message error issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction error(message, properties = {}) {\n command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);\n}\nexports.error = error;\n/**\n * Adds a warning issue\n * @param message warning issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction warning(message, properties = {}) {\n command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);\n}\nexports.warning = warning;\n/**\n * Adds a notice issue\n * @param message notice issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction notice(message, properties = {}) {\n command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? 
message.toString() : message);\n}\nexports.notice = notice;\n/**\n * Writes info to log with console.log.\n * @param message info message\n */\nfunction info(message) {\n process.stdout.write(message + os.EOL);\n}\nexports.info = info;\n/**\n * Begin an output group.\n *\n * Output until the next `groupEnd` will be foldable in this group\n *\n * @param name The name of the output group\n */\nfunction startGroup(name) {\n command_1.issue('group', name);\n}\nexports.startGroup = startGroup;\n/**\n * End an output group.\n */\nfunction endGroup() {\n command_1.issue('endgroup');\n}\nexports.endGroup = endGroup;\n/**\n * Wrap an asynchronous function call in a group.\n *\n * Returns the same type as the function itself.\n *\n * @param name The name of the group\n * @param fn The function to wrap in the group\n */\nfunction group(name, fn) {\n return __awaiter(this, void 0, void 0, function* () {\n startGroup(name);\n let result;\n try {\n result = yield fn();\n }\n finally {\n endGroup();\n }\n return result;\n });\n}\nexports.group = group;\n//-----------------------------------------------------------------------\n// Wrapper action state\n//-----------------------------------------------------------------------\n/**\n * Saves state for current action, the state can only be retrieved by this action's post job execution.\n *\n * @param name name of the state to store\n * @param value value to store. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction saveState(name, value) {\n command_1.issueCommand('save-state', { name }, value);\n}\nexports.saveState = saveState;\n/**\n * Gets the value of an state set by this action's main execution.\n *\n * @param name name of the state to get\n * @returns string\n */\nfunction getState(name) {\n return process.env[`STATE_${name}`] || '';\n}\nexports.getState = getState;\nfunction getIDToken(aud) {\n return __awaiter(this, void 0, void 0, function* () {\n return yield oidc_utils_1.OidcClient.getIDToken(aud);\n });\n}\nexports.getIDToken = getIDToken;\n/**\n * Summary exports\n */\nvar summary_1 = require(\"./summary\");\nObject.defineProperty(exports, \"summary\", { enumerable: true, get: function () { return summary_1.summary; } });\n/**\n * @deprecated use core.summary\n */\nvar summary_2 = require(\"./summary\");\nObject.defineProperty(exports, \"markdownSummary\", { enumerable: true, get: function () { return summary_2.markdownSummary; } });\n/**\n * Path exports\n */\nvar path_utils_1 = require(\"./path-utils\");\nObject.defineProperty(exports, \"toPosixPath\", { enumerable: true, get: function () { return path_utils_1.toPosixPath; } });\nObject.defineProperty(exports, \"toWin32Path\", { enumerable: true, get: function () { return path_utils_1.toWin32Path; } });\nObject.defineProperty(exports, \"toPlatformPath\", { enumerable: true, get: function () { return path_utils_1.toPlatformPath; } });\n//# sourceMappingURL=core.js.map","\"use strict\";\n// For internal use, subject to change.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.issueCommand = void 0;\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nconst fs = __importStar(require(\"fs\"));\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\nfunction issueCommand(command, message) {\n const filePath = process.env[`GITHUB_${command}`];\n if (!filePath) {\n throw new Error(`Unable to find environment variable for file command ${command}`);\n }\n if (!fs.existsSync(filePath)) {\n throw new Error(`Missing file at path: ${filePath}`);\n }\n fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {\n encoding: 'utf8'\n });\n}\nexports.issueCommand = issueCommand;\n//# sourceMappingURL=file-command.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.OidcClient = void 0;\nconst http_client_1 = require(\"@actions/http-client\");\nconst auth_1 = require(\"@actions/http-client/lib/auth\");\nconst core_1 = require(\"./core\");\nclass OidcClient {\n static createHttpClient(allowRetry = true, maxRetry = 10) {\n const requestOptions = {\n allowRetries: allowRetry,\n maxRetries: maxRetry\n };\n return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions);\n }\n static getRequestToken() {\n const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN'];\n if (!token) {\n throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable');\n }\n return token;\n }\n static getIDTokenUrl() {\n const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL'];\n if (!runtimeUrl) {\n throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable');\n }\n return runtimeUrl;\n }\n static getCall(id_token_url) {\n var _a;\n return __awaiter(this, void 0, void 0, function* () {\n const httpclient = OidcClient.createHttpClient();\n const res = yield httpclient\n .getJson(id_token_url)\n .catch(error => {\n throw new Error(`Failed to get ID Token. \\n \n Error Code : ${error.statusCode}\\n \n Error Message: ${error.result.message}`);\n });\n const id_token = (_a = res.result) === null || _a === void 0 ? 
void 0 : _a.value;\n if (!id_token) {\n throw new Error('Response json body do not have ID Token field');\n }\n return id_token;\n });\n }\n static getIDToken(audience) {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n // New ID Token is requested from action service\n let id_token_url = OidcClient.getIDTokenUrl();\n if (audience) {\n const encodedAudience = encodeURIComponent(audience);\n id_token_url = `${id_token_url}&audience=${encodedAudience}`;\n }\n core_1.debug(`ID token url is ${id_token_url}`);\n const id_token = yield OidcClient.getCall(id_token_url);\n core_1.setSecret(id_token);\n return id_token;\n }\n catch (error) {\n throw new Error(`Error message: ${error.message}`);\n }\n });\n }\n}\nexports.OidcClient = OidcClient;\n//# sourceMappingURL=oidc-utils.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0;\nconst path = __importStar(require(\"path\"));\n/**\n * toPosixPath converts the given path to the posix form. On Windows, \\\\ will be\n * replaced with /.\n *\n * @param pth. Path to transform.\n * @return string Posix path.\n */\nfunction toPosixPath(pth) {\n return pth.replace(/[\\\\]/g, '/');\n}\nexports.toPosixPath = toPosixPath;\n/**\n * toWin32Path converts the given path to the win32 form. On Linux, / will be\n * replaced with \\\\.\n *\n * @param pth. Path to transform.\n * @return string Win32 path.\n */\nfunction toWin32Path(pth) {\n return pth.replace(/[/]/g, '\\\\');\n}\nexports.toWin32Path = toWin32Path;\n/**\n * toPlatformPath converts the given path to a platform-specific path. It does\n * this by replacing instances of / and \\ with the platform-specific path\n * separator.\n *\n * @param pth The path to platformize.\n * @return string The platform-specific path.\n */\nfunction toPlatformPath(pth) {\n return pth.replace(/[/\\\\]/g, path.sep);\n}\nexports.toPlatformPath = toPlatformPath;\n//# sourceMappingURL=path-utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0;\nconst os_1 = require(\"os\");\nconst fs_1 = require(\"fs\");\nconst { access, appendFile, writeFile } = fs_1.promises;\nexports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY';\nexports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary';\nclass Summary {\n constructor() {\n this._buffer = '';\n }\n /**\n * Finds the summary file path from the environment, rejects if env var is not found or file does not exist\n * Also checks r/w permissions.\n *\n * @returns step summary file path\n */\n filePath() {\n return __awaiter(this, void 0, void 0, function* () {\n if (this._filePath) {\n return this._filePath;\n }\n const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR];\n if (!pathFromEnv) {\n throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);\n }\n try {\n yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);\n }\n catch (_a) {\n throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);\n }\n this._filePath = pathFromEnv;\n return this._filePath;\n });\n }\n /**\n * Wraps content in an HTML tag, adding any HTML attributes\n *\n * @param {string} tag HTML tag to wrap\n * @param {string | null} content content within the tag\n * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add\n *\n * @returns {string} content wrapped in HTML element\n */\n wrap(tag, content, attrs = {}) {\n const htmlAttrs = Object.entries(attrs)\n .map(([key, value]) => ` ${key}=\"${value}\"`)\n .join('');\n if (!content) {\n return `<${tag}${htmlAttrs}>`;\n }\n return `<${tag}${htmlAttrs}>${content}`;\n }\n /**\n * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default.\n *\n * @param {SummaryWriteOptions} [options] (optional) options for write operation\n *\n * @returns {Promise} summary instance\n */\n write(options) {\n return __awaiter(this, void 0, void 0, function* () {\n const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite);\n const filePath = yield this.filePath();\n const writeFunc = overwrite ? 
writeFile : appendFile;\n yield writeFunc(filePath, this._buffer, { encoding: 'utf8' });\n return this.emptyBuffer();\n });\n }\n /**\n * Clears the summary buffer and wipes the summary file\n *\n * @returns {Summary} summary instance\n */\n clear() {\n return __awaiter(this, void 0, void 0, function* () {\n return this.emptyBuffer().write({ overwrite: true });\n });\n }\n /**\n * Returns the current summary buffer as a string\n *\n * @returns {string} string of summary buffer\n */\n stringify() {\n return this._buffer;\n }\n /**\n * If the summary buffer is empty\n *\n * @returns {boolen} true if the buffer is empty\n */\n isEmptyBuffer() {\n return this._buffer.length === 0;\n }\n /**\n * Resets the summary buffer without writing to summary file\n *\n * @returns {Summary} summary instance\n */\n emptyBuffer() {\n this._buffer = '';\n return this;\n }\n /**\n * Adds raw text to the summary buffer\n *\n * @param {string} text content to add\n * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false)\n *\n * @returns {Summary} summary instance\n */\n addRaw(text, addEOL = false) {\n this._buffer += text;\n return addEOL ? this.addEOL() : this;\n }\n /**\n * Adds the operating system-specific end-of-line marker to the buffer\n *\n * @returns {Summary} summary instance\n */\n addEOL() {\n return this.addRaw(os_1.EOL);\n }\n /**\n * Adds an HTML codeblock to the summary buffer\n *\n * @param {string} code content to render within fenced code block\n * @param {string} lang (optional) language to syntax highlight code\n *\n * @returns {Summary} summary instance\n */\n addCodeBlock(code, lang) {\n const attrs = Object.assign({}, (lang && { lang }));\n const element = this.wrap('pre', this.wrap('code', code), attrs);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML list to the summary buffer\n *\n * @param {string[]} items list of items to render\n * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false)\n *\n * @returns {Summary} summary instance\n */\n addList(items, ordered = false) {\n const tag = ordered ? 'ol' : 'ul';\n const listItems = items.map(item => this.wrap('li', item)).join('');\n const element = this.wrap(tag, listItems);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML table to the summary buffer\n *\n * @param {SummaryTableCell[]} rows table rows\n *\n * @returns {Summary} summary instance\n */\n addTable(rows) {\n const tableBody = rows\n .map(row => {\n const cells = row\n .map(cell => {\n if (typeof cell === 'string') {\n return this.wrap('td', cell);\n }\n const { header, data, colspan, rowspan } = cell;\n const tag = header ? 
'th' : 'td';\n const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan }));\n return this.wrap(tag, data, attrs);\n })\n .join('');\n return this.wrap('tr', cells);\n })\n .join('');\n const element = this.wrap('table', tableBody);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds a collapsable HTML details element to the summary buffer\n *\n * @param {string} label text for the closed state\n * @param {string} content collapsable content\n *\n * @returns {Summary} summary instance\n */\n addDetails(label, content) {\n const element = this.wrap('details', this.wrap('summary', label) + content);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML image tag to the summary buffer\n *\n * @param {string} src path to the image you to embed\n * @param {string} alt text description of the image\n * @param {SummaryImageOptions} options (optional) addition image attributes\n *\n * @returns {Summary} summary instance\n */\n addImage(src, alt, options) {\n const { width, height } = options || {};\n const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height }));\n const element = this.wrap('img', null, Object.assign({ src, alt }, attrs));\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML section heading element\n *\n * @param {string} text heading text\n * @param {number | string} [level=1] (optional) the heading level, default: 1\n *\n * @returns {Summary} summary instance\n */\n addHeading(text, level) {\n const tag = `h${level}`;\n const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag)\n ? tag\n : 'h1';\n const element = this.wrap(allowedTag, text);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML thematic break (
) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addSeparator() {\n const element = this.wrap('hr', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML line break (
) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addBreak() {\n const element = this.wrap('br', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML blockquote to the summary buffer\n *\n * @param {string} text quote text\n * @param {string} cite (optional) citation url\n *\n * @returns {Summary} summary instance\n */\n addQuote(text, cite) {\n const attrs = Object.assign({}, (cite && { cite }));\n const element = this.wrap('blockquote', text, attrs);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML anchor tag to the summary buffer\n *\n * @param {string} text link text/content\n * @param {string} href hyperlink\n *\n * @returns {Summary} summary instance\n */\n addLink(text, href) {\n const element = this.wrap('a', text, { href });\n return this.addRaw(element).addEOL();\n }\n}\nconst _summary = new Summary();\n/**\n * @deprecated use `core.summary`\n */\nexports.markdownSummary = _summary;\nexports.summary = _summary;\n//# sourceMappingURL=summary.js.map","\"use strict\";\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toCommandProperties = exports.toCommandValue = void 0;\n/**\n * Sanitizes an input into a string so it can be passed into issueCommand safely\n * @param input input to sanitize into a string\n */\nfunction toCommandValue(input) {\n if (input === null || input === undefined) {\n return '';\n }\n else if (typeof input === 'string' || input instanceof String) {\n return input;\n }\n return JSON.stringify(input);\n}\nexports.toCommandValue = toCommandValue;\n/**\n *\n * @param annotationProperties\n * @returns The command properties to send with the actual annotation command\n * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646\n */\nfunction toCommandProperties(annotationProperties) {\n if (!Object.keys(annotationProperties).length) {\n return {};\n }\n return {\n title: annotationProperties.title,\n file: annotationProperties.file,\n line: annotationProperties.startLine,\n endLine: annotationProperties.endLine,\n col: annotationProperties.startColumn,\n endColumn: annotationProperties.endColumn\n };\n}\nexports.toCommandProperties = toCommandProperties;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Context = void 0;\nconst fs_1 = require(\"fs\");\nconst os_1 = require(\"os\");\nclass Context {\n /**\n * Hydrate the context from the environment\n */\n constructor() {\n var _a, _b, _c;\n this.payload = {};\n if (process.env.GITHUB_EVENT_PATH) {\n if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) {\n this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));\n }\n else {\n const path = process.env.GITHUB_EVENT_PATH;\n process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`);\n }\n }\n this.eventName = process.env.GITHUB_EVENT_NAME;\n this.sha = process.env.GITHUB_SHA;\n this.ref = process.env.GITHUB_REF;\n this.workflow = process.env.GITHUB_WORKFLOW;\n this.action = process.env.GITHUB_ACTION;\n this.actor = process.env.GITHUB_ACTOR;\n this.job = process.env.GITHUB_JOB;\n this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10);\n this.runId = parseInt(process.env.GITHUB_RUN_ID, 10);\n this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? 
_a : `https://api.github.com`;\n this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`;\n this.graphqlUrl = (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`;\n }\n get issue() {\n const payload = this.payload;\n return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number });\n }\n get repo() {\n if (process.env.GITHUB_REPOSITORY) {\n const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');\n return { owner, repo };\n }\n if (this.payload.repository) {\n return {\n owner: this.payload.repository.owner.login,\n repo: this.payload.repository.name\n };\n }\n throw new Error(\"context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'\");\n }\n}\nexports.Context = Context;\n//# sourceMappingURL=context.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getOctokit = exports.context = void 0;\nconst Context = __importStar(require(\"./context\"));\nconst utils_1 = require(\"./utils\");\nexports.context = new Context.Context();\n/**\n * Returns a hydrated octokit ready to use for GitHub Actions\n *\n * @param token the repo PAT or GITHUB_TOKEN\n * @param options other options to set\n */\nfunction getOctokit(token, options) {\n return new utils_1.GitHub(utils_1.getOctokitOptions(token, options));\n}\nexports.getOctokit = getOctokit;\n//# sourceMappingURL=github.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0;\nconst httpClient = __importStar(require(\"@actions/http-client\"));\nfunction getAuthString(token, options) {\n if (!token && !options.auth) {\n throw new Error('Parameter token or opts.auth is required');\n }\n else if (token && options.auth) {\n throw new Error('Parameters token and opts.auth may not both be specified');\n }\n return typeof options.auth === 'string' ? options.auth : `token ${token}`;\n}\nexports.getAuthString = getAuthString;\nfunction getProxyAgent(destinationUrl) {\n const hc = new httpClient.HttpClient();\n return hc.getAgent(destinationUrl);\n}\nexports.getProxyAgent = getProxyAgent;\nfunction getApiBaseUrl() {\n return process.env['GITHUB_API_URL'] || 'https://api.github.com';\n}\nexports.getApiBaseUrl = getApiBaseUrl;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getOctokitOptions = exports.GitHub = exports.context = void 0;\nconst Context = __importStar(require(\"./context\"));\nconst Utils = __importStar(require(\"./internal/utils\"));\n// octokit + plugins\nconst core_1 = require(\"@octokit/core\");\nconst plugin_rest_endpoint_methods_1 = require(\"@octokit/plugin-rest-endpoint-methods\");\nconst plugin_paginate_rest_1 = require(\"@octokit/plugin-paginate-rest\");\nexports.context = new Context.Context();\nconst baseUrl = Utils.getApiBaseUrl();\nconst defaults = {\n baseUrl,\n request: {\n agent: Utils.getProxyAgent(baseUrl)\n }\n};\nexports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(defaults);\n/**\n * Convience function to correctly format Octokit Options to pass into the constructor.\n *\n * @param token the repo PAT or GITHUB_TOKEN\n * @param options other options to set\n */\nfunction getOctokitOptions(token, options) {\n const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller\n // Auth\n const auth = Utils.getAuthString(token, opts);\n if (auth) {\n opts.auth = auth;\n }\n return opts;\n}\nexports.getOctokitOptions = getOctokitOptions;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0;\nclass BasicCredentialHandler {\n constructor(username, password) {\n this.username = username;\n this.password = password;\n }\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.BasicCredentialHandler = BasicCredentialHandler;\nclass BearerCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Bearer ${this.token}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.BearerCredentialHandler = BearerCredentialHandler;\nclass PersonalAccessTokenCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;\n//# sourceMappingURL=auth.js.map","\"use strict\";\n/* eslint-disable @typescript-eslint/no-explicit-any */\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0;\nconst http = __importStar(require(\"http\"));\nconst https = __importStar(require(\"https\"));\nconst pm = __importStar(require(\"./proxy\"));\nconst tunnel = __importStar(require(\"tunnel\"));\nvar HttpCodes;\n(function (HttpCodes) {\n HttpCodes[HttpCodes[\"OK\"] = 200] = \"OK\";\n HttpCodes[HttpCodes[\"MultipleChoices\"] = 300] = \"MultipleChoices\";\n HttpCodes[HttpCodes[\"MovedPermanently\"] = 301] = \"MovedPermanently\";\n HttpCodes[HttpCodes[\"ResourceMoved\"] = 302] = \"ResourceMoved\";\n HttpCodes[HttpCodes[\"SeeOther\"] = 303] = \"SeeOther\";\n HttpCodes[HttpCodes[\"NotModified\"] = 304] = \"NotModified\";\n HttpCodes[HttpCodes[\"UseProxy\"] = 305] = \"UseProxy\";\n HttpCodes[HttpCodes[\"SwitchProxy\"] = 306] = \"SwitchProxy\";\n HttpCodes[HttpCodes[\"TemporaryRedirect\"] = 307] = \"TemporaryRedirect\";\n HttpCodes[HttpCodes[\"PermanentRedirect\"] = 308] = \"PermanentRedirect\";\n HttpCodes[HttpCodes[\"BadRequest\"] = 400] = \"BadRequest\";\n HttpCodes[HttpCodes[\"Unauthorized\"] = 401] = \"Unauthorized\";\n HttpCodes[HttpCodes[\"PaymentRequired\"] = 402] = \"PaymentRequired\";\n HttpCodes[HttpCodes[\"Forbidden\"] = 403] = \"Forbidden\";\n HttpCodes[HttpCodes[\"NotFound\"] = 404] = \"NotFound\";\n HttpCodes[HttpCodes[\"MethodNotAllowed\"] = 405] = \"MethodNotAllowed\";\n HttpCodes[HttpCodes[\"NotAcceptable\"] = 406] = \"NotAcceptable\";\n HttpCodes[HttpCodes[\"ProxyAuthenticationRequired\"] = 407] = \"ProxyAuthenticationRequired\";\n HttpCodes[HttpCodes[\"RequestTimeout\"] = 408] = \"RequestTimeout\";\n HttpCodes[HttpCodes[\"Conflict\"] = 409] = \"Conflict\";\n HttpCodes[HttpCodes[\"Gone\"] = 410] = \"Gone\";\n HttpCodes[HttpCodes[\"TooManyRequests\"] = 429] = \"TooManyRequests\";\n HttpCodes[HttpCodes[\"InternalServerError\"] = 500] = \"InternalServerError\";\n HttpCodes[HttpCodes[\"NotImplemented\"] = 501] = \"NotImplemented\";\n HttpCodes[HttpCodes[\"BadGateway\"] = 502] = \"BadGateway\";\n HttpCodes[HttpCodes[\"ServiceUnavailable\"] = 503] = \"ServiceUnavailable\";\n HttpCodes[HttpCodes[\"GatewayTimeout\"] = 504] = \"GatewayTimeout\";\n})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));\nvar Headers;\n(function (Headers) {\n Headers[\"Accept\"] = \"accept\";\n Headers[\"ContentType\"] = \"content-type\";\n})(Headers = exports.Headers || (exports.Headers = {}));\nvar MediaTypes;\n(function (MediaTypes) {\n MediaTypes[\"ApplicationJson\"] = \"application/json\";\n})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));\n/**\n * Returns the proxy URL, depending upon the supplied url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. 
For example, https://api.github.com\n */\nfunction getProxyUrl(serverUrl) {\n const proxyUrl = pm.getProxyUrl(new URL(serverUrl));\n return proxyUrl ? proxyUrl.href : '';\n}\nexports.getProxyUrl = getProxyUrl;\nconst HttpRedirectCodes = [\n HttpCodes.MovedPermanently,\n HttpCodes.ResourceMoved,\n HttpCodes.SeeOther,\n HttpCodes.TemporaryRedirect,\n HttpCodes.PermanentRedirect\n];\nconst HttpResponseRetryCodes = [\n HttpCodes.BadGateway,\n HttpCodes.ServiceUnavailable,\n HttpCodes.GatewayTimeout\n];\nconst RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];\nconst ExponentialBackoffCeiling = 10;\nconst ExponentialBackoffTimeSlice = 5;\nclass HttpClientError extends Error {\n constructor(message, statusCode) {\n super(message);\n this.name = 'HttpClientError';\n this.statusCode = statusCode;\n Object.setPrototypeOf(this, HttpClientError.prototype);\n }\n}\nexports.HttpClientError = HttpClientError;\nclass HttpClientResponse {\n constructor(message) {\n this.message = message;\n }\n readBody() {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {\n let output = Buffer.alloc(0);\n this.message.on('data', (chunk) => {\n output = Buffer.concat([output, chunk]);\n });\n this.message.on('end', () => {\n resolve(output.toString());\n });\n }));\n });\n }\n}\nexports.HttpClientResponse = HttpClientResponse;\nfunction isHttps(requestUrl) {\n const parsedUrl = new URL(requestUrl);\n return parsedUrl.protocol === 'https:';\n}\nexports.isHttps = isHttps;\nclass HttpClient {\n constructor(userAgent, handlers, requestOptions) {\n this._ignoreSslError = false;\n this._allowRedirects = true;\n this._allowRedirectDowngrade = false;\n this._maxRedirects = 50;\n this._allowRetries = false;\n this._maxRetries = 1;\n this._keepAlive = false;\n this._disposed = false;\n this.userAgent = userAgent;\n this.handlers = handlers || [];\n this.requestOptions = requestOptions;\n if (requestOptions) {\n if (requestOptions.ignoreSslError != null) {\n this._ignoreSslError = requestOptions.ignoreSslError;\n }\n this._socketTimeout = requestOptions.socketTimeout;\n if (requestOptions.allowRedirects != null) {\n this._allowRedirects = requestOptions.allowRedirects;\n }\n if (requestOptions.allowRedirectDowngrade != null) {\n this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;\n }\n if (requestOptions.maxRedirects != null) {\n this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);\n }\n if (requestOptions.keepAlive != null) {\n this._keepAlive = requestOptions.keepAlive;\n }\n if (requestOptions.allowRetries != null) {\n this._allowRetries = requestOptions.allowRetries;\n }\n if (requestOptions.maxRetries != null) {\n this._maxRetries = requestOptions.maxRetries;\n }\n }\n }\n options(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});\n });\n }\n get(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('GET', requestUrl, null, additionalHeaders || {});\n });\n }\n del(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('DELETE', requestUrl, null, additionalHeaders || {});\n });\n }\n post(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('POST', requestUrl, data, additionalHeaders || {});\n });\n }\n patch(requestUrl, 
data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('PATCH', requestUrl, data, additionalHeaders || {});\n });\n }\n put(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('PUT', requestUrl, data, additionalHeaders || {});\n });\n }\n head(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('HEAD', requestUrl, null, additionalHeaders || {});\n });\n }\n sendStream(verb, requestUrl, stream, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request(verb, requestUrl, stream, additionalHeaders);\n });\n }\n /**\n * Gets a typed object from an endpoint\n * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise\n */\n getJson(requestUrl, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n const res = yield this.get(requestUrl, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n postJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.post(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n putJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.put(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n patchJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.patch(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n /**\n * Makes a raw http request.\n * All other methods such as get, post, patch, and request ultimately call this.\n * Prefer get, del, post and patch\n */\n request(verb, requestUrl, data, headers) {\n return __awaiter(this, void 0, void 0, function* () {\n if (this._disposed) {\n throw new Error('Client has already been disposed.');\n }\n const parsedUrl = new URL(requestUrl);\n let info = this._prepareRequest(verb, parsedUrl, headers);\n // Only perform retries on reads since writes may not be idempotent.\n const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb)\n ? 
this._maxRetries + 1\n : 1;\n let numTries = 0;\n let response;\n do {\n response = yield this.requestRaw(info, data);\n // Check if it's an authentication challenge\n if (response &&\n response.message &&\n response.message.statusCode === HttpCodes.Unauthorized) {\n let authenticationHandler;\n for (const handler of this.handlers) {\n if (handler.canHandleAuthentication(response)) {\n authenticationHandler = handler;\n break;\n }\n }\n if (authenticationHandler) {\n return authenticationHandler.handleAuthentication(this, info, data);\n }\n else {\n // We have received an unauthorized response but have no handlers to handle it.\n // Let the response return to the caller.\n return response;\n }\n }\n let redirectsRemaining = this._maxRedirects;\n while (response.message.statusCode &&\n HttpRedirectCodes.includes(response.message.statusCode) &&\n this._allowRedirects &&\n redirectsRemaining > 0) {\n const redirectUrl = response.message.headers['location'];\n if (!redirectUrl) {\n // if there's no location to redirect to, we won't\n break;\n }\n const parsedRedirectUrl = new URL(redirectUrl);\n if (parsedUrl.protocol === 'https:' &&\n parsedUrl.protocol !== parsedRedirectUrl.protocol &&\n !this._allowRedirectDowngrade) {\n throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');\n }\n // we need to finish reading the response before reassigning response\n // which will leak the open socket.\n yield response.readBody();\n // strip authorization header if redirected to a different hostname\n if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {\n for (const header in headers) {\n // header names are case insensitive\n if (header.toLowerCase() === 'authorization') {\n delete headers[header];\n }\n }\n }\n // let's make the request with the new redirectUrl\n info = this._prepareRequest(verb, parsedRedirectUrl, headers);\n response = yield this.requestRaw(info, data);\n redirectsRemaining--;\n }\n if (!response.message.statusCode ||\n !HttpResponseRetryCodes.includes(response.message.statusCode)) {\n // If not a retry code, return immediately instead of retrying\n return response;\n }\n numTries += 1;\n if (numTries < maxTries) {\n yield response.readBody();\n yield this._performExponentialBackoff(numTries);\n }\n } while (numTries < maxTries);\n return response;\n });\n }\n /**\n * Needs to be called if keepAlive is set to true in request options.\n */\n dispose() {\n if (this._agent) {\n this._agent.destroy();\n }\n this._disposed = true;\n }\n /**\n * Raw request.\n * @param info\n * @param data\n */\n requestRaw(info, data) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve, reject) => {\n function callbackForResult(err, res) {\n if (err) {\n reject(err);\n }\n else if (!res) {\n // If `err` is not passed, then `res` must be passed.\n reject(new Error('Unknown error'));\n }\n else {\n resolve(res);\n }\n }\n this.requestRawWithCallback(info, data, callbackForResult);\n });\n });\n }\n /**\n * Raw request with callback.\n * @param info\n * @param data\n * @param onResult\n */\n requestRawWithCallback(info, data, onResult) {\n if (typeof data === 'string') {\n if (!info.options.headers) {\n info.options.headers = {};\n }\n info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');\n }\n let callbackCalled = false;\n function handleResult(err, res) {\n if (!callbackCalled) {\n callbackCalled = true;\n 
onResult(err, res);\n }\n }\n const req = info.httpModule.request(info.options, (msg) => {\n const res = new HttpClientResponse(msg);\n handleResult(undefined, res);\n });\n let socket;\n req.on('socket', sock => {\n socket = sock;\n });\n // If we ever get disconnected, we want the socket to timeout eventually\n req.setTimeout(this._socketTimeout || 3 * 60000, () => {\n if (socket) {\n socket.end();\n }\n handleResult(new Error(`Request timeout: ${info.options.path}`));\n });\n req.on('error', function (err) {\n // err has statusCode property\n // res should have headers\n handleResult(err);\n });\n if (data && typeof data === 'string') {\n req.write(data, 'utf8');\n }\n if (data && typeof data !== 'string') {\n data.on('close', function () {\n req.end();\n });\n data.pipe(req);\n }\n else {\n req.end();\n }\n }\n /**\n * Gets an http agent. This function is useful when you need an http agent that handles\n * routing through a proxy server - depending upon the url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com\n */\n getAgent(serverUrl) {\n const parsedUrl = new URL(serverUrl);\n return this._getAgent(parsedUrl);\n }\n _prepareRequest(method, requestUrl, headers) {\n const info = {};\n info.parsedUrl = requestUrl;\n const usingSsl = info.parsedUrl.protocol === 'https:';\n info.httpModule = usingSsl ? https : http;\n const defaultPort = usingSsl ? 443 : 80;\n info.options = {};\n info.options.host = info.parsedUrl.hostname;\n info.options.port = info.parsedUrl.port\n ? parseInt(info.parsedUrl.port)\n : defaultPort;\n info.options.path =\n (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');\n info.options.method = method;\n info.options.headers = this._mergeHeaders(headers);\n if (this.userAgent != null) {\n info.options.headers['user-agent'] = this.userAgent;\n }\n info.options.agent = this._getAgent(info.parsedUrl);\n // gives handlers an opportunity to participate\n if (this.handlers) {\n for (const handler of this.handlers) {\n handler.prepareRequest(info.options);\n }\n }\n return info;\n }\n _mergeHeaders(headers) {\n if (this.requestOptions && this.requestOptions.headers) {\n return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {}));\n }\n return lowercaseKeys(headers || {});\n }\n _getExistingOrDefaultHeader(additionalHeaders, header, _default) {\n let clientHeader;\n if (this.requestOptions && this.requestOptions.headers) {\n clientHeader = lowercaseKeys(this.requestOptions.headers)[header];\n }\n return additionalHeaders[header] || clientHeader || _default;\n }\n _getAgent(parsedUrl) {\n let agent;\n const proxyUrl = pm.getProxyUrl(parsedUrl);\n const useProxy = proxyUrl && proxyUrl.hostname;\n if (this._keepAlive && useProxy) {\n agent = this._proxyAgent;\n }\n if (this._keepAlive && !useProxy) {\n agent = this._agent;\n }\n // if agent is already assigned use that agent.\n if (agent) {\n return agent;\n }\n const usingSsl = parsedUrl.protocol === 'https:';\n let maxSockets = 100;\n if (this.requestOptions) {\n maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;\n }\n // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis.\n if (proxyUrl && proxyUrl.hostname) {\n const agentOptions = {\n maxSockets,\n keepAlive: this._keepAlive,\n proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && {\n proxyAuth: 
`${proxyUrl.username}:${proxyUrl.password}`\n })), { host: proxyUrl.hostname, port: proxyUrl.port })\n };\n let tunnelAgent;\n const overHttps = proxyUrl.protocol === 'https:';\n if (usingSsl) {\n tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;\n }\n else {\n tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;\n }\n agent = tunnelAgent(agentOptions);\n this._proxyAgent = agent;\n }\n // if reusing agent across request and tunneling agent isn't assigned create a new agent\n if (this._keepAlive && !agent) {\n const options = { keepAlive: this._keepAlive, maxSockets };\n agent = usingSsl ? new https.Agent(options) : new http.Agent(options);\n this._agent = agent;\n }\n // if not using private agent and tunnel agent isn't setup then use global agent\n if (!agent) {\n agent = usingSsl ? https.globalAgent : http.globalAgent;\n }\n if (usingSsl && this._ignoreSslError) {\n // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process\n // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options\n // we have to cast it to any and change it directly\n agent.options = Object.assign(agent.options || {}, {\n rejectUnauthorized: false\n });\n }\n return agent;\n }\n _performExponentialBackoff(retryNumber) {\n return __awaiter(this, void 0, void 0, function* () {\n retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);\n const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);\n return new Promise(resolve => setTimeout(() => resolve(), ms));\n });\n }\n _processResponse(res, options) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {\n const statusCode = res.message.statusCode || 0;\n const response = {\n statusCode,\n result: null,\n headers: {}\n };\n // not found leads to null obj returned\n if (statusCode === HttpCodes.NotFound) {\n resolve(response);\n }\n // get the result from the body\n function dateTimeDeserializer(key, value) {\n if (typeof value === 'string') {\n const a = new Date(value);\n if (!isNaN(a.valueOf())) {\n return a;\n }\n }\n return value;\n }\n let obj;\n let contents;\n try {\n contents = yield res.readBody();\n if (contents && contents.length > 0) {\n if (options && options.deserializeDates) {\n obj = JSON.parse(contents, dateTimeDeserializer);\n }\n else {\n obj = JSON.parse(contents);\n }\n response.result = obj;\n }\n response.headers = res.message.headers;\n }\n catch (err) {\n // Invalid resource (contents not json); leaving result obj null\n }\n // note that 3xx redirects are handled by the http layer.\n if (statusCode > 299) {\n let msg;\n // if exception/error in body, attempt to get better error\n if (obj && obj.message) {\n msg = obj.message;\n }\n else if (contents && contents.length > 0) {\n // it may be the case that the exception is in the body message as string\n msg = contents;\n }\n else {\n msg = `Failed request: (${statusCode})`;\n }\n const err = new HttpClientError(msg, statusCode);\n err.result = response.result;\n reject(err);\n }\n else {\n resolve(response);\n }\n }));\n });\n }\n}\nexports.HttpClient = HttpClient;\nconst lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.checkBypass = exports.getProxyUrl = void 0;\nfunction getProxyUrl(reqUrl) {\n const 
usingSsl = reqUrl.protocol === 'https:';\n if (checkBypass(reqUrl)) {\n return undefined;\n }\n const proxyVar = (() => {\n if (usingSsl) {\n return process.env['https_proxy'] || process.env['HTTPS_PROXY'];\n }\n else {\n return process.env['http_proxy'] || process.env['HTTP_PROXY'];\n }\n })();\n if (proxyVar) {\n return new URL(proxyVar);\n }\n else {\n return undefined;\n }\n}\nexports.getProxyUrl = getProxyUrl;\nfunction checkBypass(reqUrl) {\n if (!reqUrl.hostname) {\n return false;\n }\n const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';\n if (!noProxy) {\n return false;\n }\n // Determine the request port\n let reqPort;\n if (reqUrl.port) {\n reqPort = Number(reqUrl.port);\n }\n else if (reqUrl.protocol === 'http:') {\n reqPort = 80;\n }\n else if (reqUrl.protocol === 'https:') {\n reqPort = 443;\n }\n // Format the request hostname and hostname with port\n const upperReqHosts = [reqUrl.hostname.toUpperCase()];\n if (typeof reqPort === 'number') {\n upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);\n }\n // Compare request host against noproxy\n for (const upperNoProxyItem of noProxy\n .split(',')\n .map(x => x.trim().toUpperCase())\n .filter(x => x)) {\n if (upperReqHosts.some(x => x === upperNoProxyItem)) {\n return true;\n }\n }\n return false;\n}\nexports.checkBypass = checkBypass;\n//# sourceMappingURL=proxy.js.map","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nconst REGEX_IS_INSTALLATION_LEGACY = /^v1\\./;\nconst REGEX_IS_INSTALLATION = /^ghs_/;\nconst REGEX_IS_USER_TO_SERVER = /^ghu_/;\nasync function auth(token) {\n const isApp = token.split(/\\./).length === 3;\n const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token);\n const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token);\n const tokenType = isApp ? \"app\" : isInstallation ? \"installation\" : isUserToServer ? 
\"user-to-server\" : \"oauth\";\n return {\n type: \"token\",\n token: token,\n tokenType\n };\n}\n\n/**\n * Prefix token for usage in the Authorization header\n *\n * @param token OAuth token or JSON Web Token\n */\nfunction withAuthorizationPrefix(token) {\n if (token.split(/\\./).length === 3) {\n return `bearer ${token}`;\n }\n\n return `token ${token}`;\n}\n\nasync function hook(token, request, route, parameters) {\n const endpoint = request.endpoint.merge(route, parameters);\n endpoint.headers.authorization = withAuthorizationPrefix(token);\n return request(endpoint);\n}\n\nconst createTokenAuth = function createTokenAuth(token) {\n if (!token) {\n throw new Error(\"[@octokit/auth-token] No token passed to createTokenAuth\");\n }\n\n if (typeof token !== \"string\") {\n throw new Error(\"[@octokit/auth-token] Token passed to createTokenAuth is not a string\");\n }\n\n token = token.replace(/^(token|bearer) +/i, \"\");\n return Object.assign(auth.bind(null, token), {\n hook: hook.bind(null, token)\n });\n};\n\nexports.createTokenAuth = createTokenAuth;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar universalUserAgent = require('universal-user-agent');\nvar beforeAfterHook = require('before-after-hook');\nvar request = require('@octokit/request');\nvar graphql = require('@octokit/graphql');\nvar authToken = require('@octokit/auth-token');\n\nfunction _objectWithoutPropertiesLoose(source, excluded) {\n if (source == null) return {};\n var target = {};\n var sourceKeys = Object.keys(source);\n var key, i;\n\n for (i = 0; i < sourceKeys.length; i++) {\n key = sourceKeys[i];\n if (excluded.indexOf(key) >= 0) continue;\n target[key] = source[key];\n }\n\n return target;\n}\n\nfunction _objectWithoutProperties(source, excluded) {\n if (source == null) return {};\n\n var target = _objectWithoutPropertiesLoose(source, excluded);\n\n var key, i;\n\n if (Object.getOwnPropertySymbols) {\n var sourceSymbolKeys = Object.getOwnPropertySymbols(source);\n\n for (i = 0; i < sourceSymbolKeys.length; i++) {\n key = sourceSymbolKeys[i];\n if (excluded.indexOf(key) >= 0) continue;\n if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue;\n target[key] = source[key];\n }\n }\n\n return target;\n}\n\nconst VERSION = \"3.6.0\";\n\nconst _excluded = [\"authStrategy\"];\nclass Octokit {\n constructor(options = {}) {\n const hook = new beforeAfterHook.Collection();\n const requestDefaults = {\n baseUrl: request.request.endpoint.DEFAULTS.baseUrl,\n headers: {},\n request: Object.assign({}, options.request, {\n // @ts-ignore internal usage only, no need to type\n hook: hook.bind(null, \"request\")\n }),\n mediaType: {\n previews: [],\n format: \"\"\n }\n }; // prepend default user agent with `options.userAgent` if set\n\n requestDefaults.headers[\"user-agent\"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(\" \");\n\n if (options.baseUrl) {\n requestDefaults.baseUrl = options.baseUrl;\n }\n\n if (options.previews) {\n requestDefaults.mediaType.previews = options.previews;\n }\n\n if (options.timeZone) {\n requestDefaults.headers[\"time-zone\"] = options.timeZone;\n }\n\n this.request = request.request.defaults(requestDefaults);\n this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults);\n this.log = Object.assign({\n debug: () => {},\n info: () => {},\n warn: console.warn.bind(console),\n error: console.error.bind(console)\n }, 
options.log);\n this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance\n // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered.\n // (2) If only `options.auth` is set, use the default token authentication strategy.\n // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance.\n // TODO: type `options.auth` based on `options.authStrategy`.\n\n if (!options.authStrategy) {\n if (!options.auth) {\n // (1)\n this.auth = async () => ({\n type: \"unauthenticated\"\n });\n } else {\n // (2)\n const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\\_(ツ)_/¯\n\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n }\n } else {\n const {\n authStrategy\n } = options,\n otherOptions = _objectWithoutProperties(options, _excluded);\n\n const auth = authStrategy(Object.assign({\n request: this.request,\n log: this.log,\n // we pass the current octokit instance as well as its constructor options\n // to allow for authentication strategies that return a new octokit instance\n // that shares the same internal state as the current one. The original\n // requirement for this was the \"event-octokit\" authentication strategy\n // of https://github.com/probot/octokit-auth-probot.\n octokit: this,\n octokitOptions: otherOptions\n }, options.auth)); // @ts-ignore ¯\\_(ツ)_/¯\n\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n } // apply plugins\n // https://stackoverflow.com/a/16345172\n\n\n const classConstructor = this.constructor;\n classConstructor.plugins.forEach(plugin => {\n Object.assign(this, plugin(this, options));\n });\n }\n\n static defaults(defaults) {\n const OctokitWithDefaults = class extends this {\n constructor(...args) {\n const options = args[0] || {};\n\n if (typeof defaults === \"function\") {\n super(defaults(options));\n return;\n }\n\n super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? 
{\n userAgent: `${options.userAgent} ${defaults.userAgent}`\n } : null));\n }\n\n };\n return OctokitWithDefaults;\n }\n /**\n * Attach a plugin (or many) to your Octokit instance.\n *\n * @example\n * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)\n */\n\n\n static plugin(...newPlugins) {\n var _a;\n\n const currentPlugins = this.plugins;\n const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a);\n return NewOctokit;\n }\n\n}\nOctokit.VERSION = VERSION;\nOctokit.plugins = [];\n\nexports.Octokit = Octokit;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar isPlainObject = require('is-plain-object');\nvar universalUserAgent = require('universal-user-agent');\n\nfunction lowercaseKeys(object) {\n if (!object) {\n return {};\n }\n\n return Object.keys(object).reduce((newObj, key) => {\n newObj[key.toLowerCase()] = object[key];\n return newObj;\n }, {});\n}\n\nfunction mergeDeep(defaults, options) {\n const result = Object.assign({}, defaults);\n Object.keys(options).forEach(key => {\n if (isPlainObject.isPlainObject(options[key])) {\n if (!(key in defaults)) Object.assign(result, {\n [key]: options[key]\n });else result[key] = mergeDeep(defaults[key], options[key]);\n } else {\n Object.assign(result, {\n [key]: options[key]\n });\n }\n });\n return result;\n}\n\nfunction removeUndefinedProperties(obj) {\n for (const key in obj) {\n if (obj[key] === undefined) {\n delete obj[key];\n }\n }\n\n return obj;\n}\n\nfunction merge(defaults, route, options) {\n if (typeof route === \"string\") {\n let [method, url] = route.split(\" \");\n options = Object.assign(url ? {\n method,\n url\n } : {\n url: method\n }, options);\n } else {\n options = Object.assign({}, route);\n } // lowercase header names before merging with defaults to avoid duplicates\n\n\n options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging\n\n removeUndefinedProperties(options);\n removeUndefinedProperties(options.headers);\n const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten\n\n if (defaults && defaults.mediaType.previews.length) {\n mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews);\n }\n\n mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, \"\"));\n return mergedOptions;\n}\n\nfunction addQueryParameters(url, parameters) {\n const separator = /\\?/.test(url) ? 
\"&\" : \"?\";\n const names = Object.keys(parameters);\n\n if (names.length === 0) {\n return url;\n }\n\n return url + separator + names.map(name => {\n if (name === \"q\") {\n return \"q=\" + parameters.q.split(\"+\").map(encodeURIComponent).join(\"+\");\n }\n\n return `${name}=${encodeURIComponent(parameters[name])}`;\n }).join(\"&\");\n}\n\nconst urlVariableRegex = /\\{[^}]+\\}/g;\n\nfunction removeNonChars(variableName) {\n return variableName.replace(/^\\W+|\\W+$/g, \"\").split(/,/);\n}\n\nfunction extractUrlVariableNames(url) {\n const matches = url.match(urlVariableRegex);\n\n if (!matches) {\n return [];\n }\n\n return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []);\n}\n\nfunction omit(object, keysToOmit) {\n return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => {\n obj[key] = object[key];\n return obj;\n }, {});\n}\n\n// Based on https://github.com/bramstein/url-template, licensed under BSD\n// TODO: create separate package.\n//\n// Copyright (c) 2012-2014, Bram Stein\n// All rights reserved.\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions\n// are met:\n// 1. Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n// 2. Redistributions in binary form must reproduce the above copyright\n// notice, this list of conditions and the following disclaimer in the\n// documentation and/or other materials provided with the distribution.\n// 3. The name of the author may not be used to endorse or promote products\n// derived from this software without specific prior written permission.\n// THIS SOFTWARE IS PROVIDED BY THE AUTHOR \"AS IS\" AND ANY EXPRESS OR IMPLIED\n// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF\n// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO\n// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,\n// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,\n// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY\n// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\n// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,\n// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n/* istanbul ignore file */\nfunction encodeReserved(str) {\n return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) {\n if (!/%[0-9A-Fa-f]/.test(part)) {\n part = encodeURI(part).replace(/%5B/g, \"[\").replace(/%5D/g, \"]\");\n }\n\n return part;\n }).join(\"\");\n}\n\nfunction encodeUnreserved(str) {\n return encodeURIComponent(str).replace(/[!'()*]/g, function (c) {\n return \"%\" + c.charCodeAt(0).toString(16).toUpperCase();\n });\n}\n\nfunction encodeValue(operator, value, key) {\n value = operator === \"+\" || operator === \"#\" ? 
encodeReserved(value) : encodeUnreserved(value);\n\n if (key) {\n return encodeUnreserved(key) + \"=\" + value;\n } else {\n return value;\n }\n}\n\nfunction isDefined(value) {\n return value !== undefined && value !== null;\n}\n\nfunction isKeyOperator(operator) {\n return operator === \";\" || operator === \"&\" || operator === \"?\";\n}\n\nfunction getValues(context, operator, key, modifier) {\n var value = context[key],\n result = [];\n\n if (isDefined(value) && value !== \"\") {\n if (typeof value === \"string\" || typeof value === \"number\" || typeof value === \"boolean\") {\n value = value.toString();\n\n if (modifier && modifier !== \"*\") {\n value = value.substring(0, parseInt(modifier, 10));\n }\n\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : \"\"));\n } else {\n if (modifier === \"*\") {\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : \"\"));\n });\n } else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n result.push(encodeValue(operator, value[k], k));\n }\n });\n }\n } else {\n const tmp = [];\n\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n tmp.push(encodeValue(operator, value));\n });\n } else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n tmp.push(encodeUnreserved(k));\n tmp.push(encodeValue(operator, value[k].toString()));\n }\n });\n }\n\n if (isKeyOperator(operator)) {\n result.push(encodeUnreserved(key) + \"=\" + tmp.join(\",\"));\n } else if (tmp.length !== 0) {\n result.push(tmp.join(\",\"));\n }\n }\n }\n } else {\n if (operator === \";\") {\n if (isDefined(value)) {\n result.push(encodeUnreserved(key));\n }\n } else if (value === \"\" && (operator === \"&\" || operator === \"?\")) {\n result.push(encodeUnreserved(key) + \"=\");\n } else if (value === \"\") {\n result.push(\"\");\n }\n }\n\n return result;\n}\n\nfunction parseUrl(template) {\n return {\n expand: expand.bind(null, template)\n };\n}\n\nfunction expand(template, context) {\n var operators = [\"+\", \"#\", \".\", \"/\", \";\", \"?\", \"&\"];\n return template.replace(/\\{([^\\{\\}]+)\\}|([^\\{\\}]+)/g, function (_, expression, literal) {\n if (expression) {\n let operator = \"\";\n const values = [];\n\n if (operators.indexOf(expression.charAt(0)) !== -1) {\n operator = expression.charAt(0);\n expression = expression.substr(1);\n }\n\n expression.split(/,/g).forEach(function (variable) {\n var tmp = /([^:\\*]*)(?::(\\d+)|(\\*))?/.exec(variable);\n values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));\n });\n\n if (operator && operator !== \"+\") {\n var separator = \",\";\n\n if (operator === \"?\") {\n separator = \"&\";\n } else if (operator !== \"#\") {\n separator = operator;\n }\n\n return (values.length !== 0 ? 
operator : \"\") + values.join(separator);\n } else {\n return values.join(\",\");\n }\n } else {\n return encodeReserved(literal);\n }\n });\n}\n\nfunction parse(options) {\n // https://fetch.spec.whatwg.org/#methods\n let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible\n\n let url = (options.url || \"/\").replace(/:([a-z]\\w+)/g, \"{$1}\");\n let headers = Object.assign({}, options.headers);\n let body;\n let parameters = omit(options, [\"method\", \"baseUrl\", \"url\", \"headers\", \"request\", \"mediaType\"]); // extract variable names from URL to calculate remaining variables later\n\n const urlVariableNames = extractUrlVariableNames(url);\n url = parseUrl(url).expand(parameters);\n\n if (!/^http/.test(url)) {\n url = options.baseUrl + url;\n }\n\n const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat(\"baseUrl\");\n const remainingParameters = omit(parameters, omittedParameters);\n const isBinaryRequest = /application\\/octet-stream/i.test(headers.accept);\n\n if (!isBinaryRequest) {\n if (options.mediaType.format) {\n // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw\n headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\\/vnd(\\.\\w+)(\\.v3)?(\\.\\w+)?(\\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(\",\");\n }\n\n if (options.mediaType.previews.length) {\n const previewsFromAcceptHeader = headers.accept.match(/[\\w-]+(?=-preview)/g) || [];\n headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => {\n const format = options.mediaType.format ? `.${options.mediaType.format}` : \"+json\";\n return `application/vnd.github.${preview}-preview${format}`;\n }).join(\",\");\n }\n } // for GET/HEAD requests, set URL query parameters from remaining parameters\n // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters\n\n\n if ([\"GET\", \"HEAD\"].includes(method)) {\n url = addQueryParameters(url, remainingParameters);\n } else {\n if (\"data\" in remainingParameters) {\n body = remainingParameters.data;\n } else {\n if (Object.keys(remainingParameters).length) {\n body = remainingParameters;\n } else {\n headers[\"content-length\"] = 0;\n }\n }\n } // default content-type for JSON if body is set\n\n\n if (!headers[\"content-type\"] && typeof body !== \"undefined\") {\n headers[\"content-type\"] = \"application/json; charset=utf-8\";\n } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.\n // fetch does not allow to set `content-length` header, but we can set body to an empty string\n\n\n if ([\"PATCH\", \"PUT\"].includes(method) && typeof body === \"undefined\") {\n body = \"\";\n } // Only return body/request keys if present\n\n\n return Object.assign({\n method,\n url,\n headers\n }, typeof body !== \"undefined\" ? {\n body\n } : null, options.request ? 
{\n request: options.request\n } : null);\n}\n\nfunction endpointWithDefaults(defaults, route, options) {\n return parse(merge(defaults, route, options));\n}\n\nfunction withDefaults(oldDefaults, newDefaults) {\n const DEFAULTS = merge(oldDefaults, newDefaults);\n const endpoint = endpointWithDefaults.bind(null, DEFAULTS);\n return Object.assign(endpoint, {\n DEFAULTS,\n defaults: withDefaults.bind(null, DEFAULTS),\n merge: merge.bind(null, DEFAULTS),\n parse\n });\n}\n\nconst VERSION = \"6.0.12\";\n\nconst userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url.\n// So we use RequestParameters and add method as additional required property.\n\nconst DEFAULTS = {\n method: \"GET\",\n baseUrl: \"https://api.github.com\",\n headers: {\n accept: \"application/vnd.github.v3+json\",\n \"user-agent\": userAgent\n },\n mediaType: {\n format: \"\",\n previews: []\n }\n};\n\nconst endpoint = withDefaults(null, DEFAULTS);\n\nexports.endpoint = endpoint;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar request = require('@octokit/request');\nvar universalUserAgent = require('universal-user-agent');\n\nconst VERSION = \"4.8.0\";\n\nfunction _buildMessageForResponseErrors(data) {\n return `Request failed due to following response errors:\\n` + data.errors.map(e => ` - ${e.message}`).join(\"\\n\");\n}\n\nclass GraphqlResponseError extends Error {\n constructor(request, headers, response) {\n super(_buildMessageForResponseErrors(response));\n this.request = request;\n this.headers = headers;\n this.response = response;\n this.name = \"GraphqlResponseError\"; // Expose the errors and response data in their shorthand properties.\n\n this.errors = response.errors;\n this.data = response.data; // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n }\n\n}\n\nconst NON_VARIABLE_OPTIONS = [\"method\", \"baseUrl\", \"url\", \"headers\", \"request\", \"query\", \"mediaType\"];\nconst FORBIDDEN_VARIABLE_OPTIONS = [\"query\", \"method\", \"url\"];\nconst GHES_V3_SUFFIX_REGEX = /\\/api\\/v3\\/?$/;\nfunction graphql(request, query, options) {\n if (options) {\n if (typeof query === \"string\" && \"query\" in options) {\n return Promise.reject(new Error(`[@octokit/graphql] \"query\" cannot be used as variable name`));\n }\n\n for (const key in options) {\n if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) continue;\n return Promise.reject(new Error(`[@octokit/graphql] \"${key}\" cannot be used as variable name`));\n }\n }\n\n const parsedOptions = typeof query === \"string\" ? 
Object.assign({\n query\n }, options) : query;\n const requestOptions = Object.keys(parsedOptions).reduce((result, key) => {\n if (NON_VARIABLE_OPTIONS.includes(key)) {\n result[key] = parsedOptions[key];\n return result;\n }\n\n if (!result.variables) {\n result.variables = {};\n }\n\n result.variables[key] = parsedOptions[key];\n return result;\n }, {}); // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix\n // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451\n\n const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl;\n\n if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {\n requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, \"/api/graphql\");\n }\n\n return request(requestOptions).then(response => {\n if (response.data.errors) {\n const headers = {};\n\n for (const key of Object.keys(response.headers)) {\n headers[key] = response.headers[key];\n }\n\n throw new GraphqlResponseError(requestOptions, headers, response.data);\n }\n\n return response.data.data;\n });\n}\n\nfunction withDefaults(request$1, newDefaults) {\n const newRequest = request$1.defaults(newDefaults);\n\n const newApi = (query, options) => {\n return graphql(newRequest, query, options);\n };\n\n return Object.assign(newApi, {\n defaults: withDefaults.bind(null, newRequest),\n endpoint: request.request.endpoint\n });\n}\n\nconst graphql$1 = withDefaults(request.request, {\n headers: {\n \"user-agent\": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}`\n },\n method: \"POST\",\n url: \"/graphql\"\n});\nfunction withCustomRequest(customRequest) {\n return withDefaults(customRequest, {\n method: \"POST\",\n url: \"/graphql\"\n });\n}\n\nexports.GraphqlResponseError = GraphqlResponseError;\nexports.graphql = graphql$1;\nexports.withCustomRequest = withCustomRequest;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nconst VERSION = \"2.21.3\";\n\nfunction ownKeys(object, enumerableOnly) {\n var keys = Object.keys(object);\n\n if (Object.getOwnPropertySymbols) {\n var symbols = Object.getOwnPropertySymbols(object);\n enumerableOnly && (symbols = symbols.filter(function (sym) {\n return Object.getOwnPropertyDescriptor(object, sym).enumerable;\n })), keys.push.apply(keys, symbols);\n }\n\n return keys;\n}\n\nfunction _objectSpread2(target) {\n for (var i = 1; i < arguments.length; i++) {\n var source = null != arguments[i] ? arguments[i] : {};\n i % 2 ? ownKeys(Object(source), !0).forEach(function (key) {\n _defineProperty(target, key, source[key]);\n }) : Object.getOwnPropertyDescriptors ? 
Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) {\n Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));\n });\n }\n\n return target;\n}\n\nfunction _defineProperty(obj, key, value) {\n if (key in obj) {\n Object.defineProperty(obj, key, {\n value: value,\n enumerable: true,\n configurable: true,\n writable: true\n });\n } else {\n obj[key] = value;\n }\n\n return obj;\n}\n\n/**\n * Some “list” response that can be paginated have a different response structure\n *\n * They have a `total_count` key in the response (search also has `incomplete_results`,\n * /installation/repositories also has `repository_selection`), as well as a key with\n * the list of the items which name varies from endpoint to endpoint.\n *\n * Octokit normalizes these responses so that paginated results are always returned following\n * the same structure. One challenge is that if the list response has only one page, no Link\n * header is provided, so this header alone is not sufficient to check wether a response is\n * paginated or not.\n *\n * We check if a \"total_count\" key is present in the response data, but also make sure that\n * a \"url\" property is not, as the \"Get the combined status for a specific ref\" endpoint would\n * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref\n */\nfunction normalizePaginatedListResponse(response) {\n // endpoints can respond with 204 if repository is empty\n if (!response.data) {\n return _objectSpread2(_objectSpread2({}, response), {}, {\n data: []\n });\n }\n\n const responseNeedsNormalization = \"total_count\" in response.data && !(\"url\" in response.data);\n if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way\n // to retrieve the same information.\n\n const incompleteResults = response.data.incomplete_results;\n const repositorySelection = response.data.repository_selection;\n const totalCount = response.data.total_count;\n delete response.data.incomplete_results;\n delete response.data.repository_selection;\n delete response.data.total_count;\n const namespaceKey = Object.keys(response.data)[0];\n const data = response.data[namespaceKey];\n response.data = data;\n\n if (typeof incompleteResults !== \"undefined\") {\n response.data.incomplete_results = incompleteResults;\n }\n\n if (typeof repositorySelection !== \"undefined\") {\n response.data.repository_selection = repositorySelection;\n }\n\n response.data.total_count = totalCount;\n return response;\n}\n\nfunction iterator(octokit, route, parameters) {\n const options = typeof route === \"function\" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);\n const requestMethod = typeof route === \"function\" ? 
route : octokit.request;\n const method = options.method;\n const headers = options.headers;\n let url = options.url;\n return {\n [Symbol.asyncIterator]: () => ({\n async next() {\n if (!url) return {\n done: true\n };\n\n try {\n const response = await requestMethod({\n method,\n url,\n headers\n });\n const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format:\n // '; rel=\"next\", ; rel=\"last\"'\n // sets `url` to undefined if \"next\" URL is not present or `link` header is not set\n\n url = ((normalizedResponse.headers.link || \"\").match(/<([^>]+)>;\\s*rel=\"next\"/) || [])[1];\n return {\n value: normalizedResponse\n };\n } catch (error) {\n if (error.status !== 409) throw error;\n url = \"\";\n return {\n value: {\n status: 200,\n headers: {},\n data: []\n }\n };\n }\n }\n\n })\n };\n}\n\nfunction paginate(octokit, route, parameters, mapFn) {\n if (typeof parameters === \"function\") {\n mapFn = parameters;\n parameters = undefined;\n }\n\n return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn);\n}\n\nfunction gather(octokit, results, iterator, mapFn) {\n return iterator.next().then(result => {\n if (result.done) {\n return results;\n }\n\n let earlyExit = false;\n\n function done() {\n earlyExit = true;\n }\n\n results = results.concat(mapFn ? mapFn(result.value, done) : result.value.data);\n\n if (earlyExit) {\n return results;\n }\n\n return gather(octokit, results, iterator, mapFn);\n });\n}\n\nconst composePaginateRest = Object.assign(paginate, {\n iterator\n});\n\nconst paginatingEndpoints = [\"GET /app/hook/deliveries\", \"GET /app/installations\", \"GET /applications/grants\", \"GET /authorizations\", \"GET /enterprises/{enterprise}/actions/permissions/organizations\", \"GET /enterprises/{enterprise}/actions/runner-groups\", \"GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations\", \"GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners\", \"GET /enterprises/{enterprise}/actions/runners\", \"GET /enterprises/{enterprise}/audit-log\", \"GET /enterprises/{enterprise}/secret-scanning/alerts\", \"GET /enterprises/{enterprise}/settings/billing/advanced-security\", \"GET /events\", \"GET /gists\", \"GET /gists/public\", \"GET /gists/starred\", \"GET /gists/{gist_id}/comments\", \"GET /gists/{gist_id}/commits\", \"GET /gists/{gist_id}/forks\", \"GET /installation/repositories\", \"GET /issues\", \"GET /licenses\", \"GET /marketplace_listing/plans\", \"GET /marketplace_listing/plans/{plan_id}/accounts\", \"GET /marketplace_listing/stubbed/plans\", \"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\", \"GET /networks/{owner}/{repo}/events\", \"GET /notifications\", \"GET /organizations\", \"GET /orgs/{org}/actions/cache/usage-by-repository\", \"GET /orgs/{org}/actions/permissions/repositories\", \"GET /orgs/{org}/actions/runner-groups\", \"GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories\", \"GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners\", \"GET /orgs/{org}/actions/runners\", \"GET /orgs/{org}/actions/secrets\", \"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\", \"GET /orgs/{org}/audit-log\", \"GET /orgs/{org}/blocks\", \"GET /orgs/{org}/code-scanning/alerts\", \"GET /orgs/{org}/codespaces\", \"GET /orgs/{org}/credential-authorizations\", \"GET /orgs/{org}/dependabot/secrets\", \"GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories\", \"GET /orgs/{org}/events\", \"GET 
/orgs/{org}/external-groups\", \"GET /orgs/{org}/failed_invitations\", \"GET /orgs/{org}/hooks\", \"GET /orgs/{org}/hooks/{hook_id}/deliveries\", \"GET /orgs/{org}/installations\", \"GET /orgs/{org}/invitations\", \"GET /orgs/{org}/invitations/{invitation_id}/teams\", \"GET /orgs/{org}/issues\", \"GET /orgs/{org}/members\", \"GET /orgs/{org}/migrations\", \"GET /orgs/{org}/migrations/{migration_id}/repositories\", \"GET /orgs/{org}/outside_collaborators\", \"GET /orgs/{org}/packages\", \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\", \"GET /orgs/{org}/projects\", \"GET /orgs/{org}/public_members\", \"GET /orgs/{org}/repos\", \"GET /orgs/{org}/secret-scanning/alerts\", \"GET /orgs/{org}/settings/billing/advanced-security\", \"GET /orgs/{org}/team-sync/groups\", \"GET /orgs/{org}/teams\", \"GET /orgs/{org}/teams/{team_slug}/discussions\", \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\", \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\", \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\", \"GET /orgs/{org}/teams/{team_slug}/invitations\", \"GET /orgs/{org}/teams/{team_slug}/members\", \"GET /orgs/{org}/teams/{team_slug}/projects\", \"GET /orgs/{org}/teams/{team_slug}/repos\", \"GET /orgs/{org}/teams/{team_slug}/teams\", \"GET /projects/columns/{column_id}/cards\", \"GET /projects/{project_id}/collaborators\", \"GET /projects/{project_id}/columns\", \"GET /repos/{owner}/{repo}/actions/artifacts\", \"GET /repos/{owner}/{repo}/actions/caches\", \"GET /repos/{owner}/{repo}/actions/runners\", \"GET /repos/{owner}/{repo}/actions/runs\", \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\", \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs\", \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\", \"GET /repos/{owner}/{repo}/actions/secrets\", \"GET /repos/{owner}/{repo}/actions/workflows\", \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\", \"GET /repos/{owner}/{repo}/assignees\", \"GET /repos/{owner}/{repo}/branches\", \"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\", \"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\", \"GET /repos/{owner}/{repo}/code-scanning/alerts\", \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\", \"GET /repos/{owner}/{repo}/code-scanning/analyses\", \"GET /repos/{owner}/{repo}/codespaces\", \"GET /repos/{owner}/{repo}/codespaces/devcontainers\", \"GET /repos/{owner}/{repo}/codespaces/secrets\", \"GET /repos/{owner}/{repo}/collaborators\", \"GET /repos/{owner}/{repo}/comments\", \"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\", \"GET /repos/{owner}/{repo}/commits\", \"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\", \"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\", \"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\", \"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\", \"GET /repos/{owner}/{repo}/commits/{ref}/status\", \"GET /repos/{owner}/{repo}/commits/{ref}/statuses\", \"GET /repos/{owner}/{repo}/contributors\", \"GET /repos/{owner}/{repo}/dependabot/secrets\", \"GET /repos/{owner}/{repo}/deployments\", \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\", \"GET /repos/{owner}/{repo}/environments\", \"GET /repos/{owner}/{repo}/events\", \"GET /repos/{owner}/{repo}/forks\", \"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\", \"GET /repos/{owner}/{repo}/hooks\", 
\"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries\", \"GET /repos/{owner}/{repo}/invitations\", \"GET /repos/{owner}/{repo}/issues\", \"GET /repos/{owner}/{repo}/issues/comments\", \"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\", \"GET /repos/{owner}/{repo}/issues/events\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/events\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\", \"GET /repos/{owner}/{repo}/keys\", \"GET /repos/{owner}/{repo}/labels\", \"GET /repos/{owner}/{repo}/milestones\", \"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\", \"GET /repos/{owner}/{repo}/notifications\", \"GET /repos/{owner}/{repo}/pages/builds\", \"GET /repos/{owner}/{repo}/projects\", \"GET /repos/{owner}/{repo}/pulls\", \"GET /repos/{owner}/{repo}/pulls/comments\", \"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\", \"GET /repos/{owner}/{repo}/releases\", \"GET /repos/{owner}/{repo}/releases/{release_id}/assets\", \"GET /repos/{owner}/{repo}/releases/{release_id}/reactions\", \"GET /repos/{owner}/{repo}/secret-scanning/alerts\", \"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations\", \"GET /repos/{owner}/{repo}/stargazers\", \"GET /repos/{owner}/{repo}/subscribers\", \"GET /repos/{owner}/{repo}/tags\", \"GET /repos/{owner}/{repo}/teams\", \"GET /repos/{owner}/{repo}/topics\", \"GET /repositories\", \"GET /repositories/{repository_id}/environments/{environment_name}/secrets\", \"GET /search/code\", \"GET /search/commits\", \"GET /search/issues\", \"GET /search/labels\", \"GET /search/repositories\", \"GET /search/topics\", \"GET /search/users\", \"GET /teams/{team_id}/discussions\", \"GET /teams/{team_id}/discussions/{discussion_number}/comments\", \"GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions\", \"GET /teams/{team_id}/discussions/{discussion_number}/reactions\", \"GET /teams/{team_id}/invitations\", \"GET /teams/{team_id}/members\", \"GET /teams/{team_id}/projects\", \"GET /teams/{team_id}/repos\", \"GET /teams/{team_id}/teams\", \"GET /user/blocks\", \"GET /user/codespaces\", \"GET /user/codespaces/secrets\", \"GET /user/emails\", \"GET /user/followers\", \"GET /user/following\", \"GET /user/gpg_keys\", \"GET /user/installations\", \"GET /user/installations/{installation_id}/repositories\", \"GET /user/issues\", \"GET /user/keys\", \"GET /user/marketplace_purchases\", \"GET /user/marketplace_purchases/stubbed\", \"GET /user/memberships/orgs\", \"GET /user/migrations\", \"GET /user/migrations/{migration_id}/repositories\", \"GET /user/orgs\", \"GET /user/packages\", \"GET /user/packages/{package_type}/{package_name}/versions\", \"GET /user/public_emails\", \"GET /user/repos\", \"GET /user/repository_invitations\", \"GET /user/starred\", \"GET /user/subscriptions\", \"GET /user/teams\", \"GET /users\", \"GET /users/{username}/events\", \"GET /users/{username}/events/orgs/{org}\", \"GET 
/users/{username}/events/public\", \"GET /users/{username}/followers\", \"GET /users/{username}/following\", \"GET /users/{username}/gists\", \"GET /users/{username}/gpg_keys\", \"GET /users/{username}/keys\", \"GET /users/{username}/orgs\", \"GET /users/{username}/packages\", \"GET /users/{username}/projects\", \"GET /users/{username}/received_events\", \"GET /users/{username}/received_events/public\", \"GET /users/{username}/repos\", \"GET /users/{username}/starred\", \"GET /users/{username}/subscriptions\"];\n\nfunction isPaginatingEndpoint(arg) {\n if (typeof arg === \"string\") {\n return paginatingEndpoints.includes(arg);\n } else {\n return false;\n }\n}\n\n/**\n * @param octokit Octokit instance\n * @param options Options passed to Octokit constructor\n */\n\nfunction paginateRest(octokit) {\n return {\n paginate: Object.assign(paginate.bind(null, octokit), {\n iterator: iterator.bind(null, octokit)\n })\n };\n}\npaginateRest.VERSION = VERSION;\n\nexports.composePaginateRest = composePaginateRest;\nexports.isPaginatingEndpoint = isPaginatingEndpoint;\nexports.paginateRest = paginateRest;\nexports.paginatingEndpoints = paginatingEndpoints;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction ownKeys(object, enumerableOnly) {\n var keys = Object.keys(object);\n\n if (Object.getOwnPropertySymbols) {\n var symbols = Object.getOwnPropertySymbols(object);\n\n if (enumerableOnly) {\n symbols = symbols.filter(function (sym) {\n return Object.getOwnPropertyDescriptor(object, sym).enumerable;\n });\n }\n\n keys.push.apply(keys, symbols);\n }\n\n return keys;\n}\n\nfunction _objectSpread2(target) {\n for (var i = 1; i < arguments.length; i++) {\n var source = arguments[i] != null ? 
arguments[i] : {};\n\n if (i % 2) {\n ownKeys(Object(source), true).forEach(function (key) {\n _defineProperty(target, key, source[key]);\n });\n } else if (Object.getOwnPropertyDescriptors) {\n Object.defineProperties(target, Object.getOwnPropertyDescriptors(source));\n } else {\n ownKeys(Object(source)).forEach(function (key) {\n Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));\n });\n }\n }\n\n return target;\n}\n\nfunction _defineProperty(obj, key, value) {\n if (key in obj) {\n Object.defineProperty(obj, key, {\n value: value,\n enumerable: true,\n configurable: true,\n writable: true\n });\n } else {\n obj[key] = value;\n }\n\n return obj;\n}\n\nconst Endpoints = {\n actions: {\n addCustomLabelsToSelfHostedRunnerForOrg: [\"POST /orgs/{org}/actions/runners/{runner_id}/labels\"],\n addCustomLabelsToSelfHostedRunnerForRepo: [\"POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"],\n addSelectedRepoToOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"],\n approveWorkflowRun: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve\"],\n cancelWorkflowRun: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel\"],\n createOrUpdateEnvironmentSecret: [\"PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"],\n createOrUpdateOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}\"],\n createOrUpdateRepoSecret: [\"PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n createRegistrationTokenForOrg: [\"POST /orgs/{org}/actions/runners/registration-token\"],\n createRegistrationTokenForRepo: [\"POST /repos/{owner}/{repo}/actions/runners/registration-token\"],\n createRemoveTokenForOrg: [\"POST /orgs/{org}/actions/runners/remove-token\"],\n createRemoveTokenForRepo: [\"POST /repos/{owner}/{repo}/actions/runners/remove-token\"],\n createWorkflowDispatch: [\"POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches\"],\n deleteActionsCacheById: [\"DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}\"],\n deleteActionsCacheByKey: [\"DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}\"],\n deleteArtifact: [\"DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n deleteEnvironmentSecret: [\"DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"],\n deleteOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}\"],\n deleteRepoSecret: [\"DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n deleteSelfHostedRunnerFromOrg: [\"DELETE /orgs/{org}/actions/runners/{runner_id}\"],\n deleteSelfHostedRunnerFromRepo: [\"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}\"],\n deleteWorkflowRun: [\"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n deleteWorkflowRunLogs: [\"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs\"],\n disableSelectedRepositoryGithubActionsOrganization: [\"DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}\"],\n disableWorkflow: [\"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable\"],\n downloadArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}\"],\n downloadJobLogsForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs\"],\n downloadWorkflowRunAttemptLogs: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs\"],\n downloadWorkflowRunLogs: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs\"],\n 
enableSelectedRepositoryGithubActionsOrganization: [\"PUT /orgs/{org}/actions/permissions/repositories/{repository_id}\"],\n enableWorkflow: [\"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable\"],\n getActionsCacheList: [\"GET /repos/{owner}/{repo}/actions/caches\"],\n getActionsCacheUsage: [\"GET /repos/{owner}/{repo}/actions/cache/usage\"],\n getActionsCacheUsageByRepoForOrg: [\"GET /orgs/{org}/actions/cache/usage-by-repository\"],\n getActionsCacheUsageForEnterprise: [\"GET /enterprises/{enterprise}/actions/cache/usage\"],\n getActionsCacheUsageForOrg: [\"GET /orgs/{org}/actions/cache/usage\"],\n getAllowedActionsOrganization: [\"GET /orgs/{org}/actions/permissions/selected-actions\"],\n getAllowedActionsRepository: [\"GET /repos/{owner}/{repo}/actions/permissions/selected-actions\"],\n getArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n getEnvironmentPublicKey: [\"GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key\"],\n getEnvironmentSecret: [\"GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"],\n getGithubActionsDefaultWorkflowPermissionsEnterprise: [\"GET /enterprises/{enterprise}/actions/permissions/workflow\"],\n getGithubActionsDefaultWorkflowPermissionsOrganization: [\"GET /orgs/{org}/actions/permissions/workflow\"],\n getGithubActionsDefaultWorkflowPermissionsRepository: [\"GET /repos/{owner}/{repo}/actions/permissions/workflow\"],\n getGithubActionsPermissionsOrganization: [\"GET /orgs/{org}/actions/permissions\"],\n getGithubActionsPermissionsRepository: [\"GET /repos/{owner}/{repo}/actions/permissions\"],\n getJobForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/actions/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}\"],\n getPendingDeploymentsForRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\"],\n getRepoPermissions: [\"GET /repos/{owner}/{repo}/actions/permissions\", {}, {\n renamed: [\"actions\", \"getGithubActionsPermissionsRepository\"]\n }],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/actions/secrets/public-key\"],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n getReviewsForRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals\"],\n getSelfHostedRunnerForOrg: [\"GET /orgs/{org}/actions/runners/{runner_id}\"],\n getSelfHostedRunnerForRepo: [\"GET /repos/{owner}/{repo}/actions/runners/{runner_id}\"],\n getWorkflow: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}\"],\n getWorkflowAccessToRepository: [\"GET /repos/{owner}/{repo}/actions/permissions/access\"],\n getWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n getWorkflowRunAttempt: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}\"],\n getWorkflowRunUsage: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing\"],\n getWorkflowUsage: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing\"],\n listArtifactsForRepo: [\"GET /repos/{owner}/{repo}/actions/artifacts\"],\n listEnvironmentSecrets: [\"GET /repositories/{repository_id}/environments/{environment_name}/secrets\"],\n listJobsForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\"],\n listJobsForWorkflowRunAttempt: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs\"],\n listLabelsForSelfHostedRunnerForOrg: [\"GET 
/orgs/{org}/actions/runners/{runner_id}/labels\"],\n listLabelsForSelfHostedRunnerForRepo: [\"GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"],\n listOrgSecrets: [\"GET /orgs/{org}/actions/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/actions/secrets\"],\n listRepoWorkflows: [\"GET /repos/{owner}/{repo}/actions/workflows\"],\n listRunnerApplicationsForOrg: [\"GET /orgs/{org}/actions/runners/downloads\"],\n listRunnerApplicationsForRepo: [\"GET /repos/{owner}/{repo}/actions/runners/downloads\"],\n listSelectedReposForOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\"],\n listSelectedRepositoriesEnabledGithubActionsOrganization: [\"GET /orgs/{org}/actions/permissions/repositories\"],\n listSelfHostedRunnersForOrg: [\"GET /orgs/{org}/actions/runners\"],\n listSelfHostedRunnersForRepo: [\"GET /repos/{owner}/{repo}/actions/runners\"],\n listWorkflowRunArtifacts: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\"],\n listWorkflowRuns: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\"],\n listWorkflowRunsForRepo: [\"GET /repos/{owner}/{repo}/actions/runs\"],\n reRunJobForWorkflowRun: [\"POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun\"],\n reRunWorkflow: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun\"],\n reRunWorkflowFailedJobs: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs\"],\n removeAllCustomLabelsFromSelfHostedRunnerForOrg: [\"DELETE /orgs/{org}/actions/runners/{runner_id}/labels\"],\n removeAllCustomLabelsFromSelfHostedRunnerForRepo: [\"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"],\n removeCustomLabelFromSelfHostedRunnerForOrg: [\"DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}\"],\n removeCustomLabelFromSelfHostedRunnerForRepo: [\"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}\"],\n removeSelectedRepoFromOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"],\n reviewPendingDeploymentsForRun: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\"],\n setAllowedActionsOrganization: [\"PUT /orgs/{org}/actions/permissions/selected-actions\"],\n setAllowedActionsRepository: [\"PUT /repos/{owner}/{repo}/actions/permissions/selected-actions\"],\n setCustomLabelsForSelfHostedRunnerForOrg: [\"PUT /orgs/{org}/actions/runners/{runner_id}/labels\"],\n setCustomLabelsForSelfHostedRunnerForRepo: [\"PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"],\n setGithubActionsDefaultWorkflowPermissionsEnterprise: [\"PUT /enterprises/{enterprise}/actions/permissions/workflow\"],\n setGithubActionsDefaultWorkflowPermissionsOrganization: [\"PUT /orgs/{org}/actions/permissions/workflow\"],\n setGithubActionsDefaultWorkflowPermissionsRepository: [\"PUT /repos/{owner}/{repo}/actions/permissions/workflow\"],\n setGithubActionsPermissionsOrganization: [\"PUT /orgs/{org}/actions/permissions\"],\n setGithubActionsPermissionsRepository: [\"PUT /repos/{owner}/{repo}/actions/permissions\"],\n setSelectedReposForOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories\"],\n setSelectedRepositoriesEnabledGithubActionsOrganization: [\"PUT /orgs/{org}/actions/permissions/repositories\"],\n setWorkflowAccessToRepository: [\"PUT /repos/{owner}/{repo}/actions/permissions/access\"]\n },\n activity: {\n checkRepoIsStarredByAuthenticatedUser: [\"GET /user/starred/{owner}/{repo}\"],\n deleteRepoSubscription: [\"DELETE /repos/{owner}/{repo}/subscription\"],\n 
deleteThreadSubscription: [\"DELETE /notifications/threads/{thread_id}/subscription\"],\n getFeeds: [\"GET /feeds\"],\n getRepoSubscription: [\"GET /repos/{owner}/{repo}/subscription\"],\n getThread: [\"GET /notifications/threads/{thread_id}\"],\n getThreadSubscriptionForAuthenticatedUser: [\"GET /notifications/threads/{thread_id}/subscription\"],\n listEventsForAuthenticatedUser: [\"GET /users/{username}/events\"],\n listNotificationsForAuthenticatedUser: [\"GET /notifications\"],\n listOrgEventsForAuthenticatedUser: [\"GET /users/{username}/events/orgs/{org}\"],\n listPublicEvents: [\"GET /events\"],\n listPublicEventsForRepoNetwork: [\"GET /networks/{owner}/{repo}/events\"],\n listPublicEventsForUser: [\"GET /users/{username}/events/public\"],\n listPublicOrgEvents: [\"GET /orgs/{org}/events\"],\n listReceivedEventsForUser: [\"GET /users/{username}/received_events\"],\n listReceivedPublicEventsForUser: [\"GET /users/{username}/received_events/public\"],\n listRepoEvents: [\"GET /repos/{owner}/{repo}/events\"],\n listRepoNotificationsForAuthenticatedUser: [\"GET /repos/{owner}/{repo}/notifications\"],\n listReposStarredByAuthenticatedUser: [\"GET /user/starred\"],\n listReposStarredByUser: [\"GET /users/{username}/starred\"],\n listReposWatchedByUser: [\"GET /users/{username}/subscriptions\"],\n listStargazersForRepo: [\"GET /repos/{owner}/{repo}/stargazers\"],\n listWatchedReposForAuthenticatedUser: [\"GET /user/subscriptions\"],\n listWatchersForRepo: [\"GET /repos/{owner}/{repo}/subscribers\"],\n markNotificationsAsRead: [\"PUT /notifications\"],\n markRepoNotificationsAsRead: [\"PUT /repos/{owner}/{repo}/notifications\"],\n markThreadAsRead: [\"PATCH /notifications/threads/{thread_id}\"],\n setRepoSubscription: [\"PUT /repos/{owner}/{repo}/subscription\"],\n setThreadSubscription: [\"PUT /notifications/threads/{thread_id}/subscription\"],\n starRepoForAuthenticatedUser: [\"PUT /user/starred/{owner}/{repo}\"],\n unstarRepoForAuthenticatedUser: [\"DELETE /user/starred/{owner}/{repo}\"]\n },\n apps: {\n addRepoToInstallation: [\"PUT /user/installations/{installation_id}/repositories/{repository_id}\", {}, {\n renamed: [\"apps\", \"addRepoToInstallationForAuthenticatedUser\"]\n }],\n addRepoToInstallationForAuthenticatedUser: [\"PUT /user/installations/{installation_id}/repositories/{repository_id}\"],\n checkToken: [\"POST /applications/{client_id}/token\"],\n createFromManifest: [\"POST /app-manifests/{code}/conversions\"],\n createInstallationAccessToken: [\"POST /app/installations/{installation_id}/access_tokens\"],\n deleteAuthorization: [\"DELETE /applications/{client_id}/grant\"],\n deleteInstallation: [\"DELETE /app/installations/{installation_id}\"],\n deleteToken: [\"DELETE /applications/{client_id}/token\"],\n getAuthenticated: [\"GET /app\"],\n getBySlug: [\"GET /apps/{app_slug}\"],\n getInstallation: [\"GET /app/installations/{installation_id}\"],\n getOrgInstallation: [\"GET /orgs/{org}/installation\"],\n getRepoInstallation: [\"GET /repos/{owner}/{repo}/installation\"],\n getSubscriptionPlanForAccount: [\"GET /marketplace_listing/accounts/{account_id}\"],\n getSubscriptionPlanForAccountStubbed: [\"GET /marketplace_listing/stubbed/accounts/{account_id}\"],\n getUserInstallation: [\"GET /users/{username}/installation\"],\n getWebhookConfigForApp: [\"GET /app/hook/config\"],\n getWebhookDelivery: [\"GET /app/hook/deliveries/{delivery_id}\"],\n listAccountsForPlan: [\"GET /marketplace_listing/plans/{plan_id}/accounts\"],\n listAccountsForPlanStubbed: [\"GET 
/marketplace_listing/stubbed/plans/{plan_id}/accounts\"],\n listInstallationReposForAuthenticatedUser: [\"GET /user/installations/{installation_id}/repositories\"],\n listInstallations: [\"GET /app/installations\"],\n listInstallationsForAuthenticatedUser: [\"GET /user/installations\"],\n listPlans: [\"GET /marketplace_listing/plans\"],\n listPlansStubbed: [\"GET /marketplace_listing/stubbed/plans\"],\n listReposAccessibleToInstallation: [\"GET /installation/repositories\"],\n listSubscriptionsForAuthenticatedUser: [\"GET /user/marketplace_purchases\"],\n listSubscriptionsForAuthenticatedUserStubbed: [\"GET /user/marketplace_purchases/stubbed\"],\n listWebhookDeliveries: [\"GET /app/hook/deliveries\"],\n redeliverWebhookDelivery: [\"POST /app/hook/deliveries/{delivery_id}/attempts\"],\n removeRepoFromInstallation: [\"DELETE /user/installations/{installation_id}/repositories/{repository_id}\", {}, {\n renamed: [\"apps\", \"removeRepoFromInstallationForAuthenticatedUser\"]\n }],\n removeRepoFromInstallationForAuthenticatedUser: [\"DELETE /user/installations/{installation_id}/repositories/{repository_id}\"],\n resetToken: [\"PATCH /applications/{client_id}/token\"],\n revokeInstallationAccessToken: [\"DELETE /installation/token\"],\n scopeToken: [\"POST /applications/{client_id}/token/scoped\"],\n suspendInstallation: [\"PUT /app/installations/{installation_id}/suspended\"],\n unsuspendInstallation: [\"DELETE /app/installations/{installation_id}/suspended\"],\n updateWebhookConfigForApp: [\"PATCH /app/hook/config\"]\n },\n billing: {\n getGithubActionsBillingOrg: [\"GET /orgs/{org}/settings/billing/actions\"],\n getGithubActionsBillingUser: [\"GET /users/{username}/settings/billing/actions\"],\n getGithubAdvancedSecurityBillingGhe: [\"GET /enterprises/{enterprise}/settings/billing/advanced-security\"],\n getGithubAdvancedSecurityBillingOrg: [\"GET /orgs/{org}/settings/billing/advanced-security\"],\n getGithubPackagesBillingOrg: [\"GET /orgs/{org}/settings/billing/packages\"],\n getGithubPackagesBillingUser: [\"GET /users/{username}/settings/billing/packages\"],\n getSharedStorageBillingOrg: [\"GET /orgs/{org}/settings/billing/shared-storage\"],\n getSharedStorageBillingUser: [\"GET /users/{username}/settings/billing/shared-storage\"]\n },\n checks: {\n create: [\"POST /repos/{owner}/{repo}/check-runs\"],\n createSuite: [\"POST /repos/{owner}/{repo}/check-suites\"],\n get: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}\"],\n getSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}\"],\n listAnnotations: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\"],\n listForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\"],\n listForSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\"],\n listSuitesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\"],\n rerequestRun: [\"POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest\"],\n rerequestSuite: [\"POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest\"],\n setSuitesPreferences: [\"PATCH /repos/{owner}/{repo}/check-suites/preferences\"],\n update: [\"PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}\"]\n },\n codeScanning: {\n deleteAnalysis: [\"DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}\"],\n getAlert: [\"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\", {}, {\n renamedParameters: {\n alert_id: \"alert_number\"\n }\n }],\n getAnalysis: [\"GET 
/repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}\"],\n getSarif: [\"GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}\"],\n listAlertInstances: [\"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\"],\n listAlertsForOrg: [\"GET /orgs/{org}/code-scanning/alerts\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/code-scanning/alerts\"],\n listAlertsInstances: [\"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\", {}, {\n renamed: [\"codeScanning\", \"listAlertInstances\"]\n }],\n listRecentAnalyses: [\"GET /repos/{owner}/{repo}/code-scanning/analyses\"],\n updateAlert: [\"PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\"],\n uploadSarif: [\"POST /repos/{owner}/{repo}/code-scanning/sarifs\"]\n },\n codesOfConduct: {\n getAllCodesOfConduct: [\"GET /codes_of_conduct\"],\n getConductCode: [\"GET /codes_of_conduct/{key}\"]\n },\n codespaces: {\n addRepositoryForSecretForAuthenticatedUser: [\"PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}\"],\n codespaceMachinesForAuthenticatedUser: [\"GET /user/codespaces/{codespace_name}/machines\"],\n createForAuthenticatedUser: [\"POST /user/codespaces\"],\n createOrUpdateRepoSecret: [\"PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}\"],\n createOrUpdateSecretForAuthenticatedUser: [\"PUT /user/codespaces/secrets/{secret_name}\"],\n createWithPrForAuthenticatedUser: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces\"],\n createWithRepoForAuthenticatedUser: [\"POST /repos/{owner}/{repo}/codespaces\"],\n deleteForAuthenticatedUser: [\"DELETE /user/codespaces/{codespace_name}\"],\n deleteFromOrganization: [\"DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}\"],\n deleteRepoSecret: [\"DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}\"],\n deleteSecretForAuthenticatedUser: [\"DELETE /user/codespaces/secrets/{secret_name}\"],\n exportForAuthenticatedUser: [\"POST /user/codespaces/{codespace_name}/exports\"],\n getExportDetailsForAuthenticatedUser: [\"GET /user/codespaces/{codespace_name}/exports/{export_id}\"],\n getForAuthenticatedUser: [\"GET /user/codespaces/{codespace_name}\"],\n getPublicKeyForAuthenticatedUser: [\"GET /user/codespaces/secrets/public-key\"],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/codespaces/secrets/public-key\"],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}\"],\n getSecretForAuthenticatedUser: [\"GET /user/codespaces/secrets/{secret_name}\"],\n listDevcontainersInRepositoryForAuthenticatedUser: [\"GET /repos/{owner}/{repo}/codespaces/devcontainers\"],\n listForAuthenticatedUser: [\"GET /user/codespaces\"],\n listInOrganization: [\"GET /orgs/{org}/codespaces\", {}, {\n renamedParameters: {\n org_id: \"org\"\n }\n }],\n listInRepositoryForAuthenticatedUser: [\"GET /repos/{owner}/{repo}/codespaces\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/codespaces/secrets\"],\n listRepositoriesForSecretForAuthenticatedUser: [\"GET /user/codespaces/secrets/{secret_name}/repositories\"],\n listSecretsForAuthenticatedUser: [\"GET /user/codespaces/secrets\"],\n removeRepositoryForSecretForAuthenticatedUser: [\"DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}\"],\n repoMachinesForAuthenticatedUser: [\"GET /repos/{owner}/{repo}/codespaces/machines\"],\n setRepositoriesForSecretForAuthenticatedUser: [\"PUT /user/codespaces/secrets/{secret_name}/repositories\"],\n startForAuthenticatedUser: [\"POST 
/user/codespaces/{codespace_name}/start\"],\n stopForAuthenticatedUser: [\"POST /user/codespaces/{codespace_name}/stop\"],\n stopInOrganization: [\"POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop\"],\n updateForAuthenticatedUser: [\"PATCH /user/codespaces/{codespace_name}\"]\n },\n dependabot: {\n addSelectedRepoToOrgSecret: [\"PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}\"],\n createOrUpdateOrgSecret: [\"PUT /orgs/{org}/dependabot/secrets/{secret_name}\"],\n createOrUpdateRepoSecret: [\"PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\"],\n deleteOrgSecret: [\"DELETE /orgs/{org}/dependabot/secrets/{secret_name}\"],\n deleteRepoSecret: [\"DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/dependabot/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/dependabot/secrets/{secret_name}\"],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/dependabot/secrets/public-key\"],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\"],\n listOrgSecrets: [\"GET /orgs/{org}/dependabot/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/dependabot/secrets\"],\n listSelectedReposForOrgSecret: [\"GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories\"],\n removeSelectedRepoFromOrgSecret: [\"DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}\"],\n setSelectedReposForOrgSecret: [\"PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories\"]\n },\n dependencyGraph: {\n createRepositorySnapshot: [\"POST /repos/{owner}/{repo}/dependency-graph/snapshots\"],\n diffRange: [\"GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}\"]\n },\n emojis: {\n get: [\"GET /emojis\"]\n },\n enterpriseAdmin: {\n addCustomLabelsToSelfHostedRunnerForEnterprise: [\"POST /enterprises/{enterprise}/actions/runners/{runner_id}/labels\"],\n disableSelectedOrganizationGithubActionsEnterprise: [\"DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}\"],\n enableSelectedOrganizationGithubActionsEnterprise: [\"PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}\"],\n getAllowedActionsEnterprise: [\"GET /enterprises/{enterprise}/actions/permissions/selected-actions\"],\n getGithubActionsPermissionsEnterprise: [\"GET /enterprises/{enterprise}/actions/permissions\"],\n getServerStatistics: [\"GET /enterprise-installation/{enterprise_or_org}/server-statistics\"],\n listLabelsForSelfHostedRunnerForEnterprise: [\"GET /enterprises/{enterprise}/actions/runners/{runner_id}/labels\"],\n listSelectedOrganizationsEnabledGithubActionsEnterprise: [\"GET /enterprises/{enterprise}/actions/permissions/organizations\"],\n removeAllCustomLabelsFromSelfHostedRunnerForEnterprise: [\"DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels\"],\n removeCustomLabelFromSelfHostedRunnerForEnterprise: [\"DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels/{name}\"],\n setAllowedActionsEnterprise: [\"PUT /enterprises/{enterprise}/actions/permissions/selected-actions\"],\n setCustomLabelsForSelfHostedRunnerForEnterprise: [\"PUT /enterprises/{enterprise}/actions/runners/{runner_id}/labels\"],\n setGithubActionsPermissionsEnterprise: [\"PUT /enterprises/{enterprise}/actions/permissions\"],\n setSelectedOrganizationsEnabledGithubActionsEnterprise: [\"PUT /enterprises/{enterprise}/actions/permissions/organizations\"]\n },\n gists: {\n checkIsStarred: [\"GET /gists/{gist_id}/star\"],\n create: 
[\"POST /gists\"],\n createComment: [\"POST /gists/{gist_id}/comments\"],\n delete: [\"DELETE /gists/{gist_id}\"],\n deleteComment: [\"DELETE /gists/{gist_id}/comments/{comment_id}\"],\n fork: [\"POST /gists/{gist_id}/forks\"],\n get: [\"GET /gists/{gist_id}\"],\n getComment: [\"GET /gists/{gist_id}/comments/{comment_id}\"],\n getRevision: [\"GET /gists/{gist_id}/{sha}\"],\n list: [\"GET /gists\"],\n listComments: [\"GET /gists/{gist_id}/comments\"],\n listCommits: [\"GET /gists/{gist_id}/commits\"],\n listForUser: [\"GET /users/{username}/gists\"],\n listForks: [\"GET /gists/{gist_id}/forks\"],\n listPublic: [\"GET /gists/public\"],\n listStarred: [\"GET /gists/starred\"],\n star: [\"PUT /gists/{gist_id}/star\"],\n unstar: [\"DELETE /gists/{gist_id}/star\"],\n update: [\"PATCH /gists/{gist_id}\"],\n updateComment: [\"PATCH /gists/{gist_id}/comments/{comment_id}\"]\n },\n git: {\n createBlob: [\"POST /repos/{owner}/{repo}/git/blobs\"],\n createCommit: [\"POST /repos/{owner}/{repo}/git/commits\"],\n createRef: [\"POST /repos/{owner}/{repo}/git/refs\"],\n createTag: [\"POST /repos/{owner}/{repo}/git/tags\"],\n createTree: [\"POST /repos/{owner}/{repo}/git/trees\"],\n deleteRef: [\"DELETE /repos/{owner}/{repo}/git/refs/{ref}\"],\n getBlob: [\"GET /repos/{owner}/{repo}/git/blobs/{file_sha}\"],\n getCommit: [\"GET /repos/{owner}/{repo}/git/commits/{commit_sha}\"],\n getRef: [\"GET /repos/{owner}/{repo}/git/ref/{ref}\"],\n getTag: [\"GET /repos/{owner}/{repo}/git/tags/{tag_sha}\"],\n getTree: [\"GET /repos/{owner}/{repo}/git/trees/{tree_sha}\"],\n listMatchingRefs: [\"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\"],\n updateRef: [\"PATCH /repos/{owner}/{repo}/git/refs/{ref}\"]\n },\n gitignore: {\n getAllTemplates: [\"GET /gitignore/templates\"],\n getTemplate: [\"GET /gitignore/templates/{name}\"]\n },\n interactions: {\n getRestrictionsForAuthenticatedUser: [\"GET /user/interaction-limits\"],\n getRestrictionsForOrg: [\"GET /orgs/{org}/interaction-limits\"],\n getRestrictionsForRepo: [\"GET /repos/{owner}/{repo}/interaction-limits\"],\n getRestrictionsForYourPublicRepos: [\"GET /user/interaction-limits\", {}, {\n renamed: [\"interactions\", \"getRestrictionsForAuthenticatedUser\"]\n }],\n removeRestrictionsForAuthenticatedUser: [\"DELETE /user/interaction-limits\"],\n removeRestrictionsForOrg: [\"DELETE /orgs/{org}/interaction-limits\"],\n removeRestrictionsForRepo: [\"DELETE /repos/{owner}/{repo}/interaction-limits\"],\n removeRestrictionsForYourPublicRepos: [\"DELETE /user/interaction-limits\", {}, {\n renamed: [\"interactions\", \"removeRestrictionsForAuthenticatedUser\"]\n }],\n setRestrictionsForAuthenticatedUser: [\"PUT /user/interaction-limits\"],\n setRestrictionsForOrg: [\"PUT /orgs/{org}/interaction-limits\"],\n setRestrictionsForRepo: [\"PUT /repos/{owner}/{repo}/interaction-limits\"],\n setRestrictionsForYourPublicRepos: [\"PUT /user/interaction-limits\", {}, {\n renamed: [\"interactions\", \"setRestrictionsForAuthenticatedUser\"]\n }]\n },\n issues: {\n addAssignees: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/assignees\"],\n addLabels: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n checkUserCanBeAssigned: [\"GET /repos/{owner}/{repo}/assignees/{assignee}\"],\n create: [\"POST /repos/{owner}/{repo}/issues\"],\n createComment: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n createLabel: [\"POST /repos/{owner}/{repo}/labels\"],\n createMilestone: [\"POST /repos/{owner}/{repo}/milestones\"],\n deleteComment: [\"DELETE 
/repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n deleteLabel: [\"DELETE /repos/{owner}/{repo}/labels/{name}\"],\n deleteMilestone: [\"DELETE /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n get: [\"GET /repos/{owner}/{repo}/issues/{issue_number}\"],\n getComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n getEvent: [\"GET /repos/{owner}/{repo}/issues/events/{event_id}\"],\n getLabel: [\"GET /repos/{owner}/{repo}/labels/{name}\"],\n getMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n list: [\"GET /issues\"],\n listAssignees: [\"GET /repos/{owner}/{repo}/assignees\"],\n listComments: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n listCommentsForRepo: [\"GET /repos/{owner}/{repo}/issues/comments\"],\n listEvents: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/events\"],\n listEventsForRepo: [\"GET /repos/{owner}/{repo}/issues/events\"],\n listEventsForTimeline: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\"],\n listForAuthenticatedUser: [\"GET /user/issues\"],\n listForOrg: [\"GET /orgs/{org}/issues\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/issues\"],\n listLabelsForMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\"],\n listLabelsForRepo: [\"GET /repos/{owner}/{repo}/labels\"],\n listLabelsOnIssue: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n listMilestones: [\"GET /repos/{owner}/{repo}/milestones\"],\n lock: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n removeAllLabels: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n removeAssignees: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees\"],\n removeLabel: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}\"],\n setLabels: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n unlock: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n update: [\"PATCH /repos/{owner}/{repo}/issues/{issue_number}\"],\n updateComment: [\"PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n updateLabel: [\"PATCH /repos/{owner}/{repo}/labels/{name}\"],\n updateMilestone: [\"PATCH /repos/{owner}/{repo}/milestones/{milestone_number}\"]\n },\n licenses: {\n get: [\"GET /licenses/{license}\"],\n getAllCommonlyUsed: [\"GET /licenses\"],\n getForRepo: [\"GET /repos/{owner}/{repo}/license\"]\n },\n markdown: {\n render: [\"POST /markdown\"],\n renderRaw: [\"POST /markdown/raw\", {\n headers: {\n \"content-type\": \"text/plain; charset=utf-8\"\n }\n }]\n },\n meta: {\n get: [\"GET /meta\"],\n getOctocat: [\"GET /octocat\"],\n getZen: [\"GET /zen\"],\n root: [\"GET /\"]\n },\n migrations: {\n cancelImport: [\"DELETE /repos/{owner}/{repo}/import\"],\n deleteArchiveForAuthenticatedUser: [\"DELETE /user/migrations/{migration_id}/archive\"],\n deleteArchiveForOrg: [\"DELETE /orgs/{org}/migrations/{migration_id}/archive\"],\n downloadArchiveForOrg: [\"GET /orgs/{org}/migrations/{migration_id}/archive\"],\n getArchiveForAuthenticatedUser: [\"GET /user/migrations/{migration_id}/archive\"],\n getCommitAuthors: [\"GET /repos/{owner}/{repo}/import/authors\"],\n getImportStatus: [\"GET /repos/{owner}/{repo}/import\"],\n getLargeFiles: [\"GET /repos/{owner}/{repo}/import/large_files\"],\n getStatusForAuthenticatedUser: [\"GET /user/migrations/{migration_id}\"],\n getStatusForOrg: [\"GET /orgs/{org}/migrations/{migration_id}\"],\n listForAuthenticatedUser: [\"GET /user/migrations\"],\n listForOrg: [\"GET /orgs/{org}/migrations\"],\n 
listReposForAuthenticatedUser: [\"GET /user/migrations/{migration_id}/repositories\"],\n listReposForOrg: [\"GET /orgs/{org}/migrations/{migration_id}/repositories\"],\n listReposForUser: [\"GET /user/migrations/{migration_id}/repositories\", {}, {\n renamed: [\"migrations\", \"listReposForAuthenticatedUser\"]\n }],\n mapCommitAuthor: [\"PATCH /repos/{owner}/{repo}/import/authors/{author_id}\"],\n setLfsPreference: [\"PATCH /repos/{owner}/{repo}/import/lfs\"],\n startForAuthenticatedUser: [\"POST /user/migrations\"],\n startForOrg: [\"POST /orgs/{org}/migrations\"],\n startImport: [\"PUT /repos/{owner}/{repo}/import\"],\n unlockRepoForAuthenticatedUser: [\"DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock\"],\n unlockRepoForOrg: [\"DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock\"],\n updateImport: [\"PATCH /repos/{owner}/{repo}/import\"]\n },\n orgs: {\n blockUser: [\"PUT /orgs/{org}/blocks/{username}\"],\n cancelInvitation: [\"DELETE /orgs/{org}/invitations/{invitation_id}\"],\n checkBlockedUser: [\"GET /orgs/{org}/blocks/{username}\"],\n checkMembershipForUser: [\"GET /orgs/{org}/members/{username}\"],\n checkPublicMembershipForUser: [\"GET /orgs/{org}/public_members/{username}\"],\n convertMemberToOutsideCollaborator: [\"PUT /orgs/{org}/outside_collaborators/{username}\"],\n createInvitation: [\"POST /orgs/{org}/invitations\"],\n createWebhook: [\"POST /orgs/{org}/hooks\"],\n deleteWebhook: [\"DELETE /orgs/{org}/hooks/{hook_id}\"],\n get: [\"GET /orgs/{org}\"],\n getMembershipForAuthenticatedUser: [\"GET /user/memberships/orgs/{org}\"],\n getMembershipForUser: [\"GET /orgs/{org}/memberships/{username}\"],\n getWebhook: [\"GET /orgs/{org}/hooks/{hook_id}\"],\n getWebhookConfigForOrg: [\"GET /orgs/{org}/hooks/{hook_id}/config\"],\n getWebhookDelivery: [\"GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}\"],\n list: [\"GET /organizations\"],\n listAppInstallations: [\"GET /orgs/{org}/installations\"],\n listBlockedUsers: [\"GET /orgs/{org}/blocks\"],\n listCustomRoles: [\"GET /organizations/{organization_id}/custom_roles\"],\n listFailedInvitations: [\"GET /orgs/{org}/failed_invitations\"],\n listForAuthenticatedUser: [\"GET /user/orgs\"],\n listForUser: [\"GET /users/{username}/orgs\"],\n listInvitationTeams: [\"GET /orgs/{org}/invitations/{invitation_id}/teams\"],\n listMembers: [\"GET /orgs/{org}/members\"],\n listMembershipsForAuthenticatedUser: [\"GET /user/memberships/orgs\"],\n listOutsideCollaborators: [\"GET /orgs/{org}/outside_collaborators\"],\n listPendingInvitations: [\"GET /orgs/{org}/invitations\"],\n listPublicMembers: [\"GET /orgs/{org}/public_members\"],\n listWebhookDeliveries: [\"GET /orgs/{org}/hooks/{hook_id}/deliveries\"],\n listWebhooks: [\"GET /orgs/{org}/hooks\"],\n pingWebhook: [\"POST /orgs/{org}/hooks/{hook_id}/pings\"],\n redeliverWebhookDelivery: [\"POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts\"],\n removeMember: [\"DELETE /orgs/{org}/members/{username}\"],\n removeMembershipForUser: [\"DELETE /orgs/{org}/memberships/{username}\"],\n removeOutsideCollaborator: [\"DELETE /orgs/{org}/outside_collaborators/{username}\"],\n removePublicMembershipForAuthenticatedUser: [\"DELETE /orgs/{org}/public_members/{username}\"],\n setMembershipForUser: [\"PUT /orgs/{org}/memberships/{username}\"],\n setPublicMembershipForAuthenticatedUser: [\"PUT /orgs/{org}/public_members/{username}\"],\n unblockUser: [\"DELETE /orgs/{org}/blocks/{username}\"],\n update: [\"PATCH /orgs/{org}\"],\n 
updateMembershipForAuthenticatedUser: [\"PATCH /user/memberships/orgs/{org}\"],\n updateWebhook: [\"PATCH /orgs/{org}/hooks/{hook_id}\"],\n updateWebhookConfigForOrg: [\"PATCH /orgs/{org}/hooks/{hook_id}/config\"]\n },\n packages: {\n deletePackageForAuthenticatedUser: [\"DELETE /user/packages/{package_type}/{package_name}\"],\n deletePackageForOrg: [\"DELETE /orgs/{org}/packages/{package_type}/{package_name}\"],\n deletePackageForUser: [\"DELETE /users/{username}/packages/{package_type}/{package_name}\"],\n deletePackageVersionForAuthenticatedUser: [\"DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n deletePackageVersionForOrg: [\"DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n deletePackageVersionForUser: [\"DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n getAllPackageVersionsForAPackageOwnedByAnOrg: [\"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\", {}, {\n renamed: [\"packages\", \"getAllPackageVersionsForPackageOwnedByOrg\"]\n }],\n getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [\"GET /user/packages/{package_type}/{package_name}/versions\", {}, {\n renamed: [\"packages\", \"getAllPackageVersionsForPackageOwnedByAuthenticatedUser\"]\n }],\n getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [\"GET /user/packages/{package_type}/{package_name}/versions\"],\n getAllPackageVersionsForPackageOwnedByOrg: [\"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\"],\n getAllPackageVersionsForPackageOwnedByUser: [\"GET /users/{username}/packages/{package_type}/{package_name}/versions\"],\n getPackageForAuthenticatedUser: [\"GET /user/packages/{package_type}/{package_name}\"],\n getPackageForOrganization: [\"GET /orgs/{org}/packages/{package_type}/{package_name}\"],\n getPackageForUser: [\"GET /users/{username}/packages/{package_type}/{package_name}\"],\n getPackageVersionForAuthenticatedUser: [\"GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n getPackageVersionForOrganization: [\"GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n getPackageVersionForUser: [\"GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n listPackagesForAuthenticatedUser: [\"GET /user/packages\"],\n listPackagesForOrganization: [\"GET /orgs/{org}/packages\"],\n listPackagesForUser: [\"GET /users/{username}/packages\"],\n restorePackageForAuthenticatedUser: [\"POST /user/packages/{package_type}/{package_name}/restore{?token}\"],\n restorePackageForOrg: [\"POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}\"],\n restorePackageForUser: [\"POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}\"],\n restorePackageVersionForAuthenticatedUser: [\"POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"],\n restorePackageVersionForOrg: [\"POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"],\n restorePackageVersionForUser: [\"POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"]\n },\n projects: {\n addCollaborator: [\"PUT /projects/{project_id}/collaborators/{username}\"],\n createCard: [\"POST /projects/columns/{column_id}/cards\"],\n createColumn: [\"POST /projects/{project_id}/columns\"],\n createForAuthenticatedUser: [\"POST /user/projects\"],\n 
createForOrg: [\"POST /orgs/{org}/projects\"],\n createForRepo: [\"POST /repos/{owner}/{repo}/projects\"],\n delete: [\"DELETE /projects/{project_id}\"],\n deleteCard: [\"DELETE /projects/columns/cards/{card_id}\"],\n deleteColumn: [\"DELETE /projects/columns/{column_id}\"],\n get: [\"GET /projects/{project_id}\"],\n getCard: [\"GET /projects/columns/cards/{card_id}\"],\n getColumn: [\"GET /projects/columns/{column_id}\"],\n getPermissionForUser: [\"GET /projects/{project_id}/collaborators/{username}/permission\"],\n listCards: [\"GET /projects/columns/{column_id}/cards\"],\n listCollaborators: [\"GET /projects/{project_id}/collaborators\"],\n listColumns: [\"GET /projects/{project_id}/columns\"],\n listForOrg: [\"GET /orgs/{org}/projects\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/projects\"],\n listForUser: [\"GET /users/{username}/projects\"],\n moveCard: [\"POST /projects/columns/cards/{card_id}/moves\"],\n moveColumn: [\"POST /projects/columns/{column_id}/moves\"],\n removeCollaborator: [\"DELETE /projects/{project_id}/collaborators/{username}\"],\n update: [\"PATCH /projects/{project_id}\"],\n updateCard: [\"PATCH /projects/columns/cards/{card_id}\"],\n updateColumn: [\"PATCH /projects/columns/{column_id}\"]\n },\n pulls: {\n checkIfMerged: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n create: [\"POST /repos/{owner}/{repo}/pulls\"],\n createReplyForReviewComment: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies\"],\n createReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n createReviewComment: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments\"],\n deletePendingReview: [\"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n deleteReviewComment: [\"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n dismissReview: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals\"],\n get: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}\"],\n getReview: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n getReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n list: [\"GET /repos/{owner}/{repo}/pulls\"],\n listCommentsForReview: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\"],\n listCommits: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\"],\n listFiles: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\"],\n listRequestedReviewers: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n listReviewComments: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\"],\n listReviewCommentsForRepo: [\"GET /repos/{owner}/{repo}/pulls/comments\"],\n listReviews: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n merge: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n removeRequestedReviewers: [\"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n requestReviewers: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n submitReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events\"],\n update: [\"PATCH /repos/{owner}/{repo}/pulls/{pull_number}\"],\n updateBranch: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch\"],\n updateReview: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n updateReviewComment: [\"PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}\"]\n },\n rateLimit: {\n get: [\"GET /rate_limit\"]\n },\n 
reactions: {\n createForCommitComment: [\"POST /repos/{owner}/{repo}/comments/{comment_id}/reactions\"],\n createForIssue: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/reactions\"],\n createForIssueComment: [\"POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\"],\n createForPullRequestReviewComment: [\"POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\"],\n createForRelease: [\"POST /repos/{owner}/{repo}/releases/{release_id}/reactions\"],\n createForTeamDiscussionCommentInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\"],\n createForTeamDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\"],\n deleteForCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}\"],\n deleteForIssue: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}\"],\n deleteForIssueComment: [\"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}\"],\n deleteForPullRequestComment: [\"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}\"],\n deleteForRelease: [\"DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}\"],\n deleteForTeamDiscussion: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}\"],\n deleteForTeamDiscussionComment: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}\"],\n listForCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\"],\n listForIssue: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\"],\n listForIssueComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\"],\n listForPullRequestReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\"],\n listForRelease: [\"GET /repos/{owner}/{repo}/releases/{release_id}/reactions\"],\n listForTeamDiscussionCommentInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\"],\n listForTeamDiscussionInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\"]\n },\n repos: {\n acceptInvitation: [\"PATCH /user/repository_invitations/{invitation_id}\", {}, {\n renamed: [\"repos\", \"acceptInvitationForAuthenticatedUser\"]\n }],\n acceptInvitationForAuthenticatedUser: [\"PATCH /user/repository_invitations/{invitation_id}\"],\n addAppAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n addCollaborator: [\"PUT /repos/{owner}/{repo}/collaborators/{username}\"],\n addStatusCheckContexts: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n addTeamAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n addUserAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n checkCollaborator: [\"GET /repos/{owner}/{repo}/collaborators/{username}\"],\n checkVulnerabilityAlerts: [\"GET /repos/{owner}/{repo}/vulnerability-alerts\"],\n codeownersErrors: [\"GET /repos/{owner}/{repo}/codeowners/errors\"],\n compareCommits: [\"GET /repos/{owner}/{repo}/compare/{base}...{head}\"],\n 
compareCommitsWithBasehead: [\"GET /repos/{owner}/{repo}/compare/{basehead}\"],\n createAutolink: [\"POST /repos/{owner}/{repo}/autolinks\"],\n createCommitComment: [\"POST /repos/{owner}/{repo}/commits/{commit_sha}/comments\"],\n createCommitSignatureProtection: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"],\n createCommitStatus: [\"POST /repos/{owner}/{repo}/statuses/{sha}\"],\n createDeployKey: [\"POST /repos/{owner}/{repo}/keys\"],\n createDeployment: [\"POST /repos/{owner}/{repo}/deployments\"],\n createDeploymentStatus: [\"POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"],\n createDispatchEvent: [\"POST /repos/{owner}/{repo}/dispatches\"],\n createForAuthenticatedUser: [\"POST /user/repos\"],\n createFork: [\"POST /repos/{owner}/{repo}/forks\"],\n createInOrg: [\"POST /orgs/{org}/repos\"],\n createOrUpdateEnvironment: [\"PUT /repos/{owner}/{repo}/environments/{environment_name}\"],\n createOrUpdateFileContents: [\"PUT /repos/{owner}/{repo}/contents/{path}\"],\n createPagesSite: [\"POST /repos/{owner}/{repo}/pages\"],\n createRelease: [\"POST /repos/{owner}/{repo}/releases\"],\n createTagProtection: [\"POST /repos/{owner}/{repo}/tags/protection\"],\n createUsingTemplate: [\"POST /repos/{template_owner}/{template_repo}/generate\"],\n createWebhook: [\"POST /repos/{owner}/{repo}/hooks\"],\n declineInvitation: [\"DELETE /user/repository_invitations/{invitation_id}\", {}, {\n renamed: [\"repos\", \"declineInvitationForAuthenticatedUser\"]\n }],\n declineInvitationForAuthenticatedUser: [\"DELETE /user/repository_invitations/{invitation_id}\"],\n delete: [\"DELETE /repos/{owner}/{repo}\"],\n deleteAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"],\n deleteAdminBranchProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n deleteAnEnvironment: [\"DELETE /repos/{owner}/{repo}/environments/{environment_name}\"],\n deleteAutolink: [\"DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}\"],\n deleteBranchProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection\"],\n deleteCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}\"],\n deleteCommitSignatureProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"],\n deleteDeployKey: [\"DELETE /repos/{owner}/{repo}/keys/{key_id}\"],\n deleteDeployment: [\"DELETE /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n deleteFile: [\"DELETE /repos/{owner}/{repo}/contents/{path}\"],\n deleteInvitation: [\"DELETE /repos/{owner}/{repo}/invitations/{invitation_id}\"],\n deletePagesSite: [\"DELETE /repos/{owner}/{repo}/pages\"],\n deletePullRequestReviewProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n deleteRelease: [\"DELETE /repos/{owner}/{repo}/releases/{release_id}\"],\n deleteReleaseAsset: [\"DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n deleteTagProtection: [\"DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}\"],\n deleteWebhook: [\"DELETE /repos/{owner}/{repo}/hooks/{hook_id}\"],\n disableAutomatedSecurityFixes: [\"DELETE /repos/{owner}/{repo}/automated-security-fixes\"],\n disableLfsForRepo: [\"DELETE /repos/{owner}/{repo}/lfs\"],\n disableVulnerabilityAlerts: [\"DELETE /repos/{owner}/{repo}/vulnerability-alerts\"],\n downloadArchive: [\"GET /repos/{owner}/{repo}/zipball/{ref}\", {}, {\n renamed: [\"repos\", \"downloadZipballArchive\"]\n }],\n 
downloadTarballArchive: [\"GET /repos/{owner}/{repo}/tarball/{ref}\"],\n downloadZipballArchive: [\"GET /repos/{owner}/{repo}/zipball/{ref}\"],\n enableAutomatedSecurityFixes: [\"PUT /repos/{owner}/{repo}/automated-security-fixes\"],\n enableLfsForRepo: [\"PUT /repos/{owner}/{repo}/lfs\"],\n enableVulnerabilityAlerts: [\"PUT /repos/{owner}/{repo}/vulnerability-alerts\"],\n generateReleaseNotes: [\"POST /repos/{owner}/{repo}/releases/generate-notes\"],\n get: [\"GET /repos/{owner}/{repo}\"],\n getAccessRestrictions: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"],\n getAdminBranchProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n getAllEnvironments: [\"GET /repos/{owner}/{repo}/environments\"],\n getAllStatusCheckContexts: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\"],\n getAllTopics: [\"GET /repos/{owner}/{repo}/topics\"],\n getAppsWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\"],\n getAutolink: [\"GET /repos/{owner}/{repo}/autolinks/{autolink_id}\"],\n getBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}\"],\n getBranchProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection\"],\n getClones: [\"GET /repos/{owner}/{repo}/traffic/clones\"],\n getCodeFrequencyStats: [\"GET /repos/{owner}/{repo}/stats/code_frequency\"],\n getCollaboratorPermissionLevel: [\"GET /repos/{owner}/{repo}/collaborators/{username}/permission\"],\n getCombinedStatusForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/status\"],\n getCommit: [\"GET /repos/{owner}/{repo}/commits/{ref}\"],\n getCommitActivityStats: [\"GET /repos/{owner}/{repo}/stats/commit_activity\"],\n getCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}\"],\n getCommitSignatureProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"],\n getCommunityProfileMetrics: [\"GET /repos/{owner}/{repo}/community/profile\"],\n getContent: [\"GET /repos/{owner}/{repo}/contents/{path}\"],\n getContributorsStats: [\"GET /repos/{owner}/{repo}/stats/contributors\"],\n getDeployKey: [\"GET /repos/{owner}/{repo}/keys/{key_id}\"],\n getDeployment: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n getDeploymentStatus: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}\"],\n getEnvironment: [\"GET /repos/{owner}/{repo}/environments/{environment_name}\"],\n getLatestPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/latest\"],\n getLatestRelease: [\"GET /repos/{owner}/{repo}/releases/latest\"],\n getPages: [\"GET /repos/{owner}/{repo}/pages\"],\n getPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/{build_id}\"],\n getPagesHealthCheck: [\"GET /repos/{owner}/{repo}/pages/health\"],\n getParticipationStats: [\"GET /repos/{owner}/{repo}/stats/participation\"],\n getPullRequestReviewProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n getPunchCardStats: [\"GET /repos/{owner}/{repo}/stats/punch_card\"],\n getReadme: [\"GET /repos/{owner}/{repo}/readme\"],\n getReadmeInDirectory: [\"GET /repos/{owner}/{repo}/readme/{dir}\"],\n getRelease: [\"GET /repos/{owner}/{repo}/releases/{release_id}\"],\n getReleaseAsset: [\"GET /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n getReleaseByTag: [\"GET /repos/{owner}/{repo}/releases/tags/{tag}\"],\n getStatusChecksProtection: [\"GET 
/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n getTeamsWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\"],\n getTopPaths: [\"GET /repos/{owner}/{repo}/traffic/popular/paths\"],\n getTopReferrers: [\"GET /repos/{owner}/{repo}/traffic/popular/referrers\"],\n getUsersWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\"],\n getViews: [\"GET /repos/{owner}/{repo}/traffic/views\"],\n getWebhook: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}\"],\n getWebhookConfigForRepo: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}/config\"],\n getWebhookDelivery: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}\"],\n listAutolinks: [\"GET /repos/{owner}/{repo}/autolinks\"],\n listBranches: [\"GET /repos/{owner}/{repo}/branches\"],\n listBranchesForHeadCommit: [\"GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head\"],\n listCollaborators: [\"GET /repos/{owner}/{repo}/collaborators\"],\n listCommentsForCommit: [\"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\"],\n listCommitCommentsForRepo: [\"GET /repos/{owner}/{repo}/comments\"],\n listCommitStatusesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/statuses\"],\n listCommits: [\"GET /repos/{owner}/{repo}/commits\"],\n listContributors: [\"GET /repos/{owner}/{repo}/contributors\"],\n listDeployKeys: [\"GET /repos/{owner}/{repo}/keys\"],\n listDeploymentStatuses: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"],\n listDeployments: [\"GET /repos/{owner}/{repo}/deployments\"],\n listForAuthenticatedUser: [\"GET /user/repos\"],\n listForOrg: [\"GET /orgs/{org}/repos\"],\n listForUser: [\"GET /users/{username}/repos\"],\n listForks: [\"GET /repos/{owner}/{repo}/forks\"],\n listInvitations: [\"GET /repos/{owner}/{repo}/invitations\"],\n listInvitationsForAuthenticatedUser: [\"GET /user/repository_invitations\"],\n listLanguages: [\"GET /repos/{owner}/{repo}/languages\"],\n listPagesBuilds: [\"GET /repos/{owner}/{repo}/pages/builds\"],\n listPublic: [\"GET /repositories\"],\n listPullRequestsAssociatedWithCommit: [\"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\"],\n listReleaseAssets: [\"GET /repos/{owner}/{repo}/releases/{release_id}/assets\"],\n listReleases: [\"GET /repos/{owner}/{repo}/releases\"],\n listTagProtection: [\"GET /repos/{owner}/{repo}/tags/protection\"],\n listTags: [\"GET /repos/{owner}/{repo}/tags\"],\n listTeams: [\"GET /repos/{owner}/{repo}/teams\"],\n listWebhookDeliveries: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries\"],\n listWebhooks: [\"GET /repos/{owner}/{repo}/hooks\"],\n merge: [\"POST /repos/{owner}/{repo}/merges\"],\n mergeUpstream: [\"POST /repos/{owner}/{repo}/merge-upstream\"],\n pingWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/pings\"],\n redeliverWebhookDelivery: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts\"],\n removeAppAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n removeCollaborator: [\"DELETE /repos/{owner}/{repo}/collaborators/{username}\"],\n removeStatusCheckContexts: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n removeStatusCheckProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n removeTeamAccessRestrictions: [\"DELETE 
/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n removeUserAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n renameBranch: [\"POST /repos/{owner}/{repo}/branches/{branch}/rename\"],\n replaceAllTopics: [\"PUT /repos/{owner}/{repo}/topics\"],\n requestPagesBuild: [\"POST /repos/{owner}/{repo}/pages/builds\"],\n setAdminBranchProtection: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n setAppAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n setStatusCheckContexts: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n setTeamAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n setUserAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n testPushWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/tests\"],\n transfer: [\"POST /repos/{owner}/{repo}/transfer\"],\n update: [\"PATCH /repos/{owner}/{repo}\"],\n updateBranchProtection: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection\"],\n updateCommitComment: [\"PATCH /repos/{owner}/{repo}/comments/{comment_id}\"],\n updateInformationAboutPagesSite: [\"PUT /repos/{owner}/{repo}/pages\"],\n updateInvitation: [\"PATCH /repos/{owner}/{repo}/invitations/{invitation_id}\"],\n updatePullRequestReviewProtection: [\"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n updateRelease: [\"PATCH /repos/{owner}/{repo}/releases/{release_id}\"],\n updateReleaseAsset: [\"PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n updateStatusCheckPotection: [\"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\", {}, {\n renamed: [\"repos\", \"updateStatusCheckProtection\"]\n }],\n updateStatusCheckProtection: [\"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n updateWebhook: [\"PATCH /repos/{owner}/{repo}/hooks/{hook_id}\"],\n updateWebhookConfigForRepo: [\"PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config\"],\n uploadReleaseAsset: [\"POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}\", {\n baseUrl: \"https://uploads.github.com\"\n }]\n },\n search: {\n code: [\"GET /search/code\"],\n commits: [\"GET /search/commits\"],\n issuesAndPullRequests: [\"GET /search/issues\"],\n labels: [\"GET /search/labels\"],\n repos: [\"GET /search/repositories\"],\n topics: [\"GET /search/topics\"],\n users: [\"GET /search/users\"]\n },\n secretScanning: {\n getAlert: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\"],\n listAlertsForEnterprise: [\"GET /enterprises/{enterprise}/secret-scanning/alerts\"],\n listAlertsForOrg: [\"GET /orgs/{org}/secret-scanning/alerts\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts\"],\n listLocationsForAlert: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations\"],\n updateAlert: [\"PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\"]\n },\n teams: {\n addOrUpdateMembershipForUserInOrg: [\"PUT /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n addOrUpdateProjectPermissionsInOrg: [\"PUT 
/orgs/{org}/teams/{team_slug}/projects/{project_id}\"],\n addOrUpdateRepoPermissionsInOrg: [\"PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n checkPermissionsForProjectInOrg: [\"GET /orgs/{org}/teams/{team_slug}/projects/{project_id}\"],\n checkPermissionsForRepoInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n create: [\"POST /orgs/{org}/teams\"],\n createDiscussionCommentInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"],\n createDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions\"],\n deleteDiscussionCommentInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n deleteDiscussionInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n deleteInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}\"],\n getByName: [\"GET /orgs/{org}/teams/{team_slug}\"],\n getDiscussionCommentInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n getDiscussionInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n getMembershipForUserInOrg: [\"GET /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n list: [\"GET /orgs/{org}/teams\"],\n listChildInOrg: [\"GET /orgs/{org}/teams/{team_slug}/teams\"],\n listDiscussionCommentsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"],\n listDiscussionsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions\"],\n listForAuthenticatedUser: [\"GET /user/teams\"],\n listMembersInOrg: [\"GET /orgs/{org}/teams/{team_slug}/members\"],\n listPendingInvitationsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/invitations\"],\n listProjectsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/projects\"],\n listReposInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos\"],\n removeMembershipForUserInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n removeProjectInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}\"],\n removeRepoInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n updateDiscussionCommentInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n updateDiscussionInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n updateInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}\"]\n },\n users: {\n addEmailForAuthenticated: [\"POST /user/emails\", {}, {\n renamed: [\"users\", \"addEmailForAuthenticatedUser\"]\n }],\n addEmailForAuthenticatedUser: [\"POST /user/emails\"],\n block: [\"PUT /user/blocks/{username}\"],\n checkBlocked: [\"GET /user/blocks/{username}\"],\n checkFollowingForUser: [\"GET /users/{username}/following/{target_user}\"],\n checkPersonIsFollowedByAuthenticated: [\"GET /user/following/{username}\"],\n createGpgKeyForAuthenticated: [\"POST /user/gpg_keys\", {}, {\n renamed: [\"users\", \"createGpgKeyForAuthenticatedUser\"]\n }],\n createGpgKeyForAuthenticatedUser: [\"POST /user/gpg_keys\"],\n createPublicSshKeyForAuthenticated: [\"POST /user/keys\", {}, {\n renamed: [\"users\", \"createPublicSshKeyForAuthenticatedUser\"]\n }],\n createPublicSshKeyForAuthenticatedUser: [\"POST /user/keys\"],\n deleteEmailForAuthenticated: [\"DELETE /user/emails\", {}, {\n renamed: [\"users\", \"deleteEmailForAuthenticatedUser\"]\n }],\n deleteEmailForAuthenticatedUser: [\"DELETE /user/emails\"],\n deleteGpgKeyForAuthenticated: [\"DELETE /user/gpg_keys/{gpg_key_id}\", {}, {\n 
renamed: [\"users\", \"deleteGpgKeyForAuthenticatedUser\"]\n }],\n deleteGpgKeyForAuthenticatedUser: [\"DELETE /user/gpg_keys/{gpg_key_id}\"],\n deletePublicSshKeyForAuthenticated: [\"DELETE /user/keys/{key_id}\", {}, {\n renamed: [\"users\", \"deletePublicSshKeyForAuthenticatedUser\"]\n }],\n deletePublicSshKeyForAuthenticatedUser: [\"DELETE /user/keys/{key_id}\"],\n follow: [\"PUT /user/following/{username}\"],\n getAuthenticated: [\"GET /user\"],\n getByUsername: [\"GET /users/{username}\"],\n getContextForUser: [\"GET /users/{username}/hovercard\"],\n getGpgKeyForAuthenticated: [\"GET /user/gpg_keys/{gpg_key_id}\", {}, {\n renamed: [\"users\", \"getGpgKeyForAuthenticatedUser\"]\n }],\n getGpgKeyForAuthenticatedUser: [\"GET /user/gpg_keys/{gpg_key_id}\"],\n getPublicSshKeyForAuthenticated: [\"GET /user/keys/{key_id}\", {}, {\n renamed: [\"users\", \"getPublicSshKeyForAuthenticatedUser\"]\n }],\n getPublicSshKeyForAuthenticatedUser: [\"GET /user/keys/{key_id}\"],\n list: [\"GET /users\"],\n listBlockedByAuthenticated: [\"GET /user/blocks\", {}, {\n renamed: [\"users\", \"listBlockedByAuthenticatedUser\"]\n }],\n listBlockedByAuthenticatedUser: [\"GET /user/blocks\"],\n listEmailsForAuthenticated: [\"GET /user/emails\", {}, {\n renamed: [\"users\", \"listEmailsForAuthenticatedUser\"]\n }],\n listEmailsForAuthenticatedUser: [\"GET /user/emails\"],\n listFollowedByAuthenticated: [\"GET /user/following\", {}, {\n renamed: [\"users\", \"listFollowedByAuthenticatedUser\"]\n }],\n listFollowedByAuthenticatedUser: [\"GET /user/following\"],\n listFollowersForAuthenticatedUser: [\"GET /user/followers\"],\n listFollowersForUser: [\"GET /users/{username}/followers\"],\n listFollowingForUser: [\"GET /users/{username}/following\"],\n listGpgKeysForAuthenticated: [\"GET /user/gpg_keys\", {}, {\n renamed: [\"users\", \"listGpgKeysForAuthenticatedUser\"]\n }],\n listGpgKeysForAuthenticatedUser: [\"GET /user/gpg_keys\"],\n listGpgKeysForUser: [\"GET /users/{username}/gpg_keys\"],\n listPublicEmailsForAuthenticated: [\"GET /user/public_emails\", {}, {\n renamed: [\"users\", \"listPublicEmailsForAuthenticatedUser\"]\n }],\n listPublicEmailsForAuthenticatedUser: [\"GET /user/public_emails\"],\n listPublicKeysForUser: [\"GET /users/{username}/keys\"],\n listPublicSshKeysForAuthenticated: [\"GET /user/keys\", {}, {\n renamed: [\"users\", \"listPublicSshKeysForAuthenticatedUser\"]\n }],\n listPublicSshKeysForAuthenticatedUser: [\"GET /user/keys\"],\n setPrimaryEmailVisibilityForAuthenticated: [\"PATCH /user/email/visibility\", {}, {\n renamed: [\"users\", \"setPrimaryEmailVisibilityForAuthenticatedUser\"]\n }],\n setPrimaryEmailVisibilityForAuthenticatedUser: [\"PATCH /user/email/visibility\"],\n unblock: [\"DELETE /user/blocks/{username}\"],\n unfollow: [\"DELETE /user/following/{username}\"],\n updateAuthenticated: [\"PATCH /user\"]\n }\n};\n\nconst VERSION = \"5.16.2\";\n\nfunction endpointsToMethods(octokit, endpointsMap) {\n const newMethods = {};\n\n for (const [scope, endpoints] of Object.entries(endpointsMap)) {\n for (const [methodName, endpoint] of Object.entries(endpoints)) {\n const [route, defaults, decorations] = endpoint;\n const [method, url] = route.split(/ /);\n const endpointDefaults = Object.assign({\n method,\n url\n }, defaults);\n\n if (!newMethods[scope]) {\n newMethods[scope] = {};\n }\n\n const scopeMethods = newMethods[scope];\n\n if (decorations) {\n scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations);\n continue;\n }\n\n 
scopeMethods[methodName] = octokit.request.defaults(endpointDefaults);\n }\n }\n\n return newMethods;\n}\n\nfunction decorate(octokit, scope, methodName, defaults, decorations) {\n const requestWithDefaults = octokit.request.defaults(defaults);\n /* istanbul ignore next */\n\n function withDecorations(...args) {\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData`\n\n if (decorations.mapToData) {\n options = Object.assign({}, options, {\n data: options[decorations.mapToData],\n [decorations.mapToData]: undefined\n });\n return requestWithDefaults(options);\n }\n\n if (decorations.renamed) {\n const [newScope, newMethodName] = decorations.renamed;\n octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`);\n }\n\n if (decorations.deprecated) {\n octokit.log.warn(decorations.deprecated);\n }\n\n if (decorations.renamedParameters) {\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n const options = requestWithDefaults.endpoint.merge(...args);\n\n for (const [name, alias] of Object.entries(decorations.renamedParameters)) {\n if (name in options) {\n octokit.log.warn(`\"${name}\" parameter is deprecated for \"octokit.${scope}.${methodName}()\". Use \"${alias}\" instead`);\n\n if (!(alias in options)) {\n options[alias] = options[name];\n }\n\n delete options[name];\n }\n }\n\n return requestWithDefaults(options);\n } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n\n\n return requestWithDefaults(...args);\n }\n\n return Object.assign(withDecorations, requestWithDefaults);\n}\n\nfunction restEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit, Endpoints);\n return {\n rest: api\n };\n}\nrestEndpointMethods.VERSION = VERSION;\nfunction legacyRestEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit, Endpoints);\n return _objectSpread2(_objectSpread2({}, api), {}, {\n rest: api\n });\n}\nlegacyRestEndpointMethods.VERSION = VERSION;\n\nexports.legacyRestEndpointMethods = legacyRestEndpointMethods;\nexports.restEndpointMethods = restEndpointMethods;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; }\n\nvar deprecation = require('deprecation');\nvar once = _interopDefault(require('once'));\n\nconst logOnceCode = once(deprecation => console.warn(deprecation));\nconst logOnceHeaders = once(deprecation => console.warn(deprecation));\n/**\n * Error with extra properties to help with debugging\n */\n\nclass RequestError extends Error {\n constructor(message, statusCode, options) {\n super(message); // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n\n this.name = \"HttpError\";\n this.status = statusCode;\n let headers;\n\n if (\"headers\" in options && typeof options.headers !== \"undefined\") {\n headers = options.headers;\n }\n\n if (\"response\" in options) {\n this.response = options.response;\n headers = options.response.headers;\n } // redact request credentials without mutating original request options\n\n\n const requestCopy = Object.assign({}, options.request);\n\n if (options.request.headers.authorization) {\n requestCopy.headers = Object.assign({}, options.request.headers, {\n authorization: options.request.headers.authorization.replace(/ .*$/, \" [REDACTED]\")\n });\n }\n\n requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit\n // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications\n .replace(/\\bclient_secret=\\w+/g, \"client_secret=[REDACTED]\") // OAuth tokens can be passed as URL query parameters, although it is not recommended\n // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header\n .replace(/\\baccess_token=\\w+/g, \"access_token=[REDACTED]\");\n this.request = requestCopy; // deprecations\n\n Object.defineProperty(this, \"code\", {\n get() {\n logOnceCode(new deprecation.Deprecation(\"[@octokit/request-error] `error.code` is deprecated, use `error.status`.\"));\n return statusCode;\n }\n\n });\n Object.defineProperty(this, \"headers\", {\n get() {\n logOnceHeaders(new deprecation.Deprecation(\"[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.\"));\n return headers || {};\n }\n\n });\n }\n\n}\n\nexports.RequestError = RequestError;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }\n\nvar endpoint = require('@octokit/endpoint');\nvar universalUserAgent = require('universal-user-agent');\nvar isPlainObject = require('is-plain-object');\nvar nodeFetch = _interopDefault(require('node-fetch'));\nvar requestError = require('@octokit/request-error');\n\nconst VERSION = \"5.6.3\";\n\nfunction getBufferResponse(response) {\n return response.arrayBuffer();\n}\n\nfunction fetchWrapper(requestOptions) {\n const log = requestOptions.request && requestOptions.request.log ? 
requestOptions.request.log : console;\n\n if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {\n requestOptions.body = JSON.stringify(requestOptions.body);\n }\n\n let headers = {};\n let status;\n let url;\n const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch;\n return fetch(requestOptions.url, Object.assign({\n method: requestOptions.method,\n body: requestOptions.body,\n headers: requestOptions.headers,\n redirect: requestOptions.redirect\n }, // `requestOptions.request.agent` type is incompatible\n // see https://github.com/octokit/types.ts/pull/264\n requestOptions.request)).then(async response => {\n url = response.url;\n status = response.status;\n\n for (const keyAndValue of response.headers) {\n headers[keyAndValue[0]] = keyAndValue[1];\n }\n\n if (\"deprecation\" in headers) {\n const matches = headers.link && headers.link.match(/<([^>]+)>; rel=\"deprecation\"/);\n const deprecationLink = matches && matches.pop();\n log.warn(`[@octokit/request] \"${requestOptions.method} ${requestOptions.url}\" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : \"\"}`);\n }\n\n if (status === 204 || status === 205) {\n return;\n } // GitHub API returns 200 for HEAD requests\n\n\n if (requestOptions.method === \"HEAD\") {\n if (status < 400) {\n return;\n }\n\n throw new requestError.RequestError(response.statusText, status, {\n response: {\n url,\n status,\n headers,\n data: undefined\n },\n request: requestOptions\n });\n }\n\n if (status === 304) {\n throw new requestError.RequestError(\"Not modified\", status, {\n response: {\n url,\n status,\n headers,\n data: await getResponseData(response)\n },\n request: requestOptions\n });\n }\n\n if (status >= 400) {\n const data = await getResponseData(response);\n const error = new requestError.RequestError(toErrorMessage(data), status, {\n response: {\n url,\n status,\n headers,\n data\n },\n request: requestOptions\n });\n throw error;\n }\n\n return getResponseData(response);\n }).then(data => {\n return {\n status,\n url,\n headers,\n data\n };\n }).catch(error => {\n if (error instanceof requestError.RequestError) throw error;\n throw new requestError.RequestError(error.message, 500, {\n request: requestOptions\n });\n });\n}\n\nasync function getResponseData(response) {\n const contentType = response.headers.get(\"content-type\");\n\n if (/application\\/json/.test(contentType)) {\n return response.json();\n }\n\n if (!contentType || /^text\\/|charset=utf-8$/.test(contentType)) {\n return response.text();\n }\n\n return getBufferResponse(response);\n}\n\nfunction toErrorMessage(data) {\n if (typeof data === \"string\") return data; // istanbul ignore else - just in case\n\n if (\"message\" in data) {\n if (Array.isArray(data.errors)) {\n return `${data.message}: ${data.errors.map(JSON.stringify).join(\", \")}`;\n }\n\n return data.message;\n } // istanbul ignore next - just in case\n\n\n return `Unknown error: ${JSON.stringify(data)}`;\n}\n\nfunction withDefaults(oldEndpoint, newDefaults) {\n const endpoint = oldEndpoint.defaults(newDefaults);\n\n const newApi = function (route, parameters) {\n const endpointOptions = endpoint.merge(route, parameters);\n\n if (!endpointOptions.request || !endpointOptions.request.hook) {\n return fetchWrapper(endpoint.parse(endpointOptions));\n }\n\n const request = (route, parameters) => {\n return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters)));\n };\n\n 
Object.assign(request, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint)\n });\n return endpointOptions.request.hook(request, endpointOptions);\n };\n\n return Object.assign(newApi, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint)\n });\n}\n\nconst request = withDefaults(endpoint.endpoint, {\n headers: {\n \"user-agent\": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}`\n }\n});\n\nexports.request = request;\n//# sourceMappingURL=index.js.map\n","var register = require('./lib/register')\nvar addHook = require('./lib/add')\nvar removeHook = require('./lib/remove')\n\n// bind with array of arguments: https://stackoverflow.com/a/21792913\nvar bind = Function.bind\nvar bindable = bind.bind(bind)\n\nfunction bindApi (hook, state, name) {\n var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state])\n hook.api = { remove: removeHookRef }\n hook.remove = removeHookRef\n\n ;['before', 'error', 'after', 'wrap'].forEach(function (kind) {\n var args = name ? [state, kind, name] : [state, kind]\n hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args)\n })\n}\n\nfunction HookSingular () {\n var singularHookName = 'h'\n var singularHookState = {\n registry: {}\n }\n var singularHook = register.bind(null, singularHookState, singularHookName)\n bindApi(singularHook, singularHookState, singularHookName)\n return singularHook\n}\n\nfunction HookCollection () {\n var state = {\n registry: {}\n }\n\n var hook = register.bind(null, state)\n bindApi(hook, state)\n\n return hook\n}\n\nvar collectionHookDeprecationMessageDisplayed = false\nfunction Hook () {\n if (!collectionHookDeprecationMessageDisplayed) {\n console.warn('[before-after-hook]: \"Hook()\" repurposing warning, use \"Hook.Collection()\". 
Read more: https://git.io/upgrade-before-after-hook-to-1.4')\n collectionHookDeprecationMessageDisplayed = true\n }\n return HookCollection()\n}\n\nHook.Singular = HookSingular.bind()\nHook.Collection = HookCollection.bind()\n\nmodule.exports = Hook\n// expose constructors as a named property for TypeScript\nmodule.exports.Hook = Hook\nmodule.exports.Singular = Hook.Singular\nmodule.exports.Collection = Hook.Collection\n","module.exports = addHook;\n\nfunction addHook(state, kind, name, hook) {\n var orig = hook;\n if (!state.registry[name]) {\n state.registry[name] = [];\n }\n\n if (kind === \"before\") {\n hook = function (method, options) {\n return Promise.resolve()\n .then(orig.bind(null, options))\n .then(method.bind(null, options));\n };\n }\n\n if (kind === \"after\") {\n hook = function (method, options) {\n var result;\n return Promise.resolve()\n .then(method.bind(null, options))\n .then(function (result_) {\n result = result_;\n return orig(result, options);\n })\n .then(function () {\n return result;\n });\n };\n }\n\n if (kind === \"error\") {\n hook = function (method, options) {\n return Promise.resolve()\n .then(method.bind(null, options))\n .catch(function (error) {\n return orig(error, options);\n });\n };\n }\n\n state.registry[name].push({\n hook: hook,\n orig: orig,\n });\n}\n","module.exports = register;\n\nfunction register(state, name, method, options) {\n if (typeof method !== \"function\") {\n throw new Error(\"method for before hook must be a function\");\n }\n\n if (!options) {\n options = {};\n }\n\n if (Array.isArray(name)) {\n return name.reverse().reduce(function (callback, name) {\n return register.bind(null, state, name, callback, options);\n }, method)();\n }\n\n return Promise.resolve().then(function () {\n if (!state.registry[name]) {\n return method(options);\n }\n\n return state.registry[name].reduce(function (method, registered) {\n return registered.hook.bind(null, method, options);\n }, method)();\n });\n}\n","module.exports = removeHook;\n\nfunction removeHook(state, name, method) {\n if (!state.registry[name]) {\n return;\n }\n\n var index = state.registry[name]\n .map(function (registered) {\n return registered.orig;\n })\n .indexOf(method);\n\n if (index === -1) {\n return;\n }\n\n state.registry[name].splice(index, 1);\n}\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nclass Deprecation extends Error {\n constructor(message) {\n super(message); // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n\n this.name = 'Deprecation';\n }\n\n}\n\nexports.Deprecation = Deprecation;\n","'use strict'\n\nconst fs = require('graceful-fs')\nconst path = require('path')\nconst mkdirsSync = require('../mkdirs').mkdirsSync\nconst utimesMillisSync = require('../util/utimes').utimesMillisSync\nconst stat = require('../util/stat')\n\nfunction copySync (src, dest, opts) {\n if (typeof opts === 'function') {\n opts = { filter: opts }\n }\n\n opts = opts || {}\n opts.clobber = 'clobber' in opts ? !!opts.clobber : true // default to true for now\n opts.overwrite = 'overwrite' in opts ? 
!!opts.overwrite : opts.clobber // overwrite falls back to clobber\n\n // Warn about using preserveTimestamps on 32-bit node\n if (opts.preserveTimestamps && process.arch === 'ia32') {\n process.emitWarning(\n 'Using the preserveTimestamps option in 32-bit node is not recommended;\\n\\n' +\n '\\tsee https://github.com/jprichardson/node-fs-extra/issues/269',\n 'Warning', 'fs-extra-WARN0002'\n )\n }\n\n const { srcStat, destStat } = stat.checkPathsSync(src, dest, 'copy', opts)\n stat.checkParentPathsSync(src, srcStat, dest, 'copy')\n return handleFilterAndCopy(destStat, src, dest, opts)\n}\n\nfunction handleFilterAndCopy (destStat, src, dest, opts) {\n if (opts.filter && !opts.filter(src, dest)) return\n const destParent = path.dirname(dest)\n if (!fs.existsSync(destParent)) mkdirsSync(destParent)\n return getStats(destStat, src, dest, opts)\n}\n\nfunction startCopy (destStat, src, dest, opts) {\n if (opts.filter && !opts.filter(src, dest)) return\n return getStats(destStat, src, dest, opts)\n}\n\nfunction getStats (destStat, src, dest, opts) {\n const statSync = opts.dereference ? fs.statSync : fs.lstatSync\n const srcStat = statSync(src)\n\n if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts)\n else if (srcStat.isFile() ||\n srcStat.isCharacterDevice() ||\n srcStat.isBlockDevice()) return onFile(srcStat, destStat, src, dest, opts)\n else if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts)\n else if (srcStat.isSocket()) throw new Error(`Cannot copy a socket file: ${src}`)\n else if (srcStat.isFIFO()) throw new Error(`Cannot copy a FIFO pipe: ${src}`)\n throw new Error(`Unknown file: ${src}`)\n}\n\nfunction onFile (srcStat, destStat, src, dest, opts) {\n if (!destStat) return copyFile(srcStat, src, dest, opts)\n return mayCopyFile(srcStat, src, dest, opts)\n}\n\nfunction mayCopyFile (srcStat, src, dest, opts) {\n if (opts.overwrite) {\n fs.unlinkSync(dest)\n return copyFile(srcStat, src, dest, opts)\n } else if (opts.errorOnExist) {\n throw new Error(`'${dest}' already exists`)\n }\n}\n\nfunction copyFile (srcStat, src, dest, opts) {\n fs.copyFileSync(src, dest)\n if (opts.preserveTimestamps) handleTimestamps(srcStat.mode, src, dest)\n return setDestMode(dest, srcStat.mode)\n}\n\nfunction handleTimestamps (srcMode, src, dest) {\n // Make sure the file is writable before setting the timestamp\n // otherwise open fails with EPERM when invoked with 'r+'\n // (through utimes call)\n if (fileIsNotWritable(srcMode)) makeFileWritable(dest, srcMode)\n return setDestTimestamps(src, dest)\n}\n\nfunction fileIsNotWritable (srcMode) {\n return (srcMode & 0o200) === 0\n}\n\nfunction makeFileWritable (dest, srcMode) {\n return setDestMode(dest, srcMode | 0o200)\n}\n\nfunction setDestMode (dest, srcMode) {\n return fs.chmodSync(dest, srcMode)\n}\n\nfunction setDestTimestamps (src, dest) {\n // The initial srcStat.atime cannot be trusted\n // because it is modified by the read(2) system call\n // (See https://nodejs.org/api/fs.html#fs_stat_time_values)\n const updatedSrcStat = fs.statSync(src)\n return utimesMillisSync(dest, updatedSrcStat.atime, updatedSrcStat.mtime)\n}\n\nfunction onDir (srcStat, destStat, src, dest, opts) {\n if (!destStat) return mkDirAndCopy(srcStat.mode, src, dest, opts)\n return copyDir(src, dest, opts)\n}\n\nfunction mkDirAndCopy (srcMode, src, dest, opts) {\n fs.mkdirSync(dest)\n copyDir(src, dest, opts)\n return setDestMode(dest, srcMode)\n}\n\nfunction copyDir (src, dest, opts) {\n fs.readdirSync(src).forEach(item => copyDirItem(item, 
src, dest, opts))\n}\n\nfunction copyDirItem (item, src, dest, opts) {\n const srcItem = path.join(src, item)\n const destItem = path.join(dest, item)\n const { destStat } = stat.checkPathsSync(srcItem, destItem, 'copy', opts)\n return startCopy(destStat, srcItem, destItem, opts)\n}\n\nfunction onLink (destStat, src, dest, opts) {\n let resolvedSrc = fs.readlinkSync(src)\n if (opts.dereference) {\n resolvedSrc = path.resolve(process.cwd(), resolvedSrc)\n }\n\n if (!destStat) {\n return fs.symlinkSync(resolvedSrc, dest)\n } else {\n let resolvedDest\n try {\n resolvedDest = fs.readlinkSync(dest)\n } catch (err) {\n // dest exists and is a regular file or directory,\n // Windows may throw UNKNOWN error. If dest already exists,\n // fs throws error anyway, so no need to guard against it here.\n if (err.code === 'EINVAL' || err.code === 'UNKNOWN') return fs.symlinkSync(resolvedSrc, dest)\n throw err\n }\n if (opts.dereference) {\n resolvedDest = path.resolve(process.cwd(), resolvedDest)\n }\n if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) {\n throw new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`)\n }\n\n // prevent copy if src is a subdir of dest since unlinking\n // dest in this case would result in removing src contents\n // and therefore a broken symlink would be created.\n if (fs.statSync(dest).isDirectory() && stat.isSrcSubdir(resolvedDest, resolvedSrc)) {\n throw new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`)\n }\n return copyLink(resolvedSrc, dest)\n }\n}\n\nfunction copyLink (resolvedSrc, dest) {\n fs.unlinkSync(dest)\n return fs.symlinkSync(resolvedSrc, dest)\n}\n\nmodule.exports = copySync\n","'use strict'\n\nconst fs = require('graceful-fs')\nconst path = require('path')\nconst mkdirs = require('../mkdirs').mkdirs\nconst pathExists = require('../path-exists').pathExists\nconst utimesMillis = require('../util/utimes').utimesMillis\nconst stat = require('../util/stat')\n\nfunction copy (src, dest, opts, cb) {\n if (typeof opts === 'function' && !cb) {\n cb = opts\n opts = {}\n } else if (typeof opts === 'function') {\n opts = { filter: opts }\n }\n\n cb = cb || function () {}\n opts = opts || {}\n\n opts.clobber = 'clobber' in opts ? !!opts.clobber : true // default to true for now\n opts.overwrite = 'overwrite' in opts ? 
!!opts.overwrite : opts.clobber // overwrite falls back to clobber\n\n // Warn about using preserveTimestamps on 32-bit node\n if (opts.preserveTimestamps && process.arch === 'ia32') {\n process.emitWarning(\n 'Using the preserveTimestamps option in 32-bit node is not recommended;\\n\\n' +\n '\\tsee https://github.com/jprichardson/node-fs-extra/issues/269',\n 'Warning', 'fs-extra-WARN0001'\n )\n }\n\n stat.checkPaths(src, dest, 'copy', opts, (err, stats) => {\n if (err) return cb(err)\n const { srcStat, destStat } = stats\n stat.checkParentPaths(src, srcStat, dest, 'copy', err => {\n if (err) return cb(err)\n if (opts.filter) return handleFilter(checkParentDir, destStat, src, dest, opts, cb)\n return checkParentDir(destStat, src, dest, opts, cb)\n })\n })\n}\n\nfunction checkParentDir (destStat, src, dest, opts, cb) {\n const destParent = path.dirname(dest)\n pathExists(destParent, (err, dirExists) => {\n if (err) return cb(err)\n if (dirExists) return getStats(destStat, src, dest, opts, cb)\n mkdirs(destParent, err => {\n if (err) return cb(err)\n return getStats(destStat, src, dest, opts, cb)\n })\n })\n}\n\nfunction handleFilter (onInclude, destStat, src, dest, opts, cb) {\n Promise.resolve(opts.filter(src, dest)).then(include => {\n if (include) return onInclude(destStat, src, dest, opts, cb)\n return cb()\n }, error => cb(error))\n}\n\nfunction startCopy (destStat, src, dest, opts, cb) {\n if (opts.filter) return handleFilter(getStats, destStat, src, dest, opts, cb)\n return getStats(destStat, src, dest, opts, cb)\n}\n\nfunction getStats (destStat, src, dest, opts, cb) {\n const stat = opts.dereference ? fs.stat : fs.lstat\n stat(src, (err, srcStat) => {\n if (err) return cb(err)\n\n if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts, cb)\n else if (srcStat.isFile() ||\n srcStat.isCharacterDevice() ||\n srcStat.isBlockDevice()) return onFile(srcStat, destStat, src, dest, opts, cb)\n else if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts, cb)\n else if (srcStat.isSocket()) return cb(new Error(`Cannot copy a socket file: ${src}`))\n else if (srcStat.isFIFO()) return cb(new Error(`Cannot copy a FIFO pipe: ${src}`))\n return cb(new Error(`Unknown file: ${src}`))\n })\n}\n\nfunction onFile (srcStat, destStat, src, dest, opts, cb) {\n if (!destStat) return copyFile(srcStat, src, dest, opts, cb)\n return mayCopyFile(srcStat, src, dest, opts, cb)\n}\n\nfunction mayCopyFile (srcStat, src, dest, opts, cb) {\n if (opts.overwrite) {\n fs.unlink(dest, err => {\n if (err) return cb(err)\n return copyFile(srcStat, src, dest, opts, cb)\n })\n } else if (opts.errorOnExist) {\n return cb(new Error(`'${dest}' already exists`))\n } else return cb()\n}\n\nfunction copyFile (srcStat, src, dest, opts, cb) {\n fs.copyFile(src, dest, err => {\n if (err) return cb(err)\n if (opts.preserveTimestamps) return handleTimestampsAndMode(srcStat.mode, src, dest, cb)\n return setDestMode(dest, srcStat.mode, cb)\n })\n}\n\nfunction handleTimestampsAndMode (srcMode, src, dest, cb) {\n // Make sure the file is writable before setting the timestamp\n // otherwise open fails with EPERM when invoked with 'r+'\n // (through utimes call)\n if (fileIsNotWritable(srcMode)) {\n return makeFileWritable(dest, srcMode, err => {\n if (err) return cb(err)\n return setDestTimestampsAndMode(srcMode, src, dest, cb)\n })\n }\n return setDestTimestampsAndMode(srcMode, src, dest, cb)\n}\n\nfunction fileIsNotWritable (srcMode) {\n return (srcMode & 0o200) === 0\n}\n\nfunction makeFileWritable 
(dest, srcMode, cb) {\n return setDestMode(dest, srcMode | 0o200, cb)\n}\n\nfunction setDestTimestampsAndMode (srcMode, src, dest, cb) {\n setDestTimestamps(src, dest, err => {\n if (err) return cb(err)\n return setDestMode(dest, srcMode, cb)\n })\n}\n\nfunction setDestMode (dest, srcMode, cb) {\n return fs.chmod(dest, srcMode, cb)\n}\n\nfunction setDestTimestamps (src, dest, cb) {\n // The initial srcStat.atime cannot be trusted\n // because it is modified by the read(2) system call\n // (See https://nodejs.org/api/fs.html#fs_stat_time_values)\n fs.stat(src, (err, updatedSrcStat) => {\n if (err) return cb(err)\n return utimesMillis(dest, updatedSrcStat.atime, updatedSrcStat.mtime, cb)\n })\n}\n\nfunction onDir (srcStat, destStat, src, dest, opts, cb) {\n if (!destStat) return mkDirAndCopy(srcStat.mode, src, dest, opts, cb)\n return copyDir(src, dest, opts, cb)\n}\n\nfunction mkDirAndCopy (srcMode, src, dest, opts, cb) {\n fs.mkdir(dest, err => {\n if (err) return cb(err)\n copyDir(src, dest, opts, err => {\n if (err) return cb(err)\n return setDestMode(dest, srcMode, cb)\n })\n })\n}\n\nfunction copyDir (src, dest, opts, cb) {\n fs.readdir(src, (err, items) => {\n if (err) return cb(err)\n return copyDirItems(items, src, dest, opts, cb)\n })\n}\n\nfunction copyDirItems (items, src, dest, opts, cb) {\n const item = items.pop()\n if (!item) return cb()\n return copyDirItem(items, item, src, dest, opts, cb)\n}\n\nfunction copyDirItem (items, item, src, dest, opts, cb) {\n const srcItem = path.join(src, item)\n const destItem = path.join(dest, item)\n stat.checkPaths(srcItem, destItem, 'copy', opts, (err, stats) => {\n if (err) return cb(err)\n const { destStat } = stats\n startCopy(destStat, srcItem, destItem, opts, err => {\n if (err) return cb(err)\n return copyDirItems(items, src, dest, opts, cb)\n })\n })\n}\n\nfunction onLink (destStat, src, dest, opts, cb) {\n fs.readlink(src, (err, resolvedSrc) => {\n if (err) return cb(err)\n if (opts.dereference) {\n resolvedSrc = path.resolve(process.cwd(), resolvedSrc)\n }\n\n if (!destStat) {\n return fs.symlink(resolvedSrc, dest, cb)\n } else {\n fs.readlink(dest, (err, resolvedDest) => {\n if (err) {\n // dest exists and is a regular file or directory,\n // Windows may throw UNKNOWN error. 
If dest already exists,\n // fs throws error anyway, so no need to guard against it here.\n if (err.code === 'EINVAL' || err.code === 'UNKNOWN') return fs.symlink(resolvedSrc, dest, cb)\n return cb(err)\n }\n if (opts.dereference) {\n resolvedDest = path.resolve(process.cwd(), resolvedDest)\n }\n if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) {\n return cb(new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`))\n }\n\n // do not copy if src is a subdir of dest since unlinking\n // dest in this case would result in removing src contents\n // and therefore a broken symlink would be created.\n if (destStat.isDirectory() && stat.isSrcSubdir(resolvedDest, resolvedSrc)) {\n return cb(new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`))\n }\n return copyLink(resolvedSrc, dest, cb)\n })\n }\n })\n}\n\nfunction copyLink (resolvedSrc, dest, cb) {\n fs.unlink(dest, err => {\n if (err) return cb(err)\n return fs.symlink(resolvedSrc, dest, cb)\n })\n}\n\nmodule.exports = copy\n","'use strict'\n\nconst u = require('universalify').fromCallback\nmodule.exports = {\n copy: u(require('./copy')),\n copySync: require('./copy-sync')\n}\n","'use strict'\n\nconst u = require('universalify').fromPromise\nconst fs = require('../fs')\nconst path = require('path')\nconst mkdir = require('../mkdirs')\nconst remove = require('../remove')\n\nconst emptyDir = u(async function emptyDir (dir) {\n let items\n try {\n items = await fs.readdir(dir)\n } catch {\n return mkdir.mkdirs(dir)\n }\n\n return Promise.all(items.map(item => remove.remove(path.join(dir, item))))\n})\n\nfunction emptyDirSync (dir) {\n let items\n try {\n items = fs.readdirSync(dir)\n } catch {\n return mkdir.mkdirsSync(dir)\n }\n\n items.forEach(item => {\n item = path.join(dir, item)\n remove.removeSync(item)\n })\n}\n\nmodule.exports = {\n emptyDirSync,\n emptydirSync: emptyDirSync,\n emptyDir,\n emptydir: emptyDir\n}\n","'use strict'\n\nconst u = require('universalify').fromCallback\nconst path = require('path')\nconst fs = require('graceful-fs')\nconst mkdir = require('../mkdirs')\n\nfunction createFile (file, callback) {\n function makeFile () {\n fs.writeFile(file, '', err => {\n if (err) return callback(err)\n callback()\n })\n }\n\n fs.stat(file, (err, stats) => { // eslint-disable-line handle-callback-err\n if (!err && stats.isFile()) return callback()\n const dir = path.dirname(file)\n fs.stat(dir, (err, stats) => {\n if (err) {\n // if the directory doesn't exist, make it\n if (err.code === 'ENOENT') {\n return mkdir.mkdirs(dir, err => {\n if (err) return callback(err)\n makeFile()\n })\n }\n return callback(err)\n }\n\n if (stats.isDirectory()) makeFile()\n else {\n // parent is not a directory\n // This is just to cause an internal ENOTDIR error to be thrown\n fs.readdir(dir, err => {\n if (err) return callback(err)\n })\n }\n })\n })\n}\n\nfunction createFileSync (file) {\n let stats\n try {\n stats = fs.statSync(file)\n } catch {}\n if (stats && stats.isFile()) return\n\n const dir = path.dirname(file)\n try {\n if (!fs.statSync(dir).isDirectory()) {\n // parent is not a directory\n // This is just to cause an internal ENOTDIR error to be thrown\n fs.readdirSync(dir)\n }\n } catch (err) {\n // If the stat call above failed because the directory doesn't exist, create it\n if (err && err.code === 'ENOENT') mkdir.mkdirsSync(dir)\n else throw err\n }\n\n fs.writeFileSync(file, '')\n}\n\nmodule.exports = {\n createFile: u(createFile),\n createFileSync\n}\n","'use strict'\n\nconst { 
createFile, createFileSync } = require('./file')\nconst { createLink, createLinkSync } = require('./link')\nconst { createSymlink, createSymlinkSync } = require('./symlink')\n\nmodule.exports = {\n // file\n createFile,\n createFileSync,\n ensureFile: createFile,\n ensureFileSync: createFileSync,\n // link\n createLink,\n createLinkSync,\n ensureLink: createLink,\n ensureLinkSync: createLinkSync,\n // symlink\n createSymlink,\n createSymlinkSync,\n ensureSymlink: createSymlink,\n ensureSymlinkSync: createSymlinkSync\n}\n","'use strict'\n\nconst u = require('universalify').fromCallback\nconst path = require('path')\nconst fs = require('graceful-fs')\nconst mkdir = require('../mkdirs')\nconst pathExists = require('../path-exists').pathExists\nconst { areIdentical } = require('../util/stat')\n\nfunction createLink (srcpath, dstpath, callback) {\n function makeLink (srcpath, dstpath) {\n fs.link(srcpath, dstpath, err => {\n if (err) return callback(err)\n callback(null)\n })\n }\n\n fs.lstat(dstpath, (_, dstStat) => {\n fs.lstat(srcpath, (err, srcStat) => {\n if (err) {\n err.message = err.message.replace('lstat', 'ensureLink')\n return callback(err)\n }\n if (dstStat && areIdentical(srcStat, dstStat)) return callback(null)\n\n const dir = path.dirname(dstpath)\n pathExists(dir, (err, dirExists) => {\n if (err) return callback(err)\n if (dirExists) return makeLink(srcpath, dstpath)\n mkdir.mkdirs(dir, err => {\n if (err) return callback(err)\n makeLink(srcpath, dstpath)\n })\n })\n })\n })\n}\n\nfunction createLinkSync (srcpath, dstpath) {\n let dstStat\n try {\n dstStat = fs.lstatSync(dstpath)\n } catch {}\n\n try {\n const srcStat = fs.lstatSync(srcpath)\n if (dstStat && areIdentical(srcStat, dstStat)) return\n } catch (err) {\n err.message = err.message.replace('lstat', 'ensureLink')\n throw err\n }\n\n const dir = path.dirname(dstpath)\n const dirExists = fs.existsSync(dir)\n if (dirExists) return fs.linkSync(srcpath, dstpath)\n mkdir.mkdirsSync(dir)\n\n return fs.linkSync(srcpath, dstpath)\n}\n\nmodule.exports = {\n createLink: u(createLink),\n createLinkSync\n}\n","'use strict'\n\nconst path = require('path')\nconst fs = require('graceful-fs')\nconst pathExists = require('../path-exists').pathExists\n\n/**\n * Function that returns two types of paths, one relative to symlink, and one\n * relative to the current working directory. Checks if path is absolute or\n * relative. If the path is relative, this function checks if the path is\n * relative to symlink or relative to current working directory. This is an\n * initiative to find a smarter `srcpath` to supply when building symlinks.\n * This allows you to determine which path to use out of one of three possible\n * types of source paths. The first is an absolute path. This is detected by\n * `path.isAbsolute()`. When an absolute path is provided, it is checked to\n * see if it exists. If it does it's used, if not an error is returned\n * (callback)/ thrown (sync). The other two options for `srcpath` are a\n * relative url. By default Node's `fs.symlink` works by creating a symlink\n * using `dstpath` and expects the `srcpath` to be relative to the newly\n * created symlink. If you provide a `srcpath` that does not exist on the file\n * system it results in a broken symlink. To minimize this, the function\n * checks to see if the 'relative to symlink' source file exists, and if it\n * does it will use it. 
If it does not, it checks if there's a file that\n * exists that is relative to the current working directory, if does its used.\n * This preserves the expectations of the original fs.symlink spec and adds\n * the ability to pass in `relative to current working direcotry` paths.\n */\n\nfunction symlinkPaths (srcpath, dstpath, callback) {\n if (path.isAbsolute(srcpath)) {\n return fs.lstat(srcpath, (err) => {\n if (err) {\n err.message = err.message.replace('lstat', 'ensureSymlink')\n return callback(err)\n }\n return callback(null, {\n toCwd: srcpath,\n toDst: srcpath\n })\n })\n } else {\n const dstdir = path.dirname(dstpath)\n const relativeToDst = path.join(dstdir, srcpath)\n return pathExists(relativeToDst, (err, exists) => {\n if (err) return callback(err)\n if (exists) {\n return callback(null, {\n toCwd: relativeToDst,\n toDst: srcpath\n })\n } else {\n return fs.lstat(srcpath, (err) => {\n if (err) {\n err.message = err.message.replace('lstat', 'ensureSymlink')\n return callback(err)\n }\n return callback(null, {\n toCwd: srcpath,\n toDst: path.relative(dstdir, srcpath)\n })\n })\n }\n })\n }\n}\n\nfunction symlinkPathsSync (srcpath, dstpath) {\n let exists\n if (path.isAbsolute(srcpath)) {\n exists = fs.existsSync(srcpath)\n if (!exists) throw new Error('absolute srcpath does not exist')\n return {\n toCwd: srcpath,\n toDst: srcpath\n }\n } else {\n const dstdir = path.dirname(dstpath)\n const relativeToDst = path.join(dstdir, srcpath)\n exists = fs.existsSync(relativeToDst)\n if (exists) {\n return {\n toCwd: relativeToDst,\n toDst: srcpath\n }\n } else {\n exists = fs.existsSync(srcpath)\n if (!exists) throw new Error('relative srcpath does not exist')\n return {\n toCwd: srcpath,\n toDst: path.relative(dstdir, srcpath)\n }\n }\n }\n}\n\nmodule.exports = {\n symlinkPaths,\n symlinkPathsSync\n}\n","'use strict'\n\nconst fs = require('graceful-fs')\n\nfunction symlinkType (srcpath, type, callback) {\n callback = (typeof type === 'function') ? type : callback\n type = (typeof type === 'function') ? false : type\n if (type) return callback(null, type)\n fs.lstat(srcpath, (err, stats) => {\n if (err) return callback(null, 'file')\n type = (stats && stats.isDirectory()) ? 'dir' : 'file'\n callback(null, type)\n })\n}\n\nfunction symlinkTypeSync (srcpath, type) {\n let stats\n\n if (type) return type\n try {\n stats = fs.lstatSync(srcpath)\n } catch {\n return 'file'\n }\n return (stats && stats.isDirectory()) ? 'dir' : 'file'\n}\n\nmodule.exports = {\n symlinkType,\n symlinkTypeSync\n}\n","'use strict'\n\nconst u = require('universalify').fromCallback\nconst path = require('path')\nconst fs = require('../fs')\nconst _mkdirs = require('../mkdirs')\nconst mkdirs = _mkdirs.mkdirs\nconst mkdirsSync = _mkdirs.mkdirsSync\n\nconst _symlinkPaths = require('./symlink-paths')\nconst symlinkPaths = _symlinkPaths.symlinkPaths\nconst symlinkPathsSync = _symlinkPaths.symlinkPathsSync\n\nconst _symlinkType = require('./symlink-type')\nconst symlinkType = _symlinkType.symlinkType\nconst symlinkTypeSync = _symlinkType.symlinkTypeSync\n\nconst pathExists = require('../path-exists').pathExists\n\nconst { areIdentical } = require('../util/stat')\n\nfunction createSymlink (srcpath, dstpath, type, callback) {\n callback = (typeof type === 'function') ? type : callback\n type = (typeof type === 'function') ? 
false : type\n\n fs.lstat(dstpath, (err, stats) => {\n if (!err && stats.isSymbolicLink()) {\n Promise.all([\n fs.stat(srcpath),\n fs.stat(dstpath)\n ]).then(([srcStat, dstStat]) => {\n if (areIdentical(srcStat, dstStat)) return callback(null)\n _createSymlink(srcpath, dstpath, type, callback)\n })\n } else _createSymlink(srcpath, dstpath, type, callback)\n })\n}\n\nfunction _createSymlink (srcpath, dstpath, type, callback) {\n symlinkPaths(srcpath, dstpath, (err, relative) => {\n if (err) return callback(err)\n srcpath = relative.toDst\n symlinkType(relative.toCwd, type, (err, type) => {\n if (err) return callback(err)\n const dir = path.dirname(dstpath)\n pathExists(dir, (err, dirExists) => {\n if (err) return callback(err)\n if (dirExists) return fs.symlink(srcpath, dstpath, type, callback)\n mkdirs(dir, err => {\n if (err) return callback(err)\n fs.symlink(srcpath, dstpath, type, callback)\n })\n })\n })\n })\n}\n\nfunction createSymlinkSync (srcpath, dstpath, type) {\n let stats\n try {\n stats = fs.lstatSync(dstpath)\n } catch {}\n if (stats && stats.isSymbolicLink()) {\n const srcStat = fs.statSync(srcpath)\n const dstStat = fs.statSync(dstpath)\n if (areIdentical(srcStat, dstStat)) return\n }\n\n const relative = symlinkPathsSync(srcpath, dstpath)\n srcpath = relative.toDst\n type = symlinkTypeSync(relative.toCwd, type)\n const dir = path.dirname(dstpath)\n const exists = fs.existsSync(dir)\n if (exists) return fs.symlinkSync(srcpath, dstpath, type)\n mkdirsSync(dir)\n return fs.symlinkSync(srcpath, dstpath, type)\n}\n\nmodule.exports = {\n createSymlink: u(createSymlink),\n createSymlinkSync\n}\n","'use strict'\n// This is adapted from https://github.com/normalize/mz\n// Copyright (c) 2014-2016 Jonathan Ong me@jongleberry.com and Contributors\nconst u = require('universalify').fromCallback\nconst fs = require('graceful-fs')\n\nconst api = [\n 'access',\n 'appendFile',\n 'chmod',\n 'chown',\n 'close',\n 'copyFile',\n 'fchmod',\n 'fchown',\n 'fdatasync',\n 'fstat',\n 'fsync',\n 'ftruncate',\n 'futimes',\n 'lchmod',\n 'lchown',\n 'link',\n 'lstat',\n 'mkdir',\n 'mkdtemp',\n 'open',\n 'opendir',\n 'readdir',\n 'readFile',\n 'readlink',\n 'realpath',\n 'rename',\n 'rm',\n 'rmdir',\n 'stat',\n 'symlink',\n 'truncate',\n 'unlink',\n 'utimes',\n 'writeFile'\n].filter(key => {\n // Some commands are not available on some systems. 
Ex:\n // fs.opendir was added in Node.js v12.12.0\n // fs.rm was added in Node.js v14.14.0\n // fs.lchown is not available on at least some Linux\n return typeof fs[key] === 'function'\n})\n\n// Export cloned fs:\nObject.assign(exports, fs)\n\n// Universalify async methods:\napi.forEach(method => {\n exports[method] = u(fs[method])\n})\n\n// We differ from mz/fs in that we still ship the old, broken, fs.exists()\n// since we are a drop-in replacement for the native module\nexports.exists = function (filename, callback) {\n if (typeof callback === 'function') {\n return fs.exists(filename, callback)\n }\n return new Promise(resolve => {\n return fs.exists(filename, resolve)\n })\n}\n\n// fs.read(), fs.write(), & fs.writev() need special treatment due to multiple callback args\n\nexports.read = function (fd, buffer, offset, length, position, callback) {\n if (typeof callback === 'function') {\n return fs.read(fd, buffer, offset, length, position, callback)\n }\n return new Promise((resolve, reject) => {\n fs.read(fd, buffer, offset, length, position, (err, bytesRead, buffer) => {\n if (err) return reject(err)\n resolve({ bytesRead, buffer })\n })\n })\n}\n\n// Function signature can be\n// fs.write(fd, buffer[, offset[, length[, position]]], callback)\n// OR\n// fs.write(fd, string[, position[, encoding]], callback)\n// We need to handle both cases, so we use ...args\nexports.write = function (fd, buffer, ...args) {\n if (typeof args[args.length - 1] === 'function') {\n return fs.write(fd, buffer, ...args)\n }\n\n return new Promise((resolve, reject) => {\n fs.write(fd, buffer, ...args, (err, bytesWritten, buffer) => {\n if (err) return reject(err)\n resolve({ bytesWritten, buffer })\n })\n })\n}\n\n// fs.writev only available in Node v12.9.0+\nif (typeof fs.writev === 'function') {\n // Function signature is\n // s.writev(fd, buffers[, position], callback)\n // We need to handle the optional arg, so we use ...args\n exports.writev = function (fd, buffers, ...args) {\n if (typeof args[args.length - 1] === 'function') {\n return fs.writev(fd, buffers, ...args)\n }\n\n return new Promise((resolve, reject) => {\n fs.writev(fd, buffers, ...args, (err, bytesWritten, buffers) => {\n if (err) return reject(err)\n resolve({ bytesWritten, buffers })\n })\n })\n }\n}\n\n// fs.realpath.native sometimes not available if fs is monkey-patched\nif (typeof fs.realpath.native === 'function') {\n exports.realpath.native = u(fs.realpath.native)\n} else {\n process.emitWarning(\n 'fs.realpath.native is not a function. 
Is fs being monkey-patched?',\n 'Warning', 'fs-extra-WARN0003'\n )\n}\n","'use strict'\n\nmodule.exports = {\n // Export promiseified graceful-fs:\n ...require('./fs'),\n // Export extra methods:\n ...require('./copy'),\n ...require('./empty'),\n ...require('./ensure'),\n ...require('./json'),\n ...require('./mkdirs'),\n ...require('./move'),\n ...require('./output-file'),\n ...require('./path-exists'),\n ...require('./remove')\n}\n","'use strict'\n\nconst u = require('universalify').fromPromise\nconst jsonFile = require('./jsonfile')\n\njsonFile.outputJson = u(require('./output-json'))\njsonFile.outputJsonSync = require('./output-json-sync')\n// aliases\njsonFile.outputJSON = jsonFile.outputJson\njsonFile.outputJSONSync = jsonFile.outputJsonSync\njsonFile.writeJSON = jsonFile.writeJson\njsonFile.writeJSONSync = jsonFile.writeJsonSync\njsonFile.readJSON = jsonFile.readJson\njsonFile.readJSONSync = jsonFile.readJsonSync\n\nmodule.exports = jsonFile\n","'use strict'\n\nconst jsonFile = require('jsonfile')\n\nmodule.exports = {\n // jsonfile exports\n readJson: jsonFile.readFile,\n readJsonSync: jsonFile.readFileSync,\n writeJson: jsonFile.writeFile,\n writeJsonSync: jsonFile.writeFileSync\n}\n","'use strict'\n\nconst { stringify } = require('jsonfile/utils')\nconst { outputFileSync } = require('../output-file')\n\nfunction outputJsonSync (file, data, options) {\n const str = stringify(data, options)\n\n outputFileSync(file, str, options)\n}\n\nmodule.exports = outputJsonSync\n","'use strict'\n\nconst { stringify } = require('jsonfile/utils')\nconst { outputFile } = require('../output-file')\n\nasync function outputJson (file, data, options = {}) {\n const str = stringify(data, options)\n\n await outputFile(file, str, options)\n}\n\nmodule.exports = outputJson\n","'use strict'\nconst u = require('universalify').fromPromise\nconst { makeDir: _makeDir, makeDirSync } = require('./make-dir')\nconst makeDir = u(_makeDir)\n\nmodule.exports = {\n mkdirs: makeDir,\n mkdirsSync: makeDirSync,\n // alias\n mkdirp: makeDir,\n mkdirpSync: makeDirSync,\n ensureDir: makeDir,\n ensureDirSync: makeDirSync\n}\n","'use strict'\nconst fs = require('../fs')\nconst { checkPath } = require('./utils')\n\nconst getMode = options => {\n const defaults = { mode: 0o777 }\n if (typeof options === 'number') return options\n return ({ ...defaults, ...options }).mode\n}\n\nmodule.exports.makeDir = async (dir, options) => {\n checkPath(dir)\n\n return fs.mkdir(dir, {\n mode: getMode(options),\n recursive: true\n })\n}\n\nmodule.exports.makeDirSync = (dir, options) => {\n checkPath(dir)\n\n return fs.mkdirSync(dir, {\n mode: getMode(options),\n recursive: true\n })\n}\n","// Adapted from https://github.com/sindresorhus/make-dir\n// Copyright (c) Sindre Sorhus (sindresorhus.com)\n// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the \"Software\"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:\n// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n'use strict'\nconst path = require('path')\n\n// https://github.com/nodejs/node/issues/8987\n// https://github.com/libuv/libuv/pull/1088\nmodule.exports.checkPath = function checkPath (pth) {\n if (process.platform === 'win32') {\n const pathHasInvalidWinCharacters = /[<>:\"|?*]/.test(pth.replace(path.parse(pth).root, ''))\n\n if (pathHasInvalidWinCharacters) {\n const error = new Error(`Path contains invalid characters: ${pth}`)\n error.code = 'EINVAL'\n throw error\n }\n }\n}\n","'use strict'\n\nconst u = require('universalify').fromCallback\nmodule.exports = {\n move: u(require('./move')),\n moveSync: require('./move-sync')\n}\n","'use strict'\n\nconst fs = require('graceful-fs')\nconst path = require('path')\nconst copySync = require('../copy').copySync\nconst removeSync = require('../remove').removeSync\nconst mkdirpSync = require('../mkdirs').mkdirpSync\nconst stat = require('../util/stat')\n\nfunction moveSync (src, dest, opts) {\n opts = opts || {}\n const overwrite = opts.overwrite || opts.clobber || false\n\n const { srcStat, isChangingCase = false } = stat.checkPathsSync(src, dest, 'move', opts)\n stat.checkParentPathsSync(src, srcStat, dest, 'move')\n if (!isParentRoot(dest)) mkdirpSync(path.dirname(dest))\n return doRename(src, dest, overwrite, isChangingCase)\n}\n\nfunction isParentRoot (dest) {\n const parent = path.dirname(dest)\n const parsedPath = path.parse(parent)\n return parsedPath.root === parent\n}\n\nfunction doRename (src, dest, overwrite, isChangingCase) {\n if (isChangingCase) return rename(src, dest, overwrite)\n if (overwrite) {\n removeSync(dest)\n return rename(src, dest, overwrite)\n }\n if (fs.existsSync(dest)) throw new Error('dest already exists.')\n return rename(src, dest, overwrite)\n}\n\nfunction rename (src, dest, overwrite) {\n try {\n fs.renameSync(src, dest)\n } catch (err) {\n if (err.code !== 'EXDEV') throw err\n return moveAcrossDevice(src, dest, overwrite)\n }\n}\n\nfunction moveAcrossDevice (src, dest, overwrite) {\n const opts = {\n overwrite,\n errorOnExist: true\n }\n copySync(src, dest, opts)\n return removeSync(src)\n}\n\nmodule.exports = moveSync\n","'use strict'\n\nconst fs = require('graceful-fs')\nconst path = require('path')\nconst copy = require('../copy').copy\nconst remove = require('../remove').remove\nconst mkdirp = require('../mkdirs').mkdirp\nconst pathExists = require('../path-exists').pathExists\nconst stat = require('../util/stat')\n\nfunction move (src, dest, opts, cb) {\n if (typeof opts === 'function') {\n cb = opts\n opts = {}\n }\n\n opts = opts || {}\n\n const overwrite = opts.overwrite || opts.clobber || false\n\n stat.checkPaths(src, dest, 'move', opts, (err, stats) => {\n if (err) return cb(err)\n const { srcStat, isChangingCase = false } = stats\n stat.checkParentPaths(src, srcStat, dest, 'move', err => {\n if (err) return cb(err)\n if (isParentRoot(dest)) return doRename(src, dest, overwrite, isChangingCase, cb)\n mkdirp(path.dirname(dest), err => {\n if (err) return cb(err)\n return doRename(src, dest, overwrite, isChangingCase, cb)\n })\n })\n })\n}\n\nfunction isParentRoot (dest) {\n const parent = path.dirname(dest)\n const parsedPath = path.parse(parent)\n return parsedPath.root === parent\n}\n\nfunction doRename (src, dest, 
overwrite, isChangingCase, cb) {\n if (isChangingCase) return rename(src, dest, overwrite, cb)\n if (overwrite) {\n return remove(dest, err => {\n if (err) return cb(err)\n return rename(src, dest, overwrite, cb)\n })\n }\n pathExists(dest, (err, destExists) => {\n if (err) return cb(err)\n if (destExists) return cb(new Error('dest already exists.'))\n return rename(src, dest, overwrite, cb)\n })\n}\n\nfunction rename (src, dest, overwrite, cb) {\n fs.rename(src, dest, err => {\n if (!err) return cb()\n if (err.code !== 'EXDEV') return cb(err)\n return moveAcrossDevice(src, dest, overwrite, cb)\n })\n}\n\nfunction moveAcrossDevice (src, dest, overwrite, cb) {\n const opts = {\n overwrite,\n errorOnExist: true\n }\n copy(src, dest, opts, err => {\n if (err) return cb(err)\n return remove(src, cb)\n })\n}\n\nmodule.exports = move\n","'use strict'\n\nconst u = require('universalify').fromCallback\nconst fs = require('graceful-fs')\nconst path = require('path')\nconst mkdir = require('../mkdirs')\nconst pathExists = require('../path-exists').pathExists\n\nfunction outputFile (file, data, encoding, callback) {\n if (typeof encoding === 'function') {\n callback = encoding\n encoding = 'utf8'\n }\n\n const dir = path.dirname(file)\n pathExists(dir, (err, itDoes) => {\n if (err) return callback(err)\n if (itDoes) return fs.writeFile(file, data, encoding, callback)\n\n mkdir.mkdirs(dir, err => {\n if (err) return callback(err)\n\n fs.writeFile(file, data, encoding, callback)\n })\n })\n}\n\nfunction outputFileSync (file, ...args) {\n const dir = path.dirname(file)\n if (fs.existsSync(dir)) {\n return fs.writeFileSync(file, ...args)\n }\n mkdir.mkdirsSync(dir)\n fs.writeFileSync(file, ...args)\n}\n\nmodule.exports = {\n outputFile: u(outputFile),\n outputFileSync\n}\n","'use strict'\nconst u = require('universalify').fromPromise\nconst fs = require('../fs')\n\nfunction pathExists (path) {\n return fs.access(path).then(() => true).catch(() => false)\n}\n\nmodule.exports = {\n pathExists: u(pathExists),\n pathExistsSync: fs.existsSync\n}\n","'use strict'\n\nconst fs = require('graceful-fs')\nconst u = require('universalify').fromCallback\nconst rimraf = require('./rimraf')\n\nfunction remove (path, callback) {\n // Node 14.14.0+\n if (fs.rm) return fs.rm(path, { recursive: true, force: true }, callback)\n rimraf(path, callback)\n}\n\nfunction removeSync (path) {\n // Node 14.14.0+\n if (fs.rmSync) return fs.rmSync(path, { recursive: true, force: true })\n rimraf.sync(path)\n}\n\nmodule.exports = {\n remove: u(remove),\n removeSync\n}\n","'use strict'\n\nconst fs = require('graceful-fs')\nconst path = require('path')\nconst assert = require('assert')\n\nconst isWindows = (process.platform === 'win32')\n\nfunction defaults (options) {\n const methods = [\n 'unlink',\n 'chmod',\n 'stat',\n 'lstat',\n 'rmdir',\n 'readdir'\n ]\n methods.forEach(m => {\n options[m] = options[m] || fs[m]\n m = m + 'Sync'\n options[m] = options[m] || fs[m]\n })\n\n options.maxBusyTries = options.maxBusyTries || 3\n}\n\nfunction rimraf (p, options, cb) {\n let busyTries = 0\n\n if (typeof options === 'function') {\n cb = options\n options = {}\n }\n\n assert(p, 'rimraf: missing path')\n assert.strictEqual(typeof p, 'string', 'rimraf: path should be a string')\n assert.strictEqual(typeof cb, 'function', 'rimraf: callback function required')\n assert(options, 'rimraf: invalid options argument provided')\n assert.strictEqual(typeof options, 'object', 'rimraf: options should be object')\n\n defaults(options)\n\n rimraf_(p, 
options, function CB (er) {\n if (er) {\n if ((er.code === 'EBUSY' || er.code === 'ENOTEMPTY' || er.code === 'EPERM') &&\n busyTries < options.maxBusyTries) {\n busyTries++\n const time = busyTries * 100\n // try again, with the same exact callback as this one.\n return setTimeout(() => rimraf_(p, options, CB), time)\n }\n\n // already gone\n if (er.code === 'ENOENT') er = null\n }\n\n cb(er)\n })\n}\n\n// Two possible strategies.\n// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR\n// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR\n//\n// Both result in an extra syscall when you guess wrong. However, there\n// are likely far more normal files in the world than directories. This\n// is based on the assumption that a the average number of files per\n// directory is >= 1.\n//\n// If anyone ever complains about this, then I guess the strategy could\n// be made configurable somehow. But until then, YAGNI.\nfunction rimraf_ (p, options, cb) {\n assert(p)\n assert(options)\n assert(typeof cb === 'function')\n\n // sunos lets the root user unlink directories, which is... weird.\n // so we have to lstat here and make sure it's not a dir.\n options.lstat(p, (er, st) => {\n if (er && er.code === 'ENOENT') {\n return cb(null)\n }\n\n // Windows can EPERM on stat. Life is suffering.\n if (er && er.code === 'EPERM' && isWindows) {\n return fixWinEPERM(p, options, er, cb)\n }\n\n if (st && st.isDirectory()) {\n return rmdir(p, options, er, cb)\n }\n\n options.unlink(p, er => {\n if (er) {\n if (er.code === 'ENOENT') {\n return cb(null)\n }\n if (er.code === 'EPERM') {\n return (isWindows)\n ? fixWinEPERM(p, options, er, cb)\n : rmdir(p, options, er, cb)\n }\n if (er.code === 'EISDIR') {\n return rmdir(p, options, er, cb)\n }\n }\n return cb(er)\n })\n })\n}\n\nfunction fixWinEPERM (p, options, er, cb) {\n assert(p)\n assert(options)\n assert(typeof cb === 'function')\n\n options.chmod(p, 0o666, er2 => {\n if (er2) {\n cb(er2.code === 'ENOENT' ? null : er)\n } else {\n options.stat(p, (er3, stats) => {\n if (er3) {\n cb(er3.code === 'ENOENT' ? 
null : er)\n } else if (stats.isDirectory()) {\n rmdir(p, options, er, cb)\n } else {\n options.unlink(p, cb)\n }\n })\n }\n })\n}\n\nfunction fixWinEPERMSync (p, options, er) {\n let stats\n\n assert(p)\n assert(options)\n\n try {\n options.chmodSync(p, 0o666)\n } catch (er2) {\n if (er2.code === 'ENOENT') {\n return\n } else {\n throw er\n }\n }\n\n try {\n stats = options.statSync(p)\n } catch (er3) {\n if (er3.code === 'ENOENT') {\n return\n } else {\n throw er\n }\n }\n\n if (stats.isDirectory()) {\n rmdirSync(p, options, er)\n } else {\n options.unlinkSync(p)\n }\n}\n\nfunction rmdir (p, options, originalEr, cb) {\n assert(p)\n assert(options)\n assert(typeof cb === 'function')\n\n // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS)\n // if we guessed wrong, and it's not a directory, then\n // raise the original error.\n options.rmdir(p, er => {\n if (er && (er.code === 'ENOTEMPTY' || er.code === 'EEXIST' || er.code === 'EPERM')) {\n rmkids(p, options, cb)\n } else if (er && er.code === 'ENOTDIR') {\n cb(originalEr)\n } else {\n cb(er)\n }\n })\n}\n\nfunction rmkids (p, options, cb) {\n assert(p)\n assert(options)\n assert(typeof cb === 'function')\n\n options.readdir(p, (er, files) => {\n if (er) return cb(er)\n\n let n = files.length\n let errState\n\n if (n === 0) return options.rmdir(p, cb)\n\n files.forEach(f => {\n rimraf(path.join(p, f), options, er => {\n if (errState) {\n return\n }\n if (er) return cb(errState = er)\n if (--n === 0) {\n options.rmdir(p, cb)\n }\n })\n })\n })\n}\n\n// this looks simpler, and is strictly *faster*, but will\n// tie up the JavaScript thread and fail on excessively\n// deep directory trees.\nfunction rimrafSync (p, options) {\n let st\n\n options = options || {}\n defaults(options)\n\n assert(p, 'rimraf: missing path')\n assert.strictEqual(typeof p, 'string', 'rimraf: path should be a string')\n assert(options, 'rimraf: missing options')\n assert.strictEqual(typeof options, 'object', 'rimraf: options should be object')\n\n try {\n st = options.lstatSync(p)\n } catch (er) {\n if (er.code === 'ENOENT') {\n return\n }\n\n // Windows can EPERM on stat. Life is suffering.\n if (er.code === 'EPERM' && isWindows) {\n fixWinEPERMSync(p, options, er)\n }\n }\n\n try {\n // sunos lets the root user unlink directories, which is... weird.\n if (st && st.isDirectory()) {\n rmdirSync(p, options, null)\n } else {\n options.unlinkSync(p)\n }\n } catch (er) {\n if (er.code === 'ENOENT') {\n return\n } else if (er.code === 'EPERM') {\n return isWindows ? 
fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er)\n } else if (er.code !== 'EISDIR') {\n throw er\n }\n rmdirSync(p, options, er)\n }\n}\n\nfunction rmdirSync (p, options, originalEr) {\n assert(p)\n assert(options)\n\n try {\n options.rmdirSync(p)\n } catch (er) {\n if (er.code === 'ENOTDIR') {\n throw originalEr\n } else if (er.code === 'ENOTEMPTY' || er.code === 'EEXIST' || er.code === 'EPERM') {\n rmkidsSync(p, options)\n } else if (er.code !== 'ENOENT') {\n throw er\n }\n }\n}\n\nfunction rmkidsSync (p, options) {\n assert(p)\n assert(options)\n options.readdirSync(p).forEach(f => rimrafSync(path.join(p, f), options))\n\n if (isWindows) {\n // We only end up here once we got ENOTEMPTY at least once, and\n // at this point, we are guaranteed to have removed all the kids.\n // So, we know that it won't be ENOENT or ENOTDIR or anything else.\n // try really hard to delete stuff on windows, because it has a\n // PROFOUNDLY annoying habit of not closing handles promptly when\n // files are deleted, resulting in spurious ENOTEMPTY errors.\n const startTime = Date.now()\n do {\n try {\n const ret = options.rmdirSync(p, options)\n return ret\n } catch {}\n } while (Date.now() - startTime < 500) // give up after 500ms\n } else {\n const ret = options.rmdirSync(p, options)\n return ret\n }\n}\n\nmodule.exports = rimraf\nrimraf.sync = rimrafSync\n","'use strict'\n\nconst fs = require('../fs')\nconst path = require('path')\nconst util = require('util')\n\nfunction getStats (src, dest, opts) {\n const statFunc = opts.dereference\n ? (file) => fs.stat(file, { bigint: true })\n : (file) => fs.lstat(file, { bigint: true })\n return Promise.all([\n statFunc(src),\n statFunc(dest).catch(err => {\n if (err.code === 'ENOENT') return null\n throw err\n })\n ]).then(([srcStat, destStat]) => ({ srcStat, destStat }))\n}\n\nfunction getStatsSync (src, dest, opts) {\n let destStat\n const statFunc = opts.dereference\n ? 
(file) => fs.statSync(file, { bigint: true })\n : (file) => fs.lstatSync(file, { bigint: true })\n const srcStat = statFunc(src)\n try {\n destStat = statFunc(dest)\n } catch (err) {\n if (err.code === 'ENOENT') return { srcStat, destStat: null }\n throw err\n }\n return { srcStat, destStat }\n}\n\nfunction checkPaths (src, dest, funcName, opts, cb) {\n util.callbackify(getStats)(src, dest, opts, (err, stats) => {\n if (err) return cb(err)\n const { srcStat, destStat } = stats\n\n if (destStat) {\n if (areIdentical(srcStat, destStat)) {\n const srcBaseName = path.basename(src)\n const destBaseName = path.basename(dest)\n if (funcName === 'move' &&\n srcBaseName !== destBaseName &&\n srcBaseName.toLowerCase() === destBaseName.toLowerCase()) {\n return cb(null, { srcStat, destStat, isChangingCase: true })\n }\n return cb(new Error('Source and destination must not be the same.'))\n }\n if (srcStat.isDirectory() && !destStat.isDirectory()) {\n return cb(new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`))\n }\n if (!srcStat.isDirectory() && destStat.isDirectory()) {\n return cb(new Error(`Cannot overwrite directory '${dest}' with non-directory '${src}'.`))\n }\n }\n\n if (srcStat.isDirectory() && isSrcSubdir(src, dest)) {\n return cb(new Error(errMsg(src, dest, funcName)))\n }\n return cb(null, { srcStat, destStat })\n })\n}\n\nfunction checkPathsSync (src, dest, funcName, opts) {\n const { srcStat, destStat } = getStatsSync(src, dest, opts)\n\n if (destStat) {\n if (areIdentical(srcStat, destStat)) {\n const srcBaseName = path.basename(src)\n const destBaseName = path.basename(dest)\n if (funcName === 'move' &&\n srcBaseName !== destBaseName &&\n srcBaseName.toLowerCase() === destBaseName.toLowerCase()) {\n return { srcStat, destStat, isChangingCase: true }\n }\n throw new Error('Source and destination must not be the same.')\n }\n if (srcStat.isDirectory() && !destStat.isDirectory()) {\n throw new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`)\n }\n if (!srcStat.isDirectory() && destStat.isDirectory()) {\n throw new Error(`Cannot overwrite directory '${dest}' with non-directory '${src}'.`)\n }\n }\n\n if (srcStat.isDirectory() && isSrcSubdir(src, dest)) {\n throw new Error(errMsg(src, dest, funcName))\n }\n return { srcStat, destStat }\n}\n\n// recursively check if dest parent is a subdirectory of src.\n// It works for all file types including symlinks since it\n// checks the src and dest inodes. 
It starts from the deepest\n// parent and stops once it reaches the src parent or the root path.\nfunction checkParentPaths (src, srcStat, dest, funcName, cb) {\n const srcParent = path.resolve(path.dirname(src))\n const destParent = path.resolve(path.dirname(dest))\n if (destParent === srcParent || destParent === path.parse(destParent).root) return cb()\n fs.stat(destParent, { bigint: true }, (err, destStat) => {\n if (err) {\n if (err.code === 'ENOENT') return cb()\n return cb(err)\n }\n if (areIdentical(srcStat, destStat)) {\n return cb(new Error(errMsg(src, dest, funcName)))\n }\n return checkParentPaths(src, srcStat, destParent, funcName, cb)\n })\n}\n\nfunction checkParentPathsSync (src, srcStat, dest, funcName) {\n const srcParent = path.resolve(path.dirname(src))\n const destParent = path.resolve(path.dirname(dest))\n if (destParent === srcParent || destParent === path.parse(destParent).root) return\n let destStat\n try {\n destStat = fs.statSync(destParent, { bigint: true })\n } catch (err) {\n if (err.code === 'ENOENT') return\n throw err\n }\n if (areIdentical(srcStat, destStat)) {\n throw new Error(errMsg(src, dest, funcName))\n }\n return checkParentPathsSync(src, srcStat, destParent, funcName)\n}\n\nfunction areIdentical (srcStat, destStat) {\n return destStat.ino && destStat.dev && destStat.ino === srcStat.ino && destStat.dev === srcStat.dev\n}\n\n// return true if dest is a subdir of src, otherwise false.\n// It only checks the path strings.\nfunction isSrcSubdir (src, dest) {\n const srcArr = path.resolve(src).split(path.sep).filter(i => i)\n const destArr = path.resolve(dest).split(path.sep).filter(i => i)\n return srcArr.reduce((acc, cur, i) => acc && destArr[i] === cur, true)\n}\n\nfunction errMsg (src, dest, funcName) {\n return `Cannot ${funcName} '${src}' to a subdirectory of itself, '${dest}'.`\n}\n\nmodule.exports = {\n checkPaths,\n checkPathsSync,\n checkParentPaths,\n checkParentPathsSync,\n isSrcSubdir,\n areIdentical\n}\n","'use strict'\n\nconst fs = require('graceful-fs')\n\nfunction utimesMillis (path, atime, mtime, callback) {\n // if (!HAS_MILLIS_RES) return fs.utimes(path, atime, mtime, callback)\n fs.open(path, 'r+', (err, fd) => {\n if (err) return callback(err)\n fs.futimes(fd, atime, mtime, futimesErr => {\n fs.close(fd, closeErr => {\n if (callback) callback(futimesErr || closeErr)\n })\n })\n })\n}\n\nfunction utimesMillisSync (path, atime, mtime) {\n const fd = fs.openSync(path, 'r+')\n fs.futimesSync(fd, atime, mtime)\n return fs.closeSync(fd)\n}\n\nmodule.exports = {\n utimesMillis,\n utimesMillisSync\n}\n","'use strict'\n\nmodule.exports = clone\n\nvar getPrototypeOf = Object.getPrototypeOf || function (obj) {\n return obj.__proto__\n}\n\nfunction clone (obj) {\n if (obj === null || typeof obj !== 'object')\n return obj\n\n if (obj instanceof Object)\n var copy = { __proto__: getPrototypeOf(obj) }\n else\n var copy = Object.create(null)\n\n Object.getOwnPropertyNames(obj).forEach(function (key) {\n Object.defineProperty(copy, key, Object.getOwnPropertyDescriptor(obj, key))\n })\n\n return copy\n}\n","var fs = require('fs')\nvar polyfills = require('./polyfills.js')\nvar legacy = require('./legacy-streams.js')\nvar clone = require('./clone.js')\n\nvar util = require('util')\n\n/* istanbul ignore next - node 0.x polyfill */\nvar gracefulQueue\nvar previousSymbol\n\n/* istanbul ignore else - node 0.x polyfill */\nif (typeof Symbol === 'function' && typeof Symbol.for === 'function') {\n gracefulQueue = Symbol.for('graceful-fs.queue')\n // This is 
used in testing by future versions\n previousSymbol = Symbol.for('graceful-fs.previous')\n} else {\n gracefulQueue = '___graceful-fs.queue'\n previousSymbol = '___graceful-fs.previous'\n}\n\nfunction noop () {}\n\nfunction publishQueue(context, queue) {\n Object.defineProperty(context, gracefulQueue, {\n get: function() {\n return queue\n }\n })\n}\n\nvar debug = noop\nif (util.debuglog)\n debug = util.debuglog('gfs4')\nelse if (/\\bgfs4\\b/i.test(process.env.NODE_DEBUG || ''))\n debug = function() {\n var m = util.format.apply(util, arguments)\n m = 'GFS4: ' + m.split(/\\n/).join('\\nGFS4: ')\n console.error(m)\n }\n\n// Once time initialization\nif (!fs[gracefulQueue]) {\n // This queue can be shared by multiple loaded instances\n var queue = global[gracefulQueue] || []\n publishQueue(fs, queue)\n\n // Patch fs.close/closeSync to shared queue version, because we need\n // to retry() whenever a close happens *anywhere* in the program.\n // This is essential when multiple graceful-fs instances are\n // in play at the same time.\n fs.close = (function (fs$close) {\n function close (fd, cb) {\n return fs$close.call(fs, fd, function (err) {\n // This function uses the graceful-fs shared queue\n if (!err) {\n resetQueue()\n }\n\n if (typeof cb === 'function')\n cb.apply(this, arguments)\n })\n }\n\n Object.defineProperty(close, previousSymbol, {\n value: fs$close\n })\n return close\n })(fs.close)\n\n fs.closeSync = (function (fs$closeSync) {\n function closeSync (fd) {\n // This function uses the graceful-fs shared queue\n fs$closeSync.apply(fs, arguments)\n resetQueue()\n }\n\n Object.defineProperty(closeSync, previousSymbol, {\n value: fs$closeSync\n })\n return closeSync\n })(fs.closeSync)\n\n if (/\\bgfs4\\b/i.test(process.env.NODE_DEBUG || '')) {\n process.on('exit', function() {\n debug(fs[gracefulQueue])\n require('assert').equal(fs[gracefulQueue].length, 0)\n })\n }\n}\n\nif (!global[gracefulQueue]) {\n publishQueue(global, fs[gracefulQueue]);\n}\n\nmodule.exports = patch(clone(fs))\nif (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !fs.__patched) {\n module.exports = patch(fs)\n fs.__patched = true;\n}\n\nfunction patch (fs) {\n // Everything that references the open() function needs to be in here\n polyfills(fs)\n fs.gracefulify = patch\n\n fs.createReadStream = createReadStream\n fs.createWriteStream = createWriteStream\n var fs$readFile = fs.readFile\n fs.readFile = readFile\n function readFile (path, options, cb) {\n if (typeof options === 'function')\n cb = options, options = null\n\n return go$readFile(path, options, cb)\n\n function go$readFile (path, options, cb, startTime) {\n return fs$readFile(path, options, function (err) {\n if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))\n enqueue([go$readFile, [path, options, cb], err, startTime || Date.now(), Date.now()])\n else {\n if (typeof cb === 'function')\n cb.apply(this, arguments)\n }\n })\n }\n }\n\n var fs$writeFile = fs.writeFile\n fs.writeFile = writeFile\n function writeFile (path, data, options, cb) {\n if (typeof options === 'function')\n cb = options, options = null\n\n return go$writeFile(path, data, options, cb)\n\n function go$writeFile (path, data, options, cb, startTime) {\n return fs$writeFile(path, data, options, function (err) {\n if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))\n enqueue([go$writeFile, [path, data, options, cb], err, startTime || Date.now(), Date.now()])\n else {\n if (typeof cb === 'function')\n cb.apply(this, arguments)\n }\n })\n }\n }\n\n var fs$appendFile = 
fs.appendFile\n if (fs$appendFile)\n fs.appendFile = appendFile\n function appendFile (path, data, options, cb) {\n if (typeof options === 'function')\n cb = options, options = null\n\n return go$appendFile(path, data, options, cb)\n\n function go$appendFile (path, data, options, cb, startTime) {\n return fs$appendFile(path, data, options, function (err) {\n if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))\n enqueue([go$appendFile, [path, data, options, cb], err, startTime || Date.now(), Date.now()])\n else {\n if (typeof cb === 'function')\n cb.apply(this, arguments)\n }\n })\n }\n }\n\n var fs$copyFile = fs.copyFile\n if (fs$copyFile)\n fs.copyFile = copyFile\n function copyFile (src, dest, flags, cb) {\n if (typeof flags === 'function') {\n cb = flags\n flags = 0\n }\n return go$copyFile(src, dest, flags, cb)\n\n function go$copyFile (src, dest, flags, cb, startTime) {\n return fs$copyFile(src, dest, flags, function (err) {\n if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))\n enqueue([go$copyFile, [src, dest, flags, cb], err, startTime || Date.now(), Date.now()])\n else {\n if (typeof cb === 'function')\n cb.apply(this, arguments)\n }\n })\n }\n }\n\n var fs$readdir = fs.readdir\n fs.readdir = readdir\n var noReaddirOptionVersions = /^v[0-5]\\./\n function readdir (path, options, cb) {\n if (typeof options === 'function')\n cb = options, options = null\n\n var go$readdir = noReaddirOptionVersions.test(process.version)\n ? function go$readdir (path, options, cb, startTime) {\n return fs$readdir(path, fs$readdirCallback(\n path, options, cb, startTime\n ))\n }\n : function go$readdir (path, options, cb, startTime) {\n return fs$readdir(path, options, fs$readdirCallback(\n path, options, cb, startTime\n ))\n }\n\n return go$readdir(path, options, cb)\n\n function fs$readdirCallback (path, options, cb, startTime) {\n return function (err, files) {\n if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))\n enqueue([\n go$readdir,\n [path, options, cb],\n err,\n startTime || Date.now(),\n Date.now()\n ])\n else {\n if (files && files.sort)\n files.sort()\n\n if (typeof cb === 'function')\n cb.call(this, err, files)\n }\n }\n }\n }\n\n if (process.version.substr(0, 4) === 'v0.8') {\n var legStreams = legacy(fs)\n ReadStream = legStreams.ReadStream\n WriteStream = legStreams.WriteStream\n }\n\n var fs$ReadStream = fs.ReadStream\n if (fs$ReadStream) {\n ReadStream.prototype = Object.create(fs$ReadStream.prototype)\n ReadStream.prototype.open = ReadStream$open\n }\n\n var fs$WriteStream = fs.WriteStream\n if (fs$WriteStream) {\n WriteStream.prototype = Object.create(fs$WriteStream.prototype)\n WriteStream.prototype.open = WriteStream$open\n }\n\n Object.defineProperty(fs, 'ReadStream', {\n get: function () {\n return ReadStream\n },\n set: function (val) {\n ReadStream = val\n },\n enumerable: true,\n configurable: true\n })\n Object.defineProperty(fs, 'WriteStream', {\n get: function () {\n return WriteStream\n },\n set: function (val) {\n WriteStream = val\n },\n enumerable: true,\n configurable: true\n })\n\n // legacy names\n var FileReadStream = ReadStream\n Object.defineProperty(fs, 'FileReadStream', {\n get: function () {\n return FileReadStream\n },\n set: function (val) {\n FileReadStream = val\n },\n enumerable: true,\n configurable: true\n })\n var FileWriteStream = WriteStream\n Object.defineProperty(fs, 'FileWriteStream', {\n get: function () {\n return FileWriteStream\n },\n set: function (val) {\n FileWriteStream = val\n },\n enumerable: true,\n 
configurable: true\n })\n\n function ReadStream (path, options) {\n if (this instanceof ReadStream)\n return fs$ReadStream.apply(this, arguments), this\n else\n return ReadStream.apply(Object.create(ReadStream.prototype), arguments)\n }\n\n function ReadStream$open () {\n var that = this\n open(that.path, that.flags, that.mode, function (err, fd) {\n if (err) {\n if (that.autoClose)\n that.destroy()\n\n that.emit('error', err)\n } else {\n that.fd = fd\n that.emit('open', fd)\n that.read()\n }\n })\n }\n\n function WriteStream (path, options) {\n if (this instanceof WriteStream)\n return fs$WriteStream.apply(this, arguments), this\n else\n return WriteStream.apply(Object.create(WriteStream.prototype), arguments)\n }\n\n function WriteStream$open () {\n var that = this\n open(that.path, that.flags, that.mode, function (err, fd) {\n if (err) {\n that.destroy()\n that.emit('error', err)\n } else {\n that.fd = fd\n that.emit('open', fd)\n }\n })\n }\n\n function createReadStream (path, options) {\n return new fs.ReadStream(path, options)\n }\n\n function createWriteStream (path, options) {\n return new fs.WriteStream(path, options)\n }\n\n var fs$open = fs.open\n fs.open = open\n function open (path, flags, mode, cb) {\n if (typeof mode === 'function')\n cb = mode, mode = null\n\n return go$open(path, flags, mode, cb)\n\n function go$open (path, flags, mode, cb, startTime) {\n return fs$open(path, flags, mode, function (err, fd) {\n if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))\n enqueue([go$open, [path, flags, mode, cb], err, startTime || Date.now(), Date.now()])\n else {\n if (typeof cb === 'function')\n cb.apply(this, arguments)\n }\n })\n }\n }\n\n return fs\n}\n\nfunction enqueue (elem) {\n debug('ENQUEUE', elem[0].name, elem[1])\n fs[gracefulQueue].push(elem)\n retry()\n}\n\n// keep track of the timeout between retry() calls\nvar retryTimer\n\n// reset the startTime and lastTime to now\n// this resets the start of the 60 second overall timeout as well as the\n// delay between attempts so that we'll retry these jobs sooner\nfunction resetQueue () {\n var now = Date.now()\n for (var i = 0; i < fs[gracefulQueue].length; ++i) {\n // entries that are only a length of 2 are from an older version, don't\n // bother modifying those since they'll be retried anyway.\n if (fs[gracefulQueue][i].length > 2) {\n fs[gracefulQueue][i][3] = now // startTime\n fs[gracefulQueue][i][4] = now // lastTime\n }\n }\n // call retry to make sure we're actively processing the queue\n retry()\n}\n\nfunction retry () {\n // clear the timer and remove it to help prevent unintended concurrency\n clearTimeout(retryTimer)\n retryTimer = undefined\n\n if (fs[gracefulQueue].length === 0)\n return\n\n var elem = fs[gracefulQueue].shift()\n var fn = elem[0]\n var args = elem[1]\n // these items may be unset if they were added by an older graceful-fs\n var err = elem[2]\n var startTime = elem[3]\n var lastTime = elem[4]\n\n // if we don't have a startTime we have no way of knowing if we've waited\n // long enough, so go ahead and retry this item now\n if (startTime === undefined) {\n debug('RETRY', fn.name, args)\n fn.apply(null, args)\n } else if (Date.now() - startTime >= 60000) {\n // it's been more than 60 seconds total, bail now\n debug('TIMEOUT', fn.name, args)\n var cb = args.pop()\n if (typeof cb === 'function')\n cb.call(null, err)\n } else {\n // the amount of time between the last attempt and right now\n var sinceAttempt = Date.now() - lastTime\n // the amount of time between when we first tried, 
and when we last tried\n // rounded up to at least 1\n var sinceStart = Math.max(lastTime - startTime, 1)\n // backoff. wait longer than the total time we've been retrying, but only\n // up to a maximum of 100ms\n var desiredDelay = Math.min(sinceStart * 1.2, 100)\n // it's been long enough since the last retry, do it again\n if (sinceAttempt >= desiredDelay) {\n debug('RETRY', fn.name, args)\n fn.apply(null, args.concat([startTime]))\n } else {\n // if we can't do this job yet, push it to the end of the queue\n // and let the next iteration check again\n fs[gracefulQueue].push(elem)\n }\n }\n\n // schedule our next run if one isn't already scheduled\n if (retryTimer === undefined) {\n retryTimer = setTimeout(retry, 0)\n }\n}\n","var Stream = require('stream').Stream\n\nmodule.exports = legacy\n\nfunction legacy (fs) {\n return {\n ReadStream: ReadStream,\n WriteStream: WriteStream\n }\n\n function ReadStream (path, options) {\n if (!(this instanceof ReadStream)) return new ReadStream(path, options);\n\n Stream.call(this);\n\n var self = this;\n\n this.path = path;\n this.fd = null;\n this.readable = true;\n this.paused = false;\n\n this.flags = 'r';\n this.mode = 438; /*=0666*/\n this.bufferSize = 64 * 1024;\n\n options = options || {};\n\n // Mixin options into this\n var keys = Object.keys(options);\n for (var index = 0, length = keys.length; index < length; index++) {\n var key = keys[index];\n this[key] = options[key];\n }\n\n if (this.encoding) this.setEncoding(this.encoding);\n\n if (this.start !== undefined) {\n if ('number' !== typeof this.start) {\n throw TypeError('start must be a Number');\n }\n if (this.end === undefined) {\n this.end = Infinity;\n } else if ('number' !== typeof this.end) {\n throw TypeError('end must be a Number');\n }\n\n if (this.start > this.end) {\n throw new Error('start must be <= end');\n }\n\n this.pos = this.start;\n }\n\n if (this.fd !== null) {\n process.nextTick(function() {\n self._read();\n });\n return;\n }\n\n fs.open(this.path, this.flags, this.mode, function (err, fd) {\n if (err) {\n self.emit('error', err);\n self.readable = false;\n return;\n }\n\n self.fd = fd;\n self.emit('open', fd);\n self._read();\n })\n }\n\n function WriteStream (path, options) {\n if (!(this instanceof WriteStream)) return new WriteStream(path, options);\n\n Stream.call(this);\n\n this.path = path;\n this.fd = null;\n this.writable = true;\n\n this.flags = 'w';\n this.encoding = 'binary';\n this.mode = 438; /*=0666*/\n this.bytesWritten = 0;\n\n options = options || {};\n\n // Mixin options into this\n var keys = Object.keys(options);\n for (var index = 0, length = keys.length; index < length; index++) {\n var key = keys[index];\n this[key] = options[key];\n }\n\n if (this.start !== undefined) {\n if ('number' !== typeof this.start) {\n throw TypeError('start must be a Number');\n }\n if (this.start < 0) {\n throw new Error('start must be >= zero');\n }\n\n this.pos = this.start;\n }\n\n this.busy = false;\n this._queue = [];\n\n if (this.fd === null) {\n this._open = fs.open;\n this._queue.push([this._open, this.path, this.flags, this.mode, undefined]);\n this.flush();\n }\n }\n}\n","var constants = require('constants')\n\nvar origCwd = process.cwd\nvar cwd = null\n\nvar platform = process.env.GRACEFUL_FS_PLATFORM || process.platform\n\nprocess.cwd = function() {\n if (!cwd)\n cwd = origCwd.call(process)\n return cwd\n}\ntry {\n process.cwd()\n} catch (er) {}\n\n// This check is needed until node.js 12 is required\nif (typeof process.chdir === 'function') {\n var 
chdir = process.chdir\n process.chdir = function (d) {\n cwd = null\n chdir.call(process, d)\n }\n if (Object.setPrototypeOf) Object.setPrototypeOf(process.chdir, chdir)\n}\n\nmodule.exports = patch\n\nfunction patch (fs) {\n // (re-)implement some things that are known busted or missing.\n\n // lchmod, broken prior to 0.6.2\n // back-port the fix here.\n if (constants.hasOwnProperty('O_SYMLINK') &&\n process.version.match(/^v0\\.6\\.[0-2]|^v0\\.5\\./)) {\n patchLchmod(fs)\n }\n\n // lutimes implementation, or no-op\n if (!fs.lutimes) {\n patchLutimes(fs)\n }\n\n // https://github.com/isaacs/node-graceful-fs/issues/4\n // Chown should not fail on einval or eperm if non-root.\n // It should not fail on enosys ever, as this just indicates\n // that a fs doesn't support the intended operation.\n\n fs.chown = chownFix(fs.chown)\n fs.fchown = chownFix(fs.fchown)\n fs.lchown = chownFix(fs.lchown)\n\n fs.chmod = chmodFix(fs.chmod)\n fs.fchmod = chmodFix(fs.fchmod)\n fs.lchmod = chmodFix(fs.lchmod)\n\n fs.chownSync = chownFixSync(fs.chownSync)\n fs.fchownSync = chownFixSync(fs.fchownSync)\n fs.lchownSync = chownFixSync(fs.lchownSync)\n\n fs.chmodSync = chmodFixSync(fs.chmodSync)\n fs.fchmodSync = chmodFixSync(fs.fchmodSync)\n fs.lchmodSync = chmodFixSync(fs.lchmodSync)\n\n fs.stat = statFix(fs.stat)\n fs.fstat = statFix(fs.fstat)\n fs.lstat = statFix(fs.lstat)\n\n fs.statSync = statFixSync(fs.statSync)\n fs.fstatSync = statFixSync(fs.fstatSync)\n fs.lstatSync = statFixSync(fs.lstatSync)\n\n // if lchmod/lchown do not exist, then make them no-ops\n if (fs.chmod && !fs.lchmod) {\n fs.lchmod = function (path, mode, cb) {\n if (cb) process.nextTick(cb)\n }\n fs.lchmodSync = function () {}\n }\n if (fs.chown && !fs.lchown) {\n fs.lchown = function (path, uid, gid, cb) {\n if (cb) process.nextTick(cb)\n }\n fs.lchownSync = function () {}\n }\n\n // on Windows, A/V software can lock the directory, causing this\n // to fail with an EACCES or EPERM if the directory contains newly\n // created files. Try again on failure, for up to 60 seconds.\n\n // Set the timeout this long because some Windows Anti-Virus, such as Parity\n // bit9, may lock files for up to a minute, causing npm package install\n // failures. Also, take care to yield the scheduler. Windows scheduling gives\n // CPU to a busy looping process, which can cause the program causing the lock\n // contention to be starved of CPU by node, so the contention doesn't resolve.\n if (platform === \"win32\") {\n fs.rename = typeof fs.rename !== 'function' ? fs.rename\n : (function (fs$rename) {\n function rename (from, to, cb) {\n var start = Date.now()\n var backoff = 0;\n fs$rename(from, to, function CB (er) {\n if (er\n && (er.code === \"EACCES\" || er.code === \"EPERM\")\n && Date.now() - start < 60000) {\n setTimeout(function() {\n fs.stat(to, function (stater, st) {\n if (stater && stater.code === \"ENOENT\")\n fs$rename(from, to, CB);\n else\n cb(er)\n })\n }, backoff)\n if (backoff < 100)\n backoff += 10;\n return;\n }\n if (cb) cb(er)\n })\n }\n if (Object.setPrototypeOf) Object.setPrototypeOf(rename, fs$rename)\n return rename\n })(fs.rename)\n }\n\n // if read() returns EAGAIN, then just try it again.\n fs.read = typeof fs.read !== 'function' ? 
fs.read\n : (function (fs$read) {\n function read (fd, buffer, offset, length, position, callback_) {\n var callback\n if (callback_ && typeof callback_ === 'function') {\n var eagCounter = 0\n callback = function (er, _, __) {\n if (er && er.code === 'EAGAIN' && eagCounter < 10) {\n eagCounter ++\n return fs$read.call(fs, fd, buffer, offset, length, position, callback)\n }\n callback_.apply(this, arguments)\n }\n }\n return fs$read.call(fs, fd, buffer, offset, length, position, callback)\n }\n\n // This ensures `util.promisify` works as it does for native `fs.read`.\n if (Object.setPrototypeOf) Object.setPrototypeOf(read, fs$read)\n return read\n })(fs.read)\n\n fs.readSync = typeof fs.readSync !== 'function' ? fs.readSync\n : (function (fs$readSync) { return function (fd, buffer, offset, length, position) {\n var eagCounter = 0\n while (true) {\n try {\n return fs$readSync.call(fs, fd, buffer, offset, length, position)\n } catch (er) {\n if (er.code === 'EAGAIN' && eagCounter < 10) {\n eagCounter ++\n continue\n }\n throw er\n }\n }\n }})(fs.readSync)\n\n function patchLchmod (fs) {\n fs.lchmod = function (path, mode, callback) {\n fs.open( path\n , constants.O_WRONLY | constants.O_SYMLINK\n , mode\n , function (err, fd) {\n if (err) {\n if (callback) callback(err)\n return\n }\n // prefer to return the chmod error, if one occurs,\n // but still try to close, and report closing errors if they occur.\n fs.fchmod(fd, mode, function (err) {\n fs.close(fd, function(err2) {\n if (callback) callback(err || err2)\n })\n })\n })\n }\n\n fs.lchmodSync = function (path, mode) {\n var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode)\n\n // prefer to return the chmod error, if one occurs,\n // but still try to close, and report closing errors if they occur.\n var threw = true\n var ret\n try {\n ret = fs.fchmodSync(fd, mode)\n threw = false\n } finally {\n if (threw) {\n try {\n fs.closeSync(fd)\n } catch (er) {}\n } else {\n fs.closeSync(fd)\n }\n }\n return ret\n }\n }\n\n function patchLutimes (fs) {\n if (constants.hasOwnProperty(\"O_SYMLINK\") && fs.futimes) {\n fs.lutimes = function (path, at, mt, cb) {\n fs.open(path, constants.O_SYMLINK, function (er, fd) {\n if (er) {\n if (cb) cb(er)\n return\n }\n fs.futimes(fd, at, mt, function (er) {\n fs.close(fd, function (er2) {\n if (cb) cb(er || er2)\n })\n })\n })\n }\n\n fs.lutimesSync = function (path, at, mt) {\n var fd = fs.openSync(path, constants.O_SYMLINK)\n var ret\n var threw = true\n try {\n ret = fs.futimesSync(fd, at, mt)\n threw = false\n } finally {\n if (threw) {\n try {\n fs.closeSync(fd)\n } catch (er) {}\n } else {\n fs.closeSync(fd)\n }\n }\n return ret\n }\n\n } else if (fs.futimes) {\n fs.lutimes = function (_a, _b, _c, cb) { if (cb) process.nextTick(cb) }\n fs.lutimesSync = function () {}\n }\n }\n\n function chmodFix (orig) {\n if (!orig) return orig\n return function (target, mode, cb) {\n return orig.call(fs, target, mode, function (er) {\n if (chownErOk(er)) er = null\n if (cb) cb.apply(this, arguments)\n })\n }\n }\n\n function chmodFixSync (orig) {\n if (!orig) return orig\n return function (target, mode) {\n try {\n return orig.call(fs, target, mode)\n } catch (er) {\n if (!chownErOk(er)) throw er\n }\n }\n }\n\n\n function chownFix (orig) {\n if (!orig) return orig\n return function (target, uid, gid, cb) {\n return orig.call(fs, target, uid, gid, function (er) {\n if (chownErOk(er)) er = null\n if (cb) cb.apply(this, arguments)\n })\n }\n }\n\n function chownFixSync (orig) {\n if (!orig) return 
orig\n return function (target, uid, gid) {\n try {\n return orig.call(fs, target, uid, gid)\n } catch (er) {\n if (!chownErOk(er)) throw er\n }\n }\n }\n\n function statFix (orig) {\n if (!orig) return orig\n // Older versions of Node erroneously returned signed integers for\n // uid + gid.\n return function (target, options, cb) {\n if (typeof options === 'function') {\n cb = options\n options = null\n }\n function callback (er, stats) {\n if (stats) {\n if (stats.uid < 0) stats.uid += 0x100000000\n if (stats.gid < 0) stats.gid += 0x100000000\n }\n if (cb) cb.apply(this, arguments)\n }\n return options ? orig.call(fs, target, options, callback)\n : orig.call(fs, target, callback)\n }\n }\n\n function statFixSync (orig) {\n if (!orig) return orig\n // Older versions of Node erroneously returned signed integers for\n // uid + gid.\n return function (target, options) {\n var stats = options ? orig.call(fs, target, options)\n : orig.call(fs, target)\n if (stats) {\n if (stats.uid < 0) stats.uid += 0x100000000\n if (stats.gid < 0) stats.gid += 0x100000000\n }\n return stats;\n }\n }\n\n // ENOSYS means that the fs doesn't support the op. Just ignore\n // that, because it doesn't matter.\n //\n // if there's no getuid, or if getuid() is something other\n // than 0, and the error is EINVAL or EPERM, then just ignore\n // it.\n //\n // This specific case is a silent failure in cp, install, tar,\n // and most other unix tools that manage permissions.\n //\n // When running as root, or if other types of errors are\n // encountered, then it's strict.\n function chownErOk (er) {\n if (!er)\n return true\n\n if (er.code === \"ENOSYS\")\n return true\n\n var nonroot = !process.getuid || process.getuid() !== 0\n if (nonroot) {\n if (er.code === \"EINVAL\" || er.code === \"EPERM\")\n return true\n }\n\n return false\n }\n}\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\n/*!\n * is-plain-object \n *\n * Copyright (c) 2014-2017, Jon Schlinkert.\n * Released under the MIT License.\n */\n\nfunction isObject(o) {\n return Object.prototype.toString.call(o) === '[object Object]';\n}\n\nfunction isPlainObject(o) {\n var ctor,prot;\n\n if (isObject(o) === false) return false;\n\n // If has modified constructor\n ctor = o.constructor;\n if (ctor === undefined) return true;\n\n // If has modified prototype\n prot = ctor.prototype;\n if (isObject(prot) === false) return false;\n\n // If constructor does not have an Object-specific method\n if (prot.hasOwnProperty('isPrototypeOf') === false) {\n return false;\n }\n\n // Most likely a plain Object\n return true;\n}\n\nexports.isPlainObject = isPlainObject;\n","let _fs\ntry {\n _fs = require('graceful-fs')\n} catch (_) {\n _fs = require('fs')\n}\nconst universalify = require('universalify')\nconst { stringify, stripBom } = require('./utils')\n\nasync function _readFile (file, options = {}) {\n if (typeof options === 'string') {\n options = { encoding: options }\n }\n\n const fs = options.fs || _fs\n\n const shouldThrow = 'throws' in options ? options.throws : true\n\n let data = await universalify.fromCallback(fs.readFile)(file, options)\n\n data = stripBom(data)\n\n let obj\n try {\n obj = JSON.parse(data, options ? 
options.reviver : null)\n } catch (err) {\n if (shouldThrow) {\n err.message = `${file}: ${err.message}`\n throw err\n } else {\n return null\n }\n }\n\n return obj\n}\n\nconst readFile = universalify.fromPromise(_readFile)\n\nfunction readFileSync (file, options = {}) {\n if (typeof options === 'string') {\n options = { encoding: options }\n }\n\n const fs = options.fs || _fs\n\n const shouldThrow = 'throws' in options ? options.throws : true\n\n try {\n let content = fs.readFileSync(file, options)\n content = stripBom(content)\n return JSON.parse(content, options.reviver)\n } catch (err) {\n if (shouldThrow) {\n err.message = `${file}: ${err.message}`\n throw err\n } else {\n return null\n }\n }\n}\n\nasync function _writeFile (file, obj, options = {}) {\n const fs = options.fs || _fs\n\n const str = stringify(obj, options)\n\n await universalify.fromCallback(fs.writeFile)(file, str, options)\n}\n\nconst writeFile = universalify.fromPromise(_writeFile)\n\nfunction writeFileSync (file, obj, options = {}) {\n const fs = options.fs || _fs\n\n const str = stringify(obj, options)\n // not sure if fs.writeFileSync returns anything, but just in case\n return fs.writeFileSync(file, str, options)\n}\n\nconst jsonfile = {\n readFile,\n readFileSync,\n writeFile,\n writeFileSync\n}\n\nmodule.exports = jsonfile\n","function stringify (obj, { EOL = '\\n', finalEOL = true, replacer = null, spaces } = {}) {\n const EOF = finalEOL ? EOL : ''\n const str = JSON.stringify(obj, replacer, spaces)\n\n return str.replace(/\\n/g, EOL) + EOF\n}\n\nfunction stripBom (content) {\n // we do this because JSON.parse would convert it to a utf8 string if encoding wasn't specified\n if (Buffer.isBuffer(content)) content = content.toString('utf8')\n return content.replace(/^\\uFEFF/, '')\n}\n\nmodule.exports = { stringify, stripBom }\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }\n\nvar Stream = _interopDefault(require('stream'));\nvar http = _interopDefault(require('http'));\nvar Url = _interopDefault(require('url'));\nvar whatwgUrl = _interopDefault(require('whatwg-url'));\nvar https = _interopDefault(require('https'));\nvar zlib = _interopDefault(require('zlib'));\n\n// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js\n\n// fix for \"Readable\" isn't a named export issue\nconst Readable = Stream.Readable;\n\nconst BUFFER = Symbol('buffer');\nconst TYPE = Symbol('type');\n\nclass Blob {\n\tconstructor() {\n\t\tthis[TYPE] = '';\n\n\t\tconst blobParts = arguments[0];\n\t\tconst options = arguments[1];\n\n\t\tconst buffers = [];\n\t\tlet size = 0;\n\n\t\tif (blobParts) {\n\t\t\tconst a = blobParts;\n\t\t\tconst length = Number(a.length);\n\t\t\tfor (let i = 0; i < length; i++) {\n\t\t\t\tconst element = a[i];\n\t\t\t\tlet buffer;\n\t\t\t\tif (element instanceof Buffer) {\n\t\t\t\t\tbuffer = element;\n\t\t\t\t} else if (ArrayBuffer.isView(element)) {\n\t\t\t\t\tbuffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);\n\t\t\t\t} else if (element instanceof ArrayBuffer) {\n\t\t\t\t\tbuffer = Buffer.from(element);\n\t\t\t\t} else if (element instanceof Blob) {\n\t\t\t\t\tbuffer = element[BUFFER];\n\t\t\t\t} else {\n\t\t\t\t\tbuffer = Buffer.from(typeof element === 'string' ? 
element : String(element));\n\t\t\t\t}\n\t\t\t\tsize += buffer.length;\n\t\t\t\tbuffers.push(buffer);\n\t\t\t}\n\t\t}\n\n\t\tthis[BUFFER] = Buffer.concat(buffers);\n\n\t\tlet type = options && options.type !== undefined && String(options.type).toLowerCase();\n\t\tif (type && !/[^\\u0020-\\u007E]/.test(type)) {\n\t\t\tthis[TYPE] = type;\n\t\t}\n\t}\n\tget size() {\n\t\treturn this[BUFFER].length;\n\t}\n\tget type() {\n\t\treturn this[TYPE];\n\t}\n\ttext() {\n\t\treturn Promise.resolve(this[BUFFER].toString());\n\t}\n\tarrayBuffer() {\n\t\tconst buf = this[BUFFER];\n\t\tconst ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\treturn Promise.resolve(ab);\n\t}\n\tstream() {\n\t\tconst readable = new Readable();\n\t\treadable._read = function () {};\n\t\treadable.push(this[BUFFER]);\n\t\treadable.push(null);\n\t\treturn readable;\n\t}\n\ttoString() {\n\t\treturn '[object Blob]';\n\t}\n\tslice() {\n\t\tconst size = this.size;\n\n\t\tconst start = arguments[0];\n\t\tconst end = arguments[1];\n\t\tlet relativeStart, relativeEnd;\n\t\tif (start === undefined) {\n\t\t\trelativeStart = 0;\n\t\t} else if (start < 0) {\n\t\t\trelativeStart = Math.max(size + start, 0);\n\t\t} else {\n\t\t\trelativeStart = Math.min(start, size);\n\t\t}\n\t\tif (end === undefined) {\n\t\t\trelativeEnd = size;\n\t\t} else if (end < 0) {\n\t\t\trelativeEnd = Math.max(size + end, 0);\n\t\t} else {\n\t\t\trelativeEnd = Math.min(end, size);\n\t\t}\n\t\tconst span = Math.max(relativeEnd - relativeStart, 0);\n\n\t\tconst buffer = this[BUFFER];\n\t\tconst slicedBuffer = buffer.slice(relativeStart, relativeStart + span);\n\t\tconst blob = new Blob([], { type: arguments[2] });\n\t\tblob[BUFFER] = slicedBuffer;\n\t\treturn blob;\n\t}\n}\n\nObject.defineProperties(Blob.prototype, {\n\tsize: { enumerable: true },\n\ttype: { enumerable: true },\n\tslice: { enumerable: true }\n});\n\nObject.defineProperty(Blob.prototype, Symbol.toStringTag, {\n\tvalue: 'Blob',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * fetch-error.js\n *\n * FetchError interface for operational errors\n */\n\n/**\n * Create FetchError instance\n *\n * @param String message Error message for human\n * @param String type Error type for machine\n * @param String systemError For Node.js system error\n * @return FetchError\n */\nfunction FetchError(message, type, systemError) {\n Error.call(this, message);\n\n this.message = message;\n this.type = type;\n\n // when err.type is `system`, err.code contains system error code\n if (systemError) {\n this.code = this.errno = systemError.code;\n }\n\n // hide custom error implementation details from end-users\n Error.captureStackTrace(this, this.constructor);\n}\n\nFetchError.prototype = Object.create(Error.prototype);\nFetchError.prototype.constructor = FetchError;\nFetchError.prototype.name = 'FetchError';\n\nlet convert;\ntry {\n\tconvert = require('encoding').convert;\n} catch (e) {}\n\nconst INTERNALS = Symbol('Body internals');\n\n// fix an issue where \"PassThrough\" isn't a named export for node <10\nconst PassThrough = Stream.PassThrough;\n\n/**\n * Body mixin\n *\n * Ref: https://fetch.spec.whatwg.org/#body\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nfunction Body(body) {\n\tvar _this = this;\n\n\tvar _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},\n\t _ref$size = _ref.size;\n\n\tlet size = _ref$size === undefined ? 
0 : _ref$size;\n\tvar _ref$timeout = _ref.timeout;\n\tlet timeout = _ref$timeout === undefined ? 0 : _ref$timeout;\n\n\tif (body == null) {\n\t\t// body is undefined or null\n\t\tbody = null;\n\t} else if (isURLSearchParams(body)) {\n\t\t// body is a URLSearchParams\n\t\tbody = Buffer.from(body.toString());\n\t} else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {\n\t\t// body is ArrayBuffer\n\t\tbody = Buffer.from(body);\n\t} else if (ArrayBuffer.isView(body)) {\n\t\t// body is ArrayBufferView\n\t\tbody = Buffer.from(body.buffer, body.byteOffset, body.byteLength);\n\t} else if (body instanceof Stream) ; else {\n\t\t// none of the above\n\t\t// coerce to string then buffer\n\t\tbody = Buffer.from(String(body));\n\t}\n\tthis[INTERNALS] = {\n\t\tbody,\n\t\tdisturbed: false,\n\t\terror: null\n\t};\n\tthis.size = size;\n\tthis.timeout = timeout;\n\n\tif (body instanceof Stream) {\n\t\tbody.on('error', function (err) {\n\t\t\tconst error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);\n\t\t\t_this[INTERNALS].error = error;\n\t\t});\n\t}\n}\n\nBody.prototype = {\n\tget body() {\n\t\treturn this[INTERNALS].body;\n\t},\n\n\tget bodyUsed() {\n\t\treturn this[INTERNALS].disturbed;\n\t},\n\n\t/**\n * Decode response as ArrayBuffer\n *\n * @return Promise\n */\n\tarrayBuffer() {\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\t});\n\t},\n\n\t/**\n * Return raw response as Blob\n *\n * @return Promise\n */\n\tblob() {\n\t\tlet ct = this.headers && this.headers.get('content-type') || '';\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn Object.assign(\n\t\t\t// Prevent copying\n\t\t\tnew Blob([], {\n\t\t\t\ttype: ct.toLowerCase()\n\t\t\t}), {\n\t\t\t\t[BUFFER]: buf\n\t\t\t});\n\t\t});\n\t},\n\n\t/**\n * Decode response as json\n *\n * @return Promise\n */\n\tjson() {\n\t\tvar _this2 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\ttry {\n\t\t\t\treturn JSON.parse(buffer.toString());\n\t\t\t} catch (err) {\n\t\t\t\treturn Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));\n\t\t\t}\n\t\t});\n\t},\n\n\t/**\n * Decode response as text\n *\n * @return Promise\n */\n\ttext() {\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn buffer.toString();\n\t\t});\n\t},\n\n\t/**\n * Decode response as buffer (non-spec api)\n *\n * @return Promise\n */\n\tbuffer() {\n\t\treturn consumeBody.call(this);\n\t},\n\n\t/**\n * Decode response as text, while automatically detecting the encoding and\n * trying to decode to UTF-8 (non-spec api)\n *\n * @return Promise\n */\n\ttextConverted() {\n\t\tvar _this3 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn convertBody(buffer, _this3.headers);\n\t\t});\n\t}\n};\n\n// In browsers, all properties are enumerable.\nObject.defineProperties(Body.prototype, {\n\tbody: { enumerable: true },\n\tbodyUsed: { enumerable: true },\n\tarrayBuffer: { enumerable: true },\n\tblob: { enumerable: true },\n\tjson: { enumerable: true },\n\ttext: { enumerable: true }\n});\n\nBody.mixIn = function (proto) {\n\tfor (const name of Object.getOwnPropertyNames(Body.prototype)) {\n\t\t// istanbul ignore else: future proof\n\t\tif (!(name in proto)) {\n\t\t\tconst desc 
= Object.getOwnPropertyDescriptor(Body.prototype, name);\n\t\t\tObject.defineProperty(proto, name, desc);\n\t\t}\n\t}\n};\n\n/**\n * Consume and convert an entire Body to a Buffer.\n *\n * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body\n *\n * @return Promise\n */\nfunction consumeBody() {\n\tvar _this4 = this;\n\n\tif (this[INTERNALS].disturbed) {\n\t\treturn Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));\n\t}\n\n\tthis[INTERNALS].disturbed = true;\n\n\tif (this[INTERNALS].error) {\n\t\treturn Body.Promise.reject(this[INTERNALS].error);\n\t}\n\n\tlet body = this.body;\n\n\t// body is null\n\tif (body === null) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is blob\n\tif (isBlob(body)) {\n\t\tbody = body.stream();\n\t}\n\n\t// body is buffer\n\tif (Buffer.isBuffer(body)) {\n\t\treturn Body.Promise.resolve(body);\n\t}\n\n\t// istanbul ignore if: should never happen\n\tif (!(body instanceof Stream)) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is stream\n\t// get ready to actually consume the body\n\tlet accum = [];\n\tlet accumBytes = 0;\n\tlet abort = false;\n\n\treturn new Body.Promise(function (resolve, reject) {\n\t\tlet resTimeout;\n\n\t\t// allow timeout on slow response body\n\t\tif (_this4.timeout) {\n\t\t\tresTimeout = setTimeout(function () {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));\n\t\t\t}, _this4.timeout);\n\t\t}\n\n\t\t// handle stream errors\n\t\tbody.on('error', function (err) {\n\t\t\tif (err.name === 'AbortError') {\n\t\t\t\t// if the request was aborted, reject with this Error\n\t\t\t\tabort = true;\n\t\t\t\treject(err);\n\t\t\t} else {\n\t\t\t\t// other errors, such as incorrect content-encoding\n\t\t\t\treject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\n\t\tbody.on('data', function (chunk) {\n\t\t\tif (abort || chunk === null) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tif (_this4.size && accumBytes + chunk.length > _this4.size) {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\taccumBytes += chunk.length;\n\t\t\taccum.push(chunk);\n\t\t});\n\n\t\tbody.on('end', function () {\n\t\t\tif (abort) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tclearTimeout(resTimeout);\n\n\t\t\ttry {\n\t\t\t\tresolve(Buffer.concat(accum, accumBytes));\n\t\t\t} catch (err) {\n\t\t\t\t// handle streams that have accumulated too much data (issue #414)\n\t\t\t\treject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\t});\n}\n\n/**\n * Detect buffer encoding and convert to target encoding\n * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding\n *\n * @param Buffer buffer Incoming buffer\n * @param String encoding Target encoding\n * @return String\n */\nfunction convertBody(buffer, headers) {\n\tif (typeof convert !== 'function') {\n\t\tthrow new Error('The package `encoding` must be installed to use the textConverted() function');\n\t}\n\n\tconst ct = headers.get('content-type');\n\tlet charset = 'utf-8';\n\tlet res, str;\n\n\t// header\n\tif (ct) {\n\t\tres = /charset=([^;]*)/i.exec(ct);\n\t}\n\n\t// no charset in content type, peek at response body for at most 1024 bytes\n\tstr = 
buffer.slice(0, 1024).toString();\n\n\t// html5\n\tif (!res && str) {\n\t\tres = / 0 && arguments[0] !== undefined ? arguments[0] : undefined;\n\n\t\tthis[MAP] = Object.create(null);\n\n\t\tif (init instanceof Headers) {\n\t\t\tconst rawHeaders = init.raw();\n\t\t\tconst headerNames = Object.keys(rawHeaders);\n\n\t\t\tfor (const headerName of headerNames) {\n\t\t\t\tfor (const value of rawHeaders[headerName]) {\n\t\t\t\t\tthis.append(headerName, value);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\treturn;\n\t\t}\n\n\t\t// We don't worry about converting prop to ByteString here as append()\n\t\t// will handle it.\n\t\tif (init == null) ; else if (typeof init === 'object') {\n\t\t\tconst method = init[Symbol.iterator];\n\t\t\tif (method != null) {\n\t\t\t\tif (typeof method !== 'function') {\n\t\t\t\t\tthrow new TypeError('Header pairs must be iterable');\n\t\t\t\t}\n\n\t\t\t\t// sequence>\n\t\t\t\t// Note: per spec we have to first exhaust the lists then process them\n\t\t\t\tconst pairs = [];\n\t\t\t\tfor (const pair of init) {\n\t\t\t\t\tif (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be iterable');\n\t\t\t\t\t}\n\t\t\t\t\tpairs.push(Array.from(pair));\n\t\t\t\t}\n\n\t\t\t\tfor (const pair of pairs) {\n\t\t\t\t\tif (pair.length !== 2) {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be a name/value tuple');\n\t\t\t\t\t}\n\t\t\t\t\tthis.append(pair[0], pair[1]);\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\t// record\n\t\t\t\tfor (const key of Object.keys(init)) {\n\t\t\t\t\tconst value = init[key];\n\t\t\t\t\tthis.append(key, value);\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tthrow new TypeError('Provided initializer must be an object');\n\t\t}\n\t}\n\n\t/**\n * Return combined header value given name\n *\n * @param String name Header name\n * @return Mixed\n */\n\tget(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key === undefined) {\n\t\t\treturn null;\n\t\t}\n\n\t\treturn this[MAP][key].join(', ');\n\t}\n\n\t/**\n * Iterate over all headers\n *\n * @param Function callback Executed for each item with parameters (value, name, thisArg)\n * @param Boolean thisArg `this` context for callback function\n * @return Void\n */\n\tforEach(callback) {\n\t\tlet thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;\n\n\t\tlet pairs = getHeaders(this);\n\t\tlet i = 0;\n\t\twhile (i < pairs.length) {\n\t\t\tvar _pairs$i = pairs[i];\n\t\t\tconst name = _pairs$i[0],\n\t\t\t value = _pairs$i[1];\n\n\t\t\tcallback.call(thisArg, value, name, this);\n\t\t\tpairs = getHeaders(this);\n\t\t\ti++;\n\t\t}\n\t}\n\n\t/**\n * Overwrite header values given name\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tset(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tthis[MAP][key !== undefined ? 
key : name] = [value];\n\t}\n\n\t/**\n * Append a value onto existing header\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tappend(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tthis[MAP][key].push(value);\n\t\t} else {\n\t\t\tthis[MAP][name] = [value];\n\t\t}\n\t}\n\n\t/**\n * Check for header name existence\n *\n * @param String name Header name\n * @return Boolean\n */\n\thas(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\treturn find(this[MAP], name) !== undefined;\n\t}\n\n\t/**\n * Delete all header values given name\n *\n * @param String name Header name\n * @return Void\n */\n\tdelete(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tdelete this[MAP][key];\n\t\t}\n\t}\n\n\t/**\n * Return raw headers (non-spec api)\n *\n * @return Object\n */\n\traw() {\n\t\treturn this[MAP];\n\t}\n\n\t/**\n * Get an iterator on keys.\n *\n * @return Iterator\n */\n\tkeys() {\n\t\treturn createHeadersIterator(this, 'key');\n\t}\n\n\t/**\n * Get an iterator on values.\n *\n * @return Iterator\n */\n\tvalues() {\n\t\treturn createHeadersIterator(this, 'value');\n\t}\n\n\t/**\n * Get an iterator on entries.\n *\n * This is the default iterator of the Headers object.\n *\n * @return Iterator\n */\n\t[Symbol.iterator]() {\n\t\treturn createHeadersIterator(this, 'key+value');\n\t}\n}\nHeaders.prototype.entries = Headers.prototype[Symbol.iterator];\n\nObject.defineProperty(Headers.prototype, Symbol.toStringTag, {\n\tvalue: 'Headers',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Headers.prototype, {\n\tget: { enumerable: true },\n\tforEach: { enumerable: true },\n\tset: { enumerable: true },\n\tappend: { enumerable: true },\n\thas: { enumerable: true },\n\tdelete: { enumerable: true },\n\tkeys: { enumerable: true },\n\tvalues: { enumerable: true },\n\tentries: { enumerable: true }\n});\n\nfunction getHeaders(headers) {\n\tlet kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';\n\n\tconst keys = Object.keys(headers[MAP]).sort();\n\treturn keys.map(kind === 'key' ? function (k) {\n\t\treturn k.toLowerCase();\n\t} : kind === 'value' ? 
function (k) {\n\t\treturn headers[MAP][k].join(', ');\n\t} : function (k) {\n\t\treturn [k.toLowerCase(), headers[MAP][k].join(', ')];\n\t});\n}\n\nconst INTERNAL = Symbol('internal');\n\nfunction createHeadersIterator(target, kind) {\n\tconst iterator = Object.create(HeadersIteratorPrototype);\n\titerator[INTERNAL] = {\n\t\ttarget,\n\t\tkind,\n\t\tindex: 0\n\t};\n\treturn iterator;\n}\n\nconst HeadersIteratorPrototype = Object.setPrototypeOf({\n\tnext() {\n\t\t// istanbul ignore if\n\t\tif (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {\n\t\t\tthrow new TypeError('Value of `this` is not a HeadersIterator');\n\t\t}\n\n\t\tvar _INTERNAL = this[INTERNAL];\n\t\tconst target = _INTERNAL.target,\n\t\t kind = _INTERNAL.kind,\n\t\t index = _INTERNAL.index;\n\n\t\tconst values = getHeaders(target, kind);\n\t\tconst len = values.length;\n\t\tif (index >= len) {\n\t\t\treturn {\n\t\t\t\tvalue: undefined,\n\t\t\t\tdone: true\n\t\t\t};\n\t\t}\n\n\t\tthis[INTERNAL].index = index + 1;\n\n\t\treturn {\n\t\t\tvalue: values[index],\n\t\t\tdone: false\n\t\t};\n\t}\n}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));\n\nObject.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {\n\tvalue: 'HeadersIterator',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * Export the Headers object in a form that Node.js can consume.\n *\n * @param Headers headers\n * @return Object\n */\nfunction exportNodeCompatibleHeaders(headers) {\n\tconst obj = Object.assign({ __proto__: null }, headers[MAP]);\n\n\t// http.request() only supports string as Host header. This hack makes\n\t// specifying custom Host header possible.\n\tconst hostHeaderKey = find(headers[MAP], 'Host');\n\tif (hostHeaderKey !== undefined) {\n\t\tobj[hostHeaderKey] = obj[hostHeaderKey][0];\n\t}\n\n\treturn obj;\n}\n\n/**\n * Create a Headers object from an object of headers, ignoring those that do\n * not conform to HTTP grammar productions.\n *\n * @param Object obj Object of headers\n * @return Headers\n */\nfunction createHeadersLenient(obj) {\n\tconst headers = new Headers();\n\tfor (const name of Object.keys(obj)) {\n\t\tif (invalidTokenRegex.test(name)) {\n\t\t\tcontinue;\n\t\t}\n\t\tif (Array.isArray(obj[name])) {\n\t\t\tfor (const val of obj[name]) {\n\t\t\t\tif (invalidHeaderCharRegex.test(val)) {\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\tif (headers[MAP][name] === undefined) {\n\t\t\t\t\theaders[MAP][name] = [val];\n\t\t\t\t} else {\n\t\t\t\t\theaders[MAP][name].push(val);\n\t\t\t\t}\n\t\t\t}\n\t\t} else if (!invalidHeaderCharRegex.test(obj[name])) {\n\t\t\theaders[MAP][name] = [obj[name]];\n\t\t}\n\t}\n\treturn headers;\n}\n\nconst INTERNALS$1 = Symbol('Response internals');\n\n// fix an issue where \"STATUS_CODES\" aren't a named export for node <10\nconst STATUS_CODES = http.STATUS_CODES;\n\n/**\n * Response class\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nclass Response {\n\tconstructor() {\n\t\tlet body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;\n\t\tlet opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tBody.call(this, body, opts);\n\n\t\tconst status = opts.status || 200;\n\t\tconst headers = new Headers(opts.headers);\n\n\t\tif (body != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(body);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tthis[INTERNALS$1] = {\n\t\t\turl: opts.url,\n\t\t\tstatus,\n\t\t\tstatusText: opts.statusText || STATUS_CODES[status],\n\t\t\theaders,\n\t\t\tcounter: opts.counter\n\t\t};\n\t}\n\n\tget url() {\n\t\treturn this[INTERNALS$1].url || '';\n\t}\n\n\tget status() {\n\t\treturn this[INTERNALS$1].status;\n\t}\n\n\t/**\n * Convenience property representing if the request ended normally\n */\n\tget ok() {\n\t\treturn this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;\n\t}\n\n\tget redirected() {\n\t\treturn this[INTERNALS$1].counter > 0;\n\t}\n\n\tget statusText() {\n\t\treturn this[INTERNALS$1].statusText;\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$1].headers;\n\t}\n\n\t/**\n * Clone this response\n *\n * @return Response\n */\n\tclone() {\n\t\treturn new Response(clone(this), {\n\t\t\turl: this.url,\n\t\t\tstatus: this.status,\n\t\t\tstatusText: this.statusText,\n\t\t\theaders: this.headers,\n\t\t\tok: this.ok,\n\t\t\tredirected: this.redirected\n\t\t});\n\t}\n}\n\nBody.mixIn(Response.prototype);\n\nObject.defineProperties(Response.prototype, {\n\turl: { enumerable: true },\n\tstatus: { enumerable: true },\n\tok: { enumerable: true },\n\tredirected: { enumerable: true },\n\tstatusText: { enumerable: true },\n\theaders: { enumerable: true },\n\tclone: { enumerable: true }\n});\n\nObject.defineProperty(Response.prototype, Symbol.toStringTag, {\n\tvalue: 'Response',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nconst INTERNALS$2 = Symbol('Request internals');\nconst URL = Url.URL || whatwgUrl.URL;\n\n// fix an issue where \"format\", \"parse\" aren't a named export for node <10\nconst parse_url = Url.parse;\nconst format_url = Url.format;\n\n/**\n * Wrapper around `new URL` to handle arbitrary URLs\n *\n * @param {string} urlStr\n * @return {void}\n */\nfunction parseURL(urlStr) {\n\t/*\n \tCheck whether the URL is absolute or not\n \t\tScheme: https://tools.ietf.org/html/rfc3986#section-3.1\n \tAbsolute URL: https://tools.ietf.org/html/rfc3986#section-4.3\n */\n\tif (/^[a-zA-Z][a-zA-Z\\d+\\-.]*:/.exec(urlStr)) {\n\t\turlStr = new URL(urlStr).toString();\n\t}\n\n\t// Fallback to old implementation for arbitrary URLs\n\treturn parse_url(urlStr);\n}\n\nconst streamDestructionSupported = 'destroy' in Stream.Readable.prototype;\n\n/**\n * Check if a value is an instance of Request.\n *\n * @param Mixed input\n * @return Boolean\n */\nfunction isRequest(input) {\n\treturn typeof input === 'object' && typeof input[INTERNALS$2] === 'object';\n}\n\nfunction isAbortSignal(signal) {\n\tconst proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal);\n\treturn !!(proto && proto.constructor.name === 'AbortSignal');\n}\n\n/**\n * Request class\n *\n * @param Mixed input Url or Request instance\n * @param Object init Custom options\n * @return Void\n */\nclass Request {\n\tconstructor(input) {\n\t\tlet init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tlet parsedURL;\n\n\t\t// normalize input\n\t\tif (!isRequest(input)) {\n\t\t\tif (input && input.href) {\n\t\t\t\t// in order to support Node.js' Url objects; though WHATWG's URL objects\n\t\t\t\t// will fall into this branch also (since their `toString()` will return\n\t\t\t\t// `href` property anyway)\n\t\t\t\tparsedURL = parseURL(input.href);\n\t\t\t} else {\n\t\t\t\t// coerce input to a string before attempting to parse\n\t\t\t\tparsedURL = parseURL(`${input}`);\n\t\t\t}\n\t\t\tinput = {};\n\t\t} else {\n\t\t\tparsedURL = parseURL(input.url);\n\t\t}\n\n\t\tlet method = init.method || input.method || 'GET';\n\t\tmethod = method.toUpperCase();\n\n\t\tif ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {\n\t\t\tthrow new TypeError('Request with GET/HEAD method cannot have body');\n\t\t}\n\n\t\tlet inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;\n\n\t\tBody.call(this, inputBody, {\n\t\t\ttimeout: init.timeout || input.timeout || 0,\n\t\t\tsize: init.size || input.size || 0\n\t\t});\n\n\t\tconst headers = new Headers(init.headers || input.headers || {});\n\n\t\tif (inputBody != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(inputBody);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tlet signal = isRequest(input) ? input.signal : null;\n\t\tif ('signal' in init) signal = init.signal;\n\n\t\tif (signal != null && !isAbortSignal(signal)) {\n\t\t\tthrow new TypeError('Expected signal to be an instanceof AbortSignal');\n\t\t}\n\n\t\tthis[INTERNALS$2] = {\n\t\t\tmethod,\n\t\t\tredirect: init.redirect || input.redirect || 'follow',\n\t\t\theaders,\n\t\t\tparsedURL,\n\t\t\tsignal\n\t\t};\n\n\t\t// node-fetch-only options\n\t\tthis.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;\n\t\tthis.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? 
input.compress : true;\n\t\tthis.counter = init.counter || input.counter || 0;\n\t\tthis.agent = init.agent || input.agent;\n\t}\n\n\tget method() {\n\t\treturn this[INTERNALS$2].method;\n\t}\n\n\tget url() {\n\t\treturn format_url(this[INTERNALS$2].parsedURL);\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$2].headers;\n\t}\n\n\tget redirect() {\n\t\treturn this[INTERNALS$2].redirect;\n\t}\n\n\tget signal() {\n\t\treturn this[INTERNALS$2].signal;\n\t}\n\n\t/**\n * Clone this request\n *\n * @return Request\n */\n\tclone() {\n\t\treturn new Request(this);\n\t}\n}\n\nBody.mixIn(Request.prototype);\n\nObject.defineProperty(Request.prototype, Symbol.toStringTag, {\n\tvalue: 'Request',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Request.prototype, {\n\tmethod: { enumerable: true },\n\turl: { enumerable: true },\n\theaders: { enumerable: true },\n\tredirect: { enumerable: true },\n\tclone: { enumerable: true },\n\tsignal: { enumerable: true }\n});\n\n/**\n * Convert a Request to Node.js http request options.\n *\n * @param Request A Request instance\n * @return Object The options object to be passed to http.request\n */\nfunction getNodeRequestOptions(request) {\n\tconst parsedURL = request[INTERNALS$2].parsedURL;\n\tconst headers = new Headers(request[INTERNALS$2].headers);\n\n\t// fetch step 1.3\n\tif (!headers.has('Accept')) {\n\t\theaders.set('Accept', '*/*');\n\t}\n\n\t// Basic fetch\n\tif (!parsedURL.protocol || !parsedURL.hostname) {\n\t\tthrow new TypeError('Only absolute URLs are supported');\n\t}\n\n\tif (!/^https?:$/.test(parsedURL.protocol)) {\n\t\tthrow new TypeError('Only HTTP(S) protocols are supported');\n\t}\n\n\tif (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {\n\t\tthrow new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');\n\t}\n\n\t// HTTP-network-or-cache fetch steps 2.4-2.7\n\tlet contentLengthValue = null;\n\tif (request.body == null && /^(POST|PUT)$/i.test(request.method)) {\n\t\tcontentLengthValue = '0';\n\t}\n\tif (request.body != null) {\n\t\tconst totalBytes = getTotalBytes(request);\n\t\tif (typeof totalBytes === 'number') {\n\t\t\tcontentLengthValue = String(totalBytes);\n\t\t}\n\t}\n\tif (contentLengthValue) {\n\t\theaders.set('Content-Length', contentLengthValue);\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.11\n\tif (!headers.has('User-Agent')) {\n\t\theaders.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.15\n\tif (request.compress && !headers.has('Accept-Encoding')) {\n\t\theaders.set('Accept-Encoding', 'gzip,deflate');\n\t}\n\n\tlet agent = request.agent;\n\tif (typeof agent === 'function') {\n\t\tagent = agent(parsedURL);\n\t}\n\n\tif (!headers.has('Connection') && !agent) {\n\t\theaders.set('Connection', 'close');\n\t}\n\n\t// HTTP-network fetch step 4.2\n\t// chunked encoding is handled by Node.js\n\n\treturn Object.assign({}, parsedURL, {\n\t\tmethod: request.method,\n\t\theaders: exportNodeCompatibleHeaders(headers),\n\t\tagent\n\t});\n}\n\n/**\n * abort-error.js\n *\n * AbortError interface for cancelled requests\n */\n\n/**\n * Create AbortError instance\n *\n * @param String message Error message for human\n * @return AbortError\n */\nfunction AbortError(message) {\n Error.call(this, message);\n\n this.type = 'aborted';\n this.message = message;\n\n // hide custom error implementation details from end-users\n 
Error.captureStackTrace(this, this.constructor);\n}\n\nAbortError.prototype = Object.create(Error.prototype);\nAbortError.prototype.constructor = AbortError;\nAbortError.prototype.name = 'AbortError';\n\nconst URL$1 = Url.URL || whatwgUrl.URL;\n\n// fix an issue where \"PassThrough\", \"resolve\" aren't a named export for node <10\nconst PassThrough$1 = Stream.PassThrough;\n\nconst isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) {\n\tconst orig = new URL$1(original).hostname;\n\tconst dest = new URL$1(destination).hostname;\n\n\treturn orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest);\n};\n\n/**\n * Fetch function\n *\n * @param Mixed url Absolute url or Request instance\n * @param Object opts Fetch options\n * @return Promise\n */\nfunction fetch(url, opts) {\n\n\t// allow custom promise\n\tif (!fetch.Promise) {\n\t\tthrow new Error('native promise missing, set fetch.Promise to your favorite alternative');\n\t}\n\n\tBody.Promise = fetch.Promise;\n\n\t// wrap http.request into fetch\n\treturn new fetch.Promise(function (resolve, reject) {\n\t\t// build request object\n\t\tconst request = new Request(url, opts);\n\t\tconst options = getNodeRequestOptions(request);\n\n\t\tconst send = (options.protocol === 'https:' ? https : http).request;\n\t\tconst signal = request.signal;\n\n\t\tlet response = null;\n\n\t\tconst abort = function abort() {\n\t\t\tlet error = new AbortError('The user aborted a request.');\n\t\t\treject(error);\n\t\t\tif (request.body && request.body instanceof Stream.Readable) {\n\t\t\t\trequest.body.destroy(error);\n\t\t\t}\n\t\t\tif (!response || !response.body) return;\n\t\t\tresponse.body.emit('error', error);\n\t\t};\n\n\t\tif (signal && signal.aborted) {\n\t\t\tabort();\n\t\t\treturn;\n\t\t}\n\n\t\tconst abortAndFinalize = function abortAndFinalize() {\n\t\t\tabort();\n\t\t\tfinalize();\n\t\t};\n\n\t\t// send request\n\t\tconst req = send(options);\n\t\tlet reqTimeout;\n\n\t\tif (signal) {\n\t\t\tsignal.addEventListener('abort', abortAndFinalize);\n\t\t}\n\n\t\tfunction finalize() {\n\t\t\treq.abort();\n\t\t\tif (signal) signal.removeEventListener('abort', abortAndFinalize);\n\t\t\tclearTimeout(reqTimeout);\n\t\t}\n\n\t\tif (request.timeout) {\n\t\t\treq.once('socket', function (socket) {\n\t\t\t\treqTimeout = setTimeout(function () {\n\t\t\t\t\treject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));\n\t\t\t\t\tfinalize();\n\t\t\t\t}, request.timeout);\n\t\t\t});\n\t\t}\n\n\t\treq.on('error', function (err) {\n\t\t\treject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));\n\t\t\tfinalize();\n\t\t});\n\n\t\treq.on('response', function (res) {\n\t\t\tclearTimeout(reqTimeout);\n\n\t\t\tconst headers = createHeadersLenient(res.headers);\n\n\t\t\t// HTTP fetch step 5\n\t\t\tif (fetch.isRedirect(res.statusCode)) {\n\t\t\t\t// HTTP fetch step 5.2\n\t\t\t\tconst location = headers.get('Location');\n\n\t\t\t\t// HTTP fetch step 5.3\n\t\t\t\tlet locationURL = null;\n\t\t\t\ttry {\n\t\t\t\t\tlocationURL = location === null ? 
null : new URL$1(location, request.url).toString();\n\t\t\t\t} catch (err) {\n\t\t\t\t\t// error here can only be invalid URL in Location: header\n\t\t\t\t\t// do not throw when options.redirect == manual\n\t\t\t\t\t// let the user extract the errorneous redirect URL\n\t\t\t\t\tif (request.redirect !== 'manual') {\n\t\t\t\t\t\treject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect'));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// HTTP fetch step 5.5\n\t\t\t\tswitch (request.redirect) {\n\t\t\t\t\tcase 'error':\n\t\t\t\t\t\treject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\tcase 'manual':\n\t\t\t\t\t\t// node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.\n\t\t\t\t\t\tif (locationURL !== null) {\n\t\t\t\t\t\t\t// handle corrupted header\n\t\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\t\theaders.set('Location', locationURL);\n\t\t\t\t\t\t\t} catch (err) {\n\t\t\t\t\t\t\t\t// istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request\n\t\t\t\t\t\t\t\treject(err);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase 'follow':\n\t\t\t\t\t\t// HTTP-redirect fetch step 2\n\t\t\t\t\t\tif (locationURL === null) {\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 5\n\t\t\t\t\t\tif (request.counter >= request.follow) {\n\t\t\t\t\t\t\treject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 6 (counter increment)\n\t\t\t\t\t\t// Create a new Request object.\n\t\t\t\t\t\tconst requestOpts = {\n\t\t\t\t\t\t\theaders: new Headers(request.headers),\n\t\t\t\t\t\t\tfollow: request.follow,\n\t\t\t\t\t\t\tcounter: request.counter + 1,\n\t\t\t\t\t\t\tagent: request.agent,\n\t\t\t\t\t\t\tcompress: request.compress,\n\t\t\t\t\t\t\tmethod: request.method,\n\t\t\t\t\t\t\tbody: request.body,\n\t\t\t\t\t\t\tsignal: request.signal,\n\t\t\t\t\t\t\ttimeout: request.timeout,\n\t\t\t\t\t\t\tsize: request.size\n\t\t\t\t\t\t};\n\n\t\t\t\t\t\tif (!isDomainOrSubdomain(request.url, locationURL)) {\n\t\t\t\t\t\t\tfor (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) {\n\t\t\t\t\t\t\t\trequestOpts.headers.delete(name);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 9\n\t\t\t\t\t\tif (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {\n\t\t\t\t\t\t\treject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 11\n\t\t\t\t\t\tif (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {\n\t\t\t\t\t\t\trequestOpts.method = 'GET';\n\t\t\t\t\t\t\trequestOpts.body = undefined;\n\t\t\t\t\t\t\trequestOpts.headers.delete('content-length');\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 15\n\t\t\t\t\t\tresolve(fetch(new Request(locationURL, requestOpts)));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// prepare response\n\t\t\tres.once('end', function () {\n\t\t\t\tif (signal) 
signal.removeEventListener('abort', abortAndFinalize);\n\t\t\t});\n\t\t\tlet body = res.pipe(new PassThrough$1());\n\n\t\t\tconst response_options = {\n\t\t\t\turl: request.url,\n\t\t\t\tstatus: res.statusCode,\n\t\t\t\tstatusText: res.statusMessage,\n\t\t\t\theaders: headers,\n\t\t\t\tsize: request.size,\n\t\t\t\ttimeout: request.timeout,\n\t\t\t\tcounter: request.counter\n\t\t\t};\n\n\t\t\t// HTTP-network fetch step 12.1.1.3\n\t\t\tconst codings = headers.get('Content-Encoding');\n\n\t\t\t// HTTP-network fetch step 12.1.1.4: handle content codings\n\n\t\t\t// in following scenarios we ignore compression support\n\t\t\t// 1. compression support is disabled\n\t\t\t// 2. HEAD request\n\t\t\t// 3. no Content-Encoding header\n\t\t\t// 4. no content response (204)\n\t\t\t// 5. content not modified response (304)\n\t\t\tif (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// For Node v6+\n\t\t\t// Be less strict when decoding compressed responses, since sometimes\n\t\t\t// servers send slightly invalid responses that are still accepted\n\t\t\t// by common browsers.\n\t\t\t// Always using Z_SYNC_FLUSH is what cURL does.\n\t\t\tconst zlibOptions = {\n\t\t\t\tflush: zlib.Z_SYNC_FLUSH,\n\t\t\t\tfinishFlush: zlib.Z_SYNC_FLUSH\n\t\t\t};\n\n\t\t\t// for gzip\n\t\t\tif (codings == 'gzip' || codings == 'x-gzip') {\n\t\t\t\tbody = body.pipe(zlib.createGunzip(zlibOptions));\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for deflate\n\t\t\tif (codings == 'deflate' || codings == 'x-deflate') {\n\t\t\t\t// handle the infamous raw deflate response from old servers\n\t\t\t\t// a hack for old IIS and Apache servers\n\t\t\t\tconst raw = res.pipe(new PassThrough$1());\n\t\t\t\traw.once('data', function (chunk) {\n\t\t\t\t\t// see http://stackoverflow.com/questions/37519828\n\t\t\t\t\tif ((chunk[0] & 0x0F) === 0x08) {\n\t\t\t\t\t\tbody = body.pipe(zlib.createInflate());\n\t\t\t\t\t} else {\n\t\t\t\t\t\tbody = body.pipe(zlib.createInflateRaw());\n\t\t\t\t\t}\n\t\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\t\tresolve(response);\n\t\t\t\t});\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for br\n\t\t\tif (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {\n\t\t\t\tbody = body.pipe(zlib.createBrotliDecompress());\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// otherwise, use response as-is\n\t\t\tresponse = new Response(body, response_options);\n\t\t\tresolve(response);\n\t\t});\n\n\t\twriteToStream(req, request);\n\t});\n}\n/**\n * Redirect code matching\n *\n * @param Number code Status code\n * @return Boolean\n */\nfetch.isRedirect = function (code) {\n\treturn code === 301 || code === 302 || code === 303 || code === 307 || code === 308;\n};\n\n// expose Promise\nfetch.Promise = global.Promise;\n\nmodule.exports = exports = fetch;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.default = exports;\nexports.Headers = Headers;\nexports.Request = Request;\nexports.Response = Response;\nexports.FetchError = FetchError;\n","var wrappy = require('wrappy')\nmodule.exports = wrappy(once)\nmodule.exports.strict = wrappy(onceStrict)\n\nonce.proto = once(function () {\n Object.defineProperty(Function.prototype, 'once', {\n 
value: function () {\n return once(this)\n },\n configurable: true\n })\n\n Object.defineProperty(Function.prototype, 'onceStrict', {\n value: function () {\n return onceStrict(this)\n },\n configurable: true\n })\n})\n\nfunction once (fn) {\n var f = function () {\n if (f.called) return f.value\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n f.called = false\n return f\n}\n\nfunction onceStrict (fn) {\n var f = function () {\n if (f.called)\n throw new Error(f.onceError)\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n var name = fn.name || 'Function wrapped with `once`'\n f.onceError = name + \" shouldn't be called more than once\"\n f.called = false\n return f\n}\n","\"use strict\";\n\nvar punycode = require(\"punycode\");\nvar mappingTable = require(\"./lib/mappingTable.json\");\n\nvar PROCESSING_OPTIONS = {\n TRANSITIONAL: 0,\n NONTRANSITIONAL: 1\n};\n\nfunction normalize(str) { // fix bug in v8\n return str.split('\\u0000').map(function (s) { return s.normalize('NFC'); }).join('\\u0000');\n}\n\nfunction findStatus(val) {\n var start = 0;\n var end = mappingTable.length - 1;\n\n while (start <= end) {\n var mid = Math.floor((start + end) / 2);\n\n var target = mappingTable[mid];\n if (target[0][0] <= val && target[0][1] >= val) {\n return target;\n } else if (target[0][0] > val) {\n end = mid - 1;\n } else {\n start = mid + 1;\n }\n }\n\n return null;\n}\n\nvar regexAstralSymbols = /[\\uD800-\\uDBFF][\\uDC00-\\uDFFF]/g;\n\nfunction countSymbols(string) {\n return string\n // replace every surrogate pair with a BMP symbol\n .replace(regexAstralSymbols, '_')\n // then get the length\n .length;\n}\n\nfunction mapChars(domain_name, useSTD3, processing_option) {\n var hasError = false;\n var processed = \"\";\n\n var len = countSymbols(domain_name);\n for (var i = 0; i < len; ++i) {\n var codePoint = domain_name.codePointAt(i);\n var status = findStatus(codePoint);\n\n switch (status[1]) {\n case \"disallowed\":\n hasError = true;\n processed += String.fromCodePoint(codePoint);\n break;\n case \"ignored\":\n break;\n case \"mapped\":\n processed += String.fromCodePoint.apply(String, status[2]);\n break;\n case \"deviation\":\n if (processing_option === PROCESSING_OPTIONS.TRANSITIONAL) {\n processed += String.fromCodePoint.apply(String, status[2]);\n } else {\n processed += String.fromCodePoint(codePoint);\n }\n break;\n case \"valid\":\n processed += String.fromCodePoint(codePoint);\n break;\n case \"disallowed_STD3_mapped\":\n if (useSTD3) {\n hasError = true;\n processed += String.fromCodePoint(codePoint);\n } else {\n processed += String.fromCodePoint.apply(String, status[2]);\n }\n break;\n case \"disallowed_STD3_valid\":\n if (useSTD3) {\n hasError = true;\n }\n\n processed += String.fromCodePoint(codePoint);\n break;\n }\n }\n\n return {\n string: processed,\n error: hasError\n };\n}\n\nvar combiningMarksRegex = 
/[\\u0300-\\u036F\\u0483-\\u0489\\u0591-\\u05BD\\u05BF\\u05C1\\u05C2\\u05C4\\u05C5\\u05C7\\u0610-\\u061A\\u064B-\\u065F\\u0670\\u06D6-\\u06DC\\u06DF-\\u06E4\\u06E7\\u06E8\\u06EA-\\u06ED\\u0711\\u0730-\\u074A\\u07A6-\\u07B0\\u07EB-\\u07F3\\u0816-\\u0819\\u081B-\\u0823\\u0825-\\u0827\\u0829-\\u082D\\u0859-\\u085B\\u08E4-\\u0903\\u093A-\\u093C\\u093E-\\u094F\\u0951-\\u0957\\u0962\\u0963\\u0981-\\u0983\\u09BC\\u09BE-\\u09C4\\u09C7\\u09C8\\u09CB-\\u09CD\\u09D7\\u09E2\\u09E3\\u0A01-\\u0A03\\u0A3C\\u0A3E-\\u0A42\\u0A47\\u0A48\\u0A4B-\\u0A4D\\u0A51\\u0A70\\u0A71\\u0A75\\u0A81-\\u0A83\\u0ABC\\u0ABE-\\u0AC5\\u0AC7-\\u0AC9\\u0ACB-\\u0ACD\\u0AE2\\u0AE3\\u0B01-\\u0B03\\u0B3C\\u0B3E-\\u0B44\\u0B47\\u0B48\\u0B4B-\\u0B4D\\u0B56\\u0B57\\u0B62\\u0B63\\u0B82\\u0BBE-\\u0BC2\\u0BC6-\\u0BC8\\u0BCA-\\u0BCD\\u0BD7\\u0C00-\\u0C03\\u0C3E-\\u0C44\\u0C46-\\u0C48\\u0C4A-\\u0C4D\\u0C55\\u0C56\\u0C62\\u0C63\\u0C81-\\u0C83\\u0CBC\\u0CBE-\\u0CC4\\u0CC6-\\u0CC8\\u0CCA-\\u0CCD\\u0CD5\\u0CD6\\u0CE2\\u0CE3\\u0D01-\\u0D03\\u0D3E-\\u0D44\\u0D46-\\u0D48\\u0D4A-\\u0D4D\\u0D57\\u0D62\\u0D63\\u0D82\\u0D83\\u0DCA\\u0DCF-\\u0DD4\\u0DD6\\u0DD8-\\u0DDF\\u0DF2\\u0DF3\\u0E31\\u0E34-\\u0E3A\\u0E47-\\u0E4E\\u0EB1\\u0EB4-\\u0EB9\\u0EBB\\u0EBC\\u0EC8-\\u0ECD\\u0F18\\u0F19\\u0F35\\u0F37\\u0F39\\u0F3E\\u0F3F\\u0F71-\\u0F84\\u0F86\\u0F87\\u0F8D-\\u0F97\\u0F99-\\u0FBC\\u0FC6\\u102B-\\u103E\\u1056-\\u1059\\u105E-\\u1060\\u1062-\\u1064\\u1067-\\u106D\\u1071-\\u1074\\u1082-\\u108D\\u108F\\u109A-\\u109D\\u135D-\\u135F\\u1712-\\u1714\\u1732-\\u1734\\u1752\\u1753\\u1772\\u1773\\u17B4-\\u17D3\\u17DD\\u180B-\\u180D\\u18A9\\u1920-\\u192B\\u1930-\\u193B\\u19B0-\\u19C0\\u19C8\\u19C9\\u1A17-\\u1A1B\\u1A55-\\u1A5E\\u1A60-\\u1A7C\\u1A7F\\u1AB0-\\u1ABE\\u1B00-\\u1B04\\u1B34-\\u1B44\\u1B6B-\\u1B73\\u1B80-\\u1B82\\u1BA1-\\u1BAD\\u1BE6-\\u1BF3\\u1C24-\\u1C37\\u1CD0-\\u1CD2\\u1CD4-\\u1CE8\\u1CED\\u1CF2-\\u1CF4\\u1CF8\\u1CF9\\u1DC0-\\u1DF5\\u1DFC-\\u1DFF\\u20D0-\\u20F0\\u2CEF-\\u2CF1\\u2D7F\\u2DE0-\\u2DFF\\u302A-\\u302F\\u3099\\u309A\\uA66F-\\uA672\\uA674-\\uA67D\\uA69F\\uA6F0\\uA6F1\\uA802\\uA806\\uA80B\\uA823-\\uA827\\uA880\\uA881\\uA8B4-\\uA8C4\\uA8E0-\\uA8F1\\uA926-\\uA92D\\uA947-\\uA953\\uA980-\\uA983\\uA9B3-\\uA9C0\\uA9E5\\uAA29-\\uAA36\\uAA43\\uAA4C\\uAA4D\\uAA7B-\\uAA7D\\uAAB0\\uAAB2-\\uAAB4\\uAAB7\\uAAB8\\uAABE\\uAABF\\uAAC1\\uAAEB-\\uAAEF\\uAAF5\\uAAF6\\uABE3-\\uABEA\\uABEC\\uABED\\uFB1E\\uFE00-\\uFE0F\\uFE20-\\uFE2D]|\\uD800[\\uDDFD\\uDEE0\\uDF76-\\uDF7A]|\\uD802[\\uDE01-\\uDE03\\uDE05\\uDE06\\uDE0C-\\uDE0F\\uDE38-\\uDE3A\\uDE3F\\uDEE5\\uDEE6]|\\uD804[\\uDC00-\\uDC02\\uDC38-\\uDC46\\uDC7F-\\uDC82\\uDCB0-\\uDCBA\\uDD00-\\uDD02\\uDD27-\\uDD34\\uDD73\\uDD80-\\uDD82\\uDDB3-\\uDDC0\\uDE2C-\\uDE37\\uDEDF-\\uDEEA\\uDF01-\\uDF03\\uDF3C\\uDF3E-\\uDF44\\uDF47\\uDF48\\uDF4B-\\uDF4D\\uDF57\\uDF62\\uDF63\\uDF66-\\uDF6C\\uDF70-\\uDF74]|\\uD805[\\uDCB0-\\uDCC3\\uDDAF-\\uDDB5\\uDDB8-\\uDDC0\\uDE30-\\uDE40\\uDEAB-\\uDEB7]|\\uD81A[\\uDEF0-\\uDEF4\\uDF30-\\uDF36]|\\uD81B[\\uDF51-\\uDF7E\\uDF8F-\\uDF92]|\\uD82F[\\uDC9D\\uDC9E]|\\uD834[\\uDD65-\\uDD69\\uDD6D-\\uDD72\\uDD7B-\\uDD82\\uDD85-\\uDD8B\\uDDAA-\\uDDAD\\uDE42-\\uDE44]|\\uD83A[\\uDCD0-\\uDCD6]|\\uDB40[\\uDD00-\\uDDEF]/;\n\nfunction validateLabel(label, processing_option) {\n if (label.substr(0, 4) === \"xn--\") {\n label = punycode.toUnicode(label);\n processing_option = PROCESSING_OPTIONS.NONTRANSITIONAL;\n }\n\n var error = false;\n\n if (normalize(label) !== label ||\n (label[3] === \"-\" && label[4] === \"-\") ||\n label[0] === \"-\" || label[label.length - 1] === \"-\" ||\n label.indexOf(\".\") !== -1 ||\n 
label.search(combiningMarksRegex) === 0) {\n error = true;\n }\n\n var len = countSymbols(label);\n for (var i = 0; i < len; ++i) {\n var status = findStatus(label.codePointAt(i));\n if ((processing === PROCESSING_OPTIONS.TRANSITIONAL && status[1] !== \"valid\") ||\n (processing === PROCESSING_OPTIONS.NONTRANSITIONAL &&\n status[1] !== \"valid\" && status[1] !== \"deviation\")) {\n error = true;\n break;\n }\n }\n\n return {\n label: label,\n error: error\n };\n}\n\nfunction processing(domain_name, useSTD3, processing_option) {\n var result = mapChars(domain_name, useSTD3, processing_option);\n result.string = normalize(result.string);\n\n var labels = result.string.split(\".\");\n for (var i = 0; i < labels.length; ++i) {\n try {\n var validation = validateLabel(labels[i]);\n labels[i] = validation.label;\n result.error = result.error || validation.error;\n } catch(e) {\n result.error = true;\n }\n }\n\n return {\n string: labels.join(\".\"),\n error: result.error\n };\n}\n\nmodule.exports.toASCII = function(domain_name, useSTD3, processing_option, verifyDnsLength) {\n var result = processing(domain_name, useSTD3, processing_option);\n var labels = result.string.split(\".\");\n labels = labels.map(function(l) {\n try {\n return punycode.toASCII(l);\n } catch(e) {\n result.error = true;\n return l;\n }\n });\n\n if (verifyDnsLength) {\n var total = labels.slice(0, labels.length - 1).join(\".\").length;\n if (total.length > 253 || total.length === 0) {\n result.error = true;\n }\n\n for (var i=0; i < labels.length; ++i) {\n if (labels.length > 63 || labels.length === 0) {\n result.error = true;\n break;\n }\n }\n }\n\n if (result.error) return null;\n return labels.join(\".\");\n};\n\nmodule.exports.toUnicode = function(domain_name, useSTD3) {\n var result = processing(domain_name, useSTD3, PROCESSING_OPTIONS.NONTRANSITIONAL);\n\n return {\n domain: result.string,\n error: result.error\n };\n};\n\nmodule.exports.PROCESSING_OPTIONS = PROCESSING_OPTIONS;\n","module.exports = require('./lib/tunnel');\n","'use strict';\n\nvar net = require('net');\nvar tls = require('tls');\nvar http = require('http');\nvar https = require('https');\nvar events = require('events');\nvar assert = require('assert');\nvar util = require('util');\n\n\nexports.httpOverHttp = httpOverHttp;\nexports.httpsOverHttp = httpsOverHttp;\nexports.httpOverHttps = httpOverHttps;\nexports.httpsOverHttps = httpsOverHttps;\n\n\nfunction httpOverHttp(options) {\n var agent = new TunnelingAgent(options);\n agent.request = http.request;\n return agent;\n}\n\nfunction httpsOverHttp(options) {\n var agent = new TunnelingAgent(options);\n agent.request = http.request;\n agent.createSocket = createSecureSocket;\n agent.defaultPort = 443;\n return agent;\n}\n\nfunction httpOverHttps(options) {\n var agent = new TunnelingAgent(options);\n agent.request = https.request;\n return agent;\n}\n\nfunction httpsOverHttps(options) {\n var agent = new TunnelingAgent(options);\n agent.request = https.request;\n agent.createSocket = createSecureSocket;\n agent.defaultPort = 443;\n return agent;\n}\n\n\nfunction TunnelingAgent(options) {\n var self = this;\n self.options = options || {};\n self.proxyOptions = self.options.proxy || {};\n self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;\n self.requests = [];\n self.sockets = [];\n\n self.on('free', function onFree(socket, host, port, localAddress) {\n var options = toOptions(host, port, localAddress);\n for (var i = 0, len = self.requests.length; i < len; ++i) {\n var pending = 
self.requests[i];\n if (pending.host === options.host && pending.port === options.port) {\n // Detect the request to connect same origin server,\n // reuse the connection.\n self.requests.splice(i, 1);\n pending.request.onSocket(socket);\n return;\n }\n }\n socket.destroy();\n self.removeSocket(socket);\n });\n}\nutil.inherits(TunnelingAgent, events.EventEmitter);\n\nTunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {\n var self = this;\n var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));\n\n if (self.sockets.length >= this.maxSockets) {\n // We are over limit so we'll add it to the queue.\n self.requests.push(options);\n return;\n }\n\n // If we are under maxSockets create a new one.\n self.createSocket(options, function(socket) {\n socket.on('free', onFree);\n socket.on('close', onCloseOrRemove);\n socket.on('agentRemove', onCloseOrRemove);\n req.onSocket(socket);\n\n function onFree() {\n self.emit('free', socket, options);\n }\n\n function onCloseOrRemove(err) {\n self.removeSocket(socket);\n socket.removeListener('free', onFree);\n socket.removeListener('close', onCloseOrRemove);\n socket.removeListener('agentRemove', onCloseOrRemove);\n }\n });\n};\n\nTunnelingAgent.prototype.createSocket = function createSocket(options, cb) {\n var self = this;\n var placeholder = {};\n self.sockets.push(placeholder);\n\n var connectOptions = mergeOptions({}, self.proxyOptions, {\n method: 'CONNECT',\n path: options.host + ':' + options.port,\n agent: false,\n headers: {\n host: options.host + ':' + options.port\n }\n });\n if (options.localAddress) {\n connectOptions.localAddress = options.localAddress;\n }\n if (connectOptions.proxyAuth) {\n connectOptions.headers = connectOptions.headers || {};\n connectOptions.headers['Proxy-Authorization'] = 'Basic ' +\n new Buffer(connectOptions.proxyAuth).toString('base64');\n }\n\n debug('making CONNECT request');\n var connectReq = self.request(connectOptions);\n connectReq.useChunkedEncodingByDefault = false; // for v0.6\n connectReq.once('response', onResponse); // for v0.6\n connectReq.once('upgrade', onUpgrade); // for v0.6\n connectReq.once('connect', onConnect); // for v0.7 or later\n connectReq.once('error', onError);\n connectReq.end();\n\n function onResponse(res) {\n // Very hacky. 
This is necessary to avoid http-parser leaks.\n res.upgrade = true;\n }\n\n function onUpgrade(res, socket, head) {\n // Hacky.\n process.nextTick(function() {\n onConnect(res, socket, head);\n });\n }\n\n function onConnect(res, socket, head) {\n connectReq.removeAllListeners();\n socket.removeAllListeners();\n\n if (res.statusCode !== 200) {\n debug('tunneling socket could not be established, statusCode=%d',\n res.statusCode);\n socket.destroy();\n var error = new Error('tunneling socket could not be established, ' +\n 'statusCode=' + res.statusCode);\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n return;\n }\n if (head.length > 0) {\n debug('got illegal response body from proxy');\n socket.destroy();\n var error = new Error('got illegal response body from proxy');\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n return;\n }\n debug('tunneling connection has established');\n self.sockets[self.sockets.indexOf(placeholder)] = socket;\n return cb(socket);\n }\n\n function onError(cause) {\n connectReq.removeAllListeners();\n\n debug('tunneling socket could not be established, cause=%s\\n',\n cause.message, cause.stack);\n var error = new Error('tunneling socket could not be established, ' +\n 'cause=' + cause.message);\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n }\n};\n\nTunnelingAgent.prototype.removeSocket = function removeSocket(socket) {\n var pos = this.sockets.indexOf(socket)\n if (pos === -1) {\n return;\n }\n this.sockets.splice(pos, 1);\n\n var pending = this.requests.shift();\n if (pending) {\n // If we have pending requests and a socket gets closed a new one\n // needs to be created to take over in the pool for the one that closed.\n this.createSocket(pending, function(socket) {\n pending.request.onSocket(socket);\n });\n }\n};\n\nfunction createSecureSocket(options, cb) {\n var self = this;\n TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {\n var hostHeader = options.request.getHeader('host');\n var tlsOptions = mergeOptions({}, self.options, {\n socket: socket,\n servername: hostHeader ? 
hostHeader.replace(/:.*$/, '') : options.host\n });\n\n // 0 is dummy port for v0.6\n var secureSocket = tls.connect(0, tlsOptions);\n self.sockets[self.sockets.indexOf(socket)] = secureSocket;\n cb(secureSocket);\n });\n}\n\n\nfunction toOptions(host, port, localAddress) {\n if (typeof host === 'string') { // since v0.10\n return {\n host: host,\n port: port,\n localAddress: localAddress\n };\n }\n return host; // for v0.11 or later\n}\n\nfunction mergeOptions(target) {\n for (var i = 1, len = arguments.length; i < len; ++i) {\n var overrides = arguments[i];\n if (typeof overrides === 'object') {\n var keys = Object.keys(overrides);\n for (var j = 0, keyLen = keys.length; j < keyLen; ++j) {\n var k = keys[j];\n if (overrides[k] !== undefined) {\n target[k] = overrides[k];\n }\n }\n }\n }\n return target;\n}\n\n\nvar debug;\nif (process.env.NODE_DEBUG && /\\btunnel\\b/.test(process.env.NODE_DEBUG)) {\n debug = function() {\n var args = Array.prototype.slice.call(arguments);\n if (typeof args[0] === 'string') {\n args[0] = 'TUNNEL: ' + args[0];\n } else {\n args.unshift('TUNNEL:');\n }\n console.error.apply(console, args);\n }\n} else {\n debug = function() {};\n}\nexports.debug = debug; // for test\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction getUserAgent() {\n if (typeof navigator === \"object\" && \"userAgent\" in navigator) {\n return navigator.userAgent;\n }\n\n if (typeof process === \"object\" && \"version\" in process) {\n return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`;\n }\n\n return \"\";\n}\n\nexports.getUserAgent = getUserAgent;\n//# sourceMappingURL=index.js.map\n","'use strict'\n\nexports.fromCallback = function (fn) {\n return Object.defineProperty(function (...args) {\n if (typeof args[args.length - 1] === 'function') fn.apply(this, args)\n else {\n return new Promise((resolve, reject) => {\n fn.call(\n this,\n ...args,\n (err, res) => (err != null) ? reject(err) : resolve(res)\n )\n })\n }\n }, 'name', { value: fn.name })\n}\n\nexports.fromPromise = function (fn) {\n return Object.defineProperty(function (...args) {\n const cb = args[args.length - 1]\n if (typeof cb !== 'function') return fn.apply(this, args)\n else fn.apply(this, args.slice(0, -1)).then(r => cb(null, r), cb)\n }, 'name', { value: fn.name })\n}\n","\"use strict\";\n\nvar conversions = {};\nmodule.exports = conversions;\n\nfunction sign(x) {\n return x < 0 ? -1 : 1;\n}\n\nfunction evenRound(x) {\n // Round x to the nearest integer, choosing the even integer if it lies halfway between two.\n if ((x % 1) === 0.5 && (x & 1) === 0) { // [even number].5; round down (i.e. floor)\n return Math.floor(x);\n } else {\n return Math.round(x);\n }\n}\n\nfunction createNumberConversion(bitLength, typeOpts) {\n if (!typeOpts.unsigned) {\n --bitLength;\n }\n const lowerBound = typeOpts.unsigned ? 0 : -Math.pow(2, bitLength);\n const upperBound = Math.pow(2, bitLength) - 1;\n\n const moduloVal = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength) : Math.pow(2, bitLength);\n const moduloBound = typeOpts.moduloBitLength ? 
Math.pow(2, typeOpts.moduloBitLength - 1) : Math.pow(2, bitLength - 1);\n\n return function(V, opts) {\n if (!opts) opts = {};\n\n let x = +V;\n\n if (opts.enforceRange) {\n if (!Number.isFinite(x)) {\n throw new TypeError(\"Argument is not a finite number\");\n }\n\n x = sign(x) * Math.floor(Math.abs(x));\n if (x < lowerBound || x > upperBound) {\n throw new TypeError(\"Argument is not in byte range\");\n }\n\n return x;\n }\n\n if (!isNaN(x) && opts.clamp) {\n x = evenRound(x);\n\n if (x < lowerBound) x = lowerBound;\n if (x > upperBound) x = upperBound;\n return x;\n }\n\n if (!Number.isFinite(x) || x === 0) {\n return 0;\n }\n\n x = sign(x) * Math.floor(Math.abs(x));\n x = x % moduloVal;\n\n if (!typeOpts.unsigned && x >= moduloBound) {\n return x - moduloVal;\n } else if (typeOpts.unsigned) {\n if (x < 0) {\n x += moduloVal;\n } else if (x === -0) { // don't return negative zero\n return 0;\n }\n }\n\n return x;\n }\n}\n\nconversions[\"void\"] = function () {\n return undefined;\n};\n\nconversions[\"boolean\"] = function (val) {\n return !!val;\n};\n\nconversions[\"byte\"] = createNumberConversion(8, { unsigned: false });\nconversions[\"octet\"] = createNumberConversion(8, { unsigned: true });\n\nconversions[\"short\"] = createNumberConversion(16, { unsigned: false });\nconversions[\"unsigned short\"] = createNumberConversion(16, { unsigned: true });\n\nconversions[\"long\"] = createNumberConversion(32, { unsigned: false });\nconversions[\"unsigned long\"] = createNumberConversion(32, { unsigned: true });\n\nconversions[\"long long\"] = createNumberConversion(32, { unsigned: false, moduloBitLength: 64 });\nconversions[\"unsigned long long\"] = createNumberConversion(32, { unsigned: true, moduloBitLength: 64 });\n\nconversions[\"double\"] = function (V) {\n const x = +V;\n\n if (!Number.isFinite(x)) {\n throw new TypeError(\"Argument is not a finite floating-point value\");\n }\n\n return x;\n};\n\nconversions[\"unrestricted double\"] = function (V) {\n const x = +V;\n\n if (isNaN(x)) {\n throw new TypeError(\"Argument is NaN\");\n }\n\n return x;\n};\n\n// not quite valid, but good enough for JS\nconversions[\"float\"] = conversions[\"double\"];\nconversions[\"unrestricted float\"] = conversions[\"unrestricted double\"];\n\nconversions[\"DOMString\"] = function (V, opts) {\n if (!opts) opts = {};\n\n if (opts.treatNullAsEmptyString && V === null) {\n return \"\";\n }\n\n return String(V);\n};\n\nconversions[\"ByteString\"] = function (V, opts) {\n const x = String(V);\n let c = undefined;\n for (let i = 0; (c = x.codePointAt(i)) !== undefined; ++i) {\n if (c > 255) {\n throw new TypeError(\"Argument is not a valid bytestring\");\n }\n }\n\n return x;\n};\n\nconversions[\"USVString\"] = function (V) {\n const S = String(V);\n const n = S.length;\n const U = [];\n for (let i = 0; i < n; ++i) {\n const c = S.charCodeAt(i);\n if (c < 0xD800 || c > 0xDFFF) {\n U.push(String.fromCodePoint(c));\n } else if (0xDC00 <= c && c <= 0xDFFF) {\n U.push(String.fromCodePoint(0xFFFD));\n } else {\n if (i === n - 1) {\n U.push(String.fromCodePoint(0xFFFD));\n } else {\n const d = S.charCodeAt(i + 1);\n if (0xDC00 <= d && d <= 0xDFFF) {\n const a = c & 0x3FF;\n const b = d & 0x3FF;\n U.push(String.fromCodePoint((2 << 15) + (2 << 9) * a + b));\n ++i;\n } else {\n U.push(String.fromCodePoint(0xFFFD));\n }\n }\n }\n }\n\n return U.join('');\n};\n\nconversions[\"Date\"] = function (V, opts) {\n if (!(V instanceof Date)) {\n throw new TypeError(\"Argument is not a Date object\");\n }\n if (isNaN(V)) {\n 
return undefined;\n }\n\n return V;\n};\n\nconversions[\"RegExp\"] = function (V, opts) {\n if (!(V instanceof RegExp)) {\n V = new RegExp(V);\n }\n\n return V;\n};\n","\"use strict\";\nconst usm = require(\"./url-state-machine\");\n\nexports.implementation = class URLImpl {\n constructor(constructorArgs) {\n const url = constructorArgs[0];\n const base = constructorArgs[1];\n\n let parsedBase = null;\n if (base !== undefined) {\n parsedBase = usm.basicURLParse(base);\n if (parsedBase === \"failure\") {\n throw new TypeError(\"Invalid base URL\");\n }\n }\n\n const parsedURL = usm.basicURLParse(url, { baseURL: parsedBase });\n if (parsedURL === \"failure\") {\n throw new TypeError(\"Invalid URL\");\n }\n\n this._url = parsedURL;\n\n // TODO: query stuff\n }\n\n get href() {\n return usm.serializeURL(this._url);\n }\n\n set href(v) {\n const parsedURL = usm.basicURLParse(v);\n if (parsedURL === \"failure\") {\n throw new TypeError(\"Invalid URL\");\n }\n\n this._url = parsedURL;\n }\n\n get origin() {\n return usm.serializeURLOrigin(this._url);\n }\n\n get protocol() {\n return this._url.scheme + \":\";\n }\n\n set protocol(v) {\n usm.basicURLParse(v + \":\", { url: this._url, stateOverride: \"scheme start\" });\n }\n\n get username() {\n return this._url.username;\n }\n\n set username(v) {\n if (usm.cannotHaveAUsernamePasswordPort(this._url)) {\n return;\n }\n\n usm.setTheUsername(this._url, v);\n }\n\n get password() {\n return this._url.password;\n }\n\n set password(v) {\n if (usm.cannotHaveAUsernamePasswordPort(this._url)) {\n return;\n }\n\n usm.setThePassword(this._url, v);\n }\n\n get host() {\n const url = this._url;\n\n if (url.host === null) {\n return \"\";\n }\n\n if (url.port === null) {\n return usm.serializeHost(url.host);\n }\n\n return usm.serializeHost(url.host) + \":\" + usm.serializeInteger(url.port);\n }\n\n set host(v) {\n if (this._url.cannotBeABaseURL) {\n return;\n }\n\n usm.basicURLParse(v, { url: this._url, stateOverride: \"host\" });\n }\n\n get hostname() {\n if (this._url.host === null) {\n return \"\";\n }\n\n return usm.serializeHost(this._url.host);\n }\n\n set hostname(v) {\n if (this._url.cannotBeABaseURL) {\n return;\n }\n\n usm.basicURLParse(v, { url: this._url, stateOverride: \"hostname\" });\n }\n\n get port() {\n if (this._url.port === null) {\n return \"\";\n }\n\n return usm.serializeInteger(this._url.port);\n }\n\n set port(v) {\n if (usm.cannotHaveAUsernamePasswordPort(this._url)) {\n return;\n }\n\n if (v === \"\") {\n this._url.port = null;\n } else {\n usm.basicURLParse(v, { url: this._url, stateOverride: \"port\" });\n }\n }\n\n get pathname() {\n if (this._url.cannotBeABaseURL) {\n return this._url.path[0];\n }\n\n if (this._url.path.length === 0) {\n return \"\";\n }\n\n return \"/\" + this._url.path.join(\"/\");\n }\n\n set pathname(v) {\n if (this._url.cannotBeABaseURL) {\n return;\n }\n\n this._url.path = [];\n usm.basicURLParse(v, { url: this._url, stateOverride: \"path start\" });\n }\n\n get search() {\n if (this._url.query === null || this._url.query === \"\") {\n return \"\";\n }\n\n return \"?\" + this._url.query;\n }\n\n set search(v) {\n // TODO: query stuff\n\n const url = this._url;\n\n if (v === \"\") {\n url.query = null;\n return;\n }\n\n const input = v[0] === \"?\" ? 
v.substring(1) : v;\n url.query = \"\";\n usm.basicURLParse(input, { url, stateOverride: \"query\" });\n }\n\n get hash() {\n if (this._url.fragment === null || this._url.fragment === \"\") {\n return \"\";\n }\n\n return \"#\" + this._url.fragment;\n }\n\n set hash(v) {\n if (v === \"\") {\n this._url.fragment = null;\n return;\n }\n\n const input = v[0] === \"#\" ? v.substring(1) : v;\n this._url.fragment = \"\";\n usm.basicURLParse(input, { url: this._url, stateOverride: \"fragment\" });\n }\n\n toJSON() {\n return this.href;\n }\n};\n","\"use strict\";\n\nconst conversions = require(\"webidl-conversions\");\nconst utils = require(\"./utils.js\");\nconst Impl = require(\".//URL-impl.js\");\n\nconst impl = utils.implSymbol;\n\nfunction URL(url) {\n if (!this || this[impl] || !(this instanceof URL)) {\n throw new TypeError(\"Failed to construct 'URL': Please use the 'new' operator, this DOM object constructor cannot be called as a function.\");\n }\n if (arguments.length < 1) {\n throw new TypeError(\"Failed to construct 'URL': 1 argument required, but only \" + arguments.length + \" present.\");\n }\n const args = [];\n for (let i = 0; i < arguments.length && i < 2; ++i) {\n args[i] = arguments[i];\n }\n args[0] = conversions[\"USVString\"](args[0]);\n if (args[1] !== undefined) {\n args[1] = conversions[\"USVString\"](args[1]);\n }\n\n module.exports.setup(this, args);\n}\n\nURL.prototype.toJSON = function toJSON() {\n if (!this || !module.exports.is(this)) {\n throw new TypeError(\"Illegal invocation\");\n }\n const args = [];\n for (let i = 0; i < arguments.length && i < 0; ++i) {\n args[i] = arguments[i];\n }\n return this[impl].toJSON.apply(this[impl], args);\n};\nObject.defineProperty(URL.prototype, \"href\", {\n get() {\n return this[impl].href;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].href = V;\n },\n enumerable: true,\n configurable: true\n});\n\nURL.prototype.toString = function () {\n if (!this || !module.exports.is(this)) {\n throw new TypeError(\"Illegal invocation\");\n }\n return this.href;\n};\n\nObject.defineProperty(URL.prototype, \"origin\", {\n get() {\n return this[impl].origin;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"protocol\", {\n get() {\n return this[impl].protocol;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].protocol = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"username\", {\n get() {\n return this[impl].username;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].username = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"password\", {\n get() {\n return this[impl].password;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].password = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"host\", {\n get() {\n return this[impl].host;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].host = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"hostname\", {\n get() {\n return this[impl].hostname;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].hostname = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"port\", {\n get() {\n return this[impl].port;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].port = V;\n },\n enumerable: true,\n configurable: 
true\n});\n\nObject.defineProperty(URL.prototype, \"pathname\", {\n get() {\n return this[impl].pathname;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].pathname = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"search\", {\n get() {\n return this[impl].search;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].search = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"hash\", {\n get() {\n return this[impl].hash;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].hash = V;\n },\n enumerable: true,\n configurable: true\n});\n\n\nmodule.exports = {\n is(obj) {\n return !!obj && obj[impl] instanceof Impl.implementation;\n },\n create(constructorArgs, privateData) {\n let obj = Object.create(URL.prototype);\n this.setup(obj, constructorArgs, privateData);\n return obj;\n },\n setup(obj, constructorArgs, privateData) {\n if (!privateData) privateData = {};\n privateData.wrapper = obj;\n\n obj[impl] = new Impl.implementation(constructorArgs, privateData);\n obj[impl][utils.wrapperSymbol] = obj;\n },\n interface: URL,\n expose: {\n Window: { URL: URL },\n Worker: { URL: URL }\n }\n};\n\n","\"use strict\";\n\nexports.URL = require(\"./URL\").interface;\nexports.serializeURL = require(\"./url-state-machine\").serializeURL;\nexports.serializeURLOrigin = require(\"./url-state-machine\").serializeURLOrigin;\nexports.basicURLParse = require(\"./url-state-machine\").basicURLParse;\nexports.setTheUsername = require(\"./url-state-machine\").setTheUsername;\nexports.setThePassword = require(\"./url-state-machine\").setThePassword;\nexports.serializeHost = require(\"./url-state-machine\").serializeHost;\nexports.serializeInteger = require(\"./url-state-machine\").serializeInteger;\nexports.parseURL = require(\"./url-state-machine\").parseURL;\n","\"use strict\";\r\nconst punycode = require(\"punycode\");\r\nconst tr46 = require(\"tr46\");\r\n\r\nconst specialSchemes = {\r\n ftp: 21,\r\n file: null,\r\n gopher: 70,\r\n http: 80,\r\n https: 443,\r\n ws: 80,\r\n wss: 443\r\n};\r\n\r\nconst failure = Symbol(\"failure\");\r\n\r\nfunction countSymbols(str) {\r\n return punycode.ucs2.decode(str).length;\r\n}\r\n\r\nfunction at(input, idx) {\r\n const c = input[idx];\r\n return isNaN(c) ? 
undefined : String.fromCodePoint(c);\r\n}\r\n\r\nfunction isASCIIDigit(c) {\r\n return c >= 0x30 && c <= 0x39;\r\n}\r\n\r\nfunction isASCIIAlpha(c) {\r\n return (c >= 0x41 && c <= 0x5A) || (c >= 0x61 && c <= 0x7A);\r\n}\r\n\r\nfunction isASCIIAlphanumeric(c) {\r\n return isASCIIAlpha(c) || isASCIIDigit(c);\r\n}\r\n\r\nfunction isASCIIHex(c) {\r\n return isASCIIDigit(c) || (c >= 0x41 && c <= 0x46) || (c >= 0x61 && c <= 0x66);\r\n}\r\n\r\nfunction isSingleDot(buffer) {\r\n return buffer === \".\" || buffer.toLowerCase() === \"%2e\";\r\n}\r\n\r\nfunction isDoubleDot(buffer) {\r\n buffer = buffer.toLowerCase();\r\n return buffer === \"..\" || buffer === \"%2e.\" || buffer === \".%2e\" || buffer === \"%2e%2e\";\r\n}\r\n\r\nfunction isWindowsDriveLetterCodePoints(cp1, cp2) {\r\n return isASCIIAlpha(cp1) && (cp2 === 58 || cp2 === 124);\r\n}\r\n\r\nfunction isWindowsDriveLetterString(string) {\r\n return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && (string[1] === \":\" || string[1] === \"|\");\r\n}\r\n\r\nfunction isNormalizedWindowsDriveLetterString(string) {\r\n return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && string[1] === \":\";\r\n}\r\n\r\nfunction containsForbiddenHostCodePoint(string) {\r\n return string.search(/\\u0000|\\u0009|\\u000A|\\u000D|\\u0020|#|%|\\/|:|\\?|@|\\[|\\\\|\\]/) !== -1;\r\n}\r\n\r\nfunction containsForbiddenHostCodePointExcludingPercent(string) {\r\n return string.search(/\\u0000|\\u0009|\\u000A|\\u000D|\\u0020|#|\\/|:|\\?|@|\\[|\\\\|\\]/) !== -1;\r\n}\r\n\r\nfunction isSpecialScheme(scheme) {\r\n return specialSchemes[scheme] !== undefined;\r\n}\r\n\r\nfunction isSpecial(url) {\r\n return isSpecialScheme(url.scheme);\r\n}\r\n\r\nfunction defaultPort(scheme) {\r\n return specialSchemes[scheme];\r\n}\r\n\r\nfunction percentEncode(c) {\r\n let hex = c.toString(16).toUpperCase();\r\n if (hex.length === 1) {\r\n hex = \"0\" + hex;\r\n }\r\n\r\n return \"%\" + hex;\r\n}\r\n\r\nfunction utf8PercentEncode(c) {\r\n const buf = new Buffer(c);\r\n\r\n let str = \"\";\r\n\r\n for (let i = 0; i < buf.length; ++i) {\r\n str += percentEncode(buf[i]);\r\n }\r\n\r\n return str;\r\n}\r\n\r\nfunction utf8PercentDecode(str) {\r\n const input = new Buffer(str);\r\n const output = [];\r\n for (let i = 0; i < input.length; ++i) {\r\n if (input[i] !== 37) {\r\n output.push(input[i]);\r\n } else if (input[i] === 37 && isASCIIHex(input[i + 1]) && isASCIIHex(input[i + 2])) {\r\n output.push(parseInt(input.slice(i + 1, i + 3).toString(), 16));\r\n i += 2;\r\n } else {\r\n output.push(input[i]);\r\n }\r\n }\r\n return new Buffer(output).toString();\r\n}\r\n\r\nfunction isC0ControlPercentEncode(c) {\r\n return c <= 0x1F || c > 0x7E;\r\n}\r\n\r\nconst extraPathPercentEncodeSet = new Set([32, 34, 35, 60, 62, 63, 96, 123, 125]);\r\nfunction isPathPercentEncode(c) {\r\n return isC0ControlPercentEncode(c) || extraPathPercentEncodeSet.has(c);\r\n}\r\n\r\nconst extraUserinfoPercentEncodeSet =\r\n new Set([47, 58, 59, 61, 64, 91, 92, 93, 94, 124]);\r\nfunction isUserinfoPercentEncode(c) {\r\n return isPathPercentEncode(c) || extraUserinfoPercentEncodeSet.has(c);\r\n}\r\n\r\nfunction percentEncodeChar(c, encodeSetPredicate) {\r\n const cStr = String.fromCodePoint(c);\r\n\r\n if (encodeSetPredicate(c)) {\r\n return utf8PercentEncode(cStr);\r\n }\r\n\r\n return cStr;\r\n}\r\n\r\nfunction parseIPv4Number(input) {\r\n let R = 10;\r\n\r\n if (input.length >= 2 && input.charAt(0) === \"0\" && input.charAt(1).toLowerCase() === \"x\") {\r\n input = input.substring(2);\r\n R = 
16;\r\n } else if (input.length >= 2 && input.charAt(0) === \"0\") {\r\n input = input.substring(1);\r\n R = 8;\r\n }\r\n\r\n if (input === \"\") {\r\n return 0;\r\n }\r\n\r\n const regex = R === 10 ? /[^0-9]/ : (R === 16 ? /[^0-9A-Fa-f]/ : /[^0-7]/);\r\n if (regex.test(input)) {\r\n return failure;\r\n }\r\n\r\n return parseInt(input, R);\r\n}\r\n\r\nfunction parseIPv4(input) {\r\n const parts = input.split(\".\");\r\n if (parts[parts.length - 1] === \"\") {\r\n if (parts.length > 1) {\r\n parts.pop();\r\n }\r\n }\r\n\r\n if (parts.length > 4) {\r\n return input;\r\n }\r\n\r\n const numbers = [];\r\n for (const part of parts) {\r\n if (part === \"\") {\r\n return input;\r\n }\r\n const n = parseIPv4Number(part);\r\n if (n === failure) {\r\n return input;\r\n }\r\n\r\n numbers.push(n);\r\n }\r\n\r\n for (let i = 0; i < numbers.length - 1; ++i) {\r\n if (numbers[i] > 255) {\r\n return failure;\r\n }\r\n }\r\n if (numbers[numbers.length - 1] >= Math.pow(256, 5 - numbers.length)) {\r\n return failure;\r\n }\r\n\r\n let ipv4 = numbers.pop();\r\n let counter = 0;\r\n\r\n for (const n of numbers) {\r\n ipv4 += n * Math.pow(256, 3 - counter);\r\n ++counter;\r\n }\r\n\r\n return ipv4;\r\n}\r\n\r\nfunction serializeIPv4(address) {\r\n let output = \"\";\r\n let n = address;\r\n\r\n for (let i = 1; i <= 4; ++i) {\r\n output = String(n % 256) + output;\r\n if (i !== 4) {\r\n output = \".\" + output;\r\n }\r\n n = Math.floor(n / 256);\r\n }\r\n\r\n return output;\r\n}\r\n\r\nfunction parseIPv6(input) {\r\n const address = [0, 0, 0, 0, 0, 0, 0, 0];\r\n let pieceIndex = 0;\r\n let compress = null;\r\n let pointer = 0;\r\n\r\n input = punycode.ucs2.decode(input);\r\n\r\n if (input[pointer] === 58) {\r\n if (input[pointer + 1] !== 58) {\r\n return failure;\r\n }\r\n\r\n pointer += 2;\r\n ++pieceIndex;\r\n compress = pieceIndex;\r\n }\r\n\r\n while (pointer < input.length) {\r\n if (pieceIndex === 8) {\r\n return failure;\r\n }\r\n\r\n if (input[pointer] === 58) {\r\n if (compress !== null) {\r\n return failure;\r\n }\r\n ++pointer;\r\n ++pieceIndex;\r\n compress = pieceIndex;\r\n continue;\r\n }\r\n\r\n let value = 0;\r\n let length = 0;\r\n\r\n while (length < 4 && isASCIIHex(input[pointer])) {\r\n value = value * 0x10 + parseInt(at(input, pointer), 16);\r\n ++pointer;\r\n ++length;\r\n }\r\n\r\n if (input[pointer] === 46) {\r\n if (length === 0) {\r\n return failure;\r\n }\r\n\r\n pointer -= length;\r\n\r\n if (pieceIndex > 6) {\r\n return failure;\r\n }\r\n\r\n let numbersSeen = 0;\r\n\r\n while (input[pointer] !== undefined) {\r\n let ipv4Piece = null;\r\n\r\n if (numbersSeen > 0) {\r\n if (input[pointer] === 46 && numbersSeen < 4) {\r\n ++pointer;\r\n } else {\r\n return failure;\r\n }\r\n }\r\n\r\n if (!isASCIIDigit(input[pointer])) {\r\n return failure;\r\n }\r\n\r\n while (isASCIIDigit(input[pointer])) {\r\n const number = parseInt(at(input, pointer));\r\n if (ipv4Piece === null) {\r\n ipv4Piece = number;\r\n } else if (ipv4Piece === 0) {\r\n return failure;\r\n } else {\r\n ipv4Piece = ipv4Piece * 10 + number;\r\n }\r\n if (ipv4Piece > 255) {\r\n return failure;\r\n }\r\n ++pointer;\r\n }\r\n\r\n address[pieceIndex] = address[pieceIndex] * 0x100 + ipv4Piece;\r\n\r\n ++numbersSeen;\r\n\r\n if (numbersSeen === 2 || numbersSeen === 4) {\r\n ++pieceIndex;\r\n }\r\n }\r\n\r\n if (numbersSeen !== 4) {\r\n return failure;\r\n }\r\n\r\n break;\r\n } else if (input[pointer] === 58) {\r\n ++pointer;\r\n if (input[pointer] === undefined) {\r\n return failure;\r\n }\r\n } else if (input[pointer] !== 
undefined) {\r\n return failure;\r\n }\r\n\r\n address[pieceIndex] = value;\r\n ++pieceIndex;\r\n }\r\n\r\n if (compress !== null) {\r\n let swaps = pieceIndex - compress;\r\n pieceIndex = 7;\r\n while (pieceIndex !== 0 && swaps > 0) {\r\n const temp = address[compress + swaps - 1];\r\n address[compress + swaps - 1] = address[pieceIndex];\r\n address[pieceIndex] = temp;\r\n --pieceIndex;\r\n --swaps;\r\n }\r\n } else if (compress === null && pieceIndex !== 8) {\r\n return failure;\r\n }\r\n\r\n return address;\r\n}\r\n\r\nfunction serializeIPv6(address) {\r\n let output = \"\";\r\n const seqResult = findLongestZeroSequence(address);\r\n const compress = seqResult.idx;\r\n let ignore0 = false;\r\n\r\n for (let pieceIndex = 0; pieceIndex <= 7; ++pieceIndex) {\r\n if (ignore0 && address[pieceIndex] === 0) {\r\n continue;\r\n } else if (ignore0) {\r\n ignore0 = false;\r\n }\r\n\r\n if (compress === pieceIndex) {\r\n const separator = pieceIndex === 0 ? \"::\" : \":\";\r\n output += separator;\r\n ignore0 = true;\r\n continue;\r\n }\r\n\r\n output += address[pieceIndex].toString(16);\r\n\r\n if (pieceIndex !== 7) {\r\n output += \":\";\r\n }\r\n }\r\n\r\n return output;\r\n}\r\n\r\nfunction parseHost(input, isSpecialArg) {\r\n if (input[0] === \"[\") {\r\n if (input[input.length - 1] !== \"]\") {\r\n return failure;\r\n }\r\n\r\n return parseIPv6(input.substring(1, input.length - 1));\r\n }\r\n\r\n if (!isSpecialArg) {\r\n return parseOpaqueHost(input);\r\n }\r\n\r\n const domain = utf8PercentDecode(input);\r\n const asciiDomain = tr46.toASCII(domain, false, tr46.PROCESSING_OPTIONS.NONTRANSITIONAL, false);\r\n if (asciiDomain === null) {\r\n return failure;\r\n }\r\n\r\n if (containsForbiddenHostCodePoint(asciiDomain)) {\r\n return failure;\r\n }\r\n\r\n const ipv4Host = parseIPv4(asciiDomain);\r\n if (typeof ipv4Host === \"number\" || ipv4Host === failure) {\r\n return ipv4Host;\r\n }\r\n\r\n return asciiDomain;\r\n}\r\n\r\nfunction parseOpaqueHost(input) {\r\n if (containsForbiddenHostCodePointExcludingPercent(input)) {\r\n return failure;\r\n }\r\n\r\n let output = \"\";\r\n const decoded = punycode.ucs2.decode(input);\r\n for (let i = 0; i < decoded.length; ++i) {\r\n output += percentEncodeChar(decoded[i], isC0ControlPercentEncode);\r\n }\r\n return output;\r\n}\r\n\r\nfunction findLongestZeroSequence(arr) {\r\n let maxIdx = null;\r\n let maxLen = 1; // only find elements > 1\r\n let currStart = null;\r\n let currLen = 0;\r\n\r\n for (let i = 0; i < arr.length; ++i) {\r\n if (arr[i] !== 0) {\r\n if (currLen > maxLen) {\r\n maxIdx = currStart;\r\n maxLen = currLen;\r\n }\r\n\r\n currStart = null;\r\n currLen = 0;\r\n } else {\r\n if (currStart === null) {\r\n currStart = i;\r\n }\r\n ++currLen;\r\n }\r\n }\r\n\r\n // if trailing zeros\r\n if (currLen > maxLen) {\r\n maxIdx = currStart;\r\n maxLen = currLen;\r\n }\r\n\r\n return {\r\n idx: maxIdx,\r\n len: maxLen\r\n };\r\n}\r\n\r\nfunction serializeHost(host) {\r\n if (typeof host === \"number\") {\r\n return serializeIPv4(host);\r\n }\r\n\r\n // IPv6 serializer\r\n if (host instanceof Array) {\r\n return \"[\" + serializeIPv6(host) + \"]\";\r\n }\r\n\r\n return host;\r\n}\r\n\r\nfunction trimControlChars(url) {\r\n return url.replace(/^[\\u0000-\\u001F\\u0020]+|[\\u0000-\\u001F\\u0020]+$/g, \"\");\r\n}\r\n\r\nfunction trimTabAndNewline(url) {\r\n return url.replace(/\\u0009|\\u000A|\\u000D/g, \"\");\r\n}\r\n\r\nfunction shortenPath(url) {\r\n const path = url.path;\r\n if (path.length === 0) {\r\n return;\r\n }\r\n if (url.scheme === 
\"file\" && path.length === 1 && isNormalizedWindowsDriveLetter(path[0])) {\r\n return;\r\n }\r\n\r\n path.pop();\r\n}\r\n\r\nfunction includesCredentials(url) {\r\n return url.username !== \"\" || url.password !== \"\";\r\n}\r\n\r\nfunction cannotHaveAUsernamePasswordPort(url) {\r\n return url.host === null || url.host === \"\" || url.cannotBeABaseURL || url.scheme === \"file\";\r\n}\r\n\r\nfunction isNormalizedWindowsDriveLetter(string) {\r\n return /^[A-Za-z]:$/.test(string);\r\n}\r\n\r\nfunction URLStateMachine(input, base, encodingOverride, url, stateOverride) {\r\n this.pointer = 0;\r\n this.input = input;\r\n this.base = base || null;\r\n this.encodingOverride = encodingOverride || \"utf-8\";\r\n this.stateOverride = stateOverride;\r\n this.url = url;\r\n this.failure = false;\r\n this.parseError = false;\r\n\r\n if (!this.url) {\r\n this.url = {\r\n scheme: \"\",\r\n username: \"\",\r\n password: \"\",\r\n host: null,\r\n port: null,\r\n path: [],\r\n query: null,\r\n fragment: null,\r\n\r\n cannotBeABaseURL: false\r\n };\r\n\r\n const res = trimControlChars(this.input);\r\n if (res !== this.input) {\r\n this.parseError = true;\r\n }\r\n this.input = res;\r\n }\r\n\r\n const res = trimTabAndNewline(this.input);\r\n if (res !== this.input) {\r\n this.parseError = true;\r\n }\r\n this.input = res;\r\n\r\n this.state = stateOverride || \"scheme start\";\r\n\r\n this.buffer = \"\";\r\n this.atFlag = false;\r\n this.arrFlag = false;\r\n this.passwordTokenSeenFlag = false;\r\n\r\n this.input = punycode.ucs2.decode(this.input);\r\n\r\n for (; this.pointer <= this.input.length; ++this.pointer) {\r\n const c = this.input[this.pointer];\r\n const cStr = isNaN(c) ? undefined : String.fromCodePoint(c);\r\n\r\n // exec state machine\r\n const ret = this[\"parse \" + this.state](c, cStr);\r\n if (!ret) {\r\n break; // terminate algorithm\r\n } else if (ret === failure) {\r\n this.failure = true;\r\n break;\r\n }\r\n }\r\n}\r\n\r\nURLStateMachine.prototype[\"parse scheme start\"] = function parseSchemeStart(c, cStr) {\r\n if (isASCIIAlpha(c)) {\r\n this.buffer += cStr.toLowerCase();\r\n this.state = \"scheme\";\r\n } else if (!this.stateOverride) {\r\n this.state = \"no scheme\";\r\n --this.pointer;\r\n } else {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse scheme\"] = function parseScheme(c, cStr) {\r\n if (isASCIIAlphanumeric(c) || c === 43 || c === 45 || c === 46) {\r\n this.buffer += cStr.toLowerCase();\r\n } else if (c === 58) {\r\n if (this.stateOverride) {\r\n if (isSpecial(this.url) && !isSpecialScheme(this.buffer)) {\r\n return false;\r\n }\r\n\r\n if (!isSpecial(this.url) && isSpecialScheme(this.buffer)) {\r\n return false;\r\n }\r\n\r\n if ((includesCredentials(this.url) || this.url.port !== null) && this.buffer === \"file\") {\r\n return false;\r\n }\r\n\r\n if (this.url.scheme === \"file\" && (this.url.host === \"\" || this.url.host === null)) {\r\n return false;\r\n }\r\n }\r\n this.url.scheme = this.buffer;\r\n this.buffer = \"\";\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n if (this.url.scheme === \"file\") {\r\n if (this.input[this.pointer + 1] !== 47 || this.input[this.pointer + 2] !== 47) {\r\n this.parseError = true;\r\n }\r\n this.state = \"file\";\r\n } else if (isSpecial(this.url) && this.base !== null && this.base.scheme === this.url.scheme) {\r\n this.state = \"special relative or authority\";\r\n } else if (isSpecial(this.url)) {\r\n this.state = \"special authority slashes\";\r\n } 
else if (this.input[this.pointer + 1] === 47) {\r\n this.state = \"path or authority\";\r\n ++this.pointer;\r\n } else {\r\n this.url.cannotBeABaseURL = true;\r\n this.url.path.push(\"\");\r\n this.state = \"cannot-be-a-base-URL path\";\r\n }\r\n } else if (!this.stateOverride) {\r\n this.buffer = \"\";\r\n this.state = \"no scheme\";\r\n this.pointer = -1;\r\n } else {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse no scheme\"] = function parseNoScheme(c) {\r\n if (this.base === null || (this.base.cannotBeABaseURL && c !== 35)) {\r\n return failure;\r\n } else if (this.base.cannotBeABaseURL && c === 35) {\r\n this.url.scheme = this.base.scheme;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n this.url.fragment = \"\";\r\n this.url.cannotBeABaseURL = true;\r\n this.state = \"fragment\";\r\n } else if (this.base.scheme === \"file\") {\r\n this.state = \"file\";\r\n --this.pointer;\r\n } else {\r\n this.state = \"relative\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse special relative or authority\"] = function parseSpecialRelativeOrAuthority(c) {\r\n if (c === 47 && this.input[this.pointer + 1] === 47) {\r\n this.state = \"special authority ignore slashes\";\r\n ++this.pointer;\r\n } else {\r\n this.parseError = true;\r\n this.state = \"relative\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse path or authority\"] = function parsePathOrAuthority(c) {\r\n if (c === 47) {\r\n this.state = \"authority\";\r\n } else {\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse relative\"] = function parseRelative(c) {\r\n this.url.scheme = this.base.scheme;\r\n if (isNaN(c)) {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n } else if (c === 47) {\r\n this.state = \"relative slash\";\r\n } else if (c === 63) {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (c === 35) {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else if (isSpecial(this.url) && c === 92) {\r\n this.parseError = true;\r\n this.state = \"relative slash\";\r\n } else {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice(0, this.base.path.length - 1);\r\n\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse relative slash\"] = function parseRelativeSlash(c) {\r\n if (isSpecial(this.url) && (c === 47 || c === 92)) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"special authority ignore slashes\";\r\n } else if (c === 47) {\r\n this.state = 
\"authority\";\r\n } else {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse special authority slashes\"] = function parseSpecialAuthoritySlashes(c) {\r\n if (c === 47 && this.input[this.pointer + 1] === 47) {\r\n this.state = \"special authority ignore slashes\";\r\n ++this.pointer;\r\n } else {\r\n this.parseError = true;\r\n this.state = \"special authority ignore slashes\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse special authority ignore slashes\"] = function parseSpecialAuthorityIgnoreSlashes(c) {\r\n if (c !== 47 && c !== 92) {\r\n this.state = \"authority\";\r\n --this.pointer;\r\n } else {\r\n this.parseError = true;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse authority\"] = function parseAuthority(c, cStr) {\r\n if (c === 64) {\r\n this.parseError = true;\r\n if (this.atFlag) {\r\n this.buffer = \"%40\" + this.buffer;\r\n }\r\n this.atFlag = true;\r\n\r\n // careful, this is based on buffer and has its own pointer (this.pointer != pointer) and inner chars\r\n const len = countSymbols(this.buffer);\r\n for (let pointer = 0; pointer < len; ++pointer) {\r\n const codePoint = this.buffer.codePointAt(pointer);\r\n\r\n if (codePoint === 58 && !this.passwordTokenSeenFlag) {\r\n this.passwordTokenSeenFlag = true;\r\n continue;\r\n }\r\n const encodedCodePoints = percentEncodeChar(codePoint, isUserinfoPercentEncode);\r\n if (this.passwordTokenSeenFlag) {\r\n this.url.password += encodedCodePoints;\r\n } else {\r\n this.url.username += encodedCodePoints;\r\n }\r\n }\r\n this.buffer = \"\";\r\n } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||\r\n (isSpecial(this.url) && c === 92)) {\r\n if (this.atFlag && this.buffer === \"\") {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n this.pointer -= countSymbols(this.buffer) + 1;\r\n this.buffer = \"\";\r\n this.state = \"host\";\r\n } else {\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse hostname\"] =\r\nURLStateMachine.prototype[\"parse host\"] = function parseHostName(c, cStr) {\r\n if (this.stateOverride && this.url.scheme === \"file\") {\r\n --this.pointer;\r\n this.state = \"file host\";\r\n } else if (c === 58 && !this.arrFlag) {\r\n if (this.buffer === \"\") {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n const host = parseHost(this.buffer, isSpecial(this.url));\r\n if (host === failure) {\r\n return failure;\r\n }\r\n\r\n this.url.host = host;\r\n this.buffer = \"\";\r\n this.state = \"port\";\r\n if (this.stateOverride === \"hostname\") {\r\n return false;\r\n }\r\n } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||\r\n (isSpecial(this.url) && c === 92)) {\r\n --this.pointer;\r\n if (isSpecial(this.url) && this.buffer === \"\") {\r\n this.parseError = true;\r\n return failure;\r\n } else if (this.stateOverride && this.buffer === \"\" &&\r\n (includesCredentials(this.url) || this.url.port !== null)) {\r\n this.parseError = true;\r\n return false;\r\n }\r\n\r\n const host = parseHost(this.buffer, isSpecial(this.url));\r\n if (host === failure) {\r\n return failure;\r\n }\r\n\r\n this.url.host = host;\r\n this.buffer = \"\";\r\n this.state = \"path start\";\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n } else {\r\n if (c === 
91) {\r\n this.arrFlag = true;\r\n } else if (c === 93) {\r\n this.arrFlag = false;\r\n }\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse port\"] = function parsePort(c, cStr) {\r\n if (isASCIIDigit(c)) {\r\n this.buffer += cStr;\r\n } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||\r\n (isSpecial(this.url) && c === 92) ||\r\n this.stateOverride) {\r\n if (this.buffer !== \"\") {\r\n const port = parseInt(this.buffer);\r\n if (port > Math.pow(2, 16) - 1) {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n this.url.port = port === defaultPort(this.url.scheme) ? null : port;\r\n this.buffer = \"\";\r\n }\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n this.state = \"path start\";\r\n --this.pointer;\r\n } else {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nconst fileOtherwiseCodePoints = new Set([47, 92, 63, 35]);\r\n\r\nURLStateMachine.prototype[\"parse file\"] = function parseFile(c) {\r\n this.url.scheme = \"file\";\r\n\r\n if (c === 47 || c === 92) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"file slash\";\r\n } else if (this.base !== null && this.base.scheme === \"file\") {\r\n if (isNaN(c)) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n } else if (c === 63) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (c === 35) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else {\r\n if (this.input.length - this.pointer - 1 === 0 || // remaining consists of 0 code points\r\n !isWindowsDriveLetterCodePoints(c, this.input[this.pointer + 1]) ||\r\n (this.input.length - this.pointer - 1 >= 2 && // remaining has at least 2 code points\r\n !fileOtherwiseCodePoints.has(this.input[this.pointer + 2]))) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n shortenPath(this.url);\r\n } else {\r\n this.parseError = true;\r\n }\r\n\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n } else {\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse file slash\"] = function parseFileSlash(c) {\r\n if (c === 47 || c === 92) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"file host\";\r\n } else {\r\n if (this.base !== null && this.base.scheme === \"file\") {\r\n if (isNormalizedWindowsDriveLetterString(this.base.path[0])) {\r\n this.url.path.push(this.base.path[0]);\r\n } else {\r\n this.url.host = this.base.host;\r\n }\r\n }\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse file host\"] = function parseFileHost(c, cStr) {\r\n if (isNaN(c) || c === 47 || c === 92 || c === 63 || c === 35) {\r\n --this.pointer;\r\n if (!this.stateOverride && isWindowsDriveLetterString(this.buffer)) {\r\n this.parseError = true;\r\n this.state = \"path\";\r\n } else if (this.buffer === \"\") {\r\n this.url.host = \"\";\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n this.state = \"path start\";\r\n } else {\r\n let host = parseHost(this.buffer, isSpecial(this.url));\r\n if (host === failure) {\r\n return failure;\r\n }\r\n if (host === \"localhost\") {\r\n host = 
\"\";\r\n }\r\n this.url.host = host;\r\n\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n\r\n this.buffer = \"\";\r\n this.state = \"path start\";\r\n }\r\n } else {\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse path start\"] = function parsePathStart(c) {\r\n if (isSpecial(this.url)) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"path\";\r\n\r\n if (c !== 47 && c !== 92) {\r\n --this.pointer;\r\n }\r\n } else if (!this.stateOverride && c === 63) {\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (!this.stateOverride && c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else if (c !== undefined) {\r\n this.state = \"path\";\r\n if (c !== 47) {\r\n --this.pointer;\r\n }\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse path\"] = function parsePath(c) {\r\n if (isNaN(c) || c === 47 || (isSpecial(this.url) && c === 92) ||\r\n (!this.stateOverride && (c === 63 || c === 35))) {\r\n if (isSpecial(this.url) && c === 92) {\r\n this.parseError = true;\r\n }\r\n\r\n if (isDoubleDot(this.buffer)) {\r\n shortenPath(this.url);\r\n if (c !== 47 && !(isSpecial(this.url) && c === 92)) {\r\n this.url.path.push(\"\");\r\n }\r\n } else if (isSingleDot(this.buffer) && c !== 47 &&\r\n !(isSpecial(this.url) && c === 92)) {\r\n this.url.path.push(\"\");\r\n } else if (!isSingleDot(this.buffer)) {\r\n if (this.url.scheme === \"file\" && this.url.path.length === 0 && isWindowsDriveLetterString(this.buffer)) {\r\n if (this.url.host !== \"\" && this.url.host !== null) {\r\n this.parseError = true;\r\n this.url.host = \"\";\r\n }\r\n this.buffer = this.buffer[0] + \":\";\r\n }\r\n this.url.path.push(this.buffer);\r\n }\r\n this.buffer = \"\";\r\n if (this.url.scheme === \"file\" && (c === undefined || c === 63 || c === 35)) {\r\n while (this.url.path.length > 1 && this.url.path[0] === \"\") {\r\n this.parseError = true;\r\n this.url.path.shift();\r\n }\r\n }\r\n if (c === 63) {\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n }\r\n if (c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n }\r\n } else {\r\n // TODO: If c is not a URL code point and not \"%\", parse error.\r\n\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n this.buffer += percentEncodeChar(c, isPathPercentEncode);\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse cannot-be-a-base-URL path\"] = function parseCannotBeABaseURLPath(c) {\r\n if (c === 63) {\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else {\r\n // TODO: Add: not a URL code point\r\n if (!isNaN(c) && c !== 37) {\r\n this.parseError = true;\r\n }\r\n\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n if (!isNaN(c)) {\r\n this.url.path[0] = this.url.path[0] + percentEncodeChar(c, isC0ControlPercentEncode);\r\n }\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse query\"] = function parseQuery(c, cStr) {\r\n if (isNaN(c) || (!this.stateOverride && c === 35)) {\r\n if (!isSpecial(this.url) || this.url.scheme === \"ws\" || this.url.scheme === \"wss\") {\r\n this.encodingOverride = \"utf-8\";\r\n }\r\n\r\n const buffer = new 
Buffer(this.buffer); // TODO: Use encoding override instead\r\n for (let i = 0; i < buffer.length; ++i) {\r\n if (buffer[i] < 0x21 || buffer[i] > 0x7E || buffer[i] === 0x22 || buffer[i] === 0x23 ||\r\n buffer[i] === 0x3C || buffer[i] === 0x3E) {\r\n this.url.query += percentEncode(buffer[i]);\r\n } else {\r\n this.url.query += String.fromCodePoint(buffer[i]);\r\n }\r\n }\r\n\r\n this.buffer = \"\";\r\n if (c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n }\r\n } else {\r\n // TODO: If c is not a URL code point and not \"%\", parse error.\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse fragment\"] = function parseFragment(c) {\r\n if (isNaN(c)) { // do nothing\r\n } else if (c === 0x0) {\r\n this.parseError = true;\r\n } else {\r\n // TODO: If c is not a URL code point and not \"%\", parse error.\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n this.url.fragment += percentEncodeChar(c, isC0ControlPercentEncode);\r\n }\r\n\r\n return true;\r\n};\r\n\r\nfunction serializeURL(url, excludeFragment) {\r\n let output = url.scheme + \":\";\r\n if (url.host !== null) {\r\n output += \"//\";\r\n\r\n if (url.username !== \"\" || url.password !== \"\") {\r\n output += url.username;\r\n if (url.password !== \"\") {\r\n output += \":\" + url.password;\r\n }\r\n output += \"@\";\r\n }\r\n\r\n output += serializeHost(url.host);\r\n\r\n if (url.port !== null) {\r\n output += \":\" + url.port;\r\n }\r\n } else if (url.host === null && url.scheme === \"file\") {\r\n output += \"//\";\r\n }\r\n\r\n if (url.cannotBeABaseURL) {\r\n output += url.path[0];\r\n } else {\r\n for (const string of url.path) {\r\n output += \"/\" + string;\r\n }\r\n }\r\n\r\n if (url.query !== null) {\r\n output += \"?\" + url.query;\r\n }\r\n\r\n if (!excludeFragment && url.fragment !== null) {\r\n output += \"#\" + url.fragment;\r\n }\r\n\r\n return output;\r\n}\r\n\r\nfunction serializeOrigin(tuple) {\r\n let result = tuple.scheme + \"://\";\r\n result += serializeHost(tuple.host);\r\n\r\n if (tuple.port !== null) {\r\n result += \":\" + tuple.port;\r\n }\r\n\r\n return result;\r\n}\r\n\r\nmodule.exports.serializeURL = serializeURL;\r\n\r\nmodule.exports.serializeURLOrigin = function (url) {\r\n // https://url.spec.whatwg.org/#concept-url-origin\r\n switch (url.scheme) {\r\n case \"blob\":\r\n try {\r\n return module.exports.serializeURLOrigin(module.exports.parseURL(url.path[0]));\r\n } catch (e) {\r\n // serializing an opaque origin returns \"null\"\r\n return \"null\";\r\n }\r\n case \"ftp\":\r\n case \"gopher\":\r\n case \"http\":\r\n case \"https\":\r\n case \"ws\":\r\n case \"wss\":\r\n return serializeOrigin({\r\n scheme: url.scheme,\r\n host: url.host,\r\n port: url.port\r\n });\r\n case \"file\":\r\n // spec says \"exercise to the reader\", chrome says \"file://\"\r\n return \"file://\";\r\n default:\r\n // serializing an opaque origin returns \"null\"\r\n return \"null\";\r\n }\r\n};\r\n\r\nmodule.exports.basicURLParse = function (input, options) {\r\n if (options === undefined) {\r\n options = {};\r\n }\r\n\r\n const usm = new URLStateMachine(input, options.baseURL, options.encodingOverride, options.url, options.stateOverride);\r\n if (usm.failure) {\r\n return \"failure\";\r\n 
}\r\n\r\n return usm.url;\r\n};\r\n\r\nmodule.exports.setTheUsername = function (url, username) {\r\n url.username = \"\";\r\n const decoded = punycode.ucs2.decode(username);\r\n for (let i = 0; i < decoded.length; ++i) {\r\n url.username += percentEncodeChar(decoded[i], isUserinfoPercentEncode);\r\n }\r\n};\r\n\r\nmodule.exports.setThePassword = function (url, password) {\r\n url.password = \"\";\r\n const decoded = punycode.ucs2.decode(password);\r\n for (let i = 0; i < decoded.length; ++i) {\r\n url.password += percentEncodeChar(decoded[i], isUserinfoPercentEncode);\r\n }\r\n};\r\n\r\nmodule.exports.serializeHost = serializeHost;\r\n\r\nmodule.exports.cannotHaveAUsernamePasswordPort = cannotHaveAUsernamePasswordPort;\r\n\r\nmodule.exports.serializeInteger = function (integer) {\r\n return String(integer);\r\n};\r\n\r\nmodule.exports.parseURL = function (input, options) {\r\n if (options === undefined) {\r\n options = {};\r\n }\r\n\r\n // We don't handle blobs, so this just delegates:\r\n return module.exports.basicURLParse(input, { baseURL: options.baseURL, encodingOverride: options.encodingOverride });\r\n};\r\n","\"use strict\";\n\nmodule.exports.mixin = function mixin(target, source) {\n const keys = Object.getOwnPropertyNames(source);\n for (let i = 0; i < keys.length; ++i) {\n Object.defineProperty(target, keys[i], Object.getOwnPropertyDescriptor(source, keys[i]));\n }\n};\n\nmodule.exports.wrapperSymbol = Symbol(\"wrapper\");\nmodule.exports.implSymbol = Symbol(\"impl\");\n\nmodule.exports.wrapperForImpl = function (impl) {\n return impl[module.exports.wrapperSymbol];\n};\n\nmodule.exports.implForWrapper = function (wrapper) {\n return wrapper[module.exports.implSymbol];\n};\n\n","// Returns a wrapper function that returns a wrapped callback\n// The wrapper function should do some stuff, and return a\n// presumably different callback function.\n// This makes sure that own properties are retained, so that\n// decorations and such are not lost along the way.\nmodule.exports = wrappy\nfunction wrappy (fn, cb) {\n if (fn && cb) return wrappy(fn)(cb)\n\n if (typeof fn !== 'function')\n throw new TypeError('need wrapper function')\n\n Object.keys(fn).forEach(function (k) {\n wrapper[k] = fn[k]\n })\n\n return wrapper\n\n function wrapper() {\n var args = new Array(arguments.length)\n for (var i = 0; i < args.length; i++) {\n args[i] = arguments[i]\n }\n var ret = fn.apply(this, args)\n var cb = args[args.length-1]\n if (typeof ret === 'function' && ret !== cb) {\n Object.keys(cb).forEach(function (k) {\n ret[k] = cb[k]\n })\n }\n return ret\n }\n}\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar Scalar = require('../nodes/Scalar.js');\nvar resolveBlockMap = require('./resolve-block-map.js');\nvar resolveBlockSeq = require('./resolve-block-seq.js');\nvar resolveFlowCollection = require('./resolve-flow-collection.js');\n\nfunction composeCollection(CN, ctx, token, tagToken, onError) {\n let coll;\n switch (token.type) {\n case 'block-map': {\n coll = resolveBlockMap.resolveBlockMap(CN, ctx, token, onError);\n break;\n }\n case 'block-seq': {\n coll = resolveBlockSeq.resolveBlockSeq(CN, ctx, token, onError);\n break;\n }\n case 'flow-collection': {\n coll = resolveFlowCollection.resolveFlowCollection(CN, ctx, token, onError);\n break;\n }\n }\n if (!tagToken)\n return coll;\n const tagName = ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg));\n if (!tagName)\n return coll;\n // Cast needed due to: 
https://github.com/Microsoft/TypeScript/issues/3841\n const Coll = coll.constructor;\n if (tagName === '!' || tagName === Coll.tagName) {\n coll.tag = Coll.tagName;\n return coll;\n }\n const expType = Node.isMap(coll) ? 'map' : 'seq';\n let tag = ctx.schema.tags.find(t => t.collection === expType && t.tag === tagName);\n if (!tag) {\n const kt = ctx.schema.knownTags[tagName];\n if (kt && kt.collection === expType) {\n ctx.schema.tags.push(Object.assign({}, kt, { default: false }));\n tag = kt;\n }\n else {\n onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, true);\n coll.tag = tagName;\n return coll;\n }\n }\n const res = tag.resolve(coll, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg), ctx.options);\n const node = Node.isNode(res)\n ? res\n : new Scalar.Scalar(res);\n node.range = coll.range;\n node.tag = tagName;\n if (tag?.format)\n node.format = tag.format;\n return node;\n}\n\nexports.composeCollection = composeCollection;\n","'use strict';\n\nvar Document = require('../doc/Document.js');\nvar composeNode = require('./compose-node.js');\nvar resolveEnd = require('./resolve-end.js');\nvar resolveProps = require('./resolve-props.js');\n\nfunction composeDoc(options, directives, { offset, start, value, end }, onError) {\n const opts = Object.assign({ _directives: directives }, options);\n const doc = new Document.Document(undefined, opts);\n const ctx = {\n atRoot: true,\n directives: doc.directives,\n options: doc.options,\n schema: doc.schema\n };\n const props = resolveProps.resolveProps(start, {\n indicator: 'doc-start',\n next: value ?? end?.[0],\n offset,\n onError,\n startOnNewline: true\n });\n if (props.found) {\n doc.directives.docStart = true;\n if (value &&\n (value.type === 'block-map' || value.type === 'block-seq') &&\n !props.hasNewline)\n onError(props.end, 'MISSING_CHAR', 'Block collection cannot start on same line with directives-end marker');\n }\n doc.contents = value\n ? composeNode.composeNode(ctx, value, props, onError)\n : composeNode.composeEmptyNode(ctx, props.end, start, null, props, onError);\n const contentEnd = doc.contents.range[2];\n const re = resolveEnd.resolveEnd(end, contentEnd, false, onError);\n if (re.comment)\n doc.comment = re.comment;\n doc.range = [offset, contentEnd, re.offset];\n return doc;\n}\n\nexports.composeDoc = composeDoc;\n","'use strict';\n\nvar Alias = require('../nodes/Alias.js');\nvar composeCollection = require('./compose-collection.js');\nvar composeScalar = require('./compose-scalar.js');\nvar resolveEnd = require('./resolve-end.js');\nvar utilEmptyScalarPosition = require('./util-empty-scalar-position.js');\n\nconst CN = { composeNode, composeEmptyNode };\nfunction composeNode(ctx, token, props, onError) {\n const { spaceBefore, comment, anchor, tag } = props;\n let node;\n let isSrcToken = true;\n switch (token.type) {\n case 'alias':\n node = composeAlias(ctx, token, onError);\n if (anchor || tag)\n onError(token, 'ALIAS_PROPS', 'An alias node must not specify any properties');\n break;\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar':\n case 'block-scalar':\n node = composeScalar.composeScalar(ctx, token, tag, onError);\n if (anchor)\n node.anchor = anchor.source.substring(1);\n break;\n case 'block-map':\n case 'block-seq':\n case 'flow-collection':\n node = composeCollection.composeCollection(CN, ctx, token, tag, onError);\n if (anchor)\n node.anchor = anchor.source.substring(1);\n break;\n default: {\n const message = token.type === 'error'\n ? 
token.message\n : `Unsupported token (type: ${token.type})`;\n onError(token, 'UNEXPECTED_TOKEN', message);\n node = composeEmptyNode(ctx, token.offset, undefined, null, props, onError);\n isSrcToken = false;\n }\n }\n if (anchor && node.anchor === '')\n onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string');\n if (spaceBefore)\n node.spaceBefore = true;\n if (comment) {\n if (token.type === 'scalar' && token.source === '')\n node.comment = comment;\n else\n node.commentBefore = comment;\n }\n // @ts-expect-error Type checking misses meaning of isSrcToken\n if (ctx.options.keepSourceTokens && isSrcToken)\n node.srcToken = token;\n return node;\n}\nfunction composeEmptyNode(ctx, offset, before, pos, { spaceBefore, comment, anchor, tag }, onError) {\n const token = {\n type: 'scalar',\n offset: utilEmptyScalarPosition.emptyScalarPosition(offset, before, pos),\n indent: -1,\n source: ''\n };\n const node = composeScalar.composeScalar(ctx, token, tag, onError);\n if (anchor) {\n node.anchor = anchor.source.substring(1);\n if (node.anchor === '')\n onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string');\n }\n if (spaceBefore)\n node.spaceBefore = true;\n if (comment)\n node.comment = comment;\n return node;\n}\nfunction composeAlias({ options }, { offset, source, end }, onError) {\n const alias = new Alias.Alias(source.substring(1));\n if (alias.source === '')\n onError(offset, 'BAD_ALIAS', 'Alias cannot be an empty string');\n if (alias.source.endsWith(':'))\n onError(offset + source.length - 1, 'BAD_ALIAS', 'Alias ending in : is ambiguous', true);\n const valueEnd = offset + source.length;\n const re = resolveEnd.resolveEnd(end, valueEnd, options.strict, onError);\n alias.range = [offset, valueEnd, re.offset];\n if (re.comment)\n alias.comment = re.comment;\n return alias;\n}\n\nexports.composeEmptyNode = composeEmptyNode;\nexports.composeNode = composeNode;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar Scalar = require('../nodes/Scalar.js');\nvar resolveBlockScalar = require('./resolve-block-scalar.js');\nvar resolveFlowScalar = require('./resolve-flow-scalar.js');\n\nfunction composeScalar(ctx, token, tagToken, onError) {\n const { value, type, comment, range } = token.type === 'block-scalar'\n ? resolveBlockScalar.resolveBlockScalar(token, ctx.options.strict, onError)\n : resolveFlowScalar.resolveFlowScalar(token, ctx.options.strict, onError);\n const tagName = tagToken\n ? ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg))\n : null;\n const tag = tagToken && tagName\n ? findScalarTagByName(ctx.schema, value, tagName, tagToken, onError)\n : token.type === 'scalar'\n ? findScalarTagByTest(ctx, value, token, onError)\n : ctx.schema[Node.SCALAR];\n let scalar;\n try {\n const res = tag.resolve(value, msg => onError(tagToken ?? token, 'TAG_RESOLVE_FAILED', msg), ctx.options);\n scalar = Node.isScalar(res) ? res : new Scalar.Scalar(res);\n }\n catch (error) {\n const msg = error instanceof Error ? error.message : String(error);\n onError(tagToken ?? 
token, 'TAG_RESOLVE_FAILED', msg);\n scalar = new Scalar.Scalar(value);\n }\n scalar.range = range;\n scalar.source = value;\n if (type)\n scalar.type = type;\n if (tagName)\n scalar.tag = tagName;\n if (tag.format)\n scalar.format = tag.format;\n if (comment)\n scalar.comment = comment;\n return scalar;\n}\nfunction findScalarTagByName(schema, value, tagName, tagToken, onError) {\n if (tagName === '!')\n return schema[Node.SCALAR]; // non-specific tag\n const matchWithTest = [];\n for (const tag of schema.tags) {\n if (!tag.collection && tag.tag === tagName) {\n if (tag.default && tag.test)\n matchWithTest.push(tag);\n else\n return tag;\n }\n }\n for (const tag of matchWithTest)\n if (tag.test?.test(value))\n return tag;\n const kt = schema.knownTags[tagName];\n if (kt && !kt.collection) {\n // Ensure that the known tag is available for stringifying,\n // but does not get used by default.\n schema.tags.push(Object.assign({}, kt, { default: false, test: undefined }));\n return kt;\n }\n onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, tagName !== 'tag:yaml.org,2002:str');\n return schema[Node.SCALAR];\n}\nfunction findScalarTagByTest({ directives, schema }, value, token, onError) {\n const tag = schema.tags.find(tag => tag.default && tag.test?.test(value)) || schema[Node.SCALAR];\n if (schema.compat) {\n const compat = schema.compat.find(tag => tag.default && tag.test?.test(value)) ??\n schema[Node.SCALAR];\n if (tag.tag !== compat.tag) {\n const ts = directives.tagString(tag.tag);\n const cs = directives.tagString(compat.tag);\n const msg = `Value may be parsed as either ${ts} or ${cs}`;\n onError(token, 'TAG_RESOLVE_FAILED', msg, true);\n }\n }\n return tag;\n}\n\nexports.composeScalar = composeScalar;\n","'use strict';\n\nvar directives = require('../doc/directives.js');\nvar Document = require('../doc/Document.js');\nvar errors = require('../errors.js');\nvar Node = require('../nodes/Node.js');\nvar composeDoc = require('./compose-doc.js');\nvar resolveEnd = require('./resolve-end.js');\n\nfunction getErrorPos(src) {\n if (typeof src === 'number')\n return [src, src + 1];\n if (Array.isArray(src))\n return src.length === 2 ? src : [src[0], src[1]];\n const { offset, source } = src;\n return [offset, offset + (typeof source === 'string' ? source.length : 1)];\n}\nfunction parsePrelude(prelude) {\n let comment = '';\n let atComment = false;\n let afterEmptyLine = false;\n for (let i = 0; i < prelude.length; ++i) {\n const source = prelude[i];\n switch (source[0]) {\n case '#':\n comment +=\n (comment === '' ? '' : afterEmptyLine ? 
'\\n\\n' : '\\n') +\n (source.substring(1) || ' ');\n atComment = true;\n afterEmptyLine = false;\n break;\n case '%':\n if (prelude[i + 1]?.[0] !== '#')\n i += 1;\n atComment = false;\n break;\n default:\n // This may be wrong after doc-end, but in that case it doesn't matter\n if (!atComment)\n afterEmptyLine = true;\n atComment = false;\n }\n }\n return { comment, afterEmptyLine };\n}\n/**\n * Compose a stream of CST nodes into a stream of YAML Documents.\n *\n * ```ts\n * import { Composer, Parser } from 'yaml'\n *\n * const src: string = ...\n * const tokens = new Parser().parse(src)\n * const docs = new Composer().compose(tokens)\n * ```\n */\nclass Composer {\n constructor(options = {}) {\n this.doc = null;\n this.atDirectives = false;\n this.prelude = [];\n this.errors = [];\n this.warnings = [];\n this.onError = (source, code, message, warning) => {\n const pos = getErrorPos(source);\n if (warning)\n this.warnings.push(new errors.YAMLWarning(pos, code, message));\n else\n this.errors.push(new errors.YAMLParseError(pos, code, message));\n };\n // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing\n this.directives = new directives.Directives({ version: options.version || '1.2' });\n this.options = options;\n }\n decorate(doc, afterDoc) {\n const { comment, afterEmptyLine } = parsePrelude(this.prelude);\n //console.log({ dc: doc.comment, prelude, comment })\n if (comment) {\n const dc = doc.contents;\n if (afterDoc) {\n doc.comment = doc.comment ? `${doc.comment}\\n${comment}` : comment;\n }\n else if (afterEmptyLine || doc.directives.docStart || !dc) {\n doc.commentBefore = comment;\n }\n else if (Node.isCollection(dc) && !dc.flow && dc.items.length > 0) {\n let it = dc.items[0];\n if (Node.isPair(it))\n it = it.key;\n const cb = it.commentBefore;\n it.commentBefore = cb ? `${comment}\\n${cb}` : comment;\n }\n else {\n const cb = dc.commentBefore;\n dc.commentBefore = cb ? `${comment}\\n${cb}` : comment;\n }\n }\n if (afterDoc) {\n Array.prototype.push.apply(doc.errors, this.errors);\n Array.prototype.push.apply(doc.warnings, this.warnings);\n }\n else {\n doc.errors = this.errors;\n doc.warnings = this.warnings;\n }\n this.prelude = [];\n this.errors = [];\n this.warnings = [];\n }\n /**\n * Current stream status information.\n *\n * Mostly useful at the end of input for an empty stream.\n */\n streamInfo() {\n return {\n comment: parsePrelude(this.prelude).comment,\n directives: this.directives,\n errors: this.errors,\n warnings: this.warnings\n };\n }\n /**\n * Compose tokens into documents.\n *\n * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document.\n * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly.\n */\n *compose(tokens, forceDoc = false, endOffset = -1) {\n for (const token of tokens)\n yield* this.next(token);\n yield* this.end(forceDoc, endOffset);\n }\n /** Advance the composer by one CST token. 
*/\n *next(token) {\n if (process.env.LOG_STREAM)\n console.dir(token, { depth: null });\n switch (token.type) {\n case 'directive':\n this.directives.add(token.source, (offset, message, warning) => {\n const pos = getErrorPos(token);\n pos[0] += offset;\n this.onError(pos, 'BAD_DIRECTIVE', message, warning);\n });\n this.prelude.push(token.source);\n this.atDirectives = true;\n break;\n case 'document': {\n const doc = composeDoc.composeDoc(this.options, this.directives, token, this.onError);\n if (this.atDirectives && !doc.directives.docStart)\n this.onError(token, 'MISSING_CHAR', 'Missing directives-end/doc-start indicator line');\n this.decorate(doc, false);\n if (this.doc)\n yield this.doc;\n this.doc = doc;\n this.atDirectives = false;\n break;\n }\n case 'byte-order-mark':\n case 'space':\n break;\n case 'comment':\n case 'newline':\n this.prelude.push(token.source);\n break;\n case 'error': {\n const msg = token.source\n ? `${token.message}: ${JSON.stringify(token.source)}`\n : token.message;\n const error = new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg);\n if (this.atDirectives || !this.doc)\n this.errors.push(error);\n else\n this.doc.errors.push(error);\n break;\n }\n case 'doc-end': {\n if (!this.doc) {\n const msg = 'Unexpected doc-end without preceding document';\n this.errors.push(new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg));\n break;\n }\n this.doc.directives.docEnd = true;\n const end = resolveEnd.resolveEnd(token.end, token.offset + token.source.length, this.doc.options.strict, this.onError);\n this.decorate(this.doc, true);\n if (end.comment) {\n const dc = this.doc.comment;\n this.doc.comment = dc ? `${dc}\\n${end.comment}` : end.comment;\n }\n this.doc.range[2] = end.offset;\n break;\n }\n default:\n this.errors.push(new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', `Unsupported token ${token.type}`));\n }\n }\n /**\n * Call at end of input to yield any remaining document.\n *\n * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document.\n * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly.\n */\n *end(forceDoc = false, endOffset = -1) {\n if (this.doc) {\n this.decorate(this.doc, true);\n yield this.doc;\n this.doc = null;\n }\n else if (forceDoc) {\n const opts = Object.assign({ _directives: this.directives }, this.options);\n const doc = new Document.Document(undefined, opts);\n if (this.atDirectives)\n this.onError(endOffset, 'MISSING_CHAR', 'Missing directives-end indicator line');\n doc.range = [0, endOffset, endOffset];\n this.decorate(doc, false);\n yield doc;\n }\n }\n}\n\nexports.Composer = Composer;\n","'use strict';\n\nvar Pair = require('../nodes/Pair.js');\nvar YAMLMap = require('../nodes/YAMLMap.js');\nvar resolveProps = require('./resolve-props.js');\nvar utilContainsNewline = require('./util-contains-newline.js');\nvar utilFlowIndentCheck = require('./util-flow-indent-check.js');\nvar utilMapIncludes = require('./util-map-includes.js');\n\nconst startColMsg = 'All mapping items must start at the same column';\nfunction resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError) {\n const map = new YAMLMap.YAMLMap(ctx.schema);\n if (ctx.atRoot)\n ctx.atRoot = false;\n let offset = bm.offset;\n for (const collItem of bm.items) {\n const { start, key, sep, value } = collItem;\n // key properties\n 
const keyProps = resolveProps.resolveProps(start, {\n indicator: 'explicit-key-ind',\n next: key ?? sep?.[0],\n offset,\n onError,\n startOnNewline: true\n });\n const implicitKey = !keyProps.found;\n if (implicitKey) {\n if (key) {\n if (key.type === 'block-seq')\n onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'A block sequence may not be used as an implicit map key');\n else if ('indent' in key && key.indent !== bm.indent)\n onError(offset, 'BAD_INDENT', startColMsg);\n }\n if (!keyProps.anchor && !keyProps.tag && !sep) {\n // TODO: assert being at last item?\n if (keyProps.comment) {\n if (map.comment)\n map.comment += '\\n' + keyProps.comment;\n else\n map.comment = keyProps.comment;\n }\n continue;\n }\n if (keyProps.hasNewlineAfterProp || utilContainsNewline.containsNewline(key)) {\n onError(key ?? start[start.length - 1], 'MULTILINE_IMPLICIT_KEY', 'Implicit keys need to be on a single line');\n }\n }\n else if (keyProps.found?.indent !== bm.indent) {\n onError(offset, 'BAD_INDENT', startColMsg);\n }\n // key value\n const keyStart = keyProps.end;\n const keyNode = key\n ? composeNode(ctx, key, keyProps, onError)\n : composeEmptyNode(ctx, keyStart, start, null, keyProps, onError);\n if (ctx.schema.compat)\n utilFlowIndentCheck.flowIndentCheck(bm.indent, key, onError);\n if (utilMapIncludes.mapIncludes(ctx, map.items, keyNode))\n onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique');\n // value properties\n const valueProps = resolveProps.resolveProps(sep ?? [], {\n indicator: 'map-value-ind',\n next: value,\n offset: keyNode.range[2],\n onError,\n startOnNewline: !key || key.type === 'block-scalar'\n });\n offset = valueProps.end;\n if (valueProps.found) {\n if (implicitKey) {\n if (value?.type === 'block-map' && !valueProps.hasNewline)\n onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'Nested mappings are not allowed in compact mappings');\n if (ctx.options.strict &&\n keyProps.start < valueProps.found.offset - 1024)\n onError(keyNode.range, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit block mapping key');\n }\n // value value\n const valueNode = value\n ? composeNode(ctx, value, valueProps, onError)\n : composeEmptyNode(ctx, offset, sep, null, valueProps, onError);\n if (ctx.schema.compat)\n utilFlowIndentCheck.flowIndentCheck(bm.indent, value, onError);\n offset = valueNode.range[2];\n const pair = new Pair.Pair(keyNode, valueNode);\n if (ctx.options.keepSourceTokens)\n pair.srcToken = collItem;\n map.items.push(pair);\n }\n else {\n // key with no value\n if (implicitKey)\n onError(keyNode.range, 'MISSING_CHAR', 'Implicit map keys need to be followed by map values');\n if (valueProps.comment) {\n if (keyNode.comment)\n keyNode.comment += '\\n' + valueProps.comment;\n else\n keyNode.comment = valueProps.comment;\n }\n const pair = new Pair.Pair(keyNode);\n if (ctx.options.keepSourceTokens)\n pair.srcToken = collItem;\n map.items.push(pair);\n }\n }\n map.range = [bm.offset, offset, offset];\n return map;\n}\n\nexports.resolveBlockMap = resolveBlockMap;\n","'use strict';\n\nvar Scalar = require('../nodes/Scalar.js');\n\nfunction resolveBlockScalar(scalar, strict, onError) {\n const start = scalar.offset;\n const header = parseBlockScalarHeader(scalar, strict, onError);\n if (!header)\n return { value: '', type: null, comment: '', range: [start, start, start] };\n const type = header.mode === '>' ? Scalar.Scalar.BLOCK_FOLDED : Scalar.Scalar.BLOCK_LITERAL;\n const lines = scalar.source ? 
splitLines(scalar.source) : [];\n // determine the end of content & start of chomping\n let chompStart = lines.length;\n for (let i = lines.length - 1; i >= 0; --i) {\n const content = lines[i][1];\n if (content === '' || content === '\\r')\n chompStart = i;\n else\n break;\n }\n // shortcut for empty contents\n if (chompStart === 0) {\n const value = header.chomp === '+' && lines.length > 0\n ? '\\n'.repeat(Math.max(1, lines.length - 1))\n : '';\n let end = start + header.length;\n if (scalar.source)\n end += scalar.source.length;\n return { value, type, comment: header.comment, range: [start, end, end] };\n }\n // find the indentation level to trim from start\n let trimIndent = scalar.indent + header.indent;\n let offset = scalar.offset + header.length;\n let contentStart = 0;\n for (let i = 0; i < chompStart; ++i) {\n const [indent, content] = lines[i];\n if (content === '' || content === '\\r') {\n if (header.indent === 0 && indent.length > trimIndent)\n trimIndent = indent.length;\n }\n else {\n if (indent.length < trimIndent) {\n const message = 'Block scalars with more-indented leading empty lines must use an explicit indentation indicator';\n onError(offset + indent.length, 'MISSING_CHAR', message);\n }\n if (header.indent === 0)\n trimIndent = indent.length;\n contentStart = i;\n break;\n }\n offset += indent.length + content.length + 1;\n }\n // include trailing more-indented empty lines in content\n for (let i = lines.length - 1; i >= chompStart; --i) {\n if (lines[i][0].length > trimIndent)\n chompStart = i + 1;\n }\n let value = '';\n let sep = '';\n let prevMoreIndented = false;\n // leading whitespace is kept intact\n for (let i = 0; i < contentStart; ++i)\n value += lines[i][0].slice(trimIndent) + '\\n';\n for (let i = contentStart; i < chompStart; ++i) {\n let [indent, content] = lines[i];\n offset += indent.length + content.length + 1;\n const crlf = content[content.length - 1] === '\\r';\n if (crlf)\n content = content.slice(0, -1);\n /* istanbul ignore if already caught in lexer */\n if (content && indent.length < trimIndent) {\n const src = header.indent\n ? 'explicit indentation indicator'\n : 'first line';\n const message = `Block scalar lines must not be less indented than their ${src}`;\n onError(offset - content.length - (crlf ? 
2 : 1), 'BAD_INDENT', message);\n indent = '';\n }\n if (type === Scalar.Scalar.BLOCK_LITERAL) {\n value += sep + indent.slice(trimIndent) + content;\n sep = '\\n';\n }\n else if (indent.length > trimIndent || content[0] === '\\t') {\n // more-indented content within a folded block\n if (sep === ' ')\n sep = '\\n';\n else if (!prevMoreIndented && sep === '\\n')\n sep = '\\n\\n';\n value += sep + indent.slice(trimIndent) + content;\n sep = '\\n';\n prevMoreIndented = true;\n }\n else if (content === '') {\n // empty line\n if (sep === '\\n')\n value += '\\n';\n else\n sep = '\\n';\n }\n else {\n value += sep + content;\n sep = ' ';\n prevMoreIndented = false;\n }\n }\n switch (header.chomp) {\n case '-':\n break;\n case '+':\n for (let i = chompStart; i < lines.length; ++i)\n value += '\\n' + lines[i][0].slice(trimIndent);\n if (value[value.length - 1] !== '\\n')\n value += '\\n';\n break;\n default:\n value += '\\n';\n }\n const end = start + header.length + scalar.source.length;\n return { value, type, comment: header.comment, range: [start, end, end] };\n}\nfunction parseBlockScalarHeader({ offset, props }, strict, onError) {\n /* istanbul ignore if should not happen */\n if (props[0].type !== 'block-scalar-header') {\n onError(props[0], 'IMPOSSIBLE', 'Block scalar header not found');\n return null;\n }\n const { source } = props[0];\n const mode = source[0];\n let indent = 0;\n let chomp = '';\n let error = -1;\n for (let i = 1; i < source.length; ++i) {\n const ch = source[i];\n if (!chomp && (ch === '-' || ch === '+'))\n chomp = ch;\n else {\n const n = Number(ch);\n if (!indent && n)\n indent = n;\n else if (error === -1)\n error = offset + i;\n }\n }\n if (error !== -1)\n onError(error, 'UNEXPECTED_TOKEN', `Block scalar header includes extra characters: ${source}`);\n let hasSpace = false;\n let comment = '';\n let length = source.length;\n for (let i = 1; i < props.length; ++i) {\n const token = props[i];\n switch (token.type) {\n case 'space':\n hasSpace = true;\n // fallthrough\n case 'newline':\n length += token.source.length;\n break;\n case 'comment':\n if (strict && !hasSpace) {\n const message = 'Comments must be separated from other tokens by white space characters';\n onError(token, 'MISSING_CHAR', message);\n }\n length += token.source.length;\n comment = token.source.substring(1);\n break;\n case 'error':\n onError(token, 'UNEXPECTED_TOKEN', token.message);\n length += token.source.length;\n break;\n /* istanbul ignore next should not happen */\n default: {\n const message = `Unexpected token in block scalar header: ${token.type}`;\n onError(token, 'UNEXPECTED_TOKEN', message);\n const ts = token.source;\n if (ts && typeof ts === 'string')\n length += ts.length;\n }\n }\n }\n return { mode, indent, chomp, comment, length };\n}\n/** @returns Array of lines split up as `[indent, content]` */\nfunction splitLines(source) {\n const split = source.split(/\\n( *)/);\n const first = split[0];\n const m = first.match(/^( *)/);\n const line0 = m?.[1]\n ? 
[m[1], first.slice(m[1].length)]\n : ['', first];\n const lines = [line0];\n for (let i = 1; i < split.length; i += 2)\n lines.push([split[i], split[i + 1]]);\n return lines;\n}\n\nexports.resolveBlockScalar = resolveBlockScalar;\n","'use strict';\n\nvar YAMLSeq = require('../nodes/YAMLSeq.js');\nvar resolveProps = require('./resolve-props.js');\nvar utilFlowIndentCheck = require('./util-flow-indent-check.js');\n\nfunction resolveBlockSeq({ composeNode, composeEmptyNode }, ctx, bs, onError) {\n const seq = new YAMLSeq.YAMLSeq(ctx.schema);\n if (ctx.atRoot)\n ctx.atRoot = false;\n let offset = bs.offset;\n for (const { start, value } of bs.items) {\n const props = resolveProps.resolveProps(start, {\n indicator: 'seq-item-ind',\n next: value,\n offset,\n onError,\n startOnNewline: true\n });\n offset = props.end;\n if (!props.found) {\n if (props.anchor || props.tag || value) {\n if (value && value.type === 'block-seq')\n onError(offset, 'BAD_INDENT', 'All sequence items must start at the same column');\n else\n onError(offset, 'MISSING_CHAR', 'Sequence item without - indicator');\n }\n else {\n // TODO: assert being at last item?\n if (props.comment)\n seq.comment = props.comment;\n continue;\n }\n }\n const node = value\n ? composeNode(ctx, value, props, onError)\n : composeEmptyNode(ctx, offset, start, null, props, onError);\n if (ctx.schema.compat)\n utilFlowIndentCheck.flowIndentCheck(bs.indent, value, onError);\n offset = node.range[2];\n seq.items.push(node);\n }\n seq.range = [bs.offset, offset, offset];\n return seq;\n}\n\nexports.resolveBlockSeq = resolveBlockSeq;\n","'use strict';\n\nfunction resolveEnd(end, offset, reqSpace, onError) {\n let comment = '';\n if (end) {\n let hasSpace = false;\n let sep = '';\n for (const token of end) {\n const { source, type } = token;\n switch (type) {\n case 'space':\n hasSpace = true;\n break;\n case 'comment': {\n if (reqSpace && !hasSpace)\n onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters');\n const cb = source.substring(1) || ' ';\n if (!comment)\n comment = cb;\n else\n comment += sep + cb;\n sep = '';\n break;\n }\n case 'newline':\n if (comment)\n sep += source;\n hasSpace = true;\n break;\n default:\n onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${type} at node end`);\n }\n offset += source.length;\n }\n }\n return { comment, offset };\n}\n\nexports.resolveEnd = resolveEnd;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar Pair = require('../nodes/Pair.js');\nvar YAMLMap = require('../nodes/YAMLMap.js');\nvar YAMLSeq = require('../nodes/YAMLSeq.js');\nvar resolveEnd = require('./resolve-end.js');\nvar resolveProps = require('./resolve-props.js');\nvar utilContainsNewline = require('./util-contains-newline.js');\nvar utilMapIncludes = require('./util-map-includes.js');\n\nconst blockMsg = 'Block collections are not allowed within flow collections';\nconst isBlock = (token) => token && (token.type === 'block-map' || token.type === 'block-seq');\nfunction resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onError) {\n const isMap = fc.start.source === '{';\n const fcName = isMap ? 'flow map' : 'flow sequence';\n const coll = isMap\n ? 
new YAMLMap.YAMLMap(ctx.schema)\n : new YAMLSeq.YAMLSeq(ctx.schema);\n coll.flow = true;\n const atRoot = ctx.atRoot;\n if (atRoot)\n ctx.atRoot = false;\n let offset = fc.offset + fc.start.source.length;\n for (let i = 0; i < fc.items.length; ++i) {\n const collItem = fc.items[i];\n const { start, key, sep, value } = collItem;\n const props = resolveProps.resolveProps(start, {\n flow: fcName,\n indicator: 'explicit-key-ind',\n next: key ?? sep?.[0],\n offset,\n onError,\n startOnNewline: false\n });\n if (!props.found) {\n if (!props.anchor && !props.tag && !sep && !value) {\n if (i === 0 && props.comma)\n onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`);\n else if (i < fc.items.length - 1)\n onError(props.start, 'UNEXPECTED_TOKEN', `Unexpected empty item in ${fcName}`);\n if (props.comment) {\n if (coll.comment)\n coll.comment += '\\n' + props.comment;\n else\n coll.comment = props.comment;\n }\n offset = props.end;\n continue;\n }\n if (!isMap && ctx.options.strict && utilContainsNewline.containsNewline(key))\n onError(key, // checked by containsNewline()\n 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line');\n }\n if (i === 0) {\n if (props.comma)\n onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`);\n }\n else {\n if (!props.comma)\n onError(props.start, 'MISSING_CHAR', `Missing , between ${fcName} items`);\n if (props.comment) {\n let prevItemComment = '';\n loop: for (const st of start) {\n switch (st.type) {\n case 'comma':\n case 'space':\n break;\n case 'comment':\n prevItemComment = st.source.substring(1);\n break loop;\n default:\n break loop;\n }\n }\n if (prevItemComment) {\n let prev = coll.items[coll.items.length - 1];\n if (Node.isPair(prev))\n prev = prev.value ?? prev.key;\n if (prev.comment)\n prev.comment += '\\n' + prevItemComment;\n else\n prev.comment = prevItemComment;\n props.comment = props.comment.substring(prevItemComment.length + 1);\n }\n }\n }\n if (!isMap && !sep && !props.found) {\n // item is a value in a seq\n // → key & sep are empty, start does not include ? or :\n const valueNode = value\n ? composeNode(ctx, value, props, onError)\n : composeEmptyNode(ctx, props.end, sep, null, props, onError);\n coll.items.push(valueNode);\n offset = valueNode.range[2];\n if (isBlock(value))\n onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg);\n }\n else {\n // item is a key+value pair\n // key value\n const keyStart = props.end;\n const keyNode = key\n ? composeNode(ctx, key, props, onError)\n : composeEmptyNode(ctx, keyStart, start, null, props, onError);\n if (isBlock(key))\n onError(keyNode.range, 'BLOCK_IN_FLOW', blockMsg);\n // value properties\n const valueProps = resolveProps.resolveProps(sep ?? 
[], {\n flow: fcName,\n indicator: 'map-value-ind',\n next: value,\n offset: keyNode.range[2],\n onError,\n startOnNewline: false\n });\n if (valueProps.found) {\n if (!isMap && !props.found && ctx.options.strict) {\n if (sep)\n for (const st of sep) {\n if (st === valueProps.found)\n break;\n if (st.type === 'newline') {\n onError(st, 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line');\n break;\n }\n }\n if (props.start < valueProps.found.offset - 1024)\n onError(valueProps.found, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit flow sequence key');\n }\n }\n else if (value) {\n if ('source' in value && value.source && value.source[0] === ':')\n onError(value, 'MISSING_CHAR', `Missing space after : in ${fcName}`);\n else\n onError(valueProps.start, 'MISSING_CHAR', `Missing , or : between ${fcName} items`);\n }\n // value value\n const valueNode = value\n ? composeNode(ctx, value, valueProps, onError)\n : valueProps.found\n ? composeEmptyNode(ctx, valueProps.end, sep, null, valueProps, onError)\n : null;\n if (valueNode) {\n if (isBlock(value))\n onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg);\n }\n else if (valueProps.comment) {\n if (keyNode.comment)\n keyNode.comment += '\\n' + valueProps.comment;\n else\n keyNode.comment = valueProps.comment;\n }\n const pair = new Pair.Pair(keyNode, valueNode);\n if (ctx.options.keepSourceTokens)\n pair.srcToken = collItem;\n if (isMap) {\n const map = coll;\n if (utilMapIncludes.mapIncludes(ctx, map.items, keyNode))\n onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique');\n map.items.push(pair);\n }\n else {\n const map = new YAMLMap.YAMLMap(ctx.schema);\n map.flow = true;\n map.items.push(pair);\n coll.items.push(map);\n }\n offset = valueNode ? valueNode.range[2] : valueProps.end;\n }\n }\n const expectedEnd = isMap ? '}' : ']';\n const [ce, ...ee] = fc.end;\n let cePos = offset;\n if (ce && ce.source === expectedEnd)\n cePos = ce.offset + ce.source.length;\n else {\n const name = fcName[0].toUpperCase() + fcName.substring(1);\n const msg = atRoot\n ? `${name} must end with a ${expectedEnd}`\n : `${name} in block collection must be sufficiently indented and end with a ${expectedEnd}`;\n onError(offset, atRoot ? 
'MISSING_CHAR' : 'BAD_INDENT', msg);\n if (ce && ce.source.length !== 1)\n ee.unshift(ce);\n }\n if (ee.length > 0) {\n const end = resolveEnd.resolveEnd(ee, cePos, ctx.options.strict, onError);\n if (end.comment) {\n if (coll.comment)\n coll.comment += '\\n' + end.comment;\n else\n coll.comment = end.comment;\n }\n coll.range = [fc.offset, cePos, end.offset];\n }\n else {\n coll.range = [fc.offset, cePos, cePos];\n }\n return coll;\n}\n\nexports.resolveFlowCollection = resolveFlowCollection;\n","'use strict';\n\nvar Scalar = require('../nodes/Scalar.js');\nvar resolveEnd = require('./resolve-end.js');\n\nfunction resolveFlowScalar(scalar, strict, onError) {\n const { offset, type, source, end } = scalar;\n let _type;\n let value;\n const _onError = (rel, code, msg) => onError(offset + rel, code, msg);\n switch (type) {\n case 'scalar':\n _type = Scalar.Scalar.PLAIN;\n value = plainValue(source, _onError);\n break;\n case 'single-quoted-scalar':\n _type = Scalar.Scalar.QUOTE_SINGLE;\n value = singleQuotedValue(source, _onError);\n break;\n case 'double-quoted-scalar':\n _type = Scalar.Scalar.QUOTE_DOUBLE;\n value = doubleQuotedValue(source, _onError);\n break;\n /* istanbul ignore next should not happen */\n default:\n onError(scalar, 'UNEXPECTED_TOKEN', `Expected a flow scalar value, but found: ${type}`);\n return {\n value: '',\n type: null,\n comment: '',\n range: [offset, offset + source.length, offset + source.length]\n };\n }\n const valueEnd = offset + source.length;\n const re = resolveEnd.resolveEnd(end, valueEnd, strict, onError);\n return {\n value,\n type: _type,\n comment: re.comment,\n range: [offset, valueEnd, re.offset]\n };\n}\nfunction plainValue(source, onError) {\n let badChar = '';\n switch (source[0]) {\n /* istanbul ignore next should not happen */\n case '\\t':\n badChar = 'a tab character';\n break;\n case ',':\n badChar = 'flow indicator character ,';\n break;\n case '%':\n badChar = 'directive indicator character %';\n break;\n case '|':\n case '>': {\n badChar = `block scalar indicator ${source[0]}`;\n break;\n }\n case '@':\n case '`': {\n badChar = `reserved character ${source[0]}`;\n break;\n }\n }\n if (badChar)\n onError(0, 'BAD_SCALAR_START', `Plain value cannot start with ${badChar}`);\n return foldLines(source);\n}\nfunction singleQuotedValue(source, onError) {\n if (source[source.length - 1] !== \"'\" || source.length === 1)\n onError(source.length, 'MISSING_CHAR', \"Missing closing 'quote\");\n return foldLines(source.slice(1, -1)).replace(/''/g, \"'\");\n}\nfunction foldLines(source) {\n /**\n * The negative lookbehind here and in the `re` RegExp is to\n * prevent causing a polynomial search time in certain cases.\n *\n * The try-catch is for Safari, which doesn't support this yet:\n * https://caniuse.com/js-regexp-lookbehind\n */\n let first, line;\n try {\n first = new RegExp('(.*?)(? wsStart ? 
source.slice(wsStart, i + 1) : ch;\n }\n else {\n res += ch;\n }\n }\n if (source[source.length - 1] !== '\"' || source.length === 1)\n onError(source.length, 'MISSING_CHAR', 'Missing closing \"quote');\n return res;\n}\n/**\n * Fold a single newline into a space, multiple newlines to N - 1 newlines.\n * Presumes `source[offset] === '\\n'`\n */\nfunction foldNewline(source, offset) {\n let fold = '';\n let ch = source[offset + 1];\n while (ch === ' ' || ch === '\\t' || ch === '\\n' || ch === '\\r') {\n if (ch === '\\r' && source[offset + 2] !== '\\n')\n break;\n if (ch === '\\n')\n fold += '\\n';\n offset += 1;\n ch = source[offset + 1];\n }\n if (!fold)\n fold = ' ';\n return { fold, offset };\n}\nconst escapeCodes = {\n '0': '\\0',\n a: '\\x07',\n b: '\\b',\n e: '\\x1b',\n f: '\\f',\n n: '\\n',\n r: '\\r',\n t: '\\t',\n v: '\\v',\n N: '\\u0085',\n _: '\\u00a0',\n L: '\\u2028',\n P: '\\u2029',\n ' ': ' ',\n '\"': '\"',\n '/': '/',\n '\\\\': '\\\\',\n '\\t': '\\t'\n};\nfunction parseCharCode(source, offset, length, onError) {\n const cc = source.substr(offset, length);\n const ok = cc.length === length && /^[0-9a-fA-F]+$/.test(cc);\n const code = ok ? parseInt(cc, 16) : NaN;\n if (isNaN(code)) {\n const raw = source.substr(offset - 2, length + 2);\n onError(offset - 2, 'BAD_DQ_ESCAPE', `Invalid escape sequence ${raw}`);\n return raw;\n }\n return String.fromCodePoint(code);\n}\n\nexports.resolveFlowScalar = resolveFlowScalar;\n","'use strict';\n\nfunction resolveProps(tokens, { flow, indicator, next, offset, onError, startOnNewline }) {\n let spaceBefore = false;\n let atNewline = startOnNewline;\n let hasSpace = startOnNewline;\n let comment = '';\n let commentSep = '';\n let hasNewline = false;\n let hasNewlineAfterProp = false;\n let reqSpace = false;\n let anchor = null;\n let tag = null;\n let comma = null;\n let found = null;\n let start = null;\n for (const token of tokens) {\n if (reqSpace) {\n if (token.type !== 'space' &&\n token.type !== 'newline' &&\n token.type !== 'comma')\n onError(token.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space');\n reqSpace = false;\n }\n switch (token.type) {\n case 'space':\n // At the doc level, tabs at line start may be parsed\n // as leading white space rather than indentation.\n // In a flow collection, only the parser handles indent.\n if (!flow &&\n atNewline &&\n indicator !== 'doc-start' &&\n token.source[0] === '\\t')\n onError(token, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation');\n hasSpace = true;\n break;\n case 'comment': {\n if (!hasSpace)\n onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters');\n const cb = token.source.substring(1) || ' ';\n if (!comment)\n comment = cb;\n else\n comment += commentSep + cb;\n commentSep = '';\n atNewline = false;\n break;\n }\n case 'newline':\n if (atNewline) {\n if (comment)\n comment += token.source;\n else\n spaceBefore = true;\n }\n else\n commentSep += token.source;\n atNewline = true;\n hasNewline = true;\n if (anchor || tag)\n hasNewlineAfterProp = true;\n hasSpace = true;\n break;\n case 'anchor':\n if (anchor)\n onError(token, 'MULTIPLE_ANCHORS', 'A node can have at most one anchor');\n if (token.source.endsWith(':'))\n onError(token.offset + token.source.length - 1, 'BAD_ALIAS', 'Anchor ending in : is ambiguous', true);\n anchor = token;\n if (start === null)\n start = token.offset;\n atNewline = false;\n hasSpace = false;\n reqSpace = true;\n break;\n case 'tag': {\n if (tag)\n 
onError(token, 'MULTIPLE_TAGS', 'A node can have at most one tag');\n tag = token;\n if (start === null)\n start = token.offset;\n atNewline = false;\n hasSpace = false;\n reqSpace = true;\n break;\n }\n case indicator:\n // Could here handle preceding comments differently\n if (anchor || tag)\n onError(token, 'BAD_PROP_ORDER', `Anchors and tags must be after the ${token.source} indicator`);\n if (found)\n onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.source} in ${flow ?? 'collection'}`);\n found = token;\n atNewline = false;\n hasSpace = false;\n break;\n case 'comma':\n if (flow) {\n if (comma)\n onError(token, 'UNEXPECTED_TOKEN', `Unexpected , in ${flow}`);\n comma = token;\n atNewline = false;\n hasSpace = false;\n break;\n }\n // else fallthrough\n default:\n onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.type} token`);\n atNewline = false;\n hasSpace = false;\n }\n }\n const last = tokens[tokens.length - 1];\n const end = last ? last.offset + last.source.length : offset;\n if (reqSpace &&\n next &&\n next.type !== 'space' &&\n next.type !== 'newline' &&\n next.type !== 'comma' &&\n (next.type !== 'scalar' || next.source !== ''))\n onError(next.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space');\n return {\n comma,\n found,\n spaceBefore,\n comment,\n hasNewline,\n hasNewlineAfterProp,\n anchor,\n tag,\n end,\n start: start ?? end\n };\n}\n\nexports.resolveProps = resolveProps;\n","'use strict';\n\nfunction containsNewline(key) {\n if (!key)\n return null;\n switch (key.type) {\n case 'alias':\n case 'scalar':\n case 'double-quoted-scalar':\n case 'single-quoted-scalar':\n if (key.source.includes('\\n'))\n return true;\n if (key.end)\n for (const st of key.end)\n if (st.type === 'newline')\n return true;\n return false;\n case 'flow-collection':\n for (const it of key.items) {\n for (const st of it.start)\n if (st.type === 'newline')\n return true;\n if (it.sep)\n for (const st of it.sep)\n if (st.type === 'newline')\n return true;\n if (containsNewline(it.key) || containsNewline(it.value))\n return true;\n }\n return false;\n default:\n return true;\n }\n}\n\nexports.containsNewline = containsNewline;\n","'use strict';\n\nfunction emptyScalarPosition(offset, before, pos) {\n if (before) {\n if (pos === null)\n pos = before.length;\n for (let i = pos - 1; i >= 0; --i) {\n let st = before[i];\n switch (st.type) {\n case 'space':\n case 'comment':\n case 'newline':\n offset -= st.source.length;\n continue;\n }\n // Technically, an empty scalar is immediately after the last non-empty\n // node, but it's more useful to place it after any whitespace.\n st = before[++i];\n while (st?.type === 'space') {\n offset += st.source.length;\n st = before[++i];\n }\n break;\n }\n }\n return offset;\n}\n\nexports.emptyScalarPosition = emptyScalarPosition;\n","'use strict';\n\nvar utilContainsNewline = require('./util-contains-newline.js');\n\nfunction flowIndentCheck(indent, fc, onError) {\n if (fc?.type === 'flow-collection') {\n const end = fc.end[0];\n if (end.indent === indent &&\n (end.source === ']' || end.source === '}') &&\n utilContainsNewline.containsNewline(fc)) {\n const msg = 'Flow end indicator should be more indented than parent';\n onError(end, 'BAD_INDENT', msg, true);\n }\n }\n}\n\nexports.flowIndentCheck = flowIndentCheck;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\n\nfunction mapIncludes(ctx, items, search) {\n const { uniqueKeys } = ctx.options;\n if (uniqueKeys === false)\n return false;\n const 
isEqual = typeof uniqueKeys === 'function'\n ? uniqueKeys\n : (a, b) => a === b ||\n (Node.isScalar(a) &&\n Node.isScalar(b) &&\n a.value === b.value &&\n !(a.value === '<<' && ctx.schema.merge));\n return items.some(pair => isEqual(pair.key, search));\n}\n\nexports.mapIncludes = mapIncludes;\n","'use strict';\n\nvar Alias = require('../nodes/Alias.js');\nvar Collection = require('../nodes/Collection.js');\nvar Node = require('../nodes/Node.js');\nvar Pair = require('../nodes/Pair.js');\nvar toJS = require('../nodes/toJS.js');\nvar Schema = require('../schema/Schema.js');\nvar stringify = require('../stringify/stringify.js');\nvar stringifyDocument = require('../stringify/stringifyDocument.js');\nvar anchors = require('./anchors.js');\nvar applyReviver = require('./applyReviver.js');\nvar createNode = require('./createNode.js');\nvar directives = require('./directives.js');\n\nclass Document {\n constructor(value, replacer, options) {\n /** A comment before this Document */\n this.commentBefore = null;\n /** A comment immediately after this Document */\n this.comment = null;\n /** Errors encountered during parsing. */\n this.errors = [];\n /** Warnings encountered during parsing. */\n this.warnings = [];\n Object.defineProperty(this, Node.NODE_TYPE, { value: Node.DOC });\n let _replacer = null;\n if (typeof replacer === 'function' || Array.isArray(replacer)) {\n _replacer = replacer;\n }\n else if (options === undefined && replacer) {\n options = replacer;\n replacer = undefined;\n }\n const opt = Object.assign({\n intAsBigInt: false,\n keepSourceTokens: false,\n logLevel: 'warn',\n prettyErrors: true,\n strict: true,\n uniqueKeys: true,\n version: '1.2'\n }, options);\n this.options = opt;\n let { version } = opt;\n if (options?._directives) {\n this.directives = options._directives.atDocument();\n if (this.directives.yaml.explicit)\n version = this.directives.yaml.version;\n }\n else\n this.directives = new directives.Directives({ version });\n this.setSchema(version, options);\n if (value === undefined)\n this.contents = null;\n else {\n this.contents = this.createNode(value, _replacer, options);\n }\n }\n /**\n * Create a deep copy of this Document and its contents.\n *\n * Custom Node values that inherit from `Object` still refer to their original instances.\n */\n clone() {\n const copy = Object.create(Document.prototype, {\n [Node.NODE_TYPE]: { value: Node.DOC }\n });\n copy.commentBefore = this.commentBefore;\n copy.comment = this.comment;\n copy.errors = this.errors.slice();\n copy.warnings = this.warnings.slice();\n copy.options = Object.assign({}, this.options);\n if (this.directives)\n copy.directives = this.directives.clone();\n copy.schema = this.schema.clone();\n copy.contents = Node.isNode(this.contents)\n ? this.contents.clone(copy.schema)\n : this.contents;\n if (this.range)\n copy.range = this.range.slice();\n return copy;\n }\n /** Adds a value to the document. */\n add(value) {\n if (assertCollection(this.contents))\n this.contents.add(value);\n }\n /** Adds a value to the document. 
*/\n addIn(path, value) {\n if (assertCollection(this.contents))\n this.contents.addIn(path, value);\n }\n /**\n * Create a new `Alias` node, ensuring that the target `node` has the required anchor.\n *\n * If `node` already has an anchor, `name` is ignored.\n * Otherwise, the `node.anchor` value will be set to `name`,\n * or if an anchor with that name is already present in the document,\n * `name` will be used as a prefix for a new unique anchor.\n * If `name` is undefined, the generated anchor will use 'a' as a prefix.\n */\n createAlias(node, name) {\n if (!node.anchor) {\n const prev = anchors.anchorNames(this);\n node.anchor =\n // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing\n !name || prev.has(name) ? anchors.findNewAnchor(name || 'a', prev) : name;\n }\n return new Alias.Alias(node.anchor);\n }\n createNode(value, replacer, options) {\n let _replacer = undefined;\n if (typeof replacer === 'function') {\n value = replacer.call({ '': value }, '', value);\n _replacer = replacer;\n }\n else if (Array.isArray(replacer)) {\n const keyToStr = (v) => typeof v === 'number' || v instanceof String || v instanceof Number;\n const asStr = replacer.filter(keyToStr).map(String);\n if (asStr.length > 0)\n replacer = replacer.concat(asStr);\n _replacer = replacer;\n }\n else if (options === undefined && replacer) {\n options = replacer;\n replacer = undefined;\n }\n const { aliasDuplicateObjects, anchorPrefix, flow, keepUndefined, onTagObj, tag } = options ?? {};\n const { onAnchor, setAnchors, sourceObjects } = anchors.createNodeAnchors(this, \n // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing\n anchorPrefix || 'a');\n const ctx = {\n aliasDuplicateObjects: aliasDuplicateObjects ?? true,\n keepUndefined: keepUndefined ?? false,\n onAnchor,\n onTagObj,\n replacer: _replacer,\n schema: this.schema,\n sourceObjects\n };\n const node = createNode.createNode(value, tag, ctx);\n if (flow && Node.isCollection(node))\n node.flow = true;\n setAnchors();\n return node;\n }\n /**\n * Convert a key and a value into a `Pair` using the current schema,\n * recursively wrapping all values as `Scalar` or `Collection` nodes.\n */\n createPair(key, value, options = {}) {\n const k = this.createNode(key, null, options);\n const v = this.createNode(value, null, options);\n return new Pair.Pair(k, v);\n }\n /**\n * Removes a value from the document.\n * @returns `true` if the item was found and removed.\n */\n delete(key) {\n return assertCollection(this.contents) ? this.contents.delete(key) : false;\n }\n /**\n * Removes a value from the document.\n * @returns `true` if the item was found and removed.\n */\n deleteIn(path) {\n if (Collection.isEmptyPath(path)) {\n if (this.contents == null)\n return false;\n this.contents = null;\n return true;\n }\n return assertCollection(this.contents)\n ? this.contents.deleteIn(path)\n : false;\n }\n /**\n * Returns item at `key`, or `undefined` if not found. By default unwraps\n * scalar values from their surrounding node; to disable set `keepScalar` to\n * `true` (collections are always returned intact).\n */\n get(key, keepScalar) {\n return Node.isCollection(this.contents)\n ? this.contents.get(key, keepScalar)\n : undefined;\n }\n /**\n * Returns item at `path`, or `undefined` if not found. 
By default unwraps\n * scalar values from their surrounding node; to disable set `keepScalar` to\n * `true` (collections are always returned intact).\n */\n getIn(path, keepScalar) {\n if (Collection.isEmptyPath(path))\n return !keepScalar && Node.isScalar(this.contents)\n ? this.contents.value\n : this.contents;\n return Node.isCollection(this.contents)\n ? this.contents.getIn(path, keepScalar)\n : undefined;\n }\n /**\n * Checks if the document includes a value with the key `key`.\n */\n has(key) {\n return Node.isCollection(this.contents) ? this.contents.has(key) : false;\n }\n /**\n * Checks if the document includes a value at `path`.\n */\n hasIn(path) {\n if (Collection.isEmptyPath(path))\n return this.contents !== undefined;\n return Node.isCollection(this.contents) ? this.contents.hasIn(path) : false;\n }\n /**\n * Sets a value in this document. For `!!set`, `value` needs to be a\n * boolean to add/remove the item from the set.\n */\n set(key, value) {\n if (this.contents == null) {\n this.contents = Collection.collectionFromPath(this.schema, [key], value);\n }\n else if (assertCollection(this.contents)) {\n this.contents.set(key, value);\n }\n }\n /**\n * Sets a value in this document. For `!!set`, `value` needs to be a\n * boolean to add/remove the item from the set.\n */\n setIn(path, value) {\n if (Collection.isEmptyPath(path))\n this.contents = value;\n else if (this.contents == null) {\n this.contents = Collection.collectionFromPath(this.schema, Array.from(path), value);\n }\n else if (assertCollection(this.contents)) {\n this.contents.setIn(path, value);\n }\n }\n /**\n * Change the YAML version and schema used by the document.\n * A `null` version disables support for directives, explicit tags, anchors, and aliases.\n * It also requires the `schema` option to be given as a `Schema` instance value.\n *\n * Overrides all previously set schema options.\n */\n setSchema(version, options = {}) {\n if (typeof version === 'number')\n version = String(version);\n let opt;\n switch (version) {\n case '1.1':\n if (this.directives)\n this.directives.yaml.version = '1.1';\n else\n this.directives = new directives.Directives({ version: '1.1' });\n opt = { merge: true, resolveKnownTags: false, schema: 'yaml-1.1' };\n break;\n case '1.2':\n case 'next':\n if (this.directives)\n this.directives.yaml.version = version;\n else\n this.directives = new directives.Directives({ version });\n opt = { merge: false, resolveKnownTags: true, schema: 'core' };\n break;\n case null:\n if (this.directives)\n delete this.directives;\n opt = null;\n break;\n default: {\n const sv = JSON.stringify(version);\n throw new Error(`Expected '1.1', '1.2' or null as first argument, but found: ${sv}`);\n }\n }\n // Not using `instanceof Schema` to allow for duck typing\n if (options.schema instanceof Object)\n this.schema = options.schema;\n else if (opt)\n this.schema = new Schema.Schema(Object.assign(opt, options));\n else\n throw new Error(`With a null YAML version, the { schema: Schema } option is required`);\n }\n // json & jsonArg are only used from toJSON()\n toJS({ json, jsonArg, mapAsMap, maxAliasCount, onAnchor, reviver } = {}) {\n const ctx = {\n anchors: new Map(),\n doc: this,\n keep: !json,\n mapAsMap: mapAsMap === true,\n mapKeyWarned: false,\n maxAliasCount: typeof maxAliasCount === 'number' ? maxAliasCount : 100,\n stringify: stringify.stringify\n };\n const res = toJS.toJS(this.contents, jsonArg ?? 
'', ctx);\n if (typeof onAnchor === 'function')\n for (const { count, res } of ctx.anchors.values())\n onAnchor(res, count);\n return typeof reviver === 'function'\n ? applyReviver.applyReviver(reviver, { '': res }, '', res)\n : res;\n }\n /**\n * A JSON representation of the document `contents`.\n *\n * @param jsonArg Used by `JSON.stringify` to indicate the array index or\n * property name.\n */\n toJSON(jsonArg, onAnchor) {\n return this.toJS({ json: true, jsonArg, mapAsMap: false, onAnchor });\n }\n /** A YAML representation of the document. */\n toString(options = {}) {\n if (this.errors.length > 0)\n throw new Error('Document with errors cannot be stringified');\n if ('indent' in options &&\n (!Number.isInteger(options.indent) || Number(options.indent) <= 0)) {\n const s = JSON.stringify(options.indent);\n throw new Error(`\"indent\" option must be a positive integer, not ${s}`);\n }\n return stringifyDocument.stringifyDocument(this, options);\n }\n}\nfunction assertCollection(contents) {\n if (Node.isCollection(contents))\n return true;\n throw new Error('Expected a YAML collection as document contents');\n}\n\nexports.Document = Document;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar visit = require('../visit.js');\n\n/**\n * Verify that the input string is a valid anchor.\n *\n * Will throw on errors.\n */\nfunction anchorIsValid(anchor) {\n if (/[\\x00-\\x19\\s,[\\]{}]/.test(anchor)) {\n const sa = JSON.stringify(anchor);\n const msg = `Anchor must not contain whitespace or control characters: ${sa}`;\n throw new Error(msg);\n }\n return true;\n}\nfunction anchorNames(root) {\n const anchors = new Set();\n visit.visit(root, {\n Value(_key, node) {\n if (node.anchor)\n anchors.add(node.anchor);\n }\n });\n return anchors;\n}\n/** Find a new anchor name with the given `prefix` and a one-indexed suffix. */\nfunction findNewAnchor(prefix, exclude) {\n for (let i = 1; true; ++i) {\n const name = `${prefix}${i}`;\n if (!exclude.has(name))\n return name;\n }\n}\nfunction createNodeAnchors(doc, prefix) {\n const aliasObjects = [];\n const sourceObjects = new Map();\n let prevAnchors = null;\n return {\n onAnchor: (source) => {\n aliasObjects.push(source);\n if (!prevAnchors)\n prevAnchors = anchorNames(doc);\n const anchor = findNewAnchor(prefix, prevAnchors);\n prevAnchors.add(anchor);\n return anchor;\n },\n /**\n * With circular references, the source node is only resolved after all\n * of its child nodes are. 
This is why anchors are set only after all of\n * the nodes have been created.\n */\n setAnchors: () => {\n for (const source of aliasObjects) {\n const ref = sourceObjects.get(source);\n if (typeof ref === 'object' &&\n ref.anchor &&\n (Node.isScalar(ref.node) || Node.isCollection(ref.node))) {\n ref.node.anchor = ref.anchor;\n }\n else {\n const error = new Error('Failed to resolve repeated object (this should not happen)');\n error.source = source;\n throw error;\n }\n }\n },\n sourceObjects\n };\n}\n\nexports.anchorIsValid = anchorIsValid;\nexports.anchorNames = anchorNames;\nexports.createNodeAnchors = createNodeAnchors;\nexports.findNewAnchor = findNewAnchor;\n","'use strict';\n\n/**\n * Applies the JSON.parse reviver algorithm as defined in the ECMA-262 spec,\n * in section 24.5.1.1 \"Runtime Semantics: InternalizeJSONProperty\" of the\n * 2021 edition: https://tc39.es/ecma262/#sec-json.parse\n *\n * Includes extensions for handling Map and Set objects.\n */\nfunction applyReviver(reviver, obj, key, val) {\n if (val && typeof val === 'object') {\n if (Array.isArray(val)) {\n for (let i = 0, len = val.length; i < len; ++i) {\n const v0 = val[i];\n const v1 = applyReviver(reviver, val, String(i), v0);\n if (v1 === undefined)\n delete val[i];\n else if (v1 !== v0)\n val[i] = v1;\n }\n }\n else if (val instanceof Map) {\n for (const k of Array.from(val.keys())) {\n const v0 = val.get(k);\n const v1 = applyReviver(reviver, val, k, v0);\n if (v1 === undefined)\n val.delete(k);\n else if (v1 !== v0)\n val.set(k, v1);\n }\n }\n else if (val instanceof Set) {\n for (const v0 of Array.from(val)) {\n const v1 = applyReviver(reviver, val, v0, v0);\n if (v1 === undefined)\n val.delete(v0);\n else if (v1 !== v0) {\n val.delete(v0);\n val.add(v1);\n }\n }\n }\n else {\n for (const [k, v0] of Object.entries(val)) {\n const v1 = applyReviver(reviver, val, k, v0);\n if (v1 === undefined)\n delete val[k];\n else if (v1 !== v0)\n val[k] = v1;\n }\n }\n }\n return reviver.call(obj, key, val);\n}\n\nexports.applyReviver = applyReviver;\n","'use strict';\n\nvar Alias = require('../nodes/Alias.js');\nvar Node = require('../nodes/Node.js');\nvar Scalar = require('../nodes/Scalar.js');\n\nconst defaultTagPrefix = 'tag:yaml.org,2002:';\nfunction findTagObject(value, tagName, tags) {\n if (tagName) {\n const match = tags.filter(t => t.tag === tagName);\n const tagObj = match.find(t => !t.format) ?? match[0];\n if (!tagObj)\n throw new Error(`Tag ${tagName} not found`);\n return tagObj;\n }\n return tags.find(t => t.identify?.(value) && !t.format);\n}\nfunction createNode(value, tagName, ctx) {\n if (Node.isDocument(value))\n value = value.contents;\n if (Node.isNode(value))\n return value;\n if (Node.isPair(value)) {\n const map = ctx.schema[Node.MAP].createNode?.(ctx.schema, null, ctx);\n map.items.push(value);\n return map;\n }\n if (value instanceof String ||\n value instanceof Number ||\n value instanceof Boolean ||\n (typeof BigInt === 'function' && value instanceof BigInt) // not supported everywhere\n ) {\n // https://tc39.es/ecma262/#sec-serializejsonproperty\n value = value.valueOf();\n }\n const { aliasDuplicateObjects, onAnchor, onTagObj, schema, sourceObjects } = ctx;\n // Detect duplicate references to the same object & use Alias nodes for all\n // after first. 
The `ref` wrapper allows for circular references to resolve.\n let ref = undefined;\n if (aliasDuplicateObjects && value && typeof value === 'object') {\n ref = sourceObjects.get(value);\n if (ref) {\n if (!ref.anchor)\n ref.anchor = onAnchor(value);\n return new Alias.Alias(ref.anchor);\n }\n else {\n ref = { anchor: null, node: null };\n sourceObjects.set(value, ref);\n }\n }\n if (tagName?.startsWith('!!'))\n tagName = defaultTagPrefix + tagName.slice(2);\n let tagObj = findTagObject(value, tagName, schema.tags);\n if (!tagObj) {\n if (value && typeof value.toJSON === 'function') {\n // eslint-disable-next-line @typescript-eslint/no-unsafe-call\n value = value.toJSON();\n }\n if (!value || typeof value !== 'object') {\n const node = new Scalar.Scalar(value);\n if (ref)\n ref.node = node;\n return node;\n }\n tagObj =\n value instanceof Map\n ? schema[Node.MAP]\n : Symbol.iterator in Object(value)\n ? schema[Node.SEQ]\n : schema[Node.MAP];\n }\n if (onTagObj) {\n onTagObj(tagObj);\n delete ctx.onTagObj;\n }\n const node = tagObj?.createNode\n ? tagObj.createNode(ctx.schema, value, ctx)\n : new Scalar.Scalar(value);\n if (tagName)\n node.tag = tagName;\n if (ref)\n ref.node = node;\n return node;\n}\n\nexports.createNode = createNode;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar visit = require('../visit.js');\n\nconst escapeChars = {\n '!': '%21',\n ',': '%2C',\n '[': '%5B',\n ']': '%5D',\n '{': '%7B',\n '}': '%7D'\n};\nconst escapeTagName = (tn) => tn.replace(/[!,[\\]{}]/g, ch => escapeChars[ch]);\nclass Directives {\n constructor(yaml, tags) {\n /**\n * The directives-end/doc-start marker `---`. If `null`, a marker may still be\n * included in the document's stringified representation.\n */\n this.docStart = null;\n /** The doc-end marker `...`. 
*/\n this.docEnd = false;\n this.yaml = Object.assign({}, Directives.defaultYaml, yaml);\n this.tags = Object.assign({}, Directives.defaultTags, tags);\n }\n clone() {\n const copy = new Directives(this.yaml, this.tags);\n copy.docStart = this.docStart;\n return copy;\n }\n /**\n * During parsing, get a Directives instance for the current document and\n * update the stream state according to the current version's spec.\n */\n atDocument() {\n const res = new Directives(this.yaml, this.tags);\n switch (this.yaml.version) {\n case '1.1':\n this.atNextDocument = true;\n break;\n case '1.2':\n this.atNextDocument = false;\n this.yaml = {\n explicit: Directives.defaultYaml.explicit,\n version: '1.2'\n };\n this.tags = Object.assign({}, Directives.defaultTags);\n break;\n }\n return res;\n }\n /**\n * @param onError - May be called even if the action was successful\n * @returns `true` on success\n */\n add(line, onError) {\n if (this.atNextDocument) {\n this.yaml = { explicit: Directives.defaultYaml.explicit, version: '1.1' };\n this.tags = Object.assign({}, Directives.defaultTags);\n this.atNextDocument = false;\n }\n const parts = line.trim().split(/[ \\t]+/);\n const name = parts.shift();\n switch (name) {\n case '%TAG': {\n if (parts.length !== 2) {\n onError(0, '%TAG directive should contain exactly two parts');\n if (parts.length < 2)\n return false;\n }\n const [handle, prefix] = parts;\n this.tags[handle] = prefix;\n return true;\n }\n case '%YAML': {\n this.yaml.explicit = true;\n if (parts.length !== 1) {\n onError(0, '%YAML directive should contain exactly one part');\n return false;\n }\n const [version] = parts;\n if (version === '1.1' || version === '1.2') {\n this.yaml.version = version;\n return true;\n }\n else {\n const isValid = /^\\d+\\.\\d+$/.test(version);\n onError(6, `Unsupported YAML version ${version}`, isValid);\n return false;\n }\n }\n default:\n onError(0, `Unknown directive ${name}`, true);\n return false;\n }\n }\n /**\n * Resolves a tag, matching handles to those defined in %TAG directives.\n *\n * @returns Resolved tag, which may also be the non-specific tag `'!'` or a\n * `'!local'` tag, or `null` if unresolvable.\n */\n tagName(source, onError) {\n if (source === '!')\n return '!'; // non-specific tag\n if (source[0] !== '!') {\n onError(`Not a valid tag: ${source}`);\n return null;\n }\n if (source[1] === '<') {\n const verbatim = source.slice(2, -1);\n if (verbatim === '!' || verbatim === '!!') {\n onError(`Verbatim tags aren't resolved, so ${source} is invalid.`);\n return null;\n }\n if (source[source.length - 1] !== '>')\n onError('Verbatim tags must end with a >');\n return verbatim;\n }\n const [, handle, suffix] = source.match(/^(.*!)([^!]*)$/);\n if (!suffix)\n onError(`The ${source} tag has no suffix`);\n const prefix = this.tags[handle];\n if (prefix)\n return prefix + decodeURIComponent(suffix);\n if (handle === '!')\n return source; // local tag\n onError(`Could not resolve tag: ${source}`);\n return null;\n }\n /**\n * Given a fully resolved tag, returns its printable string form,\n * taking into account current tag prefixes and defaults.\n */\n tagString(tag) {\n for (const [handle, prefix] of Object.entries(this.tags)) {\n if (tag.startsWith(prefix))\n return handle + escapeTagName(tag.substring(prefix.length));\n }\n return tag[0] === '!' ? tag : `!<${tag}>`;\n }\n toString(doc) {\n const lines = this.yaml.explicit\n ? 
[`%YAML ${this.yaml.version || '1.2'}`]\n : [];\n const tagEntries = Object.entries(this.tags);\n let tagNames;\n if (doc && tagEntries.length > 0 && Node.isNode(doc.contents)) {\n const tags = {};\n visit.visit(doc.contents, (_key, node) => {\n if (Node.isNode(node) && node.tag)\n tags[node.tag] = true;\n });\n tagNames = Object.keys(tags);\n }\n else\n tagNames = [];\n for (const [handle, prefix] of tagEntries) {\n if (handle === '!!' && prefix === 'tag:yaml.org,2002:')\n continue;\n if (!doc || tagNames.some(tn => tn.startsWith(prefix)))\n lines.push(`%TAG ${handle} ${prefix}`);\n }\n return lines.join('\\n');\n }\n}\nDirectives.defaultYaml = { explicit: false, version: '1.2' };\nDirectives.defaultTags = { '!!': 'tag:yaml.org,2002:' };\n\nexports.Directives = Directives;\n","'use strict';\n\nclass YAMLError extends Error {\n constructor(name, pos, code, message) {\n super();\n this.name = name;\n this.code = code;\n this.message = message;\n this.pos = pos;\n }\n}\nclass YAMLParseError extends YAMLError {\n constructor(pos, code, message) {\n super('YAMLParseError', pos, code, message);\n }\n}\nclass YAMLWarning extends YAMLError {\n constructor(pos, code, message) {\n super('YAMLWarning', pos, code, message);\n }\n}\nconst prettifyError = (src, lc) => (error) => {\n if (error.pos[0] === -1)\n return;\n error.linePos = error.pos.map(pos => lc.linePos(pos));\n const { line, col } = error.linePos[0];\n error.message += ` at line ${line}, column ${col}`;\n let ci = col - 1;\n let lineStr = src\n .substring(lc.lineStarts[line - 1], lc.lineStarts[line])\n .replace(/[\\n\\r]+$/, '');\n // Trim to max 80 chars, keeping col position near the middle\n if (ci >= 60 && lineStr.length > 80) {\n const trimStart = Math.min(ci - 39, lineStr.length - 79);\n lineStr = '…' + lineStr.substring(trimStart);\n ci -= trimStart - 1;\n }\n if (lineStr.length > 80)\n lineStr = lineStr.substring(0, 79) + '…';\n // Include previous line in context if pointing at line start\n if (line > 1 && /^ *$/.test(lineStr.substring(0, ci))) {\n // Regexp won't match if start is trimmed\n let prev = src.substring(lc.lineStarts[line - 2], lc.lineStarts[line - 1]);\n if (prev.length > 80)\n prev = prev.substring(0, 79) + '…\\n';\n lineStr = prev + lineStr;\n }\n if (/[^ ]/.test(lineStr)) {\n let count = 1;\n const end = error.linePos[1];\n if (end && end.line === line && end.col > col) {\n count = Math.min(end.col - col, 80 - ci);\n }\n const pointer = ' '.repeat(ci) + '^'.repeat(count);\n error.message += `:\\n\\n${lineStr}\\n${pointer}\\n`;\n }\n};\n\nexports.YAMLError = YAMLError;\nexports.YAMLParseError = YAMLParseError;\nexports.YAMLWarning = YAMLWarning;\nexports.prettifyError = prettifyError;\n","'use strict';\n\nvar composer = require('./compose/composer.js');\nvar Document = require('./doc/Document.js');\nvar Schema = require('./schema/Schema.js');\nvar errors = require('./errors.js');\nvar Alias = require('./nodes/Alias.js');\nvar Node = require('./nodes/Node.js');\nvar Pair = require('./nodes/Pair.js');\nvar Scalar = require('./nodes/Scalar.js');\nvar YAMLMap = require('./nodes/YAMLMap.js');\nvar YAMLSeq = require('./nodes/YAMLSeq.js');\nvar cst = require('./parse/cst.js');\nvar lexer = require('./parse/lexer.js');\nvar lineCounter = require('./parse/line-counter.js');\nvar parser = require('./parse/parser.js');\nvar publicApi = require('./public-api.js');\nvar visit = require('./visit.js');\n\n\n\nexports.Composer = composer.Composer;\nexports.Document = Document.Document;\nexports.Schema = 
Schema.Schema;\nexports.YAMLError = errors.YAMLError;\nexports.YAMLParseError = errors.YAMLParseError;\nexports.YAMLWarning = errors.YAMLWarning;\nexports.Alias = Alias.Alias;\nexports.isAlias = Node.isAlias;\nexports.isCollection = Node.isCollection;\nexports.isDocument = Node.isDocument;\nexports.isMap = Node.isMap;\nexports.isNode = Node.isNode;\nexports.isPair = Node.isPair;\nexports.isScalar = Node.isScalar;\nexports.isSeq = Node.isSeq;\nexports.Pair = Pair.Pair;\nexports.Scalar = Scalar.Scalar;\nexports.YAMLMap = YAMLMap.YAMLMap;\nexports.YAMLSeq = YAMLSeq.YAMLSeq;\nexports.CST = cst;\nexports.Lexer = lexer.Lexer;\nexports.LineCounter = lineCounter.LineCounter;\nexports.Parser = parser.Parser;\nexports.parse = publicApi.parse;\nexports.parseAllDocuments = publicApi.parseAllDocuments;\nexports.parseDocument = publicApi.parseDocument;\nexports.stringify = publicApi.stringify;\nexports.visit = visit.visit;\nexports.visitAsync = visit.visitAsync;\n","'use strict';\n\nfunction debug(logLevel, ...messages) {\n if (logLevel === 'debug')\n console.log(...messages);\n}\nfunction warn(logLevel, warning) {\n if (logLevel === 'debug' || logLevel === 'warn') {\n if (typeof process !== 'undefined' && process.emitWarning)\n process.emitWarning(warning);\n else\n console.warn(warning);\n }\n}\n\nexports.debug = debug;\nexports.warn = warn;\n","'use strict';\n\nvar anchors = require('../doc/anchors.js');\nvar visit = require('../visit.js');\nvar Node = require('./Node.js');\n\nclass Alias extends Node.NodeBase {\n constructor(source) {\n super(Node.ALIAS);\n this.source = source;\n Object.defineProperty(this, 'tag', {\n set() {\n throw new Error('Alias nodes cannot have tags');\n }\n });\n }\n /**\n * Resolve the value of this alias within `doc`, finding the last\n * instance of the `source` anchor before this node.\n */\n resolve(doc) {\n let found = undefined;\n visit.visit(doc, {\n Node: (_key, node) => {\n if (node === this)\n return visit.visit.BREAK;\n if (node.anchor === this.source)\n found = node;\n }\n });\n return found;\n }\n toJSON(_arg, ctx) {\n if (!ctx)\n return { source: this.source };\n const { anchors, doc, maxAliasCount } = ctx;\n const source = this.resolve(doc);\n if (!source) {\n const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`;\n throw new ReferenceError(msg);\n }\n const data = anchors.get(source);\n /* istanbul ignore if */\n if (!data || data.res === undefined) {\n const msg = 'This should not happen: Alias anchor was not resolved?';\n throw new ReferenceError(msg);\n }\n if (maxAliasCount >= 0) {\n data.count += 1;\n if (data.aliasCount === 0)\n data.aliasCount = getAliasCount(doc, source, anchors);\n if (data.count * data.aliasCount > maxAliasCount) {\n const msg = 'Excessive alias count indicates a resource exhaustion attack';\n throw new ReferenceError(msg);\n }\n }\n return data.res;\n }\n toString(ctx, _onComment, _onChompKeep) {\n const src = `*${this.source}`;\n if (ctx) {\n anchors.anchorIsValid(this.source);\n if (ctx.options.verifyAliasOrder && !ctx.anchors.has(this.source)) {\n const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`;\n throw new Error(msg);\n }\n if (ctx.implicitKey)\n return `${src} `;\n }\n return src;\n }\n}\nfunction getAliasCount(doc, node, anchors) {\n if (Node.isAlias(node)) {\n const source = node.resolve(doc);\n const anchor = anchors && source && anchors.get(source);\n return anchor ? 
anchor.count * anchor.aliasCount : 0;\n }\n else if (Node.isCollection(node)) {\n let count = 0;\n for (const item of node.items) {\n const c = getAliasCount(doc, item, anchors);\n if (c > count)\n count = c;\n }\n return count;\n }\n else if (Node.isPair(node)) {\n const kc = getAliasCount(doc, node.key, anchors);\n const vc = getAliasCount(doc, node.value, anchors);\n return Math.max(kc, vc);\n }\n return 1;\n}\n\nexports.Alias = Alias;\n","'use strict';\n\nvar createNode = require('../doc/createNode.js');\nvar Node = require('./Node.js');\n\nfunction collectionFromPath(schema, path, value) {\n let v = value;\n for (let i = path.length - 1; i >= 0; --i) {\n const k = path[i];\n if (typeof k === 'number' && Number.isInteger(k) && k >= 0) {\n const a = [];\n a[k] = v;\n v = a;\n }\n else {\n v = new Map([[k, v]]);\n }\n }\n return createNode.createNode(v, undefined, {\n aliasDuplicateObjects: false,\n keepUndefined: false,\n onAnchor: () => {\n throw new Error('This should not happen, please report a bug.');\n },\n schema,\n sourceObjects: new Map()\n });\n}\n// Type guard is intentionally a little wrong so as to be more useful,\n// as it does not cover untypable empty non-string iterables (e.g. []).\nconst isEmptyPath = (path) => path == null ||\n (typeof path === 'object' && !!path[Symbol.iterator]().next().done);\nclass Collection extends Node.NodeBase {\n constructor(type, schema) {\n super(type);\n Object.defineProperty(this, 'schema', {\n value: schema,\n configurable: true,\n enumerable: false,\n writable: true\n });\n }\n /**\n * Create a copy of this collection.\n *\n * @param schema - If defined, overwrites the original's schema\n */\n clone(schema) {\n const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this));\n if (schema)\n copy.schema = schema;\n copy.items = copy.items.map(it => Node.isNode(it) || Node.isPair(it) ? it.clone(schema) : it);\n if (this.range)\n copy.range = this.range.slice();\n return copy;\n }\n /**\n * Adds a value to the collection. For `!!map` and `!!omap` the value must\n * be a Pair instance or a `{ key, value }` object, which may not have a key\n * that already exists in the map.\n */\n addIn(path, value) {\n if (isEmptyPath(path))\n this.add(value);\n else {\n const [key, ...rest] = path;\n const node = this.get(key, true);\n if (Node.isCollection(node))\n node.addIn(rest, value);\n else if (node === undefined && this.schema)\n this.set(key, collectionFromPath(this.schema, rest, value));\n else\n throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);\n }\n }\n /**\n * Removes a value from the collection.\n * @returns `true` if the item was found and removed.\n */\n deleteIn(path) {\n const [key, ...rest] = path;\n if (rest.length === 0)\n return this.delete(key);\n const node = this.get(key, true);\n if (Node.isCollection(node))\n return node.deleteIn(rest);\n else\n throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);\n }\n /**\n * Returns item at `key`, or `undefined` if not found. By default unwraps\n * scalar values from their surrounding node; to disable set `keepScalar` to\n * `true` (collections are always returned intact).\n */\n getIn(path, keepScalar) {\n const [key, ...rest] = path;\n const node = this.get(key, true);\n if (rest.length === 0)\n return !keepScalar && Node.isScalar(node) ? node.value : node;\n else\n return Node.isCollection(node) ? 
node.getIn(rest, keepScalar) : undefined;\n }\n hasAllNullValues(allowScalar) {\n return this.items.every(node => {\n if (!Node.isPair(node))\n return false;\n const n = node.value;\n return (n == null ||\n (allowScalar &&\n Node.isScalar(n) &&\n n.value == null &&\n !n.commentBefore &&\n !n.comment &&\n !n.tag));\n });\n }\n /**\n * Checks if the collection includes a value with the key `key`.\n */\n hasIn(path) {\n const [key, ...rest] = path;\n if (rest.length === 0)\n return this.has(key);\n const node = this.get(key, true);\n return Node.isCollection(node) ? node.hasIn(rest) : false;\n }\n /**\n * Sets a value in this collection. For `!!set`, `value` needs to be a\n * boolean to add/remove the item from the set.\n */\n setIn(path, value) {\n const [key, ...rest] = path;\n if (rest.length === 0) {\n this.set(key, value);\n }\n else {\n const node = this.get(key, true);\n if (Node.isCollection(node))\n node.setIn(rest, value);\n else if (node === undefined && this.schema)\n this.set(key, collectionFromPath(this.schema, rest, value));\n else\n throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);\n }\n }\n}\nCollection.maxFlowStringSingleLineLength = 60;\n\nexports.Collection = Collection;\nexports.collectionFromPath = collectionFromPath;\nexports.isEmptyPath = isEmptyPath;\n","'use strict';\n\nconst ALIAS = Symbol.for('yaml.alias');\nconst DOC = Symbol.for('yaml.document');\nconst MAP = Symbol.for('yaml.map');\nconst PAIR = Symbol.for('yaml.pair');\nconst SCALAR = Symbol.for('yaml.scalar');\nconst SEQ = Symbol.for('yaml.seq');\nconst NODE_TYPE = Symbol.for('yaml.node.type');\nconst isAlias = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === ALIAS;\nconst isDocument = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === DOC;\nconst isMap = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === MAP;\nconst isPair = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === PAIR;\nconst isScalar = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SCALAR;\nconst isSeq = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SEQ;\nfunction isCollection(node) {\n if (node && typeof node === 'object')\n switch (node[NODE_TYPE]) {\n case MAP:\n case SEQ:\n return true;\n }\n return false;\n}\nfunction isNode(node) {\n if (node && typeof node === 'object')\n switch (node[NODE_TYPE]) {\n case ALIAS:\n case MAP:\n case SCALAR:\n case SEQ:\n return true;\n }\n return false;\n}\nconst hasAnchor = (node) => (isScalar(node) || isCollection(node)) && !!node.anchor;\nclass NodeBase {\n constructor(type) {\n Object.defineProperty(this, NODE_TYPE, { value: type });\n }\n /** Create a copy of this node. 
*/\n clone() {\n const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this));\n if (this.range)\n copy.range = this.range.slice();\n return copy;\n }\n}\n\nexports.ALIAS = ALIAS;\nexports.DOC = DOC;\nexports.MAP = MAP;\nexports.NODE_TYPE = NODE_TYPE;\nexports.NodeBase = NodeBase;\nexports.PAIR = PAIR;\nexports.SCALAR = SCALAR;\nexports.SEQ = SEQ;\nexports.hasAnchor = hasAnchor;\nexports.isAlias = isAlias;\nexports.isCollection = isCollection;\nexports.isDocument = isDocument;\nexports.isMap = isMap;\nexports.isNode = isNode;\nexports.isPair = isPair;\nexports.isScalar = isScalar;\nexports.isSeq = isSeq;\n","'use strict';\n\nvar createNode = require('../doc/createNode.js');\nvar stringifyPair = require('../stringify/stringifyPair.js');\nvar addPairToJSMap = require('./addPairToJSMap.js');\nvar Node = require('./Node.js');\n\nfunction createPair(key, value, ctx) {\n const k = createNode.createNode(key, undefined, ctx);\n const v = createNode.createNode(value, undefined, ctx);\n return new Pair(k, v);\n}\nclass Pair {\n constructor(key, value = null) {\n Object.defineProperty(this, Node.NODE_TYPE, { value: Node.PAIR });\n this.key = key;\n this.value = value;\n }\n clone(schema) {\n let { key, value } = this;\n if (Node.isNode(key))\n key = key.clone(schema);\n if (Node.isNode(value))\n value = value.clone(schema);\n return new Pair(key, value);\n }\n toJSON(_, ctx) {\n const pair = ctx?.mapAsMap ? new Map() : {};\n return addPairToJSMap.addPairToJSMap(ctx, pair, this);\n }\n toString(ctx, onComment, onChompKeep) {\n return ctx?.doc\n ? stringifyPair.stringifyPair(this, ctx, onComment, onChompKeep)\n : JSON.stringify(this);\n }\n}\n\nexports.Pair = Pair;\nexports.createPair = createPair;\n","'use strict';\n\nvar Node = require('./Node.js');\nvar toJS = require('./toJS.js');\n\nconst isScalarValue = (value) => !value || (typeof value !== 'function' && typeof value !== 'object');\nclass Scalar extends Node.NodeBase {\n constructor(value) {\n super(Node.SCALAR);\n this.value = value;\n }\n toJSON(arg, ctx) {\n return ctx?.keep ? this.value : toJS.toJS(this.value, arg, ctx);\n }\n toString() {\n return String(this.value);\n }\n}\nScalar.BLOCK_FOLDED = 'BLOCK_FOLDED';\nScalar.BLOCK_LITERAL = 'BLOCK_LITERAL';\nScalar.PLAIN = 'PLAIN';\nScalar.QUOTE_DOUBLE = 'QUOTE_DOUBLE';\nScalar.QUOTE_SINGLE = 'QUOTE_SINGLE';\n\nexports.Scalar = Scalar;\nexports.isScalarValue = isScalarValue;\n","'use strict';\n\nvar stringifyCollection = require('../stringify/stringifyCollection.js');\nvar addPairToJSMap = require('./addPairToJSMap.js');\nvar Collection = require('./Collection.js');\nvar Node = require('./Node.js');\nvar Pair = require('./Pair.js');\nvar Scalar = require('./Scalar.js');\n\nfunction findPair(items, key) {\n const k = Node.isScalar(key) ? key.value : key;\n for (const it of items) {\n if (Node.isPair(it)) {\n if (it.key === key || it.key === k)\n return it;\n if (Node.isScalar(it.key) && it.key.value === k)\n return it;\n }\n }\n return undefined;\n}\nclass YAMLMap extends Collection.Collection {\n constructor(schema) {\n super(Node.MAP, schema);\n this.items = [];\n }\n static get tagName() {\n return 'tag:yaml.org,2002:map';\n }\n /**\n * Adds a value to the collection.\n *\n * @param overwrite - If not set `true`, using a key that is already in the\n * collection will throw. 
Otherwise, overwrites the previous value.\n */\n add(pair, overwrite) {\n let _pair;\n if (Node.isPair(pair))\n _pair = pair;\n else if (!pair || typeof pair !== 'object' || !('key' in pair)) {\n // In TypeScript, this never happens.\n _pair = new Pair.Pair(pair, pair?.value);\n }\n else\n _pair = new Pair.Pair(pair.key, pair.value);\n const prev = findPair(this.items, _pair.key);\n const sortEntries = this.schema?.sortMapEntries;\n if (prev) {\n if (!overwrite)\n throw new Error(`Key ${_pair.key} already set`);\n // For scalars, keep the old node & its comments and anchors\n if (Node.isScalar(prev.value) && Scalar.isScalarValue(_pair.value))\n prev.value.value = _pair.value;\n else\n prev.value = _pair.value;\n }\n else if (sortEntries) {\n const i = this.items.findIndex(item => sortEntries(_pair, item) < 0);\n if (i === -1)\n this.items.push(_pair);\n else\n this.items.splice(i, 0, _pair);\n }\n else {\n this.items.push(_pair);\n }\n }\n delete(key) {\n const it = findPair(this.items, key);\n if (!it)\n return false;\n const del = this.items.splice(this.items.indexOf(it), 1);\n return del.length > 0;\n }\n get(key, keepScalar) {\n const it = findPair(this.items, key);\n const node = it?.value;\n return (!keepScalar && Node.isScalar(node) ? node.value : node) ?? undefined;\n }\n has(key) {\n return !!findPair(this.items, key);\n }\n set(key, value) {\n this.add(new Pair.Pair(key, value), true);\n }\n /**\n * @param ctx - Conversion context, originally set in Document#toJS()\n * @param {Class} Type - If set, forces the returned collection type\n * @returns Instance of Type, Map, or Object\n */\n toJSON(_, ctx, Type) {\n const map = Type ? new Type() : ctx?.mapAsMap ? new Map() : {};\n if (ctx?.onCreate)\n ctx.onCreate(map);\n for (const item of this.items)\n addPairToJSMap.addPairToJSMap(ctx, map, item);\n return map;\n }\n toString(ctx, onComment, onChompKeep) {\n if (!ctx)\n return JSON.stringify(this);\n for (const item of this.items) {\n if (!Node.isPair(item))\n throw new Error(`Map items must all be pairs; found ${JSON.stringify(item)} instead`);\n }\n if (!ctx.allNullValues && this.hasAllNullValues(false))\n ctx = Object.assign({}, ctx, { allNullValues: true });\n return stringifyCollection.stringifyCollection(this, ctx, {\n blockItemPrefix: '',\n flowChars: { start: '{', end: '}' },\n itemIndent: ctx.indent || '',\n onChompKeep,\n onComment\n });\n }\n}\n\nexports.YAMLMap = YAMLMap;\nexports.findPair = findPair;\n","'use strict';\n\nvar stringifyCollection = require('../stringify/stringifyCollection.js');\nvar Collection = require('./Collection.js');\nvar Node = require('./Node.js');\nvar Scalar = require('./Scalar.js');\nvar toJS = require('./toJS.js');\n\nclass YAMLSeq extends Collection.Collection {\n constructor(schema) {\n super(Node.SEQ, schema);\n this.items = [];\n }\n static get tagName() {\n return 'tag:yaml.org,2002:seq';\n }\n add(value) {\n this.items.push(value);\n }\n /**\n * Removes a value from the collection.\n *\n * `key` must contain a representation of an integer for this to succeed.\n * It may be wrapped in a `Scalar`.\n *\n * @returns `true` if the item was found and removed.\n */\n delete(key) {\n const idx = asItemIndex(key);\n if (typeof idx !== 'number')\n return false;\n const del = this.items.splice(idx, 1);\n return del.length > 0;\n }\n get(key, keepScalar) {\n const idx = asItemIndex(key);\n if (typeof idx !== 'number')\n return undefined;\n const it = this.items[idx];\n return !keepScalar && Node.isScalar(it) ? 
it.value : it;\n }\n /**\n * Checks if the collection includes a value with the key `key`.\n *\n * `key` must contain a representation of an integer for this to succeed.\n * It may be wrapped in a `Scalar`.\n */\n has(key) {\n const idx = asItemIndex(key);\n return typeof idx === 'number' && idx < this.items.length;\n }\n /**\n * Sets a value in this collection. For `!!set`, `value` needs to be a\n * boolean to add/remove the item from the set.\n *\n * If `key` does not contain a representation of an integer, this will throw.\n * It may be wrapped in a `Scalar`.\n */\n set(key, value) {\n const idx = asItemIndex(key);\n if (typeof idx !== 'number')\n throw new Error(`Expected a valid index, not ${key}.`);\n const prev = this.items[idx];\n if (Node.isScalar(prev) && Scalar.isScalarValue(value))\n prev.value = value;\n else\n this.items[idx] = value;\n }\n toJSON(_, ctx) {\n const seq = [];\n if (ctx?.onCreate)\n ctx.onCreate(seq);\n let i = 0;\n for (const item of this.items)\n seq.push(toJS.toJS(item, String(i++), ctx));\n return seq;\n }\n toString(ctx, onComment, onChompKeep) {\n if (!ctx)\n return JSON.stringify(this);\n return stringifyCollection.stringifyCollection(this, ctx, {\n blockItemPrefix: '- ',\n flowChars: { start: '[', end: ']' },\n itemIndent: (ctx.indent || '') + ' ',\n onChompKeep,\n onComment\n });\n }\n}\nfunction asItemIndex(key) {\n let idx = Node.isScalar(key) ? key.value : key;\n if (idx && typeof idx === 'string')\n idx = Number(idx);\n return typeof idx === 'number' && Number.isInteger(idx) && idx >= 0\n ? idx\n : null;\n}\n\nexports.YAMLSeq = YAMLSeq;\n","'use strict';\n\nvar log = require('../log.js');\nvar stringify = require('../stringify/stringify.js');\nvar Node = require('./Node.js');\nvar Scalar = require('./Scalar.js');\nvar toJS = require('./toJS.js');\n\nconst MERGE_KEY = '<<';\nfunction addPairToJSMap(ctx, map, { key, value }) {\n if (ctx?.doc.schema.merge && isMergeKey(key)) {\n value = Node.isAlias(value) ? value.resolve(ctx.doc) : value;\n if (Node.isSeq(value))\n for (const it of value.items)\n mergeToJSMap(ctx, map, it);\n else if (Array.isArray(value))\n for (const it of value)\n mergeToJSMap(ctx, map, it);\n else\n mergeToJSMap(ctx, map, value);\n }\n else {\n const jsKey = toJS.toJS(key, '', ctx);\n if (map instanceof Map) {\n map.set(jsKey, toJS.toJS(value, jsKey, ctx));\n }\n else if (map instanceof Set) {\n map.add(jsKey);\n }\n else {\n const stringKey = stringifyKey(key, jsKey, ctx);\n const jsValue = toJS.toJS(value, stringKey, ctx);\n if (stringKey in map)\n Object.defineProperty(map, stringKey, {\n value: jsValue,\n writable: true,\n enumerable: true,\n configurable: true\n });\n else\n map[stringKey] = jsValue;\n }\n }\n return map;\n}\nconst isMergeKey = (key) => key === MERGE_KEY ||\n (Node.isScalar(key) &&\n key.value === MERGE_KEY &&\n (!key.type || key.type === Scalar.Scalar.PLAIN));\n// If the value associated with a merge key is a single mapping node, each of\n// its key/value pairs is inserted into the current mapping, unless the key\n// already exists in it. If the value associated with the merge key is a\n// sequence, then this sequence is expected to contain mapping nodes and each\n// of these nodes is merged in turn according to its order in the sequence.\n// Keys in mapping nodes earlier in the sequence override keys specified in\n// later mapping nodes. -- http://yaml.org/type/merge.html\nfunction mergeToJSMap(ctx, map, value) {\n const source = ctx && Node.isAlias(value) ? 
value.resolve(ctx.doc) : value;\n if (!Node.isMap(source))\n throw new Error('Merge sources must be maps or map aliases');\n const srcMap = source.toJSON(null, ctx, Map);\n for (const [key, value] of srcMap) {\n if (map instanceof Map) {\n if (!map.has(key))\n map.set(key, value);\n }\n else if (map instanceof Set) {\n map.add(key);\n }\n else if (!Object.prototype.hasOwnProperty.call(map, key)) {\n Object.defineProperty(map, key, {\n value,\n writable: true,\n enumerable: true,\n configurable: true\n });\n }\n }\n return map;\n}\nfunction stringifyKey(key, jsKey, ctx) {\n if (jsKey === null)\n return '';\n if (typeof jsKey !== 'object')\n return String(jsKey);\n if (Node.isNode(key) && ctx && ctx.doc) {\n const strCtx = stringify.createStringifyContext(ctx.doc, {});\n strCtx.anchors = new Set();\n for (const node of ctx.anchors.keys())\n strCtx.anchors.add(node.anchor);\n strCtx.inFlow = true;\n strCtx.inStringifyKey = true;\n const strKey = key.toString(strCtx);\n if (!ctx.mapKeyWarned) {\n let jsonStr = JSON.stringify(strKey);\n if (jsonStr.length > 40)\n jsonStr = jsonStr.substring(0, 36) + '...\"';\n log.warn(ctx.doc.options.logLevel, `Keys with collection values will be stringified due to JS Object restrictions: ${jsonStr}. Set mapAsMap: true to use object keys.`);\n ctx.mapKeyWarned = true;\n }\n return strKey;\n }\n return JSON.stringify(jsKey);\n}\n\nexports.addPairToJSMap = addPairToJSMap;\n","'use strict';\n\nvar Node = require('./Node.js');\n\n/**\n * Recursively convert any node or its contents to native JavaScript\n *\n * @param value - The input value\n * @param arg - If `value` defines a `toJSON()` method, use this\n * as its first argument\n * @param ctx - Conversion context, originally set in Document#toJS(). If\n * `{ keep: true }` is not set, output should be suitable for JSON\n * stringification.\n */\nfunction toJS(value, arg, ctx) {\n // eslint-disable-next-line @typescript-eslint/no-unsafe-return\n if (Array.isArray(value))\n return value.map((v, i) => toJS(v, String(i), ctx));\n if (value && typeof value.toJSON === 'function') {\n // eslint-disable-next-line @typescript-eslint/no-unsafe-call\n if (!ctx || !Node.hasAnchor(value))\n return value.toJSON(arg, ctx);\n const data = { aliasCount: 0, count: 1, res: undefined };\n ctx.anchors.set(value, data);\n ctx.onCreate = res => {\n data.res = res;\n delete ctx.onCreate;\n };\n const res = value.toJSON(arg, ctx);\n if (ctx.onCreate)\n ctx.onCreate(res);\n return res;\n }\n if (typeof value === 'bigint' && !ctx?.keep)\n return Number(value);\n return value;\n}\n\nexports.toJS = toJS;\n","'use strict';\n\nvar resolveBlockScalar = require('../compose/resolve-block-scalar.js');\nvar resolveFlowScalar = require('../compose/resolve-flow-scalar.js');\nvar errors = require('../errors.js');\nvar stringifyString = require('../stringify/stringifyString.js');\n\nfunction resolveAsScalar(token, strict = true, onError) {\n if (token) {\n const _onError = (pos, code, message) => {\n const offset = typeof pos === 'number' ? pos : Array.isArray(pos) ? 
pos[0] : pos.offset;\n if (onError)\n onError(offset, code, message);\n else\n throw new errors.YAMLParseError([offset, offset + 1], code, message);\n };\n switch (token.type) {\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar':\n return resolveFlowScalar.resolveFlowScalar(token, strict, _onError);\n case 'block-scalar':\n return resolveBlockScalar.resolveBlockScalar(token, strict, _onError);\n }\n }\n return null;\n}\n/**\n * Create a new scalar token with `value`\n *\n * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,\n * as this function does not support any schema operations and won't check for such conflicts.\n *\n * @param value The string representation of the value, which will have its content properly indented.\n * @param context.end Comments and whitespace after the end of the value, or after the block scalar header. If undefined, a newline will be added.\n * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.\n * @param context.indent The indent level of the token.\n * @param context.inFlow Is this scalar within a flow collection? This may affect the resolved type of the token's value.\n * @param context.offset The offset position of the token.\n * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.\n */\nfunction createScalarToken(value, context) {\n const { implicitKey = false, indent, inFlow = false, offset = -1, type = 'PLAIN' } = context;\n const source = stringifyString.stringifyString({ type, value }, {\n implicitKey,\n indent: indent > 0 ? ' '.repeat(indent) : '',\n inFlow,\n options: { blockQuote: true, lineWidth: -1 }\n });\n const end = context.end ?? [\n { type: 'newline', offset: -1, indent, source: '\\n' }\n ];\n switch (source[0]) {\n case '|':\n case '>': {\n const he = source.indexOf('\\n');\n const head = source.substring(0, he);\n const body = source.substring(he + 1) + '\\n';\n const props = [\n { type: 'block-scalar-header', offset, indent, source: head }\n ];\n if (!addEndtoBlockProps(props, end))\n props.push({ type: 'newline', offset: -1, indent, source: '\\n' });\n return { type: 'block-scalar', offset, indent, props, source: body };\n }\n case '\"':\n return { type: 'double-quoted-scalar', offset, indent, source, end };\n case \"'\":\n return { type: 'single-quoted-scalar', offset, indent, source, end };\n default:\n return { type: 'scalar', offset, indent, source, end };\n }\n}\n/**\n * Set the value of `token` to the given string `value`, overwriting any previous contents and type that it may have.\n *\n * Best efforts are made to retain any comments previously associated with the `token`,\n * though all contents within a collection's `items` will be overwritten.\n *\n * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,\n * as this function does not support any schema operations and won't check for such conflicts.\n *\n * @param token Any token. 
If it does not include an `indent` value, the value will be stringified as if it were an implicit key.\n * @param value The string representation of the value, which will have its content properly indented.\n * @param context.afterKey In most cases, values after a key should have an additional level of indentation.\n * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.\n * @param context.inFlow Being within a flow collection may affect the resolved type of the token's value.\n * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.\n */\nfunction setScalarValue(token, value, context = {}) {\n let { afterKey = false, implicitKey = false, inFlow = false, type } = context;\n let indent = 'indent' in token ? token.indent : null;\n if (afterKey && typeof indent === 'number')\n indent += 2;\n if (!type)\n switch (token.type) {\n case 'single-quoted-scalar':\n type = 'QUOTE_SINGLE';\n break;\n case 'double-quoted-scalar':\n type = 'QUOTE_DOUBLE';\n break;\n case 'block-scalar': {\n const header = token.props[0];\n if (header.type !== 'block-scalar-header')\n throw new Error('Invalid block scalar header');\n type = header.source[0] === '>' ? 'BLOCK_FOLDED' : 'BLOCK_LITERAL';\n break;\n }\n default:\n type = 'PLAIN';\n }\n const source = stringifyString.stringifyString({ type, value }, {\n implicitKey: implicitKey || indent === null,\n indent: indent !== null && indent > 0 ? ' '.repeat(indent) : '',\n inFlow,\n options: { blockQuote: true, lineWidth: -1 }\n });\n switch (source[0]) {\n case '|':\n case '>':\n setBlockScalarValue(token, source);\n break;\n case '\"':\n setFlowScalarValue(token, source, 'double-quoted-scalar');\n break;\n case \"'\":\n setFlowScalarValue(token, source, 'single-quoted-scalar');\n break;\n default:\n setFlowScalarValue(token, source, 'scalar');\n }\n}\nfunction setBlockScalarValue(token, source) {\n const he = source.indexOf('\\n');\n const head = source.substring(0, he);\n const body = source.substring(he + 1) + '\\n';\n if (token.type === 'block-scalar') {\n const header = token.props[0];\n if (header.type !== 'block-scalar-header')\n throw new Error('Invalid block scalar header');\n header.source = head;\n token.source = body;\n }\n else {\n const { offset } = token;\n const indent = 'indent' in token ? token.indent : -1;\n const props = [\n { type: 'block-scalar-header', offset, indent, source: head }\n ];\n if (!addEndtoBlockProps(props, 'end' in token ? 
token.end : undefined))\n props.push({ type: 'newline', offset: -1, indent, source: '\\n' });\n for (const key of Object.keys(token))\n if (key !== 'type' && key !== 'offset')\n delete token[key];\n Object.assign(token, { type: 'block-scalar', indent, props, source: body });\n }\n}\n/** @returns `true` if last token is a newline */\nfunction addEndtoBlockProps(props, end) {\n if (end)\n for (const st of end)\n switch (st.type) {\n case 'space':\n case 'comment':\n props.push(st);\n break;\n case 'newline':\n props.push(st);\n return true;\n }\n return false;\n}\nfunction setFlowScalarValue(token, source, type) {\n switch (token.type) {\n case 'scalar':\n case 'double-quoted-scalar':\n case 'single-quoted-scalar':\n token.type = type;\n token.source = source;\n break;\n case 'block-scalar': {\n const end = token.props.slice(1);\n let oa = source.length;\n if (token.props[0].type === 'block-scalar-header')\n oa -= token.props[0].source.length;\n for (const tok of end)\n tok.offset += oa;\n delete token.props;\n Object.assign(token, { type, source, end });\n break;\n }\n case 'block-map':\n case 'block-seq': {\n const offset = token.offset + source.length;\n const nl = { type: 'newline', offset, indent: token.indent, source: '\\n' };\n delete token.items;\n Object.assign(token, { type, source, end: [nl] });\n break;\n }\n default: {\n const indent = 'indent' in token ? token.indent : -1;\n const end = 'end' in token && Array.isArray(token.end)\n ? token.end.filter(st => st.type === 'space' ||\n st.type === 'comment' ||\n st.type === 'newline')\n : [];\n for (const key of Object.keys(token))\n if (key !== 'type' && key !== 'offset')\n delete token[key];\n Object.assign(token, { type, indent, source, end });\n }\n }\n}\n\nexports.createScalarToken = createScalarToken;\nexports.resolveAsScalar = resolveAsScalar;\nexports.setScalarValue = setScalarValue;\n","'use strict';\n\n/**\n * Stringify a CST document, token, or collection item\n *\n * Fair warning: This applies no validation whatsoever, and\n * simply concatenates the sources in their logical order.\n */\nconst stringify = (cst) => 'type' in cst ? 
stringifyToken(cst) : stringifyItem(cst);\nfunction stringifyToken(token) {\n switch (token.type) {\n case 'block-scalar': {\n let res = '';\n for (const tok of token.props)\n res += stringifyToken(tok);\n return res + token.source;\n }\n case 'block-map':\n case 'block-seq': {\n let res = '';\n for (const item of token.items)\n res += stringifyItem(item);\n return res;\n }\n case 'flow-collection': {\n let res = token.start.source;\n for (const item of token.items)\n res += stringifyItem(item);\n for (const st of token.end)\n res += st.source;\n return res;\n }\n case 'document': {\n let res = stringifyItem(token);\n if (token.end)\n for (const st of token.end)\n res += st.source;\n return res;\n }\n default: {\n let res = token.source;\n if ('end' in token && token.end)\n for (const st of token.end)\n res += st.source;\n return res;\n }\n }\n}\nfunction stringifyItem({ start, key, sep, value }) {\n let res = '';\n for (const st of start)\n res += st.source;\n if (key)\n res += stringifyToken(key);\n if (sep)\n for (const st of sep)\n res += st.source;\n if (value)\n res += stringifyToken(value);\n return res;\n}\n\nexports.stringify = stringify;\n","'use strict';\n\nconst BREAK = Symbol('break visit');\nconst SKIP = Symbol('skip children');\nconst REMOVE = Symbol('remove item');\n/**\n * Apply a visitor to a CST document or item.\n *\n * Walks through the tree (depth-first) starting from the root, calling a\n * `visitor` function with two arguments when entering each item:\n * - `item`: The current item, which included the following members:\n * - `start: SourceToken[]` – Source tokens before the key or value,\n * possibly including its anchor or tag.\n * - `key?: Token | null` – Set for pair values. May then be `null`, if\n * the key before the `:` separator is empty.\n * - `sep?: SourceToken[]` – Source tokens between the key and the value,\n * which should include the `:` map value indicator if `value` is set.\n * - `value?: Token` – The value of a sequence item, or of a map pair.\n * - `path`: The steps from the root to the current node, as an array of\n * `['key' | 'value', number]` tuples.\n *\n * The return value of the visitor may be used to control the traversal:\n * - `undefined` (default): Do nothing and continue\n * - `visit.SKIP`: Do not visit the children of this token, continue with\n * next sibling\n * - `visit.BREAK`: Terminate traversal completely\n * - `visit.REMOVE`: Remove the current item, then continue with the next one\n * - `number`: Set the index of the next step. This is useful especially if\n * the index of the current token has changed.\n * - `function`: Define the next visitor for this item. 
After the original\n * visitor is called on item entry, next visitors are called after handling\n * a non-empty `key` and when exiting the item.\n */\nfunction visit(cst, visitor) {\n if ('type' in cst && cst.type === 'document')\n cst = { start: cst.start, value: cst.value };\n _visit(Object.freeze([]), cst, visitor);\n}\n// Without the `as symbol` casts, TS declares these in the `visit`\n// namespace using `var`, but then complains about that because\n// `unique symbol` must be `const`.\n/** Terminate visit traversal completely */\nvisit.BREAK = BREAK;\n/** Do not visit the children of the current item */\nvisit.SKIP = SKIP;\n/** Remove the current item */\nvisit.REMOVE = REMOVE;\n/** Find the item at `path` from `cst` as the root */\nvisit.itemAtPath = (cst, path) => {\n let item = cst;\n for (const [field, index] of path) {\n const tok = item?.[field];\n if (tok && 'items' in tok) {\n item = tok.items[index];\n }\n else\n return undefined;\n }\n return item;\n};\n/**\n * Get the immediate parent collection of the item at `path` from `cst` as the root.\n *\n * Throws an error if the collection is not found, which should never happen if the item itself exists.\n */\nvisit.parentCollection = (cst, path) => {\n const parent = visit.itemAtPath(cst, path.slice(0, -1));\n const field = path[path.length - 1][0];\n const coll = parent?.[field];\n if (coll && 'items' in coll)\n return coll;\n throw new Error('Parent collection not found');\n};\nfunction _visit(path, item, visitor) {\n let ctrl = visitor(item, path);\n if (typeof ctrl === 'symbol')\n return ctrl;\n for (const field of ['key', 'value']) {\n const token = item[field];\n if (token && 'items' in token) {\n for (let i = 0; i < token.items.length; ++i) {\n const ci = _visit(Object.freeze(path.concat([[field, i]])), token.items[i], visitor);\n if (typeof ci === 'number')\n i = ci - 1;\n else if (ci === BREAK)\n return BREAK;\n else if (ci === REMOVE) {\n token.items.splice(i, 1);\n i -= 1;\n }\n }\n if (typeof ctrl === 'function' && field === 'key')\n ctrl = ctrl(item, path);\n }\n }\n return typeof ctrl === 'function' ? ctrl(item, path) : ctrl;\n}\n\nexports.visit = visit;\n","'use strict';\n\nvar cstScalar = require('./cst-scalar.js');\nvar cstStringify = require('./cst-stringify.js');\nvar cstVisit = require('./cst-visit.js');\n\n/** The byte order mark */\nconst BOM = '\\u{FEFF}';\n/** Start of doc-mode */\nconst DOCUMENT = '\\x02'; // C0: Start of Text\n/** Unexpected end of flow-mode */\nconst FLOW_END = '\\x18'; // C0: Cancel\n/** Next token is a scalar value */\nconst SCALAR = '\\x1f'; // C0: Unit Separator\n/** @returns `true` if `token` is a flow or block collection */\nconst isCollection = (token) => !!token && 'items' in token;\n/** @returns `true` if `token` is a flow or block scalar; not an alias */\nconst isScalar = (token) => !!token &&\n (token.type === 'scalar' ||\n token.type === 'single-quoted-scalar' ||\n token.type === 'double-quoted-scalar' ||\n token.type === 'block-scalar');\n/* istanbul ignore next */\n/** Get a printable representation of a lexer token */\nfunction prettyToken(token) {\n switch (token) {\n case BOM:\n return '';\n case DOCUMENT:\n return '';\n case FLOW_END:\n return '';\n case SCALAR:\n return '';\n default:\n return JSON.stringify(token);\n }\n}\n/** Identify the type of a lexer token. May return `null` for unknown tokens. 
*/\nfunction tokenType(source) {\n switch (source) {\n case BOM:\n return 'byte-order-mark';\n case DOCUMENT:\n return 'doc-mode';\n case FLOW_END:\n return 'flow-error-end';\n case SCALAR:\n return 'scalar';\n case '---':\n return 'doc-start';\n case '...':\n return 'doc-end';\n case '':\n case '\\n':\n case '\\r\\n':\n return 'newline';\n case '-':\n return 'seq-item-ind';\n case '?':\n return 'explicit-key-ind';\n case ':':\n return 'map-value-ind';\n case '{':\n return 'flow-map-start';\n case '}':\n return 'flow-map-end';\n case '[':\n return 'flow-seq-start';\n case ']':\n return 'flow-seq-end';\n case ',':\n return 'comma';\n }\n switch (source[0]) {\n case ' ':\n case '\\t':\n return 'space';\n case '#':\n return 'comment';\n case '%':\n return 'directive-line';\n case '*':\n return 'alias';\n case '&':\n return 'anchor';\n case '!':\n return 'tag';\n case \"'\":\n return 'single-quoted-scalar';\n case '\"':\n return 'double-quoted-scalar';\n case '|':\n case '>':\n return 'block-scalar-header';\n }\n return null;\n}\n\nexports.createScalarToken = cstScalar.createScalarToken;\nexports.resolveAsScalar = cstScalar.resolveAsScalar;\nexports.setScalarValue = cstScalar.setScalarValue;\nexports.stringify = cstStringify.stringify;\nexports.visit = cstVisit.visit;\nexports.BOM = BOM;\nexports.DOCUMENT = DOCUMENT;\nexports.FLOW_END = FLOW_END;\nexports.SCALAR = SCALAR;\nexports.isCollection = isCollection;\nexports.isScalar = isScalar;\nexports.prettyToken = prettyToken;\nexports.tokenType = tokenType;\n","'use strict';\n\nvar cst = require('./cst.js');\n\n/*\nSTART -> stream\n\nstream\n directive -> line-end -> stream\n indent + line-end -> stream\n [else] -> line-start\n\nline-end\n comment -> line-end\n newline -> .\n input-end -> END\n\nline-start\n doc-start -> doc\n doc-end -> stream\n [else] -> indent -> block-start\n\nblock-start\n seq-item-start -> block-start\n explicit-key-start -> block-start\n map-value-start -> block-start\n [else] -> doc\n\ndoc\n line-end -> line-start\n spaces -> doc\n anchor -> doc\n tag -> doc\n flow-start -> flow -> doc\n flow-end -> error -> doc\n seq-item-start -> error -> doc\n explicit-key-start -> error -> doc\n map-value-start -> doc\n alias -> doc\n quote-start -> quoted-scalar -> doc\n block-scalar-header -> line-end -> block-scalar(min) -> line-start\n [else] -> plain-scalar(false, min) -> doc\n\nflow\n line-end -> flow\n spaces -> flow\n anchor -> flow\n tag -> flow\n flow-start -> flow -> flow\n flow-end -> .\n seq-item-start -> error -> flow\n explicit-key-start -> flow\n map-value-start -> flow\n alias -> flow\n quote-start -> quoted-scalar -> flow\n comma -> flow\n [else] -> plain-scalar(true, 0) -> flow\n\nquoted-scalar\n quote-end -> .\n [else] -> quoted-scalar\n\nblock-scalar(min)\n newline + peek(indent < min) -> .\n [else] -> block-scalar(min)\n\nplain-scalar(is-flow, min)\n scalar-end(is-flow) -> .\n peek(newline + (indent < min)) -> .\n [else] -> plain-scalar(min)\n*/\nfunction isEmpty(ch) {\n switch (ch) {\n case undefined:\n case ' ':\n case '\\n':\n case '\\r':\n case '\\t':\n return true;\n default:\n return false;\n }\n}\nconst hexDigits = '0123456789ABCDEFabcdef'.split('');\nconst tagChars = \"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-#;/?:@&=+$_.!~*'()\".split('');\nconst invalidFlowScalarChars = ',[]{}'.split('');\nconst invalidAnchorChars = ' ,[]{}\\n\\r\\t'.split('');\nconst isNotAnchorChar = (ch) => !ch || invalidAnchorChars.includes(ch);\n/**\n * Splits an input string into lexical tokens, i.e. 
smaller strings that are\n * easily identifiable by `tokens.tokenType()`.\n *\n * Lexing starts always in a \"stream\" context. Incomplete input may be buffered\n * until a complete token can be emitted.\n *\n * In addition to slices of the original input, the following control characters\n * may also be emitted:\n *\n * - `\\x02` (Start of Text): A document starts with the next token\n * - `\\x18` (Cancel): Unexpected end of flow-mode (indicates an error)\n * - `\\x1f` (Unit Separator): Next token is a scalar value\n * - `\\u{FEFF}` (Byte order mark): Emitted separately outside documents\n */\nclass Lexer {\n constructor() {\n /**\n * Flag indicating whether the end of the current buffer marks the end of\n * all input\n */\n this.atEnd = false;\n /**\n * Explicit indent set in block scalar header, as an offset from the current\n * minimum indent, so e.g. set to 1 from a header `|2+`. Set to -1 if not\n * explicitly set.\n */\n this.blockScalarIndent = -1;\n /**\n * Block scalars that include a + (keep) chomping indicator in their header\n * include trailing empty lines, which are otherwise excluded from the\n * scalar's contents.\n */\n this.blockScalarKeep = false;\n /** Current input */\n this.buffer = '';\n /**\n * Flag noting whether the map value indicator : can immediately follow this\n * node within a flow context.\n */\n this.flowKey = false;\n /** Count of surrounding flow collection levels. */\n this.flowLevel = 0;\n /**\n * Minimum level of indentation required for next lines to be parsed as a\n * part of the current scalar value.\n */\n this.indentNext = 0;\n /** Indentation level of the current line. */\n this.indentValue = 0;\n /** Position of the next \\n character. */\n this.lineEndPos = null;\n /** Stores the state of the lexer if reaching the end of incpomplete input */\n this.next = null;\n /** A pointer to `buffer`; the current position of the lexer. */\n this.pos = 0;\n }\n /**\n * Generate YAML tokens from the `source` string. If `incomplete`,\n * a part of the last line may be left as a buffer for the next call.\n *\n * @returns A generator of lexical tokens\n */\n *lex(source, incomplete = false) {\n if (source) {\n this.buffer = this.buffer ? this.buffer + source : source;\n this.lineEndPos = null;\n }\n this.atEnd = !incomplete;\n let next = this.next ?? 'stream';\n while (next && (incomplete || this.hasChars(1)))\n next = yield* this.parseNext(next);\n }\n atLineEnd() {\n let i = this.pos;\n let ch = this.buffer[i];\n while (ch === ' ' || ch === '\\t')\n ch = this.buffer[++i];\n if (!ch || ch === '#' || ch === '\\n')\n return true;\n if (ch === '\\r')\n return this.buffer[i + 1] === '\\n';\n return false;\n }\n charAt(n) {\n return this.buffer[this.pos + n];\n }\n continueScalar(offset) {\n let ch = this.buffer[offset];\n if (this.indentNext > 0) {\n let indent = 0;\n while (ch === ' ')\n ch = this.buffer[++indent + offset];\n if (ch === '\\r') {\n const next = this.buffer[indent + offset + 1];\n if (next === '\\n' || (!next && !this.atEnd))\n return offset + indent + 1;\n }\n return ch === '\\n' || indent >= this.indentNext || (!ch && !this.atEnd)\n ? 
offset + indent\n : -1;\n }\n if (ch === '-' || ch === '.') {\n const dt = this.buffer.substr(offset, 3);\n if ((dt === '---' || dt === '...') && isEmpty(this.buffer[offset + 3]))\n return -1;\n }\n return offset;\n }\n getLine() {\n let end = this.lineEndPos;\n if (typeof end !== 'number' || (end !== -1 && end < this.pos)) {\n end = this.buffer.indexOf('\\n', this.pos);\n this.lineEndPos = end;\n }\n if (end === -1)\n return this.atEnd ? this.buffer.substring(this.pos) : null;\n if (this.buffer[end - 1] === '\\r')\n end -= 1;\n return this.buffer.substring(this.pos, end);\n }\n hasChars(n) {\n return this.pos + n <= this.buffer.length;\n }\n setNext(state) {\n this.buffer = this.buffer.substring(this.pos);\n this.pos = 0;\n this.lineEndPos = null;\n this.next = state;\n return null;\n }\n peek(n) {\n return this.buffer.substr(this.pos, n);\n }\n *parseNext(next) {\n switch (next) {\n case 'stream':\n return yield* this.parseStream();\n case 'line-start':\n return yield* this.parseLineStart();\n case 'block-start':\n return yield* this.parseBlockStart();\n case 'doc':\n return yield* this.parseDocument();\n case 'flow':\n return yield* this.parseFlowCollection();\n case 'quoted-scalar':\n return yield* this.parseQuotedScalar();\n case 'block-scalar':\n return yield* this.parseBlockScalar();\n case 'plain-scalar':\n return yield* this.parsePlainScalar();\n }\n }\n *parseStream() {\n let line = this.getLine();\n if (line === null)\n return this.setNext('stream');\n if (line[0] === cst.BOM) {\n yield* this.pushCount(1);\n line = line.substring(1);\n }\n if (line[0] === '%') {\n let dirEnd = line.length;\n const cs = line.indexOf('#');\n if (cs !== -1) {\n const ch = line[cs - 1];\n if (ch === ' ' || ch === '\\t')\n dirEnd = cs - 1;\n }\n while (true) {\n const ch = line[dirEnd - 1];\n if (ch === ' ' || ch === '\\t')\n dirEnd -= 1;\n else\n break;\n }\n const n = (yield* this.pushCount(dirEnd)) + (yield* this.pushSpaces(true));\n yield* this.pushCount(line.length - n); // possible comment\n this.pushNewline();\n return 'stream';\n }\n if (this.atLineEnd()) {\n const sp = yield* this.pushSpaces(true);\n yield* this.pushCount(line.length - sp);\n yield* this.pushNewline();\n return 'stream';\n }\n yield cst.DOCUMENT;\n return yield* this.parseLineStart();\n }\n *parseLineStart() {\n const ch = this.charAt(0);\n if (!ch && !this.atEnd)\n return this.setNext('line-start');\n if (ch === '-' || ch === '.') {\n if (!this.atEnd && !this.hasChars(4))\n return this.setNext('line-start');\n const s = this.peek(3);\n if (s === '---' && isEmpty(this.charAt(3))) {\n yield* this.pushCount(3);\n this.indentValue = 0;\n this.indentNext = 0;\n return 'doc';\n }\n else if (s === '...' && isEmpty(this.charAt(3))) {\n yield* this.pushCount(3);\n return 'stream';\n }\n }\n this.indentValue = yield* this.pushSpaces(false);\n if (this.indentNext > this.indentValue && !isEmpty(this.charAt(1)))\n this.indentNext = this.indentValue;\n return yield* this.parseBlockStart();\n }\n *parseBlockStart() {\n const [ch0, ch1] = this.peek(2);\n if (!ch1 && !this.atEnd)\n return this.setNext('block-start');\n if ((ch0 === '-' || ch0 === '?' 
|| ch0 === ':') && isEmpty(ch1)) {\n const n = (yield* this.pushCount(1)) + (yield* this.pushSpaces(true));\n this.indentNext = this.indentValue + 1;\n this.indentValue += n;\n return yield* this.parseBlockStart();\n }\n return 'doc';\n }\n *parseDocument() {\n yield* this.pushSpaces(true);\n const line = this.getLine();\n if (line === null)\n return this.setNext('doc');\n let n = yield* this.pushIndicators();\n switch (line[n]) {\n case '#':\n yield* this.pushCount(line.length - n);\n // fallthrough\n case undefined:\n yield* this.pushNewline();\n return yield* this.parseLineStart();\n case '{':\n case '[':\n yield* this.pushCount(1);\n this.flowKey = false;\n this.flowLevel = 1;\n return 'flow';\n case '}':\n case ']':\n // this is an error\n yield* this.pushCount(1);\n return 'doc';\n case '*':\n yield* this.pushUntil(isNotAnchorChar);\n return 'doc';\n case '\"':\n case \"'\":\n return yield* this.parseQuotedScalar();\n case '|':\n case '>':\n n += yield* this.parseBlockScalarHeader();\n n += yield* this.pushSpaces(true);\n yield* this.pushCount(line.length - n);\n yield* this.pushNewline();\n return yield* this.parseBlockScalar();\n default:\n return yield* this.parsePlainScalar();\n }\n }\n *parseFlowCollection() {\n let nl, sp;\n let indent = -1;\n do {\n nl = yield* this.pushNewline();\n if (nl > 0) {\n sp = yield* this.pushSpaces(false);\n this.indentValue = indent = sp;\n }\n else {\n sp = 0;\n }\n sp += yield* this.pushSpaces(true);\n } while (nl + sp > 0);\n const line = this.getLine();\n if (line === null)\n return this.setNext('flow');\n if ((indent !== -1 && indent < this.indentNext && line[0] !== '#') ||\n (indent === 0 &&\n (line.startsWith('---') || line.startsWith('...')) &&\n isEmpty(line[3]))) {\n // Allowing for the terminal ] or } at the same (rather than greater)\n // indent level as the initial [ or { is technically invalid, but\n // failing here would be surprising to users.\n const atFlowEndMarker = indent === this.indentNext - 1 &&\n this.flowLevel === 1 &&\n (line[0] === ']' || line[0] === '}');\n if (!atFlowEndMarker) {\n // this is an error\n this.flowLevel = 0;\n yield cst.FLOW_END;\n return yield* this.parseLineStart();\n }\n }\n let n = 0;\n while (line[n] === ',') {\n n += yield* this.pushCount(1);\n n += yield* this.pushSpaces(true);\n this.flowKey = false;\n }\n n += yield* this.pushIndicators();\n switch (line[n]) {\n case undefined:\n return 'flow';\n case '#':\n yield* this.pushCount(line.length - n);\n return 'flow';\n case '{':\n case '[':\n yield* this.pushCount(1);\n this.flowKey = false;\n this.flowLevel += 1;\n return 'flow';\n case '}':\n case ']':\n yield* this.pushCount(1);\n this.flowKey = true;\n this.flowLevel -= 1;\n return this.flowLevel ? 
'flow' : 'doc';\n case '*':\n yield* this.pushUntil(isNotAnchorChar);\n return 'flow';\n case '\"':\n case \"'\":\n this.flowKey = true;\n return yield* this.parseQuotedScalar();\n case ':': {\n const next = this.charAt(1);\n if (this.flowKey || isEmpty(next) || next === ',') {\n this.flowKey = false;\n yield* this.pushCount(1);\n yield* this.pushSpaces(true);\n return 'flow';\n }\n }\n // fallthrough\n default:\n this.flowKey = false;\n return yield* this.parsePlainScalar();\n }\n }\n *parseQuotedScalar() {\n const quote = this.charAt(0);\n let end = this.buffer.indexOf(quote, this.pos + 1);\n if (quote === \"'\") {\n while (end !== -1 && this.buffer[end + 1] === \"'\")\n end = this.buffer.indexOf(\"'\", end + 2);\n }\n else {\n // double-quote\n while (end !== -1) {\n let n = 0;\n while (this.buffer[end - 1 - n] === '\\\\')\n n += 1;\n if (n % 2 === 0)\n break;\n end = this.buffer.indexOf('\"', end + 1);\n }\n }\n // Only looking for newlines within the quotes\n const qb = this.buffer.substring(0, end);\n let nl = qb.indexOf('\\n', this.pos);\n if (nl !== -1) {\n while (nl !== -1) {\n const cs = this.continueScalar(nl + 1);\n if (cs === -1)\n break;\n nl = qb.indexOf('\\n', cs);\n }\n if (nl !== -1) {\n // this is an error caused by an unexpected unindent\n end = nl - (qb[nl - 1] === '\\r' ? 2 : 1);\n }\n }\n if (end === -1) {\n if (!this.atEnd)\n return this.setNext('quoted-scalar');\n end = this.buffer.length;\n }\n yield* this.pushToIndex(end + 1, false);\n return this.flowLevel ? 'flow' : 'doc';\n }\n *parseBlockScalarHeader() {\n this.blockScalarIndent = -1;\n this.blockScalarKeep = false;\n let i = this.pos;\n while (true) {\n const ch = this.buffer[++i];\n if (ch === '+')\n this.blockScalarKeep = true;\n else if (ch > '0' && ch <= '9')\n this.blockScalarIndent = Number(ch) - 1;\n else if (ch !== '-')\n break;\n }\n return yield* this.pushUntil(ch => isEmpty(ch) || ch === '#');\n }\n *parseBlockScalar() {\n let nl = this.pos - 1; // may be -1 if this.pos === 0\n let indent = 0;\n let ch;\n loop: for (let i = this.pos; (ch = this.buffer[i]); ++i) {\n switch (ch) {\n case ' ':\n indent += 1;\n break;\n case '\\n':\n nl = i;\n indent = 0;\n break;\n case '\\r': {\n const next = this.buffer[i + 1];\n if (!next && !this.atEnd)\n return this.setNext('block-scalar');\n if (next === '\\n')\n break;\n } // fallthrough\n default:\n break loop;\n }\n }\n if (!ch && !this.atEnd)\n return this.setNext('block-scalar');\n if (indent >= this.indentNext) {\n if (this.blockScalarIndent === -1)\n this.indentNext = indent;\n else\n this.indentNext += this.blockScalarIndent;\n do {\n const cs = this.continueScalar(nl + 1);\n if (cs === -1)\n break;\n nl = this.buffer.indexOf('\\n', cs);\n } while (nl !== -1);\n if (nl === -1) {\n if (!this.atEnd)\n return this.setNext('block-scalar');\n nl = this.buffer.length;\n }\n }\n if (!this.blockScalarKeep) {\n do {\n let i = nl - 1;\n let ch = this.buffer[i];\n if (ch === '\\r')\n ch = this.buffer[--i];\n const lastChar = i; // Drop the line if last char not more indented\n while (ch === ' ' || ch === '\\t')\n ch = this.buffer[--i];\n if (ch === '\\n' && i >= this.pos && i + 1 + indent > lastChar)\n nl = i;\n else\n break;\n } while (true);\n }\n yield cst.SCALAR;\n yield* this.pushToIndex(nl + 1, true);\n return yield* this.parseLineStart();\n }\n *parsePlainScalar() {\n const inFlow = this.flowLevel > 0;\n let end = this.pos - 1;\n let i = this.pos - 1;\n let ch;\n while ((ch = this.buffer[++i])) {\n if (ch === ':') {\n const next = this.buffer[i + 1];\n if 
(isEmpty(next) || (inFlow && next === ','))\n break;\n end = i;\n }\n else if (isEmpty(ch)) {\n let next = this.buffer[i + 1];\n if (ch === '\\r') {\n if (next === '\\n') {\n i += 1;\n ch = '\\n';\n next = this.buffer[i + 1];\n }\n else\n end = i;\n }\n if (next === '#' || (inFlow && invalidFlowScalarChars.includes(next)))\n break;\n if (ch === '\\n') {\n const cs = this.continueScalar(i + 1);\n if (cs === -1)\n break;\n i = Math.max(i, cs - 2); // to advance, but still account for ' #'\n }\n }\n else {\n if (inFlow && invalidFlowScalarChars.includes(ch))\n break;\n end = i;\n }\n }\n if (!ch && !this.atEnd)\n return this.setNext('plain-scalar');\n yield cst.SCALAR;\n yield* this.pushToIndex(end + 1, true);\n return inFlow ? 'flow' : 'doc';\n }\n *pushCount(n) {\n if (n > 0) {\n yield this.buffer.substr(this.pos, n);\n this.pos += n;\n return n;\n }\n return 0;\n }\n *pushToIndex(i, allowEmpty) {\n const s = this.buffer.slice(this.pos, i);\n if (s) {\n yield s;\n this.pos += s.length;\n return s.length;\n }\n else if (allowEmpty)\n yield '';\n return 0;\n }\n *pushIndicators() {\n switch (this.charAt(0)) {\n case '!':\n return ((yield* this.pushTag()) +\n (yield* this.pushSpaces(true)) +\n (yield* this.pushIndicators()));\n case '&':\n return ((yield* this.pushUntil(isNotAnchorChar)) +\n (yield* this.pushSpaces(true)) +\n (yield* this.pushIndicators()));\n case '-': // this is an error\n case '?': // this is an error outside flow collections\n case ':': {\n const inFlow = this.flowLevel > 0;\n const ch1 = this.charAt(1);\n if (isEmpty(ch1) || (inFlow && invalidFlowScalarChars.includes(ch1))) {\n if (!inFlow)\n this.indentNext = this.indentValue + 1;\n else if (this.flowKey)\n this.flowKey = false;\n return ((yield* this.pushCount(1)) +\n (yield* this.pushSpaces(true)) +\n (yield* this.pushIndicators()));\n }\n }\n }\n return 0;\n }\n *pushTag() {\n if (this.charAt(1) === '<') {\n let i = this.pos + 2;\n let ch = this.buffer[i];\n while (!isEmpty(ch) && ch !== '>')\n ch = this.buffer[++i];\n return yield* this.pushToIndex(ch === '>' ? i + 1 : i, false);\n }\n else {\n let i = this.pos + 1;\n let ch = this.buffer[i];\n while (ch) {\n if (tagChars.includes(ch))\n ch = this.buffer[++i];\n else if (ch === '%' &&\n hexDigits.includes(this.buffer[i + 1]) &&\n hexDigits.includes(this.buffer[i + 2])) {\n ch = this.buffer[(i += 3)];\n }\n else\n break;\n }\n return yield* this.pushToIndex(i, false);\n }\n }\n *pushNewline() {\n const ch = this.buffer[this.pos];\n if (ch === '\\n')\n return yield* this.pushCount(1);\n else if (ch === '\\r' && this.charAt(1) === '\\n')\n return yield* this.pushCount(2);\n else\n return 0;\n }\n *pushSpaces(allowTabs) {\n let i = this.pos - 1;\n let ch;\n do {\n ch = this.buffer[++i];\n } while (ch === ' ' || (allowTabs && ch === '\\t'));\n const n = i - this.pos;\n if (n > 0) {\n yield this.buffer.substr(this.pos, n);\n this.pos = i;\n }\n return n;\n }\n *pushUntil(test) {\n let i = this.pos;\n let ch = this.buffer[i];\n while (!test(ch))\n ch = this.buffer[++i];\n return yield* this.pushToIndex(i, false);\n }\n}\n\nexports.Lexer = Lexer;\n","'use strict';\n\n/**\n * Tracks newlines during parsing in order to provide an efficient API for\n * determining the one-indexed `{ line, col }` position for any offset\n * within the input.\n */\nclass LineCounter {\n constructor() {\n this.lineStarts = [];\n /**\n * Should be called in ascending order. 
Otherwise, call\n * `lineCounter.lineStarts.sort()` before calling `linePos()`.\n */\n this.addNewLine = (offset) => this.lineStarts.push(offset);\n /**\n * Performs a binary search and returns the 1-indexed { line, col }\n * position of `offset`. If `line === 0`, `addNewLine` has never been\n * called or `offset` is before the first known newline.\n */\n this.linePos = (offset) => {\n let low = 0;\n let high = this.lineStarts.length;\n while (low < high) {\n const mid = (low + high) >> 1; // Math.floor((low + high) / 2)\n if (this.lineStarts[mid] < offset)\n low = mid + 1;\n else\n high = mid;\n }\n if (this.lineStarts[low] === offset)\n return { line: low + 1, col: 1 };\n if (low === 0)\n return { line: 0, col: offset };\n const start = this.lineStarts[low - 1];\n return { line: low, col: offset - start + 1 };\n };\n }\n}\n\nexports.LineCounter = LineCounter;\n","'use strict';\n\nvar cst = require('./cst.js');\nvar lexer = require('./lexer.js');\n\nfunction includesToken(list, type) {\n for (let i = 0; i < list.length; ++i)\n if (list[i].type === type)\n return true;\n return false;\n}\nfunction findNonEmptyIndex(list) {\n for (let i = 0; i < list.length; ++i) {\n switch (list[i].type) {\n case 'space':\n case 'comment':\n case 'newline':\n break;\n default:\n return i;\n }\n }\n return -1;\n}\nfunction isFlowToken(token) {\n switch (token?.type) {\n case 'alias':\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar':\n case 'flow-collection':\n return true;\n default:\n return false;\n }\n}\nfunction getPrevProps(parent) {\n switch (parent.type) {\n case 'document':\n return parent.start;\n case 'block-map': {\n const it = parent.items[parent.items.length - 1];\n return it.sep ?? it.start;\n }\n case 'block-seq':\n return parent.items[parent.items.length - 1].start;\n /* istanbul ignore next should not happen */\n default:\n return [];\n }\n}\n/** Note: May modify input array */\nfunction getFirstKeyStartProps(prev) {\n if (prev.length === 0)\n return [];\n let i = prev.length;\n loop: while (--i >= 0) {\n switch (prev[i].type) {\n case 'doc-start':\n case 'explicit-key-ind':\n case 'map-value-ind':\n case 'seq-item-ind':\n case 'newline':\n break loop;\n }\n }\n while (prev[++i]?.type === 'space') {\n /* loop */\n }\n return prev.splice(i, prev.length);\n}\nfunction fixFlowSeqItems(fc) {\n if (fc.start.type === 'flow-seq-start') {\n for (const it of fc.items) {\n if (it.sep &&\n !it.value &&\n !includesToken(it.start, 'explicit-key-ind') &&\n !includesToken(it.sep, 'map-value-ind')) {\n if (it.key)\n it.value = it.key;\n delete it.key;\n if (isFlowToken(it.value)) {\n if (it.value.end)\n Array.prototype.push.apply(it.value.end, it.sep);\n else\n it.value.end = it.sep;\n }\n else\n Array.prototype.push.apply(it.start, it.sep);\n delete it.sep;\n }\n }\n }\n}\n/**\n * A YAML concrete syntax tree (CST) parser\n *\n * ```ts\n * const src: string = ...\n * for (const token of new Parser().parse(src)) {\n * // token: Token\n * }\n * ```\n *\n * To use the parser with a user-provided lexer:\n *\n * ```ts\n * function* parse(source: string, lexer: Lexer) {\n * const parser = new Parser()\n * for (const lexeme of lexer.lex(source))\n * yield* parser.next(lexeme)\n * yield* parser.end()\n * }\n *\n * const src: string = ...\n * const lexer = new Lexer()\n * for (const token of parse(src, lexer)) {\n * // token: Token\n * }\n * ```\n */\nclass Parser {\n /**\n * @param onNewLine - If defined, called separately with the start position of\n * each new line (in `parse()`, 
including the start of input).\n */\n constructor(onNewLine) {\n /** If true, space and sequence indicators count as indentation */\n this.atNewLine = true;\n /** If true, next token is a scalar value */\n this.atScalar = false;\n /** Current indentation level */\n this.indent = 0;\n /** Current offset since the start of parsing */\n this.offset = 0;\n /** On the same line with a block map key */\n this.onKeyLine = false;\n /** Top indicates the node that's currently being built */\n this.stack = [];\n /** The source of the current token, set in parse() */\n this.source = '';\n /** The type of the current token, set in parse() */\n this.type = '';\n // Must be defined after `next()`\n this.lexer = new lexer.Lexer();\n this.onNewLine = onNewLine;\n }\n /**\n * Parse `source` as a YAML stream.\n * If `incomplete`, a part of the last line may be left as a buffer for the next call.\n *\n * Errors are not thrown, but yielded as `{ type: 'error', message }` tokens.\n *\n * @returns A generator of tokens representing each directive, document, and other structure.\n */\n *parse(source, incomplete = false) {\n if (this.onNewLine && this.offset === 0)\n this.onNewLine(0);\n for (const lexeme of this.lexer.lex(source, incomplete))\n yield* this.next(lexeme);\n if (!incomplete)\n yield* this.end();\n }\n /**\n * Advance the parser by the `source` of one lexical token.\n */\n *next(source) {\n this.source = source;\n if (process.env.LOG_TOKENS)\n console.log('|', cst.prettyToken(source));\n if (this.atScalar) {\n this.atScalar = false;\n yield* this.step();\n this.offset += source.length;\n return;\n }\n const type = cst.tokenType(source);\n if (!type) {\n const message = `Not a YAML token: ${source}`;\n yield* this.pop({ type: 'error', offset: this.offset, message, source });\n this.offset += source.length;\n }\n else if (type === 'scalar') {\n this.atNewLine = false;\n this.atScalar = true;\n this.type = 'scalar';\n }\n else {\n this.type = type;\n yield* this.step();\n switch (type) {\n case 'newline':\n this.atNewLine = true;\n this.indent = 0;\n if (this.onNewLine)\n this.onNewLine(this.offset + source.length);\n break;\n case 'space':\n if (this.atNewLine && source[0] === ' ')\n this.indent += source.length;\n break;\n case 'explicit-key-ind':\n case 'map-value-ind':\n case 'seq-item-ind':\n if (this.atNewLine)\n this.indent += source.length;\n break;\n case 'doc-mode':\n case 'flow-error-end':\n return;\n default:\n this.atNewLine = false;\n }\n this.offset += source.length;\n }\n }\n /** Call at end of input to push out any remaining constructions */\n *end() {\n while (this.stack.length > 0)\n yield* this.pop();\n }\n get sourceToken() {\n const st = {\n type: this.type,\n offset: this.offset,\n indent: this.indent,\n source: this.source\n };\n return st;\n }\n *step() {\n const top = this.peek(1);\n if (this.type === 'doc-end' && (!top || top.type !== 'doc-end')) {\n while (this.stack.length > 0)\n yield* this.pop();\n this.stack.push({\n type: 'doc-end',\n offset: this.offset,\n source: this.source\n });\n return;\n }\n if (!top)\n return yield* this.stream();\n switch (top.type) {\n case 'document':\n return yield* this.document(top);\n case 'alias':\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar':\n return yield* this.scalar(top);\n case 'block-scalar':\n return yield* this.blockScalar(top);\n case 'block-map':\n return yield* this.blockMap(top);\n case 'block-seq':\n return yield* this.blockSequence(top);\n case 'flow-collection':\n return yield* 
this.flowCollection(top);\n case 'doc-end':\n return yield* this.documentEnd(top);\n }\n /* istanbul ignore next should not happen */\n yield* this.pop();\n }\n peek(n) {\n return this.stack[this.stack.length - n];\n }\n *pop(error) {\n const token = error ?? this.stack.pop();\n /* istanbul ignore if should not happen */\n if (!token) {\n const message = 'Tried to pop an empty stack';\n yield { type: 'error', offset: this.offset, source: '', message };\n }\n else if (this.stack.length === 0) {\n yield token;\n }\n else {\n const top = this.peek(1);\n if (token.type === 'block-scalar') {\n // Block scalars use their parent rather than header indent\n token.indent = 'indent' in top ? top.indent : 0;\n }\n else if (token.type === 'flow-collection' && top.type === 'document') {\n // Ignore all indent for top-level flow collections\n token.indent = 0;\n }\n if (token.type === 'flow-collection')\n fixFlowSeqItems(token);\n switch (top.type) {\n case 'document':\n top.value = token;\n break;\n case 'block-scalar':\n top.props.push(token); // error\n break;\n case 'block-map': {\n const it = top.items[top.items.length - 1];\n if (it.value) {\n top.items.push({ start: [], key: token, sep: [] });\n this.onKeyLine = true;\n return;\n }\n else if (it.sep) {\n it.value = token;\n }\n else {\n Object.assign(it, { key: token, sep: [] });\n this.onKeyLine = !includesToken(it.start, 'explicit-key-ind');\n return;\n }\n break;\n }\n case 'block-seq': {\n const it = top.items[top.items.length - 1];\n if (it.value)\n top.items.push({ start: [], value: token });\n else\n it.value = token;\n break;\n }\n case 'flow-collection': {\n const it = top.items[top.items.length - 1];\n if (!it || it.value)\n top.items.push({ start: [], key: token, sep: [] });\n else if (it.sep)\n it.value = token;\n else\n Object.assign(it, { key: token, sep: [] });\n return;\n }\n /* istanbul ignore next should not happen */\n default:\n yield* this.pop();\n yield* this.pop(token);\n }\n if ((top.type === 'document' ||\n top.type === 'block-map' ||\n top.type === 'block-seq') &&\n (token.type === 'block-map' || token.type === 'block-seq')) {\n const last = token.items[token.items.length - 1];\n if (last &&\n !last.sep &&\n !last.value &&\n last.start.length > 0 &&\n findNonEmptyIndex(last.start) === -1 &&\n (token.indent === 0 ||\n last.start.every(st => st.type !== 'comment' || st.indent < token.indent))) {\n if (top.type === 'document')\n top.end = last.start;\n else\n top.items.push({ start: last.start });\n token.items.splice(-1, 1);\n }\n }\n }\n }\n *stream() {\n switch (this.type) {\n case 'directive-line':\n yield { type: 'directive', offset: this.offset, source: this.source };\n return;\n case 'byte-order-mark':\n case 'space':\n case 'comment':\n case 'newline':\n yield this.sourceToken;\n return;\n case 'doc-mode':\n case 'doc-start': {\n const doc = {\n type: 'document',\n offset: this.offset,\n start: []\n };\n if (this.type === 'doc-start')\n doc.start.push(this.sourceToken);\n this.stack.push(doc);\n return;\n }\n }\n yield {\n type: 'error',\n offset: this.offset,\n message: `Unexpected ${this.type} token in YAML stream`,\n source: this.source\n };\n }\n *document(doc) {\n if (doc.value)\n return yield* this.lineEnd(doc);\n switch (this.type) {\n case 'doc-start': {\n if (findNonEmptyIndex(doc.start) !== -1) {\n yield* this.pop();\n yield* this.step();\n }\n else\n doc.start.push(this.sourceToken);\n return;\n }\n case 'anchor':\n case 'tag':\n case 'space':\n case 'comment':\n case 'newline':\n 
doc.start.push(this.sourceToken);\n return;\n }\n const bv = this.startBlockValue(doc);\n if (bv)\n this.stack.push(bv);\n else {\n yield {\n type: 'error',\n offset: this.offset,\n message: `Unexpected ${this.type} token in YAML document`,\n source: this.source\n };\n }\n }\n *scalar(scalar) {\n if (this.type === 'map-value-ind') {\n const prev = getPrevProps(this.peek(2));\n const start = getFirstKeyStartProps(prev);\n let sep;\n if (scalar.end) {\n sep = scalar.end;\n sep.push(this.sourceToken);\n delete scalar.end;\n }\n else\n sep = [this.sourceToken];\n const map = {\n type: 'block-map',\n offset: scalar.offset,\n indent: scalar.indent,\n items: [{ start, key: scalar, sep }]\n };\n this.onKeyLine = true;\n this.stack[this.stack.length - 1] = map;\n }\n else\n yield* this.lineEnd(scalar);\n }\n *blockScalar(scalar) {\n switch (this.type) {\n case 'space':\n case 'comment':\n case 'newline':\n scalar.props.push(this.sourceToken);\n return;\n case 'scalar':\n scalar.source = this.source;\n // block-scalar source includes trailing newline\n this.atNewLine = true;\n this.indent = 0;\n if (this.onNewLine) {\n let nl = this.source.indexOf('\\n') + 1;\n while (nl !== 0) {\n this.onNewLine(this.offset + nl);\n nl = this.source.indexOf('\\n', nl) + 1;\n }\n }\n yield* this.pop();\n break;\n /* istanbul ignore next should not happen */\n default:\n yield* this.pop();\n yield* this.step();\n }\n }\n *blockMap(map) {\n const it = map.items[map.items.length - 1];\n // it.sep is true-ish if pair already has key or : separator\n switch (this.type) {\n case 'newline':\n this.onKeyLine = false;\n if (it.value) {\n const end = 'end' in it.value ? it.value.end : undefined;\n const last = Array.isArray(end) ? end[end.length - 1] : undefined;\n if (last?.type === 'comment')\n end?.push(this.sourceToken);\n else\n map.items.push({ start: [this.sourceToken] });\n }\n else if (it.sep) {\n it.sep.push(this.sourceToken);\n }\n else {\n it.start.push(this.sourceToken);\n }\n return;\n case 'space':\n case 'comment':\n if (it.value) {\n map.items.push({ start: [this.sourceToken] });\n }\n else if (it.sep) {\n it.sep.push(this.sourceToken);\n }\n else {\n if (this.atIndentedComment(it.start, map.indent)) {\n const prev = map.items[map.items.length - 2];\n const end = prev?.value?.end;\n if (Array.isArray(end)) {\n Array.prototype.push.apply(end, it.start);\n end.push(this.sourceToken);\n map.items.pop();\n return;\n }\n }\n it.start.push(this.sourceToken);\n }\n return;\n }\n if (this.indent >= map.indent) {\n const atNextItem = !this.onKeyLine && this.indent === map.indent && it.sep;\n // For empty nodes, assign newline-separated not indented empty tokens to following node\n let start = [];\n if (atNextItem && it.sep && !it.value) {\n const nl = [];\n for (let i = 0; i < it.sep.length; ++i) {\n const st = it.sep[i];\n switch (st.type) {\n case 'newline':\n nl.push(i);\n break;\n case 'space':\n break;\n case 'comment':\n if (st.indent > map.indent)\n nl.length = 0;\n break;\n default:\n nl.length = 0;\n }\n }\n if (nl.length >= 2)\n start = it.sep.splice(nl[1]);\n }\n switch (this.type) {\n case 'anchor':\n case 'tag':\n if (atNextItem || it.value) {\n start.push(this.sourceToken);\n map.items.push({ start });\n this.onKeyLine = true;\n }\n else if (it.sep) {\n it.sep.push(this.sourceToken);\n }\n else {\n it.start.push(this.sourceToken);\n }\n return;\n case 'explicit-key-ind':\n if (!it.sep && !includesToken(it.start, 'explicit-key-ind')) {\n it.start.push(this.sourceToken);\n }\n else if (atNextItem || 
it.value) {\n start.push(this.sourceToken);\n map.items.push({ start });\n }\n else {\n this.stack.push({\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start: [this.sourceToken] }]\n });\n }\n this.onKeyLine = true;\n return;\n case 'map-value-ind':\n if (includesToken(it.start, 'explicit-key-ind')) {\n if (!it.sep) {\n if (includesToken(it.start, 'newline')) {\n Object.assign(it, { key: null, sep: [this.sourceToken] });\n }\n else {\n const start = getFirstKeyStartProps(it.start);\n this.stack.push({\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start, key: null, sep: [this.sourceToken] }]\n });\n }\n }\n else if (it.value) {\n map.items.push({ start: [], key: null, sep: [this.sourceToken] });\n }\n else if (includesToken(it.sep, 'map-value-ind')) {\n this.stack.push({\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start, key: null, sep: [this.sourceToken] }]\n });\n }\n else if (isFlowToken(it.key) &&\n !includesToken(it.sep, 'newline')) {\n const start = getFirstKeyStartProps(it.start);\n const key = it.key;\n const sep = it.sep;\n sep.push(this.sourceToken);\n // @ts-expect-error type guard is wrong here\n delete it.key, delete it.sep;\n this.stack.push({\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start, key, sep }]\n });\n }\n else if (start.length > 0) {\n // Not actually at next item\n it.sep = it.sep.concat(start, this.sourceToken);\n }\n else {\n it.sep.push(this.sourceToken);\n }\n }\n else {\n if (!it.sep) {\n Object.assign(it, { key: null, sep: [this.sourceToken] });\n }\n else if (it.value || atNextItem) {\n map.items.push({ start, key: null, sep: [this.sourceToken] });\n }\n else if (includesToken(it.sep, 'map-value-ind')) {\n this.stack.push({\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start: [], key: null, sep: [this.sourceToken] }]\n });\n }\n else {\n it.sep.push(this.sourceToken);\n }\n }\n this.onKeyLine = true;\n return;\n case 'alias':\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar': {\n const fs = this.flowScalar(this.type);\n if (atNextItem || it.value) {\n map.items.push({ start, key: fs, sep: [] });\n this.onKeyLine = true;\n }\n else if (it.sep) {\n this.stack.push(fs);\n }\n else {\n Object.assign(it, { key: fs, sep: [] });\n this.onKeyLine = true;\n }\n return;\n }\n default: {\n const bv = this.startBlockValue(map);\n if (bv) {\n if (atNextItem &&\n bv.type !== 'block-seq' &&\n includesToken(it.start, 'explicit-key-ind')) {\n map.items.push({ start });\n }\n this.stack.push(bv);\n return;\n }\n }\n }\n }\n yield* this.pop();\n yield* this.step();\n }\n *blockSequence(seq) {\n const it = seq.items[seq.items.length - 1];\n switch (this.type) {\n case 'newline':\n if (it.value) {\n const end = 'end' in it.value ? it.value.end : undefined;\n const last = Array.isArray(end) ? 
end[end.length - 1] : undefined;\n if (last?.type === 'comment')\n end?.push(this.sourceToken);\n else\n seq.items.push({ start: [this.sourceToken] });\n }\n else\n it.start.push(this.sourceToken);\n return;\n case 'space':\n case 'comment':\n if (it.value)\n seq.items.push({ start: [this.sourceToken] });\n else {\n if (this.atIndentedComment(it.start, seq.indent)) {\n const prev = seq.items[seq.items.length - 2];\n const end = prev?.value?.end;\n if (Array.isArray(end)) {\n Array.prototype.push.apply(end, it.start);\n end.push(this.sourceToken);\n seq.items.pop();\n return;\n }\n }\n it.start.push(this.sourceToken);\n }\n return;\n case 'anchor':\n case 'tag':\n if (it.value || this.indent <= seq.indent)\n break;\n it.start.push(this.sourceToken);\n return;\n case 'seq-item-ind':\n if (this.indent !== seq.indent)\n break;\n if (it.value || includesToken(it.start, 'seq-item-ind'))\n seq.items.push({ start: [this.sourceToken] });\n else\n it.start.push(this.sourceToken);\n return;\n }\n if (this.indent > seq.indent) {\n const bv = this.startBlockValue(seq);\n if (bv) {\n this.stack.push(bv);\n return;\n }\n }\n yield* this.pop();\n yield* this.step();\n }\n *flowCollection(fc) {\n const it = fc.items[fc.items.length - 1];\n if (this.type === 'flow-error-end') {\n let top;\n do {\n yield* this.pop();\n top = this.peek(1);\n } while (top && top.type === 'flow-collection');\n }\n else if (fc.end.length === 0) {\n switch (this.type) {\n case 'comma':\n case 'explicit-key-ind':\n if (!it || it.sep)\n fc.items.push({ start: [this.sourceToken] });\n else\n it.start.push(this.sourceToken);\n return;\n case 'map-value-ind':\n if (!it || it.value)\n fc.items.push({ start: [], key: null, sep: [this.sourceToken] });\n else if (it.sep)\n it.sep.push(this.sourceToken);\n else\n Object.assign(it, { key: null, sep: [this.sourceToken] });\n return;\n case 'space':\n case 'comment':\n case 'newline':\n case 'anchor':\n case 'tag':\n if (!it || it.value)\n fc.items.push({ start: [this.sourceToken] });\n else if (it.sep)\n it.sep.push(this.sourceToken);\n else\n it.start.push(this.sourceToken);\n return;\n case 'alias':\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar': {\n const fs = this.flowScalar(this.type);\n if (!it || it.value)\n fc.items.push({ start: [], key: fs, sep: [] });\n else if (it.sep)\n this.stack.push(fs);\n else\n Object.assign(it, { key: fs, sep: [] });\n return;\n }\n case 'flow-map-end':\n case 'flow-seq-end':\n fc.end.push(this.sourceToken);\n return;\n }\n const bv = this.startBlockValue(fc);\n /* istanbul ignore else should not happen */\n if (bv)\n this.stack.push(bv);\n else {\n yield* this.pop();\n yield* this.step();\n }\n }\n else {\n const parent = this.peek(2);\n if (parent.type === 'block-map' &&\n ((this.type === 'map-value-ind' && parent.indent === fc.indent) ||\n (this.type === 'newline' &&\n !parent.items[parent.items.length - 1].sep))) {\n yield* this.pop();\n yield* this.step();\n }\n else if (this.type === 'map-value-ind' &&\n parent.type !== 'flow-collection') {\n const prev = getPrevProps(parent);\n const start = getFirstKeyStartProps(prev);\n fixFlowSeqItems(fc);\n const sep = fc.end.splice(1, fc.end.length);\n sep.push(this.sourceToken);\n const map = {\n type: 'block-map',\n offset: fc.offset,\n indent: fc.indent,\n items: [{ start, key: fc, sep }]\n };\n this.onKeyLine = true;\n this.stack[this.stack.length - 1] = map;\n }\n else {\n yield* this.lineEnd(fc);\n }\n }\n }\n flowScalar(type) {\n if (this.onNewLine) {\n let nl = 
this.source.indexOf('\\n') + 1;\n while (nl !== 0) {\n this.onNewLine(this.offset + nl);\n nl = this.source.indexOf('\\n', nl) + 1;\n }\n }\n return {\n type,\n offset: this.offset,\n indent: this.indent,\n source: this.source\n };\n }\n startBlockValue(parent) {\n switch (this.type) {\n case 'alias':\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar':\n return this.flowScalar(this.type);\n case 'block-scalar-header':\n return {\n type: 'block-scalar',\n offset: this.offset,\n indent: this.indent,\n props: [this.sourceToken],\n source: ''\n };\n case 'flow-map-start':\n case 'flow-seq-start':\n return {\n type: 'flow-collection',\n offset: this.offset,\n indent: this.indent,\n start: this.sourceToken,\n items: [],\n end: []\n };\n case 'seq-item-ind':\n return {\n type: 'block-seq',\n offset: this.offset,\n indent: this.indent,\n items: [{ start: [this.sourceToken] }]\n };\n case 'explicit-key-ind': {\n this.onKeyLine = true;\n const prev = getPrevProps(parent);\n const start = getFirstKeyStartProps(prev);\n start.push(this.sourceToken);\n return {\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start }]\n };\n }\n case 'map-value-ind': {\n this.onKeyLine = true;\n const prev = getPrevProps(parent);\n const start = getFirstKeyStartProps(prev);\n return {\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start, key: null, sep: [this.sourceToken] }]\n };\n }\n }\n return null;\n }\n atIndentedComment(start, indent) {\n if (this.type !== 'comment')\n return false;\n if (this.indent <= indent)\n return false;\n return start.every(st => st.type === 'newline' || st.type === 'space');\n }\n *documentEnd(docEnd) {\n if (this.type !== 'doc-mode') {\n if (docEnd.end)\n docEnd.end.push(this.sourceToken);\n else\n docEnd.end = [this.sourceToken];\n if (this.type === 'newline')\n yield* this.pop();\n }\n }\n *lineEnd(token) {\n switch (this.type) {\n case 'comma':\n case 'doc-start':\n case 'doc-end':\n case 'flow-seq-end':\n case 'flow-map-end':\n case 'map-value-ind':\n yield* this.pop();\n yield* this.step();\n break;\n case 'newline':\n this.onKeyLine = false;\n // fallthrough\n case 'space':\n case 'comment':\n default:\n // all other values are errors\n if (token.end)\n token.end.push(this.sourceToken);\n else\n token.end = [this.sourceToken];\n if (this.type === 'newline')\n yield* this.pop();\n }\n }\n}\n\nexports.Parser = Parser;\n","'use strict';\n\nvar composer = require('./compose/composer.js');\nvar Document = require('./doc/Document.js');\nvar errors = require('./errors.js');\nvar log = require('./log.js');\nvar lineCounter = require('./parse/line-counter.js');\nvar parser = require('./parse/parser.js');\n\nfunction parseOptions(options) {\n const prettyErrors = options.prettyErrors !== false;\n const lineCounter$1 = options.lineCounter || (prettyErrors && new lineCounter.LineCounter()) || null;\n return { lineCounter: lineCounter$1, prettyErrors };\n}\n/**\n * Parse the input as a stream of YAML documents.\n *\n * Documents should be separated from each other by `...` or `---` marker lines.\n *\n * @returns If an empty `docs` array is returned, it will be of type\n * EmptyStream and contain additional stream information. 
In\n * TypeScript, you should use `'empty' in docs` as a type guard for it.\n */\nfunction parseAllDocuments(source, options = {}) {\n const { lineCounter, prettyErrors } = parseOptions(options);\n const parser$1 = new parser.Parser(lineCounter?.addNewLine);\n const composer$1 = new composer.Composer(options);\n const docs = Array.from(composer$1.compose(parser$1.parse(source)));\n if (prettyErrors && lineCounter)\n for (const doc of docs) {\n doc.errors.forEach(errors.prettifyError(source, lineCounter));\n doc.warnings.forEach(errors.prettifyError(source, lineCounter));\n }\n if (docs.length > 0)\n return docs;\n return Object.assign([], { empty: true }, composer$1.streamInfo());\n}\n/** Parse an input string into a single YAML.Document */\nfunction parseDocument(source, options = {}) {\n const { lineCounter, prettyErrors } = parseOptions(options);\n const parser$1 = new parser.Parser(lineCounter?.addNewLine);\n const composer$1 = new composer.Composer(options);\n // `doc` is always set by compose.end(true) at the very latest\n let doc = null;\n for (const _doc of composer$1.compose(parser$1.parse(source), true, source.length)) {\n if (!doc)\n doc = _doc;\n else if (doc.options.logLevel !== 'silent') {\n doc.errors.push(new errors.YAMLParseError(_doc.range.slice(0, 2), 'MULTIPLE_DOCS', 'Source contains multiple documents; please use YAML.parseAllDocuments()'));\n break;\n }\n }\n if (prettyErrors && lineCounter) {\n doc.errors.forEach(errors.prettifyError(source, lineCounter));\n doc.warnings.forEach(errors.prettifyError(source, lineCounter));\n }\n return doc;\n}\nfunction parse(src, reviver, options) {\n let _reviver = undefined;\n if (typeof reviver === 'function') {\n _reviver = reviver;\n }\n else if (options === undefined && reviver && typeof reviver === 'object') {\n options = reviver;\n }\n const doc = parseDocument(src, options);\n if (!doc)\n return null;\n doc.warnings.forEach(warning => log.warn(doc.options.logLevel, warning));\n if (doc.errors.length > 0) {\n if (doc.options.logLevel !== 'silent')\n throw doc.errors[0];\n else\n doc.errors = [];\n }\n return doc.toJS(Object.assign({ reviver: _reviver }, options));\n}\nfunction stringify(value, replacer, options) {\n let _replacer = null;\n if (typeof replacer === 'function' || Array.isArray(replacer)) {\n _replacer = replacer;\n }\n else if (options === undefined && replacer) {\n options = replacer;\n }\n if (typeof options === 'string')\n options = options.length;\n if (typeof options === 'number') {\n const indent = Math.round(options);\n options = indent < 1 ? undefined : indent > 8 ? { indent: 8 } : { indent };\n }\n if (value === undefined) {\n const { keepUndefined } = options ?? replacer ?? {};\n if (!keepUndefined)\n return undefined;\n }\n return new Document.Document(value, _replacer, options).toString(options);\n}\n\nexports.parse = parse;\nexports.parseAllDocuments = parseAllDocuments;\nexports.parseDocument = parseDocument;\nexports.stringify = stringify;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar map = require('./common/map.js');\nvar seq = require('./common/seq.js');\nvar string = require('./common/string.js');\nvar tags = require('./tags.js');\n\nconst sortMapEntriesByKey = (a, b) => a.key < b.key ? -1 : a.key > b.key ? 1 : 0;\nclass Schema {\n constructor({ compat, customTags, merge, resolveKnownTags, schema, sortMapEntries, toStringDefaults }) {\n this.compat = Array.isArray(compat)\n ? tags.getTags(compat, 'compat')\n : compat\n ? 
tags.getTags(null, compat)\n : null;\n this.merge = !!merge;\n this.name = (typeof schema === 'string' && schema) || 'core';\n this.knownTags = resolveKnownTags ? tags.coreKnownTags : {};\n this.tags = tags.getTags(customTags, this.name);\n this.toStringOptions = toStringDefaults ?? null;\n Object.defineProperty(this, Node.MAP, { value: map.map });\n Object.defineProperty(this, Node.SCALAR, { value: string.string });\n Object.defineProperty(this, Node.SEQ, { value: seq.seq });\n // Used by createMap()\n this.sortMapEntries =\n typeof sortMapEntries === 'function'\n ? sortMapEntries\n : sortMapEntries === true\n ? sortMapEntriesByKey\n : null;\n }\n clone() {\n const copy = Object.create(Schema.prototype, Object.getOwnPropertyDescriptors(this));\n copy.tags = this.tags.slice();\n return copy;\n }\n}\n\nexports.Schema = Schema;\n","'use strict';\n\nvar Node = require('../../nodes/Node.js');\nvar Pair = require('../../nodes/Pair.js');\nvar YAMLMap = require('../../nodes/YAMLMap.js');\n\nfunction createMap(schema, obj, ctx) {\n const { keepUndefined, replacer } = ctx;\n const map = new YAMLMap.YAMLMap(schema);\n const add = (key, value) => {\n if (typeof replacer === 'function')\n value = replacer.call(obj, key, value);\n else if (Array.isArray(replacer) && !replacer.includes(key))\n return;\n if (value !== undefined || keepUndefined)\n map.items.push(Pair.createPair(key, value, ctx));\n };\n if (obj instanceof Map) {\n for (const [key, value] of obj)\n add(key, value);\n }\n else if (obj && typeof obj === 'object') {\n for (const key of Object.keys(obj))\n add(key, obj[key]);\n }\n if (typeof schema.sortMapEntries === 'function') {\n map.items.sort(schema.sortMapEntries);\n }\n return map;\n}\nconst map = {\n collection: 'map',\n createNode: createMap,\n default: true,\n nodeClass: YAMLMap.YAMLMap,\n tag: 'tag:yaml.org,2002:map',\n resolve(map, onError) {\n if (!Node.isMap(map))\n onError('Expected a mapping for this tag');\n return map;\n }\n};\n\nexports.map = map;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\n\nconst nullTag = {\n identify: value => value == null,\n createNode: () => new Scalar.Scalar(null),\n default: true,\n tag: 'tag:yaml.org,2002:null',\n test: /^(?:~|[Nn]ull|NULL)?$/,\n resolve: () => new Scalar.Scalar(null),\n stringify: ({ source }, ctx) => typeof source === 'string' && nullTag.test.test(source)\n ? source\n : ctx.options.nullStr\n};\n\nexports.nullTag = nullTag;\n","'use strict';\n\nvar createNode = require('../../doc/createNode.js');\nvar Node = require('../../nodes/Node.js');\nvar YAMLSeq = require('../../nodes/YAMLSeq.js');\n\nfunction createSeq(schema, obj, ctx) {\n const { replacer } = ctx;\n const seq = new YAMLSeq.YAMLSeq(schema);\n if (obj && Symbol.iterator in Object(obj)) {\n let i = 0;\n for (let it of obj) {\n if (typeof replacer === 'function') {\n const key = obj instanceof Set ? 
it : String(i++);\n it = replacer.call(obj, key, it);\n }\n seq.items.push(createNode.createNode(it, undefined, ctx));\n }\n }\n return seq;\n}\nconst seq = {\n collection: 'seq',\n createNode: createSeq,\n default: true,\n nodeClass: YAMLSeq.YAMLSeq,\n tag: 'tag:yaml.org,2002:seq',\n resolve(seq, onError) {\n if (!Node.isSeq(seq))\n onError('Expected a sequence for this tag');\n return seq;\n }\n};\n\nexports.seq = seq;\n","'use strict';\n\nvar stringifyString = require('../../stringify/stringifyString.js');\n\nconst string = {\n identify: value => typeof value === 'string',\n default: true,\n tag: 'tag:yaml.org,2002:str',\n resolve: str => str,\n stringify(item, ctx, onComment, onChompKeep) {\n ctx = Object.assign({ actualString: true }, ctx);\n return stringifyString.stringifyString(item, ctx, onComment, onChompKeep);\n }\n};\n\nexports.string = string;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\n\nconst boolTag = {\n identify: value => typeof value === 'boolean',\n default: true,\n tag: 'tag:yaml.org,2002:bool',\n test: /^(?:[Tt]rue|TRUE|[Ff]alse|FALSE)$/,\n resolve: str => new Scalar.Scalar(str[0] === 't' || str[0] === 'T'),\n stringify({ source, value }, ctx) {\n if (source && boolTag.test.test(source)) {\n const sv = source[0] === 't' || source[0] === 'T';\n if (value === sv)\n return source;\n }\n return value ? ctx.options.trueStr : ctx.options.falseStr;\n }\n};\n\nexports.boolTag = boolTag;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\nvar stringifyNumber = require('../../stringify/stringifyNumber.js');\n\nconst floatNaN = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n test: /^(?:[-+]?\\.(?:inf|Inf|INF|nan|NaN|NAN))$/,\n resolve: str => str.slice(-3).toLowerCase() === 'nan'\n ? NaN\n : str[0] === '-'\n ? Number.NEGATIVE_INFINITY\n : Number.POSITIVE_INFINITY,\n stringify: stringifyNumber.stringifyNumber\n};\nconst floatExp = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n format: 'EXP',\n test: /^[-+]?(?:\\.[0-9]+|[0-9]+(?:\\.[0-9]*)?)[eE][-+]?[0-9]+$/,\n resolve: str => parseFloat(str),\n stringify(node) {\n const num = Number(node.value);\n return isFinite(num) ? num.toExponential() : stringifyNumber.stringifyNumber(node);\n }\n};\nconst float = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n test: /^[-+]?(?:\\.[0-9]+|[0-9]+\\.[0-9]*)$/,\n resolve(str) {\n const node = new Scalar.Scalar(parseFloat(str));\n const dot = str.indexOf('.');\n if (dot !== -1 && str[str.length - 1] === '0')\n node.minFractionDigits = str.length - dot - 1;\n return node;\n },\n stringify: stringifyNumber.stringifyNumber\n};\n\nexports.float = float;\nexports.floatExp = floatExp;\nexports.floatNaN = floatNaN;\n","'use strict';\n\nvar stringifyNumber = require('../../stringify/stringifyNumber.js');\n\nconst intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value);\nconst intResolve = (str, offset, radix, { intAsBigInt }) => (intAsBigInt ? 
BigInt(str) : parseInt(str.substring(offset), radix));\nfunction intStringify(node, radix, prefix) {\n const { value } = node;\n if (intIdentify(value) && value >= 0)\n return prefix + value.toString(radix);\n return stringifyNumber.stringifyNumber(node);\n}\nconst intOct = {\n identify: value => intIdentify(value) && value >= 0,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'OCT',\n test: /^0o[0-7]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 2, 8, opt),\n stringify: node => intStringify(node, 8, '0o')\n};\nconst int = {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n test: /^[-+]?[0-9]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt),\n stringify: stringifyNumber.stringifyNumber\n};\nconst intHex = {\n identify: value => intIdentify(value) && value >= 0,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'HEX',\n test: /^0x[0-9a-fA-F]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt),\n stringify: node => intStringify(node, 16, '0x')\n};\n\nexports.int = int;\nexports.intHex = intHex;\nexports.intOct = intOct;\n","'use strict';\n\nvar map = require('../common/map.js');\nvar _null = require('../common/null.js');\nvar seq = require('../common/seq.js');\nvar string = require('../common/string.js');\nvar bool = require('./bool.js');\nvar float = require('./float.js');\nvar int = require('./int.js');\n\nconst schema = [\n map.map,\n seq.seq,\n string.string,\n _null.nullTag,\n bool.boolTag,\n int.intOct,\n int.int,\n int.intHex,\n float.floatNaN,\n float.floatExp,\n float.float\n];\n\nexports.schema = schema;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\nvar map = require('../common/map.js');\nvar seq = require('../common/seq.js');\n\nfunction intIdentify(value) {\n return typeof value === 'bigint' || Number.isInteger(value);\n}\nconst stringifyJSON = ({ value }) => JSON.stringify(value);\nconst jsonScalars = [\n {\n identify: value => typeof value === 'string',\n default: true,\n tag: 'tag:yaml.org,2002:str',\n resolve: str => str,\n stringify: stringifyJSON\n },\n {\n identify: value => value == null,\n createNode: () => new Scalar.Scalar(null),\n default: true,\n tag: 'tag:yaml.org,2002:null',\n test: /^null$/,\n resolve: () => null,\n stringify: stringifyJSON\n },\n {\n identify: value => typeof value === 'boolean',\n default: true,\n tag: 'tag:yaml.org,2002:bool',\n test: /^true|false$/,\n resolve: str => str === 'true',\n stringify: stringifyJSON\n },\n {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n test: /^-?(?:0|[1-9][0-9]*)$/,\n resolve: (str, _onError, { intAsBigInt }) => intAsBigInt ? BigInt(str) : parseInt(str, 10),\n stringify: ({ value }) => intIdentify(value) ? 
value.toString() : JSON.stringify(value)\n },\n {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n test: /^-?(?:0|[1-9][0-9]*)(?:\\.[0-9]*)?(?:[eE][-+]?[0-9]+)?$/,\n resolve: str => parseFloat(str),\n stringify: stringifyJSON\n }\n];\nconst jsonError = {\n default: true,\n tag: '',\n test: /^/,\n resolve(str, onError) {\n onError(`Unresolved plain scalar ${JSON.stringify(str)}`);\n return str;\n }\n};\nconst schema = [map.map, seq.seq].concat(jsonScalars, jsonError);\n\nexports.schema = schema;\n","'use strict';\n\nvar map = require('./common/map.js');\nvar _null = require('./common/null.js');\nvar seq = require('./common/seq.js');\nvar string = require('./common/string.js');\nvar bool = require('./core/bool.js');\nvar float = require('./core/float.js');\nvar int = require('./core/int.js');\nvar schema = require('./core/schema.js');\nvar schema$1 = require('./json/schema.js');\nvar binary = require('./yaml-1.1/binary.js');\nvar omap = require('./yaml-1.1/omap.js');\nvar pairs = require('./yaml-1.1/pairs.js');\nvar schema$2 = require('./yaml-1.1/schema.js');\nvar set = require('./yaml-1.1/set.js');\nvar timestamp = require('./yaml-1.1/timestamp.js');\n\nconst schemas = new Map([\n ['core', schema.schema],\n ['failsafe', [map.map, seq.seq, string.string]],\n ['json', schema$1.schema],\n ['yaml11', schema$2.schema],\n ['yaml-1.1', schema$2.schema]\n]);\nconst tagsByName = {\n binary: binary.binary,\n bool: bool.boolTag,\n float: float.float,\n floatExp: float.floatExp,\n floatNaN: float.floatNaN,\n floatTime: timestamp.floatTime,\n int: int.int,\n intHex: int.intHex,\n intOct: int.intOct,\n intTime: timestamp.intTime,\n map: map.map,\n null: _null.nullTag,\n omap: omap.omap,\n pairs: pairs.pairs,\n seq: seq.seq,\n set: set.set,\n timestamp: timestamp.timestamp\n};\nconst coreKnownTags = {\n 'tag:yaml.org,2002:binary': binary.binary,\n 'tag:yaml.org,2002:omap': omap.omap,\n 'tag:yaml.org,2002:pairs': pairs.pairs,\n 'tag:yaml.org,2002:set': set.set,\n 'tag:yaml.org,2002:timestamp': timestamp.timestamp\n};\nfunction getTags(customTags, schemaName) {\n let tags = schemas.get(schemaName);\n if (!tags) {\n if (Array.isArray(customTags))\n tags = [];\n else {\n const keys = Array.from(schemas.keys())\n .filter(key => key !== 'yaml11')\n .map(key => JSON.stringify(key))\n .join(', ');\n throw new Error(`Unknown schema \"${schemaName}\"; use one of ${keys} or define customTags array`);\n }\n }\n if (Array.isArray(customTags)) {\n for (const tag of customTags)\n tags = tags.concat(tag);\n }\n else if (typeof customTags === 'function') {\n tags = customTags(tags.slice());\n }\n return tags.map(tag => {\n if (typeof tag !== 'string')\n return tag;\n const tagObj = tagsByName[tag];\n if (tagObj)\n return tagObj;\n const keys = Object.keys(tagsByName)\n .map(key => JSON.stringify(key))\n .join(', ');\n throw new Error(`Unknown custom tag \"${tag}\"; use one of ${keys}`);\n });\n}\n\nexports.coreKnownTags = coreKnownTags;\nexports.getTags = getTags;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\nvar stringifyString = require('../../stringify/stringifyString.js');\n\nconst binary = {\n identify: value => value instanceof Uint8Array,\n default: false,\n tag: 'tag:yaml.org,2002:binary',\n /**\n * Returns a Buffer in node and an Uint8Array in browsers\n *\n * To use the resulting buffer as an image, you'll want to do something like:\n *\n * const blob = new Blob([buffer], { type: 'image/jpeg' })\n * document.querySelector('#photo').src = 
URL.createObjectURL(blob)\n */\n resolve(src, onError) {\n if (typeof Buffer === 'function') {\n return Buffer.from(src, 'base64');\n }\n else if (typeof atob === 'function') {\n // On IE 11, atob() can't handle newlines\n const str = atob(src.replace(/[\\n\\r]/g, ''));\n const buffer = new Uint8Array(str.length);\n for (let i = 0; i < str.length; ++i)\n buffer[i] = str.charCodeAt(i);\n return buffer;\n }\n else {\n onError('This environment does not support reading binary tags; either Buffer or atob is required');\n return src;\n }\n },\n stringify({ comment, type, value }, ctx, onComment, onChompKeep) {\n const buf = value; // checked earlier by binary.identify()\n let str;\n if (typeof Buffer === 'function') {\n str =\n buf instanceof Buffer\n ? buf.toString('base64')\n : Buffer.from(buf.buffer).toString('base64');\n }\n else if (typeof btoa === 'function') {\n let s = '';\n for (let i = 0; i < buf.length; ++i)\n s += String.fromCharCode(buf[i]);\n str = btoa(s);\n }\n else {\n throw new Error('This environment does not support writing binary tags; either Buffer or btoa is required');\n }\n if (!type)\n type = Scalar.Scalar.BLOCK_LITERAL;\n if (type !== Scalar.Scalar.QUOTE_DOUBLE) {\n const lineWidth = Math.max(ctx.options.lineWidth - ctx.indent.length, ctx.options.minContentWidth);\n const n = Math.ceil(str.length / lineWidth);\n const lines = new Array(n);\n for (let i = 0, o = 0; i < n; ++i, o += lineWidth) {\n lines[i] = str.substr(o, lineWidth);\n }\n str = lines.join(type === Scalar.Scalar.BLOCK_LITERAL ? '\\n' : ' ');\n }\n return stringifyString.stringifyString({ comment, type, value: str }, ctx, onComment, onChompKeep);\n }\n};\n\nexports.binary = binary;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\n\nfunction boolStringify({ value, source }, ctx) {\n const boolObj = value ? trueTag : falseTag;\n if (source && boolObj.test.test(source))\n return source;\n return value ? ctx.options.trueStr : ctx.options.falseStr;\n}\nconst trueTag = {\n identify: value => value === true,\n default: true,\n tag: 'tag:yaml.org,2002:bool',\n test: /^(?:Y|y|[Yy]es|YES|[Tt]rue|TRUE|[Oo]n|ON)$/,\n resolve: () => new Scalar.Scalar(true),\n stringify: boolStringify\n};\nconst falseTag = {\n identify: value => value === false,\n default: true,\n tag: 'tag:yaml.org,2002:bool',\n test: /^(?:N|n|[Nn]o|NO|[Ff]alse|FALSE|[Oo]ff|OFF)$/i,\n resolve: () => new Scalar.Scalar(false),\n stringify: boolStringify\n};\n\nexports.falseTag = falseTag;\nexports.trueTag = trueTag;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\nvar stringifyNumber = require('../../stringify/stringifyNumber.js');\n\nconst floatNaN = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n test: /^[-+]?\\.(?:inf|Inf|INF|nan|NaN|NAN)$/,\n resolve: (str) => str.slice(-3).toLowerCase() === 'nan'\n ? NaN\n : str[0] === '-'\n ? Number.NEGATIVE_INFINITY\n : Number.POSITIVE_INFINITY,\n stringify: stringifyNumber.stringifyNumber\n};\nconst floatExp = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n format: 'EXP',\n test: /^[-+]?(?:[0-9][0-9_]*)?(?:\\.[0-9_]*)?[eE][-+]?[0-9]+$/,\n resolve: (str) => parseFloat(str.replace(/_/g, '')),\n stringify(node) {\n const num = Number(node.value);\n return isFinite(num) ? 
num.toExponential() : stringifyNumber.stringifyNumber(node);\n }\n};\nconst float = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n test: /^[-+]?(?:[0-9][0-9_]*)?\\.[0-9_]*$/,\n resolve(str) {\n const node = new Scalar.Scalar(parseFloat(str.replace(/_/g, '')));\n const dot = str.indexOf('.');\n if (dot !== -1) {\n const f = str.substring(dot + 1).replace(/_/g, '');\n if (f[f.length - 1] === '0')\n node.minFractionDigits = f.length;\n }\n return node;\n },\n stringify: stringifyNumber.stringifyNumber\n};\n\nexports.float = float;\nexports.floatExp = floatExp;\nexports.floatNaN = floatNaN;\n","'use strict';\n\nvar stringifyNumber = require('../../stringify/stringifyNumber.js');\n\nconst intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value);\nfunction intResolve(str, offset, radix, { intAsBigInt }) {\n const sign = str[0];\n if (sign === '-' || sign === '+')\n offset += 1;\n str = str.substring(offset).replace(/_/g, '');\n if (intAsBigInt) {\n switch (radix) {\n case 2:\n str = `0b${str}`;\n break;\n case 8:\n str = `0o${str}`;\n break;\n case 16:\n str = `0x${str}`;\n break;\n }\n const n = BigInt(str);\n return sign === '-' ? BigInt(-1) * n : n;\n }\n const n = parseInt(str, radix);\n return sign === '-' ? -1 * n : n;\n}\nfunction intStringify(node, radix, prefix) {\n const { value } = node;\n if (intIdentify(value)) {\n const str = value.toString(radix);\n return value < 0 ? '-' + prefix + str.substr(1) : prefix + str;\n }\n return stringifyNumber.stringifyNumber(node);\n}\nconst intBin = {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'BIN',\n test: /^[-+]?0b[0-1_]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 2, 2, opt),\n stringify: node => intStringify(node, 2, '0b')\n};\nconst intOct = {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'OCT',\n test: /^[-+]?0[0-7_]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 1, 8, opt),\n stringify: node => intStringify(node, 8, '0')\n};\nconst int = {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n test: /^[-+]?[0-9][0-9_]*$/,\n resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt),\n stringify: stringifyNumber.stringifyNumber\n};\nconst intHex = {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'HEX',\n test: /^[-+]?0x[0-9a-fA-F_]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt),\n stringify: node => intStringify(node, 16, '0x')\n};\n\nexports.int = int;\nexports.intBin = intBin;\nexports.intHex = intHex;\nexports.intOct = intOct;\n","'use strict';\n\nvar YAMLSeq = require('../../nodes/YAMLSeq.js');\nvar toJS = require('../../nodes/toJS.js');\nvar Node = require('../../nodes/Node.js');\nvar YAMLMap = require('../../nodes/YAMLMap.js');\nvar pairs = require('./pairs.js');\n\nclass YAMLOMap extends YAMLSeq.YAMLSeq {\n constructor() {\n super();\n this.add = YAMLMap.YAMLMap.prototype.add.bind(this);\n this.delete = YAMLMap.YAMLMap.prototype.delete.bind(this);\n this.get = YAMLMap.YAMLMap.prototype.get.bind(this);\n this.has = YAMLMap.YAMLMap.prototype.has.bind(this);\n this.set = YAMLMap.YAMLMap.prototype.set.bind(this);\n this.tag = YAMLOMap.tag;\n }\n /**\n * If `ctx` is given, the return type is actually `Map`,\n * but TypeScript won't allow widening the signature of a child method.\n */\n toJSON(_, ctx) {\n if (!ctx)\n return super.toJSON(_);\n const map = new Map();\n if (ctx?.onCreate)\n 
ctx.onCreate(map);\n for (const pair of this.items) {\n let key, value;\n if (Node.isPair(pair)) {\n key = toJS.toJS(pair.key, '', ctx);\n value = toJS.toJS(pair.value, key, ctx);\n }\n else {\n key = toJS.toJS(pair, '', ctx);\n }\n if (map.has(key))\n throw new Error('Ordered maps must not include duplicate keys');\n map.set(key, value);\n }\n return map;\n }\n}\nYAMLOMap.tag = 'tag:yaml.org,2002:omap';\nconst omap = {\n collection: 'seq',\n identify: value => value instanceof Map,\n nodeClass: YAMLOMap,\n default: false,\n tag: 'tag:yaml.org,2002:omap',\n resolve(seq, onError) {\n const pairs$1 = pairs.resolvePairs(seq, onError);\n const seenKeys = [];\n for (const { key } of pairs$1.items) {\n if (Node.isScalar(key)) {\n if (seenKeys.includes(key.value)) {\n onError(`Ordered maps must not include duplicate keys: ${key.value}`);\n }\n else {\n seenKeys.push(key.value);\n }\n }\n }\n return Object.assign(new YAMLOMap(), pairs$1);\n },\n createNode(schema, iterable, ctx) {\n const pairs$1 = pairs.createPairs(schema, iterable, ctx);\n const omap = new YAMLOMap();\n omap.items = pairs$1.items;\n return omap;\n }\n};\n\nexports.YAMLOMap = YAMLOMap;\nexports.omap = omap;\n","'use strict';\n\nvar Node = require('../../nodes/Node.js');\nvar Pair = require('../../nodes/Pair.js');\nvar Scalar = require('../../nodes/Scalar.js');\nvar YAMLSeq = require('../../nodes/YAMLSeq.js');\n\nfunction resolvePairs(seq, onError) {\n if (Node.isSeq(seq)) {\n for (let i = 0; i < seq.items.length; ++i) {\n let item = seq.items[i];\n if (Node.isPair(item))\n continue;\n else if (Node.isMap(item)) {\n if (item.items.length > 1)\n onError('Each pair must have its own sequence indicator');\n const pair = item.items[0] || new Pair.Pair(new Scalar.Scalar(null));\n if (item.commentBefore)\n pair.key.commentBefore = pair.key.commentBefore\n ? `${item.commentBefore}\\n${pair.key.commentBefore}`\n : item.commentBefore;\n if (item.comment) {\n const cn = pair.value ?? pair.key;\n cn.comment = cn.comment\n ? `${item.comment}\\n${cn.comment}`\n : item.comment;\n }\n item = pair;\n }\n seq.items[i] = Node.isPair(item) ? 
item : new Pair.Pair(item);\n }\n }\n else\n onError('Expected a sequence for this tag');\n return seq;\n}\nfunction createPairs(schema, iterable, ctx) {\n const { replacer } = ctx;\n const pairs = new YAMLSeq.YAMLSeq(schema);\n pairs.tag = 'tag:yaml.org,2002:pairs';\n let i = 0;\n if (iterable && Symbol.iterator in Object(iterable))\n for (let it of iterable) {\n if (typeof replacer === 'function')\n it = replacer.call(iterable, String(i++), it);\n let key, value;\n if (Array.isArray(it)) {\n if (it.length === 2) {\n key = it[0];\n value = it[1];\n }\n else\n throw new TypeError(`Expected [key, value] tuple: ${it}`);\n }\n else if (it && it instanceof Object) {\n const keys = Object.keys(it);\n if (keys.length === 1) {\n key = keys[0];\n value = it[key];\n }\n else\n throw new TypeError(`Expected { key: value } tuple: ${it}`);\n }\n else {\n key = it;\n }\n pairs.items.push(Pair.createPair(key, value, ctx));\n }\n return pairs;\n}\nconst pairs = {\n collection: 'seq',\n default: false,\n tag: 'tag:yaml.org,2002:pairs',\n resolve: resolvePairs,\n createNode: createPairs\n};\n\nexports.createPairs = createPairs;\nexports.pairs = pairs;\nexports.resolvePairs = resolvePairs;\n","'use strict';\n\nvar map = require('../common/map.js');\nvar _null = require('../common/null.js');\nvar seq = require('../common/seq.js');\nvar string = require('../common/string.js');\nvar binary = require('./binary.js');\nvar bool = require('./bool.js');\nvar float = require('./float.js');\nvar int = require('./int.js');\nvar omap = require('./omap.js');\nvar pairs = require('./pairs.js');\nvar set = require('./set.js');\nvar timestamp = require('./timestamp.js');\n\nconst schema = [\n map.map,\n seq.seq,\n string.string,\n _null.nullTag,\n bool.trueTag,\n bool.falseTag,\n int.intBin,\n int.intOct,\n int.int,\n int.intHex,\n float.floatNaN,\n float.floatExp,\n float.float,\n binary.binary,\n omap.omap,\n pairs.pairs,\n set.set,\n timestamp.intTime,\n timestamp.floatTime,\n timestamp.timestamp\n];\n\nexports.schema = schema;\n","'use strict';\n\nvar Node = require('../../nodes/Node.js');\nvar Pair = require('../../nodes/Pair.js');\nvar YAMLMap = require('../../nodes/YAMLMap.js');\n\nclass YAMLSet extends YAMLMap.YAMLMap {\n constructor(schema) {\n super(schema);\n this.tag = YAMLSet.tag;\n }\n add(key) {\n let pair;\n if (Node.isPair(key))\n pair = key;\n else if (typeof key === 'object' &&\n 'key' in key &&\n 'value' in key &&\n key.value === null)\n pair = new Pair.Pair(key.key, null);\n else\n pair = new Pair.Pair(key, null);\n const prev = YAMLMap.findPair(this.items, pair.key);\n if (!prev)\n this.items.push(pair);\n }\n /**\n * If `keepPair` is `true`, returns the Pair matching `key`.\n * Otherwise, returns the value of that Pair's key.\n */\n get(key, keepPair) {\n const pair = YAMLMap.findPair(this.items, key);\n return !keepPair && Node.isPair(pair)\n ? Node.isScalar(pair.key)\n ? 
pair.key.value\n : pair.key\n : pair;\n }\n set(key, value) {\n if (typeof value !== 'boolean')\n throw new Error(`Expected boolean value for set(key, value) in a YAML set, not ${typeof value}`);\n const prev = YAMLMap.findPair(this.items, key);\n if (prev && !value) {\n this.items.splice(this.items.indexOf(prev), 1);\n }\n else if (!prev && value) {\n this.items.push(new Pair.Pair(key));\n }\n }\n toJSON(_, ctx) {\n return super.toJSON(_, ctx, Set);\n }\n toString(ctx, onComment, onChompKeep) {\n if (!ctx)\n return JSON.stringify(this);\n if (this.hasAllNullValues(true))\n return super.toString(Object.assign({}, ctx, { allNullValues: true }), onComment, onChompKeep);\n else\n throw new Error('Set items must all have null values');\n }\n}\nYAMLSet.tag = 'tag:yaml.org,2002:set';\nconst set = {\n collection: 'map',\n identify: value => value instanceof Set,\n nodeClass: YAMLSet,\n default: false,\n tag: 'tag:yaml.org,2002:set',\n resolve(map, onError) {\n if (Node.isMap(map)) {\n if (map.hasAllNullValues(true))\n return Object.assign(new YAMLSet(), map);\n else\n onError('Set items must all have null values');\n }\n else\n onError('Expected a mapping for this tag');\n return map;\n },\n createNode(schema, iterable, ctx) {\n const { replacer } = ctx;\n const set = new YAMLSet(schema);\n if (iterable && Symbol.iterator in Object(iterable))\n for (let value of iterable) {\n if (typeof replacer === 'function')\n value = replacer.call(iterable, value, value);\n set.items.push(Pair.createPair(value, null, ctx));\n }\n return set;\n }\n};\n\nexports.YAMLSet = YAMLSet;\nexports.set = set;\n","'use strict';\n\nvar stringifyNumber = require('../../stringify/stringifyNumber.js');\n\n/** Internal types handle bigint as number, because TS can't figure it out. */\nfunction parseSexagesimal(str, asBigInt) {\n const sign = str[0];\n const parts = sign === '-' || sign === '+' ? str.substring(1) : str;\n const num = (n) => asBigInt ? BigInt(n) : Number(n);\n const res = parts\n .replace(/_/g, '')\n .split(':')\n .reduce((res, p) => res * num(60) + num(p), num(0));\n return (sign === '-' ? num(-1) * res : res);\n}\n/**\n * hhhh:mm:ss.sss\n *\n * Internal types handle bigint as number, because TS can't figure it out.\n */\nfunction stringifySexagesimal(node) {\n let { value } = node;\n let num = (n) => n;\n if (typeof value === 'bigint')\n num = n => BigInt(n);\n else if (isNaN(value) || !isFinite(value))\n return stringifyNumber.stringifyNumber(node);\n let sign = '';\n if (value < 0) {\n sign = '-';\n value *= num(-1);\n }\n const _60 = num(60);\n const parts = [value % _60]; // seconds, including ms\n if (value < 60) {\n parts.unshift(0); // at least one : is required\n }\n else {\n value = (value - parts[0]) / _60;\n parts.unshift(value % _60); // minutes\n if (value >= 60) {\n value = (value - parts[0]) / _60;\n parts.unshift(value); // hours\n }\n }\n return (sign +\n parts\n .map(n => (n < 10 ? 
'0' + String(n) : String(n)))\n .join(':')\n .replace(/000000\\d*$/, '') // % 60 may introduce error\n );\n}\nconst intTime = {\n identify: value => typeof value === 'bigint' || Number.isInteger(value),\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'TIME',\n test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+$/,\n resolve: (str, _onError, { intAsBigInt }) => parseSexagesimal(str, intAsBigInt),\n stringify: stringifySexagesimal\n};\nconst floatTime = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n format: 'TIME',\n test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\\.[0-9_]*$/,\n resolve: str => parseSexagesimal(str, false),\n stringify: stringifySexagesimal\n};\nconst timestamp = {\n identify: value => value instanceof Date,\n default: true,\n tag: 'tag:yaml.org,2002:timestamp',\n // If the time zone is omitted, the timestamp is assumed to be specified in UTC. The time part\n // may be omitted altogether, resulting in a date format. In such a case, the time part is\n // assumed to be 00:00:00Z (start of day, UTC).\n test: RegExp('^([0-9]{4})-([0-9]{1,2})-([0-9]{1,2})' + // YYYY-Mm-Dd\n '(?:' + // time is optional\n '(?:t|T|[ \\\\t]+)' + // t | T | whitespace\n '([0-9]{1,2}):([0-9]{1,2}):([0-9]{1,2}(\\\\.[0-9]+)?)' + // Hh:Mm:Ss(.ss)?\n '(?:[ \\\\t]*(Z|[-+][012]?[0-9](?::[0-9]{2})?))?' + // Z | +5 | -03:30\n ')?$'),\n resolve(str) {\n const match = str.match(timestamp.test);\n if (!match)\n throw new Error('!!timestamp expects a date, starting with yyyy-mm-dd');\n const [, year, month, day, hour, minute, second] = match.map(Number);\n const millisec = match[7] ? Number((match[7] + '00').substr(1, 3)) : 0;\n let date = Date.UTC(year, month - 1, day, hour || 0, minute || 0, second || 0, millisec);\n const tz = match[8];\n if (tz && tz !== 'Z') {\n let d = parseSexagesimal(tz, false);\n if (Math.abs(d) < 30)\n d *= 60;\n date -= 60000 * d;\n }\n return new Date(date);\n },\n stringify: ({ value }) => value.toISOString().replace(/((T00:00)?:00)?\\.000Z$/, '')\n};\n\nexports.floatTime = floatTime;\nexports.intTime = intTime;\nexports.timestamp = timestamp;\n","'use strict';\n\nconst FOLD_FLOW = 'flow';\nconst FOLD_BLOCK = 'block';\nconst FOLD_QUOTED = 'quoted';\n/**\n * Tries to keep input at up to `lineWidth` characters, splitting only on spaces\n * not followed by newlines or spaces unless `mode` is `'quoted'`. 
Lines are\n * terminated with `\\n` and started with `indent`.\n */\nfunction foldFlowLines(text, indent, mode = 'flow', { indentAtStart, lineWidth = 80, minContentWidth = 20, onFold, onOverflow } = {}) {\n if (!lineWidth || lineWidth < 0)\n return text;\n const endStep = Math.max(1 + minContentWidth, 1 + lineWidth - indent.length);\n if (text.length <= endStep)\n return text;\n const folds = [];\n const escapedFolds = {};\n let end = lineWidth - indent.length;\n if (typeof indentAtStart === 'number') {\n if (indentAtStart > lineWidth - Math.max(2, minContentWidth))\n folds.push(0);\n else\n end = lineWidth - indentAtStart;\n }\n let split = undefined;\n let prev = undefined;\n let overflow = false;\n let i = -1;\n let escStart = -1;\n let escEnd = -1;\n if (mode === FOLD_BLOCK) {\n i = consumeMoreIndentedLines(text, i);\n if (i !== -1)\n end = i + endStep;\n }\n for (let ch; (ch = text[(i += 1)]);) {\n if (mode === FOLD_QUOTED && ch === '\\\\') {\n escStart = i;\n switch (text[i + 1]) {\n case 'x':\n i += 3;\n break;\n case 'u':\n i += 5;\n break;\n case 'U':\n i += 9;\n break;\n default:\n i += 1;\n }\n escEnd = i;\n }\n if (ch === '\\n') {\n if (mode === FOLD_BLOCK)\n i = consumeMoreIndentedLines(text, i);\n end = i + endStep;\n split = undefined;\n }\n else {\n if (ch === ' ' &&\n prev &&\n prev !== ' ' &&\n prev !== '\\n' &&\n prev !== '\\t') {\n // space surrounded by non-space can be replaced with newline + indent\n const next = text[i + 1];\n if (next && next !== ' ' && next !== '\\n' && next !== '\\t')\n split = i;\n }\n if (i >= end) {\n if (split) {\n folds.push(split);\n end = split + endStep;\n split = undefined;\n }\n else if (mode === FOLD_QUOTED) {\n // white-space collected at end may stretch past lineWidth\n while (prev === ' ' || prev === '\\t') {\n prev = ch;\n ch = text[(i += 1)];\n overflow = true;\n }\n // Account for newline escape, but don't break preceding escape\n const j = i > escEnd + 1 ? 
i - 2 : escStart - 1;\n // Bail out if lineWidth & minContentWidth are shorter than an escape string\n if (escapedFolds[j])\n return text;\n folds.push(j);\n escapedFolds[j] = true;\n end = j + endStep;\n split = undefined;\n }\n else {\n overflow = true;\n }\n }\n }\n prev = ch;\n }\n if (overflow && onOverflow)\n onOverflow();\n if (folds.length === 0)\n return text;\n if (onFold)\n onFold();\n let res = text.slice(0, folds[0]);\n for (let i = 0; i < folds.length; ++i) {\n const fold = folds[i];\n const end = folds[i + 1] || text.length;\n if (fold === 0)\n res = `\\n${indent}${text.slice(0, end)}`;\n else {\n if (mode === FOLD_QUOTED && escapedFolds[fold])\n res += `${text[fold]}\\\\`;\n res += `\\n${indent}${text.slice(fold + 1, end)}`;\n }\n }\n return res;\n}\n/**\n * Presumes `i + 1` is at the start of a line\n * @returns index of last newline in more-indented block\n */\nfunction consumeMoreIndentedLines(text, i) {\n let ch = text[i + 1];\n while (ch === ' ' || ch === '\\t') {\n do {\n ch = text[(i += 1)];\n } while (ch && ch !== '\\n');\n ch = text[i + 1];\n }\n return i;\n}\n\nexports.FOLD_BLOCK = FOLD_BLOCK;\nexports.FOLD_FLOW = FOLD_FLOW;\nexports.FOLD_QUOTED = FOLD_QUOTED;\nexports.foldFlowLines = foldFlowLines;\n","'use strict';\n\nvar anchors = require('../doc/anchors.js');\nvar Node = require('../nodes/Node.js');\nvar stringifyComment = require('./stringifyComment.js');\nvar stringifyString = require('./stringifyString.js');\n\nfunction createStringifyContext(doc, options) {\n const opt = Object.assign({\n blockQuote: true,\n commentString: stringifyComment.stringifyComment,\n defaultKeyType: null,\n defaultStringType: 'PLAIN',\n directives: null,\n doubleQuotedAsJSON: false,\n doubleQuotedMinMultiLineLength: 40,\n falseStr: 'false',\n indentSeq: true,\n lineWidth: 80,\n minContentWidth: 20,\n nullStr: 'null',\n simpleKeys: false,\n singleQuote: null,\n trueStr: 'true',\n verifyAliasOrder: true\n }, doc.schema.toStringOptions, options);\n let inFlow;\n switch (opt.collectionStyle) {\n case 'block':\n inFlow = false;\n break;\n case 'flow':\n inFlow = true;\n break;\n default:\n inFlow = null;\n }\n return {\n anchors: new Set(),\n doc,\n indent: '',\n indentStep: typeof opt.indent === 'number' ? ' '.repeat(opt.indent) : ' ',\n inFlow,\n options: opt\n };\n}\nfunction getTagObject(tags, item) {\n if (item.tag) {\n const match = tags.filter(t => t.tag === item.tag);\n if (match.length > 0)\n return match.find(t => t.format === item.format) ?? match[0];\n }\n let tagObj = undefined;\n let obj;\n if (Node.isScalar(item)) {\n obj = item.value;\n const match = tags.filter(t => t.identify?.(obj));\n tagObj =\n match.find(t => t.format === item.format) ?? match.find(t => !t.format);\n }\n else {\n obj = item;\n tagObj = tags.find(t => t.nodeClass && obj instanceof t.nodeClass);\n }\n if (!tagObj) {\n const name = obj?.constructor?.name ?? typeof obj;\n throw new Error(`Tag not resolved for ${name} value`);\n }\n return tagObj;\n}\n// needs to be called before value stringifier to allow for circular anchor refs\nfunction stringifyProps(node, tagObj, { anchors: anchors$1, doc }) {\n if (!doc.directives)\n return '';\n const props = [];\n const anchor = (Node.isScalar(node) || Node.isCollection(node)) && node.anchor;\n if (anchor && anchors.anchorIsValid(anchor)) {\n anchors$1.add(anchor);\n props.push(`&${anchor}`);\n }\n const tag = node.tag ? node.tag : tagObj.default ? 
null : tagObj.tag;\n if (tag)\n props.push(doc.directives.tagString(tag));\n return props.join(' ');\n}\nfunction stringify(item, ctx, onComment, onChompKeep) {\n if (Node.isPair(item))\n return item.toString(ctx, onComment, onChompKeep);\n if (Node.isAlias(item)) {\n if (ctx.doc.directives)\n return item.toString(ctx);\n if (ctx.resolvedAliases?.has(item)) {\n throw new TypeError(`Cannot stringify circular structure without alias nodes`);\n }\n else {\n if (ctx.resolvedAliases)\n ctx.resolvedAliases.add(item);\n else\n ctx.resolvedAliases = new Set([item]);\n item = item.resolve(ctx.doc);\n }\n }\n let tagObj = undefined;\n const node = Node.isNode(item)\n ? item\n : ctx.doc.createNode(item, { onTagObj: o => (tagObj = o) });\n if (!tagObj)\n tagObj = getTagObject(ctx.doc.schema.tags, node);\n const props = stringifyProps(node, tagObj, ctx);\n if (props.length > 0)\n ctx.indentAtStart = (ctx.indentAtStart ?? 0) + props.length + 1;\n const str = typeof tagObj.stringify === 'function'\n ? tagObj.stringify(node, ctx, onComment, onChompKeep)\n : Node.isScalar(node)\n ? stringifyString.stringifyString(node, ctx, onComment, onChompKeep)\n : node.toString(ctx, onComment, onChompKeep);\n if (!props)\n return str;\n return Node.isScalar(node) || str[0] === '{' || str[0] === '['\n ? `${props} ${str}`\n : `${props}\\n${ctx.indent}${str}`;\n}\n\nexports.createStringifyContext = createStringifyContext;\nexports.stringify = stringify;\n","'use strict';\n\nvar Collection = require('../nodes/Collection.js');\nvar Node = require('../nodes/Node.js');\nvar stringify = require('./stringify.js');\nvar stringifyComment = require('./stringifyComment.js');\n\nfunction stringifyCollection(collection, ctx, options) {\n const flow = ctx.inFlow ?? collection.flow;\n const stringify = flow ? stringifyFlowCollection : stringifyBlockCollection;\n return stringify(collection, ctx, options);\n}\nfunction stringifyBlockCollection({ comment, items }, ctx, { blockItemPrefix, flowChars, itemIndent, onChompKeep, onComment }) {\n const { indent, options: { commentString } } = ctx;\n const itemCtx = Object.assign({}, ctx, { indent: itemIndent, type: null });\n let chompKeep = false; // flag for the preceding node's status\n const lines = [];\n for (let i = 0; i < items.length; ++i) {\n const item = items[i];\n let comment = null;\n if (Node.isNode(item)) {\n if (!chompKeep && item.spaceBefore)\n lines.push('');\n addCommentBefore(ctx, lines, item.commentBefore, chompKeep);\n if (item.comment)\n comment = item.comment;\n }\n else if (Node.isPair(item)) {\n const ik = Node.isNode(item.key) ? item.key : null;\n if (ik) {\n if (!chompKeep && ik.spaceBefore)\n lines.push('');\n addCommentBefore(ctx, lines, ik.commentBefore, chompKeep);\n }\n }\n chompKeep = false;\n let str = stringify.stringify(item, itemCtx, () => (comment = null), () => (chompKeep = true));\n if (comment)\n str += stringifyComment.lineComment(str, itemIndent, commentString(comment));\n if (chompKeep && comment)\n chompKeep = false;\n lines.push(blockItemPrefix + str);\n }\n let str;\n if (lines.length === 0) {\n str = flowChars.start + flowChars.end;\n }\n else {\n str = lines[0];\n for (let i = 1; i < lines.length; ++i) {\n const line = lines[i];\n str += line ? 
`\\n${indent}${line}` : '\\n';\n }\n }\n if (comment) {\n str += '\\n' + stringifyComment.indentComment(commentString(comment), indent);\n if (onComment)\n onComment();\n }\n else if (chompKeep && onChompKeep)\n onChompKeep();\n return str;\n}\nfunction stringifyFlowCollection({ comment, items }, ctx, { flowChars, itemIndent, onComment }) {\n const { indent, indentStep, options: { commentString } } = ctx;\n itemIndent += indentStep;\n const itemCtx = Object.assign({}, ctx, {\n indent: itemIndent,\n inFlow: true,\n type: null\n });\n let reqNewline = false;\n let linesAtValue = 0;\n const lines = [];\n for (let i = 0; i < items.length; ++i) {\n const item = items[i];\n let comment = null;\n if (Node.isNode(item)) {\n if (item.spaceBefore)\n lines.push('');\n addCommentBefore(ctx, lines, item.commentBefore, false);\n if (item.comment)\n comment = item.comment;\n }\n else if (Node.isPair(item)) {\n const ik = Node.isNode(item.key) ? item.key : null;\n if (ik) {\n if (ik.spaceBefore)\n lines.push('');\n addCommentBefore(ctx, lines, ik.commentBefore, false);\n if (ik.comment)\n reqNewline = true;\n }\n const iv = Node.isNode(item.value) ? item.value : null;\n if (iv) {\n if (iv.comment)\n comment = iv.comment;\n if (iv.commentBefore)\n reqNewline = true;\n }\n else if (item.value == null && ik && ik.comment) {\n comment = ik.comment;\n }\n }\n if (comment)\n reqNewline = true;\n let str = stringify.stringify(item, itemCtx, () => (comment = null));\n if (i < items.length - 1)\n str += ',';\n if (comment)\n str += stringifyComment.lineComment(str, itemIndent, commentString(comment));\n if (!reqNewline && (lines.length > linesAtValue || str.includes('\\n')))\n reqNewline = true;\n lines.push(str);\n linesAtValue = lines.length;\n }\n let str;\n const { start, end } = flowChars;\n if (lines.length === 0) {\n str = start + end;\n }\n else {\n if (!reqNewline) {\n const len = lines.reduce((sum, line) => sum + line.length + 2, 2);\n reqNewline = len > Collection.Collection.maxFlowStringSingleLineLength;\n }\n if (reqNewline) {\n str = start;\n for (const line of lines)\n str += line ? `\\n${indentStep}${indent}${line}` : '\\n';\n str += `\\n${indent}${end}`;\n }\n else {\n str = `${start} ${lines.join(' ')} ${end}`;\n }\n }\n if (comment) {\n str += stringifyComment.lineComment(str, commentString(comment), indent);\n if (onComment)\n onComment();\n }\n return str;\n}\nfunction addCommentBefore({ indent, options: { commentString } }, lines, comment, chompKeep) {\n if (comment && chompKeep)\n comment = comment.replace(/^\\n+/, '');\n if (comment) {\n const ic = stringifyComment.indentComment(commentString(comment), indent);\n lines.push(ic.trimStart()); // Avoid double indent on first line\n }\n}\n\nexports.stringifyCollection = stringifyCollection;\n","'use strict';\n\n/**\n * Stringifies a comment.\n *\n * Empty comment lines are left empty,\n * lines consisting of a single space are replaced by `#`,\n * and all other lines are prefixed with a `#`.\n */\nconst stringifyComment = (str) => str.replace(/^(?!$)(?: $)?/gm, '#');\nfunction indentComment(comment, indent) {\n if (/^\\n+$/.test(comment))\n return comment.substring(1);\n return indent ? comment.replace(/^(?! *$)/gm, indent) : comment;\n}\nconst lineComment = (str, indent, comment) => str.endsWith('\\n')\n ? indentComment(comment, indent)\n : comment.includes('\\n')\n ? '\\n' + indentComment(comment, indent)\n : (str.endsWith(' ') ? 
'' : ' ') + comment;\n\nexports.indentComment = indentComment;\nexports.lineComment = lineComment;\nexports.stringifyComment = stringifyComment;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar stringify = require('./stringify.js');\nvar stringifyComment = require('./stringifyComment.js');\n\nfunction stringifyDocument(doc, options) {\n const lines = [];\n let hasDirectives = options.directives === true;\n if (options.directives !== false && doc.directives) {\n const dir = doc.directives.toString(doc);\n if (dir) {\n lines.push(dir);\n hasDirectives = true;\n }\n else if (doc.directives.docStart)\n hasDirectives = true;\n }\n if (hasDirectives)\n lines.push('---');\n const ctx = stringify.createStringifyContext(doc, options);\n const { commentString } = ctx.options;\n if (doc.commentBefore) {\n if (lines.length !== 1)\n lines.unshift('');\n const cs = commentString(doc.commentBefore);\n lines.unshift(stringifyComment.indentComment(cs, ''));\n }\n let chompKeep = false;\n let contentComment = null;\n if (doc.contents) {\n if (Node.isNode(doc.contents)) {\n if (doc.contents.spaceBefore && hasDirectives)\n lines.push('');\n if (doc.contents.commentBefore) {\n const cs = commentString(doc.contents.commentBefore);\n lines.push(stringifyComment.indentComment(cs, ''));\n }\n // top-level block scalars need to be indented if followed by a comment\n ctx.forceBlockIndent = !!doc.comment;\n contentComment = doc.contents.comment;\n }\n const onChompKeep = contentComment ? undefined : () => (chompKeep = true);\n let body = stringify.stringify(doc.contents, ctx, () => (contentComment = null), onChompKeep);\n if (contentComment)\n body += stringifyComment.lineComment(body, '', commentString(contentComment));\n if ((body[0] === '|' || body[0] === '>') &&\n lines[lines.length - 1] === '---') {\n // Top-level block scalars with a preceding doc marker ought to use the\n // same line for their header.\n lines[lines.length - 1] = `--- ${body}`;\n }\n else\n lines.push(body);\n }\n else {\n lines.push(stringify.stringify(doc.contents, ctx));\n }\n if (doc.directives?.docEnd) {\n if (doc.comment) {\n const cs = commentString(doc.comment);\n if (cs.includes('\\n')) {\n lines.push('...');\n lines.push(stringifyComment.indentComment(cs, ''));\n }\n else {\n lines.push(`... ${cs}`);\n }\n }\n else {\n lines.push('...');\n }\n }\n else {\n let dc = doc.comment;\n if (dc && chompKeep)\n dc = dc.replace(/^\\n+/, '');\n if (dc) {\n if ((!chompKeep || contentComment) && lines[lines.length - 1] !== '')\n lines.push('');\n lines.push(stringifyComment.indentComment(commentString(dc), ''));\n }\n }\n return lines.join('\\n') + '\\n';\n}\n\nexports.stringifyDocument = stringifyDocument;\n","'use strict';\n\nfunction stringifyNumber({ format, minFractionDigits, tag, value }) {\n if (typeof value === 'bigint')\n return String(value);\n const num = typeof value === 'number' ? value : Number(value);\n if (!isFinite(num))\n return isNaN(num) ? '.nan' : num < 0 ? 
'-.inf' : '.inf';\n let n = JSON.stringify(value);\n if (!format &&\n minFractionDigits &&\n (!tag || tag === 'tag:yaml.org,2002:float') &&\n /^\\d/.test(n)) {\n let i = n.indexOf('.');\n if (i < 0) {\n i = n.length;\n n += '.';\n }\n let d = minFractionDigits - (n.length - i - 1);\n while (d-- > 0)\n n += '0';\n }\n return n;\n}\n\nexports.stringifyNumber = stringifyNumber;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar Scalar = require('../nodes/Scalar.js');\nvar stringify = require('./stringify.js');\nvar stringifyComment = require('./stringifyComment.js');\n\nfunction stringifyPair({ key, value }, ctx, onComment, onChompKeep) {\n const { allNullValues, doc, indent, indentStep, options: { commentString, indentSeq, simpleKeys } } = ctx;\n let keyComment = (Node.isNode(key) && key.comment) || null;\n if (simpleKeys) {\n if (keyComment) {\n throw new Error('With simple keys, key nodes cannot have comments');\n }\n if (Node.isCollection(key)) {\n const msg = 'With simple keys, collection cannot be used as a key value';\n throw new Error(msg);\n }\n }\n let explicitKey = !simpleKeys &&\n (!key ||\n (keyComment && value == null && !ctx.inFlow) ||\n Node.isCollection(key) ||\n (Node.isScalar(key)\n ? key.type === Scalar.Scalar.BLOCK_FOLDED || key.type === Scalar.Scalar.BLOCK_LITERAL\n : typeof key === 'object'));\n ctx = Object.assign({}, ctx, {\n allNullValues: false,\n implicitKey: !explicitKey && (simpleKeys || !allNullValues),\n indent: indent + indentStep\n });\n let keyCommentDone = false;\n let chompKeep = false;\n let str = stringify.stringify(key, ctx, () => (keyCommentDone = true), () => (chompKeep = true));\n if (!explicitKey && !ctx.inFlow && str.length > 1024) {\n if (simpleKeys)\n throw new Error('With simple keys, single line scalar must not span more than 1024 characters');\n explicitKey = true;\n }\n if (ctx.inFlow) {\n if (allNullValues || value == null) {\n if (keyCommentDone && onComment)\n onComment();\n return str === '' ? '?' : explicitKey ? `? ${str}` : str;\n }\n }\n else if ((allNullValues && !simpleKeys) || (value == null && explicitKey)) {\n str = `? ${str}`;\n if (keyComment && !keyCommentDone) {\n str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment));\n }\n else if (chompKeep && onChompKeep)\n onChompKeep();\n return str;\n }\n if (keyCommentDone)\n keyComment = null;\n if (explicitKey) {\n if (keyComment)\n str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment));\n str = `? 
${str}\\n${indent}:`;\n }\n else {\n str = `${str}:`;\n if (keyComment)\n str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment));\n }\n let vcb = '';\n let valueComment = null;\n if (Node.isNode(value)) {\n if (value.spaceBefore)\n vcb = '\\n';\n if (value.commentBefore) {\n const cs = commentString(value.commentBefore);\n vcb += `\\n${stringifyComment.indentComment(cs, ctx.indent)}`;\n }\n valueComment = value.comment;\n }\n else if (value && typeof value === 'object') {\n value = doc.createNode(value);\n }\n ctx.implicitKey = false;\n if (!explicitKey && !keyComment && Node.isScalar(value))\n ctx.indentAtStart = str.length + 1;\n chompKeep = false;\n if (!indentSeq &&\n indentStep.length >= 2 &&\n !ctx.inFlow &&\n !explicitKey &&\n Node.isSeq(value) &&\n !value.flow &&\n !value.tag &&\n !value.anchor) {\n // If indentSeq === false, consider '- ' as part of indentation where possible\n ctx.indent = ctx.indent.substr(2);\n }\n let valueCommentDone = false;\n const valueStr = stringify.stringify(value, ctx, () => (valueCommentDone = true), () => (chompKeep = true));\n let ws = ' ';\n if (vcb || keyComment) {\n if (valueStr === '' && !ctx.inFlow)\n ws = vcb === '\\n' ? '\\n\\n' : vcb;\n else\n ws = `${vcb}\\n${ctx.indent}`;\n }\n else if (!explicitKey && Node.isCollection(value)) {\n const flow = valueStr[0] === '[' || valueStr[0] === '{';\n if (!flow || valueStr.includes('\\n'))\n ws = `\\n${ctx.indent}`;\n }\n else if (valueStr === '' || valueStr[0] === '\\n')\n ws = '';\n str += ws + valueStr;\n if (ctx.inFlow) {\n if (valueCommentDone && onComment)\n onComment();\n }\n else if (valueComment && !valueCommentDone) {\n str += stringifyComment.lineComment(str, ctx.indent, commentString(valueComment));\n }\n else if (chompKeep && onChompKeep) {\n onChompKeep();\n }\n return str;\n}\n\nexports.stringifyPair = stringifyPair;\n","'use strict';\n\nvar Scalar = require('../nodes/Scalar.js');\nvar foldFlowLines = require('./foldFlowLines.js');\n\nconst getFoldOptions = (ctx) => ({\n indentAtStart: ctx.indentAtStart,\n lineWidth: ctx.options.lineWidth,\n minContentWidth: ctx.options.minContentWidth\n});\n// Also checks for lines starting with %, as parsing the output as YAML 1.1 will\n// presume that's starting a new document.\nconst containsDocumentMarker = (str) => /^(%|---|\\.\\.\\.)/m.test(str);\nfunction lineLengthOverLimit(str, lineWidth, indentLength) {\n if (!lineWidth || lineWidth < 0)\n return false;\n const limit = lineWidth - indentLength;\n const strLen = str.length;\n if (strLen <= limit)\n return false;\n for (let i = 0, start = 0; i < strLen; ++i) {\n if (str[i] === '\\n') {\n if (i - start > limit)\n return true;\n start = i + 1;\n if (strLen - start <= limit)\n return false;\n }\n }\n return true;\n}\nfunction doubleQuotedString(value, ctx) {\n const json = JSON.stringify(value);\n if (ctx.options.doubleQuotedAsJSON)\n return json;\n const { implicitKey } = ctx;\n const minMultiLineLength = ctx.options.doubleQuotedMinMultiLineLength;\n const indent = ctx.indent || (containsDocumentMarker(value) ? 
' ' : '');\n let str = '';\n let start = 0;\n for (let i = 0, ch = json[i]; ch; ch = json[++i]) {\n if (ch === ' ' && json[i + 1] === '\\\\' && json[i + 2] === 'n') {\n // space before newline needs to be escaped to not be folded\n str += json.slice(start, i) + '\\\\ ';\n i += 1;\n start = i;\n ch = '\\\\';\n }\n if (ch === '\\\\')\n switch (json[i + 1]) {\n case 'u':\n {\n str += json.slice(start, i);\n const code = json.substr(i + 2, 4);\n switch (code) {\n case '0000':\n str += '\\\\0';\n break;\n case '0007':\n str += '\\\\a';\n break;\n case '000b':\n str += '\\\\v';\n break;\n case '001b':\n str += '\\\\e';\n break;\n case '0085':\n str += '\\\\N';\n break;\n case '00a0':\n str += '\\\\_';\n break;\n case '2028':\n str += '\\\\L';\n break;\n case '2029':\n str += '\\\\P';\n break;\n default:\n if (code.substr(0, 2) === '00')\n str += '\\\\x' + code.substr(2);\n else\n str += json.substr(i, 6);\n }\n i += 5;\n start = i + 1;\n }\n break;\n case 'n':\n if (implicitKey ||\n json[i + 2] === '\"' ||\n json.length < minMultiLineLength) {\n i += 1;\n }\n else {\n // folding will eat first newline\n str += json.slice(start, i) + '\\n\\n';\n while (json[i + 2] === '\\\\' &&\n json[i + 3] === 'n' &&\n json[i + 4] !== '\"') {\n str += '\\n';\n i += 2;\n }\n str += indent;\n // space after newline needs to be escaped to not be folded\n if (json[i + 2] === ' ')\n str += '\\\\';\n i += 1;\n start = i + 1;\n }\n break;\n default:\n i += 1;\n }\n }\n str = start ? str + json.slice(start) : json;\n return implicitKey\n ? str\n : foldFlowLines.foldFlowLines(str, indent, foldFlowLines.FOLD_QUOTED, getFoldOptions(ctx));\n}\nfunction singleQuotedString(value, ctx) {\n if (ctx.options.singleQuote === false ||\n (ctx.implicitKey && value.includes('\\n')) ||\n /[ \\t]\\n|\\n[ \\t]/.test(value) // single quoted string can't have leading or trailing whitespace around newline\n )\n return doubleQuotedString(value, ctx);\n const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : '');\n const res = \"'\" + value.replace(/'/g, \"''\").replace(/\\n+/g, `$&\\n${indent}`) + \"'\";\n return ctx.implicitKey\n ? res\n : foldFlowLines.foldFlowLines(res, indent, foldFlowLines.FOLD_FLOW, getFoldOptions(ctx));\n}\nfunction quotedString(value, ctx) {\n const { singleQuote } = ctx.options;\n let qs;\n if (singleQuote === false)\n qs = doubleQuotedString;\n else {\n const hasDouble = value.includes('\"');\n const hasSingle = value.includes(\"'\");\n if (hasDouble && !hasSingle)\n qs = singleQuotedString;\n else if (hasSingle && !hasDouble)\n qs = doubleQuotedString;\n else\n qs = singleQuote ? singleQuotedString : doubleQuotedString;\n }\n return qs(value, ctx);\n}\nfunction blockString({ comment, type, value }, ctx, onComment, onChompKeep) {\n const { blockQuote, commentString, lineWidth } = ctx.options;\n // 1. Block can't end in whitespace unless the last line is non-empty.\n // 2. Strings consisting of only whitespace are best rendered explicitly.\n if (!blockQuote || /\\n[\\t ]+$/.test(value) || /^\\s*$/.test(value)) {\n return quotedString(value, ctx);\n }\n const indent = ctx.indent ||\n (ctx.forceBlockIndent || containsDocumentMarker(value) ? ' ' : '');\n const literal = blockQuote === 'literal'\n ? true\n : blockQuote === 'folded' || type === Scalar.Scalar.BLOCK_FOLDED\n ? false\n : type === Scalar.Scalar.BLOCK_LITERAL\n ? true\n : !lineLengthOverLimit(value, lineWidth, indent.length);\n if (!value)\n return literal ? 
'|\\n' : '>\\n';\n // determine chomping from whitespace at value end\n let chomp;\n let endStart;\n for (endStart = value.length; endStart > 0; --endStart) {\n const ch = value[endStart - 1];\n if (ch !== '\\n' && ch !== '\\t' && ch !== ' ')\n break;\n }\n let end = value.substring(endStart);\n const endNlPos = end.indexOf('\\n');\n if (endNlPos === -1) {\n chomp = '-'; // strip\n }\n else if (value === end || endNlPos !== end.length - 1) {\n chomp = '+'; // keep\n if (onChompKeep)\n onChompKeep();\n }\n else {\n chomp = ''; // clip\n }\n if (end) {\n value = value.slice(0, -end.length);\n if (end[end.length - 1] === '\\n')\n end = end.slice(0, -1);\n end = end.replace(/\\n+(?!\\n|$)/g, `$&${indent}`);\n }\n // determine indent indicator from whitespace at value start\n let startWithSpace = false;\n let startEnd;\n let startNlPos = -1;\n for (startEnd = 0; startEnd < value.length; ++startEnd) {\n const ch = value[startEnd];\n if (ch === ' ')\n startWithSpace = true;\n else if (ch === '\\n')\n startNlPos = startEnd;\n else\n break;\n }\n let start = value.substring(0, startNlPos < startEnd ? startNlPos + 1 : startEnd);\n if (start) {\n value = value.substring(start.length);\n start = start.replace(/\\n+/g, `$&${indent}`);\n }\n const indentSize = indent ? '2' : '1'; // root is at -1\n let header = (literal ? '|' : '>') + (startWithSpace ? indentSize : '') + chomp;\n if (comment) {\n header += ' ' + commentString(comment.replace(/ ?[\\r\\n]+/g, ' '));\n if (onComment)\n onComment();\n }\n if (literal) {\n value = value.replace(/\\n+/g, `$&${indent}`);\n return `${header}\\n${indent}${start}${value}${end}`;\n }\n value = value\n .replace(/\\n+/g, '\\n$&')\n .replace(/(?:^|\\n)([\\t ].*)(?:([\\n\\t ]*)\\n(?![\\n\\t ]))?/g, '$1$2') // more-indented lines aren't folded\n // ^ more-ind. ^ empty ^ capture next empty lines only at end of indent\n .replace(/\\n+/g, `$&${indent}`);\n const body = foldFlowLines.foldFlowLines(`${start}${value}${end}`, indent, foldFlowLines.FOLD_BLOCK, getFoldOptions(ctx));\n return `${header}\\n${indent}${body}`;\n}\nfunction plainString(item, ctx, onComment, onChompKeep) {\n const { type, value } = item;\n const { actualString, implicitKey, indent, inFlow } = ctx;\n if ((implicitKey && /[\\n[\\]{},]/.test(value)) ||\n (inFlow && /[[\\]{},]/.test(value))) {\n return quotedString(value, ctx);\n }\n if (!value ||\n /^[\\n\\t ,[\\]{}#&*!|>'\"%@`]|^[?-]$|^[?-][ \\t]|[\\n:][ \\t]|[ \\t]\\n|[\\n\\t ]#|[\\n\\t :]$/.test(value)) {\n // not allowed:\n // - empty string, '-' or '?'\n // - start with an indicator character (except [?:-]) or /[?-] /\n // - '\\n ', ': ' or ' \\n' anywhere\n // - '#' not preceded by a non-space char\n // - end with ' ' or ':'\n return implicitKey || inFlow || !value.includes('\\n')\n ? quotedString(value, ctx)\n : blockString(item, ctx, onComment, onChompKeep);\n }\n if (!implicitKey &&\n !inFlow &&\n type !== Scalar.Scalar.PLAIN &&\n value.includes('\\n')) {\n // Where allowed & type not set explicitly, prefer block style for multiline strings\n return blockString(item, ctx, onComment, onChompKeep);\n }\n if (indent === '' && containsDocumentMarker(value)) {\n ctx.forceBlockIndent = true;\n return blockString(item, ctx, onComment, onChompKeep);\n }\n const str = value.replace(/\\n+/g, `$&\\n${indent}`);\n // Verify that output will be parsed as a string, as e.g. plain numbers and\n // booleans get parsed with those types in v1.2 (e.g. 
'42', 'true' & '0.9e-3'),\n // and others in v1.1.\n if (actualString) {\n const test = (tag) => tag.default && tag.tag !== 'tag:yaml.org,2002:str' && tag.test?.test(str);\n const { compat, tags } = ctx.doc.schema;\n if (tags.some(test) || compat?.some(test))\n return quotedString(value, ctx);\n }\n return implicitKey\n ? str\n : foldFlowLines.foldFlowLines(str, indent, foldFlowLines.FOLD_FLOW, getFoldOptions(ctx));\n}\nfunction stringifyString(item, ctx, onComment, onChompKeep) {\n const { implicitKey, inFlow } = ctx;\n const ss = typeof item.value === 'string'\n ? item\n : Object.assign({}, item, { value: String(item.value) });\n let { type } = item;\n if (type !== Scalar.Scalar.QUOTE_DOUBLE) {\n // force double quotes on control characters & unpaired surrogates\n if (/[\\x00-\\x08\\x0b-\\x1f\\x7f-\\x9f\\u{D800}-\\u{DFFF}]/u.test(ss.value))\n type = Scalar.Scalar.QUOTE_DOUBLE;\n }\n const _stringify = (_type) => {\n switch (_type) {\n case Scalar.Scalar.BLOCK_FOLDED:\n case Scalar.Scalar.BLOCK_LITERAL:\n return implicitKey || inFlow\n ? quotedString(ss.value, ctx) // blocks are not valid inside flow containers\n : blockString(ss, ctx, onComment, onChompKeep);\n case Scalar.Scalar.QUOTE_DOUBLE:\n return doubleQuotedString(ss.value, ctx);\n case Scalar.Scalar.QUOTE_SINGLE:\n return singleQuotedString(ss.value, ctx);\n case Scalar.Scalar.PLAIN:\n return plainString(ss, ctx, onComment, onChompKeep);\n default:\n return null;\n }\n };\n let res = _stringify(type);\n if (res === null) {\n const { defaultKeyType, defaultStringType } = ctx.options;\n const t = (implicitKey && defaultKeyType) || defaultStringType;\n res = _stringify(t);\n if (res === null)\n throw new Error(`Unsupported default string type ${t}`);\n }\n return res;\n}\n\nexports.stringifyString = stringifyString;\n","'use strict';\n\nvar Node = require('./nodes/Node.js');\n\nconst BREAK = Symbol('break visit');\nconst SKIP = Symbol('skip children');\nconst REMOVE = Symbol('remove node');\n/**\n * Apply a visitor to an AST node or document.\n *\n * Walks through the tree (depth-first) starting from `node`, calling a\n * `visitor` function with three arguments:\n * - `key`: For sequence values and map `Pair`, the node's index in the\n * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly.\n * `null` for the root node.\n * - `node`: The current node.\n * - `path`: The ancestry of the current node.\n *\n * The return value of the visitor may be used to control the traversal:\n * - `undefined` (default): Do nothing and continue\n * - `visit.SKIP`: Do not visit the children of this node, continue with next\n * sibling\n * - `visit.BREAK`: Terminate traversal completely\n * - `visit.REMOVE`: Remove the current node, then continue with the next one\n * - `Node`: Replace the current node, then continue by visiting it\n * - `number`: While iterating the items of a sequence or map, set the index\n * of the next step. This is useful especially if the index of the current\n * node has changed.\n *\n * If `visitor` is a single function, it will be called with all values\n * encountered in the tree, including e.g. `null` values. Alternatively,\n * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`,\n * `Alias` and `Scalar` node. To define the same visitor function for more than\n * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar)\n * and `Node` (alias, map, seq & scalar) targets. 
Of all these, only the most\n * specific defined one will be used for each node.\n */\nfunction visit(node, visitor) {\n const visitor_ = initVisitor(visitor);\n if (Node.isDocument(node)) {\n const cd = visit_(null, node.contents, visitor_, Object.freeze([node]));\n if (cd === REMOVE)\n node.contents = null;\n }\n else\n visit_(null, node, visitor_, Object.freeze([]));\n}\n// Without the `as symbol` casts, TS declares these in the `visit`\n// namespace using `var`, but then complains about that because\n// `unique symbol` must be `const`.\n/** Terminate visit traversal completely */\nvisit.BREAK = BREAK;\n/** Do not visit the children of the current node */\nvisit.SKIP = SKIP;\n/** Remove the current node */\nvisit.REMOVE = REMOVE;\nfunction visit_(key, node, visitor, path) {\n const ctrl = callVisitor(key, node, visitor, path);\n if (Node.isNode(ctrl) || Node.isPair(ctrl)) {\n replaceNode(key, path, ctrl);\n return visit_(key, ctrl, visitor, path);\n }\n if (typeof ctrl !== 'symbol') {\n if (Node.isCollection(node)) {\n path = Object.freeze(path.concat(node));\n for (let i = 0; i < node.items.length; ++i) {\n const ci = visit_(i, node.items[i], visitor, path);\n if (typeof ci === 'number')\n i = ci - 1;\n else if (ci === BREAK)\n return BREAK;\n else if (ci === REMOVE) {\n node.items.splice(i, 1);\n i -= 1;\n }\n }\n }\n else if (Node.isPair(node)) {\n path = Object.freeze(path.concat(node));\n const ck = visit_('key', node.key, visitor, path);\n if (ck === BREAK)\n return BREAK;\n else if (ck === REMOVE)\n node.key = null;\n const cv = visit_('value', node.value, visitor, path);\n if (cv === BREAK)\n return BREAK;\n else if (cv === REMOVE)\n node.value = null;\n }\n }\n return ctrl;\n}\n/**\n * Apply an async visitor to an AST node or document.\n *\n * Walks through the tree (depth-first) starting from `node`, calling a\n * `visitor` function with three arguments:\n * - `key`: For sequence values and map `Pair`, the node's index in the\n * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly.\n * `null` for the root node.\n * - `node`: The current node.\n * - `path`: The ancestry of the current node.\n *\n * The return value of the visitor may be used to control the traversal:\n * - `Promise`: Must resolve to one of the following values\n * - `undefined` (default): Do nothing and continue\n * - `visit.SKIP`: Do not visit the children of this node, continue with next\n * sibling\n * - `visit.BREAK`: Terminate traversal completely\n * - `visit.REMOVE`: Remove the current node, then continue with the next one\n * - `Node`: Replace the current node, then continue by visiting it\n * - `number`: While iterating the items of a sequence or map, set the index\n * of the next step. This is useful especially if the index of the current\n * node has changed.\n *\n * If `visitor` is a single function, it will be called with all values\n * encountered in the tree, including e.g. `null` values. Alternatively,\n * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`,\n * `Alias` and `Scalar` node. To define the same visitor function for more than\n * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar)\n * and `Node` (alias, map, seq & scalar) targets. 
Of all these, only the most\n * specific defined one will be used for each node.\n */\nasync function visitAsync(node, visitor) {\n const visitor_ = initVisitor(visitor);\n if (Node.isDocument(node)) {\n const cd = await visitAsync_(null, node.contents, visitor_, Object.freeze([node]));\n if (cd === REMOVE)\n node.contents = null;\n }\n else\n await visitAsync_(null, node, visitor_, Object.freeze([]));\n}\n// Without the `as symbol` casts, TS declares these in the `visit`\n// namespace using `var`, but then complains about that because\n// `unique symbol` must be `const`.\n/** Terminate visit traversal completely */\nvisitAsync.BREAK = BREAK;\n/** Do not visit the children of the current node */\nvisitAsync.SKIP = SKIP;\n/** Remove the current node */\nvisitAsync.REMOVE = REMOVE;\nasync function visitAsync_(key, node, visitor, path) {\n const ctrl = await callVisitor(key, node, visitor, path);\n if (Node.isNode(ctrl) || Node.isPair(ctrl)) {\n replaceNode(key, path, ctrl);\n return visitAsync_(key, ctrl, visitor, path);\n }\n if (typeof ctrl !== 'symbol') {\n if (Node.isCollection(node)) {\n path = Object.freeze(path.concat(node));\n for (let i = 0; i < node.items.length; ++i) {\n const ci = await visitAsync_(i, node.items[i], visitor, path);\n if (typeof ci === 'number')\n i = ci - 1;\n else if (ci === BREAK)\n return BREAK;\n else if (ci === REMOVE) {\n node.items.splice(i, 1);\n i -= 1;\n }\n }\n }\n else if (Node.isPair(node)) {\n path = Object.freeze(path.concat(node));\n const ck = await visitAsync_('key', node.key, visitor, path);\n if (ck === BREAK)\n return BREAK;\n else if (ck === REMOVE)\n node.key = null;\n const cv = await visitAsync_('value', node.value, visitor, path);\n if (cv === BREAK)\n return BREAK;\n else if (cv === REMOVE)\n node.value = null;\n }\n }\n return ctrl;\n}\nfunction initVisitor(visitor) {\n if (typeof visitor === 'object' &&\n (visitor.Collection || visitor.Node || visitor.Value)) {\n return Object.assign({\n Alias: visitor.Node,\n Map: visitor.Node,\n Scalar: visitor.Node,\n Seq: visitor.Node\n }, visitor.Value && {\n Map: visitor.Value,\n Scalar: visitor.Value,\n Seq: visitor.Value\n }, visitor.Collection && {\n Map: visitor.Collection,\n Seq: visitor.Collection\n }, visitor);\n }\n return visitor;\n}\nfunction callVisitor(key, node, visitor, path) {\n if (typeof visitor === 'function')\n return visitor(key, node, path);\n if (Node.isMap(node))\n return visitor.Map?.(key, node, path);\n if (Node.isSeq(node))\n return visitor.Seq?.(key, node, path);\n if (Node.isPair(node))\n return visitor.Pair?.(key, node, path);\n if (Node.isScalar(node))\n return visitor.Scalar?.(key, node, path);\n if (Node.isAlias(node))\n return visitor.Alias?.(key, node, path);\n return undefined;\n}\nfunction replaceNode(key, path, node) {\n const parent = path[path.length - 1];\n if (Node.isCollection(parent)) {\n parent.items[key] = node;\n }\n else if (Node.isPair(parent)) {\n if (key === 'key')\n parent.key = node;\n else\n parent.value = node;\n }\n else if (Node.isDocument(parent)) {\n parent.contents = node;\n }\n else {\n const pt = Node.isAlias(parent) ? 
'alias' : 'scalar';\n throw new Error(`Cannot replace node with ${pt} parent`);\n }\n}\n\nexports.visit = visit;\nexports.visitAsync = visitAsync;\n",null,"module.exports = require(\"assert\");","module.exports = require(\"constants\");","module.exports = require(\"events\");","module.exports = require(\"fs\");","module.exports = require(\"http\");","module.exports = require(\"https\");","module.exports = require(\"net\");","module.exports = require(\"os\");","module.exports = require(\"path\");","module.exports = require(\"punycode\");","module.exports = require(\"stream\");","module.exports = require(\"tls\");","module.exports = require(\"url\");","module.exports = require(\"util\");","module.exports = require(\"zlib\");","// The module cache\nvar __webpack_module_cache__ = {};\n\n// The require function\nfunction __webpack_require__(moduleId) {\n\t// Check if module is in cache\n\tvar cachedModule = __webpack_module_cache__[moduleId];\n\tif (cachedModule !== undefined) {\n\t\treturn cachedModule.exports;\n\t}\n\t// Create a new module (and put it into the cache)\n\tvar module = __webpack_module_cache__[moduleId] = {\n\t\t// no module.id needed\n\t\t// no module.loaded needed\n\t\texports: {}\n\t};\n\n\t// Execute the module function\n\tvar threw = true;\n\ttry {\n\t\t__webpack_modules__[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\t\tthrew = false;\n\t} finally {\n\t\tif(threw) delete __webpack_module_cache__[moduleId];\n\t}\n\n\t// Return the exports of the module\n\treturn module.exports;\n}\n\n","\nif (typeof __webpack_require__ !== 'undefined') __webpack_require__.ab = __dirname + \"/\";","// startup\n// Load entry module and return exports\n// This entry module is referenced by other modules so it can't be inlined\nvar __webpack_exports__ = 
__webpack_require__(3109);\n"],"mappings":";;;;;;;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AChMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;A;;;;;;AC3FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;A;;;;;;ACxUA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;A;;;;;;ACzCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;A;;;;;;AC5EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;A;;;;;;ACzDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;A;;;;;;AC1RA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;A;;;;;;ACvCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;A;;;;;;ACrDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;A;;;;;;ACnCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;A;;;;;;AC1CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;A;;;;;;ACrDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;A;;;;;;AChFA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;A;;;;;;AC5lBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;A;;;;;;AC5DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACtDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC/KA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACrYA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACrHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC5MA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACllCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACzEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;AChLA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;ACxDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;AC7CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;AC1BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AClBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACnBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACxKA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC1OA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACNA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACtCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACpEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC/DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AClGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC9BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACjFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC/HA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACXA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACXA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACbA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC1BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;;;A;;;;;;ACpBA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACNA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACrDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC1EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACvCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACXA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACrBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC7SA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACzJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;;;A;;;;;;ACzBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;AC/bA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;ACrHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC3TA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;AC3OA;;;A;;;;;;A;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AC7BA;AACA;;;;ACDA;AACA;AACA;AACA;;;;A","sourceRoot":""} \ No newline at end of file diff --git a/.github/actions/collect-charts/dist/licenses.txt b/.github/actions/collect-charts/dist/licenses.txt deleted file mode 100644 index 8ea22858..00000000 --- a/.github/actions/collect-charts/dist/licenses.txt +++ /dev/null @@ -1,720 +0,0 @@ -@actions/core -MIT -The MIT License (MIT) - -Copyright 2019 GitHub - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -@actions/github -MIT -The MIT License (MIT) - -Copyright 2019 GitHub - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -@actions/http-client -MIT -Actions Http Client for Node.js - -Copyright (c) GitHub, Inc. - -All rights reserved. - -MIT License - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and -associated documentation files (the "Software"), to deal in the Software without restriction, -including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT -LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - - -@octokit/auth-token -MIT -The MIT License - -Copyright (c) 2019 Octokit contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. - - -@octokit/core -MIT -The MIT License - -Copyright (c) 2019 Octokit contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. - - -@octokit/endpoint -MIT -The MIT License - -Copyright (c) 2018 Octokit contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. - - -@octokit/graphql -MIT -The MIT License - -Copyright (c) 2018 Octokit contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. - - -@octokit/plugin-paginate-rest -MIT -MIT License Copyright (c) 2019 Octokit contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - - -@octokit/plugin-rest-endpoint-methods -MIT -MIT License Copyright (c) 2019 Octokit contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - - -@octokit/request -MIT -The MIT License - -Copyright (c) 2018 Octokit contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. - - -@octokit/request-error -MIT -The MIT License - -Copyright (c) 2019 Octokit contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. - - -@vercel/ncc -MIT -Copyright 2018 ZEIT, Inc. - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -before-after-hook -Apache-2.0 - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. 
- - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2018 Gregor Martynus and other contributors. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - - -deprecation -ISC -The ISC License - -Copyright (c) Gregor Martynus and contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. 
- -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - - -fs-extra -MIT -(The MIT License) - -Copyright (c) 2011-2017 JP Richardson - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files -(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, - merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS -OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, - ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - - -graceful-fs -ISC -The ISC License - -Copyright (c) 2011-2022 Isaac Z. Schlueter, Ben Noordhuis, and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - - -is-plain-object -MIT -The MIT License (MIT) - -Copyright (c) 2014-2017, Jon Schlinkert. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. - - -jsonfile -MIT -(The MIT License) - -Copyright (c) 2012-2015, JP Richardson - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files -(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, - merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS -OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, - ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - - -node-fetch -MIT -The MIT License (MIT) - -Copyright (c) 2016 David Frank - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - - - -once -ISC -The ISC License - -Copyright (c) Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- - -tr46 -MIT - -tunnel -MIT -The MIT License (MIT) - -Copyright (c) 2012 Koichi Kobayashi - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. - - -universal-user-agent -ISC -# [ISC License](https://spdx.org/licenses/ISC) - -Copyright (c) 2018, Gregor Martynus (https://github.com/gr2m) - -Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - - -universalify -MIT -(The MIT License) - -Copyright (c) 2017, Ryan Zimmerman - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the 'Software'), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - - -webidl-conversions -BSD-2-Clause -# The BSD 2-Clause License - -Copyright (c) 2014, Domenic Denicola -All rights reserved. - -Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - -1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - -2. 
Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - - -whatwg-url -MIT -The MIT License (MIT) - -Copyright (c) 2015–2016 Sebastian Mayr - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. - - -wrappy -ISC -The ISC License - -Copyright (c) Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - - -yaml -ISC -Copyright Eemeli Aro - -Permission to use, copy, modify, and/or distribute this software for any purpose -with or without fee is hereby granted, provided that the above copyright notice -and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH -REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND -FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER -TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF -THIS SOFTWARE. diff --git a/.github/actions/collect-charts/dist/sourcemap-register.js b/.github/actions/collect-charts/dist/sourcemap-register.js deleted file mode 100644 index 56566f1d..00000000 --- a/.github/actions/collect-charts/dist/sourcemap-register.js +++ /dev/null @@ -1 +0,0 @@ -(()=>{var e={650:e=>{var r=Object.prototype.toString;var n=typeof Buffer.alloc==="function"&&typeof Buffer.allocUnsafe==="function"&&typeof Buffer.from==="function";function isArrayBuffer(e){return r.call(e).slice(8,-1)==="ArrayBuffer"}function fromArrayBuffer(e,r,t){r>>>=0;var o=e.byteLength-r;if(o<0){throw new RangeError("'offset' is out of bounds")}if(t===undefined){t=o}else{t>>>=0;if(t>o){throw new RangeError("'length' is out of bounds")}}return n?Buffer.from(e.slice(r,r+t)):new Buffer(new Uint8Array(e.slice(r,r+t)))}function fromString(e,r){if(typeof r!=="string"||r===""){r="utf8"}if(!Buffer.isEncoding(r)){throw new TypeError('"encoding" must be a valid string encoding')}return n?Buffer.from(e,r):new Buffer(e,r)}function bufferFrom(e,r,t){if(typeof e==="number"){throw new TypeError('"value" argument must not be a number')}if(isArrayBuffer(e)){return fromArrayBuffer(e,r,t)}if(typeof e==="string"){return fromString(e,r)}return n?Buffer.from(e):new Buffer(e)}e.exports=bufferFrom},284:(e,r,n)=>{e=n.nmd(e);var t=n(596).SourceMapConsumer;var o=n(622);var i;try{i=n(747);if(!i.existsSync||!i.readFileSync){i=null}}catch(e){}var a=n(650);function dynamicRequire(e,r){return e.require(r)}var u=false;var s=false;var l=false;var c="auto";var p={};var f={};var g=/^data:application\/json[^,]+base64,/;var h=[];var d=[];function isInBrowser(){if(c==="browser")return true;if(c==="node")return false;return typeof window!=="undefined"&&typeof XMLHttpRequest==="function"&&!(window.require&&window.module&&window.process&&window.process.type==="renderer")}function hasGlobalProcessEventEmitter(){return typeof process==="object"&&process!==null&&typeof process.on==="function"}function handlerExec(e){return function(r){for(var n=0;n"}var n=this.getLineNumber();if(n!=null){r+=":"+n;var t=this.getColumnNumber();if(t){r+=":"+t}}}var o="";var i=this.getFunctionName();var a=true;var u=this.isConstructor();var s=!(this.isToplevel()||u);if(s){var l=this.getTypeName();if(l==="[object Object]"){l="null"}var c=this.getMethodName();if(i){if(l&&i.indexOf(l)!=0){o+=l+"."}o+=i;if(c&&i.indexOf("."+c)!=i.length-c.length-1){o+=" [as "+c+"]"}}else{o+=l+"."+(c||"")}}else if(u){o+="new "+(i||"")}else if(i){o+=i}else{o+=r;a=false}if(a){o+=" ("+r+")"}return o}function cloneCallSite(e){var r={};Object.getOwnPropertyNames(Object.getPrototypeOf(e)).forEach((function(n){r[n]=/^(?:is|get)/.test(n)?function(){return e[n].call(e)}:e[n]}));r.toString=CallSiteToString;return r}function wrapCallSite(e,r){if(r===undefined){r={nextPosition:null,curPosition:null}}if(e.isNative()){r.curPosition=null;return e}var n=e.getFileName()||e.getScriptNameOrSourceURL();if(n){var t=e.getLineNumber();var o=e.getColumnNumber()-1;var i=/^v(10\.1[6-9]|10\.[2-9][0-9]|10\.[0-9]{3,}|1[2-9]\d*|[2-9]\d|\d{3,}|11\.11)/;var a=i.test(process.version)?0:62;if(t===1&&o>a&&!isInBrowser()&&!e.isEval()){o-=a}var 
u=mapSourcePosition({source:n,line:t,column:o});r.curPosition=u;e=cloneCallSite(e);var s=e.getFunctionName;e.getFunctionName=function(){if(r.nextPosition==null){return s()}return r.nextPosition.name||s()};e.getFileName=function(){return u.source};e.getLineNumber=function(){return u.line};e.getColumnNumber=function(){return u.column+1};e.getScriptNameOrSourceURL=function(){return u.source};return e}var l=e.isEval()&&e.getEvalOrigin();if(l){l=mapEvalOrigin(l);e=cloneCallSite(e);e.getEvalOrigin=function(){return l};return e}return e}function prepareStackTrace(e,r){if(l){p={};f={}}var n=e.name||"Error";var t=e.message||"";var o=n+": "+t;var i={nextPosition:null,curPosition:null};var a=[];for(var u=r.length-1;u>=0;u--){a.push("\n at "+wrapCallSite(r[u],i));i.nextPosition=i.curPosition}i.curPosition=i.nextPosition=null;return o+a.reverse().join("")}function getErrorSource(e){var r=/\n at [^(]+ \((.*):(\d+):(\d+)\)/.exec(e.stack);if(r){var n=r[1];var t=+r[2];var o=+r[3];var a=p[n];if(!a&&i&&i.existsSync(n)){try{a=i.readFileSync(n,"utf8")}catch(e){a=""}}if(a){var u=a.split(/(?:\r\n|\r|\n)/)[t-1];if(u){return n+":"+t+"\n"+u+"\n"+new Array(o).join(" ")+"^"}}}return null}function printErrorAndExit(e){var r=getErrorSource(e);if(process.stderr._handle&&process.stderr._handle.setBlocking){process.stderr._handle.setBlocking(true)}if(r){console.error();console.error(r)}console.error(e.stack);process.exit(1)}function shimEmitUncaughtException(){var e=process.emit;process.emit=function(r){if(r==="uncaughtException"){var n=arguments[1]&&arguments[1].stack;var t=this.listeners(r).length>0;if(n&&!t){return printErrorAndExit(arguments[1])}}return e.apply(this,arguments)}}var S=h.slice(0);var _=d.slice(0);r.wrapCallSite=wrapCallSite;r.getErrorSource=getErrorSource;r.mapSourcePosition=mapSourcePosition;r.retrieveSourceMap=v;r.install=function(r){r=r||{};if(r.environment){c=r.environment;if(["node","browser","auto"].indexOf(c)===-1){throw new Error("environment "+c+" was unknown. 
Available options are {auto, browser, node}")}}if(r.retrieveFile){if(r.overrideRetrieveFile){h.length=0}h.unshift(r.retrieveFile)}if(r.retrieveSourceMap){if(r.overrideRetrieveSourceMap){d.length=0}d.unshift(r.retrieveSourceMap)}if(r.hookRequire&&!isInBrowser()){var n=dynamicRequire(e,"module");var t=n.prototype._compile;if(!t.__sourceMapSupport){n.prototype._compile=function(e,r){p[r]=e;f[r]=undefined;return t.call(this,e,r)};n.prototype._compile.__sourceMapSupport=true}}if(!l){l="emptyCacheBetweenOperations"in r?r.emptyCacheBetweenOperations:false}if(!u){u=true;Error.prepareStackTrace=prepareStackTrace}if(!s){var o="handleUncaughtExceptions"in r?r.handleUncaughtExceptions:true;try{var i=dynamicRequire(e,"worker_threads");if(i.isMainThread===false){o=false}}catch(e){}if(o&&hasGlobalProcessEventEmitter()){s=true;shimEmitUncaughtException()}}};r.resetRetrieveHandlers=function(){h.length=0;d.length=0;h=S.slice(0);d=_.slice(0);v=handlerExec(d);m=handlerExec(h)}},837:(e,r,n)=>{var t=n(983);var o=Object.prototype.hasOwnProperty;var i=typeof Map!=="undefined";function ArraySet(){this._array=[];this._set=i?new Map:Object.create(null)}ArraySet.fromArray=function ArraySet_fromArray(e,r){var n=new ArraySet;for(var t=0,o=e.length;t=0){return r}}else{var n=t.toSetString(e);if(o.call(this._set,n)){return this._set[n]}}throw new Error('"'+e+'" is not in the set.')};ArraySet.prototype.at=function ArraySet_at(e){if(e>=0&&e{var t=n(537);var o=5;var i=1<>1;return r?-n:n}r.encode=function base64VLQ_encode(e){var r="";var n;var i=toVLQSigned(e);do{n=i&a;i>>>=o;if(i>0){n|=u}r+=t.encode(n)}while(i>0);return r};r.decode=function base64VLQ_decode(e,r,n){var i=e.length;var s=0;var l=0;var c,p;do{if(r>=i){throw new Error("Expected more digits in base 64 VLQ value.")}p=t.decode(e.charCodeAt(r++));if(p===-1){throw new Error("Invalid base64 digit: "+e.charAt(r-1))}c=!!(p&u);p&=a;s=s+(p<{var n="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".split("");r.encode=function(e){if(0<=e&&e{r.GREATEST_LOWER_BOUND=1;r.LEAST_UPPER_BOUND=2;function recursiveSearch(e,n,t,o,i,a){var u=Math.floor((n-e)/2)+e;var s=i(t,o[u],true);if(s===0){return u}else if(s>0){if(n-u>1){return recursiveSearch(u,n,t,o,i,a)}if(a==r.LEAST_UPPER_BOUND){return n1){return recursiveSearch(e,u,t,o,i,a)}if(a==r.LEAST_UPPER_BOUND){return u}else{return e<0?-1:e}}}r.search=function search(e,n,t,o){if(n.length===0){return-1}var i=recursiveSearch(-1,n.length,e,n,t,o||r.GREATEST_LOWER_BOUND);if(i<0){return-1}while(i-1>=0){if(t(n[i],n[i-1],true)!==0){break}--i}return i}},740:(e,r,n)=>{var t=n(983);function generatedPositionAfter(e,r){var n=e.generatedLine;var o=r.generatedLine;var i=e.generatedColumn;var a=r.generatedColumn;return o>n||o==n&&a>=i||t.compareByGeneratedPositionsInflated(e,r)<=0}function MappingList(){this._array=[];this._sorted=true;this._last={generatedLine:-1,generatedColumn:0}}MappingList.prototype.unsortedForEach=function MappingList_forEach(e,r){this._array.forEach(e,r)};MappingList.prototype.add=function MappingList_add(e){if(generatedPositionAfter(this._last,e)){this._last=e;this._array.push(e)}else{this._sorted=false;this._array.push(e)}};MappingList.prototype.toArray=function MappingList_toArray(){if(!this._sorted){this._array.sort(t.compareByGeneratedPositionsInflated);this._sorted=true}return this._array};r.H=MappingList},226:(e,r)=>{function swap(e,r,n){var t=e[r];e[r]=e[n];e[n]=t}function randomIntInRange(e,r){return Math.round(e+Math.random()*(r-e))}function doQuickSort(e,r,n,t){if(n{var t;var o=n(983);var i=n(164);var 
a=n(837).I;var u=n(215);var s=n(226).U;function SourceMapConsumer(e,r){var n=e;if(typeof e==="string"){n=o.parseSourceMapInput(e)}return n.sections!=null?new IndexedSourceMapConsumer(n,r):new BasicSourceMapConsumer(n,r)}SourceMapConsumer.fromSourceMap=function(e,r){return BasicSourceMapConsumer.fromSourceMap(e,r)};SourceMapConsumer.prototype._version=3;SourceMapConsumer.prototype.__generatedMappings=null;Object.defineProperty(SourceMapConsumer.prototype,"_generatedMappings",{configurable:true,enumerable:true,get:function(){if(!this.__generatedMappings){this._parseMappings(this._mappings,this.sourceRoot)}return this.__generatedMappings}});SourceMapConsumer.prototype.__originalMappings=null;Object.defineProperty(SourceMapConsumer.prototype,"_originalMappings",{configurable:true,enumerable:true,get:function(){if(!this.__originalMappings){this._parseMappings(this._mappings,this.sourceRoot)}return this.__originalMappings}});SourceMapConsumer.prototype._charIsMappingSeparator=function SourceMapConsumer_charIsMappingSeparator(e,r){var n=e.charAt(r);return n===";"||n===","};SourceMapConsumer.prototype._parseMappings=function SourceMapConsumer_parseMappings(e,r){throw new Error("Subclasses must implement _parseMappings")};SourceMapConsumer.GENERATED_ORDER=1;SourceMapConsumer.ORIGINAL_ORDER=2;SourceMapConsumer.GREATEST_LOWER_BOUND=1;SourceMapConsumer.LEAST_UPPER_BOUND=2;SourceMapConsumer.prototype.eachMapping=function SourceMapConsumer_eachMapping(e,r,n){var t=r||null;var i=n||SourceMapConsumer.GENERATED_ORDER;var a;switch(i){case SourceMapConsumer.GENERATED_ORDER:a=this._generatedMappings;break;case SourceMapConsumer.ORIGINAL_ORDER:a=this._originalMappings;break;default:throw new Error("Unknown order of iteration.")}var u=this.sourceRoot;a.map((function(e){var r=e.source===null?null:this._sources.at(e.source);r=o.computeSourceURL(u,r,this._sourceMapURL);return{source:r,generatedLine:e.generatedLine,generatedColumn:e.generatedColumn,originalLine:e.originalLine,originalColumn:e.originalColumn,name:e.name===null?null:this._names.at(e.name)}}),this).forEach(e,t)};SourceMapConsumer.prototype.allGeneratedPositionsFor=function SourceMapConsumer_allGeneratedPositionsFor(e){var r=o.getArg(e,"line");var n={source:o.getArg(e,"source"),originalLine:r,originalColumn:o.getArg(e,"column",0)};n.source=this._findSourceIndex(n.source);if(n.source<0){return[]}var t=[];var a=this._findMapping(n,this._originalMappings,"originalLine","originalColumn",o.compareByOriginalPositions,i.LEAST_UPPER_BOUND);if(a>=0){var u=this._originalMappings[a];if(e.column===undefined){var s=u.originalLine;while(u&&u.originalLine===s){t.push({line:o.getArg(u,"generatedLine",null),column:o.getArg(u,"generatedColumn",null),lastColumn:o.getArg(u,"lastGeneratedColumn",null)});u=this._originalMappings[++a]}}else{var l=u.originalColumn;while(u&&u.originalLine===r&&u.originalColumn==l){t.push({line:o.getArg(u,"generatedLine",null),column:o.getArg(u,"generatedColumn",null),lastColumn:o.getArg(u,"lastGeneratedColumn",null)});u=this._originalMappings[++a]}}}return t};r.SourceMapConsumer=SourceMapConsumer;function BasicSourceMapConsumer(e,r){var n=e;if(typeof e==="string"){n=o.parseSourceMapInput(e)}var t=o.getArg(n,"version");var i=o.getArg(n,"sources");var u=o.getArg(n,"names",[]);var s=o.getArg(n,"sourceRoot",null);var l=o.getArg(n,"sourcesContent",null);var c=o.getArg(n,"mappings");var p=o.getArg(n,"file",null);if(t!=this._version){throw new Error("Unsupported version: 
"+t)}if(s){s=o.normalize(s)}i=i.map(String).map(o.normalize).map((function(e){return s&&o.isAbsolute(s)&&o.isAbsolute(e)?o.relative(s,e):e}));this._names=a.fromArray(u.map(String),true);this._sources=a.fromArray(i,true);this._absoluteSources=this._sources.toArray().map((function(e){return o.computeSourceURL(s,e,r)}));this.sourceRoot=s;this.sourcesContent=l;this._mappings=c;this._sourceMapURL=r;this.file=p}BasicSourceMapConsumer.prototype=Object.create(SourceMapConsumer.prototype);BasicSourceMapConsumer.prototype.consumer=SourceMapConsumer;BasicSourceMapConsumer.prototype._findSourceIndex=function(e){var r=e;if(this.sourceRoot!=null){r=o.relative(this.sourceRoot,r)}if(this._sources.has(r)){return this._sources.indexOf(r)}var n;for(n=0;n1){v.source=l+_[1];l+=_[1];v.originalLine=i+_[2];i=v.originalLine;v.originalLine+=1;v.originalColumn=a+_[3];a=v.originalColumn;if(_.length>4){v.name=c+_[4];c+=_[4]}}m.push(v);if(typeof v.originalLine==="number"){d.push(v)}}}s(m,o.compareByGeneratedPositionsDeflated);this.__generatedMappings=m;s(d,o.compareByOriginalPositions);this.__originalMappings=d};BasicSourceMapConsumer.prototype._findMapping=function SourceMapConsumer_findMapping(e,r,n,t,o,a){if(e[n]<=0){throw new TypeError("Line must be greater than or equal to 1, got "+e[n])}if(e[t]<0){throw new TypeError("Column must be greater than or equal to 0, got "+e[t])}return i.search(e,r,o,a)};BasicSourceMapConsumer.prototype.computeColumnSpans=function SourceMapConsumer_computeColumnSpans(){for(var e=0;e=0){var t=this._generatedMappings[n];if(t.generatedLine===r.generatedLine){var i=o.getArg(t,"source",null);if(i!==null){i=this._sources.at(i);i=o.computeSourceURL(this.sourceRoot,i,this._sourceMapURL)}var a=o.getArg(t,"name",null);if(a!==null){a=this._names.at(a)}return{source:i,line:o.getArg(t,"originalLine",null),column:o.getArg(t,"originalColumn",null),name:a}}}return{source:null,line:null,column:null,name:null}};BasicSourceMapConsumer.prototype.hasContentsOfAllSources=function BasicSourceMapConsumer_hasContentsOfAllSources(){if(!this.sourcesContent){return false}return this.sourcesContent.length>=this._sources.size()&&!this.sourcesContent.some((function(e){return e==null}))};BasicSourceMapConsumer.prototype.sourceContentFor=function SourceMapConsumer_sourceContentFor(e,r){if(!this.sourcesContent){return null}var n=this._findSourceIndex(e);if(n>=0){return this.sourcesContent[n]}var t=e;if(this.sourceRoot!=null){t=o.relative(this.sourceRoot,t)}var i;if(this.sourceRoot!=null&&(i=o.urlParse(this.sourceRoot))){var a=t.replace(/^file:\/\//,"");if(i.scheme=="file"&&this._sources.has(a)){return this.sourcesContent[this._sources.indexOf(a)]}if((!i.path||i.path=="/")&&this._sources.has("/"+t)){return this.sourcesContent[this._sources.indexOf("/"+t)]}}if(r){return null}else{throw new Error('"'+t+'" is not in the SourceMap.')}};BasicSourceMapConsumer.prototype.generatedPositionFor=function SourceMapConsumer_generatedPositionFor(e){var r=o.getArg(e,"source");r=this._findSourceIndex(r);if(r<0){return{line:null,column:null,lastColumn:null}}var n={source:r,originalLine:o.getArg(e,"line"),originalColumn:o.getArg(e,"column")};var t=this._findMapping(n,this._originalMappings,"originalLine","originalColumn",o.compareByOriginalPositions,o.getArg(e,"bias",SourceMapConsumer.GREATEST_LOWER_BOUND));if(t>=0){var 
i=this._originalMappings[t];if(i.source===n.source){return{line:o.getArg(i,"generatedLine",null),column:o.getArg(i,"generatedColumn",null),lastColumn:o.getArg(i,"lastGeneratedColumn",null)}}}return{line:null,column:null,lastColumn:null}};t=BasicSourceMapConsumer;function IndexedSourceMapConsumer(e,r){var n=e;if(typeof e==="string"){n=o.parseSourceMapInput(e)}var t=o.getArg(n,"version");var i=o.getArg(n,"sections");if(t!=this._version){throw new Error("Unsupported version: "+t)}this._sources=new a;this._names=new a;var u={line:-1,column:0};this._sections=i.map((function(e){if(e.url){throw new Error("Support for url field in sections not implemented.")}var n=o.getArg(e,"offset");var t=o.getArg(n,"line");var i=o.getArg(n,"column");if(t{var t=n(215);var o=n(983);var i=n(837).I;var a=n(740).H;function SourceMapGenerator(e){if(!e){e={}}this._file=o.getArg(e,"file",null);this._sourceRoot=o.getArg(e,"sourceRoot",null);this._skipValidation=o.getArg(e,"skipValidation",false);this._sources=new i;this._names=new i;this._mappings=new a;this._sourcesContents=null}SourceMapGenerator.prototype._version=3;SourceMapGenerator.fromSourceMap=function SourceMapGenerator_fromSourceMap(e){var r=e.sourceRoot;var n=new SourceMapGenerator({file:e.file,sourceRoot:r});e.eachMapping((function(e){var t={generated:{line:e.generatedLine,column:e.generatedColumn}};if(e.source!=null){t.source=e.source;if(r!=null){t.source=o.relative(r,t.source)}t.original={line:e.originalLine,column:e.originalColumn};if(e.name!=null){t.name=e.name}}n.addMapping(t)}));e.sources.forEach((function(t){var i=t;if(r!==null){i=o.relative(r,t)}if(!n._sources.has(i)){n._sources.add(i)}var a=e.sourceContentFor(t);if(a!=null){n.setSourceContent(t,a)}}));return n};SourceMapGenerator.prototype.addMapping=function SourceMapGenerator_addMapping(e){var r=o.getArg(e,"generated");var n=o.getArg(e,"original",null);var t=o.getArg(e,"source",null);var i=o.getArg(e,"name",null);if(!this._skipValidation){this._validateMapping(r,n,t,i)}if(t!=null){t=String(t);if(!this._sources.has(t)){this._sources.add(t)}}if(i!=null){i=String(i);if(!this._names.has(i)){this._names.add(i)}}this._mappings.add({generatedLine:r.line,generatedColumn:r.column,originalLine:n!=null&&n.line,originalColumn:n!=null&&n.column,source:t,name:i})};SourceMapGenerator.prototype.setSourceContent=function SourceMapGenerator_setSourceContent(e,r){var n=e;if(this._sourceRoot!=null){n=o.relative(this._sourceRoot,n)}if(r!=null){if(!this._sourcesContents){this._sourcesContents=Object.create(null)}this._sourcesContents[o.toSetString(n)]=r}else if(this._sourcesContents){delete this._sourcesContents[o.toSetString(n)];if(Object.keys(this._sourcesContents).length===0){this._sourcesContents=null}}};SourceMapGenerator.prototype.applySourceMap=function SourceMapGenerator_applySourceMap(e,r,n){var t=r;if(r==null){if(e.file==null){throw new Error("SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, "+'or the source map\'s "file" property. 
Both were omitted.')}t=e.file}var a=this._sourceRoot;if(a!=null){t=o.relative(a,t)}var u=new i;var s=new i;this._mappings.unsortedForEach((function(r){if(r.source===t&&r.originalLine!=null){var i=e.originalPositionFor({line:r.originalLine,column:r.originalColumn});if(i.source!=null){r.source=i.source;if(n!=null){r.source=o.join(n,r.source)}if(a!=null){r.source=o.relative(a,r.source)}r.originalLine=i.line;r.originalColumn=i.column;if(i.name!=null){r.name=i.name}}}var l=r.source;if(l!=null&&!u.has(l)){u.add(l)}var c=r.name;if(c!=null&&!s.has(c)){s.add(c)}}),this);this._sources=u;this._names=s;e.sources.forEach((function(r){var t=e.sourceContentFor(r);if(t!=null){if(n!=null){r=o.join(n,r)}if(a!=null){r=o.relative(a,r)}this.setSourceContent(r,t)}}),this)};SourceMapGenerator.prototype._validateMapping=function SourceMapGenerator_validateMapping(e,r,n,t){if(r&&typeof r.line!=="number"&&typeof r.column!=="number"){throw new Error("original.line and original.column are not numbers -- you probably meant to omit "+"the original mapping entirely and only map the generated position. If so, pass "+"null for the original mapping instead of an object with empty or null values.")}if(e&&"line"in e&&"column"in e&&e.line>0&&e.column>=0&&!r&&!n&&!t){return}else if(e&&"line"in e&&"column"in e&&r&&"line"in r&&"column"in r&&e.line>0&&e.column>=0&&r.line>0&&r.column>=0&&n){return}else{throw new Error("Invalid mapping: "+JSON.stringify({generated:e,source:n,original:r,name:t}))}};SourceMapGenerator.prototype._serializeMappings=function SourceMapGenerator_serializeMappings(){var e=0;var r=1;var n=0;var i=0;var a=0;var u=0;var s="";var l;var c;var p;var f;var g=this._mappings.toArray();for(var h=0,d=g.length;h0){if(!o.compareByGeneratedPositionsInflated(c,g[h-1])){continue}l+=","}}l+=t.encode(c.generatedColumn-e);e=c.generatedColumn;if(c.source!=null){f=this._sources.indexOf(c.source);l+=t.encode(f-u);u=f;l+=t.encode(c.originalLine-1-i);i=c.originalLine-1;l+=t.encode(c.originalColumn-n);n=c.originalColumn;if(c.name!=null){p=this._names.indexOf(c.name);l+=t.encode(p-a);a=p}}s+=l}return s};SourceMapGenerator.prototype._generateSourcesContent=function SourceMapGenerator_generateSourcesContent(e,r){return e.map((function(e){if(!this._sourcesContents){return null}if(r!=null){e=o.relative(r,e)}var n=o.toSetString(e);return Object.prototype.hasOwnProperty.call(this._sourcesContents,n)?this._sourcesContents[n]:null}),this)};SourceMapGenerator.prototype.toJSON=function SourceMapGenerator_toJSON(){var e={version:this._version,sources:this._sources.toArray(),names:this._names.toArray(),mappings:this._serializeMappings()};if(this._file!=null){e.file=this._file}if(this._sourceRoot!=null){e.sourceRoot=this._sourceRoot}if(this._sourcesContents){e.sourcesContent=this._generateSourcesContent(e.sources,e.sourceRoot)}return e};SourceMapGenerator.prototype.toString=function SourceMapGenerator_toString(){return JSON.stringify(this.toJSON())};r.h=SourceMapGenerator},990:(e,r,n)=>{var t;var o=n(341).h;var i=n(983);var a=/(\r?\n)/;var u=10;var s="$$$isSourceNode$$$";function SourceNode(e,r,n,t,o){this.children=[];this.sourceContents={};this.line=e==null?null:e;this.column=r==null?null:r;this.source=n==null?null:n;this.name=o==null?null:o;this[s]=true;if(t!=null)this.add(t)}SourceNode.fromStringWithSourceMap=function SourceNode_fromStringWithSourceMap(e,r,n){var t=new SourceNode;var o=e.split(a);var u=0;var shiftNextLine=function(){var e=getNextLine();var r=getNextLine()||"";return e+r;function getNextLine(){return 
u=0;r--){this.prepend(e[r])}}else if(e[s]||typeof e==="string"){this.children.unshift(e)}else{throw new TypeError("Expected a SourceNode, string, or an array of SourceNodes and strings. Got "+e)}return this};SourceNode.prototype.walk=function SourceNode_walk(e){var r;for(var n=0,t=this.children.length;n0){r=[];for(n=0;n{function getArg(e,r,n){if(r in e){return e[r]}else if(arguments.length===3){return n}else{throw new Error('"'+r+'" is a required argument.')}}r.getArg=getArg;var n=/^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.-]*)(?::(\d+))?(.*)$/;var t=/^data:.+\,.+$/;function urlParse(e){var r=e.match(n);if(!r){return null}return{scheme:r[1],auth:r[2],host:r[3],port:r[4],path:r[5]}}r.urlParse=urlParse;function urlGenerate(e){var r="";if(e.scheme){r+=e.scheme+":"}r+="//";if(e.auth){r+=e.auth+"@"}if(e.host){r+=e.host}if(e.port){r+=":"+e.port}if(e.path){r+=e.path}return r}r.urlGenerate=urlGenerate;function normalize(e){var n=e;var t=urlParse(e);if(t){if(!t.path){return e}n=t.path}var o=r.isAbsolute(n);var i=n.split(/\/+/);for(var a,u=0,s=i.length-1;s>=0;s--){a=i[s];if(a==="."){i.splice(s,1)}else if(a===".."){u++}else if(u>0){if(a===""){i.splice(s+1,u);u=0}else{i.splice(s,2);u--}}}n=i.join("/");if(n===""){n=o?"/":"."}if(t){t.path=n;return urlGenerate(t)}return n}r.normalize=normalize;function join(e,r){if(e===""){e="."}if(r===""){r="."}var n=urlParse(r);var o=urlParse(e);if(o){e=o.path||"/"}if(n&&!n.scheme){if(o){n.scheme=o.scheme}return urlGenerate(n)}if(n||r.match(t)){return r}if(o&&!o.host&&!o.path){o.host=r;return urlGenerate(o)}var i=r.charAt(0)==="/"?r:normalize(e.replace(/\/+$/,"")+"/"+r);if(o){o.path=i;return urlGenerate(o)}return i}r.join=join;r.isAbsolute=function(e){return e.charAt(0)==="/"||n.test(e)};function relative(e,r){if(e===""){e="."}e=e.replace(/\/$/,"");var n=0;while(r.indexOf(e+"/")!==0){var t=e.lastIndexOf("/");if(t<0){return r}e=e.slice(0,t);if(e.match(/^([^\/]+:\/)?\/*$/)){return r}++n}return Array(n+1).join("../")+r.substr(e.length+1)}r.relative=relative;var o=function(){var e=Object.create(null);return!("__proto__"in e)}();function identity(e){return e}function toSetString(e){if(isProtoString(e)){return"$"+e}return e}r.toSetString=o?identity:toSetString;function fromSetString(e){if(isProtoString(e)){return e.slice(1)}return e}r.fromSetString=o?identity:fromSetString;function isProtoString(e){if(!e){return false}var r=e.length;if(r<9){return false}if(e.charCodeAt(r-1)!==95||e.charCodeAt(r-2)!==95||e.charCodeAt(r-3)!==111||e.charCodeAt(r-4)!==116||e.charCodeAt(r-5)!==111||e.charCodeAt(r-6)!==114||e.charCodeAt(r-7)!==112||e.charCodeAt(r-8)!==95||e.charCodeAt(r-9)!==95){return false}for(var n=r-10;n>=0;n--){if(e.charCodeAt(n)!==36){return false}}return true}function compareByOriginalPositions(e,r,n){var t=strcmp(e.source,r.source);if(t!==0){return t}t=e.originalLine-r.originalLine;if(t!==0){return t}t=e.originalColumn-r.originalColumn;if(t!==0||n){return t}t=e.generatedColumn-r.generatedColumn;if(t!==0){return t}t=e.generatedLine-r.generatedLine;if(t!==0){return t}return strcmp(e.name,r.name)}r.compareByOriginalPositions=compareByOriginalPositions;function compareByGeneratedPositionsDeflated(e,r,n){var t=e.generatedLine-r.generatedLine;if(t!==0){return t}t=e.generatedColumn-r.generatedColumn;if(t!==0||n){return t}t=strcmp(e.source,r.source);if(t!==0){return t}t=e.originalLine-r.originalLine;if(t!==0){return t}t=e.originalColumn-r.originalColumn;if(t!==0){return t}return 
strcmp(e.name,r.name)}r.compareByGeneratedPositionsDeflated=compareByGeneratedPositionsDeflated;function strcmp(e,r){if(e===r){return 0}if(e===null){return 1}if(r===null){return-1}if(e>r){return 1}return-1}function compareByGeneratedPositionsInflated(e,r){var n=e.generatedLine-r.generatedLine;if(n!==0){return n}n=e.generatedColumn-r.generatedColumn;if(n!==0){return n}n=strcmp(e.source,r.source);if(n!==0){return n}n=e.originalLine-r.originalLine;if(n!==0){return n}n=e.originalColumn-r.originalColumn;if(n!==0){return n}return strcmp(e.name,r.name)}r.compareByGeneratedPositionsInflated=compareByGeneratedPositionsInflated;function parseSourceMapInput(e){return JSON.parse(e.replace(/^\)]}'[^\n]*\n/,""))}r.parseSourceMapInput=parseSourceMapInput;function computeSourceURL(e,r,n){r=r||"";if(e){if(e[e.length-1]!=="/"&&r[0]!=="/"){e+="/"}r=e+r}if(n){var t=urlParse(n);if(!t){throw new Error("sourceMapURL could not be parsed")}if(t.path){var o=t.path.lastIndexOf("/");if(o>=0){t.path=t.path.substring(0,o+1)}}r=join(urlGenerate(t),r)}return normalize(r)}r.computeSourceURL=computeSourceURL},596:(e,r,n)=>{n(341).h;r.SourceMapConsumer=n(327).SourceMapConsumer;n(990)},747:e=>{"use strict";e.exports=require("fs")},622:e=>{"use strict";e.exports=require("path")}};var r={};function __webpack_require__(n){var t=r[n];if(t!==undefined){return t.exports}var o=r[n]={id:n,loaded:false,exports:{}};var i=true;try{e[n](o,o.exports,__webpack_require__);i=false}finally{if(i)delete r[n]}o.loaded=true;return o.exports}(()=>{__webpack_require__.nmd=e=>{e.paths=[];if(!e.children)e.children=[];return e}})();if(typeof __webpack_require__!=="undefined")__webpack_require__.ab=__dirname+"/";var n={};(()=>{__webpack_require__(284).install()})();module.exports=n})(); \ No newline at end of file diff --git a/.github/actions/collect-charts/package-lock.json b/.github/actions/collect-charts/package-lock.json deleted file mode 100644 index de87d340..00000000 --- a/.github/actions/collect-charts/package-lock.json +++ /dev/null @@ -1,567 +0,0 @@ -{ - "name": "verify-chart-version-bump", - "version": "1.0.0", - "lockfileVersion": 2, - "requires": true, - "packages": { - "": { - "name": "verify-chart-version-bump", - "version": "1.0.0", - "license": "ISC", - "dependencies": { - "@actions/core": "^1.9.0", - "@actions/github": "^5.0.3", - "fs-extra": "^10.1.0", - "yaml": "^2.1.1" - }, - "devDependencies": { - "@types/fs-extra": "9.0.13", - "@types/node": "17.0.45", - "@vercel/ncc": "0.34.0", - "typescript": "4.7.4" - } - }, - "node_modules/@actions/core": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.9.0.tgz", - "integrity": "sha512-5pbM693Ih59ZdUhgk+fts+bUWTnIdHV3kwOSr+QIoFHMLg7Gzhwm0cifDY/AG68ekEJAkHnQVpcy4f6GjmzBCA==", - "dependencies": { - "@actions/http-client": "^2.0.1" - } - }, - "node_modules/@actions/github": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/@actions/github/-/github-5.0.3.tgz", - "integrity": "sha512-myjA/pdLQfhUGLtRZC/J4L1RXOG4o6aYdiEq+zr5wVVKljzbFld+xv10k1FX6IkIJtNxbAq44BdwSNpQ015P0A==", - "dependencies": { - "@actions/http-client": "^2.0.1", - "@octokit/core": "^3.6.0", - "@octokit/plugin-paginate-rest": "^2.17.0", - "@octokit/plugin-rest-endpoint-methods": "^5.13.0" - } - }, - "node_modules/@actions/http-client": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.0.1.tgz", - "integrity": "sha512-PIXiMVtz6VvyaRsGY268qvj57hXQEpsYogYOu2nrQhlf+XCGmZstmuZBbAybUl1nQGnvS1k1eEsQ69ZoD7xlSw==", - "dependencies": 
{ - "tunnel": "^0.0.6" - } - }, - "node_modules/@octokit/auth-token": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-2.5.0.tgz", - "integrity": "sha512-r5FVUJCOLl19AxiuZD2VRZ/ORjp/4IN98Of6YJoJOkY75CIBuYfmiNHGrDwXr+aLGG55igl9QrxX3hbiXlLb+g==", - "dependencies": { - "@octokit/types": "^6.0.3" - } - }, - "node_modules/@octokit/core": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/@octokit/core/-/core-3.6.0.tgz", - "integrity": "sha512-7RKRKuA4xTjMhY+eG3jthb3hlZCsOwg3rztWh75Xc+ShDWOfDDATWbeZpAHBNRpm4Tv9WgBMOy1zEJYXG6NJ7Q==", - "dependencies": { - "@octokit/auth-token": "^2.4.4", - "@octokit/graphql": "^4.5.8", - "@octokit/request": "^5.6.3", - "@octokit/request-error": "^2.0.5", - "@octokit/types": "^6.0.3", - "before-after-hook": "^2.2.0", - "universal-user-agent": "^6.0.0" - } - }, - "node_modules/@octokit/endpoint": { - "version": "6.0.12", - "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-6.0.12.tgz", - "integrity": "sha512-lF3puPwkQWGfkMClXb4k/eUT/nZKQfxinRWJrdZaJO85Dqwo/G0yOC434Jr2ojwafWJMYqFGFa5ms4jJUgujdA==", - "dependencies": { - "@octokit/types": "^6.0.3", - "is-plain-object": "^5.0.0", - "universal-user-agent": "^6.0.0" - } - }, - "node_modules/@octokit/graphql": { - "version": "4.8.0", - "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-4.8.0.tgz", - "integrity": "sha512-0gv+qLSBLKF0z8TKaSKTsS39scVKF9dbMxJpj3U0vC7wjNWFuIpL/z76Qe2fiuCbDRcJSavkXsVtMS6/dtQQsg==", - "dependencies": { - "@octokit/request": "^5.6.0", - "@octokit/types": "^6.0.3", - "universal-user-agent": "^6.0.0" - } - }, - "node_modules/@octokit/openapi-types": { - "version": "12.11.0", - "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-12.11.0.tgz", - "integrity": "sha512-VsXyi8peyRq9PqIz/tpqiL2w3w80OgVMwBHltTml3LmVvXiphgeqmY9mvBw9Wu7e0QWk/fqD37ux8yP5uVekyQ==" - }, - "node_modules/@octokit/plugin-paginate-rest": { - "version": "2.21.3", - "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-2.21.3.tgz", - "integrity": "sha512-aCZTEf0y2h3OLbrgKkrfFdjRL6eSOo8komneVQJnYecAxIej7Bafor2xhuDJOIFau4pk0i/P28/XgtbyPF0ZHw==", - "dependencies": { - "@octokit/types": "^6.40.0" - }, - "peerDependencies": { - "@octokit/core": ">=2" - } - }, - "node_modules/@octokit/plugin-rest-endpoint-methods": { - "version": "5.16.2", - "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-5.16.2.tgz", - "integrity": "sha512-8QFz29Fg5jDuTPXVtey05BLm7OB+M8fnvE64RNegzX7U+5NUXcOcnpTIK0YfSHBg8gYd0oxIq3IZTe9SfPZiRw==", - "dependencies": { - "@octokit/types": "^6.39.0", - "deprecation": "^2.3.1" - }, - "peerDependencies": { - "@octokit/core": ">=3" - } - }, - "node_modules/@octokit/request": { - "version": "5.6.3", - "resolved": "https://registry.npmjs.org/@octokit/request/-/request-5.6.3.tgz", - "integrity": "sha512-bFJl0I1KVc9jYTe9tdGGpAMPy32dLBXXo1dS/YwSCTL/2nd9XeHsY616RE3HPXDVk+a+dBuzyz5YdlXwcDTr2A==", - "dependencies": { - "@octokit/endpoint": "^6.0.1", - "@octokit/request-error": "^2.1.0", - "@octokit/types": "^6.16.1", - "is-plain-object": "^5.0.0", - "node-fetch": "^2.6.7", - "universal-user-agent": "^6.0.0" - } - }, - "node_modules/@octokit/request-error": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-2.1.0.tgz", - "integrity": "sha512-1VIvgXxs9WHSjicsRwq8PlR2LR2x6DwsJAaFgzdi0JfJoGSO8mYI/cHJQ+9FbN21aa+DrgNLnwObmyeSC8Rmpg==", - "dependencies": { - 
"@octokit/types": "^6.0.3", - "deprecation": "^2.0.0", - "once": "^1.4.0" - } - }, - "node_modules/@octokit/types": { - "version": "6.41.0", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-6.41.0.tgz", - "integrity": "sha512-eJ2jbzjdijiL3B4PrSQaSjuF2sPEQPVCPzBvTHJD9Nz+9dw2SGH4K4xeQJ77YfTq5bRQ+bD8wT11JbeDPmxmGg==", - "dependencies": { - "@octokit/openapi-types": "^12.11.0" - } - }, - "node_modules/@types/fs-extra": { - "version": "9.0.13", - "resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-9.0.13.tgz", - "integrity": "sha512-nEnwB++1u5lVDM2UI4c1+5R+FYaKfaAzS4OococimjVm3nQw3TuzH5UNsocrcTBbhnerblyHj4A49qXbIiZdpA==", - "dev": true, - "dependencies": { - "@types/node": "*" - } - }, - "node_modules/@types/node": { - "version": "17.0.45", - "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.45.tgz", - "integrity": "sha512-w+tIMs3rq2afQdsPJlODhoUEKzFP1ayaoyl1CcnwtIlsVe7K7bA1NGm4s3PraqTLlXnbIN84zuBlxBWo1u9BLw==", - "dev": true - }, - "node_modules/@vercel/ncc": { - "version": "0.34.0", - "resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.34.0.tgz", - "integrity": "sha512-G9h5ZLBJ/V57Ou9vz5hI8pda/YQX5HQszCs3AmIus3XzsmRn/0Ptic5otD3xVST8QLKk7AMk7AqpsyQGN7MZ9A==", - "dev": true, - "bin": { - "ncc": "dist/ncc/cli.js" - } - }, - "node_modules/before-after-hook": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.2.tgz", - "integrity": "sha512-3pZEU3NT5BFUo/AD5ERPWOgQOCZITni6iavr5AUw5AUwQjMlI0kzu5btnyD39AF0gUEsDPwJT+oY1ORBJijPjQ==" - }, - "node_modules/deprecation": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", - "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" - }, - "node_modules/fs-extra": { - "version": "10.1.0", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz", - "integrity": "sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==", - "dependencies": { - "graceful-fs": "^4.2.0", - "jsonfile": "^6.0.1", - "universalify": "^2.0.0" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/graceful-fs": { - "version": "4.2.10", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz", - "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==" - }, - "node_modules/is-plain-object": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz", - "integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/jsonfile": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", - "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", - "dependencies": { - "universalify": "^2.0.0" - }, - "optionalDependencies": { - "graceful-fs": "^4.1.6" - } - }, - "node_modules/node-fetch": { - "version": "2.6.7", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", - "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", - "dependencies": { - "whatwg-url": "^5.0.0" - }, - "engines": { - "node": "4.x || >=6.0.0" - }, - "peerDependencies": { - "encoding": "^0.1.0" - }, - "peerDependenciesMeta": { - "encoding": { - 
"optional": true - } - } - }, - "node_modules/once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", - "dependencies": { - "wrappy": "1" - } - }, - "node_modules/tr46": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", - "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" - }, - "node_modules/tunnel": { - "version": "0.0.6", - "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", - "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==", - "engines": { - "node": ">=0.6.11 <=0.7.0 || >=0.7.3" - } - }, - "node_modules/typescript": { - "version": "4.7.4", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.7.4.tgz", - "integrity": "sha512-C0WQT0gezHuw6AdY1M2jxUO83Rjf0HP7Sk1DtXj6j1EwkQNZrHAg2XPWlq62oqEhYvONq5pkC2Y9oPljWToLmQ==", - "dev": true, - "bin": { - "tsc": "bin/tsc", - "tsserver": "bin/tsserver" - }, - "engines": { - "node": ">=4.2.0" - } - }, - "node_modules/universal-user-agent": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.0.tgz", - "integrity": "sha512-isyNax3wXoKaulPDZWHQqbmIx1k2tb9fb3GGDBRxCscfYV2Ch7WxPArBsFEG8s/safwXTT7H4QGhaIkTp9447w==" - }, - "node_modules/universalify": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz", - "integrity": "sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==", - "engines": { - "node": ">= 10.0.0" - } - }, - "node_modules/webidl-conversions": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", - "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" - }, - "node_modules/whatwg-url": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", - "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", - "dependencies": { - "tr46": "~0.0.3", - "webidl-conversions": "^3.0.0" - } - }, - "node_modules/wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" - }, - "node_modules/yaml": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.1.1.tgz", - "integrity": "sha512-o96x3OPo8GjWeSLF+wOAbrPfhFOGY0W00GNaxCDv+9hkcDJEnev1yh8S7pgHF0ik6zc8sQLuL8hjHjJULZp8bw==", - "engines": { - "node": ">= 14" - } - } - }, - "dependencies": { - "@actions/core": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.9.0.tgz", - "integrity": "sha512-5pbM693Ih59ZdUhgk+fts+bUWTnIdHV3kwOSr+QIoFHMLg7Gzhwm0cifDY/AG68ekEJAkHnQVpcy4f6GjmzBCA==", - "requires": { - "@actions/http-client": "^2.0.1" - } - }, - "@actions/github": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/@actions/github/-/github-5.0.3.tgz", - "integrity": "sha512-myjA/pdLQfhUGLtRZC/J4L1RXOG4o6aYdiEq+zr5wVVKljzbFld+xv10k1FX6IkIJtNxbAq44BdwSNpQ015P0A==", - "requires": { - "@actions/http-client": "^2.0.1", - "@octokit/core": "^3.6.0", - "@octokit/plugin-paginate-rest": 
"^2.17.0", - "@octokit/plugin-rest-endpoint-methods": "^5.13.0" - } - }, - "@actions/http-client": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.0.1.tgz", - "integrity": "sha512-PIXiMVtz6VvyaRsGY268qvj57hXQEpsYogYOu2nrQhlf+XCGmZstmuZBbAybUl1nQGnvS1k1eEsQ69ZoD7xlSw==", - "requires": { - "tunnel": "^0.0.6" - } - }, - "@octokit/auth-token": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-2.5.0.tgz", - "integrity": "sha512-r5FVUJCOLl19AxiuZD2VRZ/ORjp/4IN98Of6YJoJOkY75CIBuYfmiNHGrDwXr+aLGG55igl9QrxX3hbiXlLb+g==", - "requires": { - "@octokit/types": "^6.0.3" - } - }, - "@octokit/core": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/@octokit/core/-/core-3.6.0.tgz", - "integrity": "sha512-7RKRKuA4xTjMhY+eG3jthb3hlZCsOwg3rztWh75Xc+ShDWOfDDATWbeZpAHBNRpm4Tv9WgBMOy1zEJYXG6NJ7Q==", - "requires": { - "@octokit/auth-token": "^2.4.4", - "@octokit/graphql": "^4.5.8", - "@octokit/request": "^5.6.3", - "@octokit/request-error": "^2.0.5", - "@octokit/types": "^6.0.3", - "before-after-hook": "^2.2.0", - "universal-user-agent": "^6.0.0" - } - }, - "@octokit/endpoint": { - "version": "6.0.12", - "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-6.0.12.tgz", - "integrity": "sha512-lF3puPwkQWGfkMClXb4k/eUT/nZKQfxinRWJrdZaJO85Dqwo/G0yOC434Jr2ojwafWJMYqFGFa5ms4jJUgujdA==", - "requires": { - "@octokit/types": "^6.0.3", - "is-plain-object": "^5.0.0", - "universal-user-agent": "^6.0.0" - } - }, - "@octokit/graphql": { - "version": "4.8.0", - "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-4.8.0.tgz", - "integrity": "sha512-0gv+qLSBLKF0z8TKaSKTsS39scVKF9dbMxJpj3U0vC7wjNWFuIpL/z76Qe2fiuCbDRcJSavkXsVtMS6/dtQQsg==", - "requires": { - "@octokit/request": "^5.6.0", - "@octokit/types": "^6.0.3", - "universal-user-agent": "^6.0.0" - } - }, - "@octokit/openapi-types": { - "version": "12.11.0", - "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-12.11.0.tgz", - "integrity": "sha512-VsXyi8peyRq9PqIz/tpqiL2w3w80OgVMwBHltTml3LmVvXiphgeqmY9mvBw9Wu7e0QWk/fqD37ux8yP5uVekyQ==" - }, - "@octokit/plugin-paginate-rest": { - "version": "2.21.3", - "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-2.21.3.tgz", - "integrity": "sha512-aCZTEf0y2h3OLbrgKkrfFdjRL6eSOo8komneVQJnYecAxIej7Bafor2xhuDJOIFau4pk0i/P28/XgtbyPF0ZHw==", - "requires": { - "@octokit/types": "^6.40.0" - } - }, - "@octokit/plugin-rest-endpoint-methods": { - "version": "5.16.2", - "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-5.16.2.tgz", - "integrity": "sha512-8QFz29Fg5jDuTPXVtey05BLm7OB+M8fnvE64RNegzX7U+5NUXcOcnpTIK0YfSHBg8gYd0oxIq3IZTe9SfPZiRw==", - "requires": { - "@octokit/types": "^6.39.0", - "deprecation": "^2.3.1" - } - }, - "@octokit/request": { - "version": "5.6.3", - "resolved": "https://registry.npmjs.org/@octokit/request/-/request-5.6.3.tgz", - "integrity": "sha512-bFJl0I1KVc9jYTe9tdGGpAMPy32dLBXXo1dS/YwSCTL/2nd9XeHsY616RE3HPXDVk+a+dBuzyz5YdlXwcDTr2A==", - "requires": { - "@octokit/endpoint": "^6.0.1", - "@octokit/request-error": "^2.1.0", - "@octokit/types": "^6.16.1", - "is-plain-object": "^5.0.0", - "node-fetch": "^2.6.7", - "universal-user-agent": "^6.0.0" - } - }, - "@octokit/request-error": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-2.1.0.tgz", - "integrity": 
"sha512-1VIvgXxs9WHSjicsRwq8PlR2LR2x6DwsJAaFgzdi0JfJoGSO8mYI/cHJQ+9FbN21aa+DrgNLnwObmyeSC8Rmpg==", - "requires": { - "@octokit/types": "^6.0.3", - "deprecation": "^2.0.0", - "once": "^1.4.0" - } - }, - "@octokit/types": { - "version": "6.41.0", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-6.41.0.tgz", - "integrity": "sha512-eJ2jbzjdijiL3B4PrSQaSjuF2sPEQPVCPzBvTHJD9Nz+9dw2SGH4K4xeQJ77YfTq5bRQ+bD8wT11JbeDPmxmGg==", - "requires": { - "@octokit/openapi-types": "^12.11.0" - } - }, - "@types/fs-extra": { - "version": "9.0.13", - "resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-9.0.13.tgz", - "integrity": "sha512-nEnwB++1u5lVDM2UI4c1+5R+FYaKfaAzS4OococimjVm3nQw3TuzH5UNsocrcTBbhnerblyHj4A49qXbIiZdpA==", - "dev": true, - "requires": { - "@types/node": "*" - } - }, - "@types/node": { - "version": "17.0.45", - "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.45.tgz", - "integrity": "sha512-w+tIMs3rq2afQdsPJlODhoUEKzFP1ayaoyl1CcnwtIlsVe7K7bA1NGm4s3PraqTLlXnbIN84zuBlxBWo1u9BLw==", - "dev": true - }, - "@vercel/ncc": { - "version": "0.34.0", - "resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.34.0.tgz", - "integrity": "sha512-G9h5ZLBJ/V57Ou9vz5hI8pda/YQX5HQszCs3AmIus3XzsmRn/0Ptic5otD3xVST8QLKk7AMk7AqpsyQGN7MZ9A==", - "dev": true - }, - "before-after-hook": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.2.tgz", - "integrity": "sha512-3pZEU3NT5BFUo/AD5ERPWOgQOCZITni6iavr5AUw5AUwQjMlI0kzu5btnyD39AF0gUEsDPwJT+oY1ORBJijPjQ==" - }, - "deprecation": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", - "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" - }, - "fs-extra": { - "version": "10.1.0", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz", - "integrity": "sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==", - "requires": { - "graceful-fs": "^4.2.0", - "jsonfile": "^6.0.1", - "universalify": "^2.0.0" - } - }, - "graceful-fs": { - "version": "4.2.10", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz", - "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==" - }, - "is-plain-object": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz", - "integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==" - }, - "jsonfile": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", - "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", - "requires": { - "graceful-fs": "^4.1.6", - "universalify": "^2.0.0" - } - }, - "node-fetch": { - "version": "2.6.7", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", - "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", - "requires": { - "whatwg-url": "^5.0.0" - } - }, - "once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", - "requires": { - "wrappy": "1" - } - }, - "tr46": { - "version": "0.0.3", - "resolved": 
"https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", - "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" - }, - "tunnel": { - "version": "0.0.6", - "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", - "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==" - }, - "typescript": { - "version": "4.7.4", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.7.4.tgz", - "integrity": "sha512-C0WQT0gezHuw6AdY1M2jxUO83Rjf0HP7Sk1DtXj6j1EwkQNZrHAg2XPWlq62oqEhYvONq5pkC2Y9oPljWToLmQ==", - "dev": true - }, - "universal-user-agent": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.0.tgz", - "integrity": "sha512-isyNax3wXoKaulPDZWHQqbmIx1k2tb9fb3GGDBRxCscfYV2Ch7WxPArBsFEG8s/safwXTT7H4QGhaIkTp9447w==" - }, - "universalify": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz", - "integrity": "sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==" - }, - "webidl-conversions": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", - "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" - }, - "whatwg-url": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", - "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", - "requires": { - "tr46": "~0.0.3", - "webidl-conversions": "^3.0.0" - } - }, - "wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" - }, - "yaml": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.1.1.tgz", - "integrity": "sha512-o96x3OPo8GjWeSLF+wOAbrPfhFOGY0W00GNaxCDv+9hkcDJEnev1yh8S7pgHF0ik6zc8sQLuL8hjHjJULZp8bw==" - } - } -} diff --git a/.github/actions/collect-charts/package.json b/.github/actions/collect-charts/package.json deleted file mode 100644 index e82ae2a2..00000000 --- a/.github/actions/collect-charts/package.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "name": "verify-chart-version-bump", - "version": "1.0.0", - "description": "", - "main": "lib/main.js", - "scripts": { - "build": "tsc", - "package": "npm run build && NODE_OPTIONS=--openssl-legacy-provider ncc build --source-map --license licenses.txt" - }, - "keywords": [], - "author": "", - "license": "ISC", - "dependencies": { - "@actions/core": "^1.9.0", - "@actions/github": "^5.0.3", - "fs-extra": "^10.1.0", - "yaml": "^2.1.1" - }, - "devDependencies": { - "@types/fs-extra": "9.0.13", - "@types/node": "17.0.45", - "@vercel/ncc": "0.34.0", - "typescript": "4.7.4" - } -} diff --git a/.github/actions/collect-charts/src/main.ts b/.github/actions/collect-charts/src/main.ts deleted file mode 100644 index 1d2b244d..00000000 --- a/.github/actions/collect-charts/src/main.ts +++ /dev/null @@ -1,205 +0,0 @@ -import * as core from "@actions/core"; -import * as github from "@actions/github"; -import * as path from "path"; -import * as YAML from "yaml"; -import * as fs from "fs-extra"; - -type FileStatus = "added" | "modified" | "removed" | "renamed"; - -function getErrorMessage(error: unknown) { - if (error instanceof Error) return 
error.message; - return String(error); -} - -async function requestAddedModifiedFiles( - baseCommit: string, - headCommit: string, - githubToken: string -) { - let result: string[] = []; - const octokit = github.getOctokit(githubToken); - - core.info(`Base commit: ${baseCommit}`); - core.info(`Head commit: ${headCommit}`); - - // Use GitHub's compare two commits API. - const response = await octokit.rest.repos.compareCommits({ - base: baseCommit, - head: headCommit, - owner: github.context.repo.owner, - repo: github.context.repo.repo, - }); - - // Ensure that the request was successful. - if (response.status !== 200) { - throw new Error( - `The GitHub API returned ${response.status}, expected 200.` - ); - } - - // Ensure that the head commit is ahead of the base commit. - if (response.data.status !== "ahead") { - throw new Error( - `The head commit for this ${github.context.eventName} event is not ahead of the base commit.` - ); - } - - const responseFiles = response.data.files || []; - responseFiles.forEach((file) => { - const filestatus = file.status as FileStatus; - if (filestatus == "added" || filestatus == "modified") { - result.push(file.filename); - } - }); - return result; -} - -async function requestAllFiles(commit: string, githubToken: string) { - let result: string[] = []; - const octokit = github.getOctokit(githubToken); - - core.info(`Commit SHA: ${commit}`); - - const response = await octokit.rest.git.getTree({ - tree_sha: commit, - owner: github.context.repo.owner, - repo: github.context.repo.repo, - recursive: "true", - }); - - // Ensure that the request was successful. - if (response.status !== 200) { - throw new Error( - `The GitHub API returned ${response.status}, expected 200.` - ); - } - - const responseTreeItems = response.data.tree || []; - responseTreeItems.forEach((item) => { - if (item.type == "blob" && item.path) { - result.push(item.path); - } - }); - return result; -} - -async function getRepoConfig(configPath: string) { - // Ensure that the repo config file exists. 
- if (!(await fs.pathExists(configPath))) { - throw new Error(`${configPath} Does not exist!`); - } - - const repoConfigRaw = await fs.readFile(configPath, "utf8"); - const repoConfig = await YAML.parse(repoConfigRaw); - return repoConfig; -} - -function filterChangedCharts(files: string[], parentFolder: string) { - const filteredChartFiles = files.filter((file) => { - const rel = path.relative(parentFolder, file); - return !rel.startsWith("../") && rel !== ".."; - }); - - let changedCharts: string[] = []; - filteredChartFiles.forEach((file) => { - const absoluteParentFolder = path.resolve(parentFolder); - const absoluteFileDirname = path.resolve(path.dirname(file)); - const relativeFileDirname = absoluteFileDirname.slice( - absoluteParentFolder.length + 1 - ); - const chartPathParts: string[] = relativeFileDirname.split("/"); - const chartType: string = chartPathParts[0]; - const chartName: string = chartPathParts[1]; - if (chartType && chartName) { - changedCharts.push(`${chartType}/${chartName}`); - } - }); - - // Return only unique items - return changedCharts.filter( - (item, index) => changedCharts.indexOf(item) === index - ); -} - -async function run() { - try { - const githubToken = core.getInput("token", { required: true }); - const chartsFolder = core.getInput("chartsFolder", { required: true }); - const repoConfigFilePath = core.getInput("repoConfigFile", { - required: true, - }); - let getAllCharts = core.getInput("getAllCharts", { required: false }); - const overrideCharts = core.getInput("overrideCharts", { required: false }); - - const repoConfig = await getRepoConfig(repoConfigFilePath); - core.info( - `Repo configuration: ${JSON.stringify(repoConfig, undefined, 2)}` - ); - - if (overrideCharts && overrideCharts != "[]") { - const responseCharts = YAML.parse(overrideCharts); - core.info(`Charts: ${JSON.stringify(responseCharts, undefined, 2)}`); - core.setOutput("charts", responseCharts); - return; - } - - const eventName = github.context.eventName; - - let baseCommit: string; - let headCommit: string; - - switch (eventName) { - case "pull_request": - baseCommit = github.context.payload.pull_request?.base?.sha; - headCommit = github.context.payload.pull_request?.head?.sha; - break; - case "push": - baseCommit = github.context.payload.before; - headCommit = github.context.payload.after; - break; - case "workflow_dispatch": - getAllCharts = "true"; - baseCommit = ""; - headCommit = github.context.sha; - break; - default: - throw new Error( - `This action only supports pull requests, pushes and workflow_dispatch,` + - `${github.context.eventName} events are not supported.` - ); - } - - let responseFiles: string[]; - if (getAllCharts === "true") { - responseFiles = await requestAllFiles(headCommit, githubToken); - } else { - responseFiles = await requestAddedModifiedFiles( - baseCommit, - headCommit, - githubToken - ); - } - - const changedCharts = filterChangedCharts(responseFiles, chartsFolder); - const chartsToInstall = changedCharts.filter( - (x) => !repoConfig["excluded-charts-install"].includes(x) - ); - const chartsToLint = changedCharts.filter( - (x) => !repoConfig["excluded-charts-lint"].includes(x) - ); - - core.info(`Charts: ${JSON.stringify(changedCharts, undefined, 2)}`); - core.info(`Charts to lint: ${JSON.stringify(chartsToLint, undefined, 2)}`); - core.info( - `Charts to install: ${JSON.stringify(chartsToInstall, undefined, 2)}` - ); - - core.setOutput("charts", changedCharts); - core.setOutput("chartsToInstall", chartsToInstall); - 
core.setOutput("chartsToLint", chartsToLint); - } catch (error) { - core.setFailed(getErrorMessage(error)); - } -} - -run(); diff --git a/.github/actions/collect-charts/tsconfig.json b/.github/actions/collect-charts/tsconfig.json deleted file mode 100644 index f6e7cb5b..00000000 --- a/.github/actions/collect-charts/tsconfig.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "compilerOptions": { - "target": "es6", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */ - "module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */ - "outDir": "./lib", /* Redirect output structure to the directory. */ - "rootDir": "./src", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */ - "strict": true, /* Enable all strict type-checking options. */ - "noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */ - "esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */ - }, - "exclude": ["node_modules", "**/*.test.ts"] -} diff --git a/.github/actions/verify-chart-changelog/.gitignore b/.github/actions/verify-chart-changelog/.gitignore deleted file mode 100644 index 3b4e8dcf..00000000 --- a/.github/actions/verify-chart-changelog/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -lib/ -node_modules/ diff --git a/.github/actions/verify-chart-changelog/action.yml b/.github/actions/verify-chart-changelog/action.yml deleted file mode 100644 index 4d7f0b2b..00000000 --- a/.github/actions/verify-chart-changelog/action.yml +++ /dev/null @@ -1,16 +0,0 @@ -name: 'Verify Helm chart changelog' -description: 'Checks if the Helm chart changelog has been updated' -inputs: - chart: - description: 'Which chart to check' - required: true - base: - description: 'Ref to compare against' - required: false - token: - description: 'GitHub token' - default: '${{ github.token }}' - required: true -runs: - using: 'node16' - main: 'dist/index.js' diff --git a/.github/actions/verify-chart-changelog/dist/index.js b/.github/actions/verify-chart-changelog/dist/index.js deleted file mode 100644 index d31bc23f..00000000 --- a/.github/actions/verify-chart-changelog/dist/index.js +++ /dev/null @@ -1,28244 +0,0 @@ -require('./sourcemap-register.js');/******/ (() => { // webpackBootstrap -/******/ var __webpack_modules__ = ({ - -/***/ 3109: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const core = __importStar(__nccwpck_require__(2186)); -const github = __importStar(__nccwpck_require__(5438)); -const YAML = __importStar(__nccwpck_require__(5065)); -const schemas_1 = __nccwpck_require__(8774); -const fs = __importStar(__nccwpck_require__(5630)); -function getErrorMessage(error) { - if (error instanceof Error) - return error.message; - return String(error); -} -function getChangelogFromYaml(chartYaml) { - const changelogAnnotation = "artifacthub.io/changes"; - if (chartYaml.annotations) { - if (chartYaml.annotations[changelogAnnotation]) { - return chartYaml.annotations[changelogAnnotation]; - } - } - return undefined; -} -function getChartYamlFromRepo(path, ref, token) { - return __awaiter(this, void 0, void 0, function* () { - let result = {}; - const octokit = github.getOctokit(token); - const chartYamlFile = yield octokit.rest.repos.getContent({ - owner: github.context.repo.owner, - repo: github.context.repo.repo, - path: `${path}`, - ref: ref, - }); - if (chartYamlFile && "content" in chartYamlFile.data) { - const originalChartYamlContent = Buffer.from(chartYamlFile.data.content, "base64").toString("utf-8"); - result = yield YAML.parse(originalChartYamlContent); - } - return result; - }); -} -function getChartYamlFromFile(path) { - return __awaiter(this, void 0, void 0, function* () { - const chartYamlFile = yield fs.readFile(path, "utf8"); - return YAML.parse(chartYamlFile); - }); -} -function checkRefExists(ref, token) { - return __awaiter(this, void 0, void 0, function* () { - const octokit = github.getOctokit(token); - try { - yield octokit.rest.git.getRef({ - owner: github.context.repo.owner, - repo: github.context.repo.repo, - ref: ref, - }); - } - catch (error) { - core.setFailed(`Ref ${ref} was not found for this repository!`); - return false; - } - return true; - }); -} -function run() { - var _a; - return __awaiter(this, void 0, void 0, function* () { - try { - if (github.context.eventName !== "pull_request") { - core.setFailed("This action can only run on pull requests!"); - return; - } - // Gather inputs - const githubToken = core.getInput("token"); - const chart = core.getInput("chart", { required: true }); - const base = core.getInput("base", { required: false }); - // Verify the base ref exists - if (base && !(yield checkRefExists(base, githubToken))) { - core.setFailed(`Ref ${base} was not 
found for this repository!`); - return; - } - // Determine the default branch - const defaultBranch = (_a = github.context.payload.repository) === null || _a === void 0 ? void 0 : _a.default_branch; - // Validate the chart - const chartYamlPath = `${chart}/Chart.yaml`; - if (!(yield fs.pathExists(chartYamlPath))) { - core.setFailed(`${chart} is not a valid Helm chart folder!`); - return; - } - var originalChartYaml; - var originalChartChangelog; - try { - originalChartYaml = yield getChartYamlFromRepo(chartYamlPath, base || `heads/${defaultBranch}`, githubToken); - originalChartChangelog = getChangelogFromYaml(originalChartYaml); - } - catch (error) { - core.warning(`Could not find original Chart.yaml for ${chart}, assuming this is a new chart.`); - } - const updatedChartYaml = yield getChartYamlFromFile(chartYamlPath); - const updatedChartChangelog = getChangelogFromYaml(updatedChartYaml); - if (!updatedChartChangelog) { - core.setFailed(`${chartYamlPath} does not contain a changelog!`); - return; - } - // Check if the changelog was updated - if (originalChartChangelog) { - if (updatedChartChangelog == originalChartChangelog) { - core.setFailed(`Chart changelog has not been updated!`); - return; - } - } - // Validate the changelog entries - const changelogEntries = YAML.parse(updatedChartChangelog); - changelogEntries.forEach((entry) => { - var _a; - const validator = (0, schemas_1.validateAgainstJsonSchema)(entry, schemas_1.changelogEntrySchema); - if (!validator.valid) { - (_a = validator.errors) === null || _a === void 0 ? void 0 : _a.forEach((error) => { - core.setFailed(`${chart} changelog validation failed: ${JSON.stringify(error)}`); - }); - } - }); - } - catch (error) { - core.setFailed(getErrorMessage(error)); - } - }); -} -run(); - - -/***/ }), - -/***/ 8774: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.validateAgainstJsonSchema = exports.changelogEntrySchema = void 0; -const ajv_1 = __importDefault(__nccwpck_require__(2426)); -const ajv_formats_1 = __importDefault(__nccwpck_require__(567)); -exports.changelogEntrySchema = { - type: "object", - properties: { - kind: { - type: "string", - enum: ["added", "changed", "deprecated", "removed", "fixed", "security"], - }, - description: { type: "string" }, - links: { - type: "array", - items: { - type: "object", - properties: { - name: { type: "string" }, - url: { $ref: "#/definitions/saneUrl" }, - }, - required: ["name", "url"], - additionalProperties: false, - }, - }, - }, - required: ["kind", "description"], - additionalProperties: false, - definitions: { - saneUrl: { - type: "string", - format: "uri", - pattern: "^https?://", - }, - }, -}; -function validateAgainstJsonSchema(object, schema) { - const ajv = new ajv_1.default(); - (0, ajv_formats_1.default)(ajv); - const validator = ajv.compile(schema); - return { - valid: validator(object), - errors: validator.errors, - }; -} -exports.validateAgainstJsonSchema = validateAgainstJsonSchema; - - -/***/ }), - -/***/ 7351: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.issue = exports.issueCommand = void 0; -const os = __importStar(__nccwpck_require__(2087)); -const utils_1 = __nccwpck_require__(5278); -/** - * Commands - * - * Command Format: - * ::name key=value,key=value::message - * - * Examples: - * ::warning::This is the message - * ::set-env name=MY_VAR::some value - */ -function issueCommand(command, properties, message) { - const cmd = new Command(command, properties, message); - process.stdout.write(cmd.toString() + os.EOL); -} -exports.issueCommand = issueCommand; -function issue(name, message = '') { - issueCommand(name, {}, message); -} -exports.issue = issue; -const CMD_STRING = '::'; -class Command { - constructor(command, properties, message) { - if (!command) { - command = 'missing.command'; - } - this.command = command; - this.properties = properties; - this.message = message; - } - toString() { - let cmdStr = CMD_STRING + this.command; - if (this.properties && Object.keys(this.properties).length > 0) { - cmdStr += ' '; - let first = true; - for (const key in this.properties) { - if (this.properties.hasOwnProperty(key)) { - const val = this.properties[key]; - if (val) { - if (first) { - first = false; - } - else { - cmdStr += ','; - } - cmdStr += `${key}=${escapeProperty(val)}`; - } - } - } - } - cmdStr += `${CMD_STRING}${escapeData(this.message)}`; - return cmdStr; - } -} -function escapeData(s) { - return utils_1.toCommandValue(s) - .replace(/%/g, '%25') - .replace(/\r/g, '%0D') - .replace(/\n/g, '%0A'); -} -function escapeProperty(s) { - return utils_1.toCommandValue(s) - .replace(/%/g, '%25') - .replace(/\r/g, '%0D') - .replace(/\n/g, '%0A') - .replace(/:/g, '%3A') - .replace(/,/g, '%2C'); -} -//# sourceMappingURL=command.js.map - -/***/ }), - -/***/ 2186: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0; -const command_1 = __nccwpck_require__(7351); -const file_command_1 = __nccwpck_require__(717); -const utils_1 = __nccwpck_require__(5278); -const os = __importStar(__nccwpck_require__(2087)); -const path = __importStar(__nccwpck_require__(5622)); -const oidc_utils_1 = __nccwpck_require__(8041); -/** - * The code to exit an action - */ -var ExitCode; -(function (ExitCode) { - /** - * A code indicating that the action was successful - */ - ExitCode[ExitCode["Success"] = 0] = "Success"; - /** - * A code indicating that the action was a failure - */ - ExitCode[ExitCode["Failure"] = 1] = "Failure"; -})(ExitCode = exports.ExitCode || (exports.ExitCode = {})); -//----------------------------------------------------------------------- -// Variables -//----------------------------------------------------------------------- -/** - * Sets env variable for this action and future actions in the job - * @param name the name of the variable to set - * @param val the value of the variable. 
Non-string values will be converted to a string via JSON.stringify - */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function exportVariable(name, val) { - const convertedVal = utils_1.toCommandValue(val); - process.env[name] = convertedVal; - const filePath = process.env['GITHUB_ENV'] || ''; - if (filePath) { - const delimiter = '_GitHubActionsFileCommandDelimeter_'; - const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`; - file_command_1.issueCommand('ENV', commandValue); - } - else { - command_1.issueCommand('set-env', { name }, convertedVal); - } -} -exports.exportVariable = exportVariable; -/** - * Registers a secret which will get masked from logs - * @param secret value of the secret - */ -function setSecret(secret) { - command_1.issueCommand('add-mask', {}, secret); -} -exports.setSecret = setSecret; -/** - * Prepends inputPath to the PATH (for this action and future actions) - * @param inputPath - */ -function addPath(inputPath) { - const filePath = process.env['GITHUB_PATH'] || ''; - if (filePath) { - file_command_1.issueCommand('PATH', inputPath); - } - else { - command_1.issueCommand('add-path', {}, inputPath); - } - process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`; -} -exports.addPath = addPath; -/** - * Gets the value of an input. - * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed. - * Returns an empty string if the value is not defined. - * - * @param name name of the input to get - * @param options optional. See InputOptions. - * @returns string - */ -function getInput(name, options) { - const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || ''; - if (options && options.required && !val) { - throw new Error(`Input required and not supplied: ${name}`); - } - if (options && options.trimWhitespace === false) { - return val; - } - return val.trim(); -} -exports.getInput = getInput; -/** - * Gets the values of an multiline input. Each value is also trimmed. - * - * @param name name of the input to get - * @param options optional. See InputOptions. - * @returns string[] - * - */ -function getMultilineInput(name, options) { - const inputs = getInput(name, options) - .split('\n') - .filter(x => x !== ''); - return inputs; -} -exports.getMultilineInput = getMultilineInput; -/** - * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification. - * Support boolean input list: `true | True | TRUE | false | False | FALSE` . - * The return value is also in boolean type. - * ref: https://yaml.org/spec/1.2/spec.html#id2804923 - * - * @param name name of the input to get - * @param options optional. See InputOptions. - * @returns boolean - */ -function getBooleanInput(name, options) { - const trueValue = ['true', 'True', 'TRUE']; - const falseValue = ['false', 'False', 'FALSE']; - const val = getInput(name, options); - if (trueValue.includes(val)) - return true; - if (falseValue.includes(val)) - return false; - throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` + - `Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); -} -exports.getBooleanInput = getBooleanInput; -/** - * Sets the value of an output. - * - * @param name name of the output to set - * @param value value to store. 
Non-string values will be converted to a string via JSON.stringify - */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function setOutput(name, value) { - process.stdout.write(os.EOL); - command_1.issueCommand('set-output', { name }, value); -} -exports.setOutput = setOutput; -/** - * Enables or disables the echoing of commands into stdout for the rest of the step. - * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set. - * - */ -function setCommandEcho(enabled) { - command_1.issue('echo', enabled ? 'on' : 'off'); -} -exports.setCommandEcho = setCommandEcho; -//----------------------------------------------------------------------- -// Results -//----------------------------------------------------------------------- -/** - * Sets the action status to failed. - * When the action exits it will be with an exit code of 1 - * @param message add error issue message - */ -function setFailed(message) { - process.exitCode = ExitCode.Failure; - error(message); -} -exports.setFailed = setFailed; -//----------------------------------------------------------------------- -// Logging Commands -//----------------------------------------------------------------------- -/** - * Gets whether Actions Step Debug is on or not - */ -function isDebug() { - return process.env['RUNNER_DEBUG'] === '1'; -} -exports.isDebug = isDebug; -/** - * Writes debug message to user log - * @param message debug message - */ -function debug(message) { - command_1.issueCommand('debug', {}, message); -} -exports.debug = debug; -/** - * Adds an error issue - * @param message error issue message. Errors will be converted to string via toString() - * @param properties optional properties to add to the annotation. - */ -function error(message, properties = {}) { - command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); -} -exports.error = error; -/** - * Adds a warning issue - * @param message warning issue message. Errors will be converted to string via toString() - * @param properties optional properties to add to the annotation. - */ -function warning(message, properties = {}) { - command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); -} -exports.warning = warning; -/** - * Adds a notice issue - * @param message notice issue message. Errors will be converted to string via toString() - * @param properties optional properties to add to the annotation. - */ -function notice(message, properties = {}) { - command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); -} -exports.notice = notice; -/** - * Writes info to log with console.log. - * @param message info message - */ -function info(message) { - process.stdout.write(message + os.EOL); -} -exports.info = info; -/** - * Begin an output group. - * - * Output until the next `groupEnd` will be foldable in this group - * - * @param name The name of the output group - */ -function startGroup(name) { - command_1.issue('group', name); -} -exports.startGroup = startGroup; -/** - * End an output group. - */ -function endGroup() { - command_1.issue('endgroup'); -} -exports.endGroup = endGroup; -/** - * Wrap an asynchronous function call in a group. - * - * Returns the same type as the function itself. 
- * - * @param name The name of the group - * @param fn The function to wrap in the group - */ -function group(name, fn) { - return __awaiter(this, void 0, void 0, function* () { - startGroup(name); - let result; - try { - result = yield fn(); - } - finally { - endGroup(); - } - return result; - }); -} -exports.group = group; -//----------------------------------------------------------------------- -// Wrapper action state -//----------------------------------------------------------------------- -/** - * Saves state for current action, the state can only be retrieved by this action's post job execution. - * - * @param name name of the state to store - * @param value value to store. Non-string values will be converted to a string via JSON.stringify - */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function saveState(name, value) { - command_1.issueCommand('save-state', { name }, value); -} -exports.saveState = saveState; -/** - * Gets the value of an state set by this action's main execution. - * - * @param name name of the state to get - * @returns string - */ -function getState(name) { - return process.env[`STATE_${name}`] || ''; -} -exports.getState = getState; -function getIDToken(aud) { - return __awaiter(this, void 0, void 0, function* () { - return yield oidc_utils_1.OidcClient.getIDToken(aud); - }); -} -exports.getIDToken = getIDToken; -/** - * Summary exports - */ -var summary_1 = __nccwpck_require__(1327); -Object.defineProperty(exports, "summary", ({ enumerable: true, get: function () { return summary_1.summary; } })); -/** - * @deprecated use core.summary - */ -var summary_2 = __nccwpck_require__(1327); -Object.defineProperty(exports, "markdownSummary", ({ enumerable: true, get: function () { return summary_2.markdownSummary; } })); -/** - * Path exports - */ -var path_utils_1 = __nccwpck_require__(2981); -Object.defineProperty(exports, "toPosixPath", ({ enumerable: true, get: function () { return path_utils_1.toPosixPath; } })); -Object.defineProperty(exports, "toWin32Path", ({ enumerable: true, get: function () { return path_utils_1.toWin32Path; } })); -Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: function () { return path_utils_1.toPlatformPath; } })); -//# sourceMappingURL=core.js.map - -/***/ }), - -/***/ 717: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -// For internal use, subject to change. -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.issueCommand = void 0; -// We use any as a valid input type -/* eslint-disable @typescript-eslint/no-explicit-any */ -const fs = __importStar(__nccwpck_require__(5747)); -const os = __importStar(__nccwpck_require__(2087)); -const utils_1 = __nccwpck_require__(5278); -function issueCommand(command, message) { - const filePath = process.env[`GITHUB_${command}`]; - if (!filePath) { - throw new Error(`Unable to find environment variable for file command ${command}`); - } - if (!fs.existsSync(filePath)) { - throw new Error(`Missing file at path: ${filePath}`); - } - fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, { - encoding: 'utf8' - }); -} -exports.issueCommand = issueCommand; -//# sourceMappingURL=file-command.js.map - -/***/ }), - -/***/ 8041: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.OidcClient = void 0; -const http_client_1 = __nccwpck_require__(6255); -const auth_1 = __nccwpck_require__(5526); -const core_1 = __nccwpck_require__(2186); -class OidcClient { - static createHttpClient(allowRetry = true, maxRetry = 10) { - const requestOptions = { - allowRetries: allowRetry, - maxRetries: maxRetry - }; - return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions); - } - static getRequestToken() { - const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN']; - if (!token) { - throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable'); - } - return token; - } - static getIDTokenUrl() { - const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL']; - if (!runtimeUrl) { - throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable'); - } - return runtimeUrl; - } - static getCall(id_token_url) { - var _a; - return __awaiter(this, void 0, void 0, function* () { - const httpclient = OidcClient.createHttpClient(); - const res = yield httpclient - .getJson(id_token_url) - .catch(error => { - throw new Error(`Failed to get ID Token. \n - Error Code : ${error.statusCode}\n - Error Message: ${error.result.message}`); - }); - const id_token = (_a = res.result) === null || _a === void 0 ? 
void 0 : _a.value; - if (!id_token) { - throw new Error('Response json body do not have ID Token field'); - } - return id_token; - }); - } - static getIDToken(audience) { - return __awaiter(this, void 0, void 0, function* () { - try { - // New ID Token is requested from action service - let id_token_url = OidcClient.getIDTokenUrl(); - if (audience) { - const encodedAudience = encodeURIComponent(audience); - id_token_url = `${id_token_url}&audience=${encodedAudience}`; - } - core_1.debug(`ID token url is ${id_token_url}`); - const id_token = yield OidcClient.getCall(id_token_url); - core_1.setSecret(id_token); - return id_token; - } - catch (error) { - throw new Error(`Error message: ${error.message}`); - } - }); - } -} -exports.OidcClient = OidcClient; -//# sourceMappingURL=oidc-utils.js.map - -/***/ }), - -/***/ 2981: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0; -const path = __importStar(__nccwpck_require__(5622)); -/** - * toPosixPath converts the given path to the posix form. On Windows, \\ will be - * replaced with /. - * - * @param pth. Path to transform. - * @return string Posix path. - */ -function toPosixPath(pth) { - return pth.replace(/[\\]/g, '/'); -} -exports.toPosixPath = toPosixPath; -/** - * toWin32Path converts the given path to the win32 form. On Linux, / will be - * replaced with \\. - * - * @param pth. Path to transform. - * @return string Win32 path. - */ -function toWin32Path(pth) { - return pth.replace(/[/]/g, '\\'); -} -exports.toWin32Path = toWin32Path; -/** - * toPlatformPath converts the given path to a platform-specific path. It does - * this by replacing instances of / and \ with the platform-specific path - * separator. - * - * @param pth The path to platformize. - * @return string The platform-specific path. - */ -function toPlatformPath(pth) { - return pth.replace(/[/\\]/g, path.sep); -} -exports.toPlatformPath = toPlatformPath; -//# sourceMappingURL=path-utils.js.map - -/***/ }), - -/***/ 1327: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0; -const os_1 = __nccwpck_require__(2087); -const fs_1 = __nccwpck_require__(5747); -const { access, appendFile, writeFile } = fs_1.promises; -exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY'; -exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary'; -class Summary { - constructor() { - this._buffer = ''; - } - /** - * Finds the summary file path from the environment, rejects if env var is not found or file does not exist - * Also checks r/w permissions. - * - * @returns step summary file path - */ - filePath() { - return __awaiter(this, void 0, void 0, function* () { - if (this._filePath) { - return this._filePath; - } - const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR]; - if (!pathFromEnv) { - throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`); - } - try { - yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK); - } - catch (_a) { - throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`); - } - this._filePath = pathFromEnv; - return this._filePath; - }); - } - /** - * Wraps content in an HTML tag, adding any HTML attributes - * - * @param {string} tag HTML tag to wrap - * @param {string | null} content content within the tag - * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add - * - * @returns {string} content wrapped in HTML element - */ - wrap(tag, content, attrs = {}) { - const htmlAttrs = Object.entries(attrs) - .map(([key, value]) => ` ${key}="${value}"`) - .join(''); - if (!content) { - return `<${tag}${htmlAttrs}>`; - } - return `<${tag}${htmlAttrs}>${content}</${tag}>`; - } - /** - * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default. - * - * @param {SummaryWriteOptions} [options] (optional) options for write operation - * - * @returns {Promise<Summary>} summary instance - */ - write(options) { - return __awaiter(this, void 0, void 0, function* () { - const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite); - const filePath = yield this.filePath(); - const writeFunc = overwrite ?
writeFile : appendFile; - yield writeFunc(filePath, this._buffer, { encoding: 'utf8' }); - return this.emptyBuffer(); - }); - } - /** - * Clears the summary buffer and wipes the summary file - * - * @returns {Summary} summary instance - */ - clear() { - return __awaiter(this, void 0, void 0, function* () { - return this.emptyBuffer().write({ overwrite: true }); - }); - } - /** - * Returns the current summary buffer as a string - * - * @returns {string} string of summary buffer - */ - stringify() { - return this._buffer; - } - /** - * If the summary buffer is empty - * - * @returns {boolen} true if the buffer is empty - */ - isEmptyBuffer() { - return this._buffer.length === 0; - } - /** - * Resets the summary buffer without writing to summary file - * - * @returns {Summary} summary instance - */ - emptyBuffer() { - this._buffer = ''; - return this; - } - /** - * Adds raw text to the summary buffer - * - * @param {string} text content to add - * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false) - * - * @returns {Summary} summary instance - */ - addRaw(text, addEOL = false) { - this._buffer += text; - return addEOL ? this.addEOL() : this; - } - /** - * Adds the operating system-specific end-of-line marker to the buffer - * - * @returns {Summary} summary instance - */ - addEOL() { - return this.addRaw(os_1.EOL); - } - /** - * Adds an HTML codeblock to the summary buffer - * - * @param {string} code content to render within fenced code block - * @param {string} lang (optional) language to syntax highlight code - * - * @returns {Summary} summary instance - */ - addCodeBlock(code, lang) { - const attrs = Object.assign({}, (lang && { lang })); - const element = this.wrap('pre', this.wrap('code', code), attrs); - return this.addRaw(element).addEOL(); - } - /** - * Adds an HTML list to the summary buffer - * - * @param {string[]} items list of items to render - * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false) - * - * @returns {Summary} summary instance - */ - addList(items, ordered = false) { - const tag = ordered ? 'ol' : 'ul'; - const listItems = items.map(item => this.wrap('li', item)).join(''); - const element = this.wrap(tag, listItems); - return this.addRaw(element).addEOL(); - } - /** - * Adds an HTML table to the summary buffer - * - * @param {SummaryTableCell[]} rows table rows - * - * @returns {Summary} summary instance - */ - addTable(rows) { - const tableBody = rows - .map(row => { - const cells = row - .map(cell => { - if (typeof cell === 'string') { - return this.wrap('td', cell); - } - const { header, data, colspan, rowspan } = cell; - const tag = header ? 
'th' : 'td'; - const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan })); - return this.wrap(tag, data, attrs); - }) - .join(''); - return this.wrap('tr', cells); - }) - .join(''); - const element = this.wrap('table', tableBody); - return this.addRaw(element).addEOL(); - } - /** - * Adds a collapsable HTML details element to the summary buffer - * - * @param {string} label text for the closed state - * @param {string} content collapsable content - * - * @returns {Summary} summary instance - */ - addDetails(label, content) { - const element = this.wrap('details', this.wrap('summary', label) + content); - return this.addRaw(element).addEOL(); - } - /** - * Adds an HTML image tag to the summary buffer - * - * @param {string} src path to the image you to embed - * @param {string} alt text description of the image - * @param {SummaryImageOptions} options (optional) addition image attributes - * - * @returns {Summary} summary instance - */ - addImage(src, alt, options) { - const { width, height } = options || {}; - const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height })); - const element = this.wrap('img', null, Object.assign({ src, alt }, attrs)); - return this.addRaw(element).addEOL(); - } - /** - * Adds an HTML section heading element - * - * @param {string} text heading text - * @param {number | string} [level=1] (optional) the heading level, default: 1 - * - * @returns {Summary} summary instance - */ - addHeading(text, level) { - const tag = `h${level}`; - const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag) - ? tag - : 'h1'; - const element = this.wrap(allowedTag, text); - return this.addRaw(element).addEOL(); - } - /** - * Adds an HTML thematic break (
<hr>) to the summary buffer - * - * @returns {Summary} summary instance - */ - addSeparator() { - const element = this.wrap('hr', null); - return this.addRaw(element).addEOL(); - } - /** - * Adds an HTML line break (<br>
) to the summary buffer - * - * @returns {Summary} summary instance - */ - addBreak() { - const element = this.wrap('br', null); - return this.addRaw(element).addEOL(); - } - /** - * Adds an HTML blockquote to the summary buffer - * - * @param {string} text quote text - * @param {string} cite (optional) citation url - * - * @returns {Summary} summary instance - */ - addQuote(text, cite) { - const attrs = Object.assign({}, (cite && { cite })); - const element = this.wrap('blockquote', text, attrs); - return this.addRaw(element).addEOL(); - } - /** - * Adds an HTML anchor tag to the summary buffer - * - * @param {string} text link text/content - * @param {string} href hyperlink - * - * @returns {Summary} summary instance - */ - addLink(text, href) { - const element = this.wrap('a', text, { href }); - return this.addRaw(element).addEOL(); - } -} -const _summary = new Summary(); -/** - * @deprecated use `core.summary` - */ -exports.markdownSummary = _summary; -exports.summary = _summary; -//# sourceMappingURL=summary.js.map - -/***/ }), - -/***/ 5278: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -// We use any as a valid input type -/* eslint-disable @typescript-eslint/no-explicit-any */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.toCommandProperties = exports.toCommandValue = void 0; -/** - * Sanitizes an input into a string so it can be passed into issueCommand safely - * @param input input to sanitize into a string - */ -function toCommandValue(input) { - if (input === null || input === undefined) { - return ''; - } - else if (typeof input === 'string' || input instanceof String) { - return input; - } - return JSON.stringify(input); -} -exports.toCommandValue = toCommandValue; -/** - * - * @param annotationProperties - * @returns The command properties to send with the actual annotation command - * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646 - */ -function toCommandProperties(annotationProperties) { - if (!Object.keys(annotationProperties).length) { - return {}; - } - return { - title: annotationProperties.title, - file: annotationProperties.file, - line: annotationProperties.startLine, - endLine: annotationProperties.endLine, - col: annotationProperties.startColumn, - endColumn: annotationProperties.endColumn - }; -} -exports.toCommandProperties = toCommandProperties; -//# sourceMappingURL=utils.js.map - -/***/ }), - -/***/ 4087: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Context = void 0; -const fs_1 = __nccwpck_require__(5747); -const os_1 = __nccwpck_require__(2087); -class Context { - /** - * Hydrate the context from the environment - */ - constructor() { - var _a, _b, _c; - this.payload = {}; - if (process.env.GITHUB_EVENT_PATH) { - if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) { - this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' })); - } - else { - const path = process.env.GITHUB_EVENT_PATH; - process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`); - } - } - this.eventName = process.env.GITHUB_EVENT_NAME; - this.sha = process.env.GITHUB_SHA; - this.ref = process.env.GITHUB_REF; - this.workflow = process.env.GITHUB_WORKFLOW; - this.action = process.env.GITHUB_ACTION; - this.actor = process.env.GITHUB_ACTOR; - this.job = process.env.GITHUB_JOB; - this.runNumber = 
parseInt(process.env.GITHUB_RUN_NUMBER, 10); - this.runId = parseInt(process.env.GITHUB_RUN_ID, 10); - this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`; - this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`; - this.graphqlUrl = (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`; - } - get issue() { - const payload = this.payload; - return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number }); - } - get repo() { - if (process.env.GITHUB_REPOSITORY) { - const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/'); - return { owner, repo }; - } - if (this.payload.repository) { - return { - owner: this.payload.repository.owner.login, - repo: this.payload.repository.name - }; - } - throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'"); - } -} -exports.Context = Context; -//# sourceMappingURL=context.js.map - -/***/ }), - -/***/ 5438: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getOctokit = exports.context = void 0; -const Context = __importStar(__nccwpck_require__(4087)); -const utils_1 = __nccwpck_require__(3030); -exports.context = new Context.Context(); -/** - * Returns a hydrated octokit ready to use for GitHub Actions - * - * @param token the repo PAT or GITHUB_TOKEN - * @param options other options to set - */ -function getOctokit(token, options) { - return new utils_1.GitHub(utils_1.getOctokitOptions(token, options)); -} -exports.getOctokit = getOctokit; -//# sourceMappingURL=github.js.map - -/***/ }), - -/***/ 7914: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0; -const httpClient = __importStar(__nccwpck_require__(6255)); -function getAuthString(token, options) { - if (!token && !options.auth) { - throw new Error('Parameter token or opts.auth is required'); - } - else if (token && options.auth) { - throw new Error('Parameters token and opts.auth may not both be specified'); - } - return typeof options.auth === 'string' ? options.auth : `token ${token}`; -} -exports.getAuthString = getAuthString; -function getProxyAgent(destinationUrl) { - const hc = new httpClient.HttpClient(); - return hc.getAgent(destinationUrl); -} -exports.getProxyAgent = getProxyAgent; -function getApiBaseUrl() { - return process.env['GITHUB_API_URL'] || 'https://api.github.com'; -} -exports.getApiBaseUrl = getApiBaseUrl; -//# sourceMappingURL=utils.js.map - -/***/ }), - -/***/ 3030: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getOctokitOptions = exports.GitHub = exports.context = void 0; -const Context = __importStar(__nccwpck_require__(4087)); -const Utils = __importStar(__nccwpck_require__(7914)); -// octokit + plugins -const core_1 = __nccwpck_require__(6762); -const plugin_rest_endpoint_methods_1 = __nccwpck_require__(3044); -const plugin_paginate_rest_1 = __nccwpck_require__(4193); -exports.context = new Context.Context(); -const baseUrl = Utils.getApiBaseUrl(); -const defaults = { - baseUrl, - request: { - agent: Utils.getProxyAgent(baseUrl) - } -}; -exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(defaults); -/** - * Convience function to correctly format Octokit Options to pass into the constructor. 
- * - * @param token the repo PAT or GITHUB_TOKEN - * @param options other options to set - */ -function getOctokitOptions(token, options) { - const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller - // Auth - const auth = Utils.getAuthString(token, opts); - if (auth) { - opts.auth = auth; - } - return opts; -} -exports.getOctokitOptions = getOctokitOptions; -//# sourceMappingURL=utils.js.map - -/***/ }), - -/***/ 5526: -/***/ (function(__unused_webpack_module, exports) { - -"use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0; -class BasicCredentialHandler { - constructor(username, password) { - this.username = username; - this.password = password; - } - prepareRequest(options) { - if (!options.headers) { - throw Error('The request has no headers'); - } - options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`; - } - // This handler cannot handle 401 - canHandleAuthentication() { - return false; - } - handleAuthentication() { - return __awaiter(this, void 0, void 0, function* () { - throw new Error('not implemented'); - }); - } -} -exports.BasicCredentialHandler = BasicCredentialHandler; -class BearerCredentialHandler { - constructor(token) { - this.token = token; - } - // currently implements pre-authorization - // TODO: support preAuth = false where it hooks on 401 - prepareRequest(options) { - if (!options.headers) { - throw Error('The request has no headers'); - } - options.headers['Authorization'] = `Bearer ${this.token}`; - } - // This handler cannot handle 401 - canHandleAuthentication() { - return false; - } - handleAuthentication() { - return __awaiter(this, void 0, void 0, function* () { - throw new Error('not implemented'); - }); - } -} -exports.BearerCredentialHandler = BearerCredentialHandler; -class PersonalAccessTokenCredentialHandler { - constructor(token) { - this.token = token; - } - // currently implements pre-authorization - // TODO: support preAuth = false where it hooks on 401 - prepareRequest(options) { - if (!options.headers) { - throw Error('The request has no headers'); - } - options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`; - } - // This handler cannot handle 401 - canHandleAuthentication() { - return false; - } - handleAuthentication() { - return __awaiter(this, void 0, void 0, function* () { - throw new Error('not implemented'); - }); - } -} -exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; -//# sourceMappingURL=auth.js.map - -/***/ }), - -/***/ 6255: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -/* eslint-disable 
@typescript-eslint/no-explicit-any */ -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0; -const http = __importStar(__nccwpck_require__(8605)); -const https = __importStar(__nccwpck_require__(7211)); -const pm = __importStar(__nccwpck_require__(9835)); -const tunnel = __importStar(__nccwpck_require__(4294)); -var HttpCodes; -(function (HttpCodes) { - HttpCodes[HttpCodes["OK"] = 200] = "OK"; - HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; - HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; - HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; - HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; - HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; - HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; - HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; - HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; - HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; - HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; - HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; - HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; - HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; - HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; - HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; - HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; - HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; - HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; - HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; - HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; - HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; - HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; - HttpCodes[HttpCodes["NotImplemented"] = 
501] = "NotImplemented"; - HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; - HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; - HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; -})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {})); -var Headers; -(function (Headers) { - Headers["Accept"] = "accept"; - Headers["ContentType"] = "content-type"; -})(Headers = exports.Headers || (exports.Headers = {})); -var MediaTypes; -(function (MediaTypes) { - MediaTypes["ApplicationJson"] = "application/json"; -})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {})); -/** - * Returns the proxy URL, depending upon the supplied url and proxy environment variables. - * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com - */ -function getProxyUrl(serverUrl) { - const proxyUrl = pm.getProxyUrl(new URL(serverUrl)); - return proxyUrl ? proxyUrl.href : ''; -} -exports.getProxyUrl = getProxyUrl; -const HttpRedirectCodes = [ - HttpCodes.MovedPermanently, - HttpCodes.ResourceMoved, - HttpCodes.SeeOther, - HttpCodes.TemporaryRedirect, - HttpCodes.PermanentRedirect -]; -const HttpResponseRetryCodes = [ - HttpCodes.BadGateway, - HttpCodes.ServiceUnavailable, - HttpCodes.GatewayTimeout -]; -const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; -const ExponentialBackoffCeiling = 10; -const ExponentialBackoffTimeSlice = 5; -class HttpClientError extends Error { - constructor(message, statusCode) { - super(message); - this.name = 'HttpClientError'; - this.statusCode = statusCode; - Object.setPrototypeOf(this, HttpClientError.prototype); - } -} -exports.HttpClientError = HttpClientError; -class HttpClientResponse { - constructor(message) { - this.message = message; - } - readBody() { - return __awaiter(this, void 0, void 0, function* () { - return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { - let output = Buffer.alloc(0); - this.message.on('data', (chunk) => { - output = Buffer.concat([output, chunk]); - }); - this.message.on('end', () => { - resolve(output.toString()); - }); - })); - }); - } -} -exports.HttpClientResponse = HttpClientResponse; -function isHttps(requestUrl) { - const parsedUrl = new URL(requestUrl); - return parsedUrl.protocol === 'https:'; -} -exports.isHttps = isHttps; -class HttpClient { - constructor(userAgent, handlers, requestOptions) { - this._ignoreSslError = false; - this._allowRedirects = true; - this._allowRedirectDowngrade = false; - this._maxRedirects = 50; - this._allowRetries = false; - this._maxRetries = 1; - this._keepAlive = false; - this._disposed = false; - this.userAgent = userAgent; - this.handlers = handlers || []; - this.requestOptions = requestOptions; - if (requestOptions) { - if (requestOptions.ignoreSslError != null) { - this._ignoreSslError = requestOptions.ignoreSslError; - } - this._socketTimeout = requestOptions.socketTimeout; - if (requestOptions.allowRedirects != null) { - this._allowRedirects = requestOptions.allowRedirects; - } - if (requestOptions.allowRedirectDowngrade != null) { - this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; - } - if (requestOptions.maxRedirects != null) { - this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); - } - if (requestOptions.keepAlive != null) { - this._keepAlive = requestOptions.keepAlive; - } - if (requestOptions.allowRetries != null) { - this._allowRetries = requestOptions.allowRetries; - } - if (requestOptions.maxRetries != null) { - this._maxRetries = 
requestOptions.maxRetries; - } - } - } - options(requestUrl, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); - }); - } - get(requestUrl, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('GET', requestUrl, null, additionalHeaders || {}); - }); - } - del(requestUrl, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('DELETE', requestUrl, null, additionalHeaders || {}); - }); - } - post(requestUrl, data, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('POST', requestUrl, data, additionalHeaders || {}); - }); - } - patch(requestUrl, data, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('PATCH', requestUrl, data, additionalHeaders || {}); - }); - } - put(requestUrl, data, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('PUT', requestUrl, data, additionalHeaders || {}); - }); - } - head(requestUrl, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('HEAD', requestUrl, null, additionalHeaders || {}); - }); - } - sendStream(verb, requestUrl, stream, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request(verb, requestUrl, stream, additionalHeaders); - }); - } - /** - * Gets a typed object from an endpoint - * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise - */ - getJson(requestUrl, additionalHeaders = {}) { - return __awaiter(this, void 0, void 0, function* () { - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - const res = yield this.get(requestUrl, additionalHeaders); - return this._processResponse(res, this.requestOptions); - }); - } - postJson(requestUrl, obj, additionalHeaders = {}) { - return __awaiter(this, void 0, void 0, function* () { - const data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - const res = yield this.post(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - }); - } - putJson(requestUrl, obj, additionalHeaders = {}) { - return __awaiter(this, void 0, void 0, function* () { - const data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - const res = yield this.put(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - }); - } - patchJson(requestUrl, obj, additionalHeaders = {}) { - return __awaiter(this, void 0, void 0, function* () { - const data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, 
Headers.ContentType, MediaTypes.ApplicationJson); - const res = yield this.patch(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - }); - } - /** - * Makes a raw http request. - * All other methods such as get, post, patch, and request ultimately call this. - * Prefer get, del, post and patch - */ - request(verb, requestUrl, data, headers) { - return __awaiter(this, void 0, void 0, function* () { - if (this._disposed) { - throw new Error('Client has already been disposed.'); - } - const parsedUrl = new URL(requestUrl); - let info = this._prepareRequest(verb, parsedUrl, headers); - // Only perform retries on reads since writes may not be idempotent. - const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb) - ? this._maxRetries + 1 - : 1; - let numTries = 0; - let response; - do { - response = yield this.requestRaw(info, data); - // Check if it's an authentication challenge - if (response && - response.message && - response.message.statusCode === HttpCodes.Unauthorized) { - let authenticationHandler; - for (const handler of this.handlers) { - if (handler.canHandleAuthentication(response)) { - authenticationHandler = handler; - break; - } - } - if (authenticationHandler) { - return authenticationHandler.handleAuthentication(this, info, data); - } - else { - // We have received an unauthorized response but have no handlers to handle it. - // Let the response return to the caller. - return response; - } - } - let redirectsRemaining = this._maxRedirects; - while (response.message.statusCode && - HttpRedirectCodes.includes(response.message.statusCode) && - this._allowRedirects && - redirectsRemaining > 0) { - const redirectUrl = response.message.headers['location']; - if (!redirectUrl) { - // if there's no location to redirect to, we won't - break; - } - const parsedRedirectUrl = new URL(redirectUrl); - if (parsedUrl.protocol === 'https:' && - parsedUrl.protocol !== parsedRedirectUrl.protocol && - !this._allowRedirectDowngrade) { - throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); - } - // we need to finish reading the response before reassigning response - // which will leak the open socket. - yield response.readBody(); - // strip authorization header if redirected to a different hostname - if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { - for (const header in headers) { - // header names are case insensitive - if (header.toLowerCase() === 'authorization') { - delete headers[header]; - } - } - } - // let's make the request with the new redirectUrl - info = this._prepareRequest(verb, parsedRedirectUrl, headers); - response = yield this.requestRaw(info, data); - redirectsRemaining--; - } - if (!response.message.statusCode || - !HttpResponseRetryCodes.includes(response.message.statusCode)) { - // If not a retry code, return immediately instead of retrying - return response; - } - numTries += 1; - if (numTries < maxTries) { - yield response.readBody(); - yield this._performExponentialBackoff(numTries); - } - } while (numTries < maxTries); - return response; - }); - } - /** - * Needs to be called if keepAlive is set to true in request options. - */ - dispose() { - if (this._agent) { - this._agent.destroy(); - } - this._disposed = true; - } - /** - * Raw request. 
- * @param info - * @param data - */ - requestRaw(info, data) { - return __awaiter(this, void 0, void 0, function* () { - return new Promise((resolve, reject) => { - function callbackForResult(err, res) { - if (err) { - reject(err); - } - else if (!res) { - // If `err` is not passed, then `res` must be passed. - reject(new Error('Unknown error')); - } - else { - resolve(res); - } - } - this.requestRawWithCallback(info, data, callbackForResult); - }); - }); - } - /** - * Raw request with callback. - * @param info - * @param data - * @param onResult - */ - requestRawWithCallback(info, data, onResult) { - if (typeof data === 'string') { - if (!info.options.headers) { - info.options.headers = {}; - } - info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); - } - let callbackCalled = false; - function handleResult(err, res) { - if (!callbackCalled) { - callbackCalled = true; - onResult(err, res); - } - } - const req = info.httpModule.request(info.options, (msg) => { - const res = new HttpClientResponse(msg); - handleResult(undefined, res); - }); - let socket; - req.on('socket', sock => { - socket = sock; - }); - // If we ever get disconnected, we want the socket to timeout eventually - req.setTimeout(this._socketTimeout || 3 * 60000, () => { - if (socket) { - socket.end(); - } - handleResult(new Error(`Request timeout: ${info.options.path}`)); - }); - req.on('error', function (err) { - // err has statusCode property - // res should have headers - handleResult(err); - }); - if (data && typeof data === 'string') { - req.write(data, 'utf8'); - } - if (data && typeof data !== 'string') { - data.on('close', function () { - req.end(); - }); - data.pipe(req); - } - else { - req.end(); - } - } - /** - * Gets an http agent. This function is useful when you need an http agent that handles - * routing through a proxy server - depending upon the url and proxy environment variables. - * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com - */ - getAgent(serverUrl) { - const parsedUrl = new URL(serverUrl); - return this._getAgent(parsedUrl); - } - _prepareRequest(method, requestUrl, headers) { - const info = {}; - info.parsedUrl = requestUrl; - const usingSsl = info.parsedUrl.protocol === 'https:'; - info.httpModule = usingSsl ? https : http; - const defaultPort = usingSsl ? 443 : 80; - info.options = {}; - info.options.host = info.parsedUrl.hostname; - info.options.port = info.parsedUrl.port - ? 
parseInt(info.parsedUrl.port) - : defaultPort; - info.options.path = - (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); - info.options.method = method; - info.options.headers = this._mergeHeaders(headers); - if (this.userAgent != null) { - info.options.headers['user-agent'] = this.userAgent; - } - info.options.agent = this._getAgent(info.parsedUrl); - // gives handlers an opportunity to participate - if (this.handlers) { - for (const handler of this.handlers) { - handler.prepareRequest(info.options); - } - } - return info; - } - _mergeHeaders(headers) { - if (this.requestOptions && this.requestOptions.headers) { - return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {})); - } - return lowercaseKeys(headers || {}); - } - _getExistingOrDefaultHeader(additionalHeaders, header, _default) { - let clientHeader; - if (this.requestOptions && this.requestOptions.headers) { - clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; - } - return additionalHeaders[header] || clientHeader || _default; - } - _getAgent(parsedUrl) { - let agent; - const proxyUrl = pm.getProxyUrl(parsedUrl); - const useProxy = proxyUrl && proxyUrl.hostname; - if (this._keepAlive && useProxy) { - agent = this._proxyAgent; - } - if (this._keepAlive && !useProxy) { - agent = this._agent; - } - // if agent is already assigned use that agent. - if (agent) { - return agent; - } - const usingSsl = parsedUrl.protocol === 'https:'; - let maxSockets = 100; - if (this.requestOptions) { - maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; - } - // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis. - if (proxyUrl && proxyUrl.hostname) { - const agentOptions = { - maxSockets, - keepAlive: this._keepAlive, - proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && { - proxyAuth: `${proxyUrl.username}:${proxyUrl.password}` - })), { host: proxyUrl.hostname, port: proxyUrl.port }) - }; - let tunnelAgent; - const overHttps = proxyUrl.protocol === 'https:'; - if (usingSsl) { - tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; - } - else { - tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; - } - agent = tunnelAgent(agentOptions); - this._proxyAgent = agent; - } - // if reusing agent across request and tunneling agent isn't assigned create a new agent - if (this._keepAlive && !agent) { - const options = { keepAlive: this._keepAlive, maxSockets }; - agent = usingSsl ? new https.Agent(options) : new http.Agent(options); - this._agent = agent; - } - // if not using private agent and tunnel agent isn't setup then use global agent - if (!agent) { - agent = usingSsl ? 
https.globalAgent : http.globalAgent; - } - if (usingSsl && this._ignoreSslError) { - // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process - // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options - // we have to cast it to any and change it directly - agent.options = Object.assign(agent.options || {}, { - rejectUnauthorized: false - }); - } - return agent; - } - _performExponentialBackoff(retryNumber) { - return __awaiter(this, void 0, void 0, function* () { - retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); - const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); - return new Promise(resolve => setTimeout(() => resolve(), ms)); - }); - } - _processResponse(res, options) { - return __awaiter(this, void 0, void 0, function* () { - return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { - const statusCode = res.message.statusCode || 0; - const response = { - statusCode, - result: null, - headers: {} - }; - // not found leads to null obj returned - if (statusCode === HttpCodes.NotFound) { - resolve(response); - } - // get the result from the body - function dateTimeDeserializer(key, value) { - if (typeof value === 'string') { - const a = new Date(value); - if (!isNaN(a.valueOf())) { - return a; - } - } - return value; - } - let obj; - let contents; - try { - contents = yield res.readBody(); - if (contents && contents.length > 0) { - if (options && options.deserializeDates) { - obj = JSON.parse(contents, dateTimeDeserializer); - } - else { - obj = JSON.parse(contents); - } - response.result = obj; - } - response.headers = res.message.headers; - } - catch (err) { - // Invalid resource (contents not json); leaving result obj null - } - // note that 3xx redirects are handled by the http layer. 
- if (statusCode > 299) { - let msg; - // if exception/error in body, attempt to get better error - if (obj && obj.message) { - msg = obj.message; - } - else if (contents && contents.length > 0) { - // it may be the case that the exception is in the body message as string - msg = contents; - } - else { - msg = `Failed request: (${statusCode})`; - } - const err = new HttpClientError(msg, statusCode); - err.result = response.result; - reject(err); - } - else { - resolve(response); - } - })); - }); - } -} -exports.HttpClient = HttpClient; -const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); -//# sourceMappingURL=index.js.map - -/***/ }), - -/***/ 9835: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.checkBypass = exports.getProxyUrl = void 0; -function getProxyUrl(reqUrl) { - const usingSsl = reqUrl.protocol === 'https:'; - if (checkBypass(reqUrl)) { - return undefined; - } - const proxyVar = (() => { - if (usingSsl) { - return process.env['https_proxy'] || process.env['HTTPS_PROXY']; - } - else { - return process.env['http_proxy'] || process.env['HTTP_PROXY']; - } - })(); - if (proxyVar) { - return new URL(proxyVar); - } - else { - return undefined; - } -} -exports.getProxyUrl = getProxyUrl; -function checkBypass(reqUrl) { - if (!reqUrl.hostname) { - return false; - } - const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; - if (!noProxy) { - return false; - } - // Determine the request port - let reqPort; - if (reqUrl.port) { - reqPort = Number(reqUrl.port); - } - else if (reqUrl.protocol === 'http:') { - reqPort = 80; - } - else if (reqUrl.protocol === 'https:') { - reqPort = 443; - } - // Format the request hostname and hostname with port - const upperReqHosts = [reqUrl.hostname.toUpperCase()]; - if (typeof reqPort === 'number') { - upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); - } - // Compare request host against noproxy - for (const upperNoProxyItem of noProxy - .split(',') - .map(x => x.trim().toUpperCase()) - .filter(x => x)) { - if (upperReqHosts.some(x => x === upperNoProxyItem)) { - return true; - } - } - return false; -} -exports.checkBypass = checkBypass; -//# sourceMappingURL=proxy.js.map - -/***/ }), - -/***/ 334: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -const REGEX_IS_INSTALLATION_LEGACY = /^v1\./; -const REGEX_IS_INSTALLATION = /^ghs_/; -const REGEX_IS_USER_TO_SERVER = /^ghu_/; -async function auth(token) { - const isApp = token.split(/\./).length === 3; - const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token); - const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token); - const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? 
"user-to-server" : "oauth"; - return { - type: "token", - token: token, - tokenType - }; -} - -/** - * Prefix token for usage in the Authorization header - * - * @param token OAuth token or JSON Web Token - */ -function withAuthorizationPrefix(token) { - if (token.split(/\./).length === 3) { - return `bearer ${token}`; - } - - return `token ${token}`; -} - -async function hook(token, request, route, parameters) { - const endpoint = request.endpoint.merge(route, parameters); - endpoint.headers.authorization = withAuthorizationPrefix(token); - return request(endpoint); -} - -const createTokenAuth = function createTokenAuth(token) { - if (!token) { - throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); - } - - if (typeof token !== "string") { - throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string"); - } - - token = token.replace(/^(token|bearer) +/i, ""); - return Object.assign(auth.bind(null, token), { - hook: hook.bind(null, token) - }); -}; - -exports.createTokenAuth = createTokenAuth; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 6762: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -var universalUserAgent = __nccwpck_require__(5030); -var beforeAfterHook = __nccwpck_require__(3682); -var request = __nccwpck_require__(6234); -var graphql = __nccwpck_require__(8467); -var authToken = __nccwpck_require__(334); - -function _objectWithoutPropertiesLoose(source, excluded) { - if (source == null) return {}; - var target = {}; - var sourceKeys = Object.keys(source); - var key, i; - - for (i = 0; i < sourceKeys.length; i++) { - key = sourceKeys[i]; - if (excluded.indexOf(key) >= 0) continue; - target[key] = source[key]; - } - - return target; -} - -function _objectWithoutProperties(source, excluded) { - if (source == null) return {}; - - var target = _objectWithoutPropertiesLoose(source, excluded); - - var key, i; - - if (Object.getOwnPropertySymbols) { - var sourceSymbolKeys = Object.getOwnPropertySymbols(source); - - for (i = 0; i < sourceSymbolKeys.length; i++) { - key = sourceSymbolKeys[i]; - if (excluded.indexOf(key) >= 0) continue; - if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue; - target[key] = source[key]; - } - } - - return target; -} - -const VERSION = "3.6.0"; - -const _excluded = ["authStrategy"]; -class Octokit { - constructor(options = {}) { - const hook = new beforeAfterHook.Collection(); - const requestDefaults = { - baseUrl: request.request.endpoint.DEFAULTS.baseUrl, - headers: {}, - request: Object.assign({}, options.request, { - // @ts-ignore internal usage only, no need to type - hook: hook.bind(null, "request") - }), - mediaType: { - previews: [], - format: "" - } - }; // prepend default user agent with `options.userAgent` if set - - requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" "); - - if (options.baseUrl) { - requestDefaults.baseUrl = options.baseUrl; - } - - if (options.previews) { - requestDefaults.mediaType.previews = options.previews; - } - - if (options.timeZone) { - requestDefaults.headers["time-zone"] = options.timeZone; - } - - this.request = request.request.defaults(requestDefaults); - this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults); - this.log = Object.assign({ - debug: () => {}, - info: () => {}, - warn: 
console.warn.bind(console), - error: console.error.bind(console) - }, options.log); - this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance - // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered. - // (2) If only `options.auth` is set, use the default token authentication strategy. - // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance. - // TODO: type `options.auth` based on `options.authStrategy`. - - if (!options.authStrategy) { - if (!options.auth) { - // (1) - this.auth = async () => ({ - type: "unauthenticated" - }); - } else { - // (2) - const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯ - - hook.wrap("request", auth.hook); - this.auth = auth; - } - } else { - const { - authStrategy - } = options, - otherOptions = _objectWithoutProperties(options, _excluded); - - const auth = authStrategy(Object.assign({ - request: this.request, - log: this.log, - // we pass the current octokit instance as well as its constructor options - // to allow for authentication strategies that return a new octokit instance - // that shares the same internal state as the current one. The original - // requirement for this was the "event-octokit" authentication strategy - // of https://github.com/probot/octokit-auth-probot. - octokit: this, - octokitOptions: otherOptions - }, options.auth)); // @ts-ignore ¯\_(ツ)_/¯ - - hook.wrap("request", auth.hook); - this.auth = auth; - } // apply plugins - // https://stackoverflow.com/a/16345172 - - - const classConstructor = this.constructor; - classConstructor.plugins.forEach(plugin => { - Object.assign(this, plugin(this, options)); - }); - } - - static defaults(defaults) { - const OctokitWithDefaults = class extends this { - constructor(...args) { - const options = args[0] || {}; - - if (typeof defaults === "function") { - super(defaults(options)); - return; - } - - super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? { - userAgent: `${options.userAgent} ${defaults.userAgent}` - } : null)); - } - - }; - return OctokitWithDefaults; - } - /** - * Attach a plugin (or many) to your Octokit instance. - * - * @example - * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) 
- */ - - - static plugin(...newPlugins) { - var _a; - - const currentPlugins = this.plugins; - const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a); - return NewOctokit; - } - -} -Octokit.VERSION = VERSION; -Octokit.plugins = []; - -exports.Octokit = Octokit; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 9440: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -var isPlainObject = __nccwpck_require__(3287); -var universalUserAgent = __nccwpck_require__(5030); - -function lowercaseKeys(object) { - if (!object) { - return {}; - } - - return Object.keys(object).reduce((newObj, key) => { - newObj[key.toLowerCase()] = object[key]; - return newObj; - }, {}); -} - -function mergeDeep(defaults, options) { - const result = Object.assign({}, defaults); - Object.keys(options).forEach(key => { - if (isPlainObject.isPlainObject(options[key])) { - if (!(key in defaults)) Object.assign(result, { - [key]: options[key] - });else result[key] = mergeDeep(defaults[key], options[key]); - } else { - Object.assign(result, { - [key]: options[key] - }); - } - }); - return result; -} - -function removeUndefinedProperties(obj) { - for (const key in obj) { - if (obj[key] === undefined) { - delete obj[key]; - } - } - - return obj; -} - -function merge(defaults, route, options) { - if (typeof route === "string") { - let [method, url] = route.split(" "); - options = Object.assign(url ? { - method, - url - } : { - url: method - }, options); - } else { - options = Object.assign({}, route); - } // lowercase header names before merging with defaults to avoid duplicates - - - options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging - - removeUndefinedProperties(options); - removeUndefinedProperties(options.headers); - const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten - - if (defaults && defaults.mediaType.previews.length) { - mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews); - } - - mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, "")); - return mergedOptions; -} - -function addQueryParameters(url, parameters) { - const separator = /\?/.test(url) ? 
"&" : "?"; - const names = Object.keys(parameters); - - if (names.length === 0) { - return url; - } - - return url + separator + names.map(name => { - if (name === "q") { - return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); - } - - return `${name}=${encodeURIComponent(parameters[name])}`; - }).join("&"); -} - -const urlVariableRegex = /\{[^}]+\}/g; - -function removeNonChars(variableName) { - return variableName.replace(/^\W+|\W+$/g, "").split(/,/); -} - -function extractUrlVariableNames(url) { - const matches = url.match(urlVariableRegex); - - if (!matches) { - return []; - } - - return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); -} - -function omit(object, keysToOmit) { - return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => { - obj[key] = object[key]; - return obj; - }, {}); -} - -// Based on https://github.com/bramstein/url-template, licensed under BSD -// TODO: create separate package. -// -// Copyright (c) 2012-2014, Bram Stein -// All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// 1. Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// 2. Redistributions in binary form must reproduce the above copyright -// notice, this list of conditions and the following disclaimer in the -// documentation and/or other materials provided with the distribution. -// 3. The name of the author may not be used to endorse or promote products -// derived from this software without specific prior written permission. -// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED -// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF -// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO -// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, -// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY -// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, -// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -/* istanbul ignore file */ -function encodeReserved(str) { - return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) { - if (!/%[0-9A-Fa-f]/.test(part)) { - part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); - } - - return part; - }).join(""); -} - -function encodeUnreserved(str) { - return encodeURIComponent(str).replace(/[!'()*]/g, function (c) { - return "%" + c.charCodeAt(0).toString(16).toUpperCase(); - }); -} - -function encodeValue(operator, value, key) { - value = operator === "+" || operator === "#" ? 
encodeReserved(value) : encodeUnreserved(value); - - if (key) { - return encodeUnreserved(key) + "=" + value; - } else { - return value; - } -} - -function isDefined(value) { - return value !== undefined && value !== null; -} - -function isKeyOperator(operator) { - return operator === ";" || operator === "&" || operator === "?"; -} - -function getValues(context, operator, key, modifier) { - var value = context[key], - result = []; - - if (isDefined(value) && value !== "") { - if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { - value = value.toString(); - - if (modifier && modifier !== "*") { - value = value.substring(0, parseInt(modifier, 10)); - } - - result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); - } else { - if (modifier === "*") { - if (Array.isArray(value)) { - value.filter(isDefined).forEach(function (value) { - result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); - }); - } else { - Object.keys(value).forEach(function (k) { - if (isDefined(value[k])) { - result.push(encodeValue(operator, value[k], k)); - } - }); - } - } else { - const tmp = []; - - if (Array.isArray(value)) { - value.filter(isDefined).forEach(function (value) { - tmp.push(encodeValue(operator, value)); - }); - } else { - Object.keys(value).forEach(function (k) { - if (isDefined(value[k])) { - tmp.push(encodeUnreserved(k)); - tmp.push(encodeValue(operator, value[k].toString())); - } - }); - } - - if (isKeyOperator(operator)) { - result.push(encodeUnreserved(key) + "=" + tmp.join(",")); - } else if (tmp.length !== 0) { - result.push(tmp.join(",")); - } - } - } - } else { - if (operator === ";") { - if (isDefined(value)) { - result.push(encodeUnreserved(key)); - } - } else if (value === "" && (operator === "&" || operator === "?")) { - result.push(encodeUnreserved(key) + "="); - } else if (value === "") { - result.push(""); - } - } - - return result; -} - -function parseUrl(template) { - return { - expand: expand.bind(null, template) - }; -} - -function expand(template, context) { - var operators = ["+", "#", ".", "/", ";", "?", "&"]; - return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) { - if (expression) { - let operator = ""; - const values = []; - - if (operators.indexOf(expression.charAt(0)) !== -1) { - operator = expression.charAt(0); - expression = expression.substr(1); - } - - expression.split(/,/g).forEach(function (variable) { - var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); - values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); - }); - - if (operator && operator !== "+") { - var separator = ","; - - if (operator === "?") { - separator = "&"; - } else if (operator !== "#") { - separator = operator; - } - - return (values.length !== 0 ? 
operator : "") + values.join(separator); - } else { - return values.join(","); - } - } else { - return encodeReserved(literal); - } - }); -} - -function parse(options) { - // https://fetch.spec.whatwg.org/#methods - let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible - - let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); - let headers = Object.assign({}, options.headers); - let body; - let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later - - const urlVariableNames = extractUrlVariableNames(url); - url = parseUrl(url).expand(parameters); - - if (!/^http/.test(url)) { - url = options.baseUrl + url; - } - - const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl"); - const remainingParameters = omit(parameters, omittedParameters); - const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); - - if (!isBinaryRequest) { - if (options.mediaType.format) { - // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw - headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(","); - } - - if (options.mediaType.previews.length) { - const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; - headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => { - const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json"; - return `application/vnd.github.${preview}-preview${format}`; - }).join(","); - } - } // for GET/HEAD requests, set URL query parameters from remaining parameters - // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters - - - if (["GET", "HEAD"].includes(method)) { - url = addQueryParameters(url, remainingParameters); - } else { - if ("data" in remainingParameters) { - body = remainingParameters.data; - } else { - if (Object.keys(remainingParameters).length) { - body = remainingParameters; - } else { - headers["content-length"] = 0; - } - } - } // default content-type for JSON if body is set - - - if (!headers["content-type"] && typeof body !== "undefined") { - headers["content-type"] = "application/json; charset=utf-8"; - } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body. - // fetch does not allow to set `content-length` header, but we can set body to an empty string - - - if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { - body = ""; - } // Only return body/request keys if present - - - return Object.assign({ - method, - url, - headers - }, typeof body !== "undefined" ? { - body - } : null, options.request ? 
{ - request: options.request - } : null); -} - -function endpointWithDefaults(defaults, route, options) { - return parse(merge(defaults, route, options)); -} - -function withDefaults(oldDefaults, newDefaults) { - const DEFAULTS = merge(oldDefaults, newDefaults); - const endpoint = endpointWithDefaults.bind(null, DEFAULTS); - return Object.assign(endpoint, { - DEFAULTS, - defaults: withDefaults.bind(null, DEFAULTS), - merge: merge.bind(null, DEFAULTS), - parse - }); -} - -const VERSION = "6.0.12"; - -const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url. -// So we use RequestParameters and add method as additional required property. - -const DEFAULTS = { - method: "GET", - baseUrl: "https://api.github.com", - headers: { - accept: "application/vnd.github.v3+json", - "user-agent": userAgent - }, - mediaType: { - format: "", - previews: [] - } -}; - -const endpoint = withDefaults(null, DEFAULTS); - -exports.endpoint = endpoint; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 8467: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -var request = __nccwpck_require__(6234); -var universalUserAgent = __nccwpck_require__(5030); - -const VERSION = "4.8.0"; - -function _buildMessageForResponseErrors(data) { - return `Request failed due to following response errors:\n` + data.errors.map(e => ` - ${e.message}`).join("\n"); -} - -class GraphqlResponseError extends Error { - constructor(request, headers, response) { - super(_buildMessageForResponseErrors(response)); - this.request = request; - this.headers = headers; - this.response = response; - this.name = "GraphqlResponseError"; // Expose the errors and response data in their shorthand properties. - - this.errors = response.errors; - this.data = response.data; // Maintains proper stack trace (only available on V8) - - /* istanbul ignore next */ - - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } - } - -} - -const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query", "mediaType"]; -const FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"]; -const GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; -function graphql(request, query, options) { - if (options) { - if (typeof query === "string" && "query" in options) { - return Promise.reject(new Error(`[@octokit/graphql] "query" cannot be used as variable name`)); - } - - for (const key in options) { - if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) continue; - return Promise.reject(new Error(`[@octokit/graphql] "${key}" cannot be used as variable name`)); - } - } - - const parsedOptions = typeof query === "string" ? 
Object.assign({ - query - }, options) : query; - const requestOptions = Object.keys(parsedOptions).reduce((result, key) => { - if (NON_VARIABLE_OPTIONS.includes(key)) { - result[key] = parsedOptions[key]; - return result; - } - - if (!result.variables) { - result.variables = {}; - } - - result.variables[key] = parsedOptions[key]; - return result; - }, {}); // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix - // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451 - - const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl; - - if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { - requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); - } - - return request(requestOptions).then(response => { - if (response.data.errors) { - const headers = {}; - - for (const key of Object.keys(response.headers)) { - headers[key] = response.headers[key]; - } - - throw new GraphqlResponseError(requestOptions, headers, response.data); - } - - return response.data.data; - }); -} - -function withDefaults(request$1, newDefaults) { - const newRequest = request$1.defaults(newDefaults); - - const newApi = (query, options) => { - return graphql(newRequest, query, options); - }; - - return Object.assign(newApi, { - defaults: withDefaults.bind(null, newRequest), - endpoint: request.request.endpoint - }); -} - -const graphql$1 = withDefaults(request.request, { - headers: { - "user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}` - }, - method: "POST", - url: "/graphql" -}); -function withCustomRequest(customRequest) { - return withDefaults(customRequest, { - method: "POST", - url: "/graphql" - }); -} - -exports.GraphqlResponseError = GraphqlResponseError; -exports.graphql = graphql$1; -exports.withCustomRequest = withCustomRequest; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 4193: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -const VERSION = "2.21.3"; - -function ownKeys(object, enumerableOnly) { - var keys = Object.keys(object); - - if (Object.getOwnPropertySymbols) { - var symbols = Object.getOwnPropertySymbols(object); - enumerableOnly && (symbols = symbols.filter(function (sym) { - return Object.getOwnPropertyDescriptor(object, sym).enumerable; - })), keys.push.apply(keys, symbols); - } - - return keys; -} - -function _objectSpread2(target) { - for (var i = 1; i < arguments.length; i++) { - var source = null != arguments[i] ? arguments[i] : {}; - i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { - _defineProperty(target, key, source[key]); - }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { - Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); - }); - } - - return target; -} - -function _defineProperty(obj, key, value) { - if (key in obj) { - Object.defineProperty(obj, key, { - value: value, - enumerable: true, - configurable: true, - writable: true - }); - } else { - obj[key] = value; - } - - return obj; -} - -/** - * Some “list” response that can be paginated have a different response structure - * - * They have a `total_count` key in the response (search also has `incomplete_results`, - * /installation/repositories also has `repository_selection`), as well as a key with - * the list of the items which name varies from endpoint to endpoint. 
- * - * Octokit normalizes these responses so that paginated results are always returned following - * the same structure. One challenge is that if the list response has only one page, no Link - * header is provided, so this header alone is not sufficient to check wether a response is - * paginated or not. - * - * We check if a "total_count" key is present in the response data, but also make sure that - * a "url" property is not, as the "Get the combined status for a specific ref" endpoint would - * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref - */ -function normalizePaginatedListResponse(response) { - // endpoints can respond with 204 if repository is empty - if (!response.data) { - return _objectSpread2(_objectSpread2({}, response), {}, { - data: [] - }); - } - - const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); - if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way - // to retrieve the same information. - - const incompleteResults = response.data.incomplete_results; - const repositorySelection = response.data.repository_selection; - const totalCount = response.data.total_count; - delete response.data.incomplete_results; - delete response.data.repository_selection; - delete response.data.total_count; - const namespaceKey = Object.keys(response.data)[0]; - const data = response.data[namespaceKey]; - response.data = data; - - if (typeof incompleteResults !== "undefined") { - response.data.incomplete_results = incompleteResults; - } - - if (typeof repositorySelection !== "undefined") { - response.data.repository_selection = repositorySelection; - } - - response.data.total_count = totalCount; - return response; -} - -function iterator(octokit, route, parameters) { - const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); - const requestMethod = typeof route === "function" ? route : octokit.request; - const method = options.method; - const headers = options.headers; - let url = options.url; - return { - [Symbol.asyncIterator]: () => ({ - async next() { - if (!url) return { - done: true - }; - - try { - const response = await requestMethod({ - method, - url, - headers - }); - const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format: - // '; rel="next", ; rel="last"' - // sets `url` to undefined if "next" URL is not present or `link` header is not set - - url = ((normalizedResponse.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1]; - return { - value: normalizedResponse - }; - } catch (error) { - if (error.status !== 409) throw error; - url = ""; - return { - value: { - status: 200, - headers: {}, - data: [] - } - }; - } - } - - }) - }; -} - -function paginate(octokit, route, parameters, mapFn) { - if (typeof parameters === "function") { - mapFn = parameters; - parameters = undefined; - } - - return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn); -} - -function gather(octokit, results, iterator, mapFn) { - return iterator.next().then(result => { - if (result.done) { - return results; - } - - let earlyExit = false; - - function done() { - earlyExit = true; - } - - results = results.concat(mapFn ? 
mapFn(result.value, done) : result.value.data); - - if (earlyExit) { - return results; - } - - return gather(octokit, results, iterator, mapFn); - }); -} - -const composePaginateRest = Object.assign(paginate, { - iterator -}); - -const paginatingEndpoints = ["GET /app/hook/deliveries", "GET /app/installations", "GET /applications/grants", "GET /authorizations", "GET /enterprises/{enterprise}/actions/permissions/organizations", "GET /enterprises/{enterprise}/actions/runner-groups", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners", "GET /enterprises/{enterprise}/actions/runners", "GET /enterprises/{enterprise}/audit-log", "GET /enterprises/{enterprise}/secret-scanning/alerts", "GET /enterprises/{enterprise}/settings/billing/advanced-security", "GET /events", "GET /gists", "GET /gists/public", "GET /gists/starred", "GET /gists/{gist_id}/comments", "GET /gists/{gist_id}/commits", "GET /gists/{gist_id}/forks", "GET /installation/repositories", "GET /issues", "GET /licenses", "GET /marketplace_listing/plans", "GET /marketplace_listing/plans/{plan_id}/accounts", "GET /marketplace_listing/stubbed/plans", "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", "GET /networks/{owner}/{repo}/events", "GET /notifications", "GET /organizations", "GET /orgs/{org}/actions/cache/usage-by-repository", "GET /orgs/{org}/actions/permissions/repositories", "GET /orgs/{org}/actions/runner-groups", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners", "GET /orgs/{org}/actions/runners", "GET /orgs/{org}/actions/secrets", "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", "GET /orgs/{org}/audit-log", "GET /orgs/{org}/blocks", "GET /orgs/{org}/code-scanning/alerts", "GET /orgs/{org}/codespaces", "GET /orgs/{org}/credential-authorizations", "GET /orgs/{org}/dependabot/secrets", "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", "GET /orgs/{org}/events", "GET /orgs/{org}/external-groups", "GET /orgs/{org}/failed_invitations", "GET /orgs/{org}/hooks", "GET /orgs/{org}/hooks/{hook_id}/deliveries", "GET /orgs/{org}/installations", "GET /orgs/{org}/invitations", "GET /orgs/{org}/invitations/{invitation_id}/teams", "GET /orgs/{org}/issues", "GET /orgs/{org}/members", "GET /orgs/{org}/migrations", "GET /orgs/{org}/migrations/{migration_id}/repositories", "GET /orgs/{org}/outside_collaborators", "GET /orgs/{org}/packages", "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", "GET /orgs/{org}/projects", "GET /orgs/{org}/public_members", "GET /orgs/{org}/repos", "GET /orgs/{org}/secret-scanning/alerts", "GET /orgs/{org}/settings/billing/advanced-security", "GET /orgs/{org}/team-sync/groups", "GET /orgs/{org}/teams", "GET /orgs/{org}/teams/{team_slug}/discussions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/invitations", "GET /orgs/{org}/teams/{team_slug}/members", "GET /orgs/{org}/teams/{team_slug}/projects", "GET /orgs/{org}/teams/{team_slug}/repos", "GET /orgs/{org}/teams/{team_slug}/teams", "GET /projects/columns/{column_id}/cards", "GET /projects/{project_id}/collaborators", "GET /projects/{project_id}/columns", "GET 
/repos/{owner}/{repo}/actions/artifacts", "GET /repos/{owner}/{repo}/actions/caches", "GET /repos/{owner}/{repo}/actions/runners", "GET /repos/{owner}/{repo}/actions/runs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", "GET /repos/{owner}/{repo}/actions/secrets", "GET /repos/{owner}/{repo}/actions/workflows", "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", "GET /repos/{owner}/{repo}/assignees", "GET /repos/{owner}/{repo}/branches", "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", "GET /repos/{owner}/{repo}/code-scanning/alerts", "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", "GET /repos/{owner}/{repo}/code-scanning/analyses", "GET /repos/{owner}/{repo}/codespaces", "GET /repos/{owner}/{repo}/codespaces/devcontainers", "GET /repos/{owner}/{repo}/codespaces/secrets", "GET /repos/{owner}/{repo}/collaborators", "GET /repos/{owner}/{repo}/comments", "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/commits", "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", "GET /repos/{owner}/{repo}/commits/{ref}/status", "GET /repos/{owner}/{repo}/commits/{ref}/statuses", "GET /repos/{owner}/{repo}/contributors", "GET /repos/{owner}/{repo}/dependabot/secrets", "GET /repos/{owner}/{repo}/deployments", "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", "GET /repos/{owner}/{repo}/environments", "GET /repos/{owner}/{repo}/events", "GET /repos/{owner}/{repo}/forks", "GET /repos/{owner}/{repo}/git/matching-refs/{ref}", "GET /repos/{owner}/{repo}/hooks", "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", "GET /repos/{owner}/{repo}/invitations", "GET /repos/{owner}/{repo}/issues", "GET /repos/{owner}/{repo}/issues/comments", "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/issues/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", "GET /repos/{owner}/{repo}/issues/{issue_number}/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", "GET /repos/{owner}/{repo}/keys", "GET /repos/{owner}/{repo}/labels", "GET /repos/{owner}/{repo}/milestones", "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", "GET /repos/{owner}/{repo}/notifications", "GET /repos/{owner}/{repo}/pages/builds", "GET /repos/{owner}/{repo}/projects", "GET /repos/{owner}/{repo}/pulls", "GET /repos/{owner}/{repo}/pulls/comments", "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", "GET /repos/{owner}/{repo}/releases", "GET /repos/{owner}/{repo}/releases/{release_id}/assets", "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", "GET 
/repos/{owner}/{repo}/secret-scanning/alerts", "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", "GET /repos/{owner}/{repo}/stargazers", "GET /repos/{owner}/{repo}/subscribers", "GET /repos/{owner}/{repo}/tags", "GET /repos/{owner}/{repo}/teams", "GET /repos/{owner}/{repo}/topics", "GET /repositories", "GET /repositories/{repository_id}/environments/{environment_name}/secrets", "GET /search/code", "GET /search/commits", "GET /search/issues", "GET /search/labels", "GET /search/repositories", "GET /search/topics", "GET /search/users", "GET /teams/{team_id}/discussions", "GET /teams/{team_id}/discussions/{discussion_number}/comments", "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /teams/{team_id}/discussions/{discussion_number}/reactions", "GET /teams/{team_id}/invitations", "GET /teams/{team_id}/members", "GET /teams/{team_id}/projects", "GET /teams/{team_id}/repos", "GET /teams/{team_id}/teams", "GET /user/blocks", "GET /user/codespaces", "GET /user/codespaces/secrets", "GET /user/emails", "GET /user/followers", "GET /user/following", "GET /user/gpg_keys", "GET /user/installations", "GET /user/installations/{installation_id}/repositories", "GET /user/issues", "GET /user/keys", "GET /user/marketplace_purchases", "GET /user/marketplace_purchases/stubbed", "GET /user/memberships/orgs", "GET /user/migrations", "GET /user/migrations/{migration_id}/repositories", "GET /user/orgs", "GET /user/packages", "GET /user/packages/{package_type}/{package_name}/versions", "GET /user/public_emails", "GET /user/repos", "GET /user/repository_invitations", "GET /user/starred", "GET /user/subscriptions", "GET /user/teams", "GET /users", "GET /users/{username}/events", "GET /users/{username}/events/orgs/{org}", "GET /users/{username}/events/public", "GET /users/{username}/followers", "GET /users/{username}/following", "GET /users/{username}/gists", "GET /users/{username}/gpg_keys", "GET /users/{username}/keys", "GET /users/{username}/orgs", "GET /users/{username}/packages", "GET /users/{username}/projects", "GET /users/{username}/received_events", "GET /users/{username}/received_events/public", "GET /users/{username}/repos", "GET /users/{username}/starred", "GET /users/{username}/subscriptions"]; - -function isPaginatingEndpoint(arg) { - if (typeof arg === "string") { - return paginatingEndpoints.includes(arg); - } else { - return false; - } -} - -/** - * @param octokit Octokit instance - * @param options Options passed to Octokit constructor - */ - -function paginateRest(octokit) { - return { - paginate: Object.assign(paginate.bind(null, octokit), { - iterator: iterator.bind(null, octokit) - }) - }; -} -paginateRest.VERSION = VERSION; - -exports.composePaginateRest = composePaginateRest; -exports.isPaginatingEndpoint = isPaginatingEndpoint; -exports.paginateRest = paginateRest; -exports.paginatingEndpoints = paginatingEndpoints; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 3044: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -function ownKeys(object, enumerableOnly) { - var keys = Object.keys(object); - - if (Object.getOwnPropertySymbols) { - var symbols = Object.getOwnPropertySymbols(object); - - if (enumerableOnly) { - symbols = symbols.filter(function (sym) { - return Object.getOwnPropertyDescriptor(object, sym).enumerable; - }); - } - - keys.push.apply(keys, symbols); - } - - return keys; -} - -function _objectSpread2(target) { 
- for (var i = 1; i < arguments.length; i++) { - var source = arguments[i] != null ? arguments[i] : {}; - - if (i % 2) { - ownKeys(Object(source), true).forEach(function (key) { - _defineProperty(target, key, source[key]); - }); - } else if (Object.getOwnPropertyDescriptors) { - Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); - } else { - ownKeys(Object(source)).forEach(function (key) { - Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); - }); - } - } - - return target; -} - -function _defineProperty(obj, key, value) { - if (key in obj) { - Object.defineProperty(obj, key, { - value: value, - enumerable: true, - configurable: true, - writable: true - }); - } else { - obj[key] = value; - } - - return obj; -} - -const Endpoints = { - actions: { - addCustomLabelsToSelfHostedRunnerForOrg: ["POST /orgs/{org}/actions/runners/{runner_id}/labels"], - addCustomLabelsToSelfHostedRunnerForRepo: ["POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], - addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], - approveWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve"], - cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"], - createOrUpdateEnvironmentSecret: ["PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], - createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], - createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - createRegistrationTokenForOrg: ["POST /orgs/{org}/actions/runners/registration-token"], - createRegistrationTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/registration-token"], - createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], - createRemoveTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/remove-token"], - createWorkflowDispatch: ["POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"], - deleteActionsCacheById: ["DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}"], - deleteActionsCacheByKey: ["DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}"], - deleteArtifact: ["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], - deleteEnvironmentSecret: ["DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], - deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], - deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - deleteSelfHostedRunnerFromOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}"], - deleteSelfHostedRunnerFromRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"], - deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], - deleteWorkflowRunLogs: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], - disableSelectedRepositoryGithubActionsOrganization: ["DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}"], - disableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable"], - downloadArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"], - downloadJobLogsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"], - downloadWorkflowRunAttemptLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs"], - downloadWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], - 
enableSelectedRepositoryGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories/{repository_id}"], - enableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable"], - getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"], - getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"], - getActionsCacheUsageByRepoForOrg: ["GET /orgs/{org}/actions/cache/usage-by-repository"], - getActionsCacheUsageForEnterprise: ["GET /enterprises/{enterprise}/actions/cache/usage"], - getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"], - getAllowedActionsOrganization: ["GET /orgs/{org}/actions/permissions/selected-actions"], - getAllowedActionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions/selected-actions"], - getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], - getEnvironmentPublicKey: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key"], - getEnvironmentSecret: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], - getGithubActionsDefaultWorkflowPermissionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/workflow"], - getGithubActionsDefaultWorkflowPermissionsOrganization: ["GET /orgs/{org}/actions/permissions/workflow"], - getGithubActionsDefaultWorkflowPermissionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions/workflow"], - getGithubActionsPermissionsOrganization: ["GET /orgs/{org}/actions/permissions"], - getGithubActionsPermissionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions"], - getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], - getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], - getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], - getPendingDeploymentsForRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"], - getRepoPermissions: ["GET /repos/{owner}/{repo}/actions/permissions", {}, { - renamed: ["actions", "getGithubActionsPermissionsRepository"] - }], - getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], - getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - getReviewsForRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals"], - getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], - getSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}"], - getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], - getWorkflowAccessToRepository: ["GET /repos/{owner}/{repo}/actions/permissions/access"], - getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], - getWorkflowRunAttempt: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}"], - getWorkflowRunUsage: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"], - getWorkflowUsage: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"], - listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], - listEnvironmentSecrets: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets"], - listJobsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"], - listJobsForWorkflowRunAttempt: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs"], - listLabelsForSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}/labels"], - listLabelsForSelfHostedRunnerForRepo: ["GET 
/repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], - listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], - listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], - listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], - listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], - listRunnerApplicationsForRepo: ["GET /repos/{owner}/{repo}/actions/runners/downloads"], - listSelectedReposForOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"], - listSelectedRepositoriesEnabledGithubActionsOrganization: ["GET /orgs/{org}/actions/permissions/repositories"], - listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], - listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], - listWorkflowRunArtifacts: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"], - listWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"], - listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], - reRunJobForWorkflowRun: ["POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun"], - reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], - reRunWorkflowFailedJobs: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs"], - removeAllCustomLabelsFromSelfHostedRunnerForOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}/labels"], - removeAllCustomLabelsFromSelfHostedRunnerForRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], - removeCustomLabelFromSelfHostedRunnerForOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}"], - removeCustomLabelFromSelfHostedRunnerForRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}"], - removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], - reviewPendingDeploymentsForRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"], - setAllowedActionsOrganization: ["PUT /orgs/{org}/actions/permissions/selected-actions"], - setAllowedActionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/selected-actions"], - setCustomLabelsForSelfHostedRunnerForOrg: ["PUT /orgs/{org}/actions/runners/{runner_id}/labels"], - setCustomLabelsForSelfHostedRunnerForRepo: ["PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], - setGithubActionsDefaultWorkflowPermissionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/workflow"], - setGithubActionsDefaultWorkflowPermissionsOrganization: ["PUT /orgs/{org}/actions/permissions/workflow"], - setGithubActionsDefaultWorkflowPermissionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/workflow"], - setGithubActionsPermissionsOrganization: ["PUT /orgs/{org}/actions/permissions"], - setGithubActionsPermissionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions"], - setSelectedReposForOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"], - setSelectedRepositoriesEnabledGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories"], - setWorkflowAccessToRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/access"] - }, - activity: { - checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], - deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], - deleteThreadSubscription: ["DELETE /notifications/threads/{thread_id}/subscription"], - getFeeds: ["GET /feeds"], - getRepoSubscription: ["GET 
/repos/{owner}/{repo}/subscription"], - getThread: ["GET /notifications/threads/{thread_id}"], - getThreadSubscriptionForAuthenticatedUser: ["GET /notifications/threads/{thread_id}/subscription"], - listEventsForAuthenticatedUser: ["GET /users/{username}/events"], - listNotificationsForAuthenticatedUser: ["GET /notifications"], - listOrgEventsForAuthenticatedUser: ["GET /users/{username}/events/orgs/{org}"], - listPublicEvents: ["GET /events"], - listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], - listPublicEventsForUser: ["GET /users/{username}/events/public"], - listPublicOrgEvents: ["GET /orgs/{org}/events"], - listReceivedEventsForUser: ["GET /users/{username}/received_events"], - listReceivedPublicEventsForUser: ["GET /users/{username}/received_events/public"], - listRepoEvents: ["GET /repos/{owner}/{repo}/events"], - listRepoNotificationsForAuthenticatedUser: ["GET /repos/{owner}/{repo}/notifications"], - listReposStarredByAuthenticatedUser: ["GET /user/starred"], - listReposStarredByUser: ["GET /users/{username}/starred"], - listReposWatchedByUser: ["GET /users/{username}/subscriptions"], - listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], - listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], - listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"], - markNotificationsAsRead: ["PUT /notifications"], - markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], - markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], - setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], - setThreadSubscription: ["PUT /notifications/threads/{thread_id}/subscription"], - starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], - unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] - }, - apps: { - addRepoToInstallation: ["PUT /user/installations/{installation_id}/repositories/{repository_id}", {}, { - renamed: ["apps", "addRepoToInstallationForAuthenticatedUser"] - }], - addRepoToInstallationForAuthenticatedUser: ["PUT /user/installations/{installation_id}/repositories/{repository_id}"], - checkToken: ["POST /applications/{client_id}/token"], - createFromManifest: ["POST /app-manifests/{code}/conversions"], - createInstallationAccessToken: ["POST /app/installations/{installation_id}/access_tokens"], - deleteAuthorization: ["DELETE /applications/{client_id}/grant"], - deleteInstallation: ["DELETE /app/installations/{installation_id}"], - deleteToken: ["DELETE /applications/{client_id}/token"], - getAuthenticated: ["GET /app"], - getBySlug: ["GET /apps/{app_slug}"], - getInstallation: ["GET /app/installations/{installation_id}"], - getOrgInstallation: ["GET /orgs/{org}/installation"], - getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"], - getSubscriptionPlanForAccount: ["GET /marketplace_listing/accounts/{account_id}"], - getSubscriptionPlanForAccountStubbed: ["GET /marketplace_listing/stubbed/accounts/{account_id}"], - getUserInstallation: ["GET /users/{username}/installation"], - getWebhookConfigForApp: ["GET /app/hook/config"], - getWebhookDelivery: ["GET /app/hook/deliveries/{delivery_id}"], - listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], - listAccountsForPlanStubbed: ["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"], - listInstallationReposForAuthenticatedUser: ["GET /user/installations/{installation_id}/repositories"], - listInstallations: ["GET /app/installations"], - listInstallationsForAuthenticatedUser: ["GET 
/user/installations"], - listPlans: ["GET /marketplace_listing/plans"], - listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], - listReposAccessibleToInstallation: ["GET /installation/repositories"], - listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], - listSubscriptionsForAuthenticatedUserStubbed: ["GET /user/marketplace_purchases/stubbed"], - listWebhookDeliveries: ["GET /app/hook/deliveries"], - redeliverWebhookDelivery: ["POST /app/hook/deliveries/{delivery_id}/attempts"], - removeRepoFromInstallation: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}", {}, { - renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"] - }], - removeRepoFromInstallationForAuthenticatedUser: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}"], - resetToken: ["PATCH /applications/{client_id}/token"], - revokeInstallationAccessToken: ["DELETE /installation/token"], - scopeToken: ["POST /applications/{client_id}/token/scoped"], - suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], - unsuspendInstallation: ["DELETE /app/installations/{installation_id}/suspended"], - updateWebhookConfigForApp: ["PATCH /app/hook/config"] - }, - billing: { - getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"], - getGithubActionsBillingUser: ["GET /users/{username}/settings/billing/actions"], - getGithubAdvancedSecurityBillingGhe: ["GET /enterprises/{enterprise}/settings/billing/advanced-security"], - getGithubAdvancedSecurityBillingOrg: ["GET /orgs/{org}/settings/billing/advanced-security"], - getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"], - getGithubPackagesBillingUser: ["GET /users/{username}/settings/billing/packages"], - getSharedStorageBillingOrg: ["GET /orgs/{org}/settings/billing/shared-storage"], - getSharedStorageBillingUser: ["GET /users/{username}/settings/billing/shared-storage"] - }, - checks: { - create: ["POST /repos/{owner}/{repo}/check-runs"], - createSuite: ["POST /repos/{owner}/{repo}/check-suites"], - get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"], - getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"], - listAnnotations: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations"], - listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"], - listForSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs"], - listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"], - rerequestRun: ["POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest"], - rerequestSuite: ["POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest"], - setSuitesPreferences: ["PATCH /repos/{owner}/{repo}/check-suites/preferences"], - update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"] - }, - codeScanning: { - deleteAnalysis: ["DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}"], - getAlert: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", {}, { - renamedParameters: { - alert_id: "alert_number" - } - }], - getAnalysis: ["GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}"], - getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"], - listAlertInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"], - listAlertsForOrg: ["GET /orgs/{org}/code-scanning/alerts"], - listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"], - 
listAlertsInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", {}, { - renamed: ["codeScanning", "listAlertInstances"] - }], - listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"], - updateAlert: ["PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}"], - uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"] - }, - codesOfConduct: { - getAllCodesOfConduct: ["GET /codes_of_conduct"], - getConductCode: ["GET /codes_of_conduct/{key}"] - }, - codespaces: { - addRepositoryForSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"], - codespaceMachinesForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}/machines"], - createForAuthenticatedUser: ["POST /user/codespaces"], - createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"], - createOrUpdateSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}"], - createWithPrForAuthenticatedUser: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces"], - createWithRepoForAuthenticatedUser: ["POST /repos/{owner}/{repo}/codespaces"], - deleteForAuthenticatedUser: ["DELETE /user/codespaces/{codespace_name}"], - deleteFromOrganization: ["DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}"], - deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"], - deleteSecretForAuthenticatedUser: ["DELETE /user/codespaces/secrets/{secret_name}"], - exportForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/exports"], - getExportDetailsForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}/exports/{export_id}"], - getForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}"], - getPublicKeyForAuthenticatedUser: ["GET /user/codespaces/secrets/public-key"], - getRepoPublicKey: ["GET /repos/{owner}/{repo}/codespaces/secrets/public-key"], - getRepoSecret: ["GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"], - getSecretForAuthenticatedUser: ["GET /user/codespaces/secrets/{secret_name}"], - listDevcontainersInRepositoryForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces/devcontainers"], - listForAuthenticatedUser: ["GET /user/codespaces"], - listInOrganization: ["GET /orgs/{org}/codespaces", {}, { - renamedParameters: { - org_id: "org" - } - }], - listInRepositoryForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces"], - listRepoSecrets: ["GET /repos/{owner}/{repo}/codespaces/secrets"], - listRepositoriesForSecretForAuthenticatedUser: ["GET /user/codespaces/secrets/{secret_name}/repositories"], - listSecretsForAuthenticatedUser: ["GET /user/codespaces/secrets"], - removeRepositoryForSecretForAuthenticatedUser: ["DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"], - repoMachinesForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces/machines"], - setRepositoriesForSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}/repositories"], - startForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/start"], - stopForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/stop"], - stopInOrganization: ["POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop"], - updateForAuthenticatedUser: ["PATCH /user/codespaces/{codespace_name}"] - }, - dependabot: { - addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"], - createOrUpdateOrgSecret: ["PUT 
/orgs/{org}/dependabot/secrets/{secret_name}"], - createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"], - deleteOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}"], - deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"], - getOrgPublicKey: ["GET /orgs/{org}/dependabot/secrets/public-key"], - getOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}"], - getRepoPublicKey: ["GET /repos/{owner}/{repo}/dependabot/secrets/public-key"], - getRepoSecret: ["GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"], - listOrgSecrets: ["GET /orgs/{org}/dependabot/secrets"], - listRepoSecrets: ["GET /repos/{owner}/{repo}/dependabot/secrets"], - listSelectedReposForOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories"], - removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"], - setSelectedReposForOrgSecret: ["PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories"] - }, - dependencyGraph: { - createRepositorySnapshot: ["POST /repos/{owner}/{repo}/dependency-graph/snapshots"], - diffRange: ["GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}"] - }, - emojis: { - get: ["GET /emojis"] - }, - enterpriseAdmin: { - addCustomLabelsToSelfHostedRunnerForEnterprise: ["POST /enterprises/{enterprise}/actions/runners/{runner_id}/labels"], - disableSelectedOrganizationGithubActionsEnterprise: ["DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"], - enableSelectedOrganizationGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"], - getAllowedActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/selected-actions"], - getGithubActionsPermissionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions"], - getServerStatistics: ["GET /enterprise-installation/{enterprise_or_org}/server-statistics"], - listLabelsForSelfHostedRunnerForEnterprise: ["GET /enterprises/{enterprise}/actions/runners/{runner_id}/labels"], - listSelectedOrganizationsEnabledGithubActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/organizations"], - removeAllCustomLabelsFromSelfHostedRunnerForEnterprise: ["DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels"], - removeCustomLabelFromSelfHostedRunnerForEnterprise: ["DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels/{name}"], - setAllowedActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/selected-actions"], - setCustomLabelsForSelfHostedRunnerForEnterprise: ["PUT /enterprises/{enterprise}/actions/runners/{runner_id}/labels"], - setGithubActionsPermissionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions"], - setSelectedOrganizationsEnabledGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations"] - }, - gists: { - checkIsStarred: ["GET /gists/{gist_id}/star"], - create: ["POST /gists"], - createComment: ["POST /gists/{gist_id}/comments"], - delete: ["DELETE /gists/{gist_id}"], - deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"], - fork: ["POST /gists/{gist_id}/forks"], - get: ["GET /gists/{gist_id}"], - getComment: ["GET /gists/{gist_id}/comments/{comment_id}"], - getRevision: ["GET /gists/{gist_id}/{sha}"], - list: ["GET /gists"], - listComments: ["GET /gists/{gist_id}/comments"], - listCommits: ["GET /gists/{gist_id}/commits"], - listForUser: ["GET /users/{username}/gists"], - 
listForks: ["GET /gists/{gist_id}/forks"], - listPublic: ["GET /gists/public"], - listStarred: ["GET /gists/starred"], - star: ["PUT /gists/{gist_id}/star"], - unstar: ["DELETE /gists/{gist_id}/star"], - update: ["PATCH /gists/{gist_id}"], - updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"] - }, - git: { - createBlob: ["POST /repos/{owner}/{repo}/git/blobs"], - createCommit: ["POST /repos/{owner}/{repo}/git/commits"], - createRef: ["POST /repos/{owner}/{repo}/git/refs"], - createTag: ["POST /repos/{owner}/{repo}/git/tags"], - createTree: ["POST /repos/{owner}/{repo}/git/trees"], - deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"], - getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"], - getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"], - getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"], - getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"], - getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"], - listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"], - updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"] - }, - gitignore: { - getAllTemplates: ["GET /gitignore/templates"], - getTemplate: ["GET /gitignore/templates/{name}"] - }, - interactions: { - getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"], - getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"], - getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"], - getRestrictionsForYourPublicRepos: ["GET /user/interaction-limits", {}, { - renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] - }], - removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"], - removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"], - removeRestrictionsForRepo: ["DELETE /repos/{owner}/{repo}/interaction-limits"], - removeRestrictionsForYourPublicRepos: ["DELETE /user/interaction-limits", {}, { - renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"] - }], - setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"], - setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"], - setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"], - setRestrictionsForYourPublicRepos: ["PUT /user/interaction-limits", {}, { - renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] - }] - }, - issues: { - addAssignees: ["POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"], - addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], - checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], - create: ["POST /repos/{owner}/{repo}/issues"], - createComment: ["POST /repos/{owner}/{repo}/issues/{issue_number}/comments"], - createLabel: ["POST /repos/{owner}/{repo}/labels"], - createMilestone: ["POST /repos/{owner}/{repo}/milestones"], - deleteComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"], - deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], - deleteMilestone: ["DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"], - get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], - getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], - getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], - getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"], - getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"], - list: ["GET /issues"], - listAssignees: ["GET /repos/{owner}/{repo}/assignees"], - listComments: ["GET 
/repos/{owner}/{repo}/issues/{issue_number}/comments"], - listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], - listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], - listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], - listEventsForTimeline: ["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline"], - listForAuthenticatedUser: ["GET /user/issues"], - listForOrg: ["GET /orgs/{org}/issues"], - listForRepo: ["GET /repos/{owner}/{repo}/issues"], - listLabelsForMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"], - listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], - listLabelsOnIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/labels"], - listMilestones: ["GET /repos/{owner}/{repo}/milestones"], - lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], - removeAllLabels: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"], - removeAssignees: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"], - removeLabel: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"], - setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], - unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], - update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], - updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], - updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], - updateMilestone: ["PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"] - }, - licenses: { - get: ["GET /licenses/{license}"], - getAllCommonlyUsed: ["GET /licenses"], - getForRepo: ["GET /repos/{owner}/{repo}/license"] - }, - markdown: { - render: ["POST /markdown"], - renderRaw: ["POST /markdown/raw", { - headers: { - "content-type": "text/plain; charset=utf-8" - } - }] - }, - meta: { - get: ["GET /meta"], - getOctocat: ["GET /octocat"], - getZen: ["GET /zen"], - root: ["GET /"] - }, - migrations: { - cancelImport: ["DELETE /repos/{owner}/{repo}/import"], - deleteArchiveForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/archive"], - deleteArchiveForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/archive"], - downloadArchiveForOrg: ["GET /orgs/{org}/migrations/{migration_id}/archive"], - getArchiveForAuthenticatedUser: ["GET /user/migrations/{migration_id}/archive"], - getCommitAuthors: ["GET /repos/{owner}/{repo}/import/authors"], - getImportStatus: ["GET /repos/{owner}/{repo}/import"], - getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"], - getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}"], - getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}"], - listForAuthenticatedUser: ["GET /user/migrations"], - listForOrg: ["GET /orgs/{org}/migrations"], - listReposForAuthenticatedUser: ["GET /user/migrations/{migration_id}/repositories"], - listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories"], - listReposForUser: ["GET /user/migrations/{migration_id}/repositories", {}, { - renamed: ["migrations", "listReposForAuthenticatedUser"] - }], - mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"], - setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"], - startForAuthenticatedUser: ["POST /user/migrations"], - startForOrg: ["POST /orgs/{org}/migrations"], - startImport: ["PUT /repos/{owner}/{repo}/import"], - unlockRepoForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock"], - unlockRepoForOrg: ["DELETE 
/orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock"], - updateImport: ["PATCH /repos/{owner}/{repo}/import"] - }, - orgs: { - blockUser: ["PUT /orgs/{org}/blocks/{username}"], - cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"], - checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], - checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], - checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], - convertMemberToOutsideCollaborator: ["PUT /orgs/{org}/outside_collaborators/{username}"], - createInvitation: ["POST /orgs/{org}/invitations"], - createWebhook: ["POST /orgs/{org}/hooks"], - deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], - get: ["GET /orgs/{org}"], - getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], - getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], - getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], - getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"], - getWebhookDelivery: ["GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}"], - list: ["GET /organizations"], - listAppInstallations: ["GET /orgs/{org}/installations"], - listBlockedUsers: ["GET /orgs/{org}/blocks"], - listCustomRoles: ["GET /organizations/{organization_id}/custom_roles"], - listFailedInvitations: ["GET /orgs/{org}/failed_invitations"], - listForAuthenticatedUser: ["GET /user/orgs"], - listForUser: ["GET /users/{username}/orgs"], - listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], - listMembers: ["GET /orgs/{org}/members"], - listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], - listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], - listPendingInvitations: ["GET /orgs/{org}/invitations"], - listPublicMembers: ["GET /orgs/{org}/public_members"], - listWebhookDeliveries: ["GET /orgs/{org}/hooks/{hook_id}/deliveries"], - listWebhooks: ["GET /orgs/{org}/hooks"], - pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], - redeliverWebhookDelivery: ["POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"], - removeMember: ["DELETE /orgs/{org}/members/{username}"], - removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], - removeOutsideCollaborator: ["DELETE /orgs/{org}/outside_collaborators/{username}"], - removePublicMembershipForAuthenticatedUser: ["DELETE /orgs/{org}/public_members/{username}"], - setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], - setPublicMembershipForAuthenticatedUser: ["PUT /orgs/{org}/public_members/{username}"], - unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], - update: ["PATCH /orgs/{org}"], - updateMembershipForAuthenticatedUser: ["PATCH /user/memberships/orgs/{org}"], - updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"], - updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"] - }, - packages: { - deletePackageForAuthenticatedUser: ["DELETE /user/packages/{package_type}/{package_name}"], - deletePackageForOrg: ["DELETE /orgs/{org}/packages/{package_type}/{package_name}"], - deletePackageForUser: ["DELETE /users/{username}/packages/{package_type}/{package_name}"], - deletePackageVersionForAuthenticatedUser: ["DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}"], - deletePackageVersionForOrg: ["DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"], - deletePackageVersionForUser: ["DELETE 
/users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"], - getAllPackageVersionsForAPackageOwnedByAnOrg: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions", {}, { - renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] - }], - getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions", {}, { - renamed: ["packages", "getAllPackageVersionsForPackageOwnedByAuthenticatedUser"] - }], - getAllPackageVersionsForPackageOwnedByAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions"], - getAllPackageVersionsForPackageOwnedByOrg: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions"], - getAllPackageVersionsForPackageOwnedByUser: ["GET /users/{username}/packages/{package_type}/{package_name}/versions"], - getPackageForAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}"], - getPackageForOrganization: ["GET /orgs/{org}/packages/{package_type}/{package_name}"], - getPackageForUser: ["GET /users/{username}/packages/{package_type}/{package_name}"], - getPackageVersionForAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}"], - getPackageVersionForOrganization: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"], - getPackageVersionForUser: ["GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"], - listPackagesForAuthenticatedUser: ["GET /user/packages"], - listPackagesForOrganization: ["GET /orgs/{org}/packages"], - listPackagesForUser: ["GET /users/{username}/packages"], - restorePackageForAuthenticatedUser: ["POST /user/packages/{package_type}/{package_name}/restore{?token}"], - restorePackageForOrg: ["POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}"], - restorePackageForUser: ["POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}"], - restorePackageVersionForAuthenticatedUser: ["POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"], - restorePackageVersionForOrg: ["POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"], - restorePackageVersionForUser: ["POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"] - }, - projects: { - addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}"], - createCard: ["POST /projects/columns/{column_id}/cards"], - createColumn: ["POST /projects/{project_id}/columns"], - createForAuthenticatedUser: ["POST /user/projects"], - createForOrg: ["POST /orgs/{org}/projects"], - createForRepo: ["POST /repos/{owner}/{repo}/projects"], - delete: ["DELETE /projects/{project_id}"], - deleteCard: ["DELETE /projects/columns/cards/{card_id}"], - deleteColumn: ["DELETE /projects/columns/{column_id}"], - get: ["GET /projects/{project_id}"], - getCard: ["GET /projects/columns/cards/{card_id}"], - getColumn: ["GET /projects/columns/{column_id}"], - getPermissionForUser: ["GET /projects/{project_id}/collaborators/{username}/permission"], - listCards: ["GET /projects/columns/{column_id}/cards"], - listCollaborators: ["GET /projects/{project_id}/collaborators"], - listColumns: ["GET /projects/{project_id}/columns"], - listForOrg: ["GET /orgs/{org}/projects"], - listForRepo: ["GET /repos/{owner}/{repo}/projects"], - listForUser: ["GET /users/{username}/projects"], - moveCard: ["POST 
/projects/columns/cards/{card_id}/moves"], - moveColumn: ["POST /projects/columns/{column_id}/moves"], - removeCollaborator: ["DELETE /projects/{project_id}/collaborators/{username}"], - update: ["PATCH /projects/{project_id}"], - updateCard: ["PATCH /projects/columns/cards/{card_id}"], - updateColumn: ["PATCH /projects/columns/{column_id}"] - }, - pulls: { - checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], - create: ["POST /repos/{owner}/{repo}/pulls"], - createReplyForReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies"], - createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], - createReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments"], - deletePendingReview: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], - deleteReviewComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}"], - dismissReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals"], - get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], - getReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], - getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], - list: ["GET /repos/{owner}/{repo}/pulls"], - listCommentsForReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"], - listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], - listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], - listRequestedReviewers: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], - listReviewComments: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"], - listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], - listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], - merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], - removeRequestedReviewers: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], - requestReviewers: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], - submitReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"], - update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], - updateBranch: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch"], - updateReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], - updateReviewComment: ["PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"] - }, - rateLimit: { - get: ["GET /rate_limit"] - }, - reactions: { - createForCommitComment: ["POST /repos/{owner}/{repo}/comments/{comment_id}/reactions"], - createForIssue: ["POST /repos/{owner}/{repo}/issues/{issue_number}/reactions"], - createForIssueComment: ["POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"], - createForPullRequestReviewComment: ["POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"], - createForRelease: ["POST /repos/{owner}/{repo}/releases/{release_id}/reactions"], - createForTeamDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"], - createForTeamDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"], - deleteForCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}"], - deleteForIssue: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}"], - 
deleteForIssueComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}"], - deleteForPullRequestComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}"], - deleteForRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}"], - deleteForTeamDiscussion: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}"], - deleteForTeamDiscussionComment: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}"], - listForCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions"], - listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"], - listForIssueComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"], - listForPullRequestReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"], - listForRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}/reactions"], - listForTeamDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"], - listForTeamDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"] - }, - repos: { - acceptInvitation: ["PATCH /user/repository_invitations/{invitation_id}", {}, { - renamed: ["repos", "acceptInvitationForAuthenticatedUser"] - }], - acceptInvitationForAuthenticatedUser: ["PATCH /user/repository_invitations/{invitation_id}"], - addAppAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps" - }], - addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], - addStatusCheckContexts: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts" - }], - addTeamAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams" - }], - addUserAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users" - }], - checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], - checkVulnerabilityAlerts: ["GET /repos/{owner}/{repo}/vulnerability-alerts"], - codeownersErrors: ["GET /repos/{owner}/{repo}/codeowners/errors"], - compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], - compareCommitsWithBasehead: ["GET /repos/{owner}/{repo}/compare/{basehead}"], - createAutolink: ["POST /repos/{owner}/{repo}/autolinks"], - createCommitComment: ["POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"], - createCommitSignatureProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"], - createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], - createDeployKey: ["POST /repos/{owner}/{repo}/keys"], - createDeployment: ["POST /repos/{owner}/{repo}/deployments"], - createDeploymentStatus: ["POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], - createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], - createForAuthenticatedUser: ["POST /user/repos"], - createFork: ["POST /repos/{owner}/{repo}/forks"], - createInOrg: ["POST /orgs/{org}/repos"], - createOrUpdateEnvironment: ["PUT /repos/{owner}/{repo}/environments/{environment_name}"], - createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], - createPagesSite: ["POST 
/repos/{owner}/{repo}/pages"], - createRelease: ["POST /repos/{owner}/{repo}/releases"], - createTagProtection: ["POST /repos/{owner}/{repo}/tags/protection"], - createUsingTemplate: ["POST /repos/{template_owner}/{template_repo}/generate"], - createWebhook: ["POST /repos/{owner}/{repo}/hooks"], - declineInvitation: ["DELETE /user/repository_invitations/{invitation_id}", {}, { - renamed: ["repos", "declineInvitationForAuthenticatedUser"] - }], - declineInvitationForAuthenticatedUser: ["DELETE /user/repository_invitations/{invitation_id}"], - delete: ["DELETE /repos/{owner}/{repo}"], - deleteAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], - deleteAdminBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], - deleteAnEnvironment: ["DELETE /repos/{owner}/{repo}/environments/{environment_name}"], - deleteAutolink: ["DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}"], - deleteBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection"], - deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], - deleteCommitSignatureProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"], - deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], - deleteDeployment: ["DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"], - deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], - deleteInvitation: ["DELETE /repos/{owner}/{repo}/invitations/{invitation_id}"], - deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages"], - deletePullRequestReviewProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], - deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], - deleteReleaseAsset: ["DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"], - deleteTagProtection: ["DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}"], - deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], - disableAutomatedSecurityFixes: ["DELETE /repos/{owner}/{repo}/automated-security-fixes"], - disableLfsForRepo: ["DELETE /repos/{owner}/{repo}/lfs"], - disableVulnerabilityAlerts: ["DELETE /repos/{owner}/{repo}/vulnerability-alerts"], - downloadArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}", {}, { - renamed: ["repos", "downloadZipballArchive"] - }], - downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"], - downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"], - enableAutomatedSecurityFixes: ["PUT /repos/{owner}/{repo}/automated-security-fixes"], - enableLfsForRepo: ["PUT /repos/{owner}/{repo}/lfs"], - enableVulnerabilityAlerts: ["PUT /repos/{owner}/{repo}/vulnerability-alerts"], - generateReleaseNotes: ["POST /repos/{owner}/{repo}/releases/generate-notes"], - get: ["GET /repos/{owner}/{repo}"], - getAccessRestrictions: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], - getAdminBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], - getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"], - getAllStatusCheckContexts: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"], - getAllTopics: ["GET /repos/{owner}/{repo}/topics"], - getAppsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"], - getAutolink: ["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"], - getBranch: ["GET 
/repos/{owner}/{repo}/branches/{branch}"], - getBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection"], - getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], - getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], - getCollaboratorPermissionLevel: ["GET /repos/{owner}/{repo}/collaborators/{username}/permission"], - getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], - getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], - getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], - getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], - getCommitSignatureProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"], - getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], - getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], - getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], - getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], - getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], - getDeploymentStatus: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}"], - getEnvironment: ["GET /repos/{owner}/{repo}/environments/{environment_name}"], - getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], - getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], - getPages: ["GET /repos/{owner}/{repo}/pages"], - getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], - getPagesHealthCheck: ["GET /repos/{owner}/{repo}/pages/health"], - getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], - getPullRequestReviewProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], - getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], - getReadme: ["GET /repos/{owner}/{repo}/readme"], - getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"], - getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], - getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], - getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], - getStatusChecksProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], - getTeamsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"], - getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], - getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], - getUsersWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"], - getViews: ["GET /repos/{owner}/{repo}/traffic/views"], - getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], - getWebhookConfigForRepo: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/config"], - getWebhookDelivery: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}"], - listAutolinks: ["GET /repos/{owner}/{repo}/autolinks"], - listBranches: ["GET /repos/{owner}/{repo}/branches"], - listBranchesForHeadCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head"], - listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], - listCommentsForCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"], - listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], - listCommitStatusesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/statuses"], - listCommits: ["GET 
/repos/{owner}/{repo}/commits"], - listContributors: ["GET /repos/{owner}/{repo}/contributors"], - listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], - listDeploymentStatuses: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], - listDeployments: ["GET /repos/{owner}/{repo}/deployments"], - listForAuthenticatedUser: ["GET /user/repos"], - listForOrg: ["GET /orgs/{org}/repos"], - listForUser: ["GET /users/{username}/repos"], - listForks: ["GET /repos/{owner}/{repo}/forks"], - listInvitations: ["GET /repos/{owner}/{repo}/invitations"], - listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"], - listLanguages: ["GET /repos/{owner}/{repo}/languages"], - listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], - listPublic: ["GET /repositories"], - listPullRequestsAssociatedWithCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls"], - listReleaseAssets: ["GET /repos/{owner}/{repo}/releases/{release_id}/assets"], - listReleases: ["GET /repos/{owner}/{repo}/releases"], - listTagProtection: ["GET /repos/{owner}/{repo}/tags/protection"], - listTags: ["GET /repos/{owner}/{repo}/tags"], - listTeams: ["GET /repos/{owner}/{repo}/teams"], - listWebhookDeliveries: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries"], - listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], - merge: ["POST /repos/{owner}/{repo}/merges"], - mergeUpstream: ["POST /repos/{owner}/{repo}/merge-upstream"], - pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], - redeliverWebhookDelivery: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"], - removeAppAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps" - }], - removeCollaborator: ["DELETE /repos/{owner}/{repo}/collaborators/{username}"], - removeStatusCheckContexts: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts" - }], - removeStatusCheckProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], - removeTeamAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams" - }], - removeUserAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users" - }], - renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"], - replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics"], - requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], - setAdminBranchProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], - setAppAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps" - }], - setStatusCheckContexts: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts" - }], - setTeamAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams" - }], - setUserAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users" - }], - testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], - transfer: ["POST /repos/{owner}/{repo}/transfer"], - update: ["PATCH /repos/{owner}/{repo}"], - updateBranchProtection: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection"], - 
updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], - updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], - updateInvitation: ["PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"], - updatePullRequestReviewProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], - updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], - updateReleaseAsset: ["PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"], - updateStatusCheckPotection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", {}, { - renamed: ["repos", "updateStatusCheckProtection"] - }], - updateStatusCheckProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], - updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], - updateWebhookConfigForRepo: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config"], - uploadReleaseAsset: ["POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", { - baseUrl: "https://uploads.github.com" - }] - }, - search: { - code: ["GET /search/code"], - commits: ["GET /search/commits"], - issuesAndPullRequests: ["GET /search/issues"], - labels: ["GET /search/labels"], - repos: ["GET /search/repositories"], - topics: ["GET /search/topics"], - users: ["GET /search/users"] - }, - secretScanning: { - getAlert: ["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"], - listAlertsForEnterprise: ["GET /enterprises/{enterprise}/secret-scanning/alerts"], - listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"], - listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"], - listLocationsForAlert: ["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations"], - updateAlert: ["PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"] - }, - teams: { - addOrUpdateMembershipForUserInOrg: ["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"], - addOrUpdateProjectPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}"], - addOrUpdateRepoPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], - checkPermissionsForProjectInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}"], - checkPermissionsForRepoInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], - create: ["POST /orgs/{org}/teams"], - createDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], - createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], - deleteDiscussionCommentInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], - deleteDiscussionInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], - deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], - getByName: ["GET /orgs/{org}/teams/{team_slug}"], - getDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], - getDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], - getMembershipForUserInOrg: ["GET /orgs/{org}/teams/{team_slug}/memberships/{username}"], - list: ["GET /orgs/{org}/teams"], - listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], - listDiscussionCommentsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], - listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], - 
listForAuthenticatedUser: ["GET /user/teams"], - listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], - listPendingInvitationsInOrg: ["GET /orgs/{org}/teams/{team_slug}/invitations"], - listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects"], - listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], - removeMembershipForUserInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"], - removeProjectInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}"], - removeRepoInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], - updateDiscussionCommentInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], - updateDiscussionInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], - updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"] - }, - users: { - addEmailForAuthenticated: ["POST /user/emails", {}, { - renamed: ["users", "addEmailForAuthenticatedUser"] - }], - addEmailForAuthenticatedUser: ["POST /user/emails"], - block: ["PUT /user/blocks/{username}"], - checkBlocked: ["GET /user/blocks/{username}"], - checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], - checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], - createGpgKeyForAuthenticated: ["POST /user/gpg_keys", {}, { - renamed: ["users", "createGpgKeyForAuthenticatedUser"] - }], - createGpgKeyForAuthenticatedUser: ["POST /user/gpg_keys"], - createPublicSshKeyForAuthenticated: ["POST /user/keys", {}, { - renamed: ["users", "createPublicSshKeyForAuthenticatedUser"] - }], - createPublicSshKeyForAuthenticatedUser: ["POST /user/keys"], - deleteEmailForAuthenticated: ["DELETE /user/emails", {}, { - renamed: ["users", "deleteEmailForAuthenticatedUser"] - }], - deleteEmailForAuthenticatedUser: ["DELETE /user/emails"], - deleteGpgKeyForAuthenticated: ["DELETE /user/gpg_keys/{gpg_key_id}", {}, { - renamed: ["users", "deleteGpgKeyForAuthenticatedUser"] - }], - deleteGpgKeyForAuthenticatedUser: ["DELETE /user/gpg_keys/{gpg_key_id}"], - deletePublicSshKeyForAuthenticated: ["DELETE /user/keys/{key_id}", {}, { - renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"] - }], - deletePublicSshKeyForAuthenticatedUser: ["DELETE /user/keys/{key_id}"], - follow: ["PUT /user/following/{username}"], - getAuthenticated: ["GET /user"], - getByUsername: ["GET /users/{username}"], - getContextForUser: ["GET /users/{username}/hovercard"], - getGpgKeyForAuthenticated: ["GET /user/gpg_keys/{gpg_key_id}", {}, { - renamed: ["users", "getGpgKeyForAuthenticatedUser"] - }], - getGpgKeyForAuthenticatedUser: ["GET /user/gpg_keys/{gpg_key_id}"], - getPublicSshKeyForAuthenticated: ["GET /user/keys/{key_id}", {}, { - renamed: ["users", "getPublicSshKeyForAuthenticatedUser"] - }], - getPublicSshKeyForAuthenticatedUser: ["GET /user/keys/{key_id}"], - list: ["GET /users"], - listBlockedByAuthenticated: ["GET /user/blocks", {}, { - renamed: ["users", "listBlockedByAuthenticatedUser"] - }], - listBlockedByAuthenticatedUser: ["GET /user/blocks"], - listEmailsForAuthenticated: ["GET /user/emails", {}, { - renamed: ["users", "listEmailsForAuthenticatedUser"] - }], - listEmailsForAuthenticatedUser: ["GET /user/emails"], - listFollowedByAuthenticated: ["GET /user/following", {}, { - renamed: ["users", "listFollowedByAuthenticatedUser"] - }], - listFollowedByAuthenticatedUser: ["GET /user/following"], - listFollowersForAuthenticatedUser: ["GET /user/followers"], - listFollowersForUser: ["GET 
/users/{username}/followers"], - listFollowingForUser: ["GET /users/{username}/following"], - listGpgKeysForAuthenticated: ["GET /user/gpg_keys", {}, { - renamed: ["users", "listGpgKeysForAuthenticatedUser"] - }], - listGpgKeysForAuthenticatedUser: ["GET /user/gpg_keys"], - listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], - listPublicEmailsForAuthenticated: ["GET /user/public_emails", {}, { - renamed: ["users", "listPublicEmailsForAuthenticatedUser"] - }], - listPublicEmailsForAuthenticatedUser: ["GET /user/public_emails"], - listPublicKeysForUser: ["GET /users/{username}/keys"], - listPublicSshKeysForAuthenticated: ["GET /user/keys", {}, { - renamed: ["users", "listPublicSshKeysForAuthenticatedUser"] - }], - listPublicSshKeysForAuthenticatedUser: ["GET /user/keys"], - setPrimaryEmailVisibilityForAuthenticated: ["PATCH /user/email/visibility", {}, { - renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"] - }], - setPrimaryEmailVisibilityForAuthenticatedUser: ["PATCH /user/email/visibility"], - unblock: ["DELETE /user/blocks/{username}"], - unfollow: ["DELETE /user/following/{username}"], - updateAuthenticated: ["PATCH /user"] - } -}; - -const VERSION = "5.16.2"; - -function endpointsToMethods(octokit, endpointsMap) { - const newMethods = {}; - - for (const [scope, endpoints] of Object.entries(endpointsMap)) { - for (const [methodName, endpoint] of Object.entries(endpoints)) { - const [route, defaults, decorations] = endpoint; - const [method, url] = route.split(/ /); - const endpointDefaults = Object.assign({ - method, - url - }, defaults); - - if (!newMethods[scope]) { - newMethods[scope] = {}; - } - - const scopeMethods = newMethods[scope]; - - if (decorations) { - scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations); - continue; - } - - scopeMethods[methodName] = octokit.request.defaults(endpointDefaults); - } - } - - return newMethods; -} - -function decorate(octokit, scope, methodName, defaults, decorations) { - const requestWithDefaults = octokit.request.defaults(defaults); - /* istanbul ignore next */ - - function withDecorations(...args) { - // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData` - - if (decorations.mapToData) { - options = Object.assign({}, options, { - data: options[decorations.mapToData], - [decorations.mapToData]: undefined - }); - return requestWithDefaults(options); - } - - if (decorations.renamed) { - const [newScope, newMethodName] = decorations.renamed; - octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`); - } - - if (decorations.deprecated) { - octokit.log.warn(decorations.deprecated); - } - - if (decorations.renamedParameters) { - // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - const options = requestWithDefaults.endpoint.merge(...args); - - for (const [name, alias] of Object.entries(decorations.renamedParameters)) { - if (name in options) { - octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". 
Use "${alias}" instead`); - - if (!(alias in options)) { - options[alias] = options[name]; - } - - delete options[name]; - } - } - - return requestWithDefaults(options); - } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - - - return requestWithDefaults(...args); - } - - return Object.assign(withDecorations, requestWithDefaults); -} - -function restEndpointMethods(octokit) { - const api = endpointsToMethods(octokit, Endpoints); - return { - rest: api - }; -} -restEndpointMethods.VERSION = VERSION; -function legacyRestEndpointMethods(octokit) { - const api = endpointsToMethods(octokit, Endpoints); - return _objectSpread2(_objectSpread2({}, api), {}, { - rest: api - }); -} -legacyRestEndpointMethods.VERSION = VERSION; - -exports.legacyRestEndpointMethods = legacyRestEndpointMethods; -exports.restEndpointMethods = restEndpointMethods; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 537: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } - -var deprecation = __nccwpck_require__(8932); -var once = _interopDefault(__nccwpck_require__(1223)); - -const logOnceCode = once(deprecation => console.warn(deprecation)); -const logOnceHeaders = once(deprecation => console.warn(deprecation)); -/** - * Error with extra properties to help with debugging - */ - -class RequestError extends Error { - constructor(message, statusCode, options) { - super(message); // Maintains proper stack trace (only available on V8) - - /* istanbul ignore next */ - - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } - - this.name = "HttpError"; - this.status = statusCode; - let headers; - - if ("headers" in options && typeof options.headers !== "undefined") { - headers = options.headers; - } - - if ("response" in options) { - this.response = options.response; - headers = options.response.headers; - } // redact request credentials without mutating original request options - - - const requestCopy = Object.assign({}, options.request); - - if (options.request.headers.authorization) { - requestCopy.headers = Object.assign({}, options.request.headers, { - authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]") - }); - } - - requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit - // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications - .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended - // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header - .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); - this.request = requestCopy; // deprecations - - Object.defineProperty(this, "code", { - get() { - logOnceCode(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); - return statusCode; - } - - }); - Object.defineProperty(this, "headers", { - get() { - logOnceHeaders(new deprecation.Deprecation("[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.")); - return headers || {}; - } - - }); - } - -} - -exports.RequestError = RequestError; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 6234: -/***/ 
((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } - -var endpoint = __nccwpck_require__(9440); -var universalUserAgent = __nccwpck_require__(5030); -var isPlainObject = __nccwpck_require__(3287); -var nodeFetch = _interopDefault(__nccwpck_require__(467)); -var requestError = __nccwpck_require__(537); - -const VERSION = "5.6.3"; - -function getBufferResponse(response) { - return response.arrayBuffer(); -} - -function fetchWrapper(requestOptions) { - const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console; - - if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { - requestOptions.body = JSON.stringify(requestOptions.body); - } - - let headers = {}; - let status; - let url; - const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch; - return fetch(requestOptions.url, Object.assign({ - method: requestOptions.method, - body: requestOptions.body, - headers: requestOptions.headers, - redirect: requestOptions.redirect - }, // `requestOptions.request.agent` type is incompatible - // see https://github.com/octokit/types.ts/pull/264 - requestOptions.request)).then(async response => { - url = response.url; - status = response.status; - - for (const keyAndValue of response.headers) { - headers[keyAndValue[0]] = keyAndValue[1]; - } - - if ("deprecation" in headers) { - const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/); - const deprecationLink = matches && matches.pop(); - log.warn(`[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. 
See ${deprecationLink}` : ""}`); - } - - if (status === 204 || status === 205) { - return; - } // GitHub API returns 200 for HEAD requests - - - if (requestOptions.method === "HEAD") { - if (status < 400) { - return; - } - - throw new requestError.RequestError(response.statusText, status, { - response: { - url, - status, - headers, - data: undefined - }, - request: requestOptions - }); - } - - if (status === 304) { - throw new requestError.RequestError("Not modified", status, { - response: { - url, - status, - headers, - data: await getResponseData(response) - }, - request: requestOptions - }); - } - - if (status >= 400) { - const data = await getResponseData(response); - const error = new requestError.RequestError(toErrorMessage(data), status, { - response: { - url, - status, - headers, - data - }, - request: requestOptions - }); - throw error; - } - - return getResponseData(response); - }).then(data => { - return { - status, - url, - headers, - data - }; - }).catch(error => { - if (error instanceof requestError.RequestError) throw error; - throw new requestError.RequestError(error.message, 500, { - request: requestOptions - }); - }); -} - -async function getResponseData(response) { - const contentType = response.headers.get("content-type"); - - if (/application\/json/.test(contentType)) { - return response.json(); - } - - if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { - return response.text(); - } - - return getBufferResponse(response); -} - -function toErrorMessage(data) { - if (typeof data === "string") return data; // istanbul ignore else - just in case - - if ("message" in data) { - if (Array.isArray(data.errors)) { - return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}`; - } - - return data.message; - } // istanbul ignore next - just in case - - - return `Unknown error: ${JSON.stringify(data)}`; -} - -function withDefaults(oldEndpoint, newDefaults) { - const endpoint = oldEndpoint.defaults(newDefaults); - - const newApi = function (route, parameters) { - const endpointOptions = endpoint.merge(route, parameters); - - if (!endpointOptions.request || !endpointOptions.request.hook) { - return fetchWrapper(endpoint.parse(endpointOptions)); - } - - const request = (route, parameters) => { - return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters))); - }; - - Object.assign(request, { - endpoint, - defaults: withDefaults.bind(null, endpoint) - }); - return endpointOptions.request.hook(request, endpointOptions); - }; - - return Object.assign(newApi, { - endpoint, - defaults: withDefaults.bind(null, endpoint) - }); -} - -const request = withDefaults(endpoint.endpoint, { - headers: { - "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}` - } -}); - -exports.request = request; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 407: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.formatNames = exports.fastFormats = exports.fullFormats = void 0; -function fmtDef(validate, compare) { - return { validate, compare }; -} -exports.fullFormats = { - // date: http://tools.ietf.org/html/rfc3339#section-5.6 - date: fmtDef(date, compareDate), - // date-time: http://tools.ietf.org/html/rfc3339#section-5.6 - time: fmtDef(time, compareTime), - "date-time": fmtDef(date_time, compareDateTime), - // duration: https://tools.ietf.org/html/rfc3339#appendix-A - duration: 
/^P(?!$)((\d+Y)?(\d+M)?(\d+D)?(T(?=\d)(\d+H)?(\d+M)?(\d+S)?)?|(\d+W)?)$/, - uri, - "uri-reference": /^(?:[a-z][a-z0-9+\-.]*:)?(?:\/?\/(?:(?:[a-z0-9\-._~!$&'()*+,;=:]|%[0-9a-f]{2})*@)?(?:\[(?:(?:(?:(?:[0-9a-f]{1,4}:){6}|::(?:[0-9a-f]{1,4}:){5}|(?:[0-9a-f]{1,4})?::(?:[0-9a-f]{1,4}:){4}|(?:(?:[0-9a-f]{1,4}:){0,1}[0-9a-f]{1,4})?::(?:[0-9a-f]{1,4}:){3}|(?:(?:[0-9a-f]{1,4}:){0,2}[0-9a-f]{1,4})?::(?:[0-9a-f]{1,4}:){2}|(?:(?:[0-9a-f]{1,4}:){0,3}[0-9a-f]{1,4})?::[0-9a-f]{1,4}:|(?:(?:[0-9a-f]{1,4}:){0,4}[0-9a-f]{1,4})?::)(?:[0-9a-f]{1,4}:[0-9a-f]{1,4}|(?:(?:25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(?:25[0-5]|2[0-4]\d|[01]?\d\d?))|(?:(?:[0-9a-f]{1,4}:){0,5}[0-9a-f]{1,4})?::[0-9a-f]{1,4}|(?:(?:[0-9a-f]{1,4}:){0,6}[0-9a-f]{1,4})?::)|[Vv][0-9a-f]+\.[a-z0-9\-._~!$&'()*+,;=:]+)\]|(?:(?:25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(?:25[0-5]|2[0-4]\d|[01]?\d\d?)|(?:[a-z0-9\-._~!$&'"()*+,;=]|%[0-9a-f]{2})*)(?::\d*)?(?:\/(?:[a-z0-9\-._~!$&'"()*+,;=:@]|%[0-9a-f]{2})*)*|\/(?:(?:[a-z0-9\-._~!$&'"()*+,;=:@]|%[0-9a-f]{2})+(?:\/(?:[a-z0-9\-._~!$&'"()*+,;=:@]|%[0-9a-f]{2})*)*)?|(?:[a-z0-9\-._~!$&'"()*+,;=:@]|%[0-9a-f]{2})+(?:\/(?:[a-z0-9\-._~!$&'"()*+,;=:@]|%[0-9a-f]{2})*)*)?(?:\?(?:[a-z0-9\-._~!$&'"()*+,;=:@/?]|%[0-9a-f]{2})*)?(?:#(?:[a-z0-9\-._~!$&'"()*+,;=:@/?]|%[0-9a-f]{2})*)?$/i, - // uri-template: https://tools.ietf.org/html/rfc6570 - "uri-template": /^(?:(?:[^\x00-\x20"'<>%\\^`{|}]|%[0-9a-f]{2})|\{[+#./;?&=,!@|]?(?:[a-z0-9_]|%[0-9a-f]{2})+(?::[1-9][0-9]{0,3}|\*)?(?:,(?:[a-z0-9_]|%[0-9a-f]{2})+(?::[1-9][0-9]{0,3}|\*)?)*\})*$/i, - // For the source: https://gist.github.com/dperini/729294 - // For test cases: https://mathiasbynens.be/demo/url-regex - url: /^(?:https?|ftp):\/\/(?:\S+(?::\S*)?@)?(?:(?!(?:10|127)(?:\.\d{1,3}){3})(?!(?:169\.254|192\.168)(?:\.\d{1,3}){2})(?!172\.(?:1[6-9]|2\d|3[0-1])(?:\.\d{1,3}){2})(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))|(?:(?:[a-z0-9\u{00a1}-\u{ffff}]+-)*[a-z0-9\u{00a1}-\u{ffff}]+)(?:\.(?:[a-z0-9\u{00a1}-\u{ffff}]+-)*[a-z0-9\u{00a1}-\u{ffff}]+)*(?:\.(?:[a-z\u{00a1}-\u{ffff}]{2,})))(?::\d{2,5})?(?:\/[^\s]*)?$/iu, - email: /^[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*@(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?$/i, - hostname: /^(?=.{1,253}\.?$)[a-z0-9](?:[a-z0-9-]{0,61}[a-z0-9])?(?:\.[a-z0-9](?:[-0-9a-z]{0,61}[0-9a-z])?)*\.?$/i, - // optimized https://www.safaribooksonline.com/library/view/regular-expressions-cookbook/9780596802837/ch07s16.html - ipv4: /^(?:(?:25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(?:25[0-5]|2[0-4]\d|[01]?\d\d?)$/, - ipv6: 
/^((([0-9a-f]{1,4}:){7}([0-9a-f]{1,4}|:))|(([0-9a-f]{1,4}:){6}(:[0-9a-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9a-f]{1,4}:){5}(((:[0-9a-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9a-f]{1,4}:){4}(((:[0-9a-f]{1,4}){1,3})|((:[0-9a-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9a-f]{1,4}:){3}(((:[0-9a-f]{1,4}){1,4})|((:[0-9a-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9a-f]{1,4}:){2}(((:[0-9a-f]{1,4}){1,5})|((:[0-9a-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9a-f]{1,4}:){1}(((:[0-9a-f]{1,4}){1,6})|((:[0-9a-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(:(((:[0-9a-f]{1,4}){1,7})|((:[0-9a-f]{1,4}){0,5}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))$/i, - regex, - // uuid: http://tools.ietf.org/html/rfc4122 - uuid: /^(?:urn:uuid:)?[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12}$/i, - // JSON-pointer: https://tools.ietf.org/html/rfc6901 - // uri fragment: https://tools.ietf.org/html/rfc3986#appendix-A - "json-pointer": /^(?:\/(?:[^~/]|~0|~1)*)*$/, - "json-pointer-uri-fragment": /^#(?:\/(?:[a-z0-9_\-.!$&'()*+,;:=@]|%[0-9a-f]{2}|~0|~1)*)*$/i, - // relative JSON-pointer: http://tools.ietf.org/html/draft-luff-relative-json-pointer-00 - "relative-json-pointer": /^(?:0|[1-9][0-9]*)(?:#|(?:\/(?:[^~/]|~0|~1)*)*)$/, - // the following formats are used by the openapi specification: https://spec.openapis.org/oas/v3.0.0#data-types - // byte: https://github.com/miguelmota/is-base64 - byte, - // signed 32 bit integer - int32: { type: "number", validate: validateInt32 }, - // signed 64 bit integer - int64: { type: "number", validate: validateInt64 }, - // C-type float - float: { type: "number", validate: validateNumber }, - // C-type double - double: { type: "number", validate: validateNumber }, - // hint to the UI to hide input strings - password: true, - // unchecked string payload - binary: true, -}; -exports.fastFormats = { - ...exports.fullFormats, - date: fmtDef(/^\d\d\d\d-[0-1]\d-[0-3]\d$/, compareDate), - time: fmtDef(/^(?:[0-2]\d:[0-5]\d:[0-5]\d|23:59:60)(?:\.\d+)?(?:z|[+-]\d\d(?::?\d\d)?)?$/i, compareTime), - "date-time": fmtDef(/^\d\d\d\d-[0-1]\d-[0-3]\d[t\s](?:[0-2]\d:[0-5]\d:[0-5]\d|23:59:60)(?:\.\d+)?(?:z|[+-]\d\d(?::?\d\d)?)$/i, compareDateTime), - // uri: https://github.com/mafintosh/is-my-json-valid/blob/master/formats.js - uri: /^(?:[a-z][a-z0-9+\-.]*:)(?:\/?\/)?[^\s]*$/i, - "uri-reference": /^(?:(?:[a-z][a-z0-9+\-.]*:)?\/?\/)?(?:[^\\\s#][^\s#]*)?(?:#[^\\\s]*)?$/i, - // email (sources from jsen validator): - // http://stackoverflow.com/questions/201323/using-a-regular-expression-to-validate-an-email-address#answer-8829363 - // http://www.w3.org/TR/html5/forms.html#valid-e-mail-address (search for 'wilful violation') - email: /^[a-z0-9.!#$%&'*+/=?^_`{|}~-]+@[a-z0-9](?:[a-z0-9-]{0,61}[a-z0-9])?(?:\.[a-z0-9](?:[a-z0-9-]{0,61}[a-z0-9])?)*$/i, -}; -exports.formatNames = Object.keys(exports.fullFormats); -function isLeapYear(year) { - // https://tools.ietf.org/html/rfc3339#appendix-C - return year % 4 === 0 && (year % 100 !== 0 || year % 400 === 0); -} -const DATE = /^(\d\d\d\d)-(\d\d)-(\d\d)$/; -const DAYS = [0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]; -function date(str) { - // full-date from http://tools.ietf.org/html/rfc3339#section-5.6 - 
const matches = DATE.exec(str); - if (!matches) - return false; - const year = +matches[1]; - const month = +matches[2]; - const day = +matches[3]; - return (month >= 1 && - month <= 12 && - day >= 1 && - day <= (month === 2 && isLeapYear(year) ? 29 : DAYS[month])); -} -function compareDate(d1, d2) { - if (!(d1 && d2)) - return undefined; - if (d1 > d2) - return 1; - if (d1 < d2) - return -1; - return 0; -} -const TIME = /^(\d\d):(\d\d):(\d\d)(\.\d+)?(z|[+-]\d\d(?::?\d\d)?)?$/i; -function time(str, withTimeZone) { - const matches = TIME.exec(str); - if (!matches) - return false; - const hour = +matches[1]; - const minute = +matches[2]; - const second = +matches[3]; - const timeZone = matches[5]; - return (((hour <= 23 && minute <= 59 && second <= 59) || - (hour === 23 && minute === 59 && second === 60)) && - (!withTimeZone || timeZone !== "")); -} -function compareTime(t1, t2) { - if (!(t1 && t2)) - return undefined; - const a1 = TIME.exec(t1); - const a2 = TIME.exec(t2); - if (!(a1 && a2)) - return undefined; - t1 = a1[1] + a1[2] + a1[3] + (a1[4] || ""); - t2 = a2[1] + a2[2] + a2[3] + (a2[4] || ""); - if (t1 > t2) - return 1; - if (t1 < t2) - return -1; - return 0; -} -const DATE_TIME_SEPARATOR = /t|\s/i; -function date_time(str) { - // http://tools.ietf.org/html/rfc3339#section-5.6 - const dateTime = str.split(DATE_TIME_SEPARATOR); - return dateTime.length === 2 && date(dateTime[0]) && time(dateTime[1], true); -} -function compareDateTime(dt1, dt2) { - if (!(dt1 && dt2)) - return undefined; - const [d1, t1] = dt1.split(DATE_TIME_SEPARATOR); - const [d2, t2] = dt2.split(DATE_TIME_SEPARATOR); - const res = compareDate(d1, d2); - if (res === undefined) - return undefined; - return res || compareTime(t1, t2); -} -const NOT_URI_FRAGMENT = /\/|:/; -const URI = /^(?:[a-z][a-z0-9+\-.]*:)(?:\/?\/(?:(?:[a-z0-9\-._~!$&'()*+,;=:]|%[0-9a-f]{2})*@)?(?:\[(?:(?:(?:(?:[0-9a-f]{1,4}:){6}|::(?:[0-9a-f]{1,4}:){5}|(?:[0-9a-f]{1,4})?::(?:[0-9a-f]{1,4}:){4}|(?:(?:[0-9a-f]{1,4}:){0,1}[0-9a-f]{1,4})?::(?:[0-9a-f]{1,4}:){3}|(?:(?:[0-9a-f]{1,4}:){0,2}[0-9a-f]{1,4})?::(?:[0-9a-f]{1,4}:){2}|(?:(?:[0-9a-f]{1,4}:){0,3}[0-9a-f]{1,4})?::[0-9a-f]{1,4}:|(?:(?:[0-9a-f]{1,4}:){0,4}[0-9a-f]{1,4})?::)(?:[0-9a-f]{1,4}:[0-9a-f]{1,4}|(?:(?:25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(?:25[0-5]|2[0-4]\d|[01]?\d\d?))|(?:(?:[0-9a-f]{1,4}:){0,5}[0-9a-f]{1,4})?::[0-9a-f]{1,4}|(?:(?:[0-9a-f]{1,4}:){0,6}[0-9a-f]{1,4})?::)|[Vv][0-9a-f]+\.[a-z0-9\-._~!$&'()*+,;=:]+)\]|(?:(?:25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(?:25[0-5]|2[0-4]\d|[01]?\d\d?)|(?:[a-z0-9\-._~!$&'()*+,;=]|%[0-9a-f]{2})*)(?::\d*)?(?:\/(?:[a-z0-9\-._~!$&'()*+,;=:@]|%[0-9a-f]{2})*)*|\/(?:(?:[a-z0-9\-._~!$&'()*+,;=:@]|%[0-9a-f]{2})+(?:\/(?:[a-z0-9\-._~!$&'()*+,;=:@]|%[0-9a-f]{2})*)*)?|(?:[a-z0-9\-._~!$&'()*+,;=:@]|%[0-9a-f]{2})+(?:\/(?:[a-z0-9\-._~!$&'()*+,;=:@]|%[0-9a-f]{2})*)*)(?:\?(?:[a-z0-9\-._~!$&'()*+,;=:@/?]|%[0-9a-f]{2})*)?(?:#(?:[a-z0-9\-._~!$&'()*+,;=:@/?]|%[0-9a-f]{2})*)?$/i; -function uri(str) { - // http://jmrware.com/articles/2009/uri_regexp/URI_regex.html + optional protocol + required "." 
- return NOT_URI_FRAGMENT.test(str) && URI.test(str); -} -const BYTE = /^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$/gm; -function byte(str) { - BYTE.lastIndex = 0; - return BYTE.test(str); -} -const MIN_INT32 = -(2 ** 31); -const MAX_INT32 = 2 ** 31 - 1; -function validateInt32(value) { - return Number.isInteger(value) && value <= MAX_INT32 && value >= MIN_INT32; -} -function validateInt64(value) { - // JSON and javascript max Int is 2**53, so any int that passes isInteger is valid for Int64 - return Number.isInteger(value); -} -function validateNumber() { - return true; -} -const Z_ANCHOR = /[^\\]\\Z/; -function regex(str) { - if (Z_ANCHOR.test(str)) - return false; - try { - new RegExp(str); - return true; - } - catch (e) { - return false; - } -} -//# sourceMappingURL=formats.js.map - -/***/ }), - -/***/ 567: -/***/ ((module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const formats_1 = __nccwpck_require__(407); -const limit_1 = __nccwpck_require__(8433); -const codegen_1 = __nccwpck_require__(9179); -const fullName = new codegen_1.Name("fullFormats"); -const fastName = new codegen_1.Name("fastFormats"); -const formatsPlugin = (ajv, opts = { keywords: true }) => { - if (Array.isArray(opts)) { - addFormats(ajv, opts, formats_1.fullFormats, fullName); - return ajv; - } - const [formats, exportName] = opts.mode === "fast" ? [formats_1.fastFormats, fastName] : [formats_1.fullFormats, fullName]; - const list = opts.formats || formats_1.formatNames; - addFormats(ajv, list, formats, exportName); - if (opts.keywords) - limit_1.default(ajv); - return ajv; -}; -formatsPlugin.get = (name, mode = "full") => { - const formats = mode === "fast" ? formats_1.fastFormats : formats_1.fullFormats; - const f = formats[name]; - if (!f) - throw new Error(`Unknown format "${name}"`); - return f; -}; -function addFormats(ajv, list, fs, exportName) { - var _a; - var _b; - (_a = (_b = ajv.opts.code).formats) !== null && _a !== void 0 ? 
_a : (_b.formats = codegen_1._ `require("ajv-formats/dist/formats").${exportName}`); - for (const f of list) - ajv.addFormat(f, fs[f]); -} -module.exports = exports = formatsPlugin; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.default = formatsPlugin; -//# sourceMappingURL=index.js.map - -/***/ }), - -/***/ 8433: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.formatLimitDefinition = void 0; -const ajv_1 = __nccwpck_require__(2426); -const codegen_1 = __nccwpck_require__(9179); -const ops = codegen_1.operators; -const KWDs = { - formatMaximum: { okStr: "<=", ok: ops.LTE, fail: ops.GT }, - formatMinimum: { okStr: ">=", ok: ops.GTE, fail: ops.LT }, - formatExclusiveMaximum: { okStr: "<", ok: ops.LT, fail: ops.GTE }, - formatExclusiveMinimum: { okStr: ">", ok: ops.GT, fail: ops.LTE }, -}; -const error = { - message: ({ keyword, schemaCode }) => codegen_1.str `should be ${KWDs[keyword].okStr} ${schemaCode}`, - params: ({ keyword, schemaCode }) => codegen_1._ `{comparison: ${KWDs[keyword].okStr}, limit: ${schemaCode}}`, -}; -exports.formatLimitDefinition = { - keyword: Object.keys(KWDs), - type: "string", - schemaType: "string", - $data: true, - error, - code(cxt) { - const { gen, data, schemaCode, keyword, it } = cxt; - const { opts, self } = it; - if (!opts.validateFormats) - return; - const fCxt = new ajv_1.KeywordCxt(it, self.RULES.all.format.definition, "format"); - if (fCxt.$data) - validate$DataFormat(); - else - validateFormat(); - function validate$DataFormat() { - const fmts = gen.scopeValue("formats", { - ref: self.formats, - code: opts.code.formats, - }); - const fmt = gen.const("fmt", codegen_1._ `${fmts}[${fCxt.schemaCode}]`); - cxt.fail$data(codegen_1.or(codegen_1._ `typeof ${fmt} != "object"`, codegen_1._ `${fmt} instanceof RegExp`, codegen_1._ `typeof ${fmt}.compare != "function"`, compareCode(fmt))); - } - function validateFormat() { - const format = fCxt.schema; - const fmtDef = self.formats[format]; - if (!fmtDef || fmtDef === true) - return; - if (typeof fmtDef != "object" || - fmtDef instanceof RegExp || - typeof fmtDef.compare != "function") { - throw new Error(`"${keyword}": format "${format}" does not define "compare" function`); - } - const fmt = gen.scopeValue("formats", { - key: format, - ref: fmtDef, - code: opts.code.formats ? 
codegen_1._ `${opts.code.formats}${codegen_1.getProperty(format)}` : undefined, - }); - cxt.fail$data(compareCode(fmt)); - } - function compareCode(fmt) { - return codegen_1._ `${fmt}.compare(${data}, ${schemaCode}) ${KWDs[keyword].fail} 0`; - } - }, - dependencies: ["format"], -}; -const formatLimitPlugin = (ajv) => { - ajv.addKeyword(exports.formatLimitDefinition); - return ajv; -}; -exports.default = formatLimitPlugin; -//# sourceMappingURL=limit.js.map - -/***/ }), - -/***/ 2426: -/***/ ((module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.CodeGen = exports.Name = exports.nil = exports.stringify = exports.str = exports._ = exports.KeywordCxt = void 0; -const core_1 = __nccwpck_require__(2685); -const draft7_1 = __nccwpck_require__(691); -const discriminator_1 = __nccwpck_require__(4025); -const draft7MetaSchema = __nccwpck_require__(278); -const META_SUPPORT_DATA = ["/properties"]; -const META_SCHEMA_ID = "http://json-schema.org/draft-07/schema"; -class Ajv extends core_1.default { - _addVocabularies() { - super._addVocabularies(); - draft7_1.default.forEach((v) => this.addVocabulary(v)); - if (this.opts.discriminator) - this.addKeyword(discriminator_1.default); - } - _addDefaultMetaSchema() { - super._addDefaultMetaSchema(); - if (!this.opts.meta) - return; - const metaSchema = this.opts.$data - ? this.$dataMetaSchema(draft7MetaSchema, META_SUPPORT_DATA) - : draft7MetaSchema; - this.addMetaSchema(metaSchema, META_SCHEMA_ID, false); - this.refs["http://json-schema.org/schema"] = META_SCHEMA_ID; - } - defaultMeta() { - return (this.opts.defaultMeta = - super.defaultMeta() || (this.getSchema(META_SCHEMA_ID) ? META_SCHEMA_ID : undefined)); - } -} -module.exports = exports = Ajv; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.default = Ajv; -var validate_1 = __nccwpck_require__(8955); -Object.defineProperty(exports, "KeywordCxt", ({ enumerable: true, get: function () { return validate_1.KeywordCxt; } })); -var codegen_1 = __nccwpck_require__(9179); -Object.defineProperty(exports, "_", ({ enumerable: true, get: function () { return codegen_1._; } })); -Object.defineProperty(exports, "str", ({ enumerable: true, get: function () { return codegen_1.str; } })); -Object.defineProperty(exports, "stringify", ({ enumerable: true, get: function () { return codegen_1.stringify; } })); -Object.defineProperty(exports, "nil", ({ enumerable: true, get: function () { return codegen_1.nil; } })); -Object.defineProperty(exports, "Name", ({ enumerable: true, get: function () { return codegen_1.Name; } })); -Object.defineProperty(exports, "CodeGen", ({ enumerable: true, get: function () { return codegen_1.CodeGen; } })); -//# sourceMappingURL=ajv.js.map - -/***/ }), - -/***/ 8358: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.regexpCode = exports.getEsmExportName = exports.getProperty = exports.safeStringify = exports.stringify = exports.strConcat = exports.addCodeArg = exports.str = exports._ = exports.nil = exports._Code = exports.Name = exports.IDENTIFIER = exports._CodeOrName = void 0; -class _CodeOrName { -} -exports._CodeOrName = _CodeOrName; -exports.IDENTIFIER = /^[a-z$_][a-z$_0-9]*$/i; -class Name extends _CodeOrName { - constructor(s) { - super(); - if (!exports.IDENTIFIER.test(s)) - throw new Error("CodeGen: name must be a valid identifier"); - this.str = s; - } - toString() { - return 
this.str; - } - emptyStr() { - return false; - } - get names() { - return { [this.str]: 1 }; - } -} -exports.Name = Name; -class _Code extends _CodeOrName { - constructor(code) { - super(); - this._items = typeof code === "string" ? [code] : code; - } - toString() { - return this.str; - } - emptyStr() { - if (this._items.length > 1) - return false; - const item = this._items[0]; - return item === "" || item === '""'; - } - get str() { - var _a; - return ((_a = this._str) !== null && _a !== void 0 ? _a : (this._str = this._items.reduce((s, c) => `${s}${c}`, ""))); - } - get names() { - var _a; - return ((_a = this._names) !== null && _a !== void 0 ? _a : (this._names = this._items.reduce((names, c) => { - if (c instanceof Name) - names[c.str] = (names[c.str] || 0) + 1; - return names; - }, {}))); - } -} -exports._Code = _Code; -exports.nil = new _Code(""); -function _(strs, ...args) { - const code = [strs[0]]; - let i = 0; - while (i < args.length) { - addCodeArg(code, args[i]); - code.push(strs[++i]); - } - return new _Code(code); -} -exports._ = _; -const plus = new _Code("+"); -function str(strs, ...args) { - const expr = [safeStringify(strs[0])]; - let i = 0; - while (i < args.length) { - expr.push(plus); - addCodeArg(expr, args[i]); - expr.push(plus, safeStringify(strs[++i])); - } - optimize(expr); - return new _Code(expr); -} -exports.str = str; -function addCodeArg(code, arg) { - if (arg instanceof _Code) - code.push(...arg._items); - else if (arg instanceof Name) - code.push(arg); - else - code.push(interpolate(arg)); -} -exports.addCodeArg = addCodeArg; -function optimize(expr) { - let i = 1; - while (i < expr.length - 1) { - if (expr[i] === plus) { - const res = mergeExprItems(expr[i - 1], expr[i + 1]); - if (res !== undefined) { - expr.splice(i - 1, 3, res); - continue; - } - expr[i++] = "+"; - } - i++; - } -} -function mergeExprItems(a, b) { - if (b === '""') - return a; - if (a === '""') - return b; - if (typeof a == "string") { - if (b instanceof Name || a[a.length - 1] !== '"') - return; - if (typeof b != "string") - return `${a.slice(0, -1)}${b}"`; - if (b[0] === '"') - return a.slice(0, -1) + b.slice(1); - return; - } - if (typeof b == "string" && b[0] === '"' && !(a instanceof Name)) - return `"${a}${b.slice(1)}`; - return; -} -function strConcat(c1, c2) { - return c2.emptyStr() ? c1 : c1.emptyStr() ? c2 : str `${c1}${c2}`; -} -exports.strConcat = strConcat; -// TODO do not allow arrays here -function interpolate(x) { - return typeof x == "number" || typeof x == "boolean" || x === null - ? x - : safeStringify(Array.isArray(x) ? x.join(",") : x); -} -function stringify(x) { - return new _Code(safeStringify(x)); -} -exports.stringify = stringify; -function safeStringify(x) { - return JSON.stringify(x) - .replace(/\u2028/g, "\\u2028") - .replace(/\u2029/g, "\\u2029"); -} -exports.safeStringify = safeStringify; -function getProperty(key) { - return typeof key == "string" && exports.IDENTIFIER.test(key) ? 
new _Code(`.${key}`) : _ `[${key}]`; -} -exports.getProperty = getProperty; -//Does best effort to format the name properly -function getEsmExportName(key) { - if (typeof key == "string" && exports.IDENTIFIER.test(key)) { - return new _Code(`${key}`); - } - throw new Error(`CodeGen: invalid export name: ${key}, use explicit $id name mapping`); -} -exports.getEsmExportName = getEsmExportName; -function regexpCode(rx) { - return new _Code(rx.toString()); -} -exports.regexpCode = regexpCode; -//# sourceMappingURL=code.js.map - -/***/ }), - -/***/ 9179: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.or = exports.and = exports.not = exports.CodeGen = exports.operators = exports.varKinds = exports.ValueScopeName = exports.ValueScope = exports.Scope = exports.Name = exports.regexpCode = exports.stringify = exports.getProperty = exports.nil = exports.strConcat = exports.str = exports._ = void 0; -const code_1 = __nccwpck_require__(8358); -const scope_1 = __nccwpck_require__(2893); -var code_2 = __nccwpck_require__(8358); -Object.defineProperty(exports, "_", ({ enumerable: true, get: function () { return code_2._; } })); -Object.defineProperty(exports, "str", ({ enumerable: true, get: function () { return code_2.str; } })); -Object.defineProperty(exports, "strConcat", ({ enumerable: true, get: function () { return code_2.strConcat; } })); -Object.defineProperty(exports, "nil", ({ enumerable: true, get: function () { return code_2.nil; } })); -Object.defineProperty(exports, "getProperty", ({ enumerable: true, get: function () { return code_2.getProperty; } })); -Object.defineProperty(exports, "stringify", ({ enumerable: true, get: function () { return code_2.stringify; } })); -Object.defineProperty(exports, "regexpCode", ({ enumerable: true, get: function () { return code_2.regexpCode; } })); -Object.defineProperty(exports, "Name", ({ enumerable: true, get: function () { return code_2.Name; } })); -var scope_2 = __nccwpck_require__(2893); -Object.defineProperty(exports, "Scope", ({ enumerable: true, get: function () { return scope_2.Scope; } })); -Object.defineProperty(exports, "ValueScope", ({ enumerable: true, get: function () { return scope_2.ValueScope; } })); -Object.defineProperty(exports, "ValueScopeName", ({ enumerable: true, get: function () { return scope_2.ValueScopeName; } })); -Object.defineProperty(exports, "varKinds", ({ enumerable: true, get: function () { return scope_2.varKinds; } })); -exports.operators = { - GT: new code_1._Code(">"), - GTE: new code_1._Code(">="), - LT: new code_1._Code("<"), - LTE: new code_1._Code("<="), - EQ: new code_1._Code("==="), - NEQ: new code_1._Code("!=="), - NOT: new code_1._Code("!"), - OR: new code_1._Code("||"), - AND: new code_1._Code("&&"), - ADD: new code_1._Code("+"), -}; -class Node { - optimizeNodes() { - return this; - } - optimizeNames(_names, _constants) { - return this; - } -} -class Def extends Node { - constructor(varKind, name, rhs) { - super(); - this.varKind = varKind; - this.name = name; - this.rhs = rhs; - } - render({ es5, _n }) { - const varKind = es5 ? scope_1.varKinds.var : this.varKind; - const rhs = this.rhs === undefined ? 
"" : ` = ${this.rhs}`; - return `${varKind} ${this.name}${rhs};` + _n; - } - optimizeNames(names, constants) { - if (!names[this.name.str]) - return; - if (this.rhs) - this.rhs = optimizeExpr(this.rhs, names, constants); - return this; - } - get names() { - return this.rhs instanceof code_1._CodeOrName ? this.rhs.names : {}; - } -} -class Assign extends Node { - constructor(lhs, rhs, sideEffects) { - super(); - this.lhs = lhs; - this.rhs = rhs; - this.sideEffects = sideEffects; - } - render({ _n }) { - return `${this.lhs} = ${this.rhs};` + _n; - } - optimizeNames(names, constants) { - if (this.lhs instanceof code_1.Name && !names[this.lhs.str] && !this.sideEffects) - return; - this.rhs = optimizeExpr(this.rhs, names, constants); - return this; - } - get names() { - const names = this.lhs instanceof code_1.Name ? {} : { ...this.lhs.names }; - return addExprNames(names, this.rhs); - } -} -class AssignOp extends Assign { - constructor(lhs, op, rhs, sideEffects) { - super(lhs, rhs, sideEffects); - this.op = op; - } - render({ _n }) { - return `${this.lhs} ${this.op}= ${this.rhs};` + _n; - } -} -class Label extends Node { - constructor(label) { - super(); - this.label = label; - this.names = {}; - } - render({ _n }) { - return `${this.label}:` + _n; - } -} -class Break extends Node { - constructor(label) { - super(); - this.label = label; - this.names = {}; - } - render({ _n }) { - const label = this.label ? ` ${this.label}` : ""; - return `break${label};` + _n; - } -} -class Throw extends Node { - constructor(error) { - super(); - this.error = error; - } - render({ _n }) { - return `throw ${this.error};` + _n; - } - get names() { - return this.error.names; - } -} -class AnyCode extends Node { - constructor(code) { - super(); - this.code = code; - } - render({ _n }) { - return `${this.code};` + _n; - } - optimizeNodes() { - return `${this.code}` ? this : undefined; - } - optimizeNames(names, constants) { - this.code = optimizeExpr(this.code, names, constants); - return this; - } - get names() { - return this.code instanceof code_1._CodeOrName ? this.code.names : {}; - } -} -class ParentNode extends Node { - constructor(nodes = []) { - super(); - this.nodes = nodes; - } - render(opts) { - return this.nodes.reduce((code, n) => code + n.render(opts), ""); - } - optimizeNodes() { - const { nodes } = this; - let i = nodes.length; - while (i--) { - const n = nodes[i].optimizeNodes(); - if (Array.isArray(n)) - nodes.splice(i, 1, ...n); - else if (n) - nodes[i] = n; - else - nodes.splice(i, 1); - } - return nodes.length > 0 ? this : undefined; - } - optimizeNames(names, constants) { - const { nodes } = this; - let i = nodes.length; - while (i--) { - // iterating backwards improves 1-pass optimization - const n = nodes[i]; - if (n.optimizeNames(names, constants)) - continue; - subtractNames(names, n.names); - nodes.splice(i, 1); - } - return nodes.length > 0 ? 
this : undefined; - } - get names() { - return this.nodes.reduce((names, n) => addNames(names, n.names), {}); - } -} -class BlockNode extends ParentNode { - render(opts) { - return "{" + opts._n + super.render(opts) + "}" + opts._n; - } -} -class Root extends ParentNode { -} -class Else extends BlockNode { -} -Else.kind = "else"; -class If extends BlockNode { - constructor(condition, nodes) { - super(nodes); - this.condition = condition; - } - render(opts) { - let code = `if(${this.condition})` + super.render(opts); - if (this.else) - code += "else " + this.else.render(opts); - return code; - } - optimizeNodes() { - super.optimizeNodes(); - const cond = this.condition; - if (cond === true) - return this.nodes; // else is ignored here - let e = this.else; - if (e) { - const ns = e.optimizeNodes(); - e = this.else = Array.isArray(ns) ? new Else(ns) : ns; - } - if (e) { - if (cond === false) - return e instanceof If ? e : e.nodes; - if (this.nodes.length) - return this; - return new If(not(cond), e instanceof If ? [e] : e.nodes); - } - if (cond === false || !this.nodes.length) - return undefined; - return this; - } - optimizeNames(names, constants) { - var _a; - this.else = (_a = this.else) === null || _a === void 0 ? void 0 : _a.optimizeNames(names, constants); - if (!(super.optimizeNames(names, constants) || this.else)) - return; - this.condition = optimizeExpr(this.condition, names, constants); - return this; - } - get names() { - const names = super.names; - addExprNames(names, this.condition); - if (this.else) - addNames(names, this.else.names); - return names; - } -} -If.kind = "if"; -class For extends BlockNode { -} -For.kind = "for"; -class ForLoop extends For { - constructor(iteration) { - super(); - this.iteration = iteration; - } - render(opts) { - return `for(${this.iteration})` + super.render(opts); - } - optimizeNames(names, constants) { - if (!super.optimizeNames(names, constants)) - return; - this.iteration = optimizeExpr(this.iteration, names, constants); - return this; - } - get names() { - return addNames(super.names, this.iteration.names); - } -} -class ForRange extends For { - constructor(varKind, name, from, to) { - super(); - this.varKind = varKind; - this.name = name; - this.from = from; - this.to = to; - } - render(opts) { - const varKind = opts.es5 ? scope_1.varKinds.var : this.varKind; - const { name, from, to } = this; - return `for(${varKind} ${name}=${from}; ${name}<${to}; ${name}++)` + super.render(opts); - } - get names() { - const names = addExprNames(super.names, this.from); - return addExprNames(names, this.to); - } -} -class ForIter extends For { - constructor(loop, varKind, name, iterable) { - super(); - this.loop = loop; - this.varKind = varKind; - this.name = name; - this.iterable = iterable; - } - render(opts) { - return `for(${this.varKind} ${this.name} ${this.loop} ${this.iterable})` + super.render(opts); - } - optimizeNames(names, constants) { - if (!super.optimizeNames(names, constants)) - return; - this.iterable = optimizeExpr(this.iterable, names, constants); - return this; - } - get names() { - return addNames(super.names, this.iterable.names); - } -} -class Func extends BlockNode { - constructor(name, args, async) { - super(); - this.name = name; - this.args = args; - this.async = async; - } - render(opts) { - const _async = this.async ? 
"async " : ""; - return `${_async}function ${this.name}(${this.args})` + super.render(opts); - } -} -Func.kind = "func"; -class Return extends ParentNode { - render(opts) { - return "return " + super.render(opts); - } -} -Return.kind = "return"; -class Try extends BlockNode { - render(opts) { - let code = "try" + super.render(opts); - if (this.catch) - code += this.catch.render(opts); - if (this.finally) - code += this.finally.render(opts); - return code; - } - optimizeNodes() { - var _a, _b; - super.optimizeNodes(); - (_a = this.catch) === null || _a === void 0 ? void 0 : _a.optimizeNodes(); - (_b = this.finally) === null || _b === void 0 ? void 0 : _b.optimizeNodes(); - return this; - } - optimizeNames(names, constants) { - var _a, _b; - super.optimizeNames(names, constants); - (_a = this.catch) === null || _a === void 0 ? void 0 : _a.optimizeNames(names, constants); - (_b = this.finally) === null || _b === void 0 ? void 0 : _b.optimizeNames(names, constants); - return this; - } - get names() { - const names = super.names; - if (this.catch) - addNames(names, this.catch.names); - if (this.finally) - addNames(names, this.finally.names); - return names; - } -} -class Catch extends BlockNode { - constructor(error) { - super(); - this.error = error; - } - render(opts) { - return `catch(${this.error})` + super.render(opts); - } -} -Catch.kind = "catch"; -class Finally extends BlockNode { - render(opts) { - return "finally" + super.render(opts); - } -} -Finally.kind = "finally"; -class CodeGen { - constructor(extScope, opts = {}) { - this._values = {}; - this._blockStarts = []; - this._constants = {}; - this.opts = { ...opts, _n: opts.lines ? "\n" : "" }; - this._extScope = extScope; - this._scope = new scope_1.Scope({ parent: extScope }); - this._nodes = [new Root()]; - } - toString() { - return this._root.render(this.opts); - } - // returns unique name in the internal scope - name(prefix) { - return this._scope.name(prefix); - } - // reserves unique name in the external scope - scopeName(prefix) { - return this._extScope.name(prefix); - } - // reserves unique name in the external scope and assigns value to it - scopeValue(prefixOrName, value) { - const name = this._extScope.value(prefixOrName, value); - const vs = this._values[name.prefix] || (this._values[name.prefix] = new Set()); - vs.add(name); - return name; - } - getScopeValue(prefix, keyOrRef) { - return this._extScope.getValue(prefix, keyOrRef); - } - // return code that assigns values in the external scope to the names that are used internally - // (same names that were returned by gen.scopeName or gen.scopeValue) - scopeRefs(scopeName) { - return this._extScope.scopeRefs(scopeName, this._values); - } - scopeCode() { - return this._extScope.scopeCode(this._values); - } - _def(varKind, nameOrPrefix, rhs, constant) { - const name = this._scope.toName(nameOrPrefix); - if (rhs !== undefined && constant) - this._constants[name.str] = rhs; - this._leafNode(new Def(varKind, name, rhs)); - return name; - } - // `const` declaration (`var` in es5 mode) - const(nameOrPrefix, rhs, _constant) { - return this._def(scope_1.varKinds.const, nameOrPrefix, rhs, _constant); - } - // `let` declaration with optional assignment (`var` in es5 mode) - let(nameOrPrefix, rhs, _constant) { - return this._def(scope_1.varKinds.let, nameOrPrefix, rhs, _constant); - } - // `var` declaration with optional assignment - var(nameOrPrefix, rhs, _constant) { - return this._def(scope_1.varKinds.var, nameOrPrefix, rhs, _constant); - } - // assignment code - assign(lhs, 
rhs, sideEffects) { - return this._leafNode(new Assign(lhs, rhs, sideEffects)); - } - // `+=` code - add(lhs, rhs) { - return this._leafNode(new AssignOp(lhs, exports.operators.ADD, rhs)); - } - // appends passed SafeExpr to code or executes Block - code(c) { - if (typeof c == "function") - c(); - else if (c !== code_1.nil) - this._leafNode(new AnyCode(c)); - return this; - } - // returns code for object literal for the passed argument list of key-value pairs - object(...keyValues) { - const code = ["{"]; - for (const [key, value] of keyValues) { - if (code.length > 1) - code.push(","); - code.push(key); - if (key !== value || this.opts.es5) { - code.push(":"); - (0, code_1.addCodeArg)(code, value); - } - } - code.push("}"); - return new code_1._Code(code); - } - // `if` clause (or statement if `thenBody` and, optionally, `elseBody` are passed) - if(condition, thenBody, elseBody) { - this._blockNode(new If(condition)); - if (thenBody && elseBody) { - this.code(thenBody).else().code(elseBody).endIf(); - } - else if (thenBody) { - this.code(thenBody).endIf(); - } - else if (elseBody) { - throw new Error('CodeGen: "else" body without "then" body'); - } - return this; - } - // `else if` clause - invalid without `if` or after `else` clauses - elseIf(condition) { - return this._elseNode(new If(condition)); - } - // `else` clause - only valid after `if` or `else if` clauses - else() { - return this._elseNode(new Else()); - } - // end `if` statement (needed if gen.if was used only with condition) - endIf() { - return this._endBlockNode(If, Else); - } - _for(node, forBody) { - this._blockNode(node); - if (forBody) - this.code(forBody).endFor(); - return this; - } - // a generic `for` clause (or statement if `forBody` is passed) - for(iteration, forBody) { - return this._for(new ForLoop(iteration), forBody); - } - // `for` statement for a range of values - forRange(nameOrPrefix, from, to, forBody, varKind = this.opts.es5 ? scope_1.varKinds.var : scope_1.varKinds.let) { - const name = this._scope.toName(nameOrPrefix); - return this._for(new ForRange(varKind, name, from, to), () => forBody(name)); - } - // `for-of` statement (in es5 mode replace with a normal for loop) - forOf(nameOrPrefix, iterable, forBody, varKind = scope_1.varKinds.const) { - const name = this._scope.toName(nameOrPrefix); - if (this.opts.es5) { - const arr = iterable instanceof code_1.Name ? iterable : this.var("_arr", iterable); - return this.forRange("_i", 0, (0, code_1._) `${arr}.length`, (i) => { - this.var(name, (0, code_1._) `${arr}[${i}]`); - forBody(name); - }); - } - return this._for(new ForIter("of", varKind, name, iterable), () => forBody(name)); - } - // `for-in` statement. - // With option `ownProperties` replaced with a `for-of` loop for object keys - forIn(nameOrPrefix, obj, forBody, varKind = this.opts.es5 ? 
scope_1.varKinds.var : scope_1.varKinds.const) { - if (this.opts.ownProperties) { - return this.forOf(nameOrPrefix, (0, code_1._) `Object.keys(${obj})`, forBody); - } - const name = this._scope.toName(nameOrPrefix); - return this._for(new ForIter("in", varKind, name, obj), () => forBody(name)); - } - // end `for` loop - endFor() { - return this._endBlockNode(For); - } - // `label` statement - label(label) { - return this._leafNode(new Label(label)); - } - // `break` statement - break(label) { - return this._leafNode(new Break(label)); - } - // `return` statement - return(value) { - const node = new Return(); - this._blockNode(node); - this.code(value); - if (node.nodes.length !== 1) - throw new Error('CodeGen: "return" should have one node'); - return this._endBlockNode(Return); - } - // `try` statement - try(tryBody, catchCode, finallyCode) { - if (!catchCode && !finallyCode) - throw new Error('CodeGen: "try" without "catch" and "finally"'); - const node = new Try(); - this._blockNode(node); - this.code(tryBody); - if (catchCode) { - const error = this.name("e"); - this._currNode = node.catch = new Catch(error); - catchCode(error); - } - if (finallyCode) { - this._currNode = node.finally = new Finally(); - this.code(finallyCode); - } - return this._endBlockNode(Catch, Finally); - } - // `throw` statement - throw(error) { - return this._leafNode(new Throw(error)); - } - // start self-balancing block - block(body, nodeCount) { - this._blockStarts.push(this._nodes.length); - if (body) - this.code(body).endBlock(nodeCount); - return this; - } - // end the current self-balancing block - endBlock(nodeCount) { - const len = this._blockStarts.pop(); - if (len === undefined) - throw new Error("CodeGen: not in self-balancing block"); - const toClose = this._nodes.length - len; - if (toClose < 0 || (nodeCount !== undefined && toClose !== nodeCount)) { - throw new Error(`CodeGen: wrong number of nodes: ${toClose} vs ${nodeCount} expected`); - } - this._nodes.length = len; - return this; - } - // `function` heading (or definition if funcBody is passed) - func(name, args = code_1.nil, async, funcBody) { - this._blockNode(new Func(name, args, async)); - if (funcBody) - this.code(funcBody).endFunc(); - return this; - } - // end function definition - endFunc() { - return this._endBlockNode(Func); - } - optimize(n = 1) { - while (n-- > 0) { - this._root.optimizeNodes(); - this._root.optimizeNames(this._root.names, this._constants); - } - } - _leafNode(node) { - this._currNode.nodes.push(node); - return this; - } - _blockNode(node) { - this._currNode.nodes.push(node); - this._nodes.push(node); - } - _endBlockNode(N1, N2) { - const n = this._currNode; - if (n instanceof N1 || (N2 && n instanceof N2)) { - this._nodes.pop(); - return this; - } - throw new Error(`CodeGen: not in block "${N2 ? `${N1.kind}/${N2.kind}` : N1.kind}"`); - } - _elseNode(node) { - const n = this._currNode; - if (!(n instanceof If)) { - throw new Error('CodeGen: "else" without "if"'); - } - this._currNode = n.else = node; - return this; - } - get _root() { - return this._nodes[0]; - } - get _currNode() { - const ns = this._nodes; - return ns[ns.length - 1]; - } - set _currNode(node) { - const ns = this._nodes; - ns[ns.length - 1] = node; - } -} -exports.CodeGen = CodeGen; -function addNames(names, from) { - for (const n in from) - names[n] = (names[n] || 0) + (from[n] || 0); - return names; -} -function addExprNames(names, from) { - return from instanceof code_1._CodeOrName ? 
addNames(names, from.names) : names; -} -function optimizeExpr(expr, names, constants) { - if (expr instanceof code_1.Name) - return replaceName(expr); - if (!canOptimize(expr)) - return expr; - return new code_1._Code(expr._items.reduce((items, c) => { - if (c instanceof code_1.Name) - c = replaceName(c); - if (c instanceof code_1._Code) - items.push(...c._items); - else - items.push(c); - return items; - }, [])); - function replaceName(n) { - const c = constants[n.str]; - if (c === undefined || names[n.str] !== 1) - return n; - delete names[n.str]; - return c; - } - function canOptimize(e) { - return (e instanceof code_1._Code && - e._items.some((c) => c instanceof code_1.Name && names[c.str] === 1 && constants[c.str] !== undefined)); - } -} -function subtractNames(names, from) { - for (const n in from) - names[n] = (names[n] || 0) - (from[n] || 0); -} -function not(x) { - return typeof x == "boolean" || typeof x == "number" || x === null ? !x : (0, code_1._) `!${par(x)}`; -} -exports.not = not; -const andCode = mappend(exports.operators.AND); -// boolean AND (&&) expression with the passed arguments -function and(...args) { - return args.reduce(andCode); -} -exports.and = and; -const orCode = mappend(exports.operators.OR); -// boolean OR (||) expression with the passed arguments -function or(...args) { - return args.reduce(orCode); -} -exports.or = or; -function mappend(op) { - return (x, y) => (x === code_1.nil ? y : y === code_1.nil ? x : (0, code_1._) `${par(x)} ${op} ${par(y)}`); -} -function par(x) { - return x instanceof code_1.Name ? x : (0, code_1._) `(${x})`; -} -//# sourceMappingURL=index.js.map - -/***/ }), - -/***/ 2893: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.ValueScope = exports.ValueScopeName = exports.Scope = exports.varKinds = exports.UsedValueState = void 0; -const code_1 = __nccwpck_require__(8358); -class ValueError extends Error { - constructor(name) { - super(`CodeGen: "code" for ${name} not defined`); - this.value = name.value; - } -} -var UsedValueState; -(function (UsedValueState) { - UsedValueState[UsedValueState["Started"] = 0] = "Started"; - UsedValueState[UsedValueState["Completed"] = 1] = "Completed"; -})(UsedValueState = exports.UsedValueState || (exports.UsedValueState = {})); -exports.varKinds = { - const: new code_1.Name("const"), - let: new code_1.Name("let"), - var: new code_1.Name("var"), -}; -class Scope { - constructor({ prefixes, parent } = {}) { - this._names = {}; - this._prefixes = prefixes; - this._parent = parent; - } - toName(nameOrPrefix) { - return nameOrPrefix instanceof code_1.Name ? nameOrPrefix : this.name(nameOrPrefix); - } - name(prefix) { - return new code_1.Name(this._newName(prefix)); - } - _newName(prefix) { - const ng = this._names[prefix] || this._nameGroup(prefix); - return `${prefix}${ng.index++}`; - } - _nameGroup(prefix) { - var _a, _b; - if (((_b = (_a = this._parent) === null || _a === void 0 ? void 0 : _a._prefixes) === null || _b === void 0 ? 
void 0 : _b.has(prefix)) || (this._prefixes && !this._prefixes.has(prefix))) { - throw new Error(`CodeGen: prefix "${prefix}" is not allowed in this scope`); - } - return (this._names[prefix] = { prefix, index: 0 }); - } -} -exports.Scope = Scope; -class ValueScopeName extends code_1.Name { - constructor(prefix, nameStr) { - super(nameStr); - this.prefix = prefix; - } - setValue(value, { property, itemIndex }) { - this.value = value; - this.scopePath = (0, code_1._) `.${new code_1.Name(property)}[${itemIndex}]`; - } -} -exports.ValueScopeName = ValueScopeName; -const line = (0, code_1._) `\n`; -class ValueScope extends Scope { - constructor(opts) { - super(opts); - this._values = {}; - this._scope = opts.scope; - this.opts = { ...opts, _n: opts.lines ? line : code_1.nil }; - } - get() { - return this._scope; - } - name(prefix) { - return new ValueScopeName(prefix, this._newName(prefix)); - } - value(nameOrPrefix, value) { - var _a; - if (value.ref === undefined) - throw new Error("CodeGen: ref must be passed in value"); - const name = this.toName(nameOrPrefix); - const { prefix } = name; - const valueKey = (_a = value.key) !== null && _a !== void 0 ? _a : value.ref; - let vs = this._values[prefix]; - if (vs) { - const _name = vs.get(valueKey); - if (_name) - return _name; - } - else { - vs = this._values[prefix] = new Map(); - } - vs.set(valueKey, name); - const s = this._scope[prefix] || (this._scope[prefix] = []); - const itemIndex = s.length; - s[itemIndex] = value.ref; - name.setValue(value, { property: prefix, itemIndex }); - return name; - } - getValue(prefix, keyOrRef) { - const vs = this._values[prefix]; - if (!vs) - return; - return vs.get(keyOrRef); - } - scopeRefs(scopeName, values = this._values) { - return this._reduceValues(values, (name) => { - if (name.scopePath === undefined) - throw new Error(`CodeGen: name "${name}" has no value`); - return (0, code_1._) `${scopeName}${name.scopePath}`; - }); - } - scopeCode(values = this._values, usedValues, getCode) { - return this._reduceValues(values, (name) => { - if (name.value === undefined) - throw new Error(`CodeGen: name "${name}" has no value`); - return name.value.code; - }, usedValues, getCode); - } - _reduceValues(values, valueCode, usedValues = {}, getCode) { - let code = code_1.nil; - for (const prefix in values) { - const vs = values[prefix]; - if (!vs) - continue; - const nameSet = (usedValues[prefix] = usedValues[prefix] || new Map()); - vs.forEach((name) => { - if (nameSet.has(name)) - return; - nameSet.set(name, UsedValueState.Started); - let c = valueCode(name); - if (c) { - const def = this.opts.es5 ? exports.varKinds.var : exports.varKinds.const; - code = (0, code_1._) `${code}${def} ${name} = ${c};${this.opts._n}`; - } - else if ((c = getCode === null || getCode === void 0 ? 
void 0 : getCode(name))) { - code = (0, code_1._) `${code}${c}${this.opts._n}`; - } - else { - throw new ValueError(name); - } - nameSet.set(name, UsedValueState.Completed); - }); - } - return code; - } -} -exports.ValueScope = ValueScope; -//# sourceMappingURL=scope.js.map - -/***/ }), - -/***/ 6150: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.extendErrors = exports.resetErrorsCount = exports.reportExtraError = exports.reportError = exports.keyword$DataError = exports.keywordError = void 0; -const codegen_1 = __nccwpck_require__(9179); -const util_1 = __nccwpck_require__(3439); -const names_1 = __nccwpck_require__(50); -exports.keywordError = { - message: ({ keyword }) => (0, codegen_1.str) `must pass "${keyword}" keyword validation`, -}; -exports.keyword$DataError = { - message: ({ keyword, schemaType }) => schemaType - ? (0, codegen_1.str) `"${keyword}" keyword must be ${schemaType} ($data)` - : (0, codegen_1.str) `"${keyword}" keyword is invalid ($data)`, -}; -function reportError(cxt, error = exports.keywordError, errorPaths, overrideAllErrors) { - const { it } = cxt; - const { gen, compositeRule, allErrors } = it; - const errObj = errorObjectCode(cxt, error, errorPaths); - if (overrideAllErrors !== null && overrideAllErrors !== void 0 ? overrideAllErrors : (compositeRule || allErrors)) { - addError(gen, errObj); - } - else { - returnErrors(it, (0, codegen_1._) `[${errObj}]`); - } -} -exports.reportError = reportError; -function reportExtraError(cxt, error = exports.keywordError, errorPaths) { - const { it } = cxt; - const { gen, compositeRule, allErrors } = it; - const errObj = errorObjectCode(cxt, error, errorPaths); - addError(gen, errObj); - if (!(compositeRule || allErrors)) { - returnErrors(it, names_1.default.vErrors); - } -} -exports.reportExtraError = reportExtraError; -function resetErrorsCount(gen, errsCount) { - gen.assign(names_1.default.errors, errsCount); - gen.if((0, codegen_1._) `${names_1.default.vErrors} !== null`, () => gen.if(errsCount, () => gen.assign((0, codegen_1._) `${names_1.default.vErrors}.length`, errsCount), () => gen.assign(names_1.default.vErrors, null))); -} -exports.resetErrorsCount = resetErrorsCount; -function extendErrors({ gen, keyword, schemaValue, data, errsCount, it, }) { - /* istanbul ignore if */ - if (errsCount === undefined) - throw new Error("ajv implementation error"); - const err = gen.name("err"); - gen.forRange("i", errsCount, names_1.default.errors, (i) => { - gen.const(err, (0, codegen_1._) `${names_1.default.vErrors}[${i}]`); - gen.if((0, codegen_1._) `${err}.instancePath === undefined`, () => gen.assign((0, codegen_1._) `${err}.instancePath`, (0, codegen_1.strConcat)(names_1.default.instancePath, it.errorPath))); - gen.assign((0, codegen_1._) `${err}.schemaPath`, (0, codegen_1.str) `${it.errSchemaPath}/${keyword}`); - if (it.opts.verbose) { - gen.assign((0, codegen_1._) `${err}.schema`, schemaValue); - gen.assign((0, codegen_1._) `${err}.data`, data); - } - }); -} -exports.extendErrors = extendErrors; -function addError(gen, errObj) { - const err = gen.const("err", errObj); - gen.if((0, codegen_1._) `${names_1.default.vErrors} === null`, () => gen.assign(names_1.default.vErrors, (0, codegen_1._) `[${err}]`), (0, codegen_1._) `${names_1.default.vErrors}.push(${err})`); - gen.code((0, codegen_1._) `${names_1.default.errors}++`); -} -function returnErrors(it, errs) { - const { gen, validateName, schemaEnv } = it; - 
if (schemaEnv.$async) { - gen.throw((0, codegen_1._) `new ${it.ValidationError}(${errs})`); - } - else { - gen.assign((0, codegen_1._) `${validateName}.errors`, errs); - gen.return(false); - } -} -const E = { - keyword: new codegen_1.Name("keyword"), - schemaPath: new codegen_1.Name("schemaPath"), - params: new codegen_1.Name("params"), - propertyName: new codegen_1.Name("propertyName"), - message: new codegen_1.Name("message"), - schema: new codegen_1.Name("schema"), - parentSchema: new codegen_1.Name("parentSchema"), -}; -function errorObjectCode(cxt, error, errorPaths) { - const { createErrors } = cxt.it; - if (createErrors === false) - return (0, codegen_1._) `{}`; - return errorObject(cxt, error, errorPaths); -} -function errorObject(cxt, error, errorPaths = {}) { - const { gen, it } = cxt; - const keyValues = [ - errorInstancePath(it, errorPaths), - errorSchemaPath(cxt, errorPaths), - ]; - extraErrorProps(cxt, error, keyValues); - return gen.object(...keyValues); -} -function errorInstancePath({ errorPath }, { instancePath }) { - const instPath = instancePath - ? (0, codegen_1.str) `${errorPath}${(0, util_1.getErrorPath)(instancePath, util_1.Type.Str)}` - : errorPath; - return [names_1.default.instancePath, (0, codegen_1.strConcat)(names_1.default.instancePath, instPath)]; -} -function errorSchemaPath({ keyword, it: { errSchemaPath } }, { schemaPath, parentSchema }) { - let schPath = parentSchema ? errSchemaPath : (0, codegen_1.str) `${errSchemaPath}/${keyword}`; - if (schemaPath) { - schPath = (0, codegen_1.str) `${schPath}${(0, util_1.getErrorPath)(schemaPath, util_1.Type.Str)}`; - } - return [E.schemaPath, schPath]; -} -function extraErrorProps(cxt, { params, message }, keyValues) { - const { keyword, data, schemaValue, it } = cxt; - const { opts, propertyName, topSchemaRef, schemaPath } = it; - keyValues.push([E.keyword, keyword], [E.params, typeof params == "function" ? params(cxt) : params || (0, codegen_1._) `{}`]); - if (opts.messages) { - keyValues.push([E.message, typeof message == "function" ? message(cxt) : message]); - } - if (opts.verbose) { - keyValues.push([E.schema, schemaValue], [E.parentSchema, (0, codegen_1._) `${topSchemaRef}${schemaPath}`], [names_1.default.data, data]); - } - if (propertyName) - keyValues.push([E.propertyName, propertyName]); -} -//# sourceMappingURL=errors.js.map - -/***/ }), - -/***/ 813: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.resolveSchema = exports.getCompilingSchema = exports.resolveRef = exports.compileSchema = exports.SchemaEnv = void 0; -const codegen_1 = __nccwpck_require__(9179); -const validation_error_1 = __nccwpck_require__(7616); -const names_1 = __nccwpck_require__(50); -const resolve_1 = __nccwpck_require__(6646); -const util_1 = __nccwpck_require__(3439); -const validate_1 = __nccwpck_require__(8955); -class SchemaEnv { - constructor(env) { - var _a; - this.refs = {}; - this.dynamicAnchors = {}; - let schema; - if (typeof env.schema == "object") - schema = env.schema; - this.schema = env.schema; - this.schemaId = env.schemaId; - this.root = env.root || this; - this.baseId = (_a = env.baseId) !== null && _a !== void 0 ? _a : (0, resolve_1.normalizeId)(schema === null || schema === void 0 ? void 0 : schema[env.schemaId || "$id"]); - this.schemaPath = env.schemaPath; - this.localRefs = env.localRefs; - this.meta = env.meta; - this.$async = schema === null || schema === void 0 ? 
void 0 : schema.$async; - this.refs = {}; - } -} -exports.SchemaEnv = SchemaEnv; -// let codeSize = 0 -// let nodeCount = 0 -// Compiles schema in SchemaEnv -function compileSchema(sch) { - // TODO refactor - remove compilations - const _sch = getCompilingSchema.call(this, sch); - if (_sch) - return _sch; - const rootId = (0, resolve_1.getFullPath)(this.opts.uriResolver, sch.root.baseId); // TODO if getFullPath removed 1 tests fails - const { es5, lines } = this.opts.code; - const { ownProperties } = this.opts; - const gen = new codegen_1.CodeGen(this.scope, { es5, lines, ownProperties }); - let _ValidationError; - if (sch.$async) { - _ValidationError = gen.scopeValue("Error", { - ref: validation_error_1.default, - code: (0, codegen_1._) `require("ajv/dist/runtime/validation_error").default`, - }); - } - const validateName = gen.scopeName("validate"); - sch.validateName = validateName; - const schemaCxt = { - gen, - allErrors: this.opts.allErrors, - data: names_1.default.data, - parentData: names_1.default.parentData, - parentDataProperty: names_1.default.parentDataProperty, - dataNames: [names_1.default.data], - dataPathArr: [codegen_1.nil], - dataLevel: 0, - dataTypes: [], - definedProperties: new Set(), - topSchemaRef: gen.scopeValue("schema", this.opts.code.source === true - ? { ref: sch.schema, code: (0, codegen_1.stringify)(sch.schema) } - : { ref: sch.schema }), - validateName, - ValidationError: _ValidationError, - schema: sch.schema, - schemaEnv: sch, - rootId, - baseId: sch.baseId || rootId, - schemaPath: codegen_1.nil, - errSchemaPath: sch.schemaPath || (this.opts.jtd ? "" : "#"), - errorPath: (0, codegen_1._) `""`, - opts: this.opts, - self: this, - }; - let sourceCode; - try { - this._compilations.add(sch); - (0, validate_1.validateFunctionCode)(schemaCxt); - gen.optimize(this.opts.code.optimize); - // gen.optimize(1) - const validateCode = gen.toString(); - sourceCode = `${gen.scopeRefs(names_1.default.scope)}return ${validateCode}`; - // console.log((codeSize += sourceCode.length), (nodeCount += gen.nodeCount)) - if (this.opts.code.process) - sourceCode = this.opts.code.process(sourceCode, sch); - // console.log("\n\n\n *** \n", sourceCode) - const makeValidate = new Function(`${names_1.default.self}`, `${names_1.default.scope}`, sourceCode); - const validate = makeValidate(this, this.scope.get()); - this.scope.value(validateName, { ref: validate }); - validate.errors = null; - validate.schema = sch.schema; - validate.schemaEnv = sch; - if (sch.$async) - validate.$async = true; - if (this.opts.code.source === true) { - validate.source = { validateName, validateCode, scopeValues: gen._values }; - } - if (this.opts.unevaluated) { - const { props, items } = schemaCxt; - validate.evaluated = { - props: props instanceof codegen_1.Name ? undefined : props, - items: items instanceof codegen_1.Name ? 
undefined : items, - dynamicProps: props instanceof codegen_1.Name, - dynamicItems: items instanceof codegen_1.Name, - }; - if (validate.source) - validate.source.evaluated = (0, codegen_1.stringify)(validate.evaluated); - } - sch.validate = validate; - return sch; - } - catch (e) { - delete sch.validate; - delete sch.validateName; - if (sourceCode) - this.logger.error("Error compiling schema, function code:", sourceCode); - // console.log("\n\n\n *** \n", sourceCode, this.opts) - throw e; - } - finally { - this._compilations.delete(sch); - } -} -exports.compileSchema = compileSchema; -function resolveRef(root, baseId, ref) { - var _a; - ref = (0, resolve_1.resolveUrl)(this.opts.uriResolver, baseId, ref); - const schOrFunc = root.refs[ref]; - if (schOrFunc) - return schOrFunc; - let _sch = resolve.call(this, root, ref); - if (_sch === undefined) { - const schema = (_a = root.localRefs) === null || _a === void 0 ? void 0 : _a[ref]; // TODO maybe localRefs should hold SchemaEnv - const { schemaId } = this.opts; - if (schema) - _sch = new SchemaEnv({ schema, schemaId, root, baseId }); - } - if (_sch === undefined) - return; - return (root.refs[ref] = inlineOrCompile.call(this, _sch)); -} -exports.resolveRef = resolveRef; -function inlineOrCompile(sch) { - if ((0, resolve_1.inlineRef)(sch.schema, this.opts.inlineRefs)) - return sch.schema; - return sch.validate ? sch : compileSchema.call(this, sch); -} -// Index of schema compilation in the currently compiled list -function getCompilingSchema(schEnv) { - for (const sch of this._compilations) { - if (sameSchemaEnv(sch, schEnv)) - return sch; - } -} -exports.getCompilingSchema = getCompilingSchema; -function sameSchemaEnv(s1, s2) { - return s1.schema === s2.schema && s1.root === s2.root && s1.baseId === s2.baseId; -} -// resolve and compile the references ($ref) -// TODO returns AnySchemaObject (if the schema can be inlined) or validation function -function resolve(root, // information about the root schema for the current schema -ref // reference to resolve -) { - let sch; - while (typeof (sch = this.refs[ref]) == "string") - ref = sch; - return sch || this.schemas[ref] || resolveSchema.call(this, root, ref); -} -// Resolve schema, its root and baseId -function resolveSchema(root, // root object with properties schema, refs TODO below SchemaEnv is assigned to it -ref // reference to resolve -) { - const p = this.opts.uriResolver.parse(ref); - const refPath = (0, resolve_1._getFullPath)(this.opts.uriResolver, p); - let baseId = (0, resolve_1.getFullPath)(this.opts.uriResolver, root.baseId, undefined); - // TODO `Object.keys(root.schema).length > 0` should not be needed - but removing breaks 2 tests - if (Object.keys(root.schema).length > 0 && refPath === baseId) { - return getJsonPointer.call(this, p, root); - } - const id = (0, resolve_1.normalizeId)(refPath); - const schOrRef = this.refs[id] || this.schemas[id]; - if (typeof schOrRef == "string") { - const sch = resolveSchema.call(this, root, schOrRef); - if (typeof (sch === null || sch === void 0 ? void 0 : sch.schema) !== "object") - return; - return getJsonPointer.call(this, p, sch); - } - if (typeof (schOrRef === null || schOrRef === void 0 ? 
void 0 : schOrRef.schema) !== "object") - return; - if (!schOrRef.validate) - compileSchema.call(this, schOrRef); - if (id === (0, resolve_1.normalizeId)(ref)) { - const { schema } = schOrRef; - const { schemaId } = this.opts; - const schId = schema[schemaId]; - if (schId) - baseId = (0, resolve_1.resolveUrl)(this.opts.uriResolver, baseId, schId); - return new SchemaEnv({ schema, schemaId, root, baseId }); - } - return getJsonPointer.call(this, p, schOrRef); -} -exports.resolveSchema = resolveSchema; -const PREVENT_SCOPE_CHANGE = new Set([ - "properties", - "patternProperties", - "enum", - "dependencies", - "definitions", -]); -function getJsonPointer(parsedRef, { baseId, schema, root }) { - var _a; - if (((_a = parsedRef.fragment) === null || _a === void 0 ? void 0 : _a[0]) !== "/") - return; - for (const part of parsedRef.fragment.slice(1).split("/")) { - if (typeof schema === "boolean") - return; - const partSchema = schema[(0, util_1.unescapeFragment)(part)]; - if (partSchema === undefined) - return; - schema = partSchema; - // TODO PREVENT_SCOPE_CHANGE could be defined in keyword def? - const schId = typeof schema === "object" && schema[this.opts.schemaId]; - if (!PREVENT_SCOPE_CHANGE.has(part) && schId) { - baseId = (0, resolve_1.resolveUrl)(this.opts.uriResolver, baseId, schId); - } - } - let env; - if (typeof schema != "boolean" && schema.$ref && !(0, util_1.schemaHasRulesButRef)(schema, this.RULES)) { - const $ref = (0, resolve_1.resolveUrl)(this.opts.uriResolver, baseId, schema.$ref); - env = resolveSchema.call(this, root, $ref); - } - // even though resolution failed we need to return SchemaEnv to throw exception - // so that compileAsync loads missing schema. - const { schemaId } = this.opts; - env = env || new SchemaEnv({ schema, schemaId, root, baseId }); - if (env.schema !== env.root.schema) - return env; - return undefined; -} -//# sourceMappingURL=index.js.map - -/***/ }), - -/***/ 50: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const codegen_1 = __nccwpck_require__(9179); -const names = { - // validation function arguments - data: new codegen_1.Name("data"), - // args passed from referencing schema - valCxt: new codegen_1.Name("valCxt"), - instancePath: new codegen_1.Name("instancePath"), - parentData: new codegen_1.Name("parentData"), - parentDataProperty: new codegen_1.Name("parentDataProperty"), - rootData: new codegen_1.Name("rootData"), - dynamicAnchors: new codegen_1.Name("dynamicAnchors"), - // function scoped variables - vErrors: new codegen_1.Name("vErrors"), - errors: new codegen_1.Name("errors"), - this: new codegen_1.Name("this"), - // "globals" - self: new codegen_1.Name("self"), - scope: new codegen_1.Name("scope"), - // JTD serialize/parse name for JSON string and position - json: new codegen_1.Name("json"), - jsonPos: new codegen_1.Name("jsonPos"), - jsonLen: new codegen_1.Name("jsonLen"), - jsonPart: new codegen_1.Name("jsonPart"), -}; -exports.default = names; -//# sourceMappingURL=names.js.map - -/***/ }), - -/***/ 8190: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const resolve_1 = __nccwpck_require__(6646); -class MissingRefError extends Error { - constructor(resolver, baseId, ref, msg) { - super(msg || `can't resolve reference ${ref} from id ${baseId}`); - this.missingRef = (0, resolve_1.resolveUrl)(resolver, baseId, ref); - 
this.missingSchema = (0, resolve_1.normalizeId)((0, resolve_1.getFullPath)(resolver, this.missingRef)); - } -} -exports.default = MissingRefError; -//# sourceMappingURL=ref_error.js.map - -/***/ }), - -/***/ 6646: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getSchemaRefs = exports.resolveUrl = exports.normalizeId = exports._getFullPath = exports.getFullPath = exports.inlineRef = void 0; -const util_1 = __nccwpck_require__(3439); -const equal = __nccwpck_require__(8206); -const traverse = __nccwpck_require__(2533); -// TODO refactor to use keyword definitions -const SIMPLE_INLINED = new Set([ - "type", - "format", - "pattern", - "maxLength", - "minLength", - "maxProperties", - "minProperties", - "maxItems", - "minItems", - "maximum", - "minimum", - "uniqueItems", - "multipleOf", - "required", - "enum", - "const", -]); -function inlineRef(schema, limit = true) { - if (typeof schema == "boolean") - return true; - if (limit === true) - return !hasRef(schema); - if (!limit) - return false; - return countKeys(schema) <= limit; -} -exports.inlineRef = inlineRef; -const REF_KEYWORDS = new Set([ - "$ref", - "$recursiveRef", - "$recursiveAnchor", - "$dynamicRef", - "$dynamicAnchor", -]); -function hasRef(schema) { - for (const key in schema) { - if (REF_KEYWORDS.has(key)) - return true; - const sch = schema[key]; - if (Array.isArray(sch) && sch.some(hasRef)) - return true; - if (typeof sch == "object" && hasRef(sch)) - return true; - } - return false; -} -function countKeys(schema) { - let count = 0; - for (const key in schema) { - if (key === "$ref") - return Infinity; - count++; - if (SIMPLE_INLINED.has(key)) - continue; - if (typeof schema[key] == "object") { - (0, util_1.eachItem)(schema[key], (sch) => (count += countKeys(sch))); - } - if (count === Infinity) - return Infinity; - } - return count; -} -function getFullPath(resolver, id = "", normalize) { - if (normalize !== false) - id = normalizeId(id); - const p = resolver.parse(id); - return _getFullPath(resolver, p); -} -exports.getFullPath = getFullPath; -function _getFullPath(resolver, p) { - const serialized = resolver.serialize(p); - return serialized.split("#")[0] + "#"; -} -exports._getFullPath = _getFullPath; -const TRAILING_SLASH_HASH = /#\/?$/; -function normalizeId(id) { - return id ? 
id.replace(TRAILING_SLASH_HASH, "") : ""; -} -exports.normalizeId = normalizeId; -function resolveUrl(resolver, baseId, id) { - id = normalizeId(id); - return resolver.resolve(baseId, id); -} -exports.resolveUrl = resolveUrl; -const ANCHOR = /^[a-z_][-a-z0-9._]*$/i; -function getSchemaRefs(schema, baseId) { - if (typeof schema == "boolean") - return {}; - const { schemaId, uriResolver } = this.opts; - const schId = normalizeId(schema[schemaId] || baseId); - const baseIds = { "": schId }; - const pathPrefix = getFullPath(uriResolver, schId, false); - const localRefs = {}; - const schemaRefs = new Set(); - traverse(schema, { allKeys: true }, (sch, jsonPtr, _, parentJsonPtr) => { - if (parentJsonPtr === undefined) - return; - const fullPath = pathPrefix + jsonPtr; - let baseId = baseIds[parentJsonPtr]; - if (typeof sch[schemaId] == "string") - baseId = addRef.call(this, sch[schemaId]); - addAnchor.call(this, sch.$anchor); - addAnchor.call(this, sch.$dynamicAnchor); - baseIds[jsonPtr] = baseId; - function addRef(ref) { - // eslint-disable-next-line @typescript-eslint/unbound-method - const _resolve = this.opts.uriResolver.resolve; - ref = normalizeId(baseId ? _resolve(baseId, ref) : ref); - if (schemaRefs.has(ref)) - throw ambiguos(ref); - schemaRefs.add(ref); - let schOrRef = this.refs[ref]; - if (typeof schOrRef == "string") - schOrRef = this.refs[schOrRef]; - if (typeof schOrRef == "object") { - checkAmbiguosRef(sch, schOrRef.schema, ref); - } - else if (ref !== normalizeId(fullPath)) { - if (ref[0] === "#") { - checkAmbiguosRef(sch, localRefs[ref], ref); - localRefs[ref] = sch; - } - else { - this.refs[ref] = fullPath; - } - } - return ref; - } - function addAnchor(anchor) { - if (typeof anchor == "string") { - if (!ANCHOR.test(anchor)) - throw new Error(`invalid anchor "${anchor}"`); - addRef.call(this, `#${anchor}`); - } - } - }); - return localRefs; - function checkAmbiguosRef(sch1, sch2, ref) { - if (sch2 !== undefined && !equal(sch1, sch2)) - throw ambiguos(ref); - } - function ambiguos(ref) { - return new Error(`reference "${ref}" resolves to more than one schema`); - } -} -exports.getSchemaRefs = getSchemaRefs; -//# sourceMappingURL=resolve.js.map - -/***/ }), - -/***/ 240: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getRules = exports.isJSONType = void 0; -const _jsonTypes = ["string", "number", "integer", "boolean", "null", "object", "array"]; -const jsonTypes = new Set(_jsonTypes); -function isJSONType(x) { - return typeof x == "string" && jsonTypes.has(x); -} -exports.isJSONType = isJSONType; -function getRules() { - const groups = { - number: { type: "number", rules: [] }, - string: { type: "string", rules: [] }, - array: { type: "array", rules: [] }, - object: { type: "object", rules: [] }, - }; - return { - types: { ...groups, integer: true, boolean: true, null: true }, - rules: [{ rules: [] }, groups.number, groups.string, groups.array, groups.object], - post: { rules: [] }, - all: {}, - keywords: {}, - }; -} -exports.getRules = getRules; -//# sourceMappingURL=rules.js.map - -/***/ }), - -/***/ 3439: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.checkStrictMode = exports.getErrorPath = exports.Type = exports.useFunc = exports.setEvaluated = exports.evaluatedPropsToName = exports.mergeEvaluated = exports.eachItem = exports.unescapeJsonPointer = 
exports.escapeJsonPointer = exports.escapeFragment = exports.unescapeFragment = exports.schemaRefOrVal = exports.schemaHasRulesButRef = exports.schemaHasRules = exports.checkUnknownRules = exports.alwaysValidSchema = exports.toHash = void 0; -const codegen_1 = __nccwpck_require__(9179); -const code_1 = __nccwpck_require__(8358); -// TODO refactor to use Set -function toHash(arr) { - const hash = {}; - for (const item of arr) - hash[item] = true; - return hash; -} -exports.toHash = toHash; -function alwaysValidSchema(it, schema) { - if (typeof schema == "boolean") - return schema; - if (Object.keys(schema).length === 0) - return true; - checkUnknownRules(it, schema); - return !schemaHasRules(schema, it.self.RULES.all); -} -exports.alwaysValidSchema = alwaysValidSchema; -function checkUnknownRules(it, schema = it.schema) { - const { opts, self } = it; - if (!opts.strictSchema) - return; - if (typeof schema === "boolean") - return; - const rules = self.RULES.keywords; - for (const key in schema) { - if (!rules[key]) - checkStrictMode(it, `unknown keyword: "${key}"`); - } -} -exports.checkUnknownRules = checkUnknownRules; -function schemaHasRules(schema, rules) { - if (typeof schema == "boolean") - return !schema; - for (const key in schema) - if (rules[key]) - return true; - return false; -} -exports.schemaHasRules = schemaHasRules; -function schemaHasRulesButRef(schema, RULES) { - if (typeof schema == "boolean") - return !schema; - for (const key in schema) - if (key !== "$ref" && RULES.all[key]) - return true; - return false; -} -exports.schemaHasRulesButRef = schemaHasRulesButRef; -function schemaRefOrVal({ topSchemaRef, schemaPath }, schema, keyword, $data) { - if (!$data) { - if (typeof schema == "number" || typeof schema == "boolean") - return schema; - if (typeof schema == "string") - return (0, codegen_1._) `${schema}`; - } - return (0, codegen_1._) `${topSchemaRef}${schemaPath}${(0, codegen_1.getProperty)(keyword)}`; -} -exports.schemaRefOrVal = schemaRefOrVal; -function unescapeFragment(str) { - return unescapeJsonPointer(decodeURIComponent(str)); -} -exports.unescapeFragment = unescapeFragment; -function escapeFragment(str) { - return encodeURIComponent(escapeJsonPointer(str)); -} -exports.escapeFragment = escapeFragment; -function escapeJsonPointer(str) { - if (typeof str == "number") - return `${str}`; - return str.replace(/~/g, "~0").replace(/\//g, "~1"); -} -exports.escapeJsonPointer = escapeJsonPointer; -function unescapeJsonPointer(str) { - return str.replace(/~1/g, "/").replace(/~0/g, "~"); -} -exports.unescapeJsonPointer = unescapeJsonPointer; -function eachItem(xs, f) { - if (Array.isArray(xs)) { - for (const x of xs) - f(x); - } - else { - f(xs); - } -} -exports.eachItem = eachItem; -function makeMergeEvaluated({ mergeNames, mergeToName, mergeValues, resultToName, }) { - return (gen, from, to, toName) => { - const res = to === undefined - ? from - : to instanceof codegen_1.Name - ? (from instanceof codegen_1.Name ? mergeNames(gen, from, to) : mergeToName(gen, from, to), to) - : from instanceof codegen_1.Name - ? (mergeToName(gen, to, from), from) - : mergeValues(from, to); - return toName === codegen_1.Name && !(res instanceof codegen_1.Name) ? 
resultToName(gen, res) : res; - }; -} -exports.mergeEvaluated = { - props: makeMergeEvaluated({ - mergeNames: (gen, from, to) => gen.if((0, codegen_1._) `${to} !== true && ${from} !== undefined`, () => { - gen.if((0, codegen_1._) `${from} === true`, () => gen.assign(to, true), () => gen.assign(to, (0, codegen_1._) `${to} || {}`).code((0, codegen_1._) `Object.assign(${to}, ${from})`)); - }), - mergeToName: (gen, from, to) => gen.if((0, codegen_1._) `${to} !== true`, () => { - if (from === true) { - gen.assign(to, true); - } - else { - gen.assign(to, (0, codegen_1._) `${to} || {}`); - setEvaluated(gen, to, from); - } - }), - mergeValues: (from, to) => (from === true ? true : { ...from, ...to }), - resultToName: evaluatedPropsToName, - }), - items: makeMergeEvaluated({ - mergeNames: (gen, from, to) => gen.if((0, codegen_1._) `${to} !== true && ${from} !== undefined`, () => gen.assign(to, (0, codegen_1._) `${from} === true ? true : ${to} > ${from} ? ${to} : ${from}`)), - mergeToName: (gen, from, to) => gen.if((0, codegen_1._) `${to} !== true`, () => gen.assign(to, from === true ? true : (0, codegen_1._) `${to} > ${from} ? ${to} : ${from}`)), - mergeValues: (from, to) => (from === true ? true : Math.max(from, to)), - resultToName: (gen, items) => gen.var("items", items), - }), -}; -function evaluatedPropsToName(gen, ps) { - if (ps === true) - return gen.var("props", true); - const props = gen.var("props", (0, codegen_1._) `{}`); - if (ps !== undefined) - setEvaluated(gen, props, ps); - return props; -} -exports.evaluatedPropsToName = evaluatedPropsToName; -function setEvaluated(gen, props, ps) { - Object.keys(ps).forEach((p) => gen.assign((0, codegen_1._) `${props}${(0, codegen_1.getProperty)(p)}`, true)); -} -exports.setEvaluated = setEvaluated; -const snippets = {}; -function useFunc(gen, f) { - return gen.scopeValue("func", { - ref: f, - code: snippets[f.code] || (snippets[f.code] = new code_1._Code(f.code)), - }); -} -exports.useFunc = useFunc; -var Type; -(function (Type) { - Type[Type["Num"] = 0] = "Num"; - Type[Type["Str"] = 1] = "Str"; -})(Type = exports.Type || (exports.Type = {})); -function getErrorPath(dataProp, dataPropType, jsPropertySyntax) { - // let path - if (dataProp instanceof codegen_1.Name) { - const isNumber = dataPropType === Type.Num; - return jsPropertySyntax - ? isNumber - ? (0, codegen_1._) `"[" + ${dataProp} + "]"` - : (0, codegen_1._) `"['" + ${dataProp} + "']"` - : isNumber - ? (0, codegen_1._) `"/" + ${dataProp}` - : (0, codegen_1._) `"/" + ${dataProp}.replace(/~/g, "~0").replace(/\\//g, "~1")`; // TODO maybe use global escapePointer - } - return jsPropertySyntax ? 
(0, codegen_1.getProperty)(dataProp).toString() : "/" + escapeJsonPointer(dataProp); -} -exports.getErrorPath = getErrorPath; -function checkStrictMode(it, msg, mode = it.opts.strictSchema) { - if (!mode) - return; - msg = `strict mode: ${msg}`; - if (mode === true) - throw new Error(msg); - it.self.logger.warn(msg); -} -exports.checkStrictMode = checkStrictMode; -//# sourceMappingURL=util.js.map - -/***/ }), - -/***/ 3627: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.shouldUseRule = exports.shouldUseGroup = exports.schemaHasRulesForType = void 0; -function schemaHasRulesForType({ schema, self }, type) { - const group = self.RULES.types[type]; - return group && group !== true && shouldUseGroup(schema, group); -} -exports.schemaHasRulesForType = schemaHasRulesForType; -function shouldUseGroup(schema, group) { - return group.rules.some((rule) => shouldUseRule(schema, rule)); -} -exports.shouldUseGroup = shouldUseGroup; -function shouldUseRule(schema, rule) { - var _a; - return (schema[rule.keyword] !== undefined || - ((_a = rule.definition.implements) === null || _a === void 0 ? void 0 : _a.some((kwd) => schema[kwd] !== undefined))); -} -exports.shouldUseRule = shouldUseRule; -//# sourceMappingURL=applicability.js.map - -/***/ }), - -/***/ 6214: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.boolOrEmptySchema = exports.topBoolOrEmptySchema = void 0; -const errors_1 = __nccwpck_require__(6150); -const codegen_1 = __nccwpck_require__(9179); -const names_1 = __nccwpck_require__(50); -const boolError = { - message: "boolean schema is false", -}; -function topBoolOrEmptySchema(it) { - const { gen, schema, validateName } = it; - if (schema === false) { - falseSchemaError(it, false); - } - else if (typeof schema == "object" && schema.$async === true) { - gen.return(names_1.default.data); - } - else { - gen.assign((0, codegen_1._) `${validateName}.errors`, null); - gen.return(true); - } -} -exports.topBoolOrEmptySchema = topBoolOrEmptySchema; -function boolOrEmptySchema(it, valid) { - const { gen, schema } = it; - if (schema === false) { - gen.var(valid, false); // TODO var - falseSchemaError(it); - } - else { - gen.var(valid, true); // TODO var - } -} -exports.boolOrEmptySchema = boolOrEmptySchema; -function falseSchemaError(it, overrideAllErrors) { - const { gen, data } = it; - // TODO maybe some other interface should be used for non-keyword validation errors... 
- const cxt = { - gen, - keyword: "false schema", - data, - schema: false, - schemaCode: false, - schemaValue: false, - params: {}, - it, - }; - (0, errors_1.reportError)(cxt, boolError, undefined, overrideAllErrors); -} -//# sourceMappingURL=boolSchema.js.map - -/***/ }), - -/***/ 7725: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.reportTypeError = exports.checkDataTypes = exports.checkDataType = exports.coerceAndCheckDataType = exports.getJSONTypes = exports.getSchemaTypes = exports.DataType = void 0; -const rules_1 = __nccwpck_require__(240); -const applicability_1 = __nccwpck_require__(3627); -const errors_1 = __nccwpck_require__(6150); -const codegen_1 = __nccwpck_require__(9179); -const util_1 = __nccwpck_require__(3439); -var DataType; -(function (DataType) { - DataType[DataType["Correct"] = 0] = "Correct"; - DataType[DataType["Wrong"] = 1] = "Wrong"; -})(DataType = exports.DataType || (exports.DataType = {})); -function getSchemaTypes(schema) { - const types = getJSONTypes(schema.type); - const hasNull = types.includes("null"); - if (hasNull) { - if (schema.nullable === false) - throw new Error("type: null contradicts nullable: false"); - } - else { - if (!types.length && schema.nullable !== undefined) { - throw new Error('"nullable" cannot be used without "type"'); - } - if (schema.nullable === true) - types.push("null"); - } - return types; -} -exports.getSchemaTypes = getSchemaTypes; -function getJSONTypes(ts) { - const types = Array.isArray(ts) ? ts : ts ? [ts] : []; - if (types.every(rules_1.isJSONType)) - return types; - throw new Error("type must be JSONType or JSONType[]: " + types.join(",")); -} -exports.getJSONTypes = getJSONTypes; -function coerceAndCheckDataType(it, types) { - const { gen, data, opts } = it; - const coerceTo = coerceToTypes(types, opts.coerceTypes); - const checkTypes = types.length > 0 && - !(coerceTo.length === 0 && types.length === 1 && (0, applicability_1.schemaHasRulesForType)(it, types[0])); - if (checkTypes) { - const wrongType = checkDataTypes(types, data, opts.strictNumbers, DataType.Wrong); - gen.if(wrongType, () => { - if (coerceTo.length) - coerceData(it, types, coerceTo); - else - reportTypeError(it); - }); - } - return checkTypes; -} -exports.coerceAndCheckDataType = coerceAndCheckDataType; -const COERCIBLE = new Set(["string", "number", "integer", "boolean", "null"]); -function coerceToTypes(types, coerceTypes) { - return coerceTypes - ? 
types.filter((t) => COERCIBLE.has(t) || (coerceTypes === "array" && t === "array")) - : []; -} -function coerceData(it, types, coerceTo) { - const { gen, data, opts } = it; - const dataType = gen.let("dataType", (0, codegen_1._) `typeof ${data}`); - const coerced = gen.let("coerced", (0, codegen_1._) `undefined`); - if (opts.coerceTypes === "array") { - gen.if((0, codegen_1._) `${dataType} == 'object' && Array.isArray(${data}) && ${data}.length == 1`, () => gen - .assign(data, (0, codegen_1._) `${data}[0]`) - .assign(dataType, (0, codegen_1._) `typeof ${data}`) - .if(checkDataTypes(types, data, opts.strictNumbers), () => gen.assign(coerced, data))); - } - gen.if((0, codegen_1._) `${coerced} !== undefined`); - for (const t of coerceTo) { - if (COERCIBLE.has(t) || (t === "array" && opts.coerceTypes === "array")) { - coerceSpecificType(t); - } - } - gen.else(); - reportTypeError(it); - gen.endIf(); - gen.if((0, codegen_1._) `${coerced} !== undefined`, () => { - gen.assign(data, coerced); - assignParentData(it, coerced); - }); - function coerceSpecificType(t) { - switch (t) { - case "string": - gen - .elseIf((0, codegen_1._) `${dataType} == "number" || ${dataType} == "boolean"`) - .assign(coerced, (0, codegen_1._) `"" + ${data}`) - .elseIf((0, codegen_1._) `${data} === null`) - .assign(coerced, (0, codegen_1._) `""`); - return; - case "number": - gen - .elseIf((0, codegen_1._) `${dataType} == "boolean" || ${data} === null - || (${dataType} == "string" && ${data} && ${data} == +${data})`) - .assign(coerced, (0, codegen_1._) `+${data}`); - return; - case "integer": - gen - .elseIf((0, codegen_1._) `${dataType} === "boolean" || ${data} === null - || (${dataType} === "string" && ${data} && ${data} == +${data} && !(${data} % 1))`) - .assign(coerced, (0, codegen_1._) `+${data}`); - return; - case "boolean": - gen - .elseIf((0, codegen_1._) `${data} === "false" || ${data} === 0 || ${data} === null`) - .assign(coerced, false) - .elseIf((0, codegen_1._) `${data} === "true" || ${data} === 1`) - .assign(coerced, true); - return; - case "null": - gen.elseIf((0, codegen_1._) `${data} === "" || ${data} === 0 || ${data} === false`); - gen.assign(coerced, null); - return; - case "array": - gen - .elseIf((0, codegen_1._) `${dataType} === "string" || ${dataType} === "number" - || ${dataType} === "boolean" || ${data} === null`) - .assign(coerced, (0, codegen_1._) `[${data}]`); - } - } -} -function assignParentData({ gen, parentData, parentDataProperty }, expr) { - // TODO use gen.property - gen.if((0, codegen_1._) `${parentData} !== undefined`, () => gen.assign((0, codegen_1._) `${parentData}[${parentDataProperty}]`, expr)); -} -function checkDataType(dataType, data, strictNums, correct = DataType.Correct) { - const EQ = correct === DataType.Correct ? codegen_1.operators.EQ : codegen_1.operators.NEQ; - let cond; - switch (dataType) { - case "null": - return (0, codegen_1._) `${data} ${EQ} null`; - case "array": - cond = (0, codegen_1._) `Array.isArray(${data})`; - break; - case "object": - cond = (0, codegen_1._) `${data} && typeof ${data} == "object" && !Array.isArray(${data})`; - break; - case "integer": - cond = numCond((0, codegen_1._) `!(${data} % 1) && !isNaN(${data})`); - break; - case "number": - cond = numCond(); - break; - default: - return (0, codegen_1._) `typeof ${data} ${EQ} ${dataType}`; - } - return correct === DataType.Correct ? 
cond : (0, codegen_1.not)(cond); - function numCond(_cond = codegen_1.nil) { - return (0, codegen_1.and)((0, codegen_1._) `typeof ${data} == "number"`, _cond, strictNums ? (0, codegen_1._) `isFinite(${data})` : codegen_1.nil); - } -} -exports.checkDataType = checkDataType; -function checkDataTypes(dataTypes, data, strictNums, correct) { - if (dataTypes.length === 1) { - return checkDataType(dataTypes[0], data, strictNums, correct); - } - let cond; - const types = (0, util_1.toHash)(dataTypes); - if (types.array && types.object) { - const notObj = (0, codegen_1._) `typeof ${data} != "object"`; - cond = types.null ? notObj : (0, codegen_1._) `!${data} || ${notObj}`; - delete types.null; - delete types.array; - delete types.object; - } - else { - cond = codegen_1.nil; - } - if (types.number) - delete types.integer; - for (const t in types) - cond = (0, codegen_1.and)(cond, checkDataType(t, data, strictNums, correct)); - return cond; -} -exports.checkDataTypes = checkDataTypes; -const typeError = { - message: ({ schema }) => `must be ${schema}`, - params: ({ schema, schemaValue }) => typeof schema == "string" ? (0, codegen_1._) `{type: ${schema}}` : (0, codegen_1._) `{type: ${schemaValue}}`, -}; -function reportTypeError(it) { - const cxt = getTypeErrorContext(it); - (0, errors_1.reportError)(cxt, typeError); -} -exports.reportTypeError = reportTypeError; -function getTypeErrorContext(it) { - const { gen, data, schema } = it; - const schemaCode = (0, util_1.schemaRefOrVal)(it, schema, "type"); - return { - gen, - keyword: "type", - data, - schema: schema.type, - schemaCode, - schemaValue: schemaCode, - parentSchema: schema, - params: {}, - it, - }; -} -//# sourceMappingURL=dataType.js.map - -/***/ }), - -/***/ 9593: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.assignDefaults = void 0; -const codegen_1 = __nccwpck_require__(9179); -const util_1 = __nccwpck_require__(3439); -function assignDefaults(it, ty) { - const { properties, items } = it.schema; - if (ty === "object" && properties) { - for (const key in properties) { - assignDefault(it, key, properties[key].default); - } - } - else if (ty === "array" && Array.isArray(items)) { - items.forEach((sch, i) => assignDefault(it, i, sch.default)); - } -} -exports.assignDefaults = assignDefaults; -function assignDefault(it, prop, defaultValue) { - const { gen, compositeRule, data, opts } = it; - if (defaultValue === undefined) - return; - const childData = (0, codegen_1._) `${data}${(0, codegen_1.getProperty)(prop)}`; - if (compositeRule) { - (0, util_1.checkStrictMode)(it, `default is ignored for: ${childData}`); - return; - } - let condition = (0, codegen_1._) `${childData} === undefined`; - if (opts.useDefaults === "empty") { - condition = (0, codegen_1._) `${condition} || ${childData} === null || ${childData} === ""`; - } - // `${childData} === undefined` + - // (opts.useDefaults === "empty" ? 
` || ${childData} === null || ${childData} === ""` : "") - gen.if(condition, (0, codegen_1._) `${childData} = ${(0, codegen_1.stringify)(defaultValue)}`); -} -//# sourceMappingURL=defaults.js.map - -/***/ }), - -/***/ 8955: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getData = exports.KeywordCxt = exports.validateFunctionCode = void 0; -const boolSchema_1 = __nccwpck_require__(6214); -const dataType_1 = __nccwpck_require__(7725); -const applicability_1 = __nccwpck_require__(3627); -const dataType_2 = __nccwpck_require__(7725); -const defaults_1 = __nccwpck_require__(9593); -const keyword_1 = __nccwpck_require__(8732); -const subschema_1 = __nccwpck_require__(3896); -const codegen_1 = __nccwpck_require__(9179); -const names_1 = __nccwpck_require__(50); -const resolve_1 = __nccwpck_require__(6646); -const util_1 = __nccwpck_require__(3439); -const errors_1 = __nccwpck_require__(6150); -// schema compilation - generates validation function, subschemaCode (below) is used for subschemas -function validateFunctionCode(it) { - if (isSchemaObj(it)) { - checkKeywords(it); - if (schemaCxtHasRules(it)) { - topSchemaObjCode(it); - return; - } - } - validateFunction(it, () => (0, boolSchema_1.topBoolOrEmptySchema)(it)); -} -exports.validateFunctionCode = validateFunctionCode; -function validateFunction({ gen, validateName, schema, schemaEnv, opts }, body) { - if (opts.code.es5) { - gen.func(validateName, (0, codegen_1._) `${names_1.default.data}, ${names_1.default.valCxt}`, schemaEnv.$async, () => { - gen.code((0, codegen_1._) `"use strict"; ${funcSourceUrl(schema, opts)}`); - destructureValCxtES5(gen, opts); - gen.code(body); - }); - } - else { - gen.func(validateName, (0, codegen_1._) `${names_1.default.data}, ${destructureValCxt(opts)}`, schemaEnv.$async, () => gen.code(funcSourceUrl(schema, opts)).code(body)); - } -} -function destructureValCxt(opts) { - return (0, codegen_1._) `{${names_1.default.instancePath}="", ${names_1.default.parentData}, ${names_1.default.parentDataProperty}, ${names_1.default.rootData}=${names_1.default.data}${opts.dynamicRef ? 
(0, codegen_1._) `, ${names_1.default.dynamicAnchors}={}` : codegen_1.nil}}={}`; -} -function destructureValCxtES5(gen, opts) { - gen.if(names_1.default.valCxt, () => { - gen.var(names_1.default.instancePath, (0, codegen_1._) `${names_1.default.valCxt}.${names_1.default.instancePath}`); - gen.var(names_1.default.parentData, (0, codegen_1._) `${names_1.default.valCxt}.${names_1.default.parentData}`); - gen.var(names_1.default.parentDataProperty, (0, codegen_1._) `${names_1.default.valCxt}.${names_1.default.parentDataProperty}`); - gen.var(names_1.default.rootData, (0, codegen_1._) `${names_1.default.valCxt}.${names_1.default.rootData}`); - if (opts.dynamicRef) - gen.var(names_1.default.dynamicAnchors, (0, codegen_1._) `${names_1.default.valCxt}.${names_1.default.dynamicAnchors}`); - }, () => { - gen.var(names_1.default.instancePath, (0, codegen_1._) `""`); - gen.var(names_1.default.parentData, (0, codegen_1._) `undefined`); - gen.var(names_1.default.parentDataProperty, (0, codegen_1._) `undefined`); - gen.var(names_1.default.rootData, names_1.default.data); - if (opts.dynamicRef) - gen.var(names_1.default.dynamicAnchors, (0, codegen_1._) `{}`); - }); -} -function topSchemaObjCode(it) { - const { schema, opts, gen } = it; - validateFunction(it, () => { - if (opts.$comment && schema.$comment) - commentKeyword(it); - checkNoDefault(it); - gen.let(names_1.default.vErrors, null); - gen.let(names_1.default.errors, 0); - if (opts.unevaluated) - resetEvaluated(it); - typeAndKeywords(it); - returnResults(it); - }); - return; -} -function resetEvaluated(it) { - // TODO maybe some hook to execute it in the end to check whether props/items are Name, as in assignEvaluated - const { gen, validateName } = it; - it.evaluated = gen.const("evaluated", (0, codegen_1._) `${validateName}.evaluated`); - gen.if((0, codegen_1._) `${it.evaluated}.dynamicProps`, () => gen.assign((0, codegen_1._) `${it.evaluated}.props`, (0, codegen_1._) `undefined`)); - gen.if((0, codegen_1._) `${it.evaluated}.dynamicItems`, () => gen.assign((0, codegen_1._) `${it.evaluated}.items`, (0, codegen_1._) `undefined`)); -} -function funcSourceUrl(schema, opts) { - const schId = typeof schema == "object" && schema[opts.schemaId]; - return schId && (opts.code.source || opts.code.process) ? 
(0, codegen_1._) `/*# sourceURL=${schId} */` : codegen_1.nil; -} -// schema compilation - this function is used recursively to generate code for sub-schemas -function subschemaCode(it, valid) { - if (isSchemaObj(it)) { - checkKeywords(it); - if (schemaCxtHasRules(it)) { - subSchemaObjCode(it, valid); - return; - } - } - (0, boolSchema_1.boolOrEmptySchema)(it, valid); -} -function schemaCxtHasRules({ schema, self }) { - if (typeof schema == "boolean") - return !schema; - for (const key in schema) - if (self.RULES.all[key]) - return true; - return false; -} -function isSchemaObj(it) { - return typeof it.schema != "boolean"; -} -function subSchemaObjCode(it, valid) { - const { schema, gen, opts } = it; - if (opts.$comment && schema.$comment) - commentKeyword(it); - updateContext(it); - checkAsyncSchema(it); - const errsCount = gen.const("_errs", names_1.default.errors); - typeAndKeywords(it, errsCount); - // TODO var - gen.var(valid, (0, codegen_1._) `${errsCount} === ${names_1.default.errors}`); -} -function checkKeywords(it) { - (0, util_1.checkUnknownRules)(it); - checkRefsAndKeywords(it); -} -function typeAndKeywords(it, errsCount) { - if (it.opts.jtd) - return schemaKeywords(it, [], false, errsCount); - const types = (0, dataType_1.getSchemaTypes)(it.schema); - const checkedTypes = (0, dataType_1.coerceAndCheckDataType)(it, types); - schemaKeywords(it, types, !checkedTypes, errsCount); -} -function checkRefsAndKeywords(it) { - const { schema, errSchemaPath, opts, self } = it; - if (schema.$ref && opts.ignoreKeywordsWithRef && (0, util_1.schemaHasRulesButRef)(schema, self.RULES)) { - self.logger.warn(`$ref: keywords ignored in schema at path "${errSchemaPath}"`); - } -} -function checkNoDefault(it) { - const { schema, opts } = it; - if (schema.default !== undefined && opts.useDefaults && opts.strictSchema) { - (0, util_1.checkStrictMode)(it, "default is ignored in the schema root"); - } -} -function updateContext(it) { - const schId = it.schema[it.opts.schemaId]; - if (schId) - it.baseId = (0, resolve_1.resolveUrl)(it.opts.uriResolver, it.baseId, schId); -} -function checkAsyncSchema(it) { - if (it.schema.$async && !it.schemaEnv.$async) - throw new Error("async schema in sync schema"); -} -function commentKeyword({ gen, schemaEnv, schema, errSchemaPath, opts }) { - const msg = schema.$comment; - if (opts.$comment === true) { - gen.code((0, codegen_1._) `${names_1.default.self}.logger.log(${msg})`); - } - else if (typeof opts.$comment == "function") { - const schemaPath = (0, codegen_1.str) `${errSchemaPath}/$comment`; - const rootName = gen.scopeValue("root", { ref: schemaEnv.root }); - gen.code((0, codegen_1._) `${names_1.default.self}.opts.$comment(${msg}, ${schemaPath}, ${rootName}.schema)`); - } -} -function returnResults(it) { - const { gen, schemaEnv, validateName, ValidationError, opts } = it; - if (schemaEnv.$async) { - // TODO assign unevaluated - gen.if((0, codegen_1._) `${names_1.default.errors} === 0`, () => gen.return(names_1.default.data), () => gen.throw((0, codegen_1._) `new ${ValidationError}(${names_1.default.vErrors})`)); - } - else { - gen.assign((0, codegen_1._) `${validateName}.errors`, names_1.default.vErrors); - if (opts.unevaluated) - assignEvaluated(it); - gen.return((0, codegen_1._) `${names_1.default.errors} === 0`); - } -} -function assignEvaluated({ gen, evaluated, props, items }) { - if (props instanceof codegen_1.Name) - gen.assign((0, codegen_1._) `${evaluated}.props`, props); - if (items instanceof codegen_1.Name) - gen.assign((0, codegen_1._) 
`${evaluated}.items`, items); -} -function schemaKeywords(it, types, typeErrors, errsCount) { - const { gen, schema, data, allErrors, opts, self } = it; - const { RULES } = self; - if (schema.$ref && (opts.ignoreKeywordsWithRef || !(0, util_1.schemaHasRulesButRef)(schema, RULES))) { - gen.block(() => keywordCode(it, "$ref", RULES.all.$ref.definition)); // TODO typecast - return; - } - if (!opts.jtd) - checkStrictTypes(it, types); - gen.block(() => { - for (const group of RULES.rules) - groupKeywords(group); - groupKeywords(RULES.post); - }); - function groupKeywords(group) { - if (!(0, applicability_1.shouldUseGroup)(schema, group)) - return; - if (group.type) { - gen.if((0, dataType_2.checkDataType)(group.type, data, opts.strictNumbers)); - iterateKeywords(it, group); - if (types.length === 1 && types[0] === group.type && typeErrors) { - gen.else(); - (0, dataType_2.reportTypeError)(it); - } - gen.endIf(); - } - else { - iterateKeywords(it, group); - } - // TODO make it "ok" call? - if (!allErrors) - gen.if((0, codegen_1._) `${names_1.default.errors} === ${errsCount || 0}`); - } -} -function iterateKeywords(it, group) { - const { gen, schema, opts: { useDefaults }, } = it; - if (useDefaults) - (0, defaults_1.assignDefaults)(it, group.type); - gen.block(() => { - for (const rule of group.rules) { - if ((0, applicability_1.shouldUseRule)(schema, rule)) { - keywordCode(it, rule.keyword, rule.definition, group.type); - } - } - }); -} -function checkStrictTypes(it, types) { - if (it.schemaEnv.meta || !it.opts.strictTypes) - return; - checkContextTypes(it, types); - if (!it.opts.allowUnionTypes) - checkMultipleTypes(it, types); - checkKeywordTypes(it, it.dataTypes); -} -function checkContextTypes(it, types) { - if (!types.length) - return; - if (!it.dataTypes.length) { - it.dataTypes = types; - return; - } - types.forEach((t) => { - if (!includesType(it.dataTypes, t)) { - strictTypesError(it, `type "${t}" not allowed by context "${it.dataTypes.join(",")}"`); - } - }); - it.dataTypes = it.dataTypes.filter((t) => includesType(types, t)); -} -function checkMultipleTypes(it, ts) { - if (ts.length > 1 && !(ts.length === 2 && ts.includes("null"))) { - strictTypesError(it, "use allowUnionTypes to allow union type keyword"); - } -} -function checkKeywordTypes(it, ts) { - const rules = it.self.RULES.all; - for (const keyword in rules) { - const rule = rules[keyword]; - if (typeof rule == "object" && (0, applicability_1.shouldUseRule)(it.schema, rule)) { - const { type } = rule.definition; - if (type.length && !type.some((t) => hasApplicableType(ts, t))) { - strictTypesError(it, `missing type "${type.join(",")}" for keyword "${keyword}"`); - } - } - } -} -function hasApplicableType(schTs, kwdT) { - return schTs.includes(kwdT) || (kwdT === "number" && schTs.includes("integer")); -} -function includesType(ts, t) { - return ts.includes(t) || (t === "integer" && ts.includes("number")); -} -function strictTypesError(it, msg) { - const schemaPath = it.schemaEnv.baseId + it.errSchemaPath; - msg += ` at "${schemaPath}" (strictTypes)`; - (0, util_1.checkStrictMode)(it, msg, it.opts.strictTypes); -} -class KeywordCxt { - constructor(it, def, keyword) { - (0, keyword_1.validateKeywordUsage)(it, def, keyword); - this.gen = it.gen; - this.allErrors = it.allErrors; - this.keyword = keyword; - this.data = it.data; - this.schema = it.schema[keyword]; - this.$data = def.$data && it.opts.$data && this.schema && this.schema.$data; - this.schemaValue = (0, util_1.schemaRefOrVal)(it, this.schema, keyword, this.$data); - 
this.schemaType = def.schemaType; - this.parentSchema = it.schema; - this.params = {}; - this.it = it; - this.def = def; - if (this.$data) { - this.schemaCode = it.gen.const("vSchema", getData(this.$data, it)); - } - else { - this.schemaCode = this.schemaValue; - if (!(0, keyword_1.validSchemaType)(this.schema, def.schemaType, def.allowUndefined)) { - throw new Error(`${keyword} value must be ${JSON.stringify(def.schemaType)}`); - } - } - if ("code" in def ? def.trackErrors : def.errors !== false) { - this.errsCount = it.gen.const("_errs", names_1.default.errors); - } - } - result(condition, successAction, failAction) { - this.failResult((0, codegen_1.not)(condition), successAction, failAction); - } - failResult(condition, successAction, failAction) { - this.gen.if(condition); - if (failAction) - failAction(); - else - this.error(); - if (successAction) { - this.gen.else(); - successAction(); - if (this.allErrors) - this.gen.endIf(); - } - else { - if (this.allErrors) - this.gen.endIf(); - else - this.gen.else(); - } - } - pass(condition, failAction) { - this.failResult((0, codegen_1.not)(condition), undefined, failAction); - } - fail(condition) { - if (condition === undefined) { - this.error(); - if (!this.allErrors) - this.gen.if(false); // this branch will be removed by gen.optimize - return; - } - this.gen.if(condition); - this.error(); - if (this.allErrors) - this.gen.endIf(); - else - this.gen.else(); - } - fail$data(condition) { - if (!this.$data) - return this.fail(condition); - const { schemaCode } = this; - this.fail((0, codegen_1._) `${schemaCode} !== undefined && (${(0, codegen_1.or)(this.invalid$data(), condition)})`); - } - error(append, errorParams, errorPaths) { - if (errorParams) { - this.setParams(errorParams); - this._error(append, errorPaths); - this.setParams({}); - return; - } - this._error(append, errorPaths); - } - _error(append, errorPaths) { - ; - (append ? errors_1.reportExtraError : errors_1.reportError)(this, this.def.error, errorPaths); - } - $dataError() { - (0, errors_1.reportError)(this, this.def.$dataError || errors_1.keyword$DataError); - } - reset() { - if (this.errsCount === undefined) - throw new Error('add "trackErrors" to keyword definition'); - (0, errors_1.resetErrorsCount)(this.gen, this.errsCount); - } - ok(cond) { - if (!this.allErrors) - this.gen.if(cond); - } - setParams(obj, assign) { - if (assign) - Object.assign(this.params, obj); - else - this.params = obj; - } - block$data(valid, codeBlock, $dataValid = codegen_1.nil) { - this.gen.block(() => { - this.check$data(valid, $dataValid); - codeBlock(); - }); - } - check$data(valid = codegen_1.nil, $dataValid = codegen_1.nil) { - if (!this.$data) - return; - const { gen, schemaCode, schemaType, def } = this; - gen.if((0, codegen_1.or)((0, codegen_1._) `${schemaCode} === undefined`, $dataValid)); - if (valid !== codegen_1.nil) - gen.assign(valid, true); - if (schemaType.length || def.validateSchema) { - gen.elseIf(this.invalid$data()); - this.$dataError(); - if (valid !== codegen_1.nil) - gen.assign(valid, false); - } - gen.else(); - } - invalid$data() { - const { gen, schemaCode, schemaType, def, it } = this; - return (0, codegen_1.or)(wrong$DataType(), invalid$DataSchema()); - function wrong$DataType() { - if (schemaType.length) { - /* istanbul ignore if */ - if (!(schemaCode instanceof codegen_1.Name)) - throw new Error("ajv implementation error"); - const st = Array.isArray(schemaType) ? 
schemaType : [schemaType]; - return (0, codegen_1._) `${(0, dataType_2.checkDataTypes)(st, schemaCode, it.opts.strictNumbers, dataType_2.DataType.Wrong)}`; - } - return codegen_1.nil; - } - function invalid$DataSchema() { - if (def.validateSchema) { - const validateSchemaRef = gen.scopeValue("validate$data", { ref: def.validateSchema }); // TODO value.code for standalone - return (0, codegen_1._) `!${validateSchemaRef}(${schemaCode})`; - } - return codegen_1.nil; - } - } - subschema(appl, valid) { - const subschema = (0, subschema_1.getSubschema)(this.it, appl); - (0, subschema_1.extendSubschemaData)(subschema, this.it, appl); - (0, subschema_1.extendSubschemaMode)(subschema, appl); - const nextContext = { ...this.it, ...subschema, items: undefined, props: undefined }; - subschemaCode(nextContext, valid); - return nextContext; - } - mergeEvaluated(schemaCxt, toName) { - const { it, gen } = this; - if (!it.opts.unevaluated) - return; - if (it.props !== true && schemaCxt.props !== undefined) { - it.props = util_1.mergeEvaluated.props(gen, schemaCxt.props, it.props, toName); - } - if (it.items !== true && schemaCxt.items !== undefined) { - it.items = util_1.mergeEvaluated.items(gen, schemaCxt.items, it.items, toName); - } - } - mergeValidEvaluated(schemaCxt, valid) { - const { it, gen } = this; - if (it.opts.unevaluated && (it.props !== true || it.items !== true)) { - gen.if(valid, () => this.mergeEvaluated(schemaCxt, codegen_1.Name)); - return true; - } - } -} -exports.KeywordCxt = KeywordCxt; -function keywordCode(it, keyword, def, ruleType) { - const cxt = new KeywordCxt(it, def, keyword); - if ("code" in def) { - def.code(cxt, ruleType); - } - else if (cxt.$data && def.validate) { - (0, keyword_1.funcKeywordCode)(cxt, def); - } - else if ("macro" in def) { - (0, keyword_1.macroKeywordCode)(cxt, def); - } - else if (def.compile || def.validate) { - (0, keyword_1.funcKeywordCode)(cxt, def); - } -} -const JSON_POINTER = /^\/(?:[^~]|~0|~1)*$/; -const RELATIVE_JSON_POINTER = /^([0-9]+)(#|\/(?:[^~]|~0|~1)*)?$/; -function getData($data, { dataLevel, dataNames, dataPathArr }) { - let jsonPointer; - let data; - if ($data === "") - return names_1.default.rootData; - if ($data[0] === "/") { - if (!JSON_POINTER.test($data)) - throw new Error(`Invalid JSON-pointer: ${$data}`); - jsonPointer = $data; - data = names_1.default.rootData; - } - else { - const matches = RELATIVE_JSON_POINTER.exec($data); - if (!matches) - throw new Error(`Invalid JSON-pointer: ${$data}`); - const up = +matches[1]; - jsonPointer = matches[2]; - if (jsonPointer === "#") { - if (up >= dataLevel) - throw new Error(errorMsg("property/index", up)); - return dataPathArr[dataLevel - up]; - } - if (up > dataLevel) - throw new Error(errorMsg("data", up)); - data = dataNames[dataLevel - up]; - if (!jsonPointer) - return data; - } - let expr = data; - const segments = jsonPointer.split("/"); - for (const segment of segments) { - if (segment) { - data = (0, codegen_1._) `${data}${(0, codegen_1.getProperty)((0, util_1.unescapeJsonPointer)(segment))}`; - expr = (0, codegen_1._) `${expr} && ${data}`; - } - } - return expr; - function errorMsg(pointerType, up) { - return `Cannot access ${pointerType} ${up} levels up, current level is ${dataLevel}`; - } -} -exports.getData = getData; -//# sourceMappingURL=index.js.map - -/***/ }), - -/***/ 8732: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.validateKeywordUsage = 
exports.validSchemaType = exports.funcKeywordCode = exports.macroKeywordCode = void 0; -const codegen_1 = __nccwpck_require__(9179); -const names_1 = __nccwpck_require__(50); -const code_1 = __nccwpck_require__(4205); -const errors_1 = __nccwpck_require__(6150); -function macroKeywordCode(cxt, def) { - const { gen, keyword, schema, parentSchema, it } = cxt; - const macroSchema = def.macro.call(it.self, schema, parentSchema, it); - const schemaRef = useKeyword(gen, keyword, macroSchema); - if (it.opts.validateSchema !== false) - it.self.validateSchema(macroSchema, true); - const valid = gen.name("valid"); - cxt.subschema({ - schema: macroSchema, - schemaPath: codegen_1.nil, - errSchemaPath: `${it.errSchemaPath}/${keyword}`, - topSchemaRef: schemaRef, - compositeRule: true, - }, valid); - cxt.pass(valid, () => cxt.error(true)); -} -exports.macroKeywordCode = macroKeywordCode; -function funcKeywordCode(cxt, def) { - var _a; - const { gen, keyword, schema, parentSchema, $data, it } = cxt; - checkAsyncKeyword(it, def); - const validate = !$data && def.compile ? def.compile.call(it.self, schema, parentSchema, it) : def.validate; - const validateRef = useKeyword(gen, keyword, validate); - const valid = gen.let("valid"); - cxt.block$data(valid, validateKeyword); - cxt.ok((_a = def.valid) !== null && _a !== void 0 ? _a : valid); - function validateKeyword() { - if (def.errors === false) { - assignValid(); - if (def.modifying) - modifyData(cxt); - reportErrs(() => cxt.error()); - } - else { - const ruleErrs = def.async ? validateAsync() : validateSync(); - if (def.modifying) - modifyData(cxt); - reportErrs(() => addErrs(cxt, ruleErrs)); - } - } - function validateAsync() { - const ruleErrs = gen.let("ruleErrs", null); - gen.try(() => assignValid((0, codegen_1._) `await `), (e) => gen.assign(valid, false).if((0, codegen_1._) `${e} instanceof ${it.ValidationError}`, () => gen.assign(ruleErrs, (0, codegen_1._) `${e}.errors`), () => gen.throw(e))); - return ruleErrs; - } - function validateSync() { - const validateErrs = (0, codegen_1._) `${validateRef}.errors`; - gen.assign(validateErrs, null); - assignValid(codegen_1.nil); - return validateErrs; - } - function assignValid(_await = def.async ? (0, codegen_1._) `await ` : codegen_1.nil) { - const passCxt = it.opts.passContext ? names_1.default.this : names_1.default.self; - const passSchema = !(("compile" in def && !$data) || def.schema === false); - gen.assign(valid, (0, codegen_1._) `${_await}${(0, code_1.callValidateCode)(cxt, validateRef, passCxt, passSchema)}`, def.modifying); - } - function reportErrs(errors) { - var _a; - gen.if((0, codegen_1.not)((_a = def.valid) !== null && _a !== void 0 ? _a : valid), errors); - } -} -exports.funcKeywordCode = funcKeywordCode; -function modifyData(cxt) { - const { gen, data, it } = cxt; - gen.if(it.parentData, () => gen.assign(data, (0, codegen_1._) `${it.parentData}[${it.parentDataProperty}]`)); -} -function addErrs(cxt, errs) { - const { gen } = cxt; - gen.if((0, codegen_1._) `Array.isArray(${errs})`, () => { - gen - .assign(names_1.default.vErrors, (0, codegen_1._) `${names_1.default.vErrors} === null ? 
${errs} : ${names_1.default.vErrors}.concat(${errs})`) - .assign(names_1.default.errors, (0, codegen_1._) `${names_1.default.vErrors}.length`); - (0, errors_1.extendErrors)(cxt); - }, () => cxt.error()); -} -function checkAsyncKeyword({ schemaEnv }, def) { - if (def.async && !schemaEnv.$async) - throw new Error("async keyword in sync schema"); -} -function useKeyword(gen, keyword, result) { - if (result === undefined) - throw new Error(`keyword "${keyword}" failed to compile`); - return gen.scopeValue("keyword", typeof result == "function" ? { ref: result } : { ref: result, code: (0, codegen_1.stringify)(result) }); -} -function validSchemaType(schema, schemaType, allowUndefined = false) { - // TODO add tests - return (!schemaType.length || - schemaType.some((st) => st === "array" - ? Array.isArray(schema) - : st === "object" - ? schema && typeof schema == "object" && !Array.isArray(schema) - : typeof schema == st || (allowUndefined && typeof schema == "undefined"))); -} -exports.validSchemaType = validSchemaType; -function validateKeywordUsage({ schema, opts, self, errSchemaPath }, def, keyword) { - /* istanbul ignore if */ - if (Array.isArray(def.keyword) ? !def.keyword.includes(keyword) : def.keyword !== keyword) { - throw new Error("ajv implementation error"); - } - const deps = def.dependencies; - if (deps === null || deps === void 0 ? void 0 : deps.some((kwd) => !Object.prototype.hasOwnProperty.call(schema, kwd))) { - throw new Error(`parent schema must have dependencies of ${keyword}: ${deps.join(",")}`); - } - if (def.validateSchema) { - const valid = def.validateSchema(schema[keyword]); - if (!valid) { - const msg = `keyword "${keyword}" value is invalid at path "${errSchemaPath}": ` + - self.errorsText(def.validateSchema.errors); - if (opts.validateSchema === "log") - self.logger.error(msg); - else - throw new Error(msg); - } - } -} -exports.validateKeywordUsage = validateKeywordUsage; -//# sourceMappingURL=keyword.js.map - -/***/ }), - -/***/ 3896: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.extendSubschemaMode = exports.extendSubschemaData = exports.getSubschema = void 0; -const codegen_1 = __nccwpck_require__(9179); -const util_1 = __nccwpck_require__(3439); -function getSubschema(it, { keyword, schemaProp, schema, schemaPath, errSchemaPath, topSchemaRef }) { - if (keyword !== undefined && schema !== undefined) { - throw new Error('both "keyword" and "schema" passed, only one allowed'); - } - if (keyword !== undefined) { - const sch = it.schema[keyword]; - return schemaProp === undefined - ? 
{ - schema: sch, - schemaPath: (0, codegen_1._) `${it.schemaPath}${(0, codegen_1.getProperty)(keyword)}`, - errSchemaPath: `${it.errSchemaPath}/${keyword}`, - } - : { - schema: sch[schemaProp], - schemaPath: (0, codegen_1._) `${it.schemaPath}${(0, codegen_1.getProperty)(keyword)}${(0, codegen_1.getProperty)(schemaProp)}`, - errSchemaPath: `${it.errSchemaPath}/${keyword}/${(0, util_1.escapeFragment)(schemaProp)}`, - }; - } - if (schema !== undefined) { - if (schemaPath === undefined || errSchemaPath === undefined || topSchemaRef === undefined) { - throw new Error('"schemaPath", "errSchemaPath" and "topSchemaRef" are required with "schema"'); - } - return { - schema, - schemaPath, - topSchemaRef, - errSchemaPath, - }; - } - throw new Error('either "keyword" or "schema" must be passed'); -} -exports.getSubschema = getSubschema; -function extendSubschemaData(subschema, it, { dataProp, dataPropType: dpType, data, dataTypes, propertyName }) { - if (data !== undefined && dataProp !== undefined) { - throw new Error('both "data" and "dataProp" passed, only one allowed'); - } - const { gen } = it; - if (dataProp !== undefined) { - const { errorPath, dataPathArr, opts } = it; - const nextData = gen.let("data", (0, codegen_1._) `${it.data}${(0, codegen_1.getProperty)(dataProp)}`, true); - dataContextProps(nextData); - subschema.errorPath = (0, codegen_1.str) `${errorPath}${(0, util_1.getErrorPath)(dataProp, dpType, opts.jsPropertySyntax)}`; - subschema.parentDataProperty = (0, codegen_1._) `${dataProp}`; - subschema.dataPathArr = [...dataPathArr, subschema.parentDataProperty]; - } - if (data !== undefined) { - const nextData = data instanceof codegen_1.Name ? data : gen.let("data", data, true); // replaceable if used once? - dataContextProps(nextData); - if (propertyName !== undefined) - subschema.propertyName = propertyName; - // TODO something is possibly wrong here with not changing parentDataProperty and not appending dataPathArr - } - if (dataTypes) - subschema.dataTypes = dataTypes; - function dataContextProps(_nextData) { - subschema.data = _nextData; - subschema.dataLevel = it.dataLevel + 1; - subschema.dataTypes = []; - it.definedProperties = new Set(); - subschema.parentData = it.data; - subschema.dataNames = [...it.dataNames, _nextData]; - } -} -exports.extendSubschemaData = extendSubschemaData; -function extendSubschemaMode(subschema, { jtdDiscriminator, jtdMetadata, compositeRule, createErrors, allErrors }) { - if (compositeRule !== undefined) - subschema.compositeRule = compositeRule; - if (createErrors !== undefined) - subschema.createErrors = createErrors; - if (allErrors !== undefined) - subschema.allErrors = allErrors; - subschema.jtdDiscriminator = jtdDiscriminator; // not inherited - subschema.jtdMetadata = jtdMetadata; // not inherited -} -exports.extendSubschemaMode = extendSubschemaMode; -//# sourceMappingURL=subschema.js.map - -/***/ }), - -/***/ 2685: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.CodeGen = exports.Name = exports.nil = exports.stringify = exports.str = exports._ = exports.KeywordCxt = void 0; -var validate_1 = __nccwpck_require__(8955); -Object.defineProperty(exports, "KeywordCxt", ({ enumerable: true, get: function () { return validate_1.KeywordCxt; } })); -var codegen_1 = __nccwpck_require__(9179); -Object.defineProperty(exports, "_", ({ enumerable: true, get: function () { return codegen_1._; } })); -Object.defineProperty(exports, "str", ({ 
enumerable: true, get: function () { return codegen_1.str; } })); -Object.defineProperty(exports, "stringify", ({ enumerable: true, get: function () { return codegen_1.stringify; } })); -Object.defineProperty(exports, "nil", ({ enumerable: true, get: function () { return codegen_1.nil; } })); -Object.defineProperty(exports, "Name", ({ enumerable: true, get: function () { return codegen_1.Name; } })); -Object.defineProperty(exports, "CodeGen", ({ enumerable: true, get: function () { return codegen_1.CodeGen; } })); -const validation_error_1 = __nccwpck_require__(7616); -const ref_error_1 = __nccwpck_require__(8190); -const rules_1 = __nccwpck_require__(240); -const compile_1 = __nccwpck_require__(813); -const codegen_2 = __nccwpck_require__(9179); -const resolve_1 = __nccwpck_require__(6646); -const dataType_1 = __nccwpck_require__(7725); -const util_1 = __nccwpck_require__(3439); -const $dataRefSchema = __nccwpck_require__(2228); -const uri_1 = __nccwpck_require__(661); -const defaultRegExp = (str, flags) => new RegExp(str, flags); -defaultRegExp.code = "new RegExp"; -const META_IGNORE_OPTIONS = ["removeAdditional", "useDefaults", "coerceTypes"]; -const EXT_SCOPE_NAMES = new Set([ - "validate", - "serialize", - "parse", - "wrapper", - "root", - "schema", - "keyword", - "pattern", - "formats", - "validate$data", - "func", - "obj", - "Error", -]); -const removedOptions = { - errorDataPath: "", - format: "`validateFormats: false` can be used instead.", - nullable: '"nullable" keyword is supported by default.', - jsonPointers: "Deprecated jsPropertySyntax can be used instead.", - extendRefs: "Deprecated ignoreKeywordsWithRef can be used instead.", - missingRefs: "Pass empty schema with $id that should be ignored to ajv.addSchema.", - processCode: "Use option `code: {process: (code, schemaEnv: object) => string}`", - sourceCode: "Use option `code: {source: true}`", - strictDefaults: "It is default now, see option `strict`.", - strictKeywords: "It is default now, see option `strict`.", - uniqueItems: '"uniqueItems" keyword is always validated.', - unknownFormats: "Disable strict mode or pass `true` to `ajv.addFormat` (or `formats` option).", - cache: "Map is used as cache, schema object as key.", - serialize: "Map is used as cache, schema object as key.", - ajvErrors: "It is default now.", -}; -const deprecatedOptions = { - ignoreKeywordsWithRef: "", - jsPropertySyntax: "", - unicode: '"minLength"/"maxLength" account for unicode characters by default.', -}; -const MAX_EXPRESSION = 200; -// eslint-disable-next-line complexity -function requiredOptions(o) { - var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _0; - const s = o.strict; - const _optz = (_a = o.code) === null || _a === void 0 ? void 0 : _a.optimize; - const optimize = _optz === true || _optz === undefined ? 1 : _optz || 0; - const regExp = (_c = (_b = o.code) === null || _b === void 0 ? void 0 : _b.regExp) !== null && _c !== void 0 ? _c : defaultRegExp; - const uriResolver = (_d = o.uriResolver) !== null && _d !== void 0 ? _d : uri_1.default; - return { - strictSchema: (_f = (_e = o.strictSchema) !== null && _e !== void 0 ? _e : s) !== null && _f !== void 0 ? _f : true, - strictNumbers: (_h = (_g = o.strictNumbers) !== null && _g !== void 0 ? _g : s) !== null && _h !== void 0 ? _h : true, - strictTypes: (_k = (_j = o.strictTypes) !== null && _j !== void 0 ? _j : s) !== null && _k !== void 0 ? _k : "log", - strictTuples: (_m = (_l = o.strictTuples) !== null && _l !== void 0 ? 
_l : s) !== null && _m !== void 0 ? _m : "log", - strictRequired: (_p = (_o = o.strictRequired) !== null && _o !== void 0 ? _o : s) !== null && _p !== void 0 ? _p : false, - code: o.code ? { ...o.code, optimize, regExp } : { optimize, regExp }, - loopRequired: (_q = o.loopRequired) !== null && _q !== void 0 ? _q : MAX_EXPRESSION, - loopEnum: (_r = o.loopEnum) !== null && _r !== void 0 ? _r : MAX_EXPRESSION, - meta: (_s = o.meta) !== null && _s !== void 0 ? _s : true, - messages: (_t = o.messages) !== null && _t !== void 0 ? _t : true, - inlineRefs: (_u = o.inlineRefs) !== null && _u !== void 0 ? _u : true, - schemaId: (_v = o.schemaId) !== null && _v !== void 0 ? _v : "$id", - addUsedSchema: (_w = o.addUsedSchema) !== null && _w !== void 0 ? _w : true, - validateSchema: (_x = o.validateSchema) !== null && _x !== void 0 ? _x : true, - validateFormats: (_y = o.validateFormats) !== null && _y !== void 0 ? _y : true, - unicodeRegExp: (_z = o.unicodeRegExp) !== null && _z !== void 0 ? _z : true, - int32range: (_0 = o.int32range) !== null && _0 !== void 0 ? _0 : true, - uriResolver: uriResolver, - }; -} -class Ajv { - constructor(opts = {}) { - this.schemas = {}; - this.refs = {}; - this.formats = {}; - this._compilations = new Set(); - this._loading = {}; - this._cache = new Map(); - opts = this.opts = { ...opts, ...requiredOptions(opts) }; - const { es5, lines } = this.opts.code; - this.scope = new codegen_2.ValueScope({ scope: {}, prefixes: EXT_SCOPE_NAMES, es5, lines }); - this.logger = getLogger(opts.logger); - const formatOpt = opts.validateFormats; - opts.validateFormats = false; - this.RULES = (0, rules_1.getRules)(); - checkOptions.call(this, removedOptions, opts, "NOT SUPPORTED"); - checkOptions.call(this, deprecatedOptions, opts, "DEPRECATED", "warn"); - this._metaOpts = getMetaSchemaOptions.call(this); - if (opts.formats) - addInitialFormats.call(this); - this._addVocabularies(); - this._addDefaultMetaSchema(); - if (opts.keywords) - addInitialKeywords.call(this, opts.keywords); - if (typeof opts.meta == "object") - this.addMetaSchema(opts.meta); - addInitialSchemas.call(this); - opts.validateFormats = formatOpt; - } - _addVocabularies() { - this.addKeyword("$async"); - } - _addDefaultMetaSchema() { - const { $data, meta, schemaId } = this.opts; - let _dataRefSchema = $dataRefSchema; - if (schemaId === "id") { - _dataRefSchema = { ...$dataRefSchema }; - _dataRefSchema.id = _dataRefSchema.$id; - delete _dataRefSchema.$id; - } - if (meta && $data) - this.addMetaSchema(_dataRefSchema, _dataRefSchema[schemaId], false); - } - defaultMeta() { - const { meta, schemaId } = this.opts; - return (this.opts.defaultMeta = typeof meta == "object" ? 
meta[schemaId] || meta : undefined); - } - validate(schemaKeyRef, // key, ref or schema object - data // to be validated - ) { - let v; - if (typeof schemaKeyRef == "string") { - v = this.getSchema(schemaKeyRef); - if (!v) - throw new Error(`no schema with key or ref "${schemaKeyRef}"`); - } - else { - v = this.compile(schemaKeyRef); - } - const valid = v(data); - if (!("$async" in v)) - this.errors = v.errors; - return valid; - } - compile(schema, _meta) { - const sch = this._addSchema(schema, _meta); - return (sch.validate || this._compileSchemaEnv(sch)); - } - compileAsync(schema, meta) { - if (typeof this.opts.loadSchema != "function") { - throw new Error("options.loadSchema should be a function"); - } - const { loadSchema } = this.opts; - return runCompileAsync.call(this, schema, meta); - async function runCompileAsync(_schema, _meta) { - await loadMetaSchema.call(this, _schema.$schema); - const sch = this._addSchema(_schema, _meta); - return sch.validate || _compileAsync.call(this, sch); - } - async function loadMetaSchema($ref) { - if ($ref && !this.getSchema($ref)) { - await runCompileAsync.call(this, { $ref }, true); - } - } - async function _compileAsync(sch) { - try { - return this._compileSchemaEnv(sch); - } - catch (e) { - if (!(e instanceof ref_error_1.default)) - throw e; - checkLoaded.call(this, e); - await loadMissingSchema.call(this, e.missingSchema); - return _compileAsync.call(this, sch); - } - } - function checkLoaded({ missingSchema: ref, missingRef }) { - if (this.refs[ref]) { - throw new Error(`AnySchema ${ref} is loaded but ${missingRef} cannot be resolved`); - } - } - async function loadMissingSchema(ref) { - const _schema = await _loadSchema.call(this, ref); - if (!this.refs[ref]) - await loadMetaSchema.call(this, _schema.$schema); - if (!this.refs[ref]) - this.addSchema(_schema, ref, meta); - } - async function _loadSchema(ref) { - const p = this._loading[ref]; - if (p) - return p; - try { - return await (this._loading[ref] = loadSchema(ref)); - } - finally { - delete this._loading[ref]; - } - } - } - // Adds schema to the instance - addSchema(schema, // If array is passed, `key` will be ignored - key, // Optional schema key. Can be passed to `validate` method instead of schema object or id/ref. One schema per instance can have empty `id` and `key`. - _meta, // true if schema is a meta-schema. Used internally, addMetaSchema should be used instead. - _validateSchema = this.opts.validateSchema // false to skip schema validation. Used internally, option validateSchema should be used instead. 
- ) { - if (Array.isArray(schema)) { - for (const sch of schema) - this.addSchema(sch, undefined, _meta, _validateSchema); - return this; - } - let id; - if (typeof schema === "object") { - const { schemaId } = this.opts; - id = schema[schemaId]; - if (id !== undefined && typeof id != "string") { - throw new Error(`schema ${schemaId} must be string`); - } - } - key = (0, resolve_1.normalizeId)(key || id); - this._checkUnique(key); - this.schemas[key] = this._addSchema(schema, _meta, key, _validateSchema, true); - return this; - } - // Add schema that will be used to validate other schemas - // options in META_IGNORE_OPTIONS are alway set to false - addMetaSchema(schema, key, // schema key - _validateSchema = this.opts.validateSchema // false to skip schema validation, can be used to override validateSchema option for meta-schema - ) { - this.addSchema(schema, key, true, _validateSchema); - return this; - } - // Validate schema against its meta-schema - validateSchema(schema, throwOrLogError) { - if (typeof schema == "boolean") - return true; - let $schema; - $schema = schema.$schema; - if ($schema !== undefined && typeof $schema != "string") { - throw new Error("$schema must be a string"); - } - $schema = $schema || this.opts.defaultMeta || this.defaultMeta(); - if (!$schema) { - this.logger.warn("meta-schema not available"); - this.errors = null; - return true; - } - const valid = this.validate($schema, schema); - if (!valid && throwOrLogError) { - const message = "schema is invalid: " + this.errorsText(); - if (this.opts.validateSchema === "log") - this.logger.error(message); - else - throw new Error(message); - } - return valid; - } - // Get compiled schema by `key` or `ref`. - // (`key` that was passed to `addSchema` or full schema reference - `schema.$id` or resolved id) - getSchema(keyRef) { - let sch; - while (typeof (sch = getSchEnv.call(this, keyRef)) == "string") - keyRef = sch; - if (sch === undefined) { - const { schemaId } = this.opts; - const root = new compile_1.SchemaEnv({ schema: {}, schemaId }); - sch = compile_1.resolveSchema.call(this, root, keyRef); - if (!sch) - return; - this.refs[keyRef] = sch; - } - return (sch.validate || this._compileSchemaEnv(sch)); - } - // Remove cached schema(s). - // If no parameter is passed all schemas but meta-schemas are removed. - // If RegExp is passed all schemas with key/id matching pattern but meta-schemas are removed. - // Even if schema is referenced by other schemas it still can be removed as other schemas have local references. 
- removeSchema(schemaKeyRef) { - if (schemaKeyRef instanceof RegExp) { - this._removeAllSchemas(this.schemas, schemaKeyRef); - this._removeAllSchemas(this.refs, schemaKeyRef); - return this; - } - switch (typeof schemaKeyRef) { - case "undefined": - this._removeAllSchemas(this.schemas); - this._removeAllSchemas(this.refs); - this._cache.clear(); - return this; - case "string": { - const sch = getSchEnv.call(this, schemaKeyRef); - if (typeof sch == "object") - this._cache.delete(sch.schema); - delete this.schemas[schemaKeyRef]; - delete this.refs[schemaKeyRef]; - return this; - } - case "object": { - const cacheKey = schemaKeyRef; - this._cache.delete(cacheKey); - let id = schemaKeyRef[this.opts.schemaId]; - if (id) { - id = (0, resolve_1.normalizeId)(id); - delete this.schemas[id]; - delete this.refs[id]; - } - return this; - } - default: - throw new Error("ajv.removeSchema: invalid parameter"); - } - } - // add "vocabulary" - a collection of keywords - addVocabulary(definitions) { - for (const def of definitions) - this.addKeyword(def); - return this; - } - addKeyword(kwdOrDef, def // deprecated - ) { - let keyword; - if (typeof kwdOrDef == "string") { - keyword = kwdOrDef; - if (typeof def == "object") { - this.logger.warn("these parameters are deprecated, see docs for addKeyword"); - def.keyword = keyword; - } - } - else if (typeof kwdOrDef == "object" && def === undefined) { - def = kwdOrDef; - keyword = def.keyword; - if (Array.isArray(keyword) && !keyword.length) { - throw new Error("addKeywords: keyword must be string or non-empty array"); - } - } - else { - throw new Error("invalid addKeywords parameters"); - } - checkKeyword.call(this, keyword, def); - if (!def) { - (0, util_1.eachItem)(keyword, (kwd) => addRule.call(this, kwd)); - return this; - } - keywordMetaschema.call(this, def); - const definition = { - ...def, - type: (0, dataType_1.getJSONTypes)(def.type), - schemaType: (0, dataType_1.getJSONTypes)(def.schemaType), - }; - (0, util_1.eachItem)(keyword, definition.type.length === 0 - ? (k) => addRule.call(this, k, definition) - : (k) => definition.type.forEach((t) => addRule.call(this, k, definition, t))); - return this; - } - getKeyword(keyword) { - const rule = this.RULES.all[keyword]; - return typeof rule == "object" ? 
rule.definition : !!rule; - } - // Remove keyword - removeKeyword(keyword) { - // TODO return type should be Ajv - const { RULES } = this; - delete RULES.keywords[keyword]; - delete RULES.all[keyword]; - for (const group of RULES.rules) { - const i = group.rules.findIndex((rule) => rule.keyword === keyword); - if (i >= 0) - group.rules.splice(i, 1); - } - return this; - } - // Add format - addFormat(name, format) { - if (typeof format == "string") - format = new RegExp(format); - this.formats[name] = format; - return this; - } - errorsText(errors = this.errors, // optional array of validation errors - { separator = ", ", dataVar = "data" } = {} // optional options with properties `separator` and `dataVar` - ) { - if (!errors || errors.length === 0) - return "No errors"; - return errors - .map((e) => `${dataVar}${e.instancePath} ${e.message}`) - .reduce((text, msg) => text + separator + msg); - } - $dataMetaSchema(metaSchema, keywordsJsonPointers) { - const rules = this.RULES.all; - metaSchema = JSON.parse(JSON.stringify(metaSchema)); - for (const jsonPointer of keywordsJsonPointers) { - const segments = jsonPointer.split("/").slice(1); // first segment is an empty string - let keywords = metaSchema; - for (const seg of segments) - keywords = keywords[seg]; - for (const key in rules) { - const rule = rules[key]; - if (typeof rule != "object") - continue; - const { $data } = rule.definition; - const schema = keywords[key]; - if ($data && schema) - keywords[key] = schemaOrData(schema); - } - } - return metaSchema; - } - _removeAllSchemas(schemas, regex) { - for (const keyRef in schemas) { - const sch = schemas[keyRef]; - if (!regex || regex.test(keyRef)) { - if (typeof sch == "string") { - delete schemas[keyRef]; - } - else if (sch && !sch.meta) { - this._cache.delete(sch.schema); - delete schemas[keyRef]; - } - } - } - } - _addSchema(schema, meta, baseId, validateSchema = this.opts.validateSchema, addSchema = this.opts.addUsedSchema) { - let id; - const { schemaId } = this.opts; - if (typeof schema == "object") { - id = schema[schemaId]; - } - else { - if (this.opts.jtd) - throw new Error("schema must be object"); - else if (typeof schema != "boolean") - throw new Error("schema must be object or boolean"); - } - let sch = this._cache.get(schema); - if (sch !== undefined) - return sch; - baseId = (0, resolve_1.normalizeId)(id || baseId); - const localRefs = resolve_1.getSchemaRefs.call(this, schema, baseId); - sch = new compile_1.SchemaEnv({ schema, schemaId, meta, baseId, localRefs }); - this._cache.set(sch.schema, sch); - if (addSchema && !baseId.startsWith("#")) { - // TODO atm it is allowed to overwrite schemas without id (instead of not adding them) - if (baseId) - this._checkUnique(baseId); - this.refs[baseId] = sch; - } - if (validateSchema) - this.validateSchema(schema, true); - return sch; - } - _checkUnique(id) { - if (this.schemas[id] || this.refs[id]) { - throw new Error(`schema with key or id "${id}" already exists`); - } - } - _compileSchemaEnv(sch) { - if (sch.meta) - this._compileMetaSchema(sch); - else - compile_1.compileSchema.call(this, sch); - /* istanbul ignore if */ - if (!sch.validate) - throw new Error("ajv implementation error"); - return sch.validate; - } - _compileMetaSchema(sch) { - const currentOpts = this.opts; - this.opts = this._metaOpts; - try { - compile_1.compileSchema.call(this, sch); - } - finally { - this.opts = currentOpts; - } - } -} -exports.default = Ajv; -Ajv.ValidationError = validation_error_1.default; -Ajv.MissingRefError = ref_error_1.default; 
-function checkOptions(checkOpts, options, msg, log = "error") { - for (const key in checkOpts) { - const opt = key; - if (opt in options) - this.logger[log](`${msg}: option ${key}. ${checkOpts[opt]}`); - } -} -function getSchEnv(keyRef) { - keyRef = (0, resolve_1.normalizeId)(keyRef); // TODO tests fail without this line - return this.schemas[keyRef] || this.refs[keyRef]; -} -function addInitialSchemas() { - const optsSchemas = this.opts.schemas; - if (!optsSchemas) - return; - if (Array.isArray(optsSchemas)) - this.addSchema(optsSchemas); - else - for (const key in optsSchemas) - this.addSchema(optsSchemas[key], key); -} -function addInitialFormats() { - for (const name in this.opts.formats) { - const format = this.opts.formats[name]; - if (format) - this.addFormat(name, format); - } -} -function addInitialKeywords(defs) { - if (Array.isArray(defs)) { - this.addVocabulary(defs); - return; - } - this.logger.warn("keywords option as map is deprecated, pass array"); - for (const keyword in defs) { - const def = defs[keyword]; - if (!def.keyword) - def.keyword = keyword; - this.addKeyword(def); - } -} -function getMetaSchemaOptions() { - const metaOpts = { ...this.opts }; - for (const opt of META_IGNORE_OPTIONS) - delete metaOpts[opt]; - return metaOpts; -} -const noLogs = { log() { }, warn() { }, error() { } }; -function getLogger(logger) { - if (logger === false) - return noLogs; - if (logger === undefined) - return console; - if (logger.log && logger.warn && logger.error) - return logger; - throw new Error("logger must implement log, warn and error methods"); -} -const KEYWORD_NAME = /^[a-z_$][a-z0-9_$:-]*$/i; -function checkKeyword(keyword, def) { - const { RULES } = this; - (0, util_1.eachItem)(keyword, (kwd) => { - if (RULES.keywords[kwd]) - throw new Error(`Keyword ${kwd} is already defined`); - if (!KEYWORD_NAME.test(kwd)) - throw new Error(`Keyword ${kwd} has invalid name`); - }); - if (!def) - return; - if (def.$data && !("code" in def || "validate" in def)) { - throw new Error('$data keyword must have "code" or "validate" function'); - } -} -function addRule(keyword, definition, dataType) { - var _a; - const post = definition === null || definition === void 0 ? void 0 : definition.post; - if (dataType && post) - throw new Error('keyword with "post" flag cannot have "type"'); - const { RULES } = this; - let ruleGroup = post ? RULES.post : RULES.rules.find(({ type: t }) => t === dataType); - if (!ruleGroup) { - ruleGroup = { type: dataType, rules: [] }; - RULES.rules.push(ruleGroup); - } - RULES.keywords[keyword] = true; - if (!definition) - return; - const rule = { - keyword, - definition: { - ...definition, - type: (0, dataType_1.getJSONTypes)(definition.type), - schemaType: (0, dataType_1.getJSONTypes)(definition.schemaType), - }, - }; - if (definition.before) - addBeforeRule.call(this, ruleGroup, rule, definition.before); - else - ruleGroup.rules.push(rule); - RULES.all[keyword] = rule; - (_a = definition.implements) === null || _a === void 0 ? 
void 0 : _a.forEach((kwd) => this.addKeyword(kwd)); -} -function addBeforeRule(ruleGroup, rule, before) { - const i = ruleGroup.rules.findIndex((_rule) => _rule.keyword === before); - if (i >= 0) { - ruleGroup.rules.splice(i, 0, rule); - } - else { - ruleGroup.rules.push(rule); - this.logger.warn(`rule ${before} is not defined`); - } -} -function keywordMetaschema(def) { - let { metaSchema } = def; - if (metaSchema === undefined) - return; - if (def.$data && this.opts.$data) - metaSchema = schemaOrData(metaSchema); - def.validateSchema = this.compile(metaSchema, true); -} -const $dataRef = { - $ref: "https://raw.githubusercontent.com/ajv-validator/ajv/master/lib/refs/data.json#", -}; -function schemaOrData(schema) { - return { anyOf: [schema, $dataRef] }; -} -//# sourceMappingURL=core.js.map - -/***/ }), - -/***/ 3809: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -// https://github.com/ajv-validator/ajv/issues/889 -const equal = __nccwpck_require__(8206); -equal.code = 'require("ajv/dist/runtime/equal").default'; -exports.default = equal; -//# sourceMappingURL=equal.js.map - -/***/ }), - -/***/ 2470: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -// https://mathiasbynens.be/notes/javascript-encoding -// https://github.com/bestiejs/punycode.js - punycode.ucs2.decode -function ucs2length(str) { - const len = str.length; - let length = 0; - let pos = 0; - let value; - while (pos < len) { - length++; - value = str.charCodeAt(pos++); - if (value >= 0xd800 && value <= 0xdbff && pos < len) { - // high surrogate, and there is a next character - value = str.charCodeAt(pos); - if ((value & 0xfc00) === 0xdc00) - pos++; // low surrogate - } - } - return length; -} -exports.default = ucs2length; -ucs2length.code = 'require("ajv/dist/runtime/ucs2length").default'; -//# sourceMappingURL=ucs2length.js.map - -/***/ }), - -/***/ 661: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const uri = __nccwpck_require__(20); -uri.code = 'require("ajv/dist/runtime/uri").default'; -exports.default = uri; -//# sourceMappingURL=uri.js.map - -/***/ }), - -/***/ 7616: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -class ValidationError extends Error { - constructor(errors) { - super("validation failed"); - this.errors = errors; - this.ajv = this.validation = true; - } -} -exports.default = ValidationError; -//# sourceMappingURL=validation_error.js.map - -/***/ }), - -/***/ 4720: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.validateAdditionalItems = void 0; -const codegen_1 = __nccwpck_require__(9179); -const util_1 = __nccwpck_require__(3439); -const error = { - message: ({ params: { len } }) => (0, codegen_1.str) `must NOT have more than ${len} items`, - params: ({ params: { len } }) => (0, codegen_1._) `{limit: ${len}}`, -}; -const def = { - keyword: "additionalItems", - type: "array", - schemaType: ["boolean", "object"], - before: "uniqueItems", - error, - code(cxt) { - const { parentSchema, it } = cxt; - const { items } = parentSchema; - if (!Array.isArray(items)) { - (0, util_1.checkStrictMode)(it, '"additionalItems" 
is ignored when "items" is not an array of schemas'); - return; - } - validateAdditionalItems(cxt, items); - }, -}; -function validateAdditionalItems(cxt, items) { - const { gen, schema, data, keyword, it } = cxt; - it.items = true; - const len = gen.const("len", (0, codegen_1._) `${data}.length`); - if (schema === false) { - cxt.setParams({ len: items.length }); - cxt.pass((0, codegen_1._) `${len} <= ${items.length}`); - } - else if (typeof schema == "object" && !(0, util_1.alwaysValidSchema)(it, schema)) { - const valid = gen.var("valid", (0, codegen_1._) `${len} <= ${items.length}`); // TODO var - gen.if((0, codegen_1.not)(valid), () => validateItems(valid)); - cxt.ok(valid); - } - function validateItems(valid) { - gen.forRange("i", items.length, len, (i) => { - cxt.subschema({ keyword, dataProp: i, dataPropType: util_1.Type.Num }, valid); - if (!it.allErrors) - gen.if((0, codegen_1.not)(valid), () => gen.break()); - }); - } -} -exports.validateAdditionalItems = validateAdditionalItems; -exports.default = def; -//# sourceMappingURL=additionalItems.js.map - -/***/ }), - -/***/ 3481: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const code_1 = __nccwpck_require__(4205); -const codegen_1 = __nccwpck_require__(9179); -const names_1 = __nccwpck_require__(50); -const util_1 = __nccwpck_require__(3439); -const error = { - message: "must NOT have additional properties", - params: ({ params }) => (0, codegen_1._) `{additionalProperty: ${params.additionalProperty}}`, -}; -const def = { - keyword: "additionalProperties", - type: ["object"], - schemaType: ["boolean", "object"], - allowUndefined: true, - trackErrors: true, - error, - code(cxt) { - const { gen, schema, parentSchema, data, errsCount, it } = cxt; - /* istanbul ignore if */ - if (!errsCount) - throw new Error("ajv implementation error"); - const { allErrors, opts } = it; - it.props = true; - if (opts.removeAdditional !== "all" && (0, util_1.alwaysValidSchema)(it, schema)) - return; - const props = (0, code_1.allSchemaProperties)(parentSchema.properties); - const patProps = (0, code_1.allSchemaProperties)(parentSchema.patternProperties); - checkAdditionalProperties(); - cxt.ok((0, codegen_1._) `${errsCount} === ${names_1.default.errors}`); - function checkAdditionalProperties() { - gen.forIn("key", data, (key) => { - if (!props.length && !patProps.length) - additionalPropertyCode(key); - else - gen.if(isAdditional(key), () => additionalPropertyCode(key)); - }); - } - function isAdditional(key) { - let definedProp; - if (props.length > 8) { - // TODO maybe an option instead of hard-coded 8? 
- const propsSchema = (0, util_1.schemaRefOrVal)(it, parentSchema.properties, "properties"); - definedProp = (0, code_1.isOwnProperty)(gen, propsSchema, key); - } - else if (props.length) { - definedProp = (0, codegen_1.or)(...props.map((p) => (0, codegen_1._) `${key} === ${p}`)); - } - else { - definedProp = codegen_1.nil; - } - if (patProps.length) { - definedProp = (0, codegen_1.or)(definedProp, ...patProps.map((p) => (0, codegen_1._) `${(0, code_1.usePattern)(cxt, p)}.test(${key})`)); - } - return (0, codegen_1.not)(definedProp); - } - function deleteAdditional(key) { - gen.code((0, codegen_1._) `delete ${data}[${key}]`); - } - function additionalPropertyCode(key) { - if (opts.removeAdditional === "all" || (opts.removeAdditional && schema === false)) { - deleteAdditional(key); - return; - } - if (schema === false) { - cxt.setParams({ additionalProperty: key }); - cxt.error(); - if (!allErrors) - gen.break(); - return; - } - if (typeof schema == "object" && !(0, util_1.alwaysValidSchema)(it, schema)) { - const valid = gen.name("valid"); - if (opts.removeAdditional === "failing") { - applyAdditionalSchema(key, valid, false); - gen.if((0, codegen_1.not)(valid), () => { - cxt.reset(); - deleteAdditional(key); - }); - } - else { - applyAdditionalSchema(key, valid); - if (!allErrors) - gen.if((0, codegen_1.not)(valid), () => gen.break()); - } - } - } - function applyAdditionalSchema(key, valid, errors) { - const subschema = { - keyword: "additionalProperties", - dataProp: key, - dataPropType: util_1.Type.Str, - }; - if (errors === false) { - Object.assign(subschema, { - compositeRule: true, - createErrors: false, - allErrors: false, - }); - } - cxt.subschema(subschema, valid); - } - }, -}; -exports.default = def; -//# sourceMappingURL=additionalProperties.js.map - -/***/ }), - -/***/ 8406: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const util_1 = __nccwpck_require__(3439); -const def = { - keyword: "allOf", - schemaType: "array", - code(cxt) { - const { gen, schema, it } = cxt; - /* istanbul ignore if */ - if (!Array.isArray(schema)) - throw new Error("ajv implementation error"); - const valid = gen.name("valid"); - schema.forEach((sch, i) => { - if ((0, util_1.alwaysValidSchema)(it, sch)) - return; - const schCxt = cxt.subschema({ keyword: "allOf", schemaProp: i }, valid); - cxt.ok(valid); - cxt.mergeEvaluated(schCxt); - }); - }, -}; -exports.default = def; -//# sourceMappingURL=allOf.js.map - -/***/ }), - -/***/ 8168: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const code_1 = __nccwpck_require__(4205); -const def = { - keyword: "anyOf", - schemaType: "array", - trackErrors: true, - code: code_1.validateUnion, - error: { message: "must match a schema in anyOf" }, -}; -exports.default = def; -//# sourceMappingURL=anyOf.js.map - -/***/ }), - -/***/ 9535: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const codegen_1 = __nccwpck_require__(9179); -const util_1 = __nccwpck_require__(3439); -const error = { - message: ({ params: { min, max } }) => max === undefined - ? 
(0, codegen_1.str) `must contain at least ${min} valid item(s)` - : (0, codegen_1.str) `must contain at least ${min} and no more than ${max} valid item(s)`, - params: ({ params: { min, max } }) => max === undefined ? (0, codegen_1._) `{minContains: ${min}}` : (0, codegen_1._) `{minContains: ${min}, maxContains: ${max}}`, -}; -const def = { - keyword: "contains", - type: "array", - schemaType: ["object", "boolean"], - before: "uniqueItems", - trackErrors: true, - error, - code(cxt) { - const { gen, schema, parentSchema, data, it } = cxt; - let min; - let max; - const { minContains, maxContains } = parentSchema; - if (it.opts.next) { - min = minContains === undefined ? 1 : minContains; - max = maxContains; - } - else { - min = 1; - } - const len = gen.const("len", (0, codegen_1._) `${data}.length`); - cxt.setParams({ min, max }); - if (max === undefined && min === 0) { - (0, util_1.checkStrictMode)(it, `"minContains" == 0 without "maxContains": "contains" keyword ignored`); - return; - } - if (max !== undefined && min > max) { - (0, util_1.checkStrictMode)(it, `"minContains" > "maxContains" is always invalid`); - cxt.fail(); - return; - } - if ((0, util_1.alwaysValidSchema)(it, schema)) { - let cond = (0, codegen_1._) `${len} >= ${min}`; - if (max !== undefined) - cond = (0, codegen_1._) `${cond} && ${len} <= ${max}`; - cxt.pass(cond); - return; - } - it.items = true; - const valid = gen.name("valid"); - if (max === undefined && min === 1) { - validateItems(valid, () => gen.if(valid, () => gen.break())); - } - else if (min === 0) { - gen.let(valid, true); - if (max !== undefined) - gen.if((0, codegen_1._) `${data}.length > 0`, validateItemsWithCount); - } - else { - gen.let(valid, false); - validateItemsWithCount(); - } - cxt.result(valid, () => cxt.reset()); - function validateItemsWithCount() { - const schValid = gen.name("_valid"); - const count = gen.let("count", 0); - validateItems(schValid, () => gen.if(schValid, () => checkLimits(count))); - } - function validateItems(_valid, block) { - gen.forRange("i", 0, len, (i) => { - cxt.subschema({ - keyword: "contains", - dataProp: i, - dataPropType: util_1.Type.Num, - compositeRule: true, - }, _valid); - block(); - }); - } - function checkLimits(count) { - gen.code((0, codegen_1._) `${count}++`); - if (max === undefined) { - gen.if((0, codegen_1._) `${count} >= ${min}`, () => gen.assign(valid, true).break()); - } - else { - gen.if((0, codegen_1._) `${count} > ${max}`, () => gen.assign(valid, false).break()); - if (min === 1) - gen.assign(valid, true); - else - gen.if((0, codegen_1._) `${count} >= ${min}`, () => gen.assign(valid, true)); - } - } - }, -}; -exports.default = def; -//# sourceMappingURL=contains.js.map - -/***/ }), - -/***/ 4611: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.validateSchemaDeps = exports.validatePropertyDeps = exports.error = void 0; -const codegen_1 = __nccwpck_require__(9179); -const util_1 = __nccwpck_require__(3439); -const code_1 = __nccwpck_require__(4205); -exports.error = { - message: ({ params: { property, depsCount, deps } }) => { - const property_ies = depsCount === 1 ? 
"property" : "properties"; - return (0, codegen_1.str) `must have ${property_ies} ${deps} when property ${property} is present`; - }, - params: ({ params: { property, depsCount, deps, missingProperty } }) => (0, codegen_1._) `{property: ${property}, - missingProperty: ${missingProperty}, - depsCount: ${depsCount}, - deps: ${deps}}`, // TODO change to reference -}; -const def = { - keyword: "dependencies", - type: "object", - schemaType: "object", - error: exports.error, - code(cxt) { - const [propDeps, schDeps] = splitDependencies(cxt); - validatePropertyDeps(cxt, propDeps); - validateSchemaDeps(cxt, schDeps); - }, -}; -function splitDependencies({ schema }) { - const propertyDeps = {}; - const schemaDeps = {}; - for (const key in schema) { - if (key === "__proto__") - continue; - const deps = Array.isArray(schema[key]) ? propertyDeps : schemaDeps; - deps[key] = schema[key]; - } - return [propertyDeps, schemaDeps]; -} -function validatePropertyDeps(cxt, propertyDeps = cxt.schema) { - const { gen, data, it } = cxt; - if (Object.keys(propertyDeps).length === 0) - return; - const missing = gen.let("missing"); - for (const prop in propertyDeps) { - const deps = propertyDeps[prop]; - if (deps.length === 0) - continue; - const hasProperty = (0, code_1.propertyInData)(gen, data, prop, it.opts.ownProperties); - cxt.setParams({ - property: prop, - depsCount: deps.length, - deps: deps.join(", "), - }); - if (it.allErrors) { - gen.if(hasProperty, () => { - for (const depProp of deps) { - (0, code_1.checkReportMissingProp)(cxt, depProp); - } - }); - } - else { - gen.if((0, codegen_1._) `${hasProperty} && (${(0, code_1.checkMissingProp)(cxt, deps, missing)})`); - (0, code_1.reportMissingProp)(cxt, missing); - gen.else(); - } - } -} -exports.validatePropertyDeps = validatePropertyDeps; -function validateSchemaDeps(cxt, schemaDeps = cxt.schema) { - const { gen, data, keyword, it } = cxt; - const valid = gen.name("valid"); - for (const prop in schemaDeps) { - if ((0, util_1.alwaysValidSchema)(it, schemaDeps[prop])) - continue; - gen.if((0, code_1.propertyInData)(gen, data, prop, it.opts.ownProperties), () => { - const schCxt = cxt.subschema({ keyword, schemaProp: prop }, valid); - cxt.mergeValidEvaluated(schCxt, valid); - }, () => gen.var(valid, true) // TODO var - ); - cxt.ok(valid); - } -} -exports.validateSchemaDeps = validateSchemaDeps; -exports.default = def; -//# sourceMappingURL=dependencies.js.map - -/***/ }), - -/***/ 7701: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const codegen_1 = __nccwpck_require__(9179); -const util_1 = __nccwpck_require__(3439); -const error = { - message: ({ params }) => (0, codegen_1.str) `must match "${params.ifClause}" schema`, - params: ({ params }) => (0, codegen_1._) `{failingKeyword: ${params.ifClause}}`, -}; -const def = { - keyword: "if", - schemaType: ["object", "boolean"], - trackErrors: true, - error, - code(cxt) { - const { gen, parentSchema, it } = cxt; - if (parentSchema.then === undefined && parentSchema.else === undefined) { - (0, util_1.checkStrictMode)(it, '"if" without "then" and "else" is ignored'); - } - const hasThen = hasSchema(it, "then"); - const hasElse = hasSchema(it, "else"); - if (!hasThen && !hasElse) - return; - const valid = gen.let("valid", true); - const schValid = gen.name("_valid"); - validateIf(); - cxt.reset(); - if (hasThen && hasElse) { - const ifClause = gen.let("ifClause"); - cxt.setParams({ ifClause }); - 
gen.if(schValid, validateClause("then", ifClause), validateClause("else", ifClause)); - } - else if (hasThen) { - gen.if(schValid, validateClause("then")); - } - else { - gen.if((0, codegen_1.not)(schValid), validateClause("else")); - } - cxt.pass(valid, () => cxt.error(true)); - function validateIf() { - const schCxt = cxt.subschema({ - keyword: "if", - compositeRule: true, - createErrors: false, - allErrors: false, - }, schValid); - cxt.mergeEvaluated(schCxt); - } - function validateClause(keyword, ifClause) { - return () => { - const schCxt = cxt.subschema({ keyword }, schValid); - gen.assign(valid, schValid); - cxt.mergeValidEvaluated(schCxt, valid); - if (ifClause) - gen.assign(ifClause, (0, codegen_1._) `${keyword}`); - else - cxt.setParams({ ifClause: keyword }); - }; - } - }, -}; -function hasSchema(it, keyword) { - const schema = it.schema[keyword]; - return schema !== undefined && !(0, util_1.alwaysValidSchema)(it, schema); -} -exports.default = def; -//# sourceMappingURL=if.js.map - -/***/ }), - -/***/ 3048: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const additionalItems_1 = __nccwpck_require__(4720); -const prefixItems_1 = __nccwpck_require__(9498); -const items_1 = __nccwpck_require__(8008); -const items2020_1 = __nccwpck_require__(9084); -const contains_1 = __nccwpck_require__(9535); -const dependencies_1 = __nccwpck_require__(4611); -const propertyNames_1 = __nccwpck_require__(2554); -const additionalProperties_1 = __nccwpck_require__(3481); -const properties_1 = __nccwpck_require__(7666); -const patternProperties_1 = __nccwpck_require__(5157); -const not_1 = __nccwpck_require__(8720); -const anyOf_1 = __nccwpck_require__(8168); -const oneOf_1 = __nccwpck_require__(6434); -const allOf_1 = __nccwpck_require__(8406); -const if_1 = __nccwpck_require__(7701); -const thenElse_1 = __nccwpck_require__(7680); -function getApplicator(draft2020 = false) { - const applicator = [ - // any - not_1.default, - anyOf_1.default, - oneOf_1.default, - allOf_1.default, - if_1.default, - thenElse_1.default, - // object - propertyNames_1.default, - additionalProperties_1.default, - dependencies_1.default, - properties_1.default, - patternProperties_1.default, - ]; - // array - if (draft2020) - applicator.push(prefixItems_1.default, items2020_1.default); - else - applicator.push(additionalItems_1.default, items_1.default); - applicator.push(contains_1.default); - return applicator; -} -exports.default = getApplicator; -//# sourceMappingURL=index.js.map - -/***/ }), - -/***/ 8008: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.validateTuple = void 0; -const codegen_1 = __nccwpck_require__(9179); -const util_1 = __nccwpck_require__(3439); -const code_1 = __nccwpck_require__(4205); -const def = { - keyword: "items", - type: "array", - schemaType: ["object", "array", "boolean"], - before: "uniqueItems", - code(cxt) { - const { schema, it } = cxt; - if (Array.isArray(schema)) - return validateTuple(cxt, "additionalItems", schema); - it.items = true; - if ((0, util_1.alwaysValidSchema)(it, schema)) - return; - cxt.ok((0, code_1.validateArray)(cxt)); - }, -}; -function validateTuple(cxt, extraItems, schArr = cxt.schema) { - const { gen, parentSchema, data, keyword, it } = cxt; - checkStrictTuple(parentSchema); - if (it.opts.unevaluated && schArr.length && it.items !== true) { - 
it.items = util_1.mergeEvaluated.items(gen, schArr.length, it.items); - } - const valid = gen.name("valid"); - const len = gen.const("len", (0, codegen_1._) `${data}.length`); - schArr.forEach((sch, i) => { - if ((0, util_1.alwaysValidSchema)(it, sch)) - return; - gen.if((0, codegen_1._) `${len} > ${i}`, () => cxt.subschema({ - keyword, - schemaProp: i, - dataProp: i, - }, valid)); - cxt.ok(valid); - }); - function checkStrictTuple(sch) { - const { opts, errSchemaPath } = it; - const l = schArr.length; - const fullTuple = l === sch.minItems && (l === sch.maxItems || sch[extraItems] === false); - if (opts.strictTuples && !fullTuple) { - const msg = `"${keyword}" is ${l}-tuple, but minItems or maxItems/${extraItems} are not specified or different at path "${errSchemaPath}"`; - (0, util_1.checkStrictMode)(it, msg, opts.strictTuples); - } - } -} -exports.validateTuple = validateTuple; -exports.default = def; -//# sourceMappingURL=items.js.map - -/***/ }), - -/***/ 9084: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const codegen_1 = __nccwpck_require__(9179); -const util_1 = __nccwpck_require__(3439); -const code_1 = __nccwpck_require__(4205); -const additionalItems_1 = __nccwpck_require__(4720); -const error = { - message: ({ params: { len } }) => (0, codegen_1.str) `must NOT have more than ${len} items`, - params: ({ params: { len } }) => (0, codegen_1._) `{limit: ${len}}`, -}; -const def = { - keyword: "items", - type: "array", - schemaType: ["object", "boolean"], - before: "uniqueItems", - error, - code(cxt) { - const { schema, parentSchema, it } = cxt; - const { prefixItems } = parentSchema; - it.items = true; - if ((0, util_1.alwaysValidSchema)(it, schema)) - return; - if (prefixItems) - (0, additionalItems_1.validateAdditionalItems)(cxt, prefixItems); - else - cxt.ok((0, code_1.validateArray)(cxt)); - }, -}; -exports.default = def; -//# sourceMappingURL=items2020.js.map - -/***/ }), - -/***/ 8720: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const util_1 = __nccwpck_require__(3439); -const def = { - keyword: "not", - schemaType: ["object", "boolean"], - trackErrors: true, - code(cxt) { - const { gen, schema, it } = cxt; - if ((0, util_1.alwaysValidSchema)(it, schema)) { - cxt.fail(); - return; - } - const valid = gen.name("valid"); - cxt.subschema({ - keyword: "not", - compositeRule: true, - createErrors: false, - allErrors: false, - }, valid); - cxt.failResult(valid, () => cxt.reset(), () => cxt.error()); - }, - error: { message: "must NOT be valid" }, -}; -exports.default = def; -//# sourceMappingURL=not.js.map - -/***/ }), - -/***/ 6434: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const codegen_1 = __nccwpck_require__(9179); -const util_1 = __nccwpck_require__(3439); -const error = { - message: "must match exactly one schema in oneOf", - params: ({ params }) => (0, codegen_1._) `{passingSchemas: ${params.passing}}`, -}; -const def = { - keyword: "oneOf", - schemaType: "array", - trackErrors: true, - error, - code(cxt) { - const { gen, schema, parentSchema, it } = cxt; - /* istanbul ignore if */ - if (!Array.isArray(schema)) - throw new Error("ajv implementation error"); - if (it.opts.discriminator && parentSchema.discriminator) - return; - const schArr = 
schema; - const valid = gen.let("valid", false); - const passing = gen.let("passing", null); - const schValid = gen.name("_valid"); - cxt.setParams({ passing }); - // TODO possibly fail straight away (with warning or exception) if there are two empty always valid schemas - gen.block(validateOneOf); - cxt.result(valid, () => cxt.reset(), () => cxt.error(true)); - function validateOneOf() { - schArr.forEach((sch, i) => { - let schCxt; - if ((0, util_1.alwaysValidSchema)(it, sch)) { - gen.var(schValid, true); - } - else { - schCxt = cxt.subschema({ - keyword: "oneOf", - schemaProp: i, - compositeRule: true, - }, schValid); - } - if (i > 0) { - gen - .if((0, codegen_1._) `${schValid} && ${valid}`) - .assign(valid, false) - .assign(passing, (0, codegen_1._) `[${passing}, ${i}]`) - .else(); - } - gen.if(schValid, () => { - gen.assign(valid, true); - gen.assign(passing, i); - if (schCxt) - cxt.mergeEvaluated(schCxt, codegen_1.Name); - }); - }); - } - }, -}; -exports.default = def; -//# sourceMappingURL=oneOf.js.map - -/***/ }), - -/***/ 5157: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const code_1 = __nccwpck_require__(4205); -const codegen_1 = __nccwpck_require__(9179); -const util_1 = __nccwpck_require__(3439); -const util_2 = __nccwpck_require__(3439); -const def = { - keyword: "patternProperties", - type: "object", - schemaType: "object", - code(cxt) { - const { gen, schema, data, parentSchema, it } = cxt; - const { opts } = it; - const patterns = (0, code_1.allSchemaProperties)(schema); - const alwaysValidPatterns = patterns.filter((p) => (0, util_1.alwaysValidSchema)(it, schema[p])); - if (patterns.length === 0 || - (alwaysValidPatterns.length === patterns.length && - (!it.opts.unevaluated || it.props === true))) { - return; - } - const checkProperties = opts.strictSchema && !opts.allowMatchingProperties && parentSchema.properties; - const valid = gen.name("valid"); - if (it.props !== true && !(it.props instanceof codegen_1.Name)) { - it.props = (0, util_2.evaluatedPropsToName)(gen, it.props); - } - const { props } = it; - validatePatternProperties(); - function validatePatternProperties() { - for (const pat of patterns) { - if (checkProperties) - checkMatchingProperties(pat); - if (it.allErrors) { - validateProperties(pat); - } - else { - gen.var(valid, true); // TODO var - validateProperties(pat); - gen.if(valid); - } - } - } - function checkMatchingProperties(pat) { - for (const prop in checkProperties) { - if (new RegExp(pat).test(prop)) { - (0, util_1.checkStrictMode)(it, `property ${prop} matches pattern ${pat} (use allowMatchingProperties)`); - } - } - } - function validateProperties(pat) { - gen.forIn("key", data, (key) => { - gen.if((0, codegen_1._) `${(0, code_1.usePattern)(cxt, pat)}.test(${key})`, () => { - const alwaysValid = alwaysValidPatterns.includes(pat); - if (!alwaysValid) { - cxt.subschema({ - keyword: "patternProperties", - schemaProp: pat, - dataProp: key, - dataPropType: util_2.Type.Str, - }, valid); - } - if (it.opts.unevaluated && props !== true) { - gen.assign((0, codegen_1._) `${props}[${key}]`, true); - } - else if (!alwaysValid && !it.allErrors) { - // can short-circuit if `unevaluatedProperties` is not supported (opts.next === false) - // or if all properties were evaluated (props === true) - gen.if((0, codegen_1.not)(valid), () => gen.break()); - } - }); - }); - } - }, -}; -exports.default = def; -//# sourceMappingURL=patternProperties.js.map - 
-/***/ }), - -/***/ 9498: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const items_1 = __nccwpck_require__(8008); -const def = { - keyword: "prefixItems", - type: "array", - schemaType: ["array"], - before: "uniqueItems", - code: (cxt) => (0, items_1.validateTuple)(cxt, "items"), -}; -exports.default = def; -//# sourceMappingURL=prefixItems.js.map - -/***/ }), - -/***/ 7666: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const validate_1 = __nccwpck_require__(8955); -const code_1 = __nccwpck_require__(4205); -const util_1 = __nccwpck_require__(3439); -const additionalProperties_1 = __nccwpck_require__(3481); -const def = { - keyword: "properties", - type: "object", - schemaType: "object", - code(cxt) { - const { gen, schema, parentSchema, data, it } = cxt; - if (it.opts.removeAdditional === "all" && parentSchema.additionalProperties === undefined) { - additionalProperties_1.default.code(new validate_1.KeywordCxt(it, additionalProperties_1.default, "additionalProperties")); - } - const allProps = (0, code_1.allSchemaProperties)(schema); - for (const prop of allProps) { - it.definedProperties.add(prop); - } - if (it.opts.unevaluated && allProps.length && it.props !== true) { - it.props = util_1.mergeEvaluated.props(gen, (0, util_1.toHash)(allProps), it.props); - } - const properties = allProps.filter((p) => !(0, util_1.alwaysValidSchema)(it, schema[p])); - if (properties.length === 0) - return; - const valid = gen.name("valid"); - for (const prop of properties) { - if (hasDefault(prop)) { - applyPropertySchema(prop); - } - else { - gen.if((0, code_1.propertyInData)(gen, data, prop, it.opts.ownProperties)); - applyPropertySchema(prop); - if (!it.allErrors) - gen.else().var(valid, true); - gen.endIf(); - } - cxt.it.definedProperties.add(prop); - cxt.ok(valid); - } - function hasDefault(prop) { - return it.opts.useDefaults && !it.compositeRule && schema[prop].default !== undefined; - } - function applyPropertySchema(prop) { - cxt.subschema({ - keyword: "properties", - schemaProp: prop, - dataProp: prop, - }, valid); - } - }, -}; -exports.default = def; -//# sourceMappingURL=properties.js.map - -/***/ }), - -/***/ 2554: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const codegen_1 = __nccwpck_require__(9179); -const util_1 = __nccwpck_require__(3439); -const error = { - message: "property name must be valid", - params: ({ params }) => (0, codegen_1._) `{propertyName: ${params.propertyName}}`, -}; -const def = { - keyword: "propertyNames", - type: "object", - schemaType: ["object", "boolean"], - error, - code(cxt) { - const { gen, schema, data, it } = cxt; - if ((0, util_1.alwaysValidSchema)(it, schema)) - return; - const valid = gen.name("valid"); - gen.forIn("key", data, (key) => { - cxt.setParams({ propertyName: key }); - cxt.subschema({ - keyword: "propertyNames", - data: key, - dataTypes: ["string"], - propertyName: key, - compositeRule: true, - }, valid); - gen.if((0, codegen_1.not)(valid), () => { - cxt.error(true); - if (!it.allErrors) - gen.break(); - }); - }); - cxt.ok(valid); - }, -}; -exports.default = def; -//# sourceMappingURL=propertyNames.js.map - -/***/ }), - -/***/ 7680: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - 
-Object.defineProperty(exports, "__esModule", ({ value: true })); -const util_1 = __nccwpck_require__(3439); -const def = { - keyword: ["then", "else"], - schemaType: ["object", "boolean"], - code({ keyword, parentSchema, it }) { - if (parentSchema.if === undefined) - (0, util_1.checkStrictMode)(it, `"${keyword}" without "if" is ignored`); - }, -}; -exports.default = def; -//# sourceMappingURL=thenElse.js.map - -/***/ }), - -/***/ 4205: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.validateUnion = exports.validateArray = exports.usePattern = exports.callValidateCode = exports.schemaProperties = exports.allSchemaProperties = exports.noPropertyInData = exports.propertyInData = exports.isOwnProperty = exports.hasPropFunc = exports.reportMissingProp = exports.checkMissingProp = exports.checkReportMissingProp = void 0; -const codegen_1 = __nccwpck_require__(9179); -const util_1 = __nccwpck_require__(3439); -const names_1 = __nccwpck_require__(50); -const util_2 = __nccwpck_require__(3439); -function checkReportMissingProp(cxt, prop) { - const { gen, data, it } = cxt; - gen.if(noPropertyInData(gen, data, prop, it.opts.ownProperties), () => { - cxt.setParams({ missingProperty: (0, codegen_1._) `${prop}` }, true); - cxt.error(); - }); -} -exports.checkReportMissingProp = checkReportMissingProp; -function checkMissingProp({ gen, data, it: { opts } }, properties, missing) { - return (0, codegen_1.or)(...properties.map((prop) => (0, codegen_1.and)(noPropertyInData(gen, data, prop, opts.ownProperties), (0, codegen_1._) `${missing} = ${prop}`))); -} -exports.checkMissingProp = checkMissingProp; -function reportMissingProp(cxt, missing) { - cxt.setParams({ missingProperty: missing }, true); - cxt.error(); -} -exports.reportMissingProp = reportMissingProp; -function hasPropFunc(gen) { - return gen.scopeValue("func", { - // eslint-disable-next-line @typescript-eslint/unbound-method - ref: Object.prototype.hasOwnProperty, - code: (0, codegen_1._) `Object.prototype.hasOwnProperty`, - }); -} -exports.hasPropFunc = hasPropFunc; -function isOwnProperty(gen, data, property) { - return (0, codegen_1._) `${hasPropFunc(gen)}.call(${data}, ${property})`; -} -exports.isOwnProperty = isOwnProperty; -function propertyInData(gen, data, property, ownProperties) { - const cond = (0, codegen_1._) `${data}${(0, codegen_1.getProperty)(property)} !== undefined`; - return ownProperties ? (0, codegen_1._) `${cond} && ${isOwnProperty(gen, data, property)}` : cond; -} -exports.propertyInData = propertyInData; -function noPropertyInData(gen, data, property, ownProperties) { - const cond = (0, codegen_1._) `${data}${(0, codegen_1.getProperty)(property)} === undefined`; - return ownProperties ? (0, codegen_1.or)(cond, (0, codegen_1.not)(isOwnProperty(gen, data, property))) : cond; -} -exports.noPropertyInData = noPropertyInData; -function allSchemaProperties(schemaMap) { - return schemaMap ? Object.keys(schemaMap).filter((p) => p !== "__proto__") : []; -} -exports.allSchemaProperties = allSchemaProperties; -function schemaProperties(it, schemaMap) { - return allSchemaProperties(schemaMap).filter((p) => !(0, util_1.alwaysValidSchema)(it, schemaMap[p])); -} -exports.schemaProperties = schemaProperties; -function callValidateCode({ schemaCode, data, it: { gen, topSchemaRef, schemaPath, errorPath }, it }, func, context, passSchema) { - const dataAndSchema = passSchema ? 
(0, codegen_1._) `${schemaCode}, ${data}, ${topSchemaRef}${schemaPath}` : data; - const valCxt = [ - [names_1.default.instancePath, (0, codegen_1.strConcat)(names_1.default.instancePath, errorPath)], - [names_1.default.parentData, it.parentData], - [names_1.default.parentDataProperty, it.parentDataProperty], - [names_1.default.rootData, names_1.default.rootData], - ]; - if (it.opts.dynamicRef) - valCxt.push([names_1.default.dynamicAnchors, names_1.default.dynamicAnchors]); - const args = (0, codegen_1._) `${dataAndSchema}, ${gen.object(...valCxt)}`; - return context !== codegen_1.nil ? (0, codegen_1._) `${func}.call(${context}, ${args})` : (0, codegen_1._) `${func}(${args})`; -} -exports.callValidateCode = callValidateCode; -const newRegExp = (0, codegen_1._) `new RegExp`; -function usePattern({ gen, it: { opts } }, pattern) { - const u = opts.unicodeRegExp ? "u" : ""; - const { regExp } = opts.code; - const rx = regExp(pattern, u); - return gen.scopeValue("pattern", { - key: rx.toString(), - ref: rx, - code: (0, codegen_1._) `${regExp.code === "new RegExp" ? newRegExp : (0, util_2.useFunc)(gen, regExp)}(${pattern}, ${u})`, - }); -} -exports.usePattern = usePattern; -function validateArray(cxt) { - const { gen, data, keyword, it } = cxt; - const valid = gen.name("valid"); - if (it.allErrors) { - const validArr = gen.let("valid", true); - validateItems(() => gen.assign(validArr, false)); - return validArr; - } - gen.var(valid, true); - validateItems(() => gen.break()); - return valid; - function validateItems(notValid) { - const len = gen.const("len", (0, codegen_1._) `${data}.length`); - gen.forRange("i", 0, len, (i) => { - cxt.subschema({ - keyword, - dataProp: i, - dataPropType: util_1.Type.Num, - }, valid); - gen.if((0, codegen_1.not)(valid), notValid); - }); - } -} -exports.validateArray = validateArray; -function validateUnion(cxt) { - const { gen, schema, keyword, it } = cxt; - /* istanbul ignore if */ - if (!Array.isArray(schema)) - throw new Error("ajv implementation error"); - const alwaysValid = schema.some((sch) => (0, util_1.alwaysValidSchema)(it, sch)); - if (alwaysValid && !it.opts.unevaluated) - return; - const valid = gen.let("valid", false); - const schValid = gen.name("_valid"); - gen.block(() => schema.forEach((_sch, i) => { - const schCxt = cxt.subschema({ - keyword, - schemaProp: i, - compositeRule: true, - }, schValid); - gen.assign(valid, (0, codegen_1._) `${valid} || ${schValid}`); - const merged = cxt.mergeValidEvaluated(schCxt, schValid); - // can short-circuit if `unevaluatedProperties/Items` not supported (opts.unevaluated !== true) - // or if all properties and items were evaluated (it.props === true && it.items === true) - if (!merged) - gen.if((0, codegen_1.not)(valid)); - })); - cxt.result(valid, () => cxt.reset(), () => cxt.error(true)); -} -exports.validateUnion = validateUnion; -//# sourceMappingURL=code.js.map - -/***/ }), - -/***/ 1674: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const def = { - keyword: "id", - code() { - throw new Error('NOT SUPPORTED: keyword "id", use "$id" for schema ID'); - }, -}; -exports.default = def; -//# sourceMappingURL=id.js.map - -/***/ }), - -/***/ 3707: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const id_1 = __nccwpck_require__(1674); -const ref_1 = __nccwpck_require__(6532); -const core = [ - "$schema", - "$id", - 
"$defs", - "$vocabulary", - { keyword: "$comment" }, - "definitions", - id_1.default, - ref_1.default, -]; -exports.default = core; -//# sourceMappingURL=index.js.map - -/***/ }), - -/***/ 6532: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.callRef = exports.getValidate = void 0; -const ref_error_1 = __nccwpck_require__(8190); -const code_1 = __nccwpck_require__(4205); -const codegen_1 = __nccwpck_require__(9179); -const names_1 = __nccwpck_require__(50); -const compile_1 = __nccwpck_require__(813); -const util_1 = __nccwpck_require__(3439); -const def = { - keyword: "$ref", - schemaType: "string", - code(cxt) { - const { gen, schema: $ref, it } = cxt; - const { baseId, schemaEnv: env, validateName, opts, self } = it; - const { root } = env; - if (($ref === "#" || $ref === "#/") && baseId === root.baseId) - return callRootRef(); - const schOrEnv = compile_1.resolveRef.call(self, root, baseId, $ref); - if (schOrEnv === undefined) - throw new ref_error_1.default(it.opts.uriResolver, baseId, $ref); - if (schOrEnv instanceof compile_1.SchemaEnv) - return callValidate(schOrEnv); - return inlineRefSchema(schOrEnv); - function callRootRef() { - if (env === root) - return callRef(cxt, validateName, env, env.$async); - const rootName = gen.scopeValue("root", { ref: root }); - return callRef(cxt, (0, codegen_1._) `${rootName}.validate`, root, root.$async); - } - function callValidate(sch) { - const v = getValidate(cxt, sch); - callRef(cxt, v, sch, sch.$async); - } - function inlineRefSchema(sch) { - const schName = gen.scopeValue("schema", opts.code.source === true ? { ref: sch, code: (0, codegen_1.stringify)(sch) } : { ref: sch }); - const valid = gen.name("valid"); - const schCxt = cxt.subschema({ - schema: sch, - dataTypes: [], - schemaPath: codegen_1.nil, - topSchemaRef: schName, - errSchemaPath: $ref, - }, valid); - cxt.mergeEvaluated(schCxt); - cxt.ok(valid); - } - }, -}; -function getValidate(cxt, sch) { - const { gen } = cxt; - return sch.validate - ? gen.scopeValue("validate", { ref: sch.validate }) - : (0, codegen_1._) `${gen.scopeValue("wrapper", { ref: sch })}.validate`; -} -exports.getValidate = getValidate; -function callRef(cxt, v, sch, $async) { - const { gen, it } = cxt; - const { allErrors, schemaEnv: env, opts } = it; - const passCxt = opts.passContext ? names_1.default.this : codegen_1.nil; - if ($async) - callAsyncRef(); - else - callSyncRef(); - function callAsyncRef() { - if (!env.$async) - throw new Error("async schema referenced by sync schema"); - const valid = gen.let("valid"); - gen.try(() => { - gen.code((0, codegen_1._) `await ${(0, code_1.callValidateCode)(cxt, v, passCxt)}`); - addEvaluatedFrom(v); // TODO will not work with async, it has to be returned with the result - if (!allErrors) - gen.assign(valid, true); - }, (e) => { - gen.if((0, codegen_1._) `!(${e} instanceof ${it.ValidationError})`, () => gen.throw(e)); - addErrorsFrom(e); - if (!allErrors) - gen.assign(valid, false); - }); - cxt.ok(valid); - } - function callSyncRef() { - cxt.result((0, code_1.callValidateCode)(cxt, v, passCxt), () => addEvaluatedFrom(v), () => addErrorsFrom(v)); - } - function addErrorsFrom(source) { - const errs = (0, codegen_1._) `${source}.errors`; - gen.assign(names_1.default.vErrors, (0, codegen_1._) `${names_1.default.vErrors} === null ? 
${errs} : ${names_1.default.vErrors}.concat(${errs})`); // TODO tagged - gen.assign(names_1.default.errors, (0, codegen_1._) `${names_1.default.vErrors}.length`); - } - function addEvaluatedFrom(source) { - var _a; - if (!it.opts.unevaluated) - return; - const schEvaluated = (_a = sch === null || sch === void 0 ? void 0 : sch.validate) === null || _a === void 0 ? void 0 : _a.evaluated; - // TODO refactor - if (it.props !== true) { - if (schEvaluated && !schEvaluated.dynamicProps) { - if (schEvaluated.props !== undefined) { - it.props = util_1.mergeEvaluated.props(gen, schEvaluated.props, it.props); - } - } - else { - const props = gen.var("props", (0, codegen_1._) `${source}.evaluated.props`); - it.props = util_1.mergeEvaluated.props(gen, props, it.props, codegen_1.Name); - } - } - if (it.items !== true) { - if (schEvaluated && !schEvaluated.dynamicItems) { - if (schEvaluated.items !== undefined) { - it.items = util_1.mergeEvaluated.items(gen, schEvaluated.items, it.items); - } - } - else { - const items = gen.var("items", (0, codegen_1._) `${source}.evaluated.items`); - it.items = util_1.mergeEvaluated.items(gen, items, it.items, codegen_1.Name); - } - } - } -} -exports.callRef = callRef; -exports.default = def; -//# sourceMappingURL=ref.js.map - -/***/ }), - -/***/ 4025: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const codegen_1 = __nccwpck_require__(9179); -const types_1 = __nccwpck_require__(8374); -const compile_1 = __nccwpck_require__(813); -const util_1 = __nccwpck_require__(3439); -const error = { - message: ({ params: { discrError, tagName } }) => discrError === types_1.DiscrError.Tag - ? `tag "${tagName}" must be string` - : `value of tag "${tagName}" must be in oneOf`, - params: ({ params: { discrError, tag, tagName } }) => (0, codegen_1._) `{error: ${discrError}, tag: ${tagName}, tagValue: ${tag}}`, -}; -const def = { - keyword: "discriminator", - type: "object", - schemaType: "object", - error, - code(cxt) { - const { gen, data, schema, parentSchema, it } = cxt; - const { oneOf } = parentSchema; - if (!it.opts.discriminator) { - throw new Error("discriminator: requires discriminator option"); - } - const tagName = schema.propertyName; - if (typeof tagName != "string") - throw new Error("discriminator: requires propertyName"); - if (schema.mapping) - throw new Error("discriminator: mapping is not supported"); - if (!oneOf) - throw new Error("discriminator: requires oneOf keyword"); - const valid = gen.let("valid", false); - const tag = gen.const("tag", (0, codegen_1._) `${data}${(0, codegen_1.getProperty)(tagName)}`); - gen.if((0, codegen_1._) `typeof ${tag} == "string"`, () => validateMapping(), () => cxt.error(false, { discrError: types_1.DiscrError.Tag, tag, tagName })); - cxt.ok(valid); - function validateMapping() { - const mapping = getMapping(); - gen.if(false); - for (const tagValue in mapping) { - gen.elseIf((0, codegen_1._) `${tag} === ${tagValue}`); - gen.assign(valid, applyTagSchema(mapping[tagValue])); - } - gen.else(); - cxt.error(false, { discrError: types_1.DiscrError.Mapping, tag, tagName }); - gen.endIf(); - } - function applyTagSchema(schemaProp) { - const _valid = gen.name("valid"); - const schCxt = cxt.subschema({ keyword: "oneOf", schemaProp }, _valid); - cxt.mergeEvaluated(schCxt, codegen_1.Name); - return _valid; - } - function getMapping() { - var _a; - const oneOfMapping = {}; - const topRequired = hasRequired(parentSchema); - let tagRequired 
= true; - for (let i = 0; i < oneOf.length; i++) { - let sch = oneOf[i]; - if ((sch === null || sch === void 0 ? void 0 : sch.$ref) && !(0, util_1.schemaHasRulesButRef)(sch, it.self.RULES)) { - sch = compile_1.resolveRef.call(it.self, it.schemaEnv.root, it.baseId, sch === null || sch === void 0 ? void 0 : sch.$ref); - if (sch instanceof compile_1.SchemaEnv) - sch = sch.schema; - } - const propSch = (_a = sch === null || sch === void 0 ? void 0 : sch.properties) === null || _a === void 0 ? void 0 : _a[tagName]; - if (typeof propSch != "object") { - throw new Error(`discriminator: oneOf subschemas (or referenced schemas) must have "properties/${tagName}"`); - } - tagRequired = tagRequired && (topRequired || hasRequired(sch)); - addMappings(propSch, i); - } - if (!tagRequired) - throw new Error(`discriminator: "${tagName}" must be required`); - return oneOfMapping; - function hasRequired({ required }) { - return Array.isArray(required) && required.includes(tagName); - } - function addMappings(sch, i) { - if (sch.const) { - addMapping(sch.const, i); - } - else if (sch.enum) { - for (const tagValue of sch.enum) { - addMapping(tagValue, i); - } - } - else { - throw new Error(`discriminator: "properties/${tagName}" must have "const" or "enum"`); - } - } - function addMapping(tagValue, i) { - if (typeof tagValue != "string" || tagValue in oneOfMapping) { - throw new Error(`discriminator: "${tagName}" values must be unique strings`); - } - oneOfMapping[tagValue] = i; - } - } - }, -}; -exports.default = def; -//# sourceMappingURL=index.js.map - -/***/ }), - -/***/ 8374: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.DiscrError = void 0; -var DiscrError; -(function (DiscrError) { - DiscrError["Tag"] = "tag"; - DiscrError["Mapping"] = "mapping"; -})(DiscrError = exports.DiscrError || (exports.DiscrError = {})); -//# sourceMappingURL=types.js.map - -/***/ }), - -/***/ 691: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const core_1 = __nccwpck_require__(3707); -const validation_1 = __nccwpck_require__(9805); -const applicator_1 = __nccwpck_require__(3048); -const format_1 = __nccwpck_require__(9841); -const metadata_1 = __nccwpck_require__(5799); -const draft7Vocabularies = [ - core_1.default, - validation_1.default, - (0, applicator_1.default)(), - format_1.default, - metadata_1.metadataVocabulary, - metadata_1.contentVocabulary, -]; -exports.default = draft7Vocabularies; -//# sourceMappingURL=draft7.js.map - -/***/ }), - -/***/ 3691: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const codegen_1 = __nccwpck_require__(9179); -const error = { - message: ({ schemaCode }) => (0, codegen_1.str) `must match format "${schemaCode}"`, - params: ({ schemaCode }) => (0, codegen_1._) `{format: ${schemaCode}}`, -}; -const def = { - keyword: "format", - type: ["number", "string"], - schemaType: "string", - $data: true, - error, - code(cxt, ruleType) { - const { gen, data, $data, schema, schemaCode, it } = cxt; - const { opts, errSchemaPath, schemaEnv, self } = it; - if (!opts.validateFormats) - return; - if ($data) - validate$DataFormat(); - else - validateFormat(); - function validate$DataFormat() { - const fmts = gen.scopeValue("formats", { - ref: self.formats, - code: opts.code.formats, - }); - const fDef 
= gen.const("fDef", (0, codegen_1._) `${fmts}[${schemaCode}]`); - const fType = gen.let("fType"); - const format = gen.let("format"); - // TODO simplify - gen.if((0, codegen_1._) `typeof ${fDef} == "object" && !(${fDef} instanceof RegExp)`, () => gen.assign(fType, (0, codegen_1._) `${fDef}.type || "string"`).assign(format, (0, codegen_1._) `${fDef}.validate`), () => gen.assign(fType, (0, codegen_1._) `"string"`).assign(format, fDef)); - cxt.fail$data((0, codegen_1.or)(unknownFmt(), invalidFmt())); - function unknownFmt() { - if (opts.strictSchema === false) - return codegen_1.nil; - return (0, codegen_1._) `${schemaCode} && !${format}`; - } - function invalidFmt() { - const callFormat = schemaEnv.$async - ? (0, codegen_1._) `(${fDef}.async ? await ${format}(${data}) : ${format}(${data}))` - : (0, codegen_1._) `${format}(${data})`; - const validData = (0, codegen_1._) `(typeof ${format} == "function" ? ${callFormat} : ${format}.test(${data}))`; - return (0, codegen_1._) `${format} && ${format} !== true && ${fType} === ${ruleType} && !${validData}`; - } - } - function validateFormat() { - const formatDef = self.formats[schema]; - if (!formatDef) { - unknownFormat(); - return; - } - if (formatDef === true) - return; - const [fmtType, format, fmtRef] = getFormat(formatDef); - if (fmtType === ruleType) - cxt.pass(validCondition()); - function unknownFormat() { - if (opts.strictSchema === false) { - self.logger.warn(unknownMsg()); - return; - } - throw new Error(unknownMsg()); - function unknownMsg() { - return `unknown format "${schema}" ignored in schema at path "${errSchemaPath}"`; - } - } - function getFormat(fmtDef) { - const code = fmtDef instanceof RegExp - ? (0, codegen_1.regexpCode)(fmtDef) - : opts.code.formats - ? (0, codegen_1._) `${opts.code.formats}${(0, codegen_1.getProperty)(schema)}` - : undefined; - const fmt = gen.scopeValue("formats", { key: schema, ref: fmtDef, code }); - if (typeof fmtDef == "object" && !(fmtDef instanceof RegExp)) { - return [fmtDef.type || "string", fmtDef.validate, (0, codegen_1._) `${fmt}.validate`]; - } - return ["string", fmtDef, fmt]; - } - function validCondition() { - if (typeof formatDef == "object" && !(formatDef instanceof RegExp) && formatDef.async) { - if (!schemaEnv.$async) - throw new Error("async format in sync schema"); - return (0, codegen_1._) `await ${fmtRef}(${data})`; - } - return typeof format == "function" ? 
(0, codegen_1._) `${fmtRef}(${data})` : (0, codegen_1._) `${fmtRef}.test(${data})`; - } - } - }, -}; -exports.default = def; -//# sourceMappingURL=format.js.map - -/***/ }), - -/***/ 9841: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const format_1 = __nccwpck_require__(3691); -const format = [format_1.default]; -exports.default = format; -//# sourceMappingURL=index.js.map - -/***/ }), - -/***/ 5799: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.contentVocabulary = exports.metadataVocabulary = void 0; -exports.metadataVocabulary = [ - "title", - "description", - "default", - "deprecated", - "readOnly", - "writeOnly", - "examples", -]; -exports.contentVocabulary = [ - "contentMediaType", - "contentEncoding", - "contentSchema", -]; -//# sourceMappingURL=metadata.js.map - -/***/ }), - -/***/ 3694: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const codegen_1 = __nccwpck_require__(9179); -const util_1 = __nccwpck_require__(3439); -const equal_1 = __nccwpck_require__(3809); -const error = { - message: "must be equal to constant", - params: ({ schemaCode }) => (0, codegen_1._) `{allowedValue: ${schemaCode}}`, -}; -const def = { - keyword: "const", - $data: true, - error, - code(cxt) { - const { gen, data, $data, schemaCode, schema } = cxt; - if ($data || (schema && typeof schema == "object")) { - cxt.fail$data((0, codegen_1._) `!${(0, util_1.useFunc)(gen, equal_1.default)}(${data}, ${schemaCode})`); - } - else { - cxt.fail((0, codegen_1._) `${schema} !== ${data}`); - } - }, -}; -exports.default = def; -//# sourceMappingURL=const.js.map - -/***/ }), - -/***/ 5529: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const codegen_1 = __nccwpck_require__(9179); -const util_1 = __nccwpck_require__(3439); -const equal_1 = __nccwpck_require__(3809); -const error = { - message: "must be equal to one of the allowed values", - params: ({ schemaCode }) => (0, codegen_1._) `{allowedValues: ${schemaCode}}`, -}; -const def = { - keyword: "enum", - schemaType: "array", - $data: true, - error, - code(cxt) { - const { gen, data, $data, schema, schemaCode, it } = cxt; - if (!$data && schema.length === 0) - throw new Error("enum must have non-empty array"); - const useLoop = schema.length >= it.opts.loopEnum; - let eql; - const getEql = () => (eql !== null && eql !== void 0 ? eql : (eql = (0, util_1.useFunc)(gen, equal_1.default))); - let valid; - if (useLoop || $data) { - valid = gen.let("valid"); - cxt.block$data(valid, loopEnum); - } - else { - /* istanbul ignore if */ - if (!Array.isArray(schema)) - throw new Error("ajv implementation error"); - const vSchema = gen.const("vSchema", schemaCode); - valid = (0, codegen_1.or)(...schema.map((_x, i) => equalCode(vSchema, i))); - } - cxt.pass(valid); - function loopEnum() { - gen.assign(valid, false); - gen.forOf("v", schemaCode, (v) => gen.if((0, codegen_1._) `${getEql()}(${data}, ${v})`, () => gen.assign(valid, true).break())); - } - function equalCode(vSchema, i) { - const sch = schema[i]; - return typeof sch === "object" && sch !== null - ? 
(0, codegen_1._) `${getEql()}(${data}, ${vSchema}[${i}])` - : (0, codegen_1._) `${data} === ${sch}`; - } - }, -}; -exports.default = def; -//# sourceMappingURL=enum.js.map - -/***/ }), - -/***/ 9805: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const limitNumber_1 = __nccwpck_require__(345); -const multipleOf_1 = __nccwpck_require__(3201); -const limitLength_1 = __nccwpck_require__(7598); -const pattern_1 = __nccwpck_require__(4960); -const limitProperties_1 = __nccwpck_require__(3470); -const required_1 = __nccwpck_require__(3602); -const limitItems_1 = __nccwpck_require__(3924); -const uniqueItems_1 = __nccwpck_require__(9351); -const const_1 = __nccwpck_require__(3694); -const enum_1 = __nccwpck_require__(5529); -const validation = [ - // number - limitNumber_1.default, - multipleOf_1.default, - // string - limitLength_1.default, - pattern_1.default, - // object - limitProperties_1.default, - required_1.default, - // array - limitItems_1.default, - uniqueItems_1.default, - // any - { keyword: "type", schemaType: ["string", "array"] }, - { keyword: "nullable", schemaType: "boolean" }, - const_1.default, - enum_1.default, -]; -exports.default = validation; -//# sourceMappingURL=index.js.map - -/***/ }), - -/***/ 3924: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const codegen_1 = __nccwpck_require__(9179); -const error = { - message({ keyword, schemaCode }) { - const comp = keyword === "maxItems" ? "more" : "fewer"; - return (0, codegen_1.str) `must NOT have ${comp} than ${schemaCode} items`; - }, - params: ({ schemaCode }) => (0, codegen_1._) `{limit: ${schemaCode}}`, -}; -const def = { - keyword: ["maxItems", "minItems"], - type: "array", - schemaType: "number", - $data: true, - error, - code(cxt) { - const { keyword, data, schemaCode } = cxt; - const op = keyword === "maxItems" ? codegen_1.operators.GT : codegen_1.operators.LT; - cxt.fail$data((0, codegen_1._) `${data}.length ${op} ${schemaCode}`); - }, -}; -exports.default = def; -//# sourceMappingURL=limitItems.js.map - -/***/ }), - -/***/ 7598: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const codegen_1 = __nccwpck_require__(9179); -const util_1 = __nccwpck_require__(3439); -const ucs2length_1 = __nccwpck_require__(2470); -const error = { - message({ keyword, schemaCode }) { - const comp = keyword === "maxLength" ? "more" : "fewer"; - return (0, codegen_1.str) `must NOT have ${comp} than ${schemaCode} characters`; - }, - params: ({ schemaCode }) => (0, codegen_1._) `{limit: ${schemaCode}}`, -}; -const def = { - keyword: ["maxLength", "minLength"], - type: "string", - schemaType: "number", - $data: true, - error, - code(cxt) { - const { keyword, data, schemaCode, it } = cxt; - const op = keyword === "maxLength" ? codegen_1.operators.GT : codegen_1.operators.LT; - const len = it.opts.unicode === false ? 
(0, codegen_1._) `${data}.length` : (0, codegen_1._) `${(0, util_1.useFunc)(cxt.gen, ucs2length_1.default)}(${data})`; - cxt.fail$data((0, codegen_1._) `${len} ${op} ${schemaCode}`); - }, -}; -exports.default = def; -//# sourceMappingURL=limitLength.js.map - -/***/ }), - -/***/ 345: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const codegen_1 = __nccwpck_require__(9179); -const ops = codegen_1.operators; -const KWDs = { - maximum: { okStr: "<=", ok: ops.LTE, fail: ops.GT }, - minimum: { okStr: ">=", ok: ops.GTE, fail: ops.LT }, - exclusiveMaximum: { okStr: "<", ok: ops.LT, fail: ops.GTE }, - exclusiveMinimum: { okStr: ">", ok: ops.GT, fail: ops.LTE }, -}; -const error = { - message: ({ keyword, schemaCode }) => (0, codegen_1.str) `must be ${KWDs[keyword].okStr} ${schemaCode}`, - params: ({ keyword, schemaCode }) => (0, codegen_1._) `{comparison: ${KWDs[keyword].okStr}, limit: ${schemaCode}}`, -}; -const def = { - keyword: Object.keys(KWDs), - type: "number", - schemaType: "number", - $data: true, - error, - code(cxt) { - const { keyword, data, schemaCode } = cxt; - cxt.fail$data((0, codegen_1._) `${data} ${KWDs[keyword].fail} ${schemaCode} || isNaN(${data})`); - }, -}; -exports.default = def; -//# sourceMappingURL=limitNumber.js.map - -/***/ }), - -/***/ 3470: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const codegen_1 = __nccwpck_require__(9179); -const error = { - message({ keyword, schemaCode }) { - const comp = keyword === "maxProperties" ? "more" : "fewer"; - return (0, codegen_1.str) `must NOT have ${comp} than ${schemaCode} properties`; - }, - params: ({ schemaCode }) => (0, codegen_1._) `{limit: ${schemaCode}}`, -}; -const def = { - keyword: ["maxProperties", "minProperties"], - type: "object", - schemaType: "number", - $data: true, - error, - code(cxt) { - const { keyword, data, schemaCode } = cxt; - const op = keyword === "maxProperties" ? codegen_1.operators.GT : codegen_1.operators.LT; - cxt.fail$data((0, codegen_1._) `Object.keys(${data}).length ${op} ${schemaCode}`); - }, -}; -exports.default = def; -//# sourceMappingURL=limitProperties.js.map - -/***/ }), - -/***/ 3201: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const codegen_1 = __nccwpck_require__(9179); -const error = { - message: ({ schemaCode }) => (0, codegen_1.str) `must be multiple of ${schemaCode}`, - params: ({ schemaCode }) => (0, codegen_1._) `{multipleOf: ${schemaCode}}`, -}; -const def = { - keyword: "multipleOf", - type: "number", - schemaType: "number", - $data: true, - error, - code(cxt) { - const { gen, data, schemaCode, it } = cxt; - // const bdt = bad$DataType(schemaCode, def.schemaType, $data) - const prec = it.opts.multipleOfPrecision; - const res = gen.let("res"); - const invalid = prec - ? 
(0, codegen_1._) `Math.abs(Math.round(${res}) - ${res}) > 1e-${prec}` - : (0, codegen_1._) `${res} !== parseInt(${res})`; - cxt.fail$data((0, codegen_1._) `(${schemaCode} === 0 || (${res} = ${data}/${schemaCode}, ${invalid}))`); - }, -}; -exports.default = def; -//# sourceMappingURL=multipleOf.js.map - -/***/ }), - -/***/ 4960: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const code_1 = __nccwpck_require__(4205); -const codegen_1 = __nccwpck_require__(9179); -const error = { - message: ({ schemaCode }) => (0, codegen_1.str) `must match pattern "${schemaCode}"`, - params: ({ schemaCode }) => (0, codegen_1._) `{pattern: ${schemaCode}}`, -}; -const def = { - keyword: "pattern", - type: "string", - schemaType: "string", - $data: true, - error, - code(cxt) { - const { data, $data, schema, schemaCode, it } = cxt; - // TODO regexp should be wrapped in try/catchs - const u = it.opts.unicodeRegExp ? "u" : ""; - const regExp = $data ? (0, codegen_1._) `(new RegExp(${schemaCode}, ${u}))` : (0, code_1.usePattern)(cxt, schema); - cxt.fail$data((0, codegen_1._) `!${regExp}.test(${data})`); - }, -}; -exports.default = def; -//# sourceMappingURL=pattern.js.map - -/***/ }), - -/***/ 3602: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const code_1 = __nccwpck_require__(4205); -const codegen_1 = __nccwpck_require__(9179); -const util_1 = __nccwpck_require__(3439); -const error = { - message: ({ params: { missingProperty } }) => (0, codegen_1.str) `must have required property '${missingProperty}'`, - params: ({ params: { missingProperty } }) => (0, codegen_1._) `{missingProperty: ${missingProperty}}`, -}; -const def = { - keyword: "required", - type: "object", - schemaType: "array", - $data: true, - error, - code(cxt) { - const { gen, schema, schemaCode, data, $data, it } = cxt; - const { opts } = it; - if (!$data && schema.length === 0) - return; - const useLoop = schema.length >= opts.loopRequired; - if (it.allErrors) - allErrorsMode(); - else - exitOnErrorMode(); - if (opts.strictRequired) { - const props = cxt.parentSchema.properties; - const { definedProperties } = cxt.it; - for (const requiredKey of schema) { - if ((props === null || props === void 0 ? 
void 0 : props[requiredKey]) === undefined && !definedProperties.has(requiredKey)) { - const schemaPath = it.schemaEnv.baseId + it.errSchemaPath; - const msg = `required property "${requiredKey}" is not defined at "${schemaPath}" (strictRequired)`; - (0, util_1.checkStrictMode)(it, msg, it.opts.strictRequired); - } - } - } - function allErrorsMode() { - if (useLoop || $data) { - cxt.block$data(codegen_1.nil, loopAllRequired); - } - else { - for (const prop of schema) { - (0, code_1.checkReportMissingProp)(cxt, prop); - } - } - } - function exitOnErrorMode() { - const missing = gen.let("missing"); - if (useLoop || $data) { - const valid = gen.let("valid", true); - cxt.block$data(valid, () => loopUntilMissing(missing, valid)); - cxt.ok(valid); - } - else { - gen.if((0, code_1.checkMissingProp)(cxt, schema, missing)); - (0, code_1.reportMissingProp)(cxt, missing); - gen.else(); - } - } - function loopAllRequired() { - gen.forOf("prop", schemaCode, (prop) => { - cxt.setParams({ missingProperty: prop }); - gen.if((0, code_1.noPropertyInData)(gen, data, prop, opts.ownProperties), () => cxt.error()); - }); - } - function loopUntilMissing(missing, valid) { - cxt.setParams({ missingProperty: missing }); - gen.forOf(missing, schemaCode, () => { - gen.assign(valid, (0, code_1.propertyInData)(gen, data, missing, opts.ownProperties)); - gen.if((0, codegen_1.not)(valid), () => { - cxt.error(); - gen.break(); - }); - }, codegen_1.nil); - } - }, -}; -exports.default = def; -//# sourceMappingURL=required.js.map - -/***/ }), - -/***/ 9351: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const dataType_1 = __nccwpck_require__(7725); -const codegen_1 = __nccwpck_require__(9179); -const util_1 = __nccwpck_require__(3439); -const equal_1 = __nccwpck_require__(3809); -const error = { - message: ({ params: { i, j } }) => (0, codegen_1.str) `must NOT have duplicate items (items ## ${j} and ${i} are identical)`, - params: ({ params: { i, j } }) => (0, codegen_1._) `{i: ${i}, j: ${j}}`, -}; -const def = { - keyword: "uniqueItems", - type: "array", - schemaType: "boolean", - $data: true, - error, - code(cxt) { - const { gen, data, $data, schema, parentSchema, schemaCode, it } = cxt; - if (!$data && !schema) - return; - const valid = gen.let("valid"); - const itemTypes = parentSchema.items ? (0, dataType_1.getSchemaTypes)(parentSchema.items) : []; - cxt.block$data(valid, validateUniqueItems, (0, codegen_1._) `${schemaCode} === false`); - cxt.ok(valid); - function validateUniqueItems() { - const i = gen.let("i", (0, codegen_1._) `${data}.length`); - const j = gen.let("j"); - cxt.setParams({ i, j }); - gen.assign(valid, true); - gen.if((0, codegen_1._) `${i} > 1`, () => (canOptimize() ? 
loopN : loopN2)(i, j)); - } - function canOptimize() { - return itemTypes.length > 0 && !itemTypes.some((t) => t === "object" || t === "array"); - } - function loopN(i, j) { - const item = gen.name("item"); - const wrongType = (0, dataType_1.checkDataTypes)(itemTypes, item, it.opts.strictNumbers, dataType_1.DataType.Wrong); - const indices = gen.const("indices", (0, codegen_1._) `{}`); - gen.for((0, codegen_1._) `;${i}--;`, () => { - gen.let(item, (0, codegen_1._) `${data}[${i}]`); - gen.if(wrongType, (0, codegen_1._) `continue`); - if (itemTypes.length > 1) - gen.if((0, codegen_1._) `typeof ${item} == "string"`, (0, codegen_1._) `${item} += "_"`); - gen - .if((0, codegen_1._) `typeof ${indices}[${item}] == "number"`, () => { - gen.assign(j, (0, codegen_1._) `${indices}[${item}]`); - cxt.error(); - gen.assign(valid, false).break(); - }) - .code((0, codegen_1._) `${indices}[${item}] = ${i}`); - }); - } - function loopN2(i, j) { - const eql = (0, util_1.useFunc)(gen, equal_1.default); - const outer = gen.name("outer"); - gen.label(outer).for((0, codegen_1._) `;${i}--;`, () => gen.for((0, codegen_1._) `${j} = ${i}; ${j}--;`, () => gen.if((0, codegen_1._) `${eql}(${data}[${i}], ${data}[${j}])`, () => { - cxt.error(); - gen.assign(valid, false).break(outer); - }))); - } - }, -}; -exports.default = def; -//# sourceMappingURL=uniqueItems.js.map - -/***/ }), - -/***/ 3682: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var register = __nccwpck_require__(4670) -var addHook = __nccwpck_require__(5549) -var removeHook = __nccwpck_require__(6819) - -// bind with array of arguments: https://stackoverflow.com/a/21792913 -var bind = Function.bind -var bindable = bind.bind(bind) - -function bindApi (hook, state, name) { - var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state]) - hook.api = { remove: removeHookRef } - hook.remove = removeHookRef - - ;['before', 'error', 'after', 'wrap'].forEach(function (kind) { - var args = name ? [state, kind, name] : [state, kind] - hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args) - }) -} - -function HookSingular () { - var singularHookName = 'h' - var singularHookState = { - registry: {} - } - var singularHook = register.bind(null, singularHookState, singularHookName) - bindApi(singularHook, singularHookState, singularHookName) - return singularHook -} - -function HookCollection () { - var state = { - registry: {} - } - - var hook = register.bind(null, state) - bindApi(hook, state) - - return hook -} - -var collectionHookDeprecationMessageDisplayed = false -function Hook () { - if (!collectionHookDeprecationMessageDisplayed) { - console.warn('[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". 
Read more: https://git.io/upgrade-before-after-hook-to-1.4') - collectionHookDeprecationMessageDisplayed = true - } - return HookCollection() -} - -Hook.Singular = HookSingular.bind() -Hook.Collection = HookCollection.bind() - -module.exports = Hook -// expose constructors as a named property for TypeScript -module.exports.Hook = Hook -module.exports.Singular = Hook.Singular -module.exports.Collection = Hook.Collection - - -/***/ }), - -/***/ 5549: -/***/ ((module) => { - -module.exports = addHook; - -function addHook(state, kind, name, hook) { - var orig = hook; - if (!state.registry[name]) { - state.registry[name] = []; - } - - if (kind === "before") { - hook = function (method, options) { - return Promise.resolve() - .then(orig.bind(null, options)) - .then(method.bind(null, options)); - }; - } - - if (kind === "after") { - hook = function (method, options) { - var result; - return Promise.resolve() - .then(method.bind(null, options)) - .then(function (result_) { - result = result_; - return orig(result, options); - }) - .then(function () { - return result; - }); - }; - } - - if (kind === "error") { - hook = function (method, options) { - return Promise.resolve() - .then(method.bind(null, options)) - .catch(function (error) { - return orig(error, options); - }); - }; - } - - state.registry[name].push({ - hook: hook, - orig: orig, - }); -} - - -/***/ }), - -/***/ 4670: -/***/ ((module) => { - -module.exports = register; - -function register(state, name, method, options) { - if (typeof method !== "function") { - throw new Error("method for before hook must be a function"); - } - - if (!options) { - options = {}; - } - - if (Array.isArray(name)) { - return name.reverse().reduce(function (callback, name) { - return register.bind(null, state, name, callback, options); - }, method)(); - } - - return Promise.resolve().then(function () { - if (!state.registry[name]) { - return method(options); - } - - return state.registry[name].reduce(function (method, registered) { - return registered.hook.bind(null, method, options); - }, method)(); - }); -} - - -/***/ }), - -/***/ 6819: -/***/ ((module) => { - -module.exports = removeHook; - -function removeHook(state, name, method) { - if (!state.registry[name]) { - return; - } - - var index = state.registry[name] - .map(function (registered) { - return registered.orig; - }) - .indexOf(method); - - if (index === -1) { - return; - } - - state.registry[name].splice(index, 1); -} - - -/***/ }), - -/***/ 8932: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -class Deprecation extends Error { - constructor(message) { - super(message); // Maintains proper stack trace (only available on V8) - - /* istanbul ignore next */ - - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } - - this.name = 'Deprecation'; - } - -} - -exports.Deprecation = Deprecation; - - -/***/ }), - -/***/ 8206: -/***/ ((module) => { - -"use strict"; - - -// do not edit .js files directly - edit src/index.jst - - - -module.exports = function equal(a, b) { - if (a === b) return true; - - if (a && b && typeof a == 'object' && typeof b == 'object') { - if (a.constructor !== b.constructor) return false; - - var length, i, keys; - if (Array.isArray(a)) { - length = a.length; - if (length != b.length) return false; - for (i = length; i-- !== 0;) - if (!equal(a[i], b[i])) return false; - return true; - } - - - - if (a.constructor === RegExp) return a.source === b.source && a.flags 
=== b.flags; - if (a.valueOf !== Object.prototype.valueOf) return a.valueOf() === b.valueOf(); - if (a.toString !== Object.prototype.toString) return a.toString() === b.toString(); - - keys = Object.keys(a); - length = keys.length; - if (length !== Object.keys(b).length) return false; - - for (i = length; i-- !== 0;) - if (!Object.prototype.hasOwnProperty.call(b, keys[i])) return false; - - for (i = length; i-- !== 0;) { - var key = keys[i]; - - if (!equal(a[key], b[key])) return false; - } - - return true; - } - - // true if both NaN, false otherwise - return a!==a && b!==b; -}; - - -/***/ }), - -/***/ 9618: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(7758) -const path = __nccwpck_require__(5622) -const mkdirsSync = __nccwpck_require__(2915).mkdirsSync -const utimesMillisSync = __nccwpck_require__(2548).utimesMillisSync -const stat = __nccwpck_require__(3901) - -function copySync (src, dest, opts) { - if (typeof opts === 'function') { - opts = { filter: opts } - } - - opts = opts || {} - opts.clobber = 'clobber' in opts ? !!opts.clobber : true // default to true for now - opts.overwrite = 'overwrite' in opts ? !!opts.overwrite : opts.clobber // overwrite falls back to clobber - - // Warn about using preserveTimestamps on 32-bit node - if (opts.preserveTimestamps && process.arch === 'ia32') { - process.emitWarning( - 'Using the preserveTimestamps option in 32-bit node is not recommended;\n\n' + - '\tsee https://github.com/jprichardson/node-fs-extra/issues/269', - 'Warning', 'fs-extra-WARN0002' - ) - } - - const { srcStat, destStat } = stat.checkPathsSync(src, dest, 'copy', opts) - stat.checkParentPathsSync(src, srcStat, dest, 'copy') - return handleFilterAndCopy(destStat, src, dest, opts) -} - -function handleFilterAndCopy (destStat, src, dest, opts) { - if (opts.filter && !opts.filter(src, dest)) return - const destParent = path.dirname(dest) - if (!fs.existsSync(destParent)) mkdirsSync(destParent) - return getStats(destStat, src, dest, opts) -} - -function startCopy (destStat, src, dest, opts) { - if (opts.filter && !opts.filter(src, dest)) return - return getStats(destStat, src, dest, opts) -} - -function getStats (destStat, src, dest, opts) { - const statSync = opts.dereference ? 
fs.statSync : fs.lstatSync - const srcStat = statSync(src) - - if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts) - else if (srcStat.isFile() || - srcStat.isCharacterDevice() || - srcStat.isBlockDevice()) return onFile(srcStat, destStat, src, dest, opts) - else if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts) - else if (srcStat.isSocket()) throw new Error(`Cannot copy a socket file: ${src}`) - else if (srcStat.isFIFO()) throw new Error(`Cannot copy a FIFO pipe: ${src}`) - throw new Error(`Unknown file: ${src}`) -} - -function onFile (srcStat, destStat, src, dest, opts) { - if (!destStat) return copyFile(srcStat, src, dest, opts) - return mayCopyFile(srcStat, src, dest, opts) -} - -function mayCopyFile (srcStat, src, dest, opts) { - if (opts.overwrite) { - fs.unlinkSync(dest) - return copyFile(srcStat, src, dest, opts) - } else if (opts.errorOnExist) { - throw new Error(`'${dest}' already exists`) - } -} - -function copyFile (srcStat, src, dest, opts) { - fs.copyFileSync(src, dest) - if (opts.preserveTimestamps) handleTimestamps(srcStat.mode, src, dest) - return setDestMode(dest, srcStat.mode) -} - -function handleTimestamps (srcMode, src, dest) { - // Make sure the file is writable before setting the timestamp - // otherwise open fails with EPERM when invoked with 'r+' - // (through utimes call) - if (fileIsNotWritable(srcMode)) makeFileWritable(dest, srcMode) - return setDestTimestamps(src, dest) -} - -function fileIsNotWritable (srcMode) { - return (srcMode & 0o200) === 0 -} - -function makeFileWritable (dest, srcMode) { - return setDestMode(dest, srcMode | 0o200) -} - -function setDestMode (dest, srcMode) { - return fs.chmodSync(dest, srcMode) -} - -function setDestTimestamps (src, dest) { - // The initial srcStat.atime cannot be trusted - // because it is modified by the read(2) system call - // (See https://nodejs.org/api/fs.html#fs_stat_time_values) - const updatedSrcStat = fs.statSync(src) - return utimesMillisSync(dest, updatedSrcStat.atime, updatedSrcStat.mtime) -} - -function onDir (srcStat, destStat, src, dest, opts) { - if (!destStat) return mkDirAndCopy(srcStat.mode, src, dest, opts) - return copyDir(src, dest, opts) -} - -function mkDirAndCopy (srcMode, src, dest, opts) { - fs.mkdirSync(dest) - copyDir(src, dest, opts) - return setDestMode(dest, srcMode) -} - -function copyDir (src, dest, opts) { - fs.readdirSync(src).forEach(item => copyDirItem(item, src, dest, opts)) -} - -function copyDirItem (item, src, dest, opts) { - const srcItem = path.join(src, item) - const destItem = path.join(dest, item) - const { destStat } = stat.checkPathsSync(srcItem, destItem, 'copy', opts) - return startCopy(destStat, srcItem, destItem, opts) -} - -function onLink (destStat, src, dest, opts) { - let resolvedSrc = fs.readlinkSync(src) - if (opts.dereference) { - resolvedSrc = path.resolve(process.cwd(), resolvedSrc) - } - - if (!destStat) { - return fs.symlinkSync(resolvedSrc, dest) - } else { - let resolvedDest - try { - resolvedDest = fs.readlinkSync(dest) - } catch (err) { - // dest exists and is a regular file or directory, - // Windows may throw UNKNOWN error. If dest already exists, - // fs throws error anyway, so no need to guard against it here. 
- if (err.code === 'EINVAL' || err.code === 'UNKNOWN') return fs.symlinkSync(resolvedSrc, dest) - throw err - } - if (opts.dereference) { - resolvedDest = path.resolve(process.cwd(), resolvedDest) - } - if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) { - throw new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`) - } - - // prevent copy if src is a subdir of dest since unlinking - // dest in this case would result in removing src contents - // and therefore a broken symlink would be created. - if (fs.statSync(dest).isDirectory() && stat.isSrcSubdir(resolvedDest, resolvedSrc)) { - throw new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`) - } - return copyLink(resolvedSrc, dest) - } -} - -function copyLink (resolvedSrc, dest) { - fs.unlinkSync(dest) - return fs.symlinkSync(resolvedSrc, dest) -} - -module.exports = copySync - - -/***/ }), - -/***/ 8834: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(7758) -const path = __nccwpck_require__(5622) -const mkdirs = __nccwpck_require__(2915).mkdirs -const pathExists = __nccwpck_require__(3835).pathExists -const utimesMillis = __nccwpck_require__(2548).utimesMillis -const stat = __nccwpck_require__(3901) - -function copy (src, dest, opts, cb) { - if (typeof opts === 'function' && !cb) { - cb = opts - opts = {} - } else if (typeof opts === 'function') { - opts = { filter: opts } - } - - cb = cb || function () {} - opts = opts || {} - - opts.clobber = 'clobber' in opts ? !!opts.clobber : true // default to true for now - opts.overwrite = 'overwrite' in opts ? !!opts.overwrite : opts.clobber // overwrite falls back to clobber - - // Warn about using preserveTimestamps on 32-bit node - if (opts.preserveTimestamps && process.arch === 'ia32') { - process.emitWarning( - 'Using the preserveTimestamps option in 32-bit node is not recommended;\n\n' + - '\tsee https://github.com/jprichardson/node-fs-extra/issues/269', - 'Warning', 'fs-extra-WARN0001' - ) - } - - stat.checkPaths(src, dest, 'copy', opts, (err, stats) => { - if (err) return cb(err) - const { srcStat, destStat } = stats - stat.checkParentPaths(src, srcStat, dest, 'copy', err => { - if (err) return cb(err) - if (opts.filter) return handleFilter(checkParentDir, destStat, src, dest, opts, cb) - return checkParentDir(destStat, src, dest, opts, cb) - }) - }) -} - -function checkParentDir (destStat, src, dest, opts, cb) { - const destParent = path.dirname(dest) - pathExists(destParent, (err, dirExists) => { - if (err) return cb(err) - if (dirExists) return getStats(destStat, src, dest, opts, cb) - mkdirs(destParent, err => { - if (err) return cb(err) - return getStats(destStat, src, dest, opts, cb) - }) - }) -} - -function handleFilter (onInclude, destStat, src, dest, opts, cb) { - Promise.resolve(opts.filter(src, dest)).then(include => { - if (include) return onInclude(destStat, src, dest, opts, cb) - return cb() - }, error => cb(error)) -} - -function startCopy (destStat, src, dest, opts, cb) { - if (opts.filter) return handleFilter(getStats, destStat, src, dest, opts, cb) - return getStats(destStat, src, dest, opts, cb) -} - -function getStats (destStat, src, dest, opts, cb) { - const stat = opts.dereference ? 
fs.stat : fs.lstat - stat(src, (err, srcStat) => { - if (err) return cb(err) - - if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts, cb) - else if (srcStat.isFile() || - srcStat.isCharacterDevice() || - srcStat.isBlockDevice()) return onFile(srcStat, destStat, src, dest, opts, cb) - else if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts, cb) - else if (srcStat.isSocket()) return cb(new Error(`Cannot copy a socket file: ${src}`)) - else if (srcStat.isFIFO()) return cb(new Error(`Cannot copy a FIFO pipe: ${src}`)) - return cb(new Error(`Unknown file: ${src}`)) - }) -} - -function onFile (srcStat, destStat, src, dest, opts, cb) { - if (!destStat) return copyFile(srcStat, src, dest, opts, cb) - return mayCopyFile(srcStat, src, dest, opts, cb) -} - -function mayCopyFile (srcStat, src, dest, opts, cb) { - if (opts.overwrite) { - fs.unlink(dest, err => { - if (err) return cb(err) - return copyFile(srcStat, src, dest, opts, cb) - }) - } else if (opts.errorOnExist) { - return cb(new Error(`'${dest}' already exists`)) - } else return cb() -} - -function copyFile (srcStat, src, dest, opts, cb) { - fs.copyFile(src, dest, err => { - if (err) return cb(err) - if (opts.preserveTimestamps) return handleTimestampsAndMode(srcStat.mode, src, dest, cb) - return setDestMode(dest, srcStat.mode, cb) - }) -} - -function handleTimestampsAndMode (srcMode, src, dest, cb) { - // Make sure the file is writable before setting the timestamp - // otherwise open fails with EPERM when invoked with 'r+' - // (through utimes call) - if (fileIsNotWritable(srcMode)) { - return makeFileWritable(dest, srcMode, err => { - if (err) return cb(err) - return setDestTimestampsAndMode(srcMode, src, dest, cb) - }) - } - return setDestTimestampsAndMode(srcMode, src, dest, cb) -} - -function fileIsNotWritable (srcMode) { - return (srcMode & 0o200) === 0 -} - -function makeFileWritable (dest, srcMode, cb) { - return setDestMode(dest, srcMode | 0o200, cb) -} - -function setDestTimestampsAndMode (srcMode, src, dest, cb) { - setDestTimestamps(src, dest, err => { - if (err) return cb(err) - return setDestMode(dest, srcMode, cb) - }) -} - -function setDestMode (dest, srcMode, cb) { - return fs.chmod(dest, srcMode, cb) -} - -function setDestTimestamps (src, dest, cb) { - // The initial srcStat.atime cannot be trusted - // because it is modified by the read(2) system call - // (See https://nodejs.org/api/fs.html#fs_stat_time_values) - fs.stat(src, (err, updatedSrcStat) => { - if (err) return cb(err) - return utimesMillis(dest, updatedSrcStat.atime, updatedSrcStat.mtime, cb) - }) -} - -function onDir (srcStat, destStat, src, dest, opts, cb) { - if (!destStat) return mkDirAndCopy(srcStat.mode, src, dest, opts, cb) - return copyDir(src, dest, opts, cb) -} - -function mkDirAndCopy (srcMode, src, dest, opts, cb) { - fs.mkdir(dest, err => { - if (err) return cb(err) - copyDir(src, dest, opts, err => { - if (err) return cb(err) - return setDestMode(dest, srcMode, cb) - }) - }) -} - -function copyDir (src, dest, opts, cb) { - fs.readdir(src, (err, items) => { - if (err) return cb(err) - return copyDirItems(items, src, dest, opts, cb) - }) -} - -function copyDirItems (items, src, dest, opts, cb) { - const item = items.pop() - if (!item) return cb() - return copyDirItem(items, item, src, dest, opts, cb) -} - -function copyDirItem (items, item, src, dest, opts, cb) { - const srcItem = path.join(src, item) - const destItem = path.join(dest, item) - stat.checkPaths(srcItem, destItem, 'copy', opts, (err, stats) => { 
- if (err) return cb(err) - const { destStat } = stats - startCopy(destStat, srcItem, destItem, opts, err => { - if (err) return cb(err) - return copyDirItems(items, src, dest, opts, cb) - }) - }) -} - -function onLink (destStat, src, dest, opts, cb) { - fs.readlink(src, (err, resolvedSrc) => { - if (err) return cb(err) - if (opts.dereference) { - resolvedSrc = path.resolve(process.cwd(), resolvedSrc) - } - - if (!destStat) { - return fs.symlink(resolvedSrc, dest, cb) - } else { - fs.readlink(dest, (err, resolvedDest) => { - if (err) { - // dest exists and is a regular file or directory, - // Windows may throw UNKNOWN error. If dest already exists, - // fs throws error anyway, so no need to guard against it here. - if (err.code === 'EINVAL' || err.code === 'UNKNOWN') return fs.symlink(resolvedSrc, dest, cb) - return cb(err) - } - if (opts.dereference) { - resolvedDest = path.resolve(process.cwd(), resolvedDest) - } - if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) { - return cb(new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`)) - } - - // do not copy if src is a subdir of dest since unlinking - // dest in this case would result in removing src contents - // and therefore a broken symlink would be created. - if (destStat.isDirectory() && stat.isSrcSubdir(resolvedDest, resolvedSrc)) { - return cb(new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`)) - } - return copyLink(resolvedSrc, dest, cb) - }) - } - }) -} - -function copyLink (resolvedSrc, dest, cb) { - fs.unlink(dest, err => { - if (err) return cb(err) - return fs.symlink(resolvedSrc, dest, cb) - }) -} - -module.exports = copy - - -/***/ }), - -/***/ 1335: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const u = __nccwpck_require__(9046).fromCallback -module.exports = { - copy: u(__nccwpck_require__(8834)), - copySync: __nccwpck_require__(9618) -} - - -/***/ }), - -/***/ 6970: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const u = __nccwpck_require__(9046).fromPromise -const fs = __nccwpck_require__(1176) -const path = __nccwpck_require__(5622) -const mkdir = __nccwpck_require__(2915) -const remove = __nccwpck_require__(7357) - -const emptyDir = u(async function emptyDir (dir) { - let items - try { - items = await fs.readdir(dir) - } catch { - return mkdir.mkdirs(dir) - } - - return Promise.all(items.map(item => remove.remove(path.join(dir, item)))) -}) - -function emptyDirSync (dir) { - let items - try { - items = fs.readdirSync(dir) - } catch { - return mkdir.mkdirsSync(dir) - } - - items.forEach(item => { - item = path.join(dir, item) - remove.removeSync(item) - }) -} - -module.exports = { - emptyDirSync, - emptydirSync: emptyDirSync, - emptyDir, - emptydir: emptyDir -} - - -/***/ }), - -/***/ 2164: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const u = __nccwpck_require__(9046).fromCallback -const path = __nccwpck_require__(5622) -const fs = __nccwpck_require__(7758) -const mkdir = __nccwpck_require__(2915) - -function createFile (file, callback) { - function makeFile () { - fs.writeFile(file, '', err => { - if (err) return callback(err) - callback() - }) - } - - fs.stat(file, (err, stats) => { // eslint-disable-line handle-callback-err - if (!err && stats.isFile()) return callback() - const dir = path.dirname(file) - fs.stat(dir, (err, stats) => { - if (err) { - // if the directory doesn't exist, make it - if (err.code === 'ENOENT') { - 
return mkdir.mkdirs(dir, err => { - if (err) return callback(err) - makeFile() - }) - } - return callback(err) - } - - if (stats.isDirectory()) makeFile() - else { - // parent is not a directory - // This is just to cause an internal ENOTDIR error to be thrown - fs.readdir(dir, err => { - if (err) return callback(err) - }) - } - }) - }) -} - -function createFileSync (file) { - let stats - try { - stats = fs.statSync(file) - } catch {} - if (stats && stats.isFile()) return - - const dir = path.dirname(file) - try { - if (!fs.statSync(dir).isDirectory()) { - // parent is not a directory - // This is just to cause an internal ENOTDIR error to be thrown - fs.readdirSync(dir) - } - } catch (err) { - // If the stat call above failed because the directory doesn't exist, create it - if (err && err.code === 'ENOENT') mkdir.mkdirsSync(dir) - else throw err - } - - fs.writeFileSync(file, '') -} - -module.exports = { - createFile: u(createFile), - createFileSync -} - - -/***/ }), - -/***/ 55: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const { createFile, createFileSync } = __nccwpck_require__(2164) -const { createLink, createLinkSync } = __nccwpck_require__(3797) -const { createSymlink, createSymlinkSync } = __nccwpck_require__(2549) - -module.exports = { - // file - createFile, - createFileSync, - ensureFile: createFile, - ensureFileSync: createFileSync, - // link - createLink, - createLinkSync, - ensureLink: createLink, - ensureLinkSync: createLinkSync, - // symlink - createSymlink, - createSymlinkSync, - ensureSymlink: createSymlink, - ensureSymlinkSync: createSymlinkSync -} - - -/***/ }), - -/***/ 3797: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const u = __nccwpck_require__(9046).fromCallback -const path = __nccwpck_require__(5622) -const fs = __nccwpck_require__(7758) -const mkdir = __nccwpck_require__(2915) -const pathExists = __nccwpck_require__(3835).pathExists -const { areIdentical } = __nccwpck_require__(3901) - -function createLink (srcpath, dstpath, callback) { - function makeLink (srcpath, dstpath) { - fs.link(srcpath, dstpath, err => { - if (err) return callback(err) - callback(null) - }) - } - - fs.lstat(dstpath, (_, dstStat) => { - fs.lstat(srcpath, (err, srcStat) => { - if (err) { - err.message = err.message.replace('lstat', 'ensureLink') - return callback(err) - } - if (dstStat && areIdentical(srcStat, dstStat)) return callback(null) - - const dir = path.dirname(dstpath) - pathExists(dir, (err, dirExists) => { - if (err) return callback(err) - if (dirExists) return makeLink(srcpath, dstpath) - mkdir.mkdirs(dir, err => { - if (err) return callback(err) - makeLink(srcpath, dstpath) - }) - }) - }) - }) -} - -function createLinkSync (srcpath, dstpath) { - let dstStat - try { - dstStat = fs.lstatSync(dstpath) - } catch {} - - try { - const srcStat = fs.lstatSync(srcpath) - if (dstStat && areIdentical(srcStat, dstStat)) return - } catch (err) { - err.message = err.message.replace('lstat', 'ensureLink') - throw err - } - - const dir = path.dirname(dstpath) - const dirExists = fs.existsSync(dir) - if (dirExists) return fs.linkSync(srcpath, dstpath) - mkdir.mkdirsSync(dir) - - return fs.linkSync(srcpath, dstpath) -} - -module.exports = { - createLink: u(createLink), - createLinkSync -} - - -/***/ }), - -/***/ 3727: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const path = __nccwpck_require__(5622) -const fs = __nccwpck_require__(7758) -const pathExists 
= __nccwpck_require__(3835).pathExists - -/** - * Function that returns two types of paths, one relative to symlink, and one - * relative to the current working directory. Checks if path is absolute or - * relative. If the path is relative, this function checks if the path is - * relative to symlink or relative to current working directory. This is an - * initiative to find a smarter `srcpath` to supply when building symlinks. - * This allows you to determine which path to use out of one of three possible - * types of source paths. The first is an absolute path. This is detected by - * `path.isAbsolute()`. When an absolute path is provided, it is checked to - * see if it exists. If it does it's used, if not an error is returned - * (callback)/ thrown (sync). The other two options for `srcpath` are a - * relative url. By default Node's `fs.symlink` works by creating a symlink - * using `dstpath` and expects the `srcpath` to be relative to the newly - * created symlink. If you provide a `srcpath` that does not exist on the file - * system it results in a broken symlink. To minimize this, the function - * checks to see if the 'relative to symlink' source file exists, and if it - * does it will use it. If it does not, it checks if there's a file that - * exists that is relative to the current working directory, if does its used. - * This preserves the expectations of the original fs.symlink spec and adds - * the ability to pass in `relative to current working direcotry` paths. - */ - -function symlinkPaths (srcpath, dstpath, callback) { - if (path.isAbsolute(srcpath)) { - return fs.lstat(srcpath, (err) => { - if (err) { - err.message = err.message.replace('lstat', 'ensureSymlink') - return callback(err) - } - return callback(null, { - toCwd: srcpath, - toDst: srcpath - }) - }) - } else { - const dstdir = path.dirname(dstpath) - const relativeToDst = path.join(dstdir, srcpath) - return pathExists(relativeToDst, (err, exists) => { - if (err) return callback(err) - if (exists) { - return callback(null, { - toCwd: relativeToDst, - toDst: srcpath - }) - } else { - return fs.lstat(srcpath, (err) => { - if (err) { - err.message = err.message.replace('lstat', 'ensureSymlink') - return callback(err) - } - return callback(null, { - toCwd: srcpath, - toDst: path.relative(dstdir, srcpath) - }) - }) - } - }) - } -} - -function symlinkPathsSync (srcpath, dstpath) { - let exists - if (path.isAbsolute(srcpath)) { - exists = fs.existsSync(srcpath) - if (!exists) throw new Error('absolute srcpath does not exist') - return { - toCwd: srcpath, - toDst: srcpath - } - } else { - const dstdir = path.dirname(dstpath) - const relativeToDst = path.join(dstdir, srcpath) - exists = fs.existsSync(relativeToDst) - if (exists) { - return { - toCwd: relativeToDst, - toDst: srcpath - } - } else { - exists = fs.existsSync(srcpath) - if (!exists) throw new Error('relative srcpath does not exist') - return { - toCwd: srcpath, - toDst: path.relative(dstdir, srcpath) - } - } - } -} - -module.exports = { - symlinkPaths, - symlinkPathsSync -} - - -/***/ }), - -/***/ 8254: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(7758) - -function symlinkType (srcpath, type, callback) { - callback = (typeof type === 'function') ? type : callback - type = (typeof type === 'function') ? false : type - if (type) return callback(null, type) - fs.lstat(srcpath, (err, stats) => { - if (err) return callback(null, 'file') - type = (stats && stats.isDirectory()) ? 
'dir' : 'file' - callback(null, type) - }) -} - -function symlinkTypeSync (srcpath, type) { - let stats - - if (type) return type - try { - stats = fs.lstatSync(srcpath) - } catch { - return 'file' - } - return (stats && stats.isDirectory()) ? 'dir' : 'file' -} - -module.exports = { - symlinkType, - symlinkTypeSync -} - - -/***/ }), - -/***/ 2549: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const u = __nccwpck_require__(9046).fromCallback -const path = __nccwpck_require__(5622) -const fs = __nccwpck_require__(1176) -const _mkdirs = __nccwpck_require__(2915) -const mkdirs = _mkdirs.mkdirs -const mkdirsSync = _mkdirs.mkdirsSync - -const _symlinkPaths = __nccwpck_require__(3727) -const symlinkPaths = _symlinkPaths.symlinkPaths -const symlinkPathsSync = _symlinkPaths.symlinkPathsSync - -const _symlinkType = __nccwpck_require__(8254) -const symlinkType = _symlinkType.symlinkType -const symlinkTypeSync = _symlinkType.symlinkTypeSync - -const pathExists = __nccwpck_require__(3835).pathExists - -const { areIdentical } = __nccwpck_require__(3901) - -function createSymlink (srcpath, dstpath, type, callback) { - callback = (typeof type === 'function') ? type : callback - type = (typeof type === 'function') ? false : type - - fs.lstat(dstpath, (err, stats) => { - if (!err && stats.isSymbolicLink()) { - Promise.all([ - fs.stat(srcpath), - fs.stat(dstpath) - ]).then(([srcStat, dstStat]) => { - if (areIdentical(srcStat, dstStat)) return callback(null) - _createSymlink(srcpath, dstpath, type, callback) - }) - } else _createSymlink(srcpath, dstpath, type, callback) - }) -} - -function _createSymlink (srcpath, dstpath, type, callback) { - symlinkPaths(srcpath, dstpath, (err, relative) => { - if (err) return callback(err) - srcpath = relative.toDst - symlinkType(relative.toCwd, type, (err, type) => { - if (err) return callback(err) - const dir = path.dirname(dstpath) - pathExists(dir, (err, dirExists) => { - if (err) return callback(err) - if (dirExists) return fs.symlink(srcpath, dstpath, type, callback) - mkdirs(dir, err => { - if (err) return callback(err) - fs.symlink(srcpath, dstpath, type, callback) - }) - }) - }) - }) -} - -function createSymlinkSync (srcpath, dstpath, type) { - let stats - try { - stats = fs.lstatSync(dstpath) - } catch {} - if (stats && stats.isSymbolicLink()) { - const srcStat = fs.statSync(srcpath) - const dstStat = fs.statSync(dstpath) - if (areIdentical(srcStat, dstStat)) return - } - - const relative = symlinkPathsSync(srcpath, dstpath) - srcpath = relative.toDst - type = symlinkTypeSync(relative.toCwd, type) - const dir = path.dirname(dstpath) - const exists = fs.existsSync(dir) - if (exists) return fs.symlinkSync(srcpath, dstpath, type) - mkdirsSync(dir) - return fs.symlinkSync(srcpath, dstpath, type) -} - -module.exports = { - createSymlink: u(createSymlink), - createSymlinkSync -} - - -/***/ }), - -/***/ 1176: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// This is adapted from https://github.com/normalize/mz -// Copyright (c) 2014-2016 Jonathan Ong me@jongleberry.com and Contributors -const u = __nccwpck_require__(9046).fromCallback -const fs = __nccwpck_require__(7758) - -const api = [ - 'access', - 'appendFile', - 'chmod', - 'chown', - 'close', - 'copyFile', - 'fchmod', - 'fchown', - 'fdatasync', - 'fstat', - 'fsync', - 'ftruncate', - 'futimes', - 'lchmod', - 'lchown', - 'link', - 'lstat', - 'mkdir', - 'mkdtemp', - 'open', - 'opendir', - 'readdir', - 'readFile', - 'readlink', - 
'realpath', - 'rename', - 'rm', - 'rmdir', - 'stat', - 'symlink', - 'truncate', - 'unlink', - 'utimes', - 'writeFile' -].filter(key => { - // Some commands are not available on some systems. Ex: - // fs.opendir was added in Node.js v12.12.0 - // fs.rm was added in Node.js v14.14.0 - // fs.lchown is not available on at least some Linux - return typeof fs[key] === 'function' -}) - -// Export cloned fs: -Object.assign(exports, fs) - -// Universalify async methods: -api.forEach(method => { - exports[method] = u(fs[method]) -}) - -// We differ from mz/fs in that we still ship the old, broken, fs.exists() -// since we are a drop-in replacement for the native module -exports.exists = function (filename, callback) { - if (typeof callback === 'function') { - return fs.exists(filename, callback) - } - return new Promise(resolve => { - return fs.exists(filename, resolve) - }) -} - -// fs.read(), fs.write(), & fs.writev() need special treatment due to multiple callback args - -exports.read = function (fd, buffer, offset, length, position, callback) { - if (typeof callback === 'function') { - return fs.read(fd, buffer, offset, length, position, callback) - } - return new Promise((resolve, reject) => { - fs.read(fd, buffer, offset, length, position, (err, bytesRead, buffer) => { - if (err) return reject(err) - resolve({ bytesRead, buffer }) - }) - }) -} - -// Function signature can be -// fs.write(fd, buffer[, offset[, length[, position]]], callback) -// OR -// fs.write(fd, string[, position[, encoding]], callback) -// We need to handle both cases, so we use ...args -exports.write = function (fd, buffer, ...args) { - if (typeof args[args.length - 1] === 'function') { - return fs.write(fd, buffer, ...args) - } - - return new Promise((resolve, reject) => { - fs.write(fd, buffer, ...args, (err, bytesWritten, buffer) => { - if (err) return reject(err) - resolve({ bytesWritten, buffer }) - }) - }) -} - -// fs.writev only available in Node v12.9.0+ -if (typeof fs.writev === 'function') { - // Function signature is - // s.writev(fd, buffers[, position], callback) - // We need to handle the optional arg, so we use ...args - exports.writev = function (fd, buffers, ...args) { - if (typeof args[args.length - 1] === 'function') { - return fs.writev(fd, buffers, ...args) - } - - return new Promise((resolve, reject) => { - fs.writev(fd, buffers, ...args, (err, bytesWritten, buffers) => { - if (err) return reject(err) - resolve({ bytesWritten, buffers }) - }) - }) - } -} - -// fs.realpath.native sometimes not available if fs is monkey-patched -if (typeof fs.realpath.native === 'function') { - exports.realpath.native = u(fs.realpath.native) -} else { - process.emitWarning( - 'fs.realpath.native is not a function. 
Is fs being monkey-patched?', - 'Warning', 'fs-extra-WARN0003' - ) -} - - -/***/ }), - -/***/ 5630: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -module.exports = { - // Export promiseified graceful-fs: - ...__nccwpck_require__(1176), - // Export extra methods: - ...__nccwpck_require__(1335), - ...__nccwpck_require__(6970), - ...__nccwpck_require__(55), - ...__nccwpck_require__(213), - ...__nccwpck_require__(2915), - ...__nccwpck_require__(1497), - ...__nccwpck_require__(1832), - ...__nccwpck_require__(3835), - ...__nccwpck_require__(7357) -} - - -/***/ }), - -/***/ 213: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const u = __nccwpck_require__(9046).fromPromise -const jsonFile = __nccwpck_require__(8970) - -jsonFile.outputJson = u(__nccwpck_require__(531)) -jsonFile.outputJsonSync = __nccwpck_require__(9421) -// aliases -jsonFile.outputJSON = jsonFile.outputJson -jsonFile.outputJSONSync = jsonFile.outputJsonSync -jsonFile.writeJSON = jsonFile.writeJson -jsonFile.writeJSONSync = jsonFile.writeJsonSync -jsonFile.readJSON = jsonFile.readJson -jsonFile.readJSONSync = jsonFile.readJsonSync - -module.exports = jsonFile - - -/***/ }), - -/***/ 8970: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const jsonFile = __nccwpck_require__(6160) - -module.exports = { - // jsonfile exports - readJson: jsonFile.readFile, - readJsonSync: jsonFile.readFileSync, - writeJson: jsonFile.writeFile, - writeJsonSync: jsonFile.writeFileSync -} - - -/***/ }), - -/***/ 9421: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const { stringify } = __nccwpck_require__(5902) -const { outputFileSync } = __nccwpck_require__(1832) - -function outputJsonSync (file, data, options) { - const str = stringify(data, options) - - outputFileSync(file, str, options) -} - -module.exports = outputJsonSync - - -/***/ }), - -/***/ 531: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const { stringify } = __nccwpck_require__(5902) -const { outputFile } = __nccwpck_require__(1832) - -async function outputJson (file, data, options = {}) { - const str = stringify(data, options) - - await outputFile(file, str, options) -} - -module.exports = outputJson - - -/***/ }), - -/***/ 2915: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - -const u = __nccwpck_require__(9046).fromPromise -const { makeDir: _makeDir, makeDirSync } = __nccwpck_require__(2751) -const makeDir = u(_makeDir) - -module.exports = { - mkdirs: makeDir, - mkdirsSync: makeDirSync, - // alias - mkdirp: makeDir, - mkdirpSync: makeDirSync, - ensureDir: makeDir, - ensureDirSync: makeDirSync -} - - -/***/ }), - -/***/ 2751: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - -const fs = __nccwpck_require__(1176) -const { checkPath } = __nccwpck_require__(9907) - -const getMode = options => { - const defaults = { mode: 0o777 } - if (typeof options === 'number') return options - return ({ ...defaults, ...options }).mode -} - -module.exports.makeDir = async (dir, options) => { - checkPath(dir) - - return fs.mkdir(dir, { - mode: getMode(options), - recursive: true - }) -} - -module.exports.makeDirSync = (dir, options) => { - checkPath(dir) - - return fs.mkdirSync(dir, { - mode: getMode(options), - recursive: true - }) -} - - -/***/ }), - -/***/ 9907: -/***/ ((module, __unused_webpack_exports, 
__nccwpck_require__) => { - -"use strict"; -// Adapted from https://github.com/sindresorhus/make-dir -// Copyright (c) Sindre Sorhus (sindresorhus.com) -// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: -// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -const path = __nccwpck_require__(5622) - -// https://github.com/nodejs/node/issues/8987 -// https://github.com/libuv/libuv/pull/1088 -module.exports.checkPath = function checkPath (pth) { - if (process.platform === 'win32') { - const pathHasInvalidWinCharacters = /[<>:"|?*]/.test(pth.replace(path.parse(pth).root, '')) - - if (pathHasInvalidWinCharacters) { - const error = new Error(`Path contains invalid characters: ${pth}`) - error.code = 'EINVAL' - throw error - } - } -} - - -/***/ }), - -/***/ 1497: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const u = __nccwpck_require__(9046).fromCallback -module.exports = { - move: u(__nccwpck_require__(2231)), - moveSync: __nccwpck_require__(2047) -} - - -/***/ }), - -/***/ 2047: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(7758) -const path = __nccwpck_require__(5622) -const copySync = __nccwpck_require__(1335).copySync -const removeSync = __nccwpck_require__(7357).removeSync -const mkdirpSync = __nccwpck_require__(2915).mkdirpSync -const stat = __nccwpck_require__(3901) - -function moveSync (src, dest, opts) { - opts = opts || {} - const overwrite = opts.overwrite || opts.clobber || false - - const { srcStat, isChangingCase = false } = stat.checkPathsSync(src, dest, 'move', opts) - stat.checkParentPathsSync(src, srcStat, dest, 'move') - if (!isParentRoot(dest)) mkdirpSync(path.dirname(dest)) - return doRename(src, dest, overwrite, isChangingCase) -} - -function isParentRoot (dest) { - const parent = path.dirname(dest) - const parsedPath = path.parse(parent) - return parsedPath.root === parent -} - -function doRename (src, dest, overwrite, isChangingCase) { - if (isChangingCase) return rename(src, dest, overwrite) - if (overwrite) { - removeSync(dest) - return rename(src, dest, overwrite) - } - if (fs.existsSync(dest)) throw new Error('dest already exists.') - return rename(src, dest, overwrite) -} - -function rename (src, dest, overwrite) { - try { - fs.renameSync(src, dest) - } catch (err) { - if (err.code !== 'EXDEV') throw err - return moveAcrossDevice(src, dest, overwrite) - } -} - -function moveAcrossDevice (src, dest, overwrite) { - const opts = { - overwrite, - errorOnExist: true - } - copySync(src, dest, opts) - return removeSync(src) -} - 
-module.exports = moveSync - - -/***/ }), - -/***/ 2231: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(7758) -const path = __nccwpck_require__(5622) -const copy = __nccwpck_require__(1335).copy -const remove = __nccwpck_require__(7357).remove -const mkdirp = __nccwpck_require__(2915).mkdirp -const pathExists = __nccwpck_require__(3835).pathExists -const stat = __nccwpck_require__(3901) - -function move (src, dest, opts, cb) { - if (typeof opts === 'function') { - cb = opts - opts = {} - } - - opts = opts || {} - - const overwrite = opts.overwrite || opts.clobber || false - - stat.checkPaths(src, dest, 'move', opts, (err, stats) => { - if (err) return cb(err) - const { srcStat, isChangingCase = false } = stats - stat.checkParentPaths(src, srcStat, dest, 'move', err => { - if (err) return cb(err) - if (isParentRoot(dest)) return doRename(src, dest, overwrite, isChangingCase, cb) - mkdirp(path.dirname(dest), err => { - if (err) return cb(err) - return doRename(src, dest, overwrite, isChangingCase, cb) - }) - }) - }) -} - -function isParentRoot (dest) { - const parent = path.dirname(dest) - const parsedPath = path.parse(parent) - return parsedPath.root === parent -} - -function doRename (src, dest, overwrite, isChangingCase, cb) { - if (isChangingCase) return rename(src, dest, overwrite, cb) - if (overwrite) { - return remove(dest, err => { - if (err) return cb(err) - return rename(src, dest, overwrite, cb) - }) - } - pathExists(dest, (err, destExists) => { - if (err) return cb(err) - if (destExists) return cb(new Error('dest already exists.')) - return rename(src, dest, overwrite, cb) - }) -} - -function rename (src, dest, overwrite, cb) { - fs.rename(src, dest, err => { - if (!err) return cb() - if (err.code !== 'EXDEV') return cb(err) - return moveAcrossDevice(src, dest, overwrite, cb) - }) -} - -function moveAcrossDevice (src, dest, overwrite, cb) { - const opts = { - overwrite, - errorOnExist: true - } - copy(src, dest, opts, err => { - if (err) return cb(err) - return remove(src, cb) - }) -} - -module.exports = move - - -/***/ }), - -/***/ 1832: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const u = __nccwpck_require__(9046).fromCallback -const fs = __nccwpck_require__(7758) -const path = __nccwpck_require__(5622) -const mkdir = __nccwpck_require__(2915) -const pathExists = __nccwpck_require__(3835).pathExists - -function outputFile (file, data, encoding, callback) { - if (typeof encoding === 'function') { - callback = encoding - encoding = 'utf8' - } - - const dir = path.dirname(file) - pathExists(dir, (err, itDoes) => { - if (err) return callback(err) - if (itDoes) return fs.writeFile(file, data, encoding, callback) - - mkdir.mkdirs(dir, err => { - if (err) return callback(err) - - fs.writeFile(file, data, encoding, callback) - }) - }) -} - -function outputFileSync (file, ...args) { - const dir = path.dirname(file) - if (fs.existsSync(dir)) { - return fs.writeFileSync(file, ...args) - } - mkdir.mkdirsSync(dir) - fs.writeFileSync(file, ...args) -} - -module.exports = { - outputFile: u(outputFile), - outputFileSync -} - - -/***/ }), - -/***/ 3835: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - -const u = __nccwpck_require__(9046).fromPromise -const fs = __nccwpck_require__(1176) - -function pathExists (path) { - return fs.access(path).then(() => true).catch(() => false) -} - -module.exports = { - pathExists: u(pathExists), - 
pathExistsSync: fs.existsSync -} - - -/***/ }), - -/***/ 7357: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(7758) -const u = __nccwpck_require__(9046).fromCallback -const rimraf = __nccwpck_require__(7247) - -function remove (path, callback) { - // Node 14.14.0+ - if (fs.rm) return fs.rm(path, { recursive: true, force: true }, callback) - rimraf(path, callback) -} - -function removeSync (path) { - // Node 14.14.0+ - if (fs.rmSync) return fs.rmSync(path, { recursive: true, force: true }) - rimraf.sync(path) -} - -module.exports = { - remove: u(remove), - removeSync -} - - -/***/ }), - -/***/ 7247: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(7758) -const path = __nccwpck_require__(5622) -const assert = __nccwpck_require__(2357) - -const isWindows = (process.platform === 'win32') - -function defaults (options) { - const methods = [ - 'unlink', - 'chmod', - 'stat', - 'lstat', - 'rmdir', - 'readdir' - ] - methods.forEach(m => { - options[m] = options[m] || fs[m] - m = m + 'Sync' - options[m] = options[m] || fs[m] - }) - - options.maxBusyTries = options.maxBusyTries || 3 -} - -function rimraf (p, options, cb) { - let busyTries = 0 - - if (typeof options === 'function') { - cb = options - options = {} - } - - assert(p, 'rimraf: missing path') - assert.strictEqual(typeof p, 'string', 'rimraf: path should be a string') - assert.strictEqual(typeof cb, 'function', 'rimraf: callback function required') - assert(options, 'rimraf: invalid options argument provided') - assert.strictEqual(typeof options, 'object', 'rimraf: options should be object') - - defaults(options) - - rimraf_(p, options, function CB (er) { - if (er) { - if ((er.code === 'EBUSY' || er.code === 'ENOTEMPTY' || er.code === 'EPERM') && - busyTries < options.maxBusyTries) { - busyTries++ - const time = busyTries * 100 - // try again, with the same exact callback as this one. - return setTimeout(() => rimraf_(p, options, CB), time) - } - - // already gone - if (er.code === 'ENOENT') er = null - } - - cb(er) - }) -} - -// Two possible strategies. -// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR -// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR -// -// Both result in an extra syscall when you guess wrong. However, there -// are likely far more normal files in the world than directories. This -// is based on the assumption that a the average number of files per -// directory is >= 1. -// -// If anyone ever complains about this, then I guess the strategy could -// be made configurable somehow. But until then, YAGNI. -function rimraf_ (p, options, cb) { - assert(p) - assert(options) - assert(typeof cb === 'function') - - // sunos lets the root user unlink directories, which is... weird. - // so we have to lstat here and make sure it's not a dir. - options.lstat(p, (er, st) => { - if (er && er.code === 'ENOENT') { - return cb(null) - } - - // Windows can EPERM on stat. Life is suffering. - if (er && er.code === 'EPERM' && isWindows) { - return fixWinEPERM(p, options, er, cb) - } - - if (st && st.isDirectory()) { - return rmdir(p, options, er, cb) - } - - options.unlink(p, er => { - if (er) { - if (er.code === 'ENOENT') { - return cb(null) - } - if (er.code === 'EPERM') { - return (isWindows) - ? 
fixWinEPERM(p, options, er, cb) - : rmdir(p, options, er, cb) - } - if (er.code === 'EISDIR') { - return rmdir(p, options, er, cb) - } - } - return cb(er) - }) - }) -} - -function fixWinEPERM (p, options, er, cb) { - assert(p) - assert(options) - assert(typeof cb === 'function') - - options.chmod(p, 0o666, er2 => { - if (er2) { - cb(er2.code === 'ENOENT' ? null : er) - } else { - options.stat(p, (er3, stats) => { - if (er3) { - cb(er3.code === 'ENOENT' ? null : er) - } else if (stats.isDirectory()) { - rmdir(p, options, er, cb) - } else { - options.unlink(p, cb) - } - }) - } - }) -} - -function fixWinEPERMSync (p, options, er) { - let stats - - assert(p) - assert(options) - - try { - options.chmodSync(p, 0o666) - } catch (er2) { - if (er2.code === 'ENOENT') { - return - } else { - throw er - } - } - - try { - stats = options.statSync(p) - } catch (er3) { - if (er3.code === 'ENOENT') { - return - } else { - throw er - } - } - - if (stats.isDirectory()) { - rmdirSync(p, options, er) - } else { - options.unlinkSync(p) - } -} - -function rmdir (p, options, originalEr, cb) { - assert(p) - assert(options) - assert(typeof cb === 'function') - - // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS) - // if we guessed wrong, and it's not a directory, then - // raise the original error. - options.rmdir(p, er => { - if (er && (er.code === 'ENOTEMPTY' || er.code === 'EEXIST' || er.code === 'EPERM')) { - rmkids(p, options, cb) - } else if (er && er.code === 'ENOTDIR') { - cb(originalEr) - } else { - cb(er) - } - }) -} - -function rmkids (p, options, cb) { - assert(p) - assert(options) - assert(typeof cb === 'function') - - options.readdir(p, (er, files) => { - if (er) return cb(er) - - let n = files.length - let errState - - if (n === 0) return options.rmdir(p, cb) - - files.forEach(f => { - rimraf(path.join(p, f), options, er => { - if (errState) { - return - } - if (er) return cb(errState = er) - if (--n === 0) { - options.rmdir(p, cb) - } - }) - }) - }) -} - -// this looks simpler, and is strictly *faster*, but will -// tie up the JavaScript thread and fail on excessively -// deep directory trees. -function rimrafSync (p, options) { - let st - - options = options || {} - defaults(options) - - assert(p, 'rimraf: missing path') - assert.strictEqual(typeof p, 'string', 'rimraf: path should be a string') - assert(options, 'rimraf: missing options') - assert.strictEqual(typeof options, 'object', 'rimraf: options should be object') - - try { - st = options.lstatSync(p) - } catch (er) { - if (er.code === 'ENOENT') { - return - } - - // Windows can EPERM on stat. Life is suffering. - if (er.code === 'EPERM' && isWindows) { - fixWinEPERMSync(p, options, er) - } - } - - try { - // sunos lets the root user unlink directories, which is... weird. - if (st && st.isDirectory()) { - rmdirSync(p, options, null) - } else { - options.unlinkSync(p) - } - } catch (er) { - if (er.code === 'ENOENT') { - return - } else if (er.code === 'EPERM') { - return isWindows ? 
fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er) - } else if (er.code !== 'EISDIR') { - throw er - } - rmdirSync(p, options, er) - } -} - -function rmdirSync (p, options, originalEr) { - assert(p) - assert(options) - - try { - options.rmdirSync(p) - } catch (er) { - if (er.code === 'ENOTDIR') { - throw originalEr - } else if (er.code === 'ENOTEMPTY' || er.code === 'EEXIST' || er.code === 'EPERM') { - rmkidsSync(p, options) - } else if (er.code !== 'ENOENT') { - throw er - } - } -} - -function rmkidsSync (p, options) { - assert(p) - assert(options) - options.readdirSync(p).forEach(f => rimrafSync(path.join(p, f), options)) - - if (isWindows) { - // We only end up here once we got ENOTEMPTY at least once, and - // at this point, we are guaranteed to have removed all the kids. - // So, we know that it won't be ENOENT or ENOTDIR or anything else. - // try really hard to delete stuff on windows, because it has a - // PROFOUNDLY annoying habit of not closing handles promptly when - // files are deleted, resulting in spurious ENOTEMPTY errors. - const startTime = Date.now() - do { - try { - const ret = options.rmdirSync(p, options) - return ret - } catch {} - } while (Date.now() - startTime < 500) // give up after 500ms - } else { - const ret = options.rmdirSync(p, options) - return ret - } -} - -module.exports = rimraf -rimraf.sync = rimrafSync - - -/***/ }), - -/***/ 3901: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(1176) -const path = __nccwpck_require__(5622) -const util = __nccwpck_require__(1669) - -function getStats (src, dest, opts) { - const statFunc = opts.dereference - ? (file) => fs.stat(file, { bigint: true }) - : (file) => fs.lstat(file, { bigint: true }) - return Promise.all([ - statFunc(src), - statFunc(dest).catch(err => { - if (err.code === 'ENOENT') return null - throw err - }) - ]).then(([srcStat, destStat]) => ({ srcStat, destStat })) -} - -function getStatsSync (src, dest, opts) { - let destStat - const statFunc = opts.dereference - ? 
(file) => fs.statSync(file, { bigint: true }) - : (file) => fs.lstatSync(file, { bigint: true }) - const srcStat = statFunc(src) - try { - destStat = statFunc(dest) - } catch (err) { - if (err.code === 'ENOENT') return { srcStat, destStat: null } - throw err - } - return { srcStat, destStat } -} - -function checkPaths (src, dest, funcName, opts, cb) { - util.callbackify(getStats)(src, dest, opts, (err, stats) => { - if (err) return cb(err) - const { srcStat, destStat } = stats - - if (destStat) { - if (areIdentical(srcStat, destStat)) { - const srcBaseName = path.basename(src) - const destBaseName = path.basename(dest) - if (funcName === 'move' && - srcBaseName !== destBaseName && - srcBaseName.toLowerCase() === destBaseName.toLowerCase()) { - return cb(null, { srcStat, destStat, isChangingCase: true }) - } - return cb(new Error('Source and destination must not be the same.')) - } - if (srcStat.isDirectory() && !destStat.isDirectory()) { - return cb(new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`)) - } - if (!srcStat.isDirectory() && destStat.isDirectory()) { - return cb(new Error(`Cannot overwrite directory '${dest}' with non-directory '${src}'.`)) - } - } - - if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { - return cb(new Error(errMsg(src, dest, funcName))) - } - return cb(null, { srcStat, destStat }) - }) -} - -function checkPathsSync (src, dest, funcName, opts) { - const { srcStat, destStat } = getStatsSync(src, dest, opts) - - if (destStat) { - if (areIdentical(srcStat, destStat)) { - const srcBaseName = path.basename(src) - const destBaseName = path.basename(dest) - if (funcName === 'move' && - srcBaseName !== destBaseName && - srcBaseName.toLowerCase() === destBaseName.toLowerCase()) { - return { srcStat, destStat, isChangingCase: true } - } - throw new Error('Source and destination must not be the same.') - } - if (srcStat.isDirectory() && !destStat.isDirectory()) { - throw new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`) - } - if (!srcStat.isDirectory() && destStat.isDirectory()) { - throw new Error(`Cannot overwrite directory '${dest}' with non-directory '${src}'.`) - } - } - - if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { - throw new Error(errMsg(src, dest, funcName)) - } - return { srcStat, destStat } -} - -// recursively check if dest parent is a subdirectory of src. -// It works for all file types including symlinks since it -// checks the src and dest inodes. It starts from the deepest -// parent and stops once it reaches the src parent or the root path. 
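// --- A minimal usage sketch, not part of the bundled fs-extra source: how the
// subdirectory check described in the comment above surfaces to callers of
// fs-extra's move(). Assumes the fs-extra package is installed; the paths
// below are hypothetical.
const fse = require('fs-extra')

async function moveIntoItselfDemo () {
  await fse.ensureDir('/tmp/demo/src')
  try {
    // dest lives inside src, so checkPaths()/isSrcSubdir() reject the move
    await fse.move('/tmp/demo/src', '/tmp/demo/src/nested')
  } catch (err) {
    // e.g. "Cannot move '/tmp/demo/src' to a subdirectory of itself, '/tmp/demo/src/nested'."
    console.error(err.message)
  }
}

moveIntoItselfDemo()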
-function checkParentPaths (src, srcStat, dest, funcName, cb) { - const srcParent = path.resolve(path.dirname(src)) - const destParent = path.resolve(path.dirname(dest)) - if (destParent === srcParent || destParent === path.parse(destParent).root) return cb() - fs.stat(destParent, { bigint: true }, (err, destStat) => { - if (err) { - if (err.code === 'ENOENT') return cb() - return cb(err) - } - if (areIdentical(srcStat, destStat)) { - return cb(new Error(errMsg(src, dest, funcName))) - } - return checkParentPaths(src, srcStat, destParent, funcName, cb) - }) -} - -function checkParentPathsSync (src, srcStat, dest, funcName) { - const srcParent = path.resolve(path.dirname(src)) - const destParent = path.resolve(path.dirname(dest)) - if (destParent === srcParent || destParent === path.parse(destParent).root) return - let destStat - try { - destStat = fs.statSync(destParent, { bigint: true }) - } catch (err) { - if (err.code === 'ENOENT') return - throw err - } - if (areIdentical(srcStat, destStat)) { - throw new Error(errMsg(src, dest, funcName)) - } - return checkParentPathsSync(src, srcStat, destParent, funcName) -} - -function areIdentical (srcStat, destStat) { - return destStat.ino && destStat.dev && destStat.ino === srcStat.ino && destStat.dev === srcStat.dev -} - -// return true if dest is a subdir of src, otherwise false. -// It only checks the path strings. -function isSrcSubdir (src, dest) { - const srcArr = path.resolve(src).split(path.sep).filter(i => i) - const destArr = path.resolve(dest).split(path.sep).filter(i => i) - return srcArr.reduce((acc, cur, i) => acc && destArr[i] === cur, true) -} - -function errMsg (src, dest, funcName) { - return `Cannot ${funcName} '${src}' to a subdirectory of itself, '${dest}'.` -} - -module.exports = { - checkPaths, - checkPathsSync, - checkParentPaths, - checkParentPathsSync, - isSrcSubdir, - areIdentical -} - - -/***/ }), - -/***/ 2548: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(7758) - -function utimesMillis (path, atime, mtime, callback) { - // if (!HAS_MILLIS_RES) return fs.utimes(path, atime, mtime, callback) - fs.open(path, 'r+', (err, fd) => { - if (err) return callback(err) - fs.futimes(fd, atime, mtime, futimesErr => { - fs.close(fd, closeErr => { - if (callback) callback(futimesErr || closeErr) - }) - }) - }) -} - -function utimesMillisSync (path, atime, mtime) { - const fd = fs.openSync(path, 'r+') - fs.futimesSync(fd, atime, mtime) - return fs.closeSync(fd) -} - -module.exports = { - utimesMillis, - utimesMillisSync -} - - -/***/ }), - -/***/ 7356: -/***/ ((module) => { - -"use strict"; - - -module.exports = clone - -var getPrototypeOf = Object.getPrototypeOf || function (obj) { - return obj.__proto__ -} - -function clone (obj) { - if (obj === null || typeof obj !== 'object') - return obj - - if (obj instanceof Object) - var copy = { __proto__: getPrototypeOf(obj) } - else - var copy = Object.create(null) - - Object.getOwnPropertyNames(obj).forEach(function (key) { - Object.defineProperty(copy, key, Object.getOwnPropertyDescriptor(obj, key)) - }) - - return copy -} - - -/***/ }), - -/***/ 7758: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var fs = __nccwpck_require__(5747) -var polyfills = __nccwpck_require__(263) -var legacy = __nccwpck_require__(3086) -var clone = __nccwpck_require__(7356) - -var util = __nccwpck_require__(1669) - -/* istanbul ignore next - node 0.x polyfill */ -var gracefulQueue -var previousSymbol - -/* 
istanbul ignore else - node 0.x polyfill */ -if (typeof Symbol === 'function' && typeof Symbol.for === 'function') { - gracefulQueue = Symbol.for('graceful-fs.queue') - // This is used in testing by future versions - previousSymbol = Symbol.for('graceful-fs.previous') -} else { - gracefulQueue = '___graceful-fs.queue' - previousSymbol = '___graceful-fs.previous' -} - -function noop () {} - -function publishQueue(context, queue) { - Object.defineProperty(context, gracefulQueue, { - get: function() { - return queue - } - }) -} - -var debug = noop -if (util.debuglog) - debug = util.debuglog('gfs4') -else if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) - debug = function() { - var m = util.format.apply(util, arguments) - m = 'GFS4: ' + m.split(/\n/).join('\nGFS4: ') - console.error(m) - } - -// Once time initialization -if (!fs[gracefulQueue]) { - // This queue can be shared by multiple loaded instances - var queue = global[gracefulQueue] || [] - publishQueue(fs, queue) - - // Patch fs.close/closeSync to shared queue version, because we need - // to retry() whenever a close happens *anywhere* in the program. - // This is essential when multiple graceful-fs instances are - // in play at the same time. - fs.close = (function (fs$close) { - function close (fd, cb) { - return fs$close.call(fs, fd, function (err) { - // This function uses the graceful-fs shared queue - if (!err) { - resetQueue() - } - - if (typeof cb === 'function') - cb.apply(this, arguments) - }) - } - - Object.defineProperty(close, previousSymbol, { - value: fs$close - }) - return close - })(fs.close) - - fs.closeSync = (function (fs$closeSync) { - function closeSync (fd) { - // This function uses the graceful-fs shared queue - fs$closeSync.apply(fs, arguments) - resetQueue() - } - - Object.defineProperty(closeSync, previousSymbol, { - value: fs$closeSync - }) - return closeSync - })(fs.closeSync) - - if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) { - process.on('exit', function() { - debug(fs[gracefulQueue]) - __nccwpck_require__(2357).equal(fs[gracefulQueue].length, 0) - }) - } -} - -if (!global[gracefulQueue]) { - publishQueue(global, fs[gracefulQueue]); -} - -module.exports = patch(clone(fs)) -if (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !fs.__patched) { - module.exports = patch(fs) - fs.__patched = true; -} - -function patch (fs) { - // Everything that references the open() function needs to be in here - polyfills(fs) - fs.gracefulify = patch - - fs.createReadStream = createReadStream - fs.createWriteStream = createWriteStream - var fs$readFile = fs.readFile - fs.readFile = readFile - function readFile (path, options, cb) { - if (typeof options === 'function') - cb = options, options = null - - return go$readFile(path, options, cb) - - function go$readFile (path, options, cb, startTime) { - return fs$readFile(path, options, function (err) { - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([go$readFile, [path, options, cb], err, startTime || Date.now(), Date.now()]) - else { - if (typeof cb === 'function') - cb.apply(this, arguments) - } - }) - } - } - - var fs$writeFile = fs.writeFile - fs.writeFile = writeFile - function writeFile (path, data, options, cb) { - if (typeof options === 'function') - cb = options, options = null - - return go$writeFile(path, data, options, cb) - - function go$writeFile (path, data, options, cb, startTime) { - return fs$writeFile(path, data, options, function (err) { - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([go$writeFile, 
[path, data, options, cb], err, startTime || Date.now(), Date.now()]) - else { - if (typeof cb === 'function') - cb.apply(this, arguments) - } - }) - } - } - - var fs$appendFile = fs.appendFile - if (fs$appendFile) - fs.appendFile = appendFile - function appendFile (path, data, options, cb) { - if (typeof options === 'function') - cb = options, options = null - - return go$appendFile(path, data, options, cb) - - function go$appendFile (path, data, options, cb, startTime) { - return fs$appendFile(path, data, options, function (err) { - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([go$appendFile, [path, data, options, cb], err, startTime || Date.now(), Date.now()]) - else { - if (typeof cb === 'function') - cb.apply(this, arguments) - } - }) - } - } - - var fs$copyFile = fs.copyFile - if (fs$copyFile) - fs.copyFile = copyFile - function copyFile (src, dest, flags, cb) { - if (typeof flags === 'function') { - cb = flags - flags = 0 - } - return go$copyFile(src, dest, flags, cb) - - function go$copyFile (src, dest, flags, cb, startTime) { - return fs$copyFile(src, dest, flags, function (err) { - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([go$copyFile, [src, dest, flags, cb], err, startTime || Date.now(), Date.now()]) - else { - if (typeof cb === 'function') - cb.apply(this, arguments) - } - }) - } - } - - var fs$readdir = fs.readdir - fs.readdir = readdir - var noReaddirOptionVersions = /^v[0-5]\./ - function readdir (path, options, cb) { - if (typeof options === 'function') - cb = options, options = null - - var go$readdir = noReaddirOptionVersions.test(process.version) - ? function go$readdir (path, options, cb, startTime) { - return fs$readdir(path, fs$readdirCallback( - path, options, cb, startTime - )) - } - : function go$readdir (path, options, cb, startTime) { - return fs$readdir(path, options, fs$readdirCallback( - path, options, cb, startTime - )) - } - - return go$readdir(path, options, cb) - - function fs$readdirCallback (path, options, cb, startTime) { - return function (err, files) { - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([ - go$readdir, - [path, options, cb], - err, - startTime || Date.now(), - Date.now() - ]) - else { - if (files && files.sort) - files.sort() - - if (typeof cb === 'function') - cb.call(this, err, files) - } - } - } - } - - if (process.version.substr(0, 4) === 'v0.8') { - var legStreams = legacy(fs) - ReadStream = legStreams.ReadStream - WriteStream = legStreams.WriteStream - } - - var fs$ReadStream = fs.ReadStream - if (fs$ReadStream) { - ReadStream.prototype = Object.create(fs$ReadStream.prototype) - ReadStream.prototype.open = ReadStream$open - } - - var fs$WriteStream = fs.WriteStream - if (fs$WriteStream) { - WriteStream.prototype = Object.create(fs$WriteStream.prototype) - WriteStream.prototype.open = WriteStream$open - } - - Object.defineProperty(fs, 'ReadStream', { - get: function () { - return ReadStream - }, - set: function (val) { - ReadStream = val - }, - enumerable: true, - configurable: true - }) - Object.defineProperty(fs, 'WriteStream', { - get: function () { - return WriteStream - }, - set: function (val) { - WriteStream = val - }, - enumerable: true, - configurable: true - }) - - // legacy names - var FileReadStream = ReadStream - Object.defineProperty(fs, 'FileReadStream', { - get: function () { - return FileReadStream - }, - set: function (val) { - FileReadStream = val - }, - enumerable: true, - configurable: true - }) - var FileWriteStream = WriteStream - 
Object.defineProperty(fs, 'FileWriteStream', { - get: function () { - return FileWriteStream - }, - set: function (val) { - FileWriteStream = val - }, - enumerable: true, - configurable: true - }) - - function ReadStream (path, options) { - if (this instanceof ReadStream) - return fs$ReadStream.apply(this, arguments), this - else - return ReadStream.apply(Object.create(ReadStream.prototype), arguments) - } - - function ReadStream$open () { - var that = this - open(that.path, that.flags, that.mode, function (err, fd) { - if (err) { - if (that.autoClose) - that.destroy() - - that.emit('error', err) - } else { - that.fd = fd - that.emit('open', fd) - that.read() - } - }) - } - - function WriteStream (path, options) { - if (this instanceof WriteStream) - return fs$WriteStream.apply(this, arguments), this - else - return WriteStream.apply(Object.create(WriteStream.prototype), arguments) - } - - function WriteStream$open () { - var that = this - open(that.path, that.flags, that.mode, function (err, fd) { - if (err) { - that.destroy() - that.emit('error', err) - } else { - that.fd = fd - that.emit('open', fd) - } - }) - } - - function createReadStream (path, options) { - return new fs.ReadStream(path, options) - } - - function createWriteStream (path, options) { - return new fs.WriteStream(path, options) - } - - var fs$open = fs.open - fs.open = open - function open (path, flags, mode, cb) { - if (typeof mode === 'function') - cb = mode, mode = null - - return go$open(path, flags, mode, cb) - - function go$open (path, flags, mode, cb, startTime) { - return fs$open(path, flags, mode, function (err, fd) { - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([go$open, [path, flags, mode, cb], err, startTime || Date.now(), Date.now()]) - else { - if (typeof cb === 'function') - cb.apply(this, arguments) - } - }) - } - } - - return fs -} - -function enqueue (elem) { - debug('ENQUEUE', elem[0].name, elem[1]) - fs[gracefulQueue].push(elem) - retry() -} - -// keep track of the timeout between retry() calls -var retryTimer - -// reset the startTime and lastTime to now -// this resets the start of the 60 second overall timeout as well as the -// delay between attempts so that we'll retry these jobs sooner -function resetQueue () { - var now = Date.now() - for (var i = 0; i < fs[gracefulQueue].length; ++i) { - // entries that are only a length of 2 are from an older version, don't - // bother modifying those since they'll be retried anyway. 
- if (fs[gracefulQueue][i].length > 2) { - fs[gracefulQueue][i][3] = now // startTime - fs[gracefulQueue][i][4] = now // lastTime - } - } - // call retry to make sure we're actively processing the queue - retry() -} - -function retry () { - // clear the timer and remove it to help prevent unintended concurrency - clearTimeout(retryTimer) - retryTimer = undefined - - if (fs[gracefulQueue].length === 0) - return - - var elem = fs[gracefulQueue].shift() - var fn = elem[0] - var args = elem[1] - // these items may be unset if they were added by an older graceful-fs - var err = elem[2] - var startTime = elem[3] - var lastTime = elem[4] - - // if we don't have a startTime we have no way of knowing if we've waited - // long enough, so go ahead and retry this item now - if (startTime === undefined) { - debug('RETRY', fn.name, args) - fn.apply(null, args) - } else if (Date.now() - startTime >= 60000) { - // it's been more than 60 seconds total, bail now - debug('TIMEOUT', fn.name, args) - var cb = args.pop() - if (typeof cb === 'function') - cb.call(null, err) - } else { - // the amount of time between the last attempt and right now - var sinceAttempt = Date.now() - lastTime - // the amount of time between when we first tried, and when we last tried - // rounded up to at least 1 - var sinceStart = Math.max(lastTime - startTime, 1) - // backoff. wait longer than the total time we've been retrying, but only - // up to a maximum of 100ms - var desiredDelay = Math.min(sinceStart * 1.2, 100) - // it's been long enough since the last retry, do it again - if (sinceAttempt >= desiredDelay) { - debug('RETRY', fn.name, args) - fn.apply(null, args.concat([startTime])) - } else { - // if we can't do this job yet, push it to the end of the queue - // and let the next iteration check again - fs[gracefulQueue].push(elem) - } - } - - // schedule our next run if one isn't already scheduled - if (retryTimer === undefined) { - retryTimer = setTimeout(retry, 0) - } -} - - -/***/ }), - -/***/ 3086: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var Stream = __nccwpck_require__(2413).Stream - -module.exports = legacy - -function legacy (fs) { - return { - ReadStream: ReadStream, - WriteStream: WriteStream - } - - function ReadStream (path, options) { - if (!(this instanceof ReadStream)) return new ReadStream(path, options); - - Stream.call(this); - - var self = this; - - this.path = path; - this.fd = null; - this.readable = true; - this.paused = false; - - this.flags = 'r'; - this.mode = 438; /*=0666*/ - this.bufferSize = 64 * 1024; - - options = options || {}; - - // Mixin options into this - var keys = Object.keys(options); - for (var index = 0, length = keys.length; index < length; index++) { - var key = keys[index]; - this[key] = options[key]; - } - - if (this.encoding) this.setEncoding(this.encoding); - - if (this.start !== undefined) { - if ('number' !== typeof this.start) { - throw TypeError('start must be a Number'); - } - if (this.end === undefined) { - this.end = Infinity; - } else if ('number' !== typeof this.end) { - throw TypeError('end must be a Number'); - } - - if (this.start > this.end) { - throw new Error('start must be <= end'); - } - - this.pos = this.start; - } - - if (this.fd !== null) { - process.nextTick(function() { - self._read(); - }); - return; - } - - fs.open(this.path, this.flags, this.mode, function (err, fd) { - if (err) { - self.emit('error', err); - self.readable = false; - return; - } - - self.fd = fd; - self.emit('open', fd); - self._read(); - }) - } - - 
function WriteStream (path, options) { - if (!(this instanceof WriteStream)) return new WriteStream(path, options); - - Stream.call(this); - - this.path = path; - this.fd = null; - this.writable = true; - - this.flags = 'w'; - this.encoding = 'binary'; - this.mode = 438; /*=0666*/ - this.bytesWritten = 0; - - options = options || {}; - - // Mixin options into this - var keys = Object.keys(options); - for (var index = 0, length = keys.length; index < length; index++) { - var key = keys[index]; - this[key] = options[key]; - } - - if (this.start !== undefined) { - if ('number' !== typeof this.start) { - throw TypeError('start must be a Number'); - } - if (this.start < 0) { - throw new Error('start must be >= zero'); - } - - this.pos = this.start; - } - - this.busy = false; - this._queue = []; - - if (this.fd === null) { - this._open = fs.open; - this._queue.push([this._open, this.path, this.flags, this.mode, undefined]); - this.flush(); - } - } -} - - -/***/ }), - -/***/ 263: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var constants = __nccwpck_require__(7619) - -var origCwd = process.cwd -var cwd = null - -var platform = process.env.GRACEFUL_FS_PLATFORM || process.platform - -process.cwd = function() { - if (!cwd) - cwd = origCwd.call(process) - return cwd -} -try { - process.cwd() -} catch (er) {} - -// This check is needed until node.js 12 is required -if (typeof process.chdir === 'function') { - var chdir = process.chdir - process.chdir = function (d) { - cwd = null - chdir.call(process, d) - } - if (Object.setPrototypeOf) Object.setPrototypeOf(process.chdir, chdir) -} - -module.exports = patch - -function patch (fs) { - // (re-)implement some things that are known busted or missing. - - // lchmod, broken prior to 0.6.2 - // back-port the fix here. - if (constants.hasOwnProperty('O_SYMLINK') && - process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) { - patchLchmod(fs) - } - - // lutimes implementation, or no-op - if (!fs.lutimes) { - patchLutimes(fs) - } - - // https://github.com/isaacs/node-graceful-fs/issues/4 - // Chown should not fail on einval or eperm if non-root. - // It should not fail on enosys ever, as this just indicates - // that a fs doesn't support the intended operation. - - fs.chown = chownFix(fs.chown) - fs.fchown = chownFix(fs.fchown) - fs.lchown = chownFix(fs.lchown) - - fs.chmod = chmodFix(fs.chmod) - fs.fchmod = chmodFix(fs.fchmod) - fs.lchmod = chmodFix(fs.lchmod) - - fs.chownSync = chownFixSync(fs.chownSync) - fs.fchownSync = chownFixSync(fs.fchownSync) - fs.lchownSync = chownFixSync(fs.lchownSync) - - fs.chmodSync = chmodFixSync(fs.chmodSync) - fs.fchmodSync = chmodFixSync(fs.fchmodSync) - fs.lchmodSync = chmodFixSync(fs.lchmodSync) - - fs.stat = statFix(fs.stat) - fs.fstat = statFix(fs.fstat) - fs.lstat = statFix(fs.lstat) - - fs.statSync = statFixSync(fs.statSync) - fs.fstatSync = statFixSync(fs.fstatSync) - fs.lstatSync = statFixSync(fs.lstatSync) - - // if lchmod/lchown do not exist, then make them no-ops - if (fs.chmod && !fs.lchmod) { - fs.lchmod = function (path, mode, cb) { - if (cb) process.nextTick(cb) - } - fs.lchmodSync = function () {} - } - if (fs.chown && !fs.lchown) { - fs.lchown = function (path, uid, gid, cb) { - if (cb) process.nextTick(cb) - } - fs.lchownSync = function () {} - } - - // on Windows, A/V software can lock the directory, causing this - // to fail with an EACCES or EPERM if the directory contains newly - // created files. Try again on failure, for up to 60 seconds. 
- - // Set the timeout this long because some Windows Anti-Virus, such as Parity - // bit9, may lock files for up to a minute, causing npm package install - // failures. Also, take care to yield the scheduler. Windows scheduling gives - // CPU to a busy looping process, which can cause the program causing the lock - // contention to be starved of CPU by node, so the contention doesn't resolve. - if (platform === "win32") { - fs.rename = typeof fs.rename !== 'function' ? fs.rename - : (function (fs$rename) { - function rename (from, to, cb) { - var start = Date.now() - var backoff = 0; - fs$rename(from, to, function CB (er) { - if (er - && (er.code === "EACCES" || er.code === "EPERM") - && Date.now() - start < 60000) { - setTimeout(function() { - fs.stat(to, function (stater, st) { - if (stater && stater.code === "ENOENT") - fs$rename(from, to, CB); - else - cb(er) - }) - }, backoff) - if (backoff < 100) - backoff += 10; - return; - } - if (cb) cb(er) - }) - } - if (Object.setPrototypeOf) Object.setPrototypeOf(rename, fs$rename) - return rename - })(fs.rename) - } - - // if read() returns EAGAIN, then just try it again. - fs.read = typeof fs.read !== 'function' ? fs.read - : (function (fs$read) { - function read (fd, buffer, offset, length, position, callback_) { - var callback - if (callback_ && typeof callback_ === 'function') { - var eagCounter = 0 - callback = function (er, _, __) { - if (er && er.code === 'EAGAIN' && eagCounter < 10) { - eagCounter ++ - return fs$read.call(fs, fd, buffer, offset, length, position, callback) - } - callback_.apply(this, arguments) - } - } - return fs$read.call(fs, fd, buffer, offset, length, position, callback) - } - - // This ensures `util.promisify` works as it does for native `fs.read`. - if (Object.setPrototypeOf) Object.setPrototypeOf(read, fs$read) - return read - })(fs.read) - - fs.readSync = typeof fs.readSync !== 'function' ? fs.readSync - : (function (fs$readSync) { return function (fd, buffer, offset, length, position) { - var eagCounter = 0 - while (true) { - try { - return fs$readSync.call(fs, fd, buffer, offset, length, position) - } catch (er) { - if (er.code === 'EAGAIN' && eagCounter < 10) { - eagCounter ++ - continue - } - throw er - } - } - }})(fs.readSync) - - function patchLchmod (fs) { - fs.lchmod = function (path, mode, callback) { - fs.open( path - , constants.O_WRONLY | constants.O_SYMLINK - , mode - , function (err, fd) { - if (err) { - if (callback) callback(err) - return - } - // prefer to return the chmod error, if one occurs, - // but still try to close, and report closing errors if they occur. - fs.fchmod(fd, mode, function (err) { - fs.close(fd, function(err2) { - if (callback) callback(err || err2) - }) - }) - }) - } - - fs.lchmodSync = function (path, mode) { - var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode) - - // prefer to return the chmod error, if one occurs, - // but still try to close, and report closing errors if they occur. 
- var threw = true - var ret - try { - ret = fs.fchmodSync(fd, mode) - threw = false - } finally { - if (threw) { - try { - fs.closeSync(fd) - } catch (er) {} - } else { - fs.closeSync(fd) - } - } - return ret - } - } - - function patchLutimes (fs) { - if (constants.hasOwnProperty("O_SYMLINK") && fs.futimes) { - fs.lutimes = function (path, at, mt, cb) { - fs.open(path, constants.O_SYMLINK, function (er, fd) { - if (er) { - if (cb) cb(er) - return - } - fs.futimes(fd, at, mt, function (er) { - fs.close(fd, function (er2) { - if (cb) cb(er || er2) - }) - }) - }) - } - - fs.lutimesSync = function (path, at, mt) { - var fd = fs.openSync(path, constants.O_SYMLINK) - var ret - var threw = true - try { - ret = fs.futimesSync(fd, at, mt) - threw = false - } finally { - if (threw) { - try { - fs.closeSync(fd) - } catch (er) {} - } else { - fs.closeSync(fd) - } - } - return ret - } - - } else if (fs.futimes) { - fs.lutimes = function (_a, _b, _c, cb) { if (cb) process.nextTick(cb) } - fs.lutimesSync = function () {} - } - } - - function chmodFix (orig) { - if (!orig) return orig - return function (target, mode, cb) { - return orig.call(fs, target, mode, function (er) { - if (chownErOk(er)) er = null - if (cb) cb.apply(this, arguments) - }) - } - } - - function chmodFixSync (orig) { - if (!orig) return orig - return function (target, mode) { - try { - return orig.call(fs, target, mode) - } catch (er) { - if (!chownErOk(er)) throw er - } - } - } - - - function chownFix (orig) { - if (!orig) return orig - return function (target, uid, gid, cb) { - return orig.call(fs, target, uid, gid, function (er) { - if (chownErOk(er)) er = null - if (cb) cb.apply(this, arguments) - }) - } - } - - function chownFixSync (orig) { - if (!orig) return orig - return function (target, uid, gid) { - try { - return orig.call(fs, target, uid, gid) - } catch (er) { - if (!chownErOk(er)) throw er - } - } - } - - function statFix (orig) { - if (!orig) return orig - // Older versions of Node erroneously returned signed integers for - // uid + gid. - return function (target, options, cb) { - if (typeof options === 'function') { - cb = options - options = null - } - function callback (er, stats) { - if (stats) { - if (stats.uid < 0) stats.uid += 0x100000000 - if (stats.gid < 0) stats.gid += 0x100000000 - } - if (cb) cb.apply(this, arguments) - } - return options ? orig.call(fs, target, options, callback) - : orig.call(fs, target, callback) - } - } - - function statFixSync (orig) { - if (!orig) return orig - // Older versions of Node erroneously returned signed integers for - // uid + gid. - return function (target, options) { - var stats = options ? orig.call(fs, target, options) - : orig.call(fs, target) - if (stats) { - if (stats.uid < 0) stats.uid += 0x100000000 - if (stats.gid < 0) stats.gid += 0x100000000 - } - return stats; - } - } - - // ENOSYS means that the fs doesn't support the op. Just ignore - // that, because it doesn't matter. - // - // if there's no getuid, or if getuid() is something other - // than 0, and the error is EINVAL or EPERM, then just ignore - // it. - // - // This specific case is a silent failure in cp, install, tar, - // and most other unix tools that manage permissions. - // - // When running as root, or if other types of errors are - // encountered, then it's strict. 
- function chownErOk (er) { - if (!er) - return true - - if (er.code === "ENOSYS") - return true - - var nonroot = !process.getuid || process.getuid() !== 0 - if (nonroot) { - if (er.code === "EINVAL" || er.code === "EPERM") - return true - } - - return false - } -} - - -/***/ }), - -/***/ 3287: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -/*! - * is-plain-object - * - * Copyright (c) 2014-2017, Jon Schlinkert. - * Released under the MIT License. - */ - -function isObject(o) { - return Object.prototype.toString.call(o) === '[object Object]'; -} - -function isPlainObject(o) { - var ctor,prot; - - if (isObject(o) === false) return false; - - // If has modified constructor - ctor = o.constructor; - if (ctor === undefined) return true; - - // If has modified prototype - prot = ctor.prototype; - if (isObject(prot) === false) return false; - - // If constructor does not have an Object-specific method - if (prot.hasOwnProperty('isPrototypeOf') === false) { - return false; - } - - // Most likely a plain Object - return true; -} - -exports.isPlainObject = isPlainObject; - - -/***/ }), - -/***/ 2533: -/***/ ((module) => { - -"use strict"; - - -var traverse = module.exports = function (schema, opts, cb) { - // Legacy support for v0.3.1 and earlier. - if (typeof opts == 'function') { - cb = opts; - opts = {}; - } - - cb = opts.cb || cb; - var pre = (typeof cb == 'function') ? cb : cb.pre || function() {}; - var post = cb.post || function() {}; - - _traverse(opts, pre, post, schema, '', schema); -}; - - -traverse.keywords = { - additionalItems: true, - items: true, - contains: true, - additionalProperties: true, - propertyNames: true, - not: true, - if: true, - then: true, - else: true -}; - -traverse.arrayKeywords = { - items: true, - allOf: true, - anyOf: true, - oneOf: true -}; - -traverse.propsKeywords = { - $defs: true, - definitions: true, - properties: true, - patternProperties: true, - dependencies: true -}; - -traverse.skipKeywords = { - default: true, - enum: true, - const: true, - required: true, - maximum: true, - minimum: true, - exclusiveMaximum: true, - exclusiveMinimum: true, - multipleOf: true, - maxLength: true, - minLength: true, - pattern: true, - format: true, - maxItems: true, - minItems: true, - uniqueItems: true, - maxProperties: true, - minProperties: true -}; - - -function _traverse(opts, pre, post, schema, jsonPtr, rootSchema, parentJsonPtr, parentKeyword, parentSchema, keyIndex) { - if (schema && typeof schema == 'object' && !Array.isArray(schema)) { - pre(schema, jsonPtr, rootSchema, parentJsonPtr, parentKeyword, parentSchema, keyIndex); - for (var key in schema) { - var sch = schema[key]; - if (Array.isArray(sch)) { - if (key in traverse.arrayKeywords) { - for (var i=0; i { - -let _fs -try { - _fs = __nccwpck_require__(7758) -} catch (_) { - _fs = __nccwpck_require__(5747) -} -const universalify = __nccwpck_require__(9046) -const { stringify, stripBom } = __nccwpck_require__(5902) - -async function _readFile (file, options = {}) { - if (typeof options === 'string') { - options = { encoding: options } - } - - const fs = options.fs || _fs - - const shouldThrow = 'throws' in options ? options.throws : true - - let data = await universalify.fromCallback(fs.readFile)(file, options) - - data = stripBom(data) - - let obj - try { - obj = JSON.parse(data, options ? 
options.reviver : null) - } catch (err) { - if (shouldThrow) { - err.message = `${file}: ${err.message}` - throw err - } else { - return null - } - } - - return obj -} - -const readFile = universalify.fromPromise(_readFile) - -function readFileSync (file, options = {}) { - if (typeof options === 'string') { - options = { encoding: options } - } - - const fs = options.fs || _fs - - const shouldThrow = 'throws' in options ? options.throws : true - - try { - let content = fs.readFileSync(file, options) - content = stripBom(content) - return JSON.parse(content, options.reviver) - } catch (err) { - if (shouldThrow) { - err.message = `${file}: ${err.message}` - throw err - } else { - return null - } - } -} - -async function _writeFile (file, obj, options = {}) { - const fs = options.fs || _fs - - const str = stringify(obj, options) - - await universalify.fromCallback(fs.writeFile)(file, str, options) -} - -const writeFile = universalify.fromPromise(_writeFile) - -function writeFileSync (file, obj, options = {}) { - const fs = options.fs || _fs - - const str = stringify(obj, options) - // not sure if fs.writeFileSync returns anything, but just in case - return fs.writeFileSync(file, str, options) -} - -const jsonfile = { - readFile, - readFileSync, - writeFile, - writeFileSync -} - -module.exports = jsonfile - - -/***/ }), - -/***/ 5902: -/***/ ((module) => { - -function stringify (obj, { EOL = '\n', finalEOL = true, replacer = null, spaces } = {}) { - const EOF = finalEOL ? EOL : '' - const str = JSON.stringify(obj, replacer, spaces) - - return str.replace(/\n/g, EOL) + EOF -} - -function stripBom (content) { - // we do this because JSON.parse would convert it to a utf8 string if encoding wasn't specified - if (Buffer.isBuffer(content)) content = content.toString('utf8') - return content.replace(/^\uFEFF/, '') -} - -module.exports = { stringify, stripBom } - - -/***/ }), - -/***/ 467: -/***/ ((module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } - -var Stream = _interopDefault(__nccwpck_require__(2413)); -var http = _interopDefault(__nccwpck_require__(8605)); -var Url = _interopDefault(__nccwpck_require__(8835)); -var whatwgUrl = _interopDefault(__nccwpck_require__(8665)); -var https = _interopDefault(__nccwpck_require__(7211)); -var zlib = _interopDefault(__nccwpck_require__(8761)); - -// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js - -// fix for "Readable" isn't a named export issue -const Readable = Stream.Readable; - -const BUFFER = Symbol('buffer'); -const TYPE = Symbol('type'); - -class Blob { - constructor() { - this[TYPE] = ''; - - const blobParts = arguments[0]; - const options = arguments[1]; - - const buffers = []; - let size = 0; - - if (blobParts) { - const a = blobParts; - const length = Number(a.length); - for (let i = 0; i < length; i++) { - const element = a[i]; - let buffer; - if (element instanceof Buffer) { - buffer = element; - } else if (ArrayBuffer.isView(element)) { - buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); - } else if (element instanceof ArrayBuffer) { - buffer = Buffer.from(element); - } else if (element instanceof Blob) { - buffer = element[BUFFER]; - } else { - buffer = Buffer.from(typeof element === 'string' ? 
element : String(element)); - } - size += buffer.length; - buffers.push(buffer); - } - } - - this[BUFFER] = Buffer.concat(buffers); - - let type = options && options.type !== undefined && String(options.type).toLowerCase(); - if (type && !/[^\u0020-\u007E]/.test(type)) { - this[TYPE] = type; - } - } - get size() { - return this[BUFFER].length; - } - get type() { - return this[TYPE]; - } - text() { - return Promise.resolve(this[BUFFER].toString()); - } - arrayBuffer() { - const buf = this[BUFFER]; - const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); - return Promise.resolve(ab); - } - stream() { - const readable = new Readable(); - readable._read = function () {}; - readable.push(this[BUFFER]); - readable.push(null); - return readable; - } - toString() { - return '[object Blob]'; - } - slice() { - const size = this.size; - - const start = arguments[0]; - const end = arguments[1]; - let relativeStart, relativeEnd; - if (start === undefined) { - relativeStart = 0; - } else if (start < 0) { - relativeStart = Math.max(size + start, 0); - } else { - relativeStart = Math.min(start, size); - } - if (end === undefined) { - relativeEnd = size; - } else if (end < 0) { - relativeEnd = Math.max(size + end, 0); - } else { - relativeEnd = Math.min(end, size); - } - const span = Math.max(relativeEnd - relativeStart, 0); - - const buffer = this[BUFFER]; - const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); - const blob = new Blob([], { type: arguments[2] }); - blob[BUFFER] = slicedBuffer; - return blob; - } -} - -Object.defineProperties(Blob.prototype, { - size: { enumerable: true }, - type: { enumerable: true }, - slice: { enumerable: true } -}); - -Object.defineProperty(Blob.prototype, Symbol.toStringTag, { - value: 'Blob', - writable: false, - enumerable: false, - configurable: true -}); - -/** - * fetch-error.js - * - * FetchError interface for operational errors - */ - -/** - * Create FetchError instance - * - * @param String message Error message for human - * @param String type Error type for machine - * @param String systemError For Node.js system error - * @return FetchError - */ -function FetchError(message, type, systemError) { - Error.call(this, message); - - this.message = message; - this.type = type; - - // when err.type is `system`, err.code contains system error code - if (systemError) { - this.code = this.errno = systemError.code; - } - - // hide custom error implementation details from end-users - Error.captureStackTrace(this, this.constructor); -} - -FetchError.prototype = Object.create(Error.prototype); -FetchError.prototype.constructor = FetchError; -FetchError.prototype.name = 'FetchError'; - -let convert; -try { - convert = __nccwpck_require__(2877).convert; -} catch (e) {} - -const INTERNALS = Symbol('Body internals'); - -// fix an issue where "PassThrough" isn't a named export for node <10 -const PassThrough = Stream.PassThrough; - -/** - * Body mixin - * - * Ref: https://fetch.spec.whatwg.org/#body - * - * @param Stream body Readable stream - * @param Object opts Response options - * @return Void - */ -function Body(body) { - var _this = this; - - var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, - _ref$size = _ref.size; - - let size = _ref$size === undefined ? 0 : _ref$size; - var _ref$timeout = _ref.timeout; - let timeout = _ref$timeout === undefined ? 
0 : _ref$timeout; - - if (body == null) { - // body is undefined or null - body = null; - } else if (isURLSearchParams(body)) { - // body is a URLSearchParams - body = Buffer.from(body.toString()); - } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { - // body is ArrayBuffer - body = Buffer.from(body); - } else if (ArrayBuffer.isView(body)) { - // body is ArrayBufferView - body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); - } else if (body instanceof Stream) ; else { - // none of the above - // coerce to string then buffer - body = Buffer.from(String(body)); - } - this[INTERNALS] = { - body, - disturbed: false, - error: null - }; - this.size = size; - this.timeout = timeout; - - if (body instanceof Stream) { - body.on('error', function (err) { - const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); - _this[INTERNALS].error = error; - }); - } -} - -Body.prototype = { - get body() { - return this[INTERNALS].body; - }, - - get bodyUsed() { - return this[INTERNALS].disturbed; - }, - - /** - * Decode response as ArrayBuffer - * - * @return Promise - */ - arrayBuffer() { - return consumeBody.call(this).then(function (buf) { - return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); - }); - }, - - /** - * Return raw response as Blob - * - * @return Promise - */ - blob() { - let ct = this.headers && this.headers.get('content-type') || ''; - return consumeBody.call(this).then(function (buf) { - return Object.assign( - // Prevent copying - new Blob([], { - type: ct.toLowerCase() - }), { - [BUFFER]: buf - }); - }); - }, - - /** - * Decode response as json - * - * @return Promise - */ - json() { - var _this2 = this; - - return consumeBody.call(this).then(function (buffer) { - try { - return JSON.parse(buffer.toString()); - } catch (err) { - return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); - } - }); - }, - - /** - * Decode response as text - * - * @return Promise - */ - text() { - return consumeBody.call(this).then(function (buffer) { - return buffer.toString(); - }); - }, - - /** - * Decode response as buffer (non-spec api) - * - * @return Promise - */ - buffer() { - return consumeBody.call(this); - }, - - /** - * Decode response as text, while automatically detecting the encoding and - * trying to decode to UTF-8 (non-spec api) - * - * @return Promise - */ - textConverted() { - var _this3 = this; - - return consumeBody.call(this).then(function (buffer) { - return convertBody(buffer, _this3.headers); - }); - } -}; - -// In browsers, all properties are enumerable. -Object.defineProperties(Body.prototype, { - body: { enumerable: true }, - bodyUsed: { enumerable: true }, - arrayBuffer: { enumerable: true }, - blob: { enumerable: true }, - json: { enumerable: true }, - text: { enumerable: true } -}); - -Body.mixIn = function (proto) { - for (const name of Object.getOwnPropertyNames(Body.prototype)) { - // istanbul ignore else: future proof - if (!(name in proto)) { - const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); - Object.defineProperty(proto, name, desc); - } - } -}; - -/** - * Consume and convert an entire Body to a Buffer. 
- * - * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body - * - * @return Promise - */ -function consumeBody() { - var _this4 = this; - - if (this[INTERNALS].disturbed) { - return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); - } - - this[INTERNALS].disturbed = true; - - if (this[INTERNALS].error) { - return Body.Promise.reject(this[INTERNALS].error); - } - - let body = this.body; - - // body is null - if (body === null) { - return Body.Promise.resolve(Buffer.alloc(0)); - } - - // body is blob - if (isBlob(body)) { - body = body.stream(); - } - - // body is buffer - if (Buffer.isBuffer(body)) { - return Body.Promise.resolve(body); - } - - // istanbul ignore if: should never happen - if (!(body instanceof Stream)) { - return Body.Promise.resolve(Buffer.alloc(0)); - } - - // body is stream - // get ready to actually consume the body - let accum = []; - let accumBytes = 0; - let abort = false; - - return new Body.Promise(function (resolve, reject) { - let resTimeout; - - // allow timeout on slow response body - if (_this4.timeout) { - resTimeout = setTimeout(function () { - abort = true; - reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); - }, _this4.timeout); - } - - // handle stream errors - body.on('error', function (err) { - if (err.name === 'AbortError') { - // if the request was aborted, reject with this Error - abort = true; - reject(err); - } else { - // other errors, such as incorrect content-encoding - reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); - } - }); - - body.on('data', function (chunk) { - if (abort || chunk === null) { - return; - } - - if (_this4.size && accumBytes + chunk.length > _this4.size) { - abort = true; - reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); - return; - } - - accumBytes += chunk.length; - accum.push(chunk); - }); - - body.on('end', function () { - if (abort) { - return; - } - - clearTimeout(resTimeout); - - try { - resolve(Buffer.concat(accum, accumBytes)); - } catch (err) { - // handle streams that have accumulated too much data (issue #414) - reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); - } - }); - }); -} - -/** - * Detect buffer encoding and convert to target encoding - * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding - * - * @param Buffer buffer Incoming buffer - * @param String encoding Target encoding - * @return String - */ -function convertBody(buffer, headers) { - if (typeof convert !== 'function') { - throw new Error('The package `encoding` must be installed to use the textConverted() function'); - } - - const ct = headers.get('content-type'); - let charset = 'utf-8'; - let res, str; - - // header - if (ct) { - res = /charset=([^;]*)/i.exec(ct); - } - - // no charset in content type, peek at response body for at most 1024 bytes - str = buffer.slice(0, 1024).toString(); - - // html5 - if (!res && str) { - res = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined; - - this[MAP] = Object.create(null); - - if (init instanceof Headers) { - const rawHeaders = init.raw(); - const headerNames = Object.keys(rawHeaders); - - for (const headerName of headerNames) { - for (const value of rawHeaders[headerName]) { - this.append(headerName, value); - } - } - - return; - } - - // We don't worry about converting prop to ByteString here as append() - // will handle it. - if (init == null) ; else if (typeof init === 'object') { - const method = init[Symbol.iterator]; - if (method != null) { - if (typeof method !== 'function') { - throw new TypeError('Header pairs must be iterable'); - } - - // sequence> - // Note: per spec we have to first exhaust the lists then process them - const pairs = []; - for (const pair of init) { - if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { - throw new TypeError('Each header pair must be iterable'); - } - pairs.push(Array.from(pair)); - } - - for (const pair of pairs) { - if (pair.length !== 2) { - throw new TypeError('Each header pair must be a name/value tuple'); - } - this.append(pair[0], pair[1]); - } - } else { - // record - for (const key of Object.keys(init)) { - const value = init[key]; - this.append(key, value); - } - } - } else { - throw new TypeError('Provided initializer must be an object'); - } - } - - /** - * Return combined header value given name - * - * @param String name Header name - * @return Mixed - */ - get(name) { - name = `${name}`; - validateName(name); - const key = find(this[MAP], name); - if (key === undefined) { - return null; - } - - return this[MAP][key].join(', '); - } - - /** - * Iterate over all headers - * - * @param Function callback Executed for each item with parameters (value, name, thisArg) - * @param Boolean thisArg `this` context for callback function - * @return Void - */ - forEach(callback) { - let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; - - let pairs = getHeaders(this); - let i = 0; - while (i < pairs.length) { - var _pairs$i = pairs[i]; - const name = _pairs$i[0], - value = _pairs$i[1]; - - callback.call(thisArg, value, name, this); - pairs = getHeaders(this); - i++; - } - } - - /** - * Overwrite header values given name - * - * @param String name Header name - * @param String value Header value - * @return Void - */ - set(name, value) { - name = `${name}`; - value = `${value}`; - validateName(name); - validateValue(value); - const key = find(this[MAP], name); - this[MAP][key !== undefined ? 
key : name] = [value]; - } - - /** - * Append a value onto existing header - * - * @param String name Header name - * @param String value Header value - * @return Void - */ - append(name, value) { - name = `${name}`; - value = `${value}`; - validateName(name); - validateValue(value); - const key = find(this[MAP], name); - if (key !== undefined) { - this[MAP][key].push(value); - } else { - this[MAP][name] = [value]; - } - } - - /** - * Check for header name existence - * - * @param String name Header name - * @return Boolean - */ - has(name) { - name = `${name}`; - validateName(name); - return find(this[MAP], name) !== undefined; - } - - /** - * Delete all header values given name - * - * @param String name Header name - * @return Void - */ - delete(name) { - name = `${name}`; - validateName(name); - const key = find(this[MAP], name); - if (key !== undefined) { - delete this[MAP][key]; - } - } - - /** - * Return raw headers (non-spec api) - * - * @return Object - */ - raw() { - return this[MAP]; - } - - /** - * Get an iterator on keys. - * - * @return Iterator - */ - keys() { - return createHeadersIterator(this, 'key'); - } - - /** - * Get an iterator on values. - * - * @return Iterator - */ - values() { - return createHeadersIterator(this, 'value'); - } - - /** - * Get an iterator on entries. - * - * This is the default iterator of the Headers object. - * - * @return Iterator - */ - [Symbol.iterator]() { - return createHeadersIterator(this, 'key+value'); - } -} -Headers.prototype.entries = Headers.prototype[Symbol.iterator]; - -Object.defineProperty(Headers.prototype, Symbol.toStringTag, { - value: 'Headers', - writable: false, - enumerable: false, - configurable: true -}); - -Object.defineProperties(Headers.prototype, { - get: { enumerable: true }, - forEach: { enumerable: true }, - set: { enumerable: true }, - append: { enumerable: true }, - has: { enumerable: true }, - delete: { enumerable: true }, - keys: { enumerable: true }, - values: { enumerable: true }, - entries: { enumerable: true } -}); - -function getHeaders(headers) { - let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; - - const keys = Object.keys(headers[MAP]).sort(); - return keys.map(kind === 'key' ? function (k) { - return k.toLowerCase(); - } : kind === 'value' ? 
function (k) { - return headers[MAP][k].join(', '); - } : function (k) { - return [k.toLowerCase(), headers[MAP][k].join(', ')]; - }); -} - -const INTERNAL = Symbol('internal'); - -function createHeadersIterator(target, kind) { - const iterator = Object.create(HeadersIteratorPrototype); - iterator[INTERNAL] = { - target, - kind, - index: 0 - }; - return iterator; -} - -const HeadersIteratorPrototype = Object.setPrototypeOf({ - next() { - // istanbul ignore if - if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { - throw new TypeError('Value of `this` is not a HeadersIterator'); - } - - var _INTERNAL = this[INTERNAL]; - const target = _INTERNAL.target, - kind = _INTERNAL.kind, - index = _INTERNAL.index; - - const values = getHeaders(target, kind); - const len = values.length; - if (index >= len) { - return { - value: undefined, - done: true - }; - } - - this[INTERNAL].index = index + 1; - - return { - value: values[index], - done: false - }; - } -}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); - -Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { - value: 'HeadersIterator', - writable: false, - enumerable: false, - configurable: true -}); - -/** - * Export the Headers object in a form that Node.js can consume. - * - * @param Headers headers - * @return Object - */ -function exportNodeCompatibleHeaders(headers) { - const obj = Object.assign({ __proto__: null }, headers[MAP]); - - // http.request() only supports string as Host header. This hack makes - // specifying custom Host header possible. - const hostHeaderKey = find(headers[MAP], 'Host'); - if (hostHeaderKey !== undefined) { - obj[hostHeaderKey] = obj[hostHeaderKey][0]; - } - - return obj; -} - -/** - * Create a Headers object from an object of headers, ignoring those that do - * not conform to HTTP grammar productions. - * - * @param Object obj Object of headers - * @return Headers - */ -function createHeadersLenient(obj) { - const headers = new Headers(); - for (const name of Object.keys(obj)) { - if (invalidTokenRegex.test(name)) { - continue; - } - if (Array.isArray(obj[name])) { - for (const val of obj[name]) { - if (invalidHeaderCharRegex.test(val)) { - continue; - } - if (headers[MAP][name] === undefined) { - headers[MAP][name] = [val]; - } else { - headers[MAP][name].push(val); - } - } - } else if (!invalidHeaderCharRegex.test(obj[name])) { - headers[MAP][name] = [obj[name]]; - } - } - return headers; -} - -const INTERNALS$1 = Symbol('Response internals'); - -// fix an issue where "STATUS_CODES" aren't a named export for node <10 -const STATUS_CODES = http.STATUS_CODES; - -/** - * Response class - * - * @param Stream body Readable stream - * @param Object opts Response options - * @return Void - */ -class Response { - constructor() { - let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; - let opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; - - Body.call(this, body, opts); - - const status = opts.status || 200; - const headers = new Headers(opts.headers); - - if (body != null && !headers.has('Content-Type')) { - const contentType = extractContentType(body); - if (contentType) { - headers.append('Content-Type', contentType); - } - } - - this[INTERNALS$1] = { - url: opts.url, - status, - statusText: opts.statusText || STATUS_CODES[status], - headers, - counter: opts.counter - }; - } - - get url() { - return this[INTERNALS$1].url || ''; - } - - get status() { - return this[INTERNALS$1].status; - } - - /** - * Convenience property representing if the request ended normally - */ - get ok() { - return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; - } - - get redirected() { - return this[INTERNALS$1].counter > 0; - } - - get statusText() { - return this[INTERNALS$1].statusText; - } - - get headers() { - return this[INTERNALS$1].headers; - } - - /** - * Clone this response - * - * @return Response - */ - clone() { - return new Response(clone(this), { - url: this.url, - status: this.status, - statusText: this.statusText, - headers: this.headers, - ok: this.ok, - redirected: this.redirected - }); - } -} - -Body.mixIn(Response.prototype); - -Object.defineProperties(Response.prototype, { - url: { enumerable: true }, - status: { enumerable: true }, - ok: { enumerable: true }, - redirected: { enumerable: true }, - statusText: { enumerable: true }, - headers: { enumerable: true }, - clone: { enumerable: true } -}); - -Object.defineProperty(Response.prototype, Symbol.toStringTag, { - value: 'Response', - writable: false, - enumerable: false, - configurable: true -}); - -const INTERNALS$2 = Symbol('Request internals'); -const URL = Url.URL || whatwgUrl.URL; - -// fix an issue where "format", "parse" aren't a named export for node <10 -const parse_url = Url.parse; -const format_url = Url.format; - -/** - * Wrapper around `new URL` to handle arbitrary URLs - * - * @param {string} urlStr - * @return {void} - */ -function parseURL(urlStr) { - /* - Check whether the URL is absolute or not - Scheme: https://tools.ietf.org/html/rfc3986#section-3.1 - Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3 - */ - if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) { - urlStr = new URL(urlStr).toString(); - } - - // Fallback to old implementation for arbitrary URLs - return parse_url(urlStr); -} - -const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; - -/** - * Check if a value is an instance of Request. - * - * @param Mixed input - * @return Boolean - */ -function isRequest(input) { - return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; -} - -function isAbortSignal(signal) { - const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); - return !!(proto && proto.constructor.name === 'AbortSignal'); -} - -/** - * Request class - * - * @param Mixed input Url or Request instance - * @param Object init Custom options - * @return Void - */ -class Request { - constructor(input) { - let init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; - - let parsedURL; - - // normalize input - if (!isRequest(input)) { - if (input && input.href) { - // in order to support Node.js' Url objects; though WHATWG's URL objects - // will fall into this branch also (since their `toString()` will return - // `href` property anyway) - parsedURL = parseURL(input.href); - } else { - // coerce input to a string before attempting to parse - parsedURL = parseURL(`${input}`); - } - input = {}; - } else { - parsedURL = parseURL(input.url); - } - - let method = init.method || input.method || 'GET'; - method = method.toUpperCase(); - - if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { - throw new TypeError('Request with GET/HEAD method cannot have body'); - } - - let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null; - - Body.call(this, inputBody, { - timeout: init.timeout || input.timeout || 0, - size: init.size || input.size || 0 - }); - - const headers = new Headers(init.headers || input.headers || {}); - - if (inputBody != null && !headers.has('Content-Type')) { - const contentType = extractContentType(inputBody); - if (contentType) { - headers.append('Content-Type', contentType); - } - } - - let signal = isRequest(input) ? input.signal : null; - if ('signal' in init) signal = init.signal; - - if (signal != null && !isAbortSignal(signal)) { - throw new TypeError('Expected signal to be an instanceof AbortSignal'); - } - - this[INTERNALS$2] = { - method, - redirect: init.redirect || input.redirect || 'follow', - headers, - parsedURL, - signal - }; - - // node-fetch-only options - this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; - this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; - this.counter = init.counter || input.counter || 0; - this.agent = init.agent || input.agent; - } - - get method() { - return this[INTERNALS$2].method; - } - - get url() { - return format_url(this[INTERNALS$2].parsedURL); - } - - get headers() { - return this[INTERNALS$2].headers; - } - - get redirect() { - return this[INTERNALS$2].redirect; - } - - get signal() { - return this[INTERNALS$2].signal; - } - - /** - * Clone this request - * - * @return Request - */ - clone() { - return new Request(this); - } -} - -Body.mixIn(Request.prototype); - -Object.defineProperty(Request.prototype, Symbol.toStringTag, { - value: 'Request', - writable: false, - enumerable: false, - configurable: true -}); - -Object.defineProperties(Request.prototype, { - method: { enumerable: true }, - url: { enumerable: true }, - headers: { enumerable: true }, - redirect: { enumerable: true }, - clone: { enumerable: true }, - signal: { enumerable: true } -}); - -/** - * Convert a Request to Node.js http request options. 
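// Illustrative sketch (not part of the bundled node-fetch source): the Request
// class above keeps its state in this[INTERNALS$2], a Symbol-keyed slot, and
// exposes it through read-only getters (method, url, headers, ...). A
// standalone sketch of that pattern; `Box` and its fields are invented:
const BOX_INTERNALS = Symbol('Box internals');
class Box {
  constructor(value) {
    this[BOX_INTERNALS] = { value };        // hidden from ordinary enumeration
  }
  get value() { return this[BOX_INTERNALS].value; }
}
const box = new Box(42);
console.log(box.value);                     // 42
console.log(Object.keys(box));              // [] – Symbol keys are not listed
// (end of illustrative sketch)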
- * - * @param Request A Request instance - * @return Object The options object to be passed to http.request - */ -function getNodeRequestOptions(request) { - const parsedURL = request[INTERNALS$2].parsedURL; - const headers = new Headers(request[INTERNALS$2].headers); - - // fetch step 1.3 - if (!headers.has('Accept')) { - headers.set('Accept', '*/*'); - } - - // Basic fetch - if (!parsedURL.protocol || !parsedURL.hostname) { - throw new TypeError('Only absolute URLs are supported'); - } - - if (!/^https?:$/.test(parsedURL.protocol)) { - throw new TypeError('Only HTTP(S) protocols are supported'); - } - - if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { - throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); - } - - // HTTP-network-or-cache fetch steps 2.4-2.7 - let contentLengthValue = null; - if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { - contentLengthValue = '0'; - } - if (request.body != null) { - const totalBytes = getTotalBytes(request); - if (typeof totalBytes === 'number') { - contentLengthValue = String(totalBytes); - } - } - if (contentLengthValue) { - headers.set('Content-Length', contentLengthValue); - } - - // HTTP-network-or-cache fetch step 2.11 - if (!headers.has('User-Agent')) { - headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); - } - - // HTTP-network-or-cache fetch step 2.15 - if (request.compress && !headers.has('Accept-Encoding')) { - headers.set('Accept-Encoding', 'gzip,deflate'); - } - - let agent = request.agent; - if (typeof agent === 'function') { - agent = agent(parsedURL); - } - - if (!headers.has('Connection') && !agent) { - headers.set('Connection', 'close'); - } - - // HTTP-network fetch step 4.2 - // chunked encoding is handled by Node.js - - return Object.assign({}, parsedURL, { - method: request.method, - headers: exportNodeCompatibleHeaders(headers), - agent - }); -} - -/** - * abort-error.js - * - * AbortError interface for cancelled requests - */ - -/** - * Create AbortError instance - * - * @param String message Error message for human - * @return AbortError - */ -function AbortError(message) { - Error.call(this, message); - - this.type = 'aborted'; - this.message = message; - - // hide custom error implementation details from end-users - Error.captureStackTrace(this, this.constructor); -} - -AbortError.prototype = Object.create(Error.prototype); -AbortError.prototype.constructor = AbortError; -AbortError.prototype.name = 'AbortError'; - -const URL$1 = Url.URL || whatwgUrl.URL; - -// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 -const PassThrough$1 = Stream.PassThrough; - -const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) { - const orig = new URL$1(original).hostname; - const dest = new URL$1(destination).hostname; - - return orig === dest || orig[orig.length - dest.length - 1] === '.' 
&& orig.endsWith(dest); -}; - -/** - * Fetch function - * - * @param Mixed url Absolute url or Request instance - * @param Object opts Fetch options - * @return Promise - */ -function fetch(url, opts) { - - // allow custom promise - if (!fetch.Promise) { - throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); - } - - Body.Promise = fetch.Promise; - - // wrap http.request into fetch - return new fetch.Promise(function (resolve, reject) { - // build request object - const request = new Request(url, opts); - const options = getNodeRequestOptions(request); - - const send = (options.protocol === 'https:' ? https : http).request; - const signal = request.signal; - - let response = null; - - const abort = function abort() { - let error = new AbortError('The user aborted a request.'); - reject(error); - if (request.body && request.body instanceof Stream.Readable) { - request.body.destroy(error); - } - if (!response || !response.body) return; - response.body.emit('error', error); - }; - - if (signal && signal.aborted) { - abort(); - return; - } - - const abortAndFinalize = function abortAndFinalize() { - abort(); - finalize(); - }; - - // send request - const req = send(options); - let reqTimeout; - - if (signal) { - signal.addEventListener('abort', abortAndFinalize); - } - - function finalize() { - req.abort(); - if (signal) signal.removeEventListener('abort', abortAndFinalize); - clearTimeout(reqTimeout); - } - - if (request.timeout) { - req.once('socket', function (socket) { - reqTimeout = setTimeout(function () { - reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); - finalize(); - }, request.timeout); - }); - } - - req.on('error', function (err) { - reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); - finalize(); - }); - - req.on('response', function (res) { - clearTimeout(reqTimeout); - - const headers = createHeadersLenient(res.headers); - - // HTTP fetch step 5 - if (fetch.isRedirect(res.statusCode)) { - // HTTP fetch step 5.2 - const location = headers.get('Location'); - - // HTTP fetch step 5.3 - let locationURL = null; - try { - locationURL = location === null ? null : new URL$1(location, request.url).toString(); - } catch (err) { - // error here can only be invalid URL in Location: header - // do not throw when options.redirect == manual - // let the user extract the errorneous redirect URL - if (request.redirect !== 'manual') { - reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')); - finalize(); - return; - } - } - - // HTTP fetch step 5.5 - switch (request.redirect) { - case 'error': - reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); - finalize(); - return; - case 'manual': - // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. 
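// Illustrative sketch (not part of the bundled node-fetch source):
// isDomainOrSubdomain above is what decides, further down, whether
// authorization/cookie headers survive a redirect. Its check
// `orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest)` is
// equivalent to `orig.endsWith('.' + dest)`. Standalone version using the
// WHATWG URL global; the function name and example URLs are made up:
function sameSiteForRedirect(requestUrl, locationUrl) {
  const orig = new URL(locationUrl).hostname;   // redirect target
  const dest = new URL(requestUrl).hostname;    // original request
  return orig === dest || orig.endsWith('.' + dest);
}
console.log(sameSiteForRedirect('https://example.com/login', 'https://api.example.com/cb')); // true  – headers kept
console.log(sameSiteForRedirect('https://example.com/login', 'https://evil.example/cb'));    // false – auth/cookie stripped
// (end of illustrative sketch)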
- if (locationURL !== null) { - // handle corrupted header - try { - headers.set('Location', locationURL); - } catch (err) { - // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request - reject(err); - } - } - break; - case 'follow': - // HTTP-redirect fetch step 2 - if (locationURL === null) { - break; - } - - // HTTP-redirect fetch step 5 - if (request.counter >= request.follow) { - reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); - finalize(); - return; - } - - // HTTP-redirect fetch step 6 (counter increment) - // Create a new Request object. - const requestOpts = { - headers: new Headers(request.headers), - follow: request.follow, - counter: request.counter + 1, - agent: request.agent, - compress: request.compress, - method: request.method, - body: request.body, - signal: request.signal, - timeout: request.timeout, - size: request.size - }; - - if (!isDomainOrSubdomain(request.url, locationURL)) { - for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) { - requestOpts.headers.delete(name); - } - } - - // HTTP-redirect fetch step 9 - if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { - reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); - finalize(); - return; - } - - // HTTP-redirect fetch step 11 - if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { - requestOpts.method = 'GET'; - requestOpts.body = undefined; - requestOpts.headers.delete('content-length'); - } - - // HTTP-redirect fetch step 15 - resolve(fetch(new Request(locationURL, requestOpts))); - finalize(); - return; - } - } - - // prepare response - res.once('end', function () { - if (signal) signal.removeEventListener('abort', abortAndFinalize); - }); - let body = res.pipe(new PassThrough$1()); - - const response_options = { - url: request.url, - status: res.statusCode, - statusText: res.statusMessage, - headers: headers, - size: request.size, - timeout: request.timeout, - counter: request.counter - }; - - // HTTP-network fetch step 12.1.1.3 - const codings = headers.get('Content-Encoding'); - - // HTTP-network fetch step 12.1.1.4: handle content codings - - // in following scenarios we ignore compression support - // 1. compression support is disabled - // 2. HEAD request - // 3. no Content-Encoding header - // 4. no content response (204) - // 5. content not modified response (304) - if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { - response = new Response(body, response_options); - resolve(response); - return; - } - - // For Node v6+ - // Be less strict when decoding compressed responses, since sometimes - // servers send slightly invalid responses that are still accepted - // by common browsers. - // Always using Z_SYNC_FLUSH is what cURL does. 
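// Illustrative sketch (not part of the bundled node-fetch source) of the
// lenient decompression configured just below: gunzip with flush/finishFlush
// set to Z_SYNC_FLUSH so slightly malformed or truncated gzip bodies still
// yield their data. This uses zlib.constants; the bundled code uses the older
// zlib.Z_SYNC_FLUSH alias. Demo variable names are invented:
const zlibDemo = require('zlib');
const { PassThrough: PassThroughDemo } = require('stream');

const gzipped = zlibDemo.gzipSync(Buffer.from('hello, world'));
const sourceStream = new PassThroughDemo();
const gunzip = zlibDemo.createGunzip({
  flush: zlibDemo.constants.Z_SYNC_FLUSH,
  finishFlush: zlibDemo.constants.Z_SYNC_FLUSH
});
const chunks = [];
sourceStream.pipe(gunzip)
  .on('data', (chunk) => chunks.push(chunk))
  .on('end', () => console.log(Buffer.concat(chunks).toString())); // "hello, world"
sourceStream.end(gzipped);
// (end of illustrative sketch)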
- const zlibOptions = { - flush: zlib.Z_SYNC_FLUSH, - finishFlush: zlib.Z_SYNC_FLUSH - }; - - // for gzip - if (codings == 'gzip' || codings == 'x-gzip') { - body = body.pipe(zlib.createGunzip(zlibOptions)); - response = new Response(body, response_options); - resolve(response); - return; - } - - // for deflate - if (codings == 'deflate' || codings == 'x-deflate') { - // handle the infamous raw deflate response from old servers - // a hack for old IIS and Apache servers - const raw = res.pipe(new PassThrough$1()); - raw.once('data', function (chunk) { - // see http://stackoverflow.com/questions/37519828 - if ((chunk[0] & 0x0F) === 0x08) { - body = body.pipe(zlib.createInflate()); - } else { - body = body.pipe(zlib.createInflateRaw()); - } - response = new Response(body, response_options); - resolve(response); - }); - return; - } - - // for br - if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { - body = body.pipe(zlib.createBrotliDecompress()); - response = new Response(body, response_options); - resolve(response); - return; - } - - // otherwise, use response as-is - response = new Response(body, response_options); - resolve(response); - }); - - writeToStream(req, request); - }); -} -/** - * Redirect code matching - * - * @param Number code Status code - * @return Boolean - */ -fetch.isRedirect = function (code) { - return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; -}; - -// expose Promise -fetch.Promise = global.Promise; - -module.exports = exports = fetch; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.default = exports; -exports.Headers = Headers; -exports.Request = Request; -exports.Response = Response; -exports.FetchError = FetchError; - - -/***/ }), - -/***/ 1223: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var wrappy = __nccwpck_require__(2940) -module.exports = wrappy(once) -module.exports.strict = wrappy(onceStrict) - -once.proto = once(function () { - Object.defineProperty(Function.prototype, 'once', { - value: function () { - return once(this) - }, - configurable: true - }) - - Object.defineProperty(Function.prototype, 'onceStrict', { - value: function () { - return onceStrict(this) - }, - configurable: true - }) -}) - -function once (fn) { - var f = function () { - if (f.called) return f.value - f.called = true - return f.value = fn.apply(this, arguments) - } - f.called = false - return f -} - -function onceStrict (fn) { - var f = function () { - if (f.called) - throw new Error(f.onceError) - f.called = true - return f.value = fn.apply(this, arguments) - } - var name = fn.name || 'Function wrapped with `once`' - f.onceError = name + " shouldn't be called more than once" - f.called = false - return f -} - - -/***/ }), - -/***/ 4256: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -var punycode = __nccwpck_require__(4213); -var mappingTable = __nccwpck_require__(68); - -var PROCESSING_OPTIONS = { - TRANSITIONAL: 0, - NONTRANSITIONAL: 1 -}; - -function normalize(str) { // fix bug in v8 - return str.split('\u0000').map(function (s) { return s.normalize('NFC'); }).join('\u0000'); -} - -function findStatus(val) { - var start = 0; - var end = mappingTable.length - 1; - - while (start <= end) { - var mid = Math.floor((start + end) / 2); - - var target = mappingTable[mid]; - if (target[0][0] <= val && target[0][1] >= val) { - return target; - } else if (target[0][0] > val) { - end = mid - 1; - } else { - start = mid + 1; - } - } - 
- return null; -} - -var regexAstralSymbols = /[\uD800-\uDBFF][\uDC00-\uDFFF]/g; - -function countSymbols(string) { - return string - // replace every surrogate pair with a BMP symbol - .replace(regexAstralSymbols, '_') - // then get the length - .length; -} - -function mapChars(domain_name, useSTD3, processing_option) { - var hasError = false; - var processed = ""; - - var len = countSymbols(domain_name); - for (var i = 0; i < len; ++i) { - var codePoint = domain_name.codePointAt(i); - var status = findStatus(codePoint); - - switch (status[1]) { - case "disallowed": - hasError = true; - processed += String.fromCodePoint(codePoint); - break; - case "ignored": - break; - case "mapped": - processed += String.fromCodePoint.apply(String, status[2]); - break; - case "deviation": - if (processing_option === PROCESSING_OPTIONS.TRANSITIONAL) { - processed += String.fromCodePoint.apply(String, status[2]); - } else { - processed += String.fromCodePoint(codePoint); - } - break; - case "valid": - processed += String.fromCodePoint(codePoint); - break; - case "disallowed_STD3_mapped": - if (useSTD3) { - hasError = true; - processed += String.fromCodePoint(codePoint); - } else { - processed += String.fromCodePoint.apply(String, status[2]); - } - break; - case "disallowed_STD3_valid": - if (useSTD3) { - hasError = true; - } - - processed += String.fromCodePoint(codePoint); - break; - } - } - - return { - string: processed, - error: hasError - }; -} - -var combiningMarksRegex = /[\u0300-\u036F\u0483-\u0489\u0591-\u05BD\u05BF\u05C1\u05C2\u05C4\u05C5\u05C7\u0610-\u061A\u064B-\u065F\u0670\u06D6-\u06DC\u06DF-\u06E4\u06E7\u06E8\u06EA-\u06ED\u0711\u0730-\u074A\u07A6-\u07B0\u07EB-\u07F3\u0816-\u0819\u081B-\u0823\u0825-\u0827\u0829-\u082D\u0859-\u085B\u08E4-\u0903\u093A-\u093C\u093E-\u094F\u0951-\u0957\u0962\u0963\u0981-\u0983\u09BC\u09BE-\u09C4\u09C7\u09C8\u09CB-\u09CD\u09D7\u09E2\u09E3\u0A01-\u0A03\u0A3C\u0A3E-\u0A42\u0A47\u0A48\u0A4B-\u0A4D\u0A51\u0A70\u0A71\u0A75\u0A81-\u0A83\u0ABC\u0ABE-\u0AC5\u0AC7-\u0AC9\u0ACB-\u0ACD\u0AE2\u0AE3\u0B01-\u0B03\u0B3C\u0B3E-\u0B44\u0B47\u0B48\u0B4B-\u0B4D\u0B56\u0B57\u0B62\u0B63\u0B82\u0BBE-\u0BC2\u0BC6-\u0BC8\u0BCA-\u0BCD\u0BD7\u0C00-\u0C03\u0C3E-\u0C44\u0C46-\u0C48\u0C4A-\u0C4D\u0C55\u0C56\u0C62\u0C63\u0C81-\u0C83\u0CBC\u0CBE-\u0CC4\u0CC6-\u0CC8\u0CCA-\u0CCD\u0CD5\u0CD6\u0CE2\u0CE3\u0D01-\u0D03\u0D3E-\u0D44\u0D46-\u0D48\u0D4A-\u0D4D\u0D57\u0D62\u0D63\u0D82\u0D83\u0DCA\u0DCF-\u0DD4\u0DD6\u0DD8-\u0DDF\u0DF2\u0DF3\u0E31\u0E34-\u0E3A\u0E47-\u0E4E\u0EB1\u0EB4-\u0EB9\u0EBB\u0EBC\u0EC8-\u0ECD\u0F18\u0F19\u0F35\u0F37\u0F39\u0F3E\u0F3F\u0F71-\u0F84\u0F86\u0F87\u0F8D-\u0F97\u0F99-\u0FBC\u0FC6\u102B-\u103E\u1056-\u1059\u105E-\u1060\u1062-\u1064\u1067-\u106D\u1071-\u1074\u1082-\u108D\u108F\u109A-\u109D\u135D-\u135F\u1712-\u1714\u1732-\u1734\u1752\u1753\u1772\u1773\u17B4-\u17D3\u17DD\u180B-\u180D\u18A9\u1920-\u192B\u1930-\u193B\u19B0-\u19C0\u19C8\u19C9\u1A17-\u1A1B\u1A55-\u1A5E\u1A60-\u1A7C\u1A7F\u1AB0-\u1ABE\u1B00-\u1B04\u1B34-\u1B44\u1B6B-\u1B73\u1B80-\u1B82\u1BA1-\u1BAD\u1BE6-\u1BF3\u1C24-\u1C37\u1CD0-\u1CD2\u1CD4-\u1CE8\u1CED\u1CF2-\u1CF4\u1CF8\u1CF9\u1DC0-\u1DF5\u1DFC-\u1DFF\u20D0-\u20F0\u2CEF-\u2CF1\u2D7F\u2DE0-\u2DFF\u302A-\u302F\u3099\u309A\uA66F-\uA672\uA674-\uA67D\uA69F\uA6F0\uA6F1\uA802\uA806\uA80B\uA823-\uA827\uA880\uA881\uA8B4-\uA8C4\uA8E0-\uA8F1\uA926-\uA92D\uA947-\uA953\uA980-\uA983\uA9B3-\uA9C0\uA9E5\uAA29-\uAA36\uAA43\uAA4C\uAA4D\uAA7B-\uAA7D\uAAB0\uAAB2-\uAAB4\uAAB7\uAAB8\uAABE\uAABF\uAAC1\uAAEB-\uAAEF\uAAF5\uAAF6\uABE3-\uABEA\uABEC\uABED\uFB1E\uFE00-\uFE0F\uFE20-\uFE2D]|
\uD800[\uDDFD\uDEE0\uDF76-\uDF7A]|\uD802[\uDE01-\uDE03\uDE05\uDE06\uDE0C-\uDE0F\uDE38-\uDE3A\uDE3F\uDEE5\uDEE6]|\uD804[\uDC00-\uDC02\uDC38-\uDC46\uDC7F-\uDC82\uDCB0-\uDCBA\uDD00-\uDD02\uDD27-\uDD34\uDD73\uDD80-\uDD82\uDDB3-\uDDC0\uDE2C-\uDE37\uDEDF-\uDEEA\uDF01-\uDF03\uDF3C\uDF3E-\uDF44\uDF47\uDF48\uDF4B-\uDF4D\uDF57\uDF62\uDF63\uDF66-\uDF6C\uDF70-\uDF74]|\uD805[\uDCB0-\uDCC3\uDDAF-\uDDB5\uDDB8-\uDDC0\uDE30-\uDE40\uDEAB-\uDEB7]|\uD81A[\uDEF0-\uDEF4\uDF30-\uDF36]|\uD81B[\uDF51-\uDF7E\uDF8F-\uDF92]|\uD82F[\uDC9D\uDC9E]|\uD834[\uDD65-\uDD69\uDD6D-\uDD72\uDD7B-\uDD82\uDD85-\uDD8B\uDDAA-\uDDAD\uDE42-\uDE44]|\uD83A[\uDCD0-\uDCD6]|\uDB40[\uDD00-\uDDEF]/; - -function validateLabel(label, processing_option) { - if (label.substr(0, 4) === "xn--") { - label = punycode.toUnicode(label); - processing_option = PROCESSING_OPTIONS.NONTRANSITIONAL; - } - - var error = false; - - if (normalize(label) !== label || - (label[3] === "-" && label[4] === "-") || - label[0] === "-" || label[label.length - 1] === "-" || - label.indexOf(".") !== -1 || - label.search(combiningMarksRegex) === 0) { - error = true; - } - - var len = countSymbols(label); - for (var i = 0; i < len; ++i) { - var status = findStatus(label.codePointAt(i)); - if ((processing === PROCESSING_OPTIONS.TRANSITIONAL && status[1] !== "valid") || - (processing === PROCESSING_OPTIONS.NONTRANSITIONAL && - status[1] !== "valid" && status[1] !== "deviation")) { - error = true; - break; - } - } - - return { - label: label, - error: error - }; -} - -function processing(domain_name, useSTD3, processing_option) { - var result = mapChars(domain_name, useSTD3, processing_option); - result.string = normalize(result.string); - - var labels = result.string.split("."); - for (var i = 0; i < labels.length; ++i) { - try { - var validation = validateLabel(labels[i]); - labels[i] = validation.label; - result.error = result.error || validation.error; - } catch(e) { - result.error = true; - } - } - - return { - string: labels.join("."), - error: result.error - }; -} - -module.exports.toASCII = function(domain_name, useSTD3, processing_option, verifyDnsLength) { - var result = processing(domain_name, useSTD3, processing_option); - var labels = result.string.split("."); - labels = labels.map(function(l) { - try { - return punycode.toASCII(l); - } catch(e) { - result.error = true; - return l; - } - }); - - if (verifyDnsLength) { - var total = labels.slice(0, labels.length - 1).join(".").length; - if (total.length > 253 || total.length === 0) { - result.error = true; - } - - for (var i=0; i < labels.length; ++i) { - if (labels.length > 63 || labels.length === 0) { - result.error = true; - break; - } - } - } - - if (result.error) return null; - return labels.join("."); -}; - -module.exports.toUnicode = function(domain_name, useSTD3) { - var result = processing(domain_name, useSTD3, PROCESSING_OPTIONS.NONTRANSITIONAL); - - return { - domain: result.string, - error: result.error - }; -}; - -module.exports.PROCESSING_OPTIONS = PROCESSING_OPTIONS; - - -/***/ }), - -/***/ 4294: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -module.exports = __nccwpck_require__(4219); - - -/***/ }), - -/***/ 4219: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var net = __nccwpck_require__(1631); -var tls = __nccwpck_require__(4016); -var http = __nccwpck_require__(8605); -var https = __nccwpck_require__(7211); -var events = __nccwpck_require__(8614); -var assert = __nccwpck_require__(2357); -var util = __nccwpck_require__(1669); 
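// Illustrative sketch (not part of the bundled tr46 source): the tr46 routines
// above (toASCII/toUnicode) perform IDNA mapping between Unicode domain names
// and their Punycode (xn--) form. Node's built-in url module exposes the same
// conversion on builds compiled with ICU, which the official binaries are:
const { domainToASCII, domainToUnicode } = require('url');
console.log(domainToASCII('münchen.example'));          // 'xn--mnchen-3ya.example'
console.log(domainToUnicode('xn--mnchen-3ya.example')); // 'münchen.example'
// (end of illustrative sketch)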
- - -exports.httpOverHttp = httpOverHttp; -exports.httpsOverHttp = httpsOverHttp; -exports.httpOverHttps = httpOverHttps; -exports.httpsOverHttps = httpsOverHttps; - - -function httpOverHttp(options) { - var agent = new TunnelingAgent(options); - agent.request = http.request; - return agent; -} - -function httpsOverHttp(options) { - var agent = new TunnelingAgent(options); - agent.request = http.request; - agent.createSocket = createSecureSocket; - agent.defaultPort = 443; - return agent; -} - -function httpOverHttps(options) { - var agent = new TunnelingAgent(options); - agent.request = https.request; - return agent; -} - -function httpsOverHttps(options) { - var agent = new TunnelingAgent(options); - agent.request = https.request; - agent.createSocket = createSecureSocket; - agent.defaultPort = 443; - return agent; -} - - -function TunnelingAgent(options) { - var self = this; - self.options = options || {}; - self.proxyOptions = self.options.proxy || {}; - self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets; - self.requests = []; - self.sockets = []; - - self.on('free', function onFree(socket, host, port, localAddress) { - var options = toOptions(host, port, localAddress); - for (var i = 0, len = self.requests.length; i < len; ++i) { - var pending = self.requests[i]; - if (pending.host === options.host && pending.port === options.port) { - // Detect the request to connect same origin server, - // reuse the connection. - self.requests.splice(i, 1); - pending.request.onSocket(socket); - return; - } - } - socket.destroy(); - self.removeSocket(socket); - }); -} -util.inherits(TunnelingAgent, events.EventEmitter); - -TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) { - var self = this; - var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress)); - - if (self.sockets.length >= this.maxSockets) { - // We are over limit so we'll add it to the queue. - self.requests.push(options); - return; - } - - // If we are under maxSockets create a new one. 
- self.createSocket(options, function(socket) { - socket.on('free', onFree); - socket.on('close', onCloseOrRemove); - socket.on('agentRemove', onCloseOrRemove); - req.onSocket(socket); - - function onFree() { - self.emit('free', socket, options); - } - - function onCloseOrRemove(err) { - self.removeSocket(socket); - socket.removeListener('free', onFree); - socket.removeListener('close', onCloseOrRemove); - socket.removeListener('agentRemove', onCloseOrRemove); - } - }); -}; - -TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { - var self = this; - var placeholder = {}; - self.sockets.push(placeholder); - - var connectOptions = mergeOptions({}, self.proxyOptions, { - method: 'CONNECT', - path: options.host + ':' + options.port, - agent: false, - headers: { - host: options.host + ':' + options.port - } - }); - if (options.localAddress) { - connectOptions.localAddress = options.localAddress; - } - if (connectOptions.proxyAuth) { - connectOptions.headers = connectOptions.headers || {}; - connectOptions.headers['Proxy-Authorization'] = 'Basic ' + - new Buffer(connectOptions.proxyAuth).toString('base64'); - } - - debug('making CONNECT request'); - var connectReq = self.request(connectOptions); - connectReq.useChunkedEncodingByDefault = false; // for v0.6 - connectReq.once('response', onResponse); // for v0.6 - connectReq.once('upgrade', onUpgrade); // for v0.6 - connectReq.once('connect', onConnect); // for v0.7 or later - connectReq.once('error', onError); - connectReq.end(); - - function onResponse(res) { - // Very hacky. This is necessary to avoid http-parser leaks. - res.upgrade = true; - } - - function onUpgrade(res, socket, head) { - // Hacky. - process.nextTick(function() { - onConnect(res, socket, head); - }); - } - - function onConnect(res, socket, head) { - connectReq.removeAllListeners(); - socket.removeAllListeners(); - - if (res.statusCode !== 200) { - debug('tunneling socket could not be established, statusCode=%d', - res.statusCode); - socket.destroy(); - var error = new Error('tunneling socket could not be established, ' + - 'statusCode=' + res.statusCode); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - return; - } - if (head.length > 0) { - debug('got illegal response body from proxy'); - socket.destroy(); - var error = new Error('got illegal response body from proxy'); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - return; - } - debug('tunneling connection has established'); - self.sockets[self.sockets.indexOf(placeholder)] = socket; - return cb(socket); - } - - function onError(cause) { - connectReq.removeAllListeners(); - - debug('tunneling socket could not be established, cause=%s\n', - cause.message, cause.stack); - var error = new Error('tunneling socket could not be established, ' + - 'cause=' + cause.message); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - } -}; - -TunnelingAgent.prototype.removeSocket = function removeSocket(socket) { - var pos = this.sockets.indexOf(socket) - if (pos === -1) { - return; - } - this.sockets.splice(pos, 1); - - var pending = this.requests.shift(); - if (pending) { - // If we have pending requests and a socket gets closed a new one - // needs to be created to take over in the pool for the one that closed. 
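// Illustrative sketch (not part of the bundled tunnel source) of how the
// agents defined above are typically used: httpsOverHttp issues a CONNECT
// request to the proxy, then upgrades the tunneled socket to TLS for the
// target host. The proxy host/port are placeholders, and the require assumes
// the standalone 'tunnel' package (the same code vendored here) is installed:
const httpsDemo = require('https');
const tunnelDemo = require('tunnel');

const proxyAgent = tunnelDemo.httpsOverHttp({
  proxy: { host: 'proxy.internal.example', port: 3128 }
});

httpsDemo.get('https://example.com/', { agent: proxyAgent }, (res) => {
  console.log('status via proxy:', res.statusCode);
}).on('error', (err) => console.error('tunnel failed:', err.message));
// (end of illustrative sketch)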
- this.createSocket(pending, function(socket) { - pending.request.onSocket(socket); - }); - } -}; - -function createSecureSocket(options, cb) { - var self = this; - TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { - var hostHeader = options.request.getHeader('host'); - var tlsOptions = mergeOptions({}, self.options, { - socket: socket, - servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host - }); - - // 0 is dummy port for v0.6 - var secureSocket = tls.connect(0, tlsOptions); - self.sockets[self.sockets.indexOf(socket)] = secureSocket; - cb(secureSocket); - }); -} - - -function toOptions(host, port, localAddress) { - if (typeof host === 'string') { // since v0.10 - return { - host: host, - port: port, - localAddress: localAddress - }; - } - return host; // for v0.11 or later -} - -function mergeOptions(target) { - for (var i = 1, len = arguments.length; i < len; ++i) { - var overrides = arguments[i]; - if (typeof overrides === 'object') { - var keys = Object.keys(overrides); - for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { - var k = keys[j]; - if (overrides[k] !== undefined) { - target[k] = overrides[k]; - } - } - } - } - return target; -} - - -var debug; -if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { - debug = function() { - var args = Array.prototype.slice.call(arguments); - if (typeof args[0] === 'string') { - args[0] = 'TUNNEL: ' + args[0]; - } else { - args.unshift('TUNNEL:'); - } - console.error.apply(console, args); - } -} else { - debug = function() {}; -} -exports.debug = debug; // for test - - -/***/ }), - -/***/ 5030: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -function getUserAgent() { - if (typeof navigator === "object" && "userAgent" in navigator) { - return navigator.userAgent; - } - - if (typeof process === "object" && "version" in process) { - return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`; - } - - return ""; -} - -exports.getUserAgent = getUserAgent; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 9046: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -exports.fromCallback = function (fn) { - return Object.defineProperty(function (...args) { - if (typeof args[args.length - 1] === 'function') fn.apply(this, args) - else { - return new Promise((resolve, reject) => { - fn.call( - this, - ...args, - (err, res) => (err != null) ? reject(err) : resolve(res) - ) - }) - } - }, 'name', { value: fn.name }) -} - -exports.fromPromise = function (fn) { - return Object.defineProperty(function (...args) { - const cb = args[args.length - 1] - if (typeof cb !== 'function') return fn.apply(this, args) - else fn.apply(this, args.slice(0, -1)).then(r => cb(null, r), cb) - }, 'name', { value: fn.name }) -} - - -/***/ }), - -/***/ 20: -/***/ (function(__unused_webpack_module, exports) { - -/** @license URI.js v4.4.1 (c) 2011 Gary Court. License: http://github.com/garycourt/uri-js */ -(function (global, factory) { - true ? 
factory(exports) : - 0; -}(this, (function (exports) { 'use strict'; - -function merge() { - for (var _len = arguments.length, sets = Array(_len), _key = 0; _key < _len; _key++) { - sets[_key] = arguments[_key]; - } - - if (sets.length > 1) { - sets[0] = sets[0].slice(0, -1); - var xl = sets.length - 1; - for (var x = 1; x < xl; ++x) { - sets[x] = sets[x].slice(1, -1); - } - sets[xl] = sets[xl].slice(1); - return sets.join(''); - } else { - return sets[0]; - } -} -function subexp(str) { - return "(?:" + str + ")"; -} -function typeOf(o) { - return o === undefined ? "undefined" : o === null ? "null" : Object.prototype.toString.call(o).split(" ").pop().split("]").shift().toLowerCase(); -} -function toUpperCase(str) { - return str.toUpperCase(); -} -function toArray(obj) { - return obj !== undefined && obj !== null ? obj instanceof Array ? obj : typeof obj.length !== "number" || obj.split || obj.setInterval || obj.call ? [obj] : Array.prototype.slice.call(obj) : []; -} -function assign(target, source) { - var obj = target; - if (source) { - for (var key in source) { - obj[key] = source[key]; - } - } - return obj; -} - -function buildExps(isIRI) { - var ALPHA$$ = "[A-Za-z]", - CR$ = "[\\x0D]", - DIGIT$$ = "[0-9]", - DQUOTE$$ = "[\\x22]", - HEXDIG$$ = merge(DIGIT$$, "[A-Fa-f]"), - //case-insensitive - LF$$ = "[\\x0A]", - SP$$ = "[\\x20]", - PCT_ENCODED$ = subexp(subexp("%[EFef]" + HEXDIG$$ + "%" + HEXDIG$$ + HEXDIG$$ + "%" + HEXDIG$$ + HEXDIG$$) + "|" + subexp("%[89A-Fa-f]" + HEXDIG$$ + "%" + HEXDIG$$ + HEXDIG$$) + "|" + subexp("%" + HEXDIG$$ + HEXDIG$$)), - //expanded - GEN_DELIMS$$ = "[\\:\\/\\?\\#\\[\\]\\@]", - SUB_DELIMS$$ = "[\\!\\$\\&\\'\\(\\)\\*\\+\\,\\;\\=]", - RESERVED$$ = merge(GEN_DELIMS$$, SUB_DELIMS$$), - UCSCHAR$$ = isIRI ? "[\\xA0-\\u200D\\u2010-\\u2029\\u202F-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF]" : "[]", - //subset, excludes bidi control characters - IPRIVATE$$ = isIRI ? "[\\uE000-\\uF8FF]" : "[]", - //subset - UNRESERVED$$ = merge(ALPHA$$, DIGIT$$, "[\\-\\.\\_\\~]", UCSCHAR$$), - SCHEME$ = subexp(ALPHA$$ + merge(ALPHA$$, DIGIT$$, "[\\+\\-\\.]") + "*"), - USERINFO$ = subexp(subexp(PCT_ENCODED$ + "|" + merge(UNRESERVED$$, SUB_DELIMS$$, "[\\:]")) + "*"), - DEC_OCTET$ = subexp(subexp("25[0-5]") + "|" + subexp("2[0-4]" + DIGIT$$) + "|" + subexp("1" + DIGIT$$ + DIGIT$$) + "|" + subexp("[1-9]" + DIGIT$$) + "|" + DIGIT$$), - DEC_OCTET_RELAXED$ = subexp(subexp("25[0-5]") + "|" + subexp("2[0-4]" + DIGIT$$) + "|" + subexp("1" + DIGIT$$ + DIGIT$$) + "|" + subexp("0?[1-9]" + DIGIT$$) + "|0?0?" + DIGIT$$), - //relaxed parsing rules - IPV4ADDRESS$ = subexp(DEC_OCTET_RELAXED$ + "\\." + DEC_OCTET_RELAXED$ + "\\." + DEC_OCTET_RELAXED$ + "\\." 
+ DEC_OCTET_RELAXED$), - H16$ = subexp(HEXDIG$$ + "{1,4}"), - LS32$ = subexp(subexp(H16$ + "\\:" + H16$) + "|" + IPV4ADDRESS$), - IPV6ADDRESS1$ = subexp(subexp(H16$ + "\\:") + "{6}" + LS32$), - // 6( h16 ":" ) ls32 - IPV6ADDRESS2$ = subexp("\\:\\:" + subexp(H16$ + "\\:") + "{5}" + LS32$), - // "::" 5( h16 ":" ) ls32 - IPV6ADDRESS3$ = subexp(subexp(H16$) + "?\\:\\:" + subexp(H16$ + "\\:") + "{4}" + LS32$), - //[ h16 ] "::" 4( h16 ":" ) ls32 - IPV6ADDRESS4$ = subexp(subexp(subexp(H16$ + "\\:") + "{0,1}" + H16$) + "?\\:\\:" + subexp(H16$ + "\\:") + "{3}" + LS32$), - //[ *1( h16 ":" ) h16 ] "::" 3( h16 ":" ) ls32 - IPV6ADDRESS5$ = subexp(subexp(subexp(H16$ + "\\:") + "{0,2}" + H16$) + "?\\:\\:" + subexp(H16$ + "\\:") + "{2}" + LS32$), - //[ *2( h16 ":" ) h16 ] "::" 2( h16 ":" ) ls32 - IPV6ADDRESS6$ = subexp(subexp(subexp(H16$ + "\\:") + "{0,3}" + H16$) + "?\\:\\:" + H16$ + "\\:" + LS32$), - //[ *3( h16 ":" ) h16 ] "::" h16 ":" ls32 - IPV6ADDRESS7$ = subexp(subexp(subexp(H16$ + "\\:") + "{0,4}" + H16$) + "?\\:\\:" + LS32$), - //[ *4( h16 ":" ) h16 ] "::" ls32 - IPV6ADDRESS8$ = subexp(subexp(subexp(H16$ + "\\:") + "{0,5}" + H16$) + "?\\:\\:" + H16$), - //[ *5( h16 ":" ) h16 ] "::" h16 - IPV6ADDRESS9$ = subexp(subexp(subexp(H16$ + "\\:") + "{0,6}" + H16$) + "?\\:\\:"), - //[ *6( h16 ":" ) h16 ] "::" - IPV6ADDRESS$ = subexp([IPV6ADDRESS1$, IPV6ADDRESS2$, IPV6ADDRESS3$, IPV6ADDRESS4$, IPV6ADDRESS5$, IPV6ADDRESS6$, IPV6ADDRESS7$, IPV6ADDRESS8$, IPV6ADDRESS9$].join("|")), - ZONEID$ = subexp(subexp(UNRESERVED$$ + "|" + PCT_ENCODED$) + "+"), - //RFC 6874 - IPV6ADDRZ$ = subexp(IPV6ADDRESS$ + "\\%25" + ZONEID$), - //RFC 6874 - IPV6ADDRZ_RELAXED$ = subexp(IPV6ADDRESS$ + subexp("\\%25|\\%(?!" + HEXDIG$$ + "{2})") + ZONEID$), - //RFC 6874, with relaxed parsing rules - IPVFUTURE$ = subexp("[vV]" + HEXDIG$$ + "+\\." + merge(UNRESERVED$$, SUB_DELIMS$$, "[\\:]") + "+"), - IP_LITERAL$ = subexp("\\[" + subexp(IPV6ADDRZ_RELAXED$ + "|" + IPV6ADDRESS$ + "|" + IPVFUTURE$) + "\\]"), - //RFC 6874 - REG_NAME$ = subexp(subexp(PCT_ENCODED$ + "|" + merge(UNRESERVED$$, SUB_DELIMS$$)) + "*"), - HOST$ = subexp(IP_LITERAL$ + "|" + IPV4ADDRESS$ + "(?!" + REG_NAME$ + ")" + "|" + REG_NAME$), - PORT$ = subexp(DIGIT$$ + "*"), - AUTHORITY$ = subexp(subexp(USERINFO$ + "@") + "?" + HOST$ + subexp("\\:" + PORT$) + "?"), - PCHAR$ = subexp(PCT_ENCODED$ + "|" + merge(UNRESERVED$$, SUB_DELIMS$$, "[\\:\\@]")), - SEGMENT$ = subexp(PCHAR$ + "*"), - SEGMENT_NZ$ = subexp(PCHAR$ + "+"), - SEGMENT_NZ_NC$ = subexp(subexp(PCT_ENCODED$ + "|" + merge(UNRESERVED$$, SUB_DELIMS$$, "[\\@]")) + "+"), - PATH_ABEMPTY$ = subexp(subexp("\\/" + SEGMENT$) + "*"), - PATH_ABSOLUTE$ = subexp("\\/" + subexp(SEGMENT_NZ$ + PATH_ABEMPTY$) + "?"), - //simplified - PATH_NOSCHEME$ = subexp(SEGMENT_NZ_NC$ + PATH_ABEMPTY$), - //simplified - PATH_ROOTLESS$ = subexp(SEGMENT_NZ$ + PATH_ABEMPTY$), - //simplified - PATH_EMPTY$ = "(?!" + PCHAR$ + ")", - PATH$ = subexp(PATH_ABEMPTY$ + "|" + PATH_ABSOLUTE$ + "|" + PATH_NOSCHEME$ + "|" + PATH_ROOTLESS$ + "|" + PATH_EMPTY$), - QUERY$ = subexp(subexp(PCHAR$ + "|" + merge("[\\/\\?]", IPRIVATE$$)) + "*"), - FRAGMENT$ = subexp(subexp(PCHAR$ + "|[\\/\\?]") + "*"), - HIER_PART$ = subexp(subexp("\\/\\/" + AUTHORITY$ + PATH_ABEMPTY$) + "|" + PATH_ABSOLUTE$ + "|" + PATH_ROOTLESS$ + "|" + PATH_EMPTY$), - URI$ = subexp(SCHEME$ + "\\:" + HIER_PART$ + subexp("\\?" + QUERY$) + "?" 
+ subexp("\\#" + FRAGMENT$) + "?"), - RELATIVE_PART$ = subexp(subexp("\\/\\/" + AUTHORITY$ + PATH_ABEMPTY$) + "|" + PATH_ABSOLUTE$ + "|" + PATH_NOSCHEME$ + "|" + PATH_EMPTY$), - RELATIVE$ = subexp(RELATIVE_PART$ + subexp("\\?" + QUERY$) + "?" + subexp("\\#" + FRAGMENT$) + "?"), - URI_REFERENCE$ = subexp(URI$ + "|" + RELATIVE$), - ABSOLUTE_URI$ = subexp(SCHEME$ + "\\:" + HIER_PART$ + subexp("\\?" + QUERY$) + "?"), - GENERIC_REF$ = "^(" + SCHEME$ + ")\\:" + subexp(subexp("\\/\\/(" + subexp("(" + USERINFO$ + ")@") + "?(" + HOST$ + ")" + subexp("\\:(" + PORT$ + ")") + "?)") + "?(" + PATH_ABEMPTY$ + "|" + PATH_ABSOLUTE$ + "|" + PATH_ROOTLESS$ + "|" + PATH_EMPTY$ + ")") + subexp("\\?(" + QUERY$ + ")") + "?" + subexp("\\#(" + FRAGMENT$ + ")") + "?$", - RELATIVE_REF$ = "^(){0}" + subexp(subexp("\\/\\/(" + subexp("(" + USERINFO$ + ")@") + "?(" + HOST$ + ")" + subexp("\\:(" + PORT$ + ")") + "?)") + "?(" + PATH_ABEMPTY$ + "|" + PATH_ABSOLUTE$ + "|" + PATH_NOSCHEME$ + "|" + PATH_EMPTY$ + ")") + subexp("\\?(" + QUERY$ + ")") + "?" + subexp("\\#(" + FRAGMENT$ + ")") + "?$", - ABSOLUTE_REF$ = "^(" + SCHEME$ + ")\\:" + subexp(subexp("\\/\\/(" + subexp("(" + USERINFO$ + ")@") + "?(" + HOST$ + ")" + subexp("\\:(" + PORT$ + ")") + "?)") + "?(" + PATH_ABEMPTY$ + "|" + PATH_ABSOLUTE$ + "|" + PATH_ROOTLESS$ + "|" + PATH_EMPTY$ + ")") + subexp("\\?(" + QUERY$ + ")") + "?$", - SAMEDOC_REF$ = "^" + subexp("\\#(" + FRAGMENT$ + ")") + "?$", - AUTHORITY_REF$ = "^" + subexp("(" + USERINFO$ + ")@") + "?(" + HOST$ + ")" + subexp("\\:(" + PORT$ + ")") + "?$"; - return { - NOT_SCHEME: new RegExp(merge("[^]", ALPHA$$, DIGIT$$, "[\\+\\-\\.]"), "g"), - NOT_USERINFO: new RegExp(merge("[^\\%\\:]", UNRESERVED$$, SUB_DELIMS$$), "g"), - NOT_HOST: new RegExp(merge("[^\\%\\[\\]\\:]", UNRESERVED$$, SUB_DELIMS$$), "g"), - NOT_PATH: new RegExp(merge("[^\\%\\/\\:\\@]", UNRESERVED$$, SUB_DELIMS$$), "g"), - NOT_PATH_NOSCHEME: new RegExp(merge("[^\\%\\/\\@]", UNRESERVED$$, SUB_DELIMS$$), "g"), - NOT_QUERY: new RegExp(merge("[^\\%]", UNRESERVED$$, SUB_DELIMS$$, "[\\:\\@\\/\\?]", IPRIVATE$$), "g"), - NOT_FRAGMENT: new RegExp(merge("[^\\%]", UNRESERVED$$, SUB_DELIMS$$, "[\\:\\@\\/\\?]"), "g"), - ESCAPE: new RegExp(merge("[^]", UNRESERVED$$, SUB_DELIMS$$), "g"), - UNRESERVED: new RegExp(UNRESERVED$$, "g"), - OTHER_CHARS: new RegExp(merge("[^\\%]", UNRESERVED$$, RESERVED$$), "g"), - PCT_ENCODED: new RegExp(PCT_ENCODED$, "g"), - IPV4ADDRESS: new RegExp("^(" + IPV4ADDRESS$ + ")$"), - IPV6ADDRESS: new RegExp("^\\[?(" + IPV6ADDRESS$ + ")" + subexp(subexp("\\%25|\\%(?!" 
+ HEXDIG$$ + "{2})") + "(" + ZONEID$ + ")") + "?\\]?$") //RFC 6874, with relaxed parsing rules - }; -} -var URI_PROTOCOL = buildExps(false); - -var IRI_PROTOCOL = buildExps(true); - -var slicedToArray = function () { - function sliceIterator(arr, i) { - var _arr = []; - var _n = true; - var _d = false; - var _e = undefined; - - try { - for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { - _arr.push(_s.value); - - if (i && _arr.length === i) break; - } - } catch (err) { - _d = true; - _e = err; - } finally { - try { - if (!_n && _i["return"]) _i["return"](); - } finally { - if (_d) throw _e; - } - } - - return _arr; - } - - return function (arr, i) { - if (Array.isArray(arr)) { - return arr; - } else if (Symbol.iterator in Object(arr)) { - return sliceIterator(arr, i); - } else { - throw new TypeError("Invalid attempt to destructure non-iterable instance"); - } - }; -}(); - - - - - - - - - - - - - -var toConsumableArray = function (arr) { - if (Array.isArray(arr)) { - for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) arr2[i] = arr[i]; - - return arr2; - } else { - return Array.from(arr); - } -}; - -/** Highest positive signed 32-bit float value */ - -var maxInt = 2147483647; // aka. 0x7FFFFFFF or 2^31-1 - -/** Bootstring parameters */ -var base = 36; -var tMin = 1; -var tMax = 26; -var skew = 38; -var damp = 700; -var initialBias = 72; -var initialN = 128; // 0x80 -var delimiter = '-'; // '\x2D' - -/** Regular expressions */ -var regexPunycode = /^xn--/; -var regexNonASCII = /[^\0-\x7E]/; // non-ASCII chars -var regexSeparators = /[\x2E\u3002\uFF0E\uFF61]/g; // RFC 3490 separators - -/** Error messages */ -var errors = { - 'overflow': 'Overflow: input needs wider integers to process', - 'not-basic': 'Illegal input >= 0x80 (not a basic code point)', - 'invalid-input': 'Invalid input' -}; - -/** Convenience shortcuts */ -var baseMinusTMin = base - tMin; -var floor = Math.floor; -var stringFromCharCode = String.fromCharCode; - -/*--------------------------------------------------------------------------*/ - -/** - * A generic error utility function. - * @private - * @param {String} type The error type. - * @returns {Error} Throws a `RangeError` with the applicable error message. - */ -function error$1(type) { - throw new RangeError(errors[type]); -} - -/** - * A generic `Array#map` utility function. - * @private - * @param {Array} array The array to iterate over. - * @param {Function} callback The function that gets called for every array - * item. - * @returns {Array} A new array of values returned by the callback function. - */ -function map(array, fn) { - var result = []; - var length = array.length; - while (length--) { - result[length] = fn(array[length]); - } - return result; -} - -/** - * A simple `Array#map`-like wrapper to work with domain name strings or email - * addresses. - * @private - * @param {String} domain The domain name or email address. - * @param {Function} callback The function that gets called for every - * character. - * @returns {Array} A new string of characters returned by the callback - * function. - */ -function mapDomain(string, fn) { - var parts = string.split('@'); - var result = ''; - if (parts.length > 1) { - // In email addresses, only the domain name should be punycoded. Leave - // the local part (i.e. everything up to `@`) intact. - result = parts[0] + '@'; - string = parts[1]; - } - // Avoid `split(regex)` for IE8 compatibility. See #17. 
- string = string.replace(regexSeparators, '\x2E'); - var labels = string.split('.'); - var encoded = map(labels, fn).join('.'); - return result + encoded; -} - -/** - * Creates an array containing the numeric code points of each Unicode - * character in the string. While JavaScript uses UCS-2 internally, - * this function will convert a pair of surrogate halves (each of which - * UCS-2 exposes as separate characters) into a single code point, - * matching UTF-16. - * @see `punycode.ucs2.encode` - * @see - * @memberOf punycode.ucs2 - * @name decode - * @param {String} string The Unicode input string (UCS-2). - * @returns {Array} The new array of code points. - */ -function ucs2decode(string) { - var output = []; - var counter = 0; - var length = string.length; - while (counter < length) { - var value = string.charCodeAt(counter++); - if (value >= 0xD800 && value <= 0xDBFF && counter < length) { - // It's a high surrogate, and there is a next character. - var extra = string.charCodeAt(counter++); - if ((extra & 0xFC00) == 0xDC00) { - // Low surrogate. - output.push(((value & 0x3FF) << 10) + (extra & 0x3FF) + 0x10000); - } else { - // It's an unmatched surrogate; only append this code unit, in case the - // next code unit is the high surrogate of a surrogate pair. - output.push(value); - counter--; - } - } else { - output.push(value); - } - } - return output; -} - -/** - * Creates a string based on an array of numeric code points. - * @see `punycode.ucs2.decode` - * @memberOf punycode.ucs2 - * @name encode - * @param {Array} codePoints The array of numeric code points. - * @returns {String} The new Unicode string (UCS-2). - */ -var ucs2encode = function ucs2encode(array) { - return String.fromCodePoint.apply(String, toConsumableArray(array)); -}; - -/** - * Converts a basic code point into a digit/integer. - * @see `digitToBasic()` - * @private - * @param {Number} codePoint The basic numeric code point value. - * @returns {Number} The numeric value of a basic code point (for use in - * representing integers) in the range `0` to `base - 1`, or `base` if - * the code point does not represent a value. - */ -var basicToDigit = function basicToDigit(codePoint) { - if (codePoint - 0x30 < 0x0A) { - return codePoint - 0x16; - } - if (codePoint - 0x41 < 0x1A) { - return codePoint - 0x41; - } - if (codePoint - 0x61 < 0x1A) { - return codePoint - 0x61; - } - return base; -}; - -/** - * Converts a digit/integer into a basic code point. - * @see `basicToDigit()` - * @private - * @param {Number} digit The numeric value of a basic code point. - * @returns {Number} The basic code point whose value (when used for - * representing integers) is `digit`, which needs to be in the range - * `0` to `base - 1`. If `flag` is non-zero, the uppercase form is - * used; else, the lowercase form is used. The behavior is undefined - * if `flag` is non-zero and `digit` has no uppercase form. - */ -var digitToBasic = function digitToBasic(digit, flag) { - // 0..25 map to ASCII a..z or A..Z - // 26..35 map to ASCII 0..9 - return digit + 22 + 75 * (digit < 26) - ((flag != 0) << 5); -}; - -/** - * Bias adaptation function as per section 3.4 of RFC 3492. - * https://tools.ietf.org/html/rfc3492#section-3.4 - * @private - */ -var adapt = function adapt(delta, numPoints, firstTime) { - var k = 0; - delta = firstTime ? 
floor(delta / damp) : delta >> 1; - delta += floor(delta / numPoints); - for (; /* no initialization */delta > baseMinusTMin * tMax >> 1; k += base) { - delta = floor(delta / baseMinusTMin); - } - return floor(k + (baseMinusTMin + 1) * delta / (delta + skew)); -}; - -/** - * Converts a Punycode string of ASCII-only symbols to a string of Unicode - * symbols. - * @memberOf punycode - * @param {String} input The Punycode string of ASCII-only symbols. - * @returns {String} The resulting string of Unicode symbols. - */ -var decode = function decode(input) { - // Don't use UCS-2. - var output = []; - var inputLength = input.length; - var i = 0; - var n = initialN; - var bias = initialBias; - - // Handle the basic code points: let `basic` be the number of input code - // points before the last delimiter, or `0` if there is none, then copy - // the first basic code points to the output. - - var basic = input.lastIndexOf(delimiter); - if (basic < 0) { - basic = 0; - } - - for (var j = 0; j < basic; ++j) { - // if it's not a basic code point - if (input.charCodeAt(j) >= 0x80) { - error$1('not-basic'); - } - output.push(input.charCodeAt(j)); - } - - // Main decoding loop: start just after the last delimiter if any basic code - // points were copied; start at the beginning otherwise. - - for (var index = basic > 0 ? basic + 1 : 0; index < inputLength;) /* no final expression */{ - - // `index` is the index of the next character to be consumed. - // Decode a generalized variable-length integer into `delta`, - // which gets added to `i`. The overflow checking is easier - // if we increase `i` as we go, then subtract off its starting - // value at the end to obtain `delta`. - var oldi = i; - for (var w = 1, k = base;; /* no condition */k += base) { - - if (index >= inputLength) { - error$1('invalid-input'); - } - - var digit = basicToDigit(input.charCodeAt(index++)); - - if (digit >= base || digit > floor((maxInt - i) / w)) { - error$1('overflow'); - } - - i += digit * w; - var t = k <= bias ? tMin : k >= bias + tMax ? tMax : k - bias; - - if (digit < t) { - break; - } - - var baseMinusT = base - t; - if (w > floor(maxInt / baseMinusT)) { - error$1('overflow'); - } - - w *= baseMinusT; - } - - var out = output.length + 1; - bias = adapt(i - oldi, out, oldi == 0); - - // `i` was supposed to wrap around from `out` to `0`, - // incrementing `n` each time, so we'll fix that now: - if (floor(i / out) > maxInt - n) { - error$1('overflow'); - } - - n += floor(i / out); - i %= out; - - // Insert `n` at position `i` of the output. - output.splice(i++, 0, n); - } - - return String.fromCodePoint.apply(String, output); -}; - -/** - * Converts a string of Unicode symbols (e.g. a domain name label) to a - * Punycode string of ASCII-only symbols. - * @memberOf punycode - * @param {String} input The string of Unicode symbols. - * @returns {String} The resulting Punycode string of ASCII-only symbols. - */ -var encode = function encode(input) { - var output = []; - - // Convert the input in UCS-2 to an array of Unicode code points. - input = ucs2decode(input); - - // Cache the length. - var inputLength = input.length; - - // Initialize the state. - var n = initialN; - var delta = 0; - var bias = initialBias; - - // Handle the basic code points. 
- var _iteratorNormalCompletion = true; - var _didIteratorError = false; - var _iteratorError = undefined; - - try { - for (var _iterator = input[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) { - var _currentValue2 = _step.value; - - if (_currentValue2 < 0x80) { - output.push(stringFromCharCode(_currentValue2)); - } - } - } catch (err) { - _didIteratorError = true; - _iteratorError = err; - } finally { - try { - if (!_iteratorNormalCompletion && _iterator.return) { - _iterator.return(); - } - } finally { - if (_didIteratorError) { - throw _iteratorError; - } - } - } - - var basicLength = output.length; - var handledCPCount = basicLength; - - // `handledCPCount` is the number of code points that have been handled; - // `basicLength` is the number of basic code points. - - // Finish the basic string with a delimiter unless it's empty. - if (basicLength) { - output.push(delimiter); - } - - // Main encoding loop: - while (handledCPCount < inputLength) { - - // All non-basic code points < n have been handled already. Find the next - // larger one: - var m = maxInt; - var _iteratorNormalCompletion2 = true; - var _didIteratorError2 = false; - var _iteratorError2 = undefined; - - try { - for (var _iterator2 = input[Symbol.iterator](), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) { - var currentValue = _step2.value; - - if (currentValue >= n && currentValue < m) { - m = currentValue; - } - } - - // Increase `delta` enough to advance the decoder's state to , - // but guard against overflow. - } catch (err) { - _didIteratorError2 = true; - _iteratorError2 = err; - } finally { - try { - if (!_iteratorNormalCompletion2 && _iterator2.return) { - _iterator2.return(); - } - } finally { - if (_didIteratorError2) { - throw _iteratorError2; - } - } - } - - var handledCPCountPlusOne = handledCPCount + 1; - if (m - n > floor((maxInt - delta) / handledCPCountPlusOne)) { - error$1('overflow'); - } - - delta += (m - n) * handledCPCountPlusOne; - n = m; - - var _iteratorNormalCompletion3 = true; - var _didIteratorError3 = false; - var _iteratorError3 = undefined; - - try { - for (var _iterator3 = input[Symbol.iterator](), _step3; !(_iteratorNormalCompletion3 = (_step3 = _iterator3.next()).done); _iteratorNormalCompletion3 = true) { - var _currentValue = _step3.value; - - if (_currentValue < n && ++delta > maxInt) { - error$1('overflow'); - } - if (_currentValue == n) { - // Represent delta as a generalized variable-length integer. - var q = delta; - for (var k = base;; /* no condition */k += base) { - var t = k <= bias ? tMin : k >= bias + tMax ? tMax : k - bias; - if (q < t) { - break; - } - var qMinusT = q - t; - var baseMinusT = base - t; - output.push(stringFromCharCode(digitToBasic(t + qMinusT % baseMinusT, 0))); - q = floor(qMinusT / baseMinusT); - } - - output.push(stringFromCharCode(digitToBasic(q, 0))); - bias = adapt(delta, handledCPCountPlusOne, handledCPCount == basicLength); - delta = 0; - ++handledCPCount; - } - } - } catch (err) { - _didIteratorError3 = true; - _iteratorError3 = err; - } finally { - try { - if (!_iteratorNormalCompletion3 && _iterator3.return) { - _iterator3.return(); - } - } finally { - if (_didIteratorError3) { - throw _iteratorError3; - } - } - } - - ++delta; - ++n; - } - return output.join(''); -}; - -/** - * Converts a Punycode string representing a domain name or an email address - * to Unicode. 
Only the Punycoded parts of the input will be converted, i.e. - * it doesn't matter if you call it on a string that has already been - * converted to Unicode. - * @memberOf punycode - * @param {String} input The Punycoded domain name or email address to - * convert to Unicode. - * @returns {String} The Unicode representation of the given Punycode - * string. - */ -var toUnicode = function toUnicode(input) { - return mapDomain(input, function (string) { - return regexPunycode.test(string) ? decode(string.slice(4).toLowerCase()) : string; - }); -}; - -/** - * Converts a Unicode string representing a domain name or an email address to - * Punycode. Only the non-ASCII parts of the domain name will be converted, - * i.e. it doesn't matter if you call it with a domain that's already in - * ASCII. - * @memberOf punycode - * @param {String} input The domain name or email address to convert, as a - * Unicode string. - * @returns {String} The Punycode representation of the given domain name or - * email address. - */ -var toASCII = function toASCII(input) { - return mapDomain(input, function (string) { - return regexNonASCII.test(string) ? 'xn--' + encode(string) : string; - }); -}; - -/*--------------------------------------------------------------------------*/ - -/** Define the public API */ -var punycode = { - /** - * A string representing the current Punycode.js version number. - * @memberOf punycode - * @type String - */ - 'version': '2.1.0', - /** - * An object of methods to convert from JavaScript's internal character - * representation (UCS-2) to Unicode code points, and back. - * @see - * @memberOf punycode - * @type Object - */ - 'ucs2': { - 'decode': ucs2decode, - 'encode': ucs2encode - }, - 'decode': decode, - 'encode': encode, - 'toASCII': toASCII, - 'toUnicode': toUnicode -}; - -/** - * URI.js - * - * @fileoverview An RFC 3986 compliant, scheme extendable URI parsing/validating/resolving library for JavaScript. - * @author Gary Court - * @see http://github.com/garycourt/uri-js - */ -/** - * Copyright 2011 Gary Court. All rights reserved. - * - * Redistribution and use in source and binary forms, with or without modification, are - * permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, this list of - * conditions and the following disclaimer. - * - * 2. Redistributions in binary form must reproduce the above copyright notice, this list - * of conditions and the following disclaimer in the documentation and/or other materials - * provided with the distribution. - * - * THIS SOFTWARE IS PROVIDED BY GARY COURT ``AS IS'' AND ANY EXPRESS OR IMPLIED - * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND - * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL GARY COURT OR - * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR - * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON - * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING - * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF - * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- * - * The views and conclusions contained in the software and documentation are those of the - * authors and should not be interpreted as representing official policies, either expressed - * or implied, of Gary Court. - */ -var SCHEMES = {}; -function pctEncChar(chr) { - var c = chr.charCodeAt(0); - var e = void 0; - if (c < 16) e = "%0" + c.toString(16).toUpperCase();else if (c < 128) e = "%" + c.toString(16).toUpperCase();else if (c < 2048) e = "%" + (c >> 6 | 192).toString(16).toUpperCase() + "%" + (c & 63 | 128).toString(16).toUpperCase();else e = "%" + (c >> 12 | 224).toString(16).toUpperCase() + "%" + (c >> 6 & 63 | 128).toString(16).toUpperCase() + "%" + (c & 63 | 128).toString(16).toUpperCase(); - return e; -} -function pctDecChars(str) { - var newStr = ""; - var i = 0; - var il = str.length; - while (i < il) { - var c = parseInt(str.substr(i + 1, 2), 16); - if (c < 128) { - newStr += String.fromCharCode(c); - i += 3; - } else if (c >= 194 && c < 224) { - if (il - i >= 6) { - var c2 = parseInt(str.substr(i + 4, 2), 16); - newStr += String.fromCharCode((c & 31) << 6 | c2 & 63); - } else { - newStr += str.substr(i, 6); - } - i += 6; - } else if (c >= 224) { - if (il - i >= 9) { - var _c = parseInt(str.substr(i + 4, 2), 16); - var c3 = parseInt(str.substr(i + 7, 2), 16); - newStr += String.fromCharCode((c & 15) << 12 | (_c & 63) << 6 | c3 & 63); - } else { - newStr += str.substr(i, 9); - } - i += 9; - } else { - newStr += str.substr(i, 3); - i += 3; - } - } - return newStr; -} -function _normalizeComponentEncoding(components, protocol) { - function decodeUnreserved(str) { - var decStr = pctDecChars(str); - return !decStr.match(protocol.UNRESERVED) ? str : decStr; - } - if (components.scheme) components.scheme = String(components.scheme).replace(protocol.PCT_ENCODED, decodeUnreserved).toLowerCase().replace(protocol.NOT_SCHEME, ""); - if (components.userinfo !== undefined) components.userinfo = String(components.userinfo).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(protocol.NOT_USERINFO, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); - if (components.host !== undefined) components.host = String(components.host).replace(protocol.PCT_ENCODED, decodeUnreserved).toLowerCase().replace(protocol.NOT_HOST, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); - if (components.path !== undefined) components.path = String(components.path).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(components.scheme ? 
protocol.NOT_PATH : protocol.NOT_PATH_NOSCHEME, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); - if (components.query !== undefined) components.query = String(components.query).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(protocol.NOT_QUERY, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); - if (components.fragment !== undefined) components.fragment = String(components.fragment).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(protocol.NOT_FRAGMENT, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); - return components; -} - -function _stripLeadingZeros(str) { - return str.replace(/^0*(.*)/, "$1") || "0"; -} -function _normalizeIPv4(host, protocol) { - var matches = host.match(protocol.IPV4ADDRESS) || []; - - var _matches = slicedToArray(matches, 2), - address = _matches[1]; - - if (address) { - return address.split(".").map(_stripLeadingZeros).join("."); - } else { - return host; - } -} -function _normalizeIPv6(host, protocol) { - var matches = host.match(protocol.IPV6ADDRESS) || []; - - var _matches2 = slicedToArray(matches, 3), - address = _matches2[1], - zone = _matches2[2]; - - if (address) { - var _address$toLowerCase$ = address.toLowerCase().split('::').reverse(), - _address$toLowerCase$2 = slicedToArray(_address$toLowerCase$, 2), - last = _address$toLowerCase$2[0], - first = _address$toLowerCase$2[1]; - - var firstFields = first ? first.split(":").map(_stripLeadingZeros) : []; - var lastFields = last.split(":").map(_stripLeadingZeros); - var isLastFieldIPv4Address = protocol.IPV4ADDRESS.test(lastFields[lastFields.length - 1]); - var fieldCount = isLastFieldIPv4Address ? 7 : 8; - var lastFieldsStart = lastFields.length - fieldCount; - var fields = Array(fieldCount); - for (var x = 0; x < fieldCount; ++x) { - fields[x] = firstFields[x] || lastFields[lastFieldsStart + x] || ''; - } - if (isLastFieldIPv4Address) { - fields[fieldCount - 1] = _normalizeIPv4(fields[fieldCount - 1], protocol); - } - var allZeroFields = fields.reduce(function (acc, field, index) { - if (!field || field === "0") { - var lastLongest = acc[acc.length - 1]; - if (lastLongest && lastLongest.index + lastLongest.length === index) { - lastLongest.length++; - } else { - acc.push({ index: index, length: 1 }); - } - } - return acc; - }, []); - var longestZeroFields = allZeroFields.sort(function (a, b) { - return b.length - a.length; - })[0]; - var newHost = void 0; - if (longestZeroFields && longestZeroFields.length > 1) { - var newFirst = fields.slice(0, longestZeroFields.index); - var newLast = fields.slice(longestZeroFields.index + longestZeroFields.length); - newHost = newFirst.join(":") + "::" + newLast.join(":"); - } else { - newHost = fields.join(":"); - } - if (zone) { - newHost += "%" + zone; - } - return newHost; - } else { - return host; - } -} -var URI_PARSE = /^(?:([^:\/?#]+):)?(?:\/\/((?:([^\/?#@]*)@)?(\[[^\/?#\]]+\]|[^\/?#:]*)(?:\:(\d*))?))?([^?#]*)(?:\?([^#]*))?(?:#((?:.|\n|\r)*))?/i; -var NO_MATCH_IS_UNDEFINED = "".match(/(){0}/)[1] === undefined; -function parse(uriString) { - var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; - - var components = {}; - var protocol = options.iri !== false ? IRI_PROTOCOL : URI_PROTOCOL; - if (options.reference === "suffix") uriString = (options.scheme ? 
options.scheme + ":" : "") + "//" + uriString; - var matches = uriString.match(URI_PARSE); - if (matches) { - if (NO_MATCH_IS_UNDEFINED) { - //store each component - components.scheme = matches[1]; - components.userinfo = matches[3]; - components.host = matches[4]; - components.port = parseInt(matches[5], 10); - components.path = matches[6] || ""; - components.query = matches[7]; - components.fragment = matches[8]; - //fix port number - if (isNaN(components.port)) { - components.port = matches[5]; - } - } else { - //IE FIX for improper RegExp matching - //store each component - components.scheme = matches[1] || undefined; - components.userinfo = uriString.indexOf("@") !== -1 ? matches[3] : undefined; - components.host = uriString.indexOf("//") !== -1 ? matches[4] : undefined; - components.port = parseInt(matches[5], 10); - components.path = matches[6] || ""; - components.query = uriString.indexOf("?") !== -1 ? matches[7] : undefined; - components.fragment = uriString.indexOf("#") !== -1 ? matches[8] : undefined; - //fix port number - if (isNaN(components.port)) { - components.port = uriString.match(/\/\/(?:.|\n)*\:(?:\/|\?|\#|$)/) ? matches[4] : undefined; - } - } - if (components.host) { - //normalize IP hosts - components.host = _normalizeIPv6(_normalizeIPv4(components.host, protocol), protocol); - } - //determine reference type - if (components.scheme === undefined && components.userinfo === undefined && components.host === undefined && components.port === undefined && !components.path && components.query === undefined) { - components.reference = "same-document"; - } else if (components.scheme === undefined) { - components.reference = "relative"; - } else if (components.fragment === undefined) { - components.reference = "absolute"; - } else { - components.reference = "uri"; - } - //check for reference errors - if (options.reference && options.reference !== "suffix" && options.reference !== components.reference) { - components.error = components.error || "URI is not a " + options.reference + " reference."; - } - //find scheme handler - var schemeHandler = SCHEMES[(options.scheme || components.scheme || "").toLowerCase()]; - //check if scheme can't handle IRIs - if (!options.unicodeSupport && (!schemeHandler || !schemeHandler.unicodeSupport)) { - //if host component is a domain name - if (components.host && (options.domainHost || schemeHandler && schemeHandler.domainHost)) { - //convert Unicode IDN -> ASCII IDN - try { - components.host = punycode.toASCII(components.host.replace(protocol.PCT_ENCODED, pctDecChars).toLowerCase()); - } catch (e) { - components.error = components.error || "Host's domain name can not be converted to ASCII via punycode: " + e; - } - } - //convert IRI -> URI - _normalizeComponentEncoding(components, URI_PROTOCOL); - } else { - //normalize encodings - _normalizeComponentEncoding(components, protocol); - } - //perform scheme specific parsing - if (schemeHandler && schemeHandler.parse) { - schemeHandler.parse(components, options); - } - } else { - components.error = components.error || "URI can not be parsed."; - } - return components; -} - -function _recomposeAuthority(components, options) { - var protocol = options.iri !== false ? 
IRI_PROTOCOL : URI_PROTOCOL; - var uriTokens = []; - if (components.userinfo !== undefined) { - uriTokens.push(components.userinfo); - uriTokens.push("@"); - } - if (components.host !== undefined) { - //normalize IP hosts, add brackets and escape zone separator for IPv6 - uriTokens.push(_normalizeIPv6(_normalizeIPv4(String(components.host), protocol), protocol).replace(protocol.IPV6ADDRESS, function (_, $1, $2) { - return "[" + $1 + ($2 ? "%25" + $2 : "") + "]"; - })); - } - if (typeof components.port === "number" || typeof components.port === "string") { - uriTokens.push(":"); - uriTokens.push(String(components.port)); - } - return uriTokens.length ? uriTokens.join("") : undefined; -} - -var RDS1 = /^\.\.?\//; -var RDS2 = /^\/\.(\/|$)/; -var RDS3 = /^\/\.\.(\/|$)/; -var RDS5 = /^\/?(?:.|\n)*?(?=\/|$)/; -function removeDotSegments(input) { - var output = []; - while (input.length) { - if (input.match(RDS1)) { - input = input.replace(RDS1, ""); - } else if (input.match(RDS2)) { - input = input.replace(RDS2, "/"); - } else if (input.match(RDS3)) { - input = input.replace(RDS3, "/"); - output.pop(); - } else if (input === "." || input === "..") { - input = ""; - } else { - var im = input.match(RDS5); - if (im) { - var s = im[0]; - input = input.slice(s.length); - output.push(s); - } else { - throw new Error("Unexpected dot segment condition"); - } - } - } - return output.join(""); -} - -function serialize(components) { - var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; - - var protocol = options.iri ? IRI_PROTOCOL : URI_PROTOCOL; - var uriTokens = []; - //find scheme handler - var schemeHandler = SCHEMES[(options.scheme || components.scheme || "").toLowerCase()]; - //perform scheme specific serialization - if (schemeHandler && schemeHandler.serialize) schemeHandler.serialize(components, options); - if (components.host) { - //if host component is an IPv6 address - if (protocol.IPV6ADDRESS.test(components.host)) {} - //TODO: normalize IPv6 address as per RFC 5952 - - //if host component is a domain name - else if (options.domainHost || schemeHandler && schemeHandler.domainHost) { - //convert IDN via punycode - try { - components.host = !options.iri ? punycode.toASCII(components.host.replace(protocol.PCT_ENCODED, pctDecChars).toLowerCase()) : punycode.toUnicode(components.host); - } catch (e) { - components.error = components.error || "Host's domain name can not be converted to " + (!options.iri ? 
"ASCII" : "Unicode") + " via punycode: " + e; - } - } - } - //normalize encoding - _normalizeComponentEncoding(components, protocol); - if (options.reference !== "suffix" && components.scheme) { - uriTokens.push(components.scheme); - uriTokens.push(":"); - } - var authority = _recomposeAuthority(components, options); - if (authority !== undefined) { - if (options.reference !== "suffix") { - uriTokens.push("//"); - } - uriTokens.push(authority); - if (components.path && components.path.charAt(0) !== "/") { - uriTokens.push("/"); - } - } - if (components.path !== undefined) { - var s = components.path; - if (!options.absolutePath && (!schemeHandler || !schemeHandler.absolutePath)) { - s = removeDotSegments(s); - } - if (authority === undefined) { - s = s.replace(/^\/\//, "/%2F"); //don't allow the path to start with "//" - } - uriTokens.push(s); - } - if (components.query !== undefined) { - uriTokens.push("?"); - uriTokens.push(components.query); - } - if (components.fragment !== undefined) { - uriTokens.push("#"); - uriTokens.push(components.fragment); - } - return uriTokens.join(""); //merge tokens into a string -} - -function resolveComponents(base, relative) { - var options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {}; - var skipNormalization = arguments[3]; - - var target = {}; - if (!skipNormalization) { - base = parse(serialize(base, options), options); //normalize base components - relative = parse(serialize(relative, options), options); //normalize relative components - } - options = options || {}; - if (!options.tolerant && relative.scheme) { - target.scheme = relative.scheme; - //target.authority = relative.authority; - target.userinfo = relative.userinfo; - target.host = relative.host; - target.port = relative.port; - target.path = removeDotSegments(relative.path || ""); - target.query = relative.query; - } else { - if (relative.userinfo !== undefined || relative.host !== undefined || relative.port !== undefined) { - //target.authority = relative.authority; - target.userinfo = relative.userinfo; - target.host = relative.host; - target.port = relative.port; - target.path = removeDotSegments(relative.path || ""); - target.query = relative.query; - } else { - if (!relative.path) { - target.path = base.path; - if (relative.query !== undefined) { - target.query = relative.query; - } else { - target.query = base.query; - } - } else { - if (relative.path.charAt(0) === "/") { - target.path = removeDotSegments(relative.path); - } else { - if ((base.userinfo !== undefined || base.host !== undefined || base.port !== undefined) && !base.path) { - target.path = "/" + relative.path; - } else if (!base.path) { - target.path = relative.path; - } else { - target.path = base.path.slice(0, base.path.lastIndexOf("/") + 1) + relative.path; - } - target.path = removeDotSegments(target.path); - } - target.query = relative.query; - } - //target.authority = base.authority; - target.userinfo = base.userinfo; - target.host = base.host; - target.port = base.port; - } - target.scheme = base.scheme; - } - target.fragment = relative.fragment; - return target; -} - -function resolve(baseURI, relativeURI, options) { - var schemelessOptions = assign({ scheme: 'null' }, options); - return serialize(resolveComponents(parse(baseURI, schemelessOptions), parse(relativeURI, schemelessOptions), schemelessOptions, true), schemelessOptions); -} - -function normalize(uri, options) { - if (typeof uri === "string") { - uri = serialize(parse(uri, options), options); - } else if (typeOf(uri) === 
"object") { - uri = parse(serialize(uri, options), options); - } - return uri; -} - -function equal(uriA, uriB, options) { - if (typeof uriA === "string") { - uriA = serialize(parse(uriA, options), options); - } else if (typeOf(uriA) === "object") { - uriA = serialize(uriA, options); - } - if (typeof uriB === "string") { - uriB = serialize(parse(uriB, options), options); - } else if (typeOf(uriB) === "object") { - uriB = serialize(uriB, options); - } - return uriA === uriB; -} - -function escapeComponent(str, options) { - return str && str.toString().replace(!options || !options.iri ? URI_PROTOCOL.ESCAPE : IRI_PROTOCOL.ESCAPE, pctEncChar); -} - -function unescapeComponent(str, options) { - return str && str.toString().replace(!options || !options.iri ? URI_PROTOCOL.PCT_ENCODED : IRI_PROTOCOL.PCT_ENCODED, pctDecChars); -} - -var handler = { - scheme: "http", - domainHost: true, - parse: function parse(components, options) { - //report missing host - if (!components.host) { - components.error = components.error || "HTTP URIs must have a host."; - } - return components; - }, - serialize: function serialize(components, options) { - var secure = String(components.scheme).toLowerCase() === "https"; - //normalize the default port - if (components.port === (secure ? 443 : 80) || components.port === "") { - components.port = undefined; - } - //normalize the empty path - if (!components.path) { - components.path = "/"; - } - //NOTE: We do not parse query strings for HTTP URIs - //as WWW Form Url Encoded query strings are part of the HTML4+ spec, - //and not the HTTP spec. - return components; - } -}; - -var handler$1 = { - scheme: "https", - domainHost: handler.domainHost, - parse: handler.parse, - serialize: handler.serialize -}; - -function isSecure(wsComponents) { - return typeof wsComponents.secure === 'boolean' ? wsComponents.secure : String(wsComponents.scheme).toLowerCase() === "wss"; -} -//RFC 6455 -var handler$2 = { - scheme: "ws", - domainHost: true, - parse: function parse(components, options) { - var wsComponents = components; - //indicate if the secure flag is set - wsComponents.secure = isSecure(wsComponents); - //construct resouce name - wsComponents.resourceName = (wsComponents.path || '/') + (wsComponents.query ? '?' + wsComponents.query : ''); - wsComponents.path = undefined; - wsComponents.query = undefined; - return wsComponents; - }, - serialize: function serialize(wsComponents, options) { - //normalize the default port - if (wsComponents.port === (isSecure(wsComponents) ? 443 : 80) || wsComponents.port === "") { - wsComponents.port = undefined; - } - //ensure scheme matches secure flag - if (typeof wsComponents.secure === 'boolean') { - wsComponents.scheme = wsComponents.secure ? 'wss' : 'ws'; - wsComponents.secure = undefined; - } - //reconstruct path from resource name - if (wsComponents.resourceName) { - var _wsComponents$resourc = wsComponents.resourceName.split('?'), - _wsComponents$resourc2 = slicedToArray(_wsComponents$resourc, 2), - path = _wsComponents$resourc2[0], - query = _wsComponents$resourc2[1]; - - wsComponents.path = path && path !== '/' ? path : undefined; - wsComponents.query = query; - wsComponents.resourceName = undefined; - } - //forbid fragment component - wsComponents.fragment = undefined; - return wsComponents; - } -}; - -var handler$3 = { - scheme: "wss", - domainHost: handler$2.domainHost, - parse: handler$2.parse, - serialize: handler$2.serialize -}; - -var O = {}; -var isIRI = true; -//RFC 3986 -var UNRESERVED$$ = "[A-Za-z0-9\\-\\.\\_\\~" + (isIRI ? 
"\\xA0-\\u200D\\u2010-\\u2029\\u202F-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF" : "") + "]"; -var HEXDIG$$ = "[0-9A-Fa-f]"; //case-insensitive -var PCT_ENCODED$ = subexp(subexp("%[EFef]" + HEXDIG$$ + "%" + HEXDIG$$ + HEXDIG$$ + "%" + HEXDIG$$ + HEXDIG$$) + "|" + subexp("%[89A-Fa-f]" + HEXDIG$$ + "%" + HEXDIG$$ + HEXDIG$$) + "|" + subexp("%" + HEXDIG$$ + HEXDIG$$)); //expanded -//RFC 5322, except these symbols as per RFC 6068: @ : / ? # [ ] & ; = -//const ATEXT$$ = "[A-Za-z0-9\\!\\#\\$\\%\\&\\'\\*\\+\\-\\/\\=\\?\\^\\_\\`\\{\\|\\}\\~]"; -//const WSP$$ = "[\\x20\\x09]"; -//const OBS_QTEXT$$ = "[\\x01-\\x08\\x0B\\x0C\\x0E-\\x1F\\x7F]"; //(%d1-8 / %d11-12 / %d14-31 / %d127) -//const QTEXT$$ = merge("[\\x21\\x23-\\x5B\\x5D-\\x7E]", OBS_QTEXT$$); //%d33 / %d35-91 / %d93-126 / obs-qtext -//const VCHAR$$ = "[\\x21-\\x7E]"; -//const WSP$$ = "[\\x20\\x09]"; -//const OBS_QP$ = subexp("\\\\" + merge("[\\x00\\x0D\\x0A]", OBS_QTEXT$$)); //%d0 / CR / LF / obs-qtext -//const FWS$ = subexp(subexp(WSP$$ + "*" + "\\x0D\\x0A") + "?" + WSP$$ + "+"); -//const QUOTED_PAIR$ = subexp(subexp("\\\\" + subexp(VCHAR$$ + "|" + WSP$$)) + "|" + OBS_QP$); -//const QUOTED_STRING$ = subexp('\\"' + subexp(FWS$ + "?" + QCONTENT$) + "*" + FWS$ + "?" + '\\"'); -var ATEXT$$ = "[A-Za-z0-9\\!\\$\\%\\'\\*\\+\\-\\^\\_\\`\\{\\|\\}\\~]"; -var QTEXT$$ = "[\\!\\$\\%\\'\\(\\)\\*\\+\\,\\-\\.0-9\\<\\>A-Z\\x5E-\\x7E]"; -var VCHAR$$ = merge(QTEXT$$, "[\\\"\\\\]"); -var SOME_DELIMS$$ = "[\\!\\$\\'\\(\\)\\*\\+\\,\\;\\:\\@]"; -var UNRESERVED = new RegExp(UNRESERVED$$, "g"); -var PCT_ENCODED = new RegExp(PCT_ENCODED$, "g"); -var NOT_LOCAL_PART = new RegExp(merge("[^]", ATEXT$$, "[\\.]", '[\\"]', VCHAR$$), "g"); -var NOT_HFNAME = new RegExp(merge("[^]", UNRESERVED$$, SOME_DELIMS$$), "g"); -var NOT_HFVALUE = NOT_HFNAME; -function decodeUnreserved(str) { - var decStr = pctDecChars(str); - return !decStr.match(UNRESERVED) ? str : decStr; -} -var handler$4 = { - scheme: "mailto", - parse: function parse$$1(components, options) { - var mailtoComponents = components; - var to = mailtoComponents.to = mailtoComponents.path ? 
mailtoComponents.path.split(",") : []; - mailtoComponents.path = undefined; - if (mailtoComponents.query) { - var unknownHeaders = false; - var headers = {}; - var hfields = mailtoComponents.query.split("&"); - for (var x = 0, xl = hfields.length; x < xl; ++x) { - var hfield = hfields[x].split("="); - switch (hfield[0]) { - case "to": - var toAddrs = hfield[1].split(","); - for (var _x = 0, _xl = toAddrs.length; _x < _xl; ++_x) { - to.push(toAddrs[_x]); - } - break; - case "subject": - mailtoComponents.subject = unescapeComponent(hfield[1], options); - break; - case "body": - mailtoComponents.body = unescapeComponent(hfield[1], options); - break; - default: - unknownHeaders = true; - headers[unescapeComponent(hfield[0], options)] = unescapeComponent(hfield[1], options); - break; - } - } - if (unknownHeaders) mailtoComponents.headers = headers; - } - mailtoComponents.query = undefined; - for (var _x2 = 0, _xl2 = to.length; _x2 < _xl2; ++_x2) { - var addr = to[_x2].split("@"); - addr[0] = unescapeComponent(addr[0]); - if (!options.unicodeSupport) { - //convert Unicode IDN -> ASCII IDN - try { - addr[1] = punycode.toASCII(unescapeComponent(addr[1], options).toLowerCase()); - } catch (e) { - mailtoComponents.error = mailtoComponents.error || "Email address's domain name can not be converted to ASCII via punycode: " + e; - } - } else { - addr[1] = unescapeComponent(addr[1], options).toLowerCase(); - } - to[_x2] = addr.join("@"); - } - return mailtoComponents; - }, - serialize: function serialize$$1(mailtoComponents, options) { - var components = mailtoComponents; - var to = toArray(mailtoComponents.to); - if (to) { - for (var x = 0, xl = to.length; x < xl; ++x) { - var toAddr = String(to[x]); - var atIdx = toAddr.lastIndexOf("@"); - var localPart = toAddr.slice(0, atIdx).replace(PCT_ENCODED, decodeUnreserved).replace(PCT_ENCODED, toUpperCase).replace(NOT_LOCAL_PART, pctEncChar); - var domain = toAddr.slice(atIdx + 1); - //convert IDN via punycode - try { - domain = !options.iri ? punycode.toASCII(unescapeComponent(domain, options).toLowerCase()) : punycode.toUnicode(domain); - } catch (e) { - components.error = components.error || "Email address's domain name can not be converted to " + (!options.iri ? 
"ASCII" : "Unicode") + " via punycode: " + e; - } - to[x] = localPart + "@" + domain; - } - components.path = to.join(","); - } - var headers = mailtoComponents.headers = mailtoComponents.headers || {}; - if (mailtoComponents.subject) headers["subject"] = mailtoComponents.subject; - if (mailtoComponents.body) headers["body"] = mailtoComponents.body; - var fields = []; - for (var name in headers) { - if (headers[name] !== O[name]) { - fields.push(name.replace(PCT_ENCODED, decodeUnreserved).replace(PCT_ENCODED, toUpperCase).replace(NOT_HFNAME, pctEncChar) + "=" + headers[name].replace(PCT_ENCODED, decodeUnreserved).replace(PCT_ENCODED, toUpperCase).replace(NOT_HFVALUE, pctEncChar)); - } - } - if (fields.length) { - components.query = fields.join("&"); - } - return components; - } -}; - -var URN_PARSE = /^([^\:]+)\:(.*)/; -//RFC 2141 -var handler$5 = { - scheme: "urn", - parse: function parse$$1(components, options) { - var matches = components.path && components.path.match(URN_PARSE); - var urnComponents = components; - if (matches) { - var scheme = options.scheme || urnComponents.scheme || "urn"; - var nid = matches[1].toLowerCase(); - var nss = matches[2]; - var urnScheme = scheme + ":" + (options.nid || nid); - var schemeHandler = SCHEMES[urnScheme]; - urnComponents.nid = nid; - urnComponents.nss = nss; - urnComponents.path = undefined; - if (schemeHandler) { - urnComponents = schemeHandler.parse(urnComponents, options); - } - } else { - urnComponents.error = urnComponents.error || "URN can not be parsed."; - } - return urnComponents; - }, - serialize: function serialize$$1(urnComponents, options) { - var scheme = options.scheme || urnComponents.scheme || "urn"; - var nid = urnComponents.nid; - var urnScheme = scheme + ":" + (options.nid || nid); - var schemeHandler = SCHEMES[urnScheme]; - if (schemeHandler) { - urnComponents = schemeHandler.serialize(urnComponents, options); - } - var uriComponents = urnComponents; - var nss = urnComponents.nss; - uriComponents.path = (nid || options.nid) + ":" + nss; - return uriComponents; - } -}; - -var UUID = /^[0-9A-Fa-f]{8}(?:\-[0-9A-Fa-f]{4}){3}\-[0-9A-Fa-f]{12}$/; -//RFC 4122 -var handler$6 = { - scheme: "urn:uuid", - parse: function parse(urnComponents, options) { - var uuidComponents = urnComponents; - uuidComponents.uuid = uuidComponents.nss; - uuidComponents.nss = undefined; - if (!options.tolerant && (!uuidComponents.uuid || !uuidComponents.uuid.match(UUID))) { - uuidComponents.error = uuidComponents.error || "UUID is not valid."; - } - return uuidComponents; - }, - serialize: function serialize(uuidComponents, options) { - var urnComponents = uuidComponents; - //normalize UUID - urnComponents.nss = (uuidComponents.uuid || "").toLowerCase(); - return urnComponents; - } -}; - -SCHEMES[handler.scheme] = handler; -SCHEMES[handler$1.scheme] = handler$1; -SCHEMES[handler$2.scheme] = handler$2; -SCHEMES[handler$3.scheme] = handler$3; -SCHEMES[handler$4.scheme] = handler$4; -SCHEMES[handler$5.scheme] = handler$5; -SCHEMES[handler$6.scheme] = handler$6; - -exports.SCHEMES = SCHEMES; -exports.pctEncChar = pctEncChar; -exports.pctDecChars = pctDecChars; -exports.parse = parse; -exports.removeDotSegments = removeDotSegments; -exports.serialize = serialize; -exports.resolveComponents = resolveComponents; -exports.resolve = resolve; -exports.normalize = normalize; -exports.equal = equal; -exports.escapeComponent = escapeComponent; -exports.unescapeComponent = unescapeComponent; - -Object.defineProperty(exports, '__esModule', { value: true }); - -}))); 
-//# sourceMappingURL=uri.all.js.map - - -/***/ }), - -/***/ 4886: -/***/ ((module) => { - -"use strict"; - - -var conversions = {}; -module.exports = conversions; - -function sign(x) { - return x < 0 ? -1 : 1; -} - -function evenRound(x) { - // Round x to the nearest integer, choosing the even integer if it lies halfway between two. - if ((x % 1) === 0.5 && (x & 1) === 0) { // [even number].5; round down (i.e. floor) - return Math.floor(x); - } else { - return Math.round(x); - } -} - -function createNumberConversion(bitLength, typeOpts) { - if (!typeOpts.unsigned) { - --bitLength; - } - const lowerBound = typeOpts.unsigned ? 0 : -Math.pow(2, bitLength); - const upperBound = Math.pow(2, bitLength) - 1; - - const moduloVal = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength) : Math.pow(2, bitLength); - const moduloBound = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength - 1) : Math.pow(2, bitLength - 1); - - return function(V, opts) { - if (!opts) opts = {}; - - let x = +V; - - if (opts.enforceRange) { - if (!Number.isFinite(x)) { - throw new TypeError("Argument is not a finite number"); - } - - x = sign(x) * Math.floor(Math.abs(x)); - if (x < lowerBound || x > upperBound) { - throw new TypeError("Argument is not in byte range"); - } - - return x; - } - - if (!isNaN(x) && opts.clamp) { - x = evenRound(x); - - if (x < lowerBound) x = lowerBound; - if (x > upperBound) x = upperBound; - return x; - } - - if (!Number.isFinite(x) || x === 0) { - return 0; - } - - x = sign(x) * Math.floor(Math.abs(x)); - x = x % moduloVal; - - if (!typeOpts.unsigned && x >= moduloBound) { - return x - moduloVal; - } else if (typeOpts.unsigned) { - if (x < 0) { - x += moduloVal; - } else if (x === -0) { // don't return negative zero - return 0; - } - } - - return x; - } -} - -conversions["void"] = function () { - return undefined; -}; - -conversions["boolean"] = function (val) { - return !!val; -}; - -conversions["byte"] = createNumberConversion(8, { unsigned: false }); -conversions["octet"] = createNumberConversion(8, { unsigned: true }); - -conversions["short"] = createNumberConversion(16, { unsigned: false }); -conversions["unsigned short"] = createNumberConversion(16, { unsigned: true }); - -conversions["long"] = createNumberConversion(32, { unsigned: false }); -conversions["unsigned long"] = createNumberConversion(32, { unsigned: true }); - -conversions["long long"] = createNumberConversion(32, { unsigned: false, moduloBitLength: 64 }); -conversions["unsigned long long"] = createNumberConversion(32, { unsigned: true, moduloBitLength: 64 }); - -conversions["double"] = function (V) { - const x = +V; - - if (!Number.isFinite(x)) { - throw new TypeError("Argument is not a finite floating-point value"); - } - - return x; -}; - -conversions["unrestricted double"] = function (V) { - const x = +V; - - if (isNaN(x)) { - throw new TypeError("Argument is NaN"); - } - - return x; -}; - -// not quite valid, but good enough for JS -conversions["float"] = conversions["double"]; -conversions["unrestricted float"] = conversions["unrestricted double"]; - -conversions["DOMString"] = function (V, opts) { - if (!opts) opts = {}; - - if (opts.treatNullAsEmptyString && V === null) { - return ""; - } - - return String(V); -}; - -conversions["ByteString"] = function (V, opts) { - const x = String(V); - let c = undefined; - for (let i = 0; (c = x.codePointAt(i)) !== undefined; ++i) { - if (c > 255) { - throw new TypeError("Argument is not a valid bytestring"); - } - } - - return x; -}; - 
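The converters defined above implement WebIDL-style number coercion (the bundled webidl-conversions module used by the whatwg-url code that follows). A short sketch of the behaviour implied by createNumberConversion, assuming the standalone webidl-conversions package matches this bundled copy:

// Assumes the standalone `webidl-conversions` package with the converters defined above.
const conversions = require("webidl-conversions");

conversions["octet"](300);                          // 44  (wraps modulo 2^8)
conversions["unsigned long"](-1);                   // 4294967295 (wraps modulo 2^32)
conversions["long"](3.9);                           // 3   (truncates toward zero)
conversions["octet"](300, { clamp: true });         // 255 (clamped to the upper bound)
conversions["octet"](300, { enforceRange: true });  // throws TypeError (out of byte range)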
-conversions["USVString"] = function (V) { - const S = String(V); - const n = S.length; - const U = []; - for (let i = 0; i < n; ++i) { - const c = S.charCodeAt(i); - if (c < 0xD800 || c > 0xDFFF) { - U.push(String.fromCodePoint(c)); - } else if (0xDC00 <= c && c <= 0xDFFF) { - U.push(String.fromCodePoint(0xFFFD)); - } else { - if (i === n - 1) { - U.push(String.fromCodePoint(0xFFFD)); - } else { - const d = S.charCodeAt(i + 1); - if (0xDC00 <= d && d <= 0xDFFF) { - const a = c & 0x3FF; - const b = d & 0x3FF; - U.push(String.fromCodePoint((2 << 15) + (2 << 9) * a + b)); - ++i; - } else { - U.push(String.fromCodePoint(0xFFFD)); - } - } - } - } - - return U.join(''); -}; - -conversions["Date"] = function (V, opts) { - if (!(V instanceof Date)) { - throw new TypeError("Argument is not a Date object"); - } - if (isNaN(V)) { - return undefined; - } - - return V; -}; - -conversions["RegExp"] = function (V, opts) { - if (!(V instanceof RegExp)) { - V = new RegExp(V); - } - - return V; -}; - - -/***/ }), - -/***/ 7537: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -const usm = __nccwpck_require__(2158); - -exports.implementation = class URLImpl { - constructor(constructorArgs) { - const url = constructorArgs[0]; - const base = constructorArgs[1]; - - let parsedBase = null; - if (base !== undefined) { - parsedBase = usm.basicURLParse(base); - if (parsedBase === "failure") { - throw new TypeError("Invalid base URL"); - } - } - - const parsedURL = usm.basicURLParse(url, { baseURL: parsedBase }); - if (parsedURL === "failure") { - throw new TypeError("Invalid URL"); - } - - this._url = parsedURL; - - // TODO: query stuff - } - - get href() { - return usm.serializeURL(this._url); - } - - set href(v) { - const parsedURL = usm.basicURLParse(v); - if (parsedURL === "failure") { - throw new TypeError("Invalid URL"); - } - - this._url = parsedURL; - } - - get origin() { - return usm.serializeURLOrigin(this._url); - } - - get protocol() { - return this._url.scheme + ":"; - } - - set protocol(v) { - usm.basicURLParse(v + ":", { url: this._url, stateOverride: "scheme start" }); - } - - get username() { - return this._url.username; - } - - set username(v) { - if (usm.cannotHaveAUsernamePasswordPort(this._url)) { - return; - } - - usm.setTheUsername(this._url, v); - } - - get password() { - return this._url.password; - } - - set password(v) { - if (usm.cannotHaveAUsernamePasswordPort(this._url)) { - return; - } - - usm.setThePassword(this._url, v); - } - - get host() { - const url = this._url; - - if (url.host === null) { - return ""; - } - - if (url.port === null) { - return usm.serializeHost(url.host); - } - - return usm.serializeHost(url.host) + ":" + usm.serializeInteger(url.port); - } - - set host(v) { - if (this._url.cannotBeABaseURL) { - return; - } - - usm.basicURLParse(v, { url: this._url, stateOverride: "host" }); - } - - get hostname() { - if (this._url.host === null) { - return ""; - } - - return usm.serializeHost(this._url.host); - } - - set hostname(v) { - if (this._url.cannotBeABaseURL) { - return; - } - - usm.basicURLParse(v, { url: this._url, stateOverride: "hostname" }); - } - - get port() { - if (this._url.port === null) { - return ""; - } - - return usm.serializeInteger(this._url.port); - } - - set port(v) { - if (usm.cannotHaveAUsernamePasswordPort(this._url)) { - return; - } - - if (v === "") { - this._url.port = null; - } else { - usm.basicURLParse(v, { url: this._url, stateOverride: "port" }); - } - } - - get pathname() { - if 
(this._url.cannotBeABaseURL) { - return this._url.path[0]; - } - - if (this._url.path.length === 0) { - return ""; - } - - return "/" + this._url.path.join("/"); - } - - set pathname(v) { - if (this._url.cannotBeABaseURL) { - return; - } - - this._url.path = []; - usm.basicURLParse(v, { url: this._url, stateOverride: "path start" }); - } - - get search() { - if (this._url.query === null || this._url.query === "") { - return ""; - } - - return "?" + this._url.query; - } - - set search(v) { - // TODO: query stuff - - const url = this._url; - - if (v === "") { - url.query = null; - return; - } - - const input = v[0] === "?" ? v.substring(1) : v; - url.query = ""; - usm.basicURLParse(input, { url, stateOverride: "query" }); - } - - get hash() { - if (this._url.fragment === null || this._url.fragment === "") { - return ""; - } - - return "#" + this._url.fragment; - } - - set hash(v) { - if (v === "") { - this._url.fragment = null; - return; - } - - const input = v[0] === "#" ? v.substring(1) : v; - this._url.fragment = ""; - usm.basicURLParse(input, { url: this._url, stateOverride: "fragment" }); - } - - toJSON() { - return this.href; - } -}; - - -/***/ }), - -/***/ 3394: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const conversions = __nccwpck_require__(4886); -const utils = __nccwpck_require__(3185); -const Impl = __nccwpck_require__(7537); - -const impl = utils.implSymbol; - -function URL(url) { - if (!this || this[impl] || !(this instanceof URL)) { - throw new TypeError("Failed to construct 'URL': Please use the 'new' operator, this DOM object constructor cannot be called as a function."); - } - if (arguments.length < 1) { - throw new TypeError("Failed to construct 'URL': 1 argument required, but only " + arguments.length + " present."); - } - const args = []; - for (let i = 0; i < arguments.length && i < 2; ++i) { - args[i] = arguments[i]; - } - args[0] = conversions["USVString"](args[0]); - if (args[1] !== undefined) { - args[1] = conversions["USVString"](args[1]); - } - - module.exports.setup(this, args); -} - -URL.prototype.toJSON = function toJSON() { - if (!this || !module.exports.is(this)) { - throw new TypeError("Illegal invocation"); - } - const args = []; - for (let i = 0; i < arguments.length && i < 0; ++i) { - args[i] = arguments[i]; - } - return this[impl].toJSON.apply(this[impl], args); -}; -Object.defineProperty(URL.prototype, "href", { - get() { - return this[impl].href; - }, - set(V) { - V = conversions["USVString"](V); - this[impl].href = V; - }, - enumerable: true, - configurable: true -}); - -URL.prototype.toString = function () { - if (!this || !module.exports.is(this)) { - throw new TypeError("Illegal invocation"); - } - return this.href; -}; - -Object.defineProperty(URL.prototype, "origin", { - get() { - return this[impl].origin; - }, - enumerable: true, - configurable: true -}); - -Object.defineProperty(URL.prototype, "protocol", { - get() { - return this[impl].protocol; - }, - set(V) { - V = conversions["USVString"](V); - this[impl].protocol = V; - }, - enumerable: true, - configurable: true -}); - -Object.defineProperty(URL.prototype, "username", { - get() { - return this[impl].username; - }, - set(V) { - V = conversions["USVString"](V); - this[impl].username = V; - }, - enumerable: true, - configurable: true -}); - -Object.defineProperty(URL.prototype, "password", { - get() { - return this[impl].password; - }, - set(V) { - V = conversions["USVString"](V); - this[impl].password = V; - }, - enumerable: true, - 
configurable: true -}); - -Object.defineProperty(URL.prototype, "host", { - get() { - return this[impl].host; - }, - set(V) { - V = conversions["USVString"](V); - this[impl].host = V; - }, - enumerable: true, - configurable: true -}); - -Object.defineProperty(URL.prototype, "hostname", { - get() { - return this[impl].hostname; - }, - set(V) { - V = conversions["USVString"](V); - this[impl].hostname = V; - }, - enumerable: true, - configurable: true -}); - -Object.defineProperty(URL.prototype, "port", { - get() { - return this[impl].port; - }, - set(V) { - V = conversions["USVString"](V); - this[impl].port = V; - }, - enumerable: true, - configurable: true -}); - -Object.defineProperty(URL.prototype, "pathname", { - get() { - return this[impl].pathname; - }, - set(V) { - V = conversions["USVString"](V); - this[impl].pathname = V; - }, - enumerable: true, - configurable: true -}); - -Object.defineProperty(URL.prototype, "search", { - get() { - return this[impl].search; - }, - set(V) { - V = conversions["USVString"](V); - this[impl].search = V; - }, - enumerable: true, - configurable: true -}); - -Object.defineProperty(URL.prototype, "hash", { - get() { - return this[impl].hash; - }, - set(V) { - V = conversions["USVString"](V); - this[impl].hash = V; - }, - enumerable: true, - configurable: true -}); - - -module.exports = { - is(obj) { - return !!obj && obj[impl] instanceof Impl.implementation; - }, - create(constructorArgs, privateData) { - let obj = Object.create(URL.prototype); - this.setup(obj, constructorArgs, privateData); - return obj; - }, - setup(obj, constructorArgs, privateData) { - if (!privateData) privateData = {}; - privateData.wrapper = obj; - - obj[impl] = new Impl.implementation(constructorArgs, privateData); - obj[impl][utils.wrapperSymbol] = obj; - }, - interface: URL, - expose: { - Window: { URL: URL }, - Worker: { URL: URL } - } -}; - - - -/***/ }), - -/***/ 8665: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -exports.URL = __nccwpck_require__(3394).interface; -exports.serializeURL = __nccwpck_require__(2158).serializeURL; -exports.serializeURLOrigin = __nccwpck_require__(2158).serializeURLOrigin; -exports.basicURLParse = __nccwpck_require__(2158).basicURLParse; -exports.setTheUsername = __nccwpck_require__(2158).setTheUsername; -exports.setThePassword = __nccwpck_require__(2158).setThePassword; -exports.serializeHost = __nccwpck_require__(2158).serializeHost; -exports.serializeInteger = __nccwpck_require__(2158).serializeInteger; -exports.parseURL = __nccwpck_require__(2158).parseURL; - - -/***/ }), - -/***/ 2158: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - -const punycode = __nccwpck_require__(4213); -const tr46 = __nccwpck_require__(4256); - -const specialSchemes = { - ftp: 21, - file: null, - gopher: 70, - http: 80, - https: 443, - ws: 80, - wss: 443 -}; - -const failure = Symbol("failure"); - -function countSymbols(str) { - return punycode.ucs2.decode(str).length; -} - -function at(input, idx) { - const c = input[idx]; - return isNaN(c) ? 
undefined : String.fromCodePoint(c); -} - -function isASCIIDigit(c) { - return c >= 0x30 && c <= 0x39; -} - -function isASCIIAlpha(c) { - return (c >= 0x41 && c <= 0x5A) || (c >= 0x61 && c <= 0x7A); -} - -function isASCIIAlphanumeric(c) { - return isASCIIAlpha(c) || isASCIIDigit(c); -} - -function isASCIIHex(c) { - return isASCIIDigit(c) || (c >= 0x41 && c <= 0x46) || (c >= 0x61 && c <= 0x66); -} - -function isSingleDot(buffer) { - return buffer === "." || buffer.toLowerCase() === "%2e"; -} - -function isDoubleDot(buffer) { - buffer = buffer.toLowerCase(); - return buffer === ".." || buffer === "%2e." || buffer === ".%2e" || buffer === "%2e%2e"; -} - -function isWindowsDriveLetterCodePoints(cp1, cp2) { - return isASCIIAlpha(cp1) && (cp2 === 58 || cp2 === 124); -} - -function isWindowsDriveLetterString(string) { - return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && (string[1] === ":" || string[1] === "|"); -} - -function isNormalizedWindowsDriveLetterString(string) { - return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && string[1] === ":"; -} - -function containsForbiddenHostCodePoint(string) { - return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|%|\/|:|\?|@|\[|\\|\]/) !== -1; -} - -function containsForbiddenHostCodePointExcludingPercent(string) { - return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|\/|:|\?|@|\[|\\|\]/) !== -1; -} - -function isSpecialScheme(scheme) { - return specialSchemes[scheme] !== undefined; -} - -function isSpecial(url) { - return isSpecialScheme(url.scheme); -} - -function defaultPort(scheme) { - return specialSchemes[scheme]; -} - -function percentEncode(c) { - let hex = c.toString(16).toUpperCase(); - if (hex.length === 1) { - hex = "0" + hex; - } - - return "%" + hex; -} - -function utf8PercentEncode(c) { - const buf = new Buffer(c); - - let str = ""; - - for (let i = 0; i < buf.length; ++i) { - str += percentEncode(buf[i]); - } - - return str; -} - -function utf8PercentDecode(str) { - const input = new Buffer(str); - const output = []; - for (let i = 0; i < input.length; ++i) { - if (input[i] !== 37) { - output.push(input[i]); - } else if (input[i] === 37 && isASCIIHex(input[i + 1]) && isASCIIHex(input[i + 2])) { - output.push(parseInt(input.slice(i + 1, i + 3).toString(), 16)); - i += 2; - } else { - output.push(input[i]); - } - } - return new Buffer(output).toString(); -} - -function isC0ControlPercentEncode(c) { - return c <= 0x1F || c > 0x7E; -} - -const extraPathPercentEncodeSet = new Set([32, 34, 35, 60, 62, 63, 96, 123, 125]); -function isPathPercentEncode(c) { - return isC0ControlPercentEncode(c) || extraPathPercentEncodeSet.has(c); -} - -const extraUserinfoPercentEncodeSet = - new Set([47, 58, 59, 61, 64, 91, 92, 93, 94, 124]); -function isUserinfoPercentEncode(c) { - return isPathPercentEncode(c) || extraUserinfoPercentEncodeSet.has(c); -} - -function percentEncodeChar(c, encodeSetPredicate) { - const cStr = String.fromCodePoint(c); - - if (encodeSetPredicate(c)) { - return utf8PercentEncode(cStr); - } - - return cStr; -} - -function parseIPv4Number(input) { - let R = 10; - - if (input.length >= 2 && input.charAt(0) === "0" && input.charAt(1).toLowerCase() === "x") { - input = input.substring(2); - R = 16; - } else if (input.length >= 2 && input.charAt(0) === "0") { - input = input.substring(1); - R = 8; - } - - if (input === "") { - return 0; - } - - const regex = R === 10 ? /[^0-9]/ : (R === 16 ? 
/[^0-9A-Fa-f]/ : /[^0-7]/); - if (regex.test(input)) { - return failure; - } - - return parseInt(input, R); -} - -function parseIPv4(input) { - const parts = input.split("."); - if (parts[parts.length - 1] === "") { - if (parts.length > 1) { - parts.pop(); - } - } - - if (parts.length > 4) { - return input; - } - - const numbers = []; - for (const part of parts) { - if (part === "") { - return input; - } - const n = parseIPv4Number(part); - if (n === failure) { - return input; - } - - numbers.push(n); - } - - for (let i = 0; i < numbers.length - 1; ++i) { - if (numbers[i] > 255) { - return failure; - } - } - if (numbers[numbers.length - 1] >= Math.pow(256, 5 - numbers.length)) { - return failure; - } - - let ipv4 = numbers.pop(); - let counter = 0; - - for (const n of numbers) { - ipv4 += n * Math.pow(256, 3 - counter); - ++counter; - } - - return ipv4; -} - -function serializeIPv4(address) { - let output = ""; - let n = address; - - for (let i = 1; i <= 4; ++i) { - output = String(n % 256) + output; - if (i !== 4) { - output = "." + output; - } - n = Math.floor(n / 256); - } - - return output; -} - -function parseIPv6(input) { - const address = [0, 0, 0, 0, 0, 0, 0, 0]; - let pieceIndex = 0; - let compress = null; - let pointer = 0; - - input = punycode.ucs2.decode(input); - - if (input[pointer] === 58) { - if (input[pointer + 1] !== 58) { - return failure; - } - - pointer += 2; - ++pieceIndex; - compress = pieceIndex; - } - - while (pointer < input.length) { - if (pieceIndex === 8) { - return failure; - } - - if (input[pointer] === 58) { - if (compress !== null) { - return failure; - } - ++pointer; - ++pieceIndex; - compress = pieceIndex; - continue; - } - - let value = 0; - let length = 0; - - while (length < 4 && isASCIIHex(input[pointer])) { - value = value * 0x10 + parseInt(at(input, pointer), 16); - ++pointer; - ++length; - } - - if (input[pointer] === 46) { - if (length === 0) { - return failure; - } - - pointer -= length; - - if (pieceIndex > 6) { - return failure; - } - - let numbersSeen = 0; - - while (input[pointer] !== undefined) { - let ipv4Piece = null; - - if (numbersSeen > 0) { - if (input[pointer] === 46 && numbersSeen < 4) { - ++pointer; - } else { - return failure; - } - } - - if (!isASCIIDigit(input[pointer])) { - return failure; - } - - while (isASCIIDigit(input[pointer])) { - const number = parseInt(at(input, pointer)); - if (ipv4Piece === null) { - ipv4Piece = number; - } else if (ipv4Piece === 0) { - return failure; - } else { - ipv4Piece = ipv4Piece * 10 + number; - } - if (ipv4Piece > 255) { - return failure; - } - ++pointer; - } - - address[pieceIndex] = address[pieceIndex] * 0x100 + ipv4Piece; - - ++numbersSeen; - - if (numbersSeen === 2 || numbersSeen === 4) { - ++pieceIndex; - } - } - - if (numbersSeen !== 4) { - return failure; - } - - break; - } else if (input[pointer] === 58) { - ++pointer; - if (input[pointer] === undefined) { - return failure; - } - } else if (input[pointer] !== undefined) { - return failure; - } - - address[pieceIndex] = value; - ++pieceIndex; - } - - if (compress !== null) { - let swaps = pieceIndex - compress; - pieceIndex = 7; - while (pieceIndex !== 0 && swaps > 0) { - const temp = address[compress + swaps - 1]; - address[compress + swaps - 1] = address[pieceIndex]; - address[pieceIndex] = temp; - --pieceIndex; - --swaps; - } - } else if (compress === null && pieceIndex !== 8) { - return failure; - } - - return address; -} - -function serializeIPv6(address) { - let output = ""; - const seqResult = findLongestZeroSequence(address); - 
const compress = seqResult.idx; - let ignore0 = false; - - for (let pieceIndex = 0; pieceIndex <= 7; ++pieceIndex) { - if (ignore0 && address[pieceIndex] === 0) { - continue; - } else if (ignore0) { - ignore0 = false; - } - - if (compress === pieceIndex) { - const separator = pieceIndex === 0 ? "::" : ":"; - output += separator; - ignore0 = true; - continue; - } - - output += address[pieceIndex].toString(16); - - if (pieceIndex !== 7) { - output += ":"; - } - } - - return output; -} - -function parseHost(input, isSpecialArg) { - if (input[0] === "[") { - if (input[input.length - 1] !== "]") { - return failure; - } - - return parseIPv6(input.substring(1, input.length - 1)); - } - - if (!isSpecialArg) { - return parseOpaqueHost(input); - } - - const domain = utf8PercentDecode(input); - const asciiDomain = tr46.toASCII(domain, false, tr46.PROCESSING_OPTIONS.NONTRANSITIONAL, false); - if (asciiDomain === null) { - return failure; - } - - if (containsForbiddenHostCodePoint(asciiDomain)) { - return failure; - } - - const ipv4Host = parseIPv4(asciiDomain); - if (typeof ipv4Host === "number" || ipv4Host === failure) { - return ipv4Host; - } - - return asciiDomain; -} - -function parseOpaqueHost(input) { - if (containsForbiddenHostCodePointExcludingPercent(input)) { - return failure; - } - - let output = ""; - const decoded = punycode.ucs2.decode(input); - for (let i = 0; i < decoded.length; ++i) { - output += percentEncodeChar(decoded[i], isC0ControlPercentEncode); - } - return output; -} - -function findLongestZeroSequence(arr) { - let maxIdx = null; - let maxLen = 1; // only find elements > 1 - let currStart = null; - let currLen = 0; - - for (let i = 0; i < arr.length; ++i) { - if (arr[i] !== 0) { - if (currLen > maxLen) { - maxIdx = currStart; - maxLen = currLen; - } - - currStart = null; - currLen = 0; - } else { - if (currStart === null) { - currStart = i; - } - ++currLen; - } - } - - // if trailing zeros - if (currLen > maxLen) { - maxIdx = currStart; - maxLen = currLen; - } - - return { - idx: maxIdx, - len: maxLen - }; -} - -function serializeHost(host) { - if (typeof host === "number") { - return serializeIPv4(host); - } - - // IPv6 serializer - if (host instanceof Array) { - return "[" + serializeIPv6(host) + "]"; - } - - return host; -} - -function trimControlChars(url) { - return url.replace(/^[\u0000-\u001F\u0020]+|[\u0000-\u001F\u0020]+$/g, ""); -} - -function trimTabAndNewline(url) { - return url.replace(/\u0009|\u000A|\u000D/g, ""); -} - -function shortenPath(url) { - const path = url.path; - if (path.length === 0) { - return; - } - if (url.scheme === "file" && path.length === 1 && isNormalizedWindowsDriveLetter(path[0])) { - return; - } - - path.pop(); -} - -function includesCredentials(url) { - return url.username !== "" || url.password !== ""; -} - -function cannotHaveAUsernamePasswordPort(url) { - return url.host === null || url.host === "" || url.cannotBeABaseURL || url.scheme === "file"; -} - -function isNormalizedWindowsDriveLetter(string) { - return /^[A-Za-z]:$/.test(string); -} - -function URLStateMachine(input, base, encodingOverride, url, stateOverride) { - this.pointer = 0; - this.input = input; - this.base = base || null; - this.encodingOverride = encodingOverride || "utf-8"; - this.stateOverride = stateOverride; - this.url = url; - this.failure = false; - this.parseError = false; - - if (!this.url) { - this.url = { - scheme: "", - username: "", - password: "", - host: null, - port: null, - path: [], - query: null, - fragment: null, - - cannotBeABaseURL: false - }; - 
- const res = trimControlChars(this.input); - if (res !== this.input) { - this.parseError = true; - } - this.input = res; - } - - const res = trimTabAndNewline(this.input); - if (res !== this.input) { - this.parseError = true; - } - this.input = res; - - this.state = stateOverride || "scheme start"; - - this.buffer = ""; - this.atFlag = false; - this.arrFlag = false; - this.passwordTokenSeenFlag = false; - - this.input = punycode.ucs2.decode(this.input); - - for (; this.pointer <= this.input.length; ++this.pointer) { - const c = this.input[this.pointer]; - const cStr = isNaN(c) ? undefined : String.fromCodePoint(c); - - // exec state machine - const ret = this["parse " + this.state](c, cStr); - if (!ret) { - break; // terminate algorithm - } else if (ret === failure) { - this.failure = true; - break; - } - } -} - -URLStateMachine.prototype["parse scheme start"] = function parseSchemeStart(c, cStr) { - if (isASCIIAlpha(c)) { - this.buffer += cStr.toLowerCase(); - this.state = "scheme"; - } else if (!this.stateOverride) { - this.state = "no scheme"; - --this.pointer; - } else { - this.parseError = true; - return failure; - } - - return true; -}; - -URLStateMachine.prototype["parse scheme"] = function parseScheme(c, cStr) { - if (isASCIIAlphanumeric(c) || c === 43 || c === 45 || c === 46) { - this.buffer += cStr.toLowerCase(); - } else if (c === 58) { - if (this.stateOverride) { - if (isSpecial(this.url) && !isSpecialScheme(this.buffer)) { - return false; - } - - if (!isSpecial(this.url) && isSpecialScheme(this.buffer)) { - return false; - } - - if ((includesCredentials(this.url) || this.url.port !== null) && this.buffer === "file") { - return false; - } - - if (this.url.scheme === "file" && (this.url.host === "" || this.url.host === null)) { - return false; - } - } - this.url.scheme = this.buffer; - this.buffer = ""; - if (this.stateOverride) { - return false; - } - if (this.url.scheme === "file") { - if (this.input[this.pointer + 1] !== 47 || this.input[this.pointer + 2] !== 47) { - this.parseError = true; - } - this.state = "file"; - } else if (isSpecial(this.url) && this.base !== null && this.base.scheme === this.url.scheme) { - this.state = "special relative or authority"; - } else if (isSpecial(this.url)) { - this.state = "special authority slashes"; - } else if (this.input[this.pointer + 1] === 47) { - this.state = "path or authority"; - ++this.pointer; - } else { - this.url.cannotBeABaseURL = true; - this.url.path.push(""); - this.state = "cannot-be-a-base-URL path"; - } - } else if (!this.stateOverride) { - this.buffer = ""; - this.state = "no scheme"; - this.pointer = -1; - } else { - this.parseError = true; - return failure; - } - - return true; -}; - -URLStateMachine.prototype["parse no scheme"] = function parseNoScheme(c) { - if (this.base === null || (this.base.cannotBeABaseURL && c !== 35)) { - return failure; - } else if (this.base.cannotBeABaseURL && c === 35) { - this.url.scheme = this.base.scheme; - this.url.path = this.base.path.slice(); - this.url.query = this.base.query; - this.url.fragment = ""; - this.url.cannotBeABaseURL = true; - this.state = "fragment"; - } else if (this.base.scheme === "file") { - this.state = "file"; - --this.pointer; - } else { - this.state = "relative"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse special relative or authority"] = function parseSpecialRelativeOrAuthority(c) { - if (c === 47 && this.input[this.pointer + 1] === 47) { - this.state = "special authority ignore slashes"; - ++this.pointer; - } else { - 
this.parseError = true; - this.state = "relative"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse path or authority"] = function parsePathOrAuthority(c) { - if (c === 47) { - this.state = "authority"; - } else { - this.state = "path"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse relative"] = function parseRelative(c) { - this.url.scheme = this.base.scheme; - if (isNaN(c)) { - this.url.username = this.base.username; - this.url.password = this.base.password; - this.url.host = this.base.host; - this.url.port = this.base.port; - this.url.path = this.base.path.slice(); - this.url.query = this.base.query; - } else if (c === 47) { - this.state = "relative slash"; - } else if (c === 63) { - this.url.username = this.base.username; - this.url.password = this.base.password; - this.url.host = this.base.host; - this.url.port = this.base.port; - this.url.path = this.base.path.slice(); - this.url.query = ""; - this.state = "query"; - } else if (c === 35) { - this.url.username = this.base.username; - this.url.password = this.base.password; - this.url.host = this.base.host; - this.url.port = this.base.port; - this.url.path = this.base.path.slice(); - this.url.query = this.base.query; - this.url.fragment = ""; - this.state = "fragment"; - } else if (isSpecial(this.url) && c === 92) { - this.parseError = true; - this.state = "relative slash"; - } else { - this.url.username = this.base.username; - this.url.password = this.base.password; - this.url.host = this.base.host; - this.url.port = this.base.port; - this.url.path = this.base.path.slice(0, this.base.path.length - 1); - - this.state = "path"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse relative slash"] = function parseRelativeSlash(c) { - if (isSpecial(this.url) && (c === 47 || c === 92)) { - if (c === 92) { - this.parseError = true; - } - this.state = "special authority ignore slashes"; - } else if (c === 47) { - this.state = "authority"; - } else { - this.url.username = this.base.username; - this.url.password = this.base.password; - this.url.host = this.base.host; - this.url.port = this.base.port; - this.state = "path"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse special authority slashes"] = function parseSpecialAuthoritySlashes(c) { - if (c === 47 && this.input[this.pointer + 1] === 47) { - this.state = "special authority ignore slashes"; - ++this.pointer; - } else { - this.parseError = true; - this.state = "special authority ignore slashes"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse special authority ignore slashes"] = function parseSpecialAuthorityIgnoreSlashes(c) { - if (c !== 47 && c !== 92) { - this.state = "authority"; - --this.pointer; - } else { - this.parseError = true; - } - - return true; -}; - -URLStateMachine.prototype["parse authority"] = function parseAuthority(c, cStr) { - if (c === 64) { - this.parseError = true; - if (this.atFlag) { - this.buffer = "%40" + this.buffer; - } - this.atFlag = true; - - // careful, this is based on buffer and has its own pointer (this.pointer != pointer) and inner chars - const len = countSymbols(this.buffer); - for (let pointer = 0; pointer < len; ++pointer) { - const codePoint = this.buffer.codePointAt(pointer); - - if (codePoint === 58 && !this.passwordTokenSeenFlag) { - this.passwordTokenSeenFlag = true; - continue; - } - const encodedCodePoints = percentEncodeChar(codePoint, isUserinfoPercentEncode); - if 
(this.passwordTokenSeenFlag) { - this.url.password += encodedCodePoints; - } else { - this.url.username += encodedCodePoints; - } - } - this.buffer = ""; - } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || - (isSpecial(this.url) && c === 92)) { - if (this.atFlag && this.buffer === "") { - this.parseError = true; - return failure; - } - this.pointer -= countSymbols(this.buffer) + 1; - this.buffer = ""; - this.state = "host"; - } else { - this.buffer += cStr; - } - - return true; -}; - -URLStateMachine.prototype["parse hostname"] = -URLStateMachine.prototype["parse host"] = function parseHostName(c, cStr) { - if (this.stateOverride && this.url.scheme === "file") { - --this.pointer; - this.state = "file host"; - } else if (c === 58 && !this.arrFlag) { - if (this.buffer === "") { - this.parseError = true; - return failure; - } - - const host = parseHost(this.buffer, isSpecial(this.url)); - if (host === failure) { - return failure; - } - - this.url.host = host; - this.buffer = ""; - this.state = "port"; - if (this.stateOverride === "hostname") { - return false; - } - } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || - (isSpecial(this.url) && c === 92)) { - --this.pointer; - if (isSpecial(this.url) && this.buffer === "") { - this.parseError = true; - return failure; - } else if (this.stateOverride && this.buffer === "" && - (includesCredentials(this.url) || this.url.port !== null)) { - this.parseError = true; - return false; - } - - const host = parseHost(this.buffer, isSpecial(this.url)); - if (host === failure) { - return failure; - } - - this.url.host = host; - this.buffer = ""; - this.state = "path start"; - if (this.stateOverride) { - return false; - } - } else { - if (c === 91) { - this.arrFlag = true; - } else if (c === 93) { - this.arrFlag = false; - } - this.buffer += cStr; - } - - return true; -}; - -URLStateMachine.prototype["parse port"] = function parsePort(c, cStr) { - if (isASCIIDigit(c)) { - this.buffer += cStr; - } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || - (isSpecial(this.url) && c === 92) || - this.stateOverride) { - if (this.buffer !== "") { - const port = parseInt(this.buffer); - if (port > Math.pow(2, 16) - 1) { - this.parseError = true; - return failure; - } - this.url.port = port === defaultPort(this.url.scheme) ? 
null : port; - this.buffer = ""; - } - if (this.stateOverride) { - return false; - } - this.state = "path start"; - --this.pointer; - } else { - this.parseError = true; - return failure; - } - - return true; -}; - -const fileOtherwiseCodePoints = new Set([47, 92, 63, 35]); - -URLStateMachine.prototype["parse file"] = function parseFile(c) { - this.url.scheme = "file"; - - if (c === 47 || c === 92) { - if (c === 92) { - this.parseError = true; - } - this.state = "file slash"; - } else if (this.base !== null && this.base.scheme === "file") { - if (isNaN(c)) { - this.url.host = this.base.host; - this.url.path = this.base.path.slice(); - this.url.query = this.base.query; - } else if (c === 63) { - this.url.host = this.base.host; - this.url.path = this.base.path.slice(); - this.url.query = ""; - this.state = "query"; - } else if (c === 35) { - this.url.host = this.base.host; - this.url.path = this.base.path.slice(); - this.url.query = this.base.query; - this.url.fragment = ""; - this.state = "fragment"; - } else { - if (this.input.length - this.pointer - 1 === 0 || // remaining consists of 0 code points - !isWindowsDriveLetterCodePoints(c, this.input[this.pointer + 1]) || - (this.input.length - this.pointer - 1 >= 2 && // remaining has at least 2 code points - !fileOtherwiseCodePoints.has(this.input[this.pointer + 2]))) { - this.url.host = this.base.host; - this.url.path = this.base.path.slice(); - shortenPath(this.url); - } else { - this.parseError = true; - } - - this.state = "path"; - --this.pointer; - } - } else { - this.state = "path"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse file slash"] = function parseFileSlash(c) { - if (c === 47 || c === 92) { - if (c === 92) { - this.parseError = true; - } - this.state = "file host"; - } else { - if (this.base !== null && this.base.scheme === "file") { - if (isNormalizedWindowsDriveLetterString(this.base.path[0])) { - this.url.path.push(this.base.path[0]); - } else { - this.url.host = this.base.host; - } - } - this.state = "path"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse file host"] = function parseFileHost(c, cStr) { - if (isNaN(c) || c === 47 || c === 92 || c === 63 || c === 35) { - --this.pointer; - if (!this.stateOverride && isWindowsDriveLetterString(this.buffer)) { - this.parseError = true; - this.state = "path"; - } else if (this.buffer === "") { - this.url.host = ""; - if (this.stateOverride) { - return false; - } - this.state = "path start"; - } else { - let host = parseHost(this.buffer, isSpecial(this.url)); - if (host === failure) { - return failure; - } - if (host === "localhost") { - host = ""; - } - this.url.host = host; - - if (this.stateOverride) { - return false; - } - - this.buffer = ""; - this.state = "path start"; - } - } else { - this.buffer += cStr; - } - - return true; -}; - -URLStateMachine.prototype["parse path start"] = function parsePathStart(c) { - if (isSpecial(this.url)) { - if (c === 92) { - this.parseError = true; - } - this.state = "path"; - - if (c !== 47 && c !== 92) { - --this.pointer; - } - } else if (!this.stateOverride && c === 63) { - this.url.query = ""; - this.state = "query"; - } else if (!this.stateOverride && c === 35) { - this.url.fragment = ""; - this.state = "fragment"; - } else if (c !== undefined) { - this.state = "path"; - if (c !== 47) { - --this.pointer; - } - } - - return true; -}; - -URLStateMachine.prototype["parse path"] = function parsePath(c) { - if (isNaN(c) || c === 47 || (isSpecial(this.url) && c === 92) || - 
(!this.stateOverride && (c === 63 || c === 35))) { - if (isSpecial(this.url) && c === 92) { - this.parseError = true; - } - - if (isDoubleDot(this.buffer)) { - shortenPath(this.url); - if (c !== 47 && !(isSpecial(this.url) && c === 92)) { - this.url.path.push(""); - } - } else if (isSingleDot(this.buffer) && c !== 47 && - !(isSpecial(this.url) && c === 92)) { - this.url.path.push(""); - } else if (!isSingleDot(this.buffer)) { - if (this.url.scheme === "file" && this.url.path.length === 0 && isWindowsDriveLetterString(this.buffer)) { - if (this.url.host !== "" && this.url.host !== null) { - this.parseError = true; - this.url.host = ""; - } - this.buffer = this.buffer[0] + ":"; - } - this.url.path.push(this.buffer); - } - this.buffer = ""; - if (this.url.scheme === "file" && (c === undefined || c === 63 || c === 35)) { - while (this.url.path.length > 1 && this.url.path[0] === "") { - this.parseError = true; - this.url.path.shift(); - } - } - if (c === 63) { - this.url.query = ""; - this.state = "query"; - } - if (c === 35) { - this.url.fragment = ""; - this.state = "fragment"; - } - } else { - // TODO: If c is not a URL code point and not "%", parse error. - - if (c === 37 && - (!isASCIIHex(this.input[this.pointer + 1]) || - !isASCIIHex(this.input[this.pointer + 2]))) { - this.parseError = true; - } - - this.buffer += percentEncodeChar(c, isPathPercentEncode); - } - - return true; -}; - -URLStateMachine.prototype["parse cannot-be-a-base-URL path"] = function parseCannotBeABaseURLPath(c) { - if (c === 63) { - this.url.query = ""; - this.state = "query"; - } else if (c === 35) { - this.url.fragment = ""; - this.state = "fragment"; - } else { - // TODO: Add: not a URL code point - if (!isNaN(c) && c !== 37) { - this.parseError = true; - } - - if (c === 37 && - (!isASCIIHex(this.input[this.pointer + 1]) || - !isASCIIHex(this.input[this.pointer + 2]))) { - this.parseError = true; - } - - if (!isNaN(c)) { - this.url.path[0] = this.url.path[0] + percentEncodeChar(c, isC0ControlPercentEncode); - } - } - - return true; -}; - -URLStateMachine.prototype["parse query"] = function parseQuery(c, cStr) { - if (isNaN(c) || (!this.stateOverride && c === 35)) { - if (!isSpecial(this.url) || this.url.scheme === "ws" || this.url.scheme === "wss") { - this.encodingOverride = "utf-8"; - } - - const buffer = new Buffer(this.buffer); // TODO: Use encoding override instead - for (let i = 0; i < buffer.length; ++i) { - if (buffer[i] < 0x21 || buffer[i] > 0x7E || buffer[i] === 0x22 || buffer[i] === 0x23 || - buffer[i] === 0x3C || buffer[i] === 0x3E) { - this.url.query += percentEncode(buffer[i]); - } else { - this.url.query += String.fromCodePoint(buffer[i]); - } - } - - this.buffer = ""; - if (c === 35) { - this.url.fragment = ""; - this.state = "fragment"; - } - } else { - // TODO: If c is not a URL code point and not "%", parse error. - if (c === 37 && - (!isASCIIHex(this.input[this.pointer + 1]) || - !isASCIIHex(this.input[this.pointer + 2]))) { - this.parseError = true; - } - - this.buffer += cStr; - } - - return true; -}; - -URLStateMachine.prototype["parse fragment"] = function parseFragment(c) { - if (isNaN(c)) { // do nothing - } else if (c === 0x0) { - this.parseError = true; - } else { - // TODO: If c is not a URL code point and not "%", parse error. 
- if (c === 37 && - (!isASCIIHex(this.input[this.pointer + 1]) || - !isASCIIHex(this.input[this.pointer + 2]))) { - this.parseError = true; - } - - this.url.fragment += percentEncodeChar(c, isC0ControlPercentEncode); - } - - return true; -}; - -function serializeURL(url, excludeFragment) { - let output = url.scheme + ":"; - if (url.host !== null) { - output += "//"; - - if (url.username !== "" || url.password !== "") { - output += url.username; - if (url.password !== "") { - output += ":" + url.password; - } - output += "@"; - } - - output += serializeHost(url.host); - - if (url.port !== null) { - output += ":" + url.port; - } - } else if (url.host === null && url.scheme === "file") { - output += "//"; - } - - if (url.cannotBeABaseURL) { - output += url.path[0]; - } else { - for (const string of url.path) { - output += "/" + string; - } - } - - if (url.query !== null) { - output += "?" + url.query; - } - - if (!excludeFragment && url.fragment !== null) { - output += "#" + url.fragment; - } - - return output; -} - -function serializeOrigin(tuple) { - let result = tuple.scheme + "://"; - result += serializeHost(tuple.host); - - if (tuple.port !== null) { - result += ":" + tuple.port; - } - - return result; -} - -module.exports.serializeURL = serializeURL; - -module.exports.serializeURLOrigin = function (url) { - // https://url.spec.whatwg.org/#concept-url-origin - switch (url.scheme) { - case "blob": - try { - return module.exports.serializeURLOrigin(module.exports.parseURL(url.path[0])); - } catch (e) { - // serializing an opaque origin returns "null" - return "null"; - } - case "ftp": - case "gopher": - case "http": - case "https": - case "ws": - case "wss": - return serializeOrigin({ - scheme: url.scheme, - host: url.host, - port: url.port - }); - case "file": - // spec says "exercise to the reader", chrome says "file://" - return "file://"; - default: - // serializing an opaque origin returns "null" - return "null"; - } -}; - -module.exports.basicURLParse = function (input, options) { - if (options === undefined) { - options = {}; - } - - const usm = new URLStateMachine(input, options.baseURL, options.encodingOverride, options.url, options.stateOverride); - if (usm.failure) { - return "failure"; - } - - return usm.url; -}; - -module.exports.setTheUsername = function (url, username) { - url.username = ""; - const decoded = punycode.ucs2.decode(username); - for (let i = 0; i < decoded.length; ++i) { - url.username += percentEncodeChar(decoded[i], isUserinfoPercentEncode); - } -}; - -module.exports.setThePassword = function (url, password) { - url.password = ""; - const decoded = punycode.ucs2.decode(password); - for (let i = 0; i < decoded.length; ++i) { - url.password += percentEncodeChar(decoded[i], isUserinfoPercentEncode); - } -}; - -module.exports.serializeHost = serializeHost; - -module.exports.cannotHaveAUsernamePasswordPort = cannotHaveAUsernamePasswordPort; - -module.exports.serializeInteger = function (integer) { - return String(integer); -}; - -module.exports.parseURL = function (input, options) { - if (options === undefined) { - options = {}; - } - - // We don't handle blobs, so this just delegates: - return module.exports.basicURLParse(input, { baseURL: options.baseURL, encodingOverride: options.encodingOverride }); -}; - - -/***/ }), - -/***/ 3185: -/***/ ((module) => { - -"use strict"; - - -module.exports.mixin = function mixin(target, source) { - const keys = Object.getOwnPropertyNames(source); - for (let i = 0; i < keys.length; ++i) { - Object.defineProperty(target, 
keys[i], Object.getOwnPropertyDescriptor(source, keys[i])); - } -}; - -module.exports.wrapperSymbol = Symbol("wrapper"); -module.exports.implSymbol = Symbol("impl"); - -module.exports.wrapperForImpl = function (impl) { - return impl[module.exports.wrapperSymbol]; -}; - -module.exports.implForWrapper = function (wrapper) { - return wrapper[module.exports.implSymbol]; -}; - - - -/***/ }), - -/***/ 2940: -/***/ ((module) => { - -// Returns a wrapper function that returns a wrapped callback -// The wrapper function should do some stuff, and return a -// presumably different callback function. -// This makes sure that own properties are retained, so that -// decorations and such are not lost along the way. -module.exports = wrappy -function wrappy (fn, cb) { - if (fn && cb) return wrappy(fn)(cb) - - if (typeof fn !== 'function') - throw new TypeError('need wrapper function') - - Object.keys(fn).forEach(function (k) { - wrapper[k] = fn[k] - }) - - return wrapper - - function wrapper() { - var args = new Array(arguments.length) - for (var i = 0; i < args.length; i++) { - args[i] = arguments[i] - } - var ret = fn.apply(this, args) - var cb = args[args.length-1] - if (typeof ret === 'function' && ret !== cb) { - Object.keys(cb).forEach(function (k) { - ret[k] = cb[k] - }) - } - return ret - } -} - - -/***/ }), - -/***/ 7786: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); -var Scalar = __nccwpck_require__(8160); -var resolveBlockMap = __nccwpck_require__(6638); -var resolveBlockSeq = __nccwpck_require__(2257); -var resolveFlowCollection = __nccwpck_require__(6085); - -function composeCollection(CN, ctx, token, tagToken, onError) { - let coll; - switch (token.type) { - case 'block-map': { - coll = resolveBlockMap.resolveBlockMap(CN, ctx, token, onError); - break; - } - case 'block-seq': { - coll = resolveBlockSeq.resolveBlockSeq(CN, ctx, token, onError); - break; - } - case 'flow-collection': { - coll = resolveFlowCollection.resolveFlowCollection(CN, ctx, token, onError); - break; - } - } - if (!tagToken) - return coll; - const tagName = ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg)); - if (!tagName) - return coll; - // Cast needed due to: https://github.com/Microsoft/TypeScript/issues/3841 - const Coll = coll.constructor; - if (tagName === '!' || tagName === Coll.tagName) { - coll.tag = Coll.tagName; - return coll; - } - const expType = Node.isMap(coll) ? 'map' : 'seq'; - let tag = ctx.schema.tags.find(t => t.collection === expType && t.tag === tagName); - if (!tag) { - const kt = ctx.schema.knownTags[tagName]; - if (kt && kt.collection === expType) { - ctx.schema.tags.push(Object.assign({}, kt, { default: false })); - tag = kt; - } - else { - onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, true); - coll.tag = tagName; - return coll; - } - } - const res = tag.resolve(coll, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg), ctx.options); - const node = Node.isNode(res) - ? 
res - : new Scalar.Scalar(res); - node.range = coll.range; - node.tag = tagName; - if (tag?.format) - node.format = tag.format; - return node; -} - -exports.composeCollection = composeCollection; - - -/***/ }), - -/***/ 9157: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Document = __nccwpck_require__(1785); -var composeNode = __nccwpck_require__(6572); -var resolveEnd = __nccwpck_require__(1789); -var resolveProps = __nccwpck_require__(9663); - -function composeDoc(options, directives, { offset, start, value, end }, onError) { - const opts = Object.assign({ _directives: directives }, options); - const doc = new Document.Document(undefined, opts); - const ctx = { - atRoot: true, - directives: doc.directives, - options: doc.options, - schema: doc.schema - }; - const props = resolveProps.resolveProps(start, { - indicator: 'doc-start', - next: value ?? end?.[0], - offset, - onError, - startOnNewline: true - }); - if (props.found) { - doc.directives.docStart = true; - if (value && - (value.type === 'block-map' || value.type === 'block-seq') && - !props.hasNewline) - onError(props.end, 'MISSING_CHAR', 'Block collection cannot start on same line with directives-end marker'); - } - doc.contents = value - ? composeNode.composeNode(ctx, value, props, onError) - : composeNode.composeEmptyNode(ctx, props.end, start, null, props, onError); - const contentEnd = doc.contents.range[2]; - const re = resolveEnd.resolveEnd(end, contentEnd, false, onError); - if (re.comment) - doc.comment = re.comment; - doc.range = [offset, contentEnd, re.offset]; - return doc; -} - -exports.composeDoc = composeDoc; - - -/***/ }), - -/***/ 6572: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Alias = __nccwpck_require__(9712); -var composeCollection = __nccwpck_require__(7786); -var composeScalar = __nccwpck_require__(1490); -var resolveEnd = __nccwpck_require__(1789); -var utilEmptyScalarPosition = __nccwpck_require__(8517); - -const CN = { composeNode, composeEmptyNode }; -function composeNode(ctx, token, props, onError) { - const { spaceBefore, comment, anchor, tag } = props; - let node; - let isSrcToken = true; - switch (token.type) { - case 'alias': - node = composeAlias(ctx, token, onError); - if (anchor || tag) - onError(token, 'ALIAS_PROPS', 'An alias node must not specify any properties'); - break; - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': - case 'block-scalar': - node = composeScalar.composeScalar(ctx, token, tag, onError); - if (anchor) - node.anchor = anchor.source.substring(1); - break; - case 'block-map': - case 'block-seq': - case 'flow-collection': - node = composeCollection.composeCollection(CN, ctx, token, tag, onError); - if (anchor) - node.anchor = anchor.source.substring(1); - break; - default: { - const message = token.type === 'error' - ? 
token.message - : `Unsupported token (type: ${token.type})`; - onError(token, 'UNEXPECTED_TOKEN', message); - node = composeEmptyNode(ctx, token.offset, undefined, null, props, onError); - isSrcToken = false; - } - } - if (anchor && node.anchor === '') - onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string'); - if (spaceBefore) - node.spaceBefore = true; - if (comment) { - if (token.type === 'scalar' && token.source === '') - node.comment = comment; - else - node.commentBefore = comment; - } - // @ts-expect-error Type checking misses meaning of isSrcToken - if (ctx.options.keepSourceTokens && isSrcToken) - node.srcToken = token; - return node; -} -function composeEmptyNode(ctx, offset, before, pos, { spaceBefore, comment, anchor, tag }, onError) { - const token = { - type: 'scalar', - offset: utilEmptyScalarPosition.emptyScalarPosition(offset, before, pos), - indent: -1, - source: '' - }; - const node = composeScalar.composeScalar(ctx, token, tag, onError); - if (anchor) { - node.anchor = anchor.source.substring(1); - if (node.anchor === '') - onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string'); - } - if (spaceBefore) - node.spaceBefore = true; - if (comment) - node.comment = comment; - return node; -} -function composeAlias({ options }, { offset, source, end }, onError) { - const alias = new Alias.Alias(source.substring(1)); - if (alias.source === '') - onError(offset, 'BAD_ALIAS', 'Alias cannot be an empty string'); - if (alias.source.endsWith(':')) - onError(offset + source.length - 1, 'BAD_ALIAS', 'Alias ending in : is ambiguous', true); - const valueEnd = offset + source.length; - const re = resolveEnd.resolveEnd(end, valueEnd, options.strict, onError); - alias.range = [offset, valueEnd, re.offset]; - if (re.comment) - alias.comment = re.comment; - return alias; -} - -exports.composeEmptyNode = composeEmptyNode; -exports.composeNode = composeNode; - - -/***/ }), - -/***/ 1490: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); -var Scalar = __nccwpck_require__(8160); -var resolveBlockScalar = __nccwpck_require__(8172); -var resolveFlowScalar = __nccwpck_require__(6390); - -function composeScalar(ctx, token, tagToken, onError) { - const { value, type, comment, range } = token.type === 'block-scalar' - ? resolveBlockScalar.resolveBlockScalar(token, ctx.options.strict, onError) - : resolveFlowScalar.resolveFlowScalar(token, ctx.options.strict, onError); - const tagName = tagToken - ? ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg)) - : null; - const tag = tagToken && tagName - ? findScalarTagByName(ctx.schema, value, tagName, tagToken, onError) - : token.type === 'scalar' - ? findScalarTagByTest(ctx, value, token, onError) - : ctx.schema[Node.SCALAR]; - let scalar; - try { - const res = tag.resolve(value, msg => onError(tagToken ?? token, 'TAG_RESOLVE_FAILED', msg), ctx.options); - scalar = Node.isScalar(res) ? res : new Scalar.Scalar(res); - } - catch (error) { - const msg = error instanceof Error ? error.message : String(error); - onError(tagToken ?? 
token, 'TAG_RESOLVE_FAILED', msg); - scalar = new Scalar.Scalar(value); - } - scalar.range = range; - scalar.source = value; - if (type) - scalar.type = type; - if (tagName) - scalar.tag = tagName; - if (tag.format) - scalar.format = tag.format; - if (comment) - scalar.comment = comment; - return scalar; -} -function findScalarTagByName(schema, value, tagName, tagToken, onError) { - if (tagName === '!') - return schema[Node.SCALAR]; // non-specific tag - const matchWithTest = []; - for (const tag of schema.tags) { - if (!tag.collection && tag.tag === tagName) { - if (tag.default && tag.test) - matchWithTest.push(tag); - else - return tag; - } - } - for (const tag of matchWithTest) - if (tag.test?.test(value)) - return tag; - const kt = schema.knownTags[tagName]; - if (kt && !kt.collection) { - // Ensure that the known tag is available for stringifying, - // but does not get used by default. - schema.tags.push(Object.assign({}, kt, { default: false, test: undefined })); - return kt; - } - onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, tagName !== 'tag:yaml.org,2002:str'); - return schema[Node.SCALAR]; -} -function findScalarTagByTest({ directives, schema }, value, token, onError) { - const tag = schema.tags.find(tag => tag.default && tag.test?.test(value)) || schema[Node.SCALAR]; - if (schema.compat) { - const compat = schema.compat.find(tag => tag.default && tag.test?.test(value)) ?? - schema[Node.SCALAR]; - if (tag.tag !== compat.tag) { - const ts = directives.tagString(tag.tag); - const cs = directives.tagString(compat.tag); - const msg = `Value may be parsed as either ${ts} or ${cs}`; - onError(token, 'TAG_RESOLVE_FAILED', msg, true); - } - } - return tag; -} - -exports.composeScalar = composeScalar; - - -/***/ }), - -/***/ 9386: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var directives = __nccwpck_require__(8729); -var Document = __nccwpck_require__(1785); -var errors = __nccwpck_require__(3305); -var Node = __nccwpck_require__(9752); -var composeDoc = __nccwpck_require__(9157); -var resolveEnd = __nccwpck_require__(1789); - -function getErrorPos(src) { - if (typeof src === 'number') - return [src, src + 1]; - if (Array.isArray(src)) - return src.length === 2 ? src : [src[0], src[1]]; - const { offset, source } = src; - return [offset, offset + (typeof source === 'string' ? source.length : 1)]; -} -function parsePrelude(prelude) { - let comment = ''; - let atComment = false; - let afterEmptyLine = false; - for (let i = 0; i < prelude.length; ++i) { - const source = prelude[i]; - switch (source[0]) { - case '#': - comment += - (comment === '' ? '' : afterEmptyLine ? '\n\n' : '\n') + - (source.substring(1) || ' '); - atComment = true; - afterEmptyLine = false; - break; - case '%': - if (prelude[i + 1]?.[0] !== '#') - i += 1; - atComment = false; - break; - default: - // This may be wrong after doc-end, but in that case it doesn't matter - if (!atComment) - afterEmptyLine = true; - atComment = false; - } - } - return { comment, afterEmptyLine }; -} -/** - * Compose a stream of CST nodes into a stream of YAML Documents. - * - * ```ts - * import { Composer, Parser } from 'yaml' - * - * const src: string = ... 
- * const tokens = new Parser().parse(src) - * const docs = new Composer().compose(tokens) - * ``` - */ -class Composer { - constructor(options = {}) { - this.doc = null; - this.atDirectives = false; - this.prelude = []; - this.errors = []; - this.warnings = []; - this.onError = (source, code, message, warning) => { - const pos = getErrorPos(source); - if (warning) - this.warnings.push(new errors.YAMLWarning(pos, code, message)); - else - this.errors.push(new errors.YAMLParseError(pos, code, message)); - }; - // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing - this.directives = new directives.Directives({ version: options.version || '1.2' }); - this.options = options; - } - decorate(doc, afterDoc) { - const { comment, afterEmptyLine } = parsePrelude(this.prelude); - //console.log({ dc: doc.comment, prelude, comment }) - if (comment) { - const dc = doc.contents; - if (afterDoc) { - doc.comment = doc.comment ? `${doc.comment}\n${comment}` : comment; - } - else if (afterEmptyLine || doc.directives.docStart || !dc) { - doc.commentBefore = comment; - } - else if (Node.isCollection(dc) && !dc.flow && dc.items.length > 0) { - let it = dc.items[0]; - if (Node.isPair(it)) - it = it.key; - const cb = it.commentBefore; - it.commentBefore = cb ? `${comment}\n${cb}` : comment; - } - else { - const cb = dc.commentBefore; - dc.commentBefore = cb ? `${comment}\n${cb}` : comment; - } - } - if (afterDoc) { - Array.prototype.push.apply(doc.errors, this.errors); - Array.prototype.push.apply(doc.warnings, this.warnings); - } - else { - doc.errors = this.errors; - doc.warnings = this.warnings; - } - this.prelude = []; - this.errors = []; - this.warnings = []; - } - /** - * Current stream status information. - * - * Mostly useful at the end of input for an empty stream. - */ - streamInfo() { - return { - comment: parsePrelude(this.prelude).comment, - directives: this.directives, - errors: this.errors, - warnings: this.warnings - }; - } - /** - * Compose tokens into documents. - * - * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document. - * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly. - */ - *compose(tokens, forceDoc = false, endOffset = -1) { - for (const token of tokens) - yield* this.next(token); - yield* this.end(forceDoc, endOffset); - } - /** Advance the composer by one CST token. */ - *next(token) { - if (process.env.LOG_STREAM) - console.dir(token, { depth: null }); - switch (token.type) { - case 'directive': - this.directives.add(token.source, (offset, message, warning) => { - const pos = getErrorPos(token); - pos[0] += offset; - this.onError(pos, 'BAD_DIRECTIVE', message, warning); - }); - this.prelude.push(token.source); - this.atDirectives = true; - break; - case 'document': { - const doc = composeDoc.composeDoc(this.options, this.directives, token, this.onError); - if (this.atDirectives && !doc.directives.docStart) - this.onError(token, 'MISSING_CHAR', 'Missing directives-end/doc-start indicator line'); - this.decorate(doc, false); - if (this.doc) - yield this.doc; - this.doc = doc; - this.atDirectives = false; - break; - } - case 'byte-order-mark': - case 'space': - break; - case 'comment': - case 'newline': - this.prelude.push(token.source); - break; - case 'error': { - const msg = token.source - ? 
`${token.message}: ${JSON.stringify(token.source)}` - : token.message; - const error = new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg); - if (this.atDirectives || !this.doc) - this.errors.push(error); - else - this.doc.errors.push(error); - break; - } - case 'doc-end': { - if (!this.doc) { - const msg = 'Unexpected doc-end without preceding document'; - this.errors.push(new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg)); - break; - } - this.doc.directives.docEnd = true; - const end = resolveEnd.resolveEnd(token.end, token.offset + token.source.length, this.doc.options.strict, this.onError); - this.decorate(this.doc, true); - if (end.comment) { - const dc = this.doc.comment; - this.doc.comment = dc ? `${dc}\n${end.comment}` : end.comment; - } - this.doc.range[2] = end.offset; - break; - } - default: - this.errors.push(new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', `Unsupported token ${token.type}`)); - } - } - /** - * Call at end of input to yield any remaining document. - * - * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document. - * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly. - */ - *end(forceDoc = false, endOffset = -1) { - if (this.doc) { - this.decorate(this.doc, true); - yield this.doc; - this.doc = null; - } - else if (forceDoc) { - const opts = Object.assign({ _directives: this.directives }, this.options); - const doc = new Document.Document(undefined, opts); - if (this.atDirectives) - this.onError(endOffset, 'MISSING_CHAR', 'Missing directives-end indicator line'); - doc.range = [0, endOffset, endOffset]; - this.decorate(doc, false); - yield doc; - } - } -} - -exports.Composer = Composer; - - -/***/ }), - -/***/ 6638: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Pair = __nccwpck_require__(3497); -var YAMLMap = __nccwpck_require__(6761); -var resolveProps = __nccwpck_require__(9663); -var utilContainsNewline = __nccwpck_require__(7801); -var utilFlowIndentCheck = __nccwpck_require__(7013); -var utilMapIncludes = __nccwpck_require__(379); - -const startColMsg = 'All mapping items must start at the same column'; -function resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError) { - const map = new YAMLMap.YAMLMap(ctx.schema); - if (ctx.atRoot) - ctx.atRoot = false; - let offset = bm.offset; - for (const collItem of bm.items) { - const { start, key, sep, value } = collItem; - // key properties - const keyProps = resolveProps.resolveProps(start, { - indicator: 'explicit-key-ind', - next: key ?? sep?.[0], - offset, - onError, - startOnNewline: true - }); - const implicitKey = !keyProps.found; - if (implicitKey) { - if (key) { - if (key.type === 'block-seq') - onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'A block sequence may not be used as an implicit map key'); - else if ('indent' in key && key.indent !== bm.indent) - onError(offset, 'BAD_INDENT', startColMsg); - } - if (!keyProps.anchor && !keyProps.tag && !sep) { - // TODO: assert being at last item? - if (keyProps.comment) { - if (map.comment) - map.comment += '\n' + keyProps.comment; - else - map.comment = keyProps.comment; - } - continue; - } - if (keyProps.hasNewlineAfterProp || utilContainsNewline.containsNewline(key)) { - onError(key ?? 
start[start.length - 1], 'MULTILINE_IMPLICIT_KEY', 'Implicit keys need to be on a single line'); - } - } - else if (keyProps.found?.indent !== bm.indent) { - onError(offset, 'BAD_INDENT', startColMsg); - } - // key value - const keyStart = keyProps.end; - const keyNode = key - ? composeNode(ctx, key, keyProps, onError) - : composeEmptyNode(ctx, keyStart, start, null, keyProps, onError); - if (ctx.schema.compat) - utilFlowIndentCheck.flowIndentCheck(bm.indent, key, onError); - if (utilMapIncludes.mapIncludes(ctx, map.items, keyNode)) - onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique'); - // value properties - const valueProps = resolveProps.resolveProps(sep ?? [], { - indicator: 'map-value-ind', - next: value, - offset: keyNode.range[2], - onError, - startOnNewline: !key || key.type === 'block-scalar' - }); - offset = valueProps.end; - if (valueProps.found) { - if (implicitKey) { - if (value?.type === 'block-map' && !valueProps.hasNewline) - onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'Nested mappings are not allowed in compact mappings'); - if (ctx.options.strict && - keyProps.start < valueProps.found.offset - 1024) - onError(keyNode.range, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit block mapping key'); - } - // value value - const valueNode = value - ? composeNode(ctx, value, valueProps, onError) - : composeEmptyNode(ctx, offset, sep, null, valueProps, onError); - if (ctx.schema.compat) - utilFlowIndentCheck.flowIndentCheck(bm.indent, value, onError); - offset = valueNode.range[2]; - const pair = new Pair.Pair(keyNode, valueNode); - if (ctx.options.keepSourceTokens) - pair.srcToken = collItem; - map.items.push(pair); - } - else { - // key with no value - if (implicitKey) - onError(keyNode.range, 'MISSING_CHAR', 'Implicit map keys need to be followed by map values'); - if (valueProps.comment) { - if (keyNode.comment) - keyNode.comment += '\n' + valueProps.comment; - else - keyNode.comment = valueProps.comment; - } - const pair = new Pair.Pair(keyNode); - if (ctx.options.keepSourceTokens) - pair.srcToken = collItem; - map.items.push(pair); - } - } - map.range = [bm.offset, offset, offset]; - return map; -} - -exports.resolveBlockMap = resolveBlockMap; - - -/***/ }), - -/***/ 8172: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Scalar = __nccwpck_require__(8160); - -function resolveBlockScalar(scalar, strict, onError) { - const start = scalar.offset; - const header = parseBlockScalarHeader(scalar, strict, onError); - if (!header) - return { value: '', type: null, comment: '', range: [start, start, start] }; - const type = header.mode === '>' ? Scalar.Scalar.BLOCK_FOLDED : Scalar.Scalar.BLOCK_LITERAL; - const lines = scalar.source ? splitLines(scalar.source) : []; - // determine the end of content & start of chomping - let chompStart = lines.length; - for (let i = lines.length - 1; i >= 0; --i) { - const content = lines[i][1]; - if (content === '' || content === '\r') - chompStart = i; - else - break; - } - // shortcut for empty contents - if (chompStart === 0) { - const value = header.chomp === '+' && lines.length > 0 - ? 
'\n'.repeat(Math.max(1, lines.length - 1)) - : ''; - let end = start + header.length; - if (scalar.source) - end += scalar.source.length; - return { value, type, comment: header.comment, range: [start, end, end] }; - } - // find the indentation level to trim from start - let trimIndent = scalar.indent + header.indent; - let offset = scalar.offset + header.length; - let contentStart = 0; - for (let i = 0; i < chompStart; ++i) { - const [indent, content] = lines[i]; - if (content === '' || content === '\r') { - if (header.indent === 0 && indent.length > trimIndent) - trimIndent = indent.length; - } - else { - if (indent.length < trimIndent) { - const message = 'Block scalars with more-indented leading empty lines must use an explicit indentation indicator'; - onError(offset + indent.length, 'MISSING_CHAR', message); - } - if (header.indent === 0) - trimIndent = indent.length; - contentStart = i; - break; - } - offset += indent.length + content.length + 1; - } - // include trailing more-indented empty lines in content - for (let i = lines.length - 1; i >= chompStart; --i) { - if (lines[i][0].length > trimIndent) - chompStart = i + 1; - } - let value = ''; - let sep = ''; - let prevMoreIndented = false; - // leading whitespace is kept intact - for (let i = 0; i < contentStart; ++i) - value += lines[i][0].slice(trimIndent) + '\n'; - for (let i = contentStart; i < chompStart; ++i) { - let [indent, content] = lines[i]; - offset += indent.length + content.length + 1; - const crlf = content[content.length - 1] === '\r'; - if (crlf) - content = content.slice(0, -1); - /* istanbul ignore if already caught in lexer */ - if (content && indent.length < trimIndent) { - const src = header.indent - ? 'explicit indentation indicator' - : 'first line'; - const message = `Block scalar lines must not be less indented than their ${src}`; - onError(offset - content.length - (crlf ? 
2 : 1), 'BAD_INDENT', message); - indent = ''; - } - if (type === Scalar.Scalar.BLOCK_LITERAL) { - value += sep + indent.slice(trimIndent) + content; - sep = '\n'; - } - else if (indent.length > trimIndent || content[0] === '\t') { - // more-indented content within a folded block - if (sep === ' ') - sep = '\n'; - else if (!prevMoreIndented && sep === '\n') - sep = '\n\n'; - value += sep + indent.slice(trimIndent) + content; - sep = '\n'; - prevMoreIndented = true; - } - else if (content === '') { - // empty line - if (sep === '\n') - value += '\n'; - else - sep = '\n'; - } - else { - value += sep + content; - sep = ' '; - prevMoreIndented = false; - } - } - switch (header.chomp) { - case '-': - break; - case '+': - for (let i = chompStart; i < lines.length; ++i) - value += '\n' + lines[i][0].slice(trimIndent); - if (value[value.length - 1] !== '\n') - value += '\n'; - break; - default: - value += '\n'; - } - const end = start + header.length + scalar.source.length; - return { value, type, comment: header.comment, range: [start, end, end] }; -} -function parseBlockScalarHeader({ offset, props }, strict, onError) { - /* istanbul ignore if should not happen */ - if (props[0].type !== 'block-scalar-header') { - onError(props[0], 'IMPOSSIBLE', 'Block scalar header not found'); - return null; - } - const { source } = props[0]; - const mode = source[0]; - let indent = 0; - let chomp = ''; - let error = -1; - for (let i = 1; i < source.length; ++i) { - const ch = source[i]; - if (!chomp && (ch === '-' || ch === '+')) - chomp = ch; - else { - const n = Number(ch); - if (!indent && n) - indent = n; - else if (error === -1) - error = offset + i; - } - } - if (error !== -1) - onError(error, 'UNEXPECTED_TOKEN', `Block scalar header includes extra characters: ${source}`); - let hasSpace = false; - let comment = ''; - let length = source.length; - for (let i = 1; i < props.length; ++i) { - const token = props[i]; - switch (token.type) { - case 'space': - hasSpace = true; - // fallthrough - case 'newline': - length += token.source.length; - break; - case 'comment': - if (strict && !hasSpace) { - const message = 'Comments must be separated from other tokens by white space characters'; - onError(token, 'MISSING_CHAR', message); - } - length += token.source.length; - comment = token.source.substring(1); - break; - case 'error': - onError(token, 'UNEXPECTED_TOKEN', token.message); - length += token.source.length; - break; - /* istanbul ignore next should not happen */ - default: { - const message = `Unexpected token in block scalar header: ${token.type}`; - onError(token, 'UNEXPECTED_TOKEN', message); - const ts = token.source; - if (ts && typeof ts === 'string') - length += ts.length; - } - } - } - return { mode, indent, chomp, comment, length }; -} -/** @returns Array of lines split up as `[indent, content]` */ -function splitLines(source) { - const split = source.split(/\n( *)/); - const first = split[0]; - const m = first.match(/^( *)/); - const line0 = m?.[1] - ? 
[m[1], first.slice(m[1].length)] - : ['', first]; - const lines = [line0]; - for (let i = 1; i < split.length; i += 2) - lines.push([split[i], split[i + 1]]); - return lines; -} - -exports.resolveBlockScalar = resolveBlockScalar; - - -/***/ }), - -/***/ 2257: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var YAMLSeq = __nccwpck_require__(3810); -var resolveProps = __nccwpck_require__(9663); -var utilFlowIndentCheck = __nccwpck_require__(7013); - -function resolveBlockSeq({ composeNode, composeEmptyNode }, ctx, bs, onError) { - const seq = new YAMLSeq.YAMLSeq(ctx.schema); - if (ctx.atRoot) - ctx.atRoot = false; - let offset = bs.offset; - for (const { start, value } of bs.items) { - const props = resolveProps.resolveProps(start, { - indicator: 'seq-item-ind', - next: value, - offset, - onError, - startOnNewline: true - }); - offset = props.end; - if (!props.found) { - if (props.anchor || props.tag || value) { - if (value && value.type === 'block-seq') - onError(offset, 'BAD_INDENT', 'All sequence items must start at the same column'); - else - onError(offset, 'MISSING_CHAR', 'Sequence item without - indicator'); - } - else { - // TODO: assert being at last item? - if (props.comment) - seq.comment = props.comment; - continue; - } - } - const node = value - ? composeNode(ctx, value, props, onError) - : composeEmptyNode(ctx, offset, start, null, props, onError); - if (ctx.schema.compat) - utilFlowIndentCheck.flowIndentCheck(bs.indent, value, onError); - offset = node.range[2]; - seq.items.push(node); - } - seq.range = [bs.offset, offset, offset]; - return seq; -} - -exports.resolveBlockSeq = resolveBlockSeq; - - -/***/ }), - -/***/ 1789: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -function resolveEnd(end, offset, reqSpace, onError) { - let comment = ''; - if (end) { - let hasSpace = false; - let sep = ''; - for (const token of end) { - const { source, type } = token; - switch (type) { - case 'space': - hasSpace = true; - break; - case 'comment': { - if (reqSpace && !hasSpace) - onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters'); - const cb = source.substring(1) || ' '; - if (!comment) - comment = cb; - else - comment += sep + cb; - sep = ''; - break; - } - case 'newline': - if (comment) - sep += source; - hasSpace = true; - break; - default: - onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${type} at node end`); - } - offset += source.length; - } - } - return { comment, offset }; -} - -exports.resolveEnd = resolveEnd; - - -/***/ }), - -/***/ 6085: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); -var Pair = __nccwpck_require__(3497); -var YAMLMap = __nccwpck_require__(6761); -var YAMLSeq = __nccwpck_require__(3810); -var resolveEnd = __nccwpck_require__(1789); -var resolveProps = __nccwpck_require__(9663); -var utilContainsNewline = __nccwpck_require__(7801); -var utilMapIncludes = __nccwpck_require__(379); - -const blockMsg = 'Block collections are not allowed within flow collections'; -const isBlock = (token) => token && (token.type === 'block-map' || token.type === 'block-seq'); -function resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onError) { - const isMap = fc.start.source === '{'; - const fcName = isMap ? 'flow map' : 'flow sequence'; - const coll = isMap - ? 
new YAMLMap.YAMLMap(ctx.schema) - : new YAMLSeq.YAMLSeq(ctx.schema); - coll.flow = true; - const atRoot = ctx.atRoot; - if (atRoot) - ctx.atRoot = false; - let offset = fc.offset + fc.start.source.length; - for (let i = 0; i < fc.items.length; ++i) { - const collItem = fc.items[i]; - const { start, key, sep, value } = collItem; - const props = resolveProps.resolveProps(start, { - flow: fcName, - indicator: 'explicit-key-ind', - next: key ?? sep?.[0], - offset, - onError, - startOnNewline: false - }); - if (!props.found) { - if (!props.anchor && !props.tag && !sep && !value) { - if (i === 0 && props.comma) - onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`); - else if (i < fc.items.length - 1) - onError(props.start, 'UNEXPECTED_TOKEN', `Unexpected empty item in ${fcName}`); - if (props.comment) { - if (coll.comment) - coll.comment += '\n' + props.comment; - else - coll.comment = props.comment; - } - offset = props.end; - continue; - } - if (!isMap && ctx.options.strict && utilContainsNewline.containsNewline(key)) - onError(key, // checked by containsNewline() - 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line'); - } - if (i === 0) { - if (props.comma) - onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`); - } - else { - if (!props.comma) - onError(props.start, 'MISSING_CHAR', `Missing , between ${fcName} items`); - if (props.comment) { - let prevItemComment = ''; - loop: for (const st of start) { - switch (st.type) { - case 'comma': - case 'space': - break; - case 'comment': - prevItemComment = st.source.substring(1); - break loop; - default: - break loop; - } - } - if (prevItemComment) { - let prev = coll.items[coll.items.length - 1]; - if (Node.isPair(prev)) - prev = prev.value ?? prev.key; - if (prev.comment) - prev.comment += '\n' + prevItemComment; - else - prev.comment = prevItemComment; - props.comment = props.comment.substring(prevItemComment.length + 1); - } - } - } - if (!isMap && !sep && !props.found) { - // item is a value in a seq - // → key & sep are empty, start does not include ? or : - const valueNode = value - ? composeNode(ctx, value, props, onError) - : composeEmptyNode(ctx, props.end, sep, null, props, onError); - coll.items.push(valueNode); - offset = valueNode.range[2]; - if (isBlock(value)) - onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg); - } - else { - // item is a key+value pair - // key value - const keyStart = props.end; - const keyNode = key - ? composeNode(ctx, key, props, onError) - : composeEmptyNode(ctx, keyStart, start, null, props, onError); - if (isBlock(key)) - onError(keyNode.range, 'BLOCK_IN_FLOW', blockMsg); - // value properties - const valueProps = resolveProps.resolveProps(sep ?? 
[], { - flow: fcName, - indicator: 'map-value-ind', - next: value, - offset: keyNode.range[2], - onError, - startOnNewline: false - }); - if (valueProps.found) { - if (!isMap && !props.found && ctx.options.strict) { - if (sep) - for (const st of sep) { - if (st === valueProps.found) - break; - if (st.type === 'newline') { - onError(st, 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line'); - break; - } - } - if (props.start < valueProps.found.offset - 1024) - onError(valueProps.found, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit flow sequence key'); - } - } - else if (value) { - if ('source' in value && value.source && value.source[0] === ':') - onError(value, 'MISSING_CHAR', `Missing space after : in ${fcName}`); - else - onError(valueProps.start, 'MISSING_CHAR', `Missing , or : between ${fcName} items`); - } - // value value - const valueNode = value - ? composeNode(ctx, value, valueProps, onError) - : valueProps.found - ? composeEmptyNode(ctx, valueProps.end, sep, null, valueProps, onError) - : null; - if (valueNode) { - if (isBlock(value)) - onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg); - } - else if (valueProps.comment) { - if (keyNode.comment) - keyNode.comment += '\n' + valueProps.comment; - else - keyNode.comment = valueProps.comment; - } - const pair = new Pair.Pair(keyNode, valueNode); - if (ctx.options.keepSourceTokens) - pair.srcToken = collItem; - if (isMap) { - const map = coll; - if (utilMapIncludes.mapIncludes(ctx, map.items, keyNode)) - onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique'); - map.items.push(pair); - } - else { - const map = new YAMLMap.YAMLMap(ctx.schema); - map.flow = true; - map.items.push(pair); - coll.items.push(map); - } - offset = valueNode ? valueNode.range[2] : valueProps.end; - } - } - const expectedEnd = isMap ? '}' : ']'; - const [ce, ...ee] = fc.end; - let cePos = offset; - if (ce && ce.source === expectedEnd) - cePos = ce.offset + ce.source.length; - else { - const name = fcName[0].toUpperCase() + fcName.substring(1); - const msg = atRoot - ? `${name} must end with a ${expectedEnd}` - : `${name} in block collection must be sufficiently indented and end with a ${expectedEnd}`; - onError(offset, atRoot ? 
'MISSING_CHAR' : 'BAD_INDENT', msg); - if (ce && ce.source.length !== 1) - ee.unshift(ce); - } - if (ee.length > 0) { - const end = resolveEnd.resolveEnd(ee, cePos, ctx.options.strict, onError); - if (end.comment) { - if (coll.comment) - coll.comment += '\n' + end.comment; - else - coll.comment = end.comment; - } - coll.range = [fc.offset, cePos, end.offset]; - } - else { - coll.range = [fc.offset, cePos, cePos]; - } - return coll; -} - -exports.resolveFlowCollection = resolveFlowCollection; - - -/***/ }), - -/***/ 6390: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Scalar = __nccwpck_require__(8160); -var resolveEnd = __nccwpck_require__(1789); - -function resolveFlowScalar(scalar, strict, onError) { - const { offset, type, source, end } = scalar; - let _type; - let value; - const _onError = (rel, code, msg) => onError(offset + rel, code, msg); - switch (type) { - case 'scalar': - _type = Scalar.Scalar.PLAIN; - value = plainValue(source, _onError); - break; - case 'single-quoted-scalar': - _type = Scalar.Scalar.QUOTE_SINGLE; - value = singleQuotedValue(source, _onError); - break; - case 'double-quoted-scalar': - _type = Scalar.Scalar.QUOTE_DOUBLE; - value = doubleQuotedValue(source, _onError); - break; - /* istanbul ignore next should not happen */ - default: - onError(scalar, 'UNEXPECTED_TOKEN', `Expected a flow scalar value, but found: ${type}`); - return { - value: '', - type: null, - comment: '', - range: [offset, offset + source.length, offset + source.length] - }; - } - const valueEnd = offset + source.length; - const re = resolveEnd.resolveEnd(end, valueEnd, strict, onError); - return { - value, - type: _type, - comment: re.comment, - range: [offset, valueEnd, re.offset] - }; -} -function plainValue(source, onError) { - let badChar = ''; - switch (source[0]) { - /* istanbul ignore next should not happen */ - case '\t': - badChar = 'a tab character'; - break; - case ',': - badChar = 'flow indicator character ,'; - break; - case '%': - badChar = 'directive indicator character %'; - break; - case '|': - case '>': { - badChar = `block scalar indicator ${source[0]}`; - break; - } - case '@': - case '`': { - badChar = `reserved character ${source[0]}`; - break; - } - } - if (badChar) - onError(0, 'BAD_SCALAR_START', `Plain value cannot start with ${badChar}`); - return foldLines(source); -} -function singleQuotedValue(source, onError) { - if (source[source.length - 1] !== "'" || source.length === 1) - onError(source.length, 'MISSING_CHAR', "Missing closing 'quote"); - return foldLines(source.slice(1, -1)).replace(/''/g, "'"); -} -function foldLines(source) { - /** - * The negative lookbehind here and in the `re` RegExp is to - * prevent causing a polynomial search time in certain cases. - * - * The try-catch is for Safari, which doesn't support this yet: - * https://caniuse.com/js-regexp-lookbehind - */ - let first, line; - try { - first = new RegExp('(.*?)(? wsStart ? source.slice(wsStart, i + 1) : ch; - } - else { - res += ch; - } - } - if (source[source.length - 1] !== '"' || source.length === 1) - onError(source.length, 'MISSING_CHAR', 'Missing closing "quote'); - return res; -} -/** - * Fold a single newline into a space, multiple newlines to N - 1 newlines. 
- * Presumes `source[offset] === '\n'` - */ -function foldNewline(source, offset) { - let fold = ''; - let ch = source[offset + 1]; - while (ch === ' ' || ch === '\t' || ch === '\n' || ch === '\r') { - if (ch === '\r' && source[offset + 2] !== '\n') - break; - if (ch === '\n') - fold += '\n'; - offset += 1; - ch = source[offset + 1]; - } - if (!fold) - fold = ' '; - return { fold, offset }; -} -const escapeCodes = { - '0': '\0', - a: '\x07', - b: '\b', - e: '\x1b', - f: '\f', - n: '\n', - r: '\r', - t: '\t', - v: '\v', - N: '\u0085', - _: '\u00a0', - L: '\u2028', - P: '\u2029', - ' ': ' ', - '"': '"', - '/': '/', - '\\': '\\', - '\t': '\t' -}; -function parseCharCode(source, offset, length, onError) { - const cc = source.substr(offset, length); - const ok = cc.length === length && /^[0-9a-fA-F]+$/.test(cc); - const code = ok ? parseInt(cc, 16) : NaN; - if (isNaN(code)) { - const raw = source.substr(offset - 2, length + 2); - onError(offset - 2, 'BAD_DQ_ESCAPE', `Invalid escape sequence ${raw}`); - return raw; - } - return String.fromCodePoint(code); -} - -exports.resolveFlowScalar = resolveFlowScalar; - - -/***/ }), - -/***/ 9663: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -function resolveProps(tokens, { flow, indicator, next, offset, onError, startOnNewline }) { - let spaceBefore = false; - let atNewline = startOnNewline; - let hasSpace = startOnNewline; - let comment = ''; - let commentSep = ''; - let hasNewline = false; - let hasNewlineAfterProp = false; - let reqSpace = false; - let anchor = null; - let tag = null; - let comma = null; - let found = null; - let start = null; - for (const token of tokens) { - if (reqSpace) { - if (token.type !== 'space' && - token.type !== 'newline' && - token.type !== 'comma') - onError(token.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space'); - reqSpace = false; - } - switch (token.type) { - case 'space': - // At the doc level, tabs at line start may be parsed - // as leading white space rather than indentation. - // In a flow collection, only the parser handles indent. 
- if (!flow && - atNewline && - indicator !== 'doc-start' && - token.source[0] === '\t') - onError(token, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation'); - hasSpace = true; - break; - case 'comment': { - if (!hasSpace) - onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters'); - const cb = token.source.substring(1) || ' '; - if (!comment) - comment = cb; - else - comment += commentSep + cb; - commentSep = ''; - atNewline = false; - break; - } - case 'newline': - if (atNewline) { - if (comment) - comment += token.source; - else - spaceBefore = true; - } - else - commentSep += token.source; - atNewline = true; - hasNewline = true; - if (anchor || tag) - hasNewlineAfterProp = true; - hasSpace = true; - break; - case 'anchor': - if (anchor) - onError(token, 'MULTIPLE_ANCHORS', 'A node can have at most one anchor'); - if (token.source.endsWith(':')) - onError(token.offset + token.source.length - 1, 'BAD_ALIAS', 'Anchor ending in : is ambiguous', true); - anchor = token; - if (start === null) - start = token.offset; - atNewline = false; - hasSpace = false; - reqSpace = true; - break; - case 'tag': { - if (tag) - onError(token, 'MULTIPLE_TAGS', 'A node can have at most one tag'); - tag = token; - if (start === null) - start = token.offset; - atNewline = false; - hasSpace = false; - reqSpace = true; - break; - } - case indicator: - // Could here handle preceding comments differently - if (anchor || tag) - onError(token, 'BAD_PROP_ORDER', `Anchors and tags must be after the ${token.source} indicator`); - if (found) - onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.source} in ${flow ?? 'collection'}`); - found = token; - atNewline = false; - hasSpace = false; - break; - case 'comma': - if (flow) { - if (comma) - onError(token, 'UNEXPECTED_TOKEN', `Unexpected , in ${flow}`); - comma = token; - atNewline = false; - hasSpace = false; - break; - } - // else fallthrough - default: - onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.type} token`); - atNewline = false; - hasSpace = false; - } - } - const last = tokens[tokens.length - 1]; - const end = last ? last.offset + last.source.length : offset; - if (reqSpace && - next && - next.type !== 'space' && - next.type !== 'newline' && - next.type !== 'comma' && - (next.type !== 'scalar' || next.source !== '')) - onError(next.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space'); - return { - comma, - found, - spaceBefore, - comment, - hasNewline, - hasNewlineAfterProp, - anchor, - tag, - end, - start: start ?? 
end - }; -} - -exports.resolveProps = resolveProps; - - -/***/ }), - -/***/ 7801: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -function containsNewline(key) { - if (!key) - return null; - switch (key.type) { - case 'alias': - case 'scalar': - case 'double-quoted-scalar': - case 'single-quoted-scalar': - if (key.source.includes('\n')) - return true; - if (key.end) - for (const st of key.end) - if (st.type === 'newline') - return true; - return false; - case 'flow-collection': - for (const it of key.items) { - for (const st of it.start) - if (st.type === 'newline') - return true; - if (it.sep) - for (const st of it.sep) - if (st.type === 'newline') - return true; - if (containsNewline(it.key) || containsNewline(it.value)) - return true; - } - return false; - default: - return true; - } -} - -exports.containsNewline = containsNewline; - - -/***/ }), - -/***/ 8517: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -function emptyScalarPosition(offset, before, pos) { - if (before) { - if (pos === null) - pos = before.length; - for (let i = pos - 1; i >= 0; --i) { - let st = before[i]; - switch (st.type) { - case 'space': - case 'comment': - case 'newline': - offset -= st.source.length; - continue; - } - // Technically, an empty scalar is immediately after the last non-empty - // node, but it's more useful to place it after any whitespace. - st = before[++i]; - while (st?.type === 'space') { - offset += st.source.length; - st = before[++i]; - } - break; - } - } - return offset; -} - -exports.emptyScalarPosition = emptyScalarPosition; - - -/***/ }), - -/***/ 7013: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var utilContainsNewline = __nccwpck_require__(7801); - -function flowIndentCheck(indent, fc, onError) { - if (fc?.type === 'flow-collection') { - const end = fc.end[0]; - if (end.indent === indent && - (end.source === ']' || end.source === '}') && - utilContainsNewline.containsNewline(fc)) { - const msg = 'Flow end indicator should be more indented than parent'; - onError(end, 'BAD_INDENT', msg, true); - } - } -} - -exports.flowIndentCheck = flowIndentCheck; - - -/***/ }), - -/***/ 379: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); - -function mapIncludes(ctx, items, search) { - const { uniqueKeys } = ctx.options; - if (uniqueKeys === false) - return false; - const isEqual = typeof uniqueKeys === 'function' - ? 
uniqueKeys - : (a, b) => a === b || - (Node.isScalar(a) && - Node.isScalar(b) && - a.value === b.value && - !(a.value === '<<' && ctx.schema.merge)); - return items.some(pair => isEqual(pair.key, search)); -} - -exports.mapIncludes = mapIncludes; - - -/***/ }), - -/***/ 1785: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Alias = __nccwpck_require__(9712); -var Collection = __nccwpck_require__(9797); -var Node = __nccwpck_require__(9752); -var Pair = __nccwpck_require__(3497); -var toJS = __nccwpck_require__(8842); -var Schema = __nccwpck_require__(7425); -var stringify = __nccwpck_require__(1922); -var stringifyDocument = __nccwpck_require__(9615); -var anchors = __nccwpck_require__(584); -var applyReviver = __nccwpck_require__(9833); -var createNode = __nccwpck_require__(9807); -var directives = __nccwpck_require__(8729); - -class Document { - constructor(value, replacer, options) { - /** A comment before this Document */ - this.commentBefore = null; - /** A comment immediately after this Document */ - this.comment = null; - /** Errors encountered during parsing. */ - this.errors = []; - /** Warnings encountered during parsing. */ - this.warnings = []; - Object.defineProperty(this, Node.NODE_TYPE, { value: Node.DOC }); - let _replacer = null; - if (typeof replacer === 'function' || Array.isArray(replacer)) { - _replacer = replacer; - } - else if (options === undefined && replacer) { - options = replacer; - replacer = undefined; - } - const opt = Object.assign({ - intAsBigInt: false, - keepSourceTokens: false, - logLevel: 'warn', - prettyErrors: true, - strict: true, - uniqueKeys: true, - version: '1.2' - }, options); - this.options = opt; - let { version } = opt; - if (options?._directives) { - this.directives = options._directives.atDocument(); - if (this.directives.yaml.explicit) - version = this.directives.yaml.version; - } - else - this.directives = new directives.Directives({ version }); - this.setSchema(version, options); - if (value === undefined) - this.contents = null; - else { - this.contents = this.createNode(value, _replacer, options); - } - } - /** - * Create a deep copy of this Document and its contents. - * - * Custom Node values that inherit from `Object` still refer to their original instances. - */ - clone() { - const copy = Object.create(Document.prototype, { - [Node.NODE_TYPE]: { value: Node.DOC } - }); - copy.commentBefore = this.commentBefore; - copy.comment = this.comment; - copy.errors = this.errors.slice(); - copy.warnings = this.warnings.slice(); - copy.options = Object.assign({}, this.options); - if (this.directives) - copy.directives = this.directives.clone(); - copy.schema = this.schema.clone(); - copy.contents = Node.isNode(this.contents) - ? this.contents.clone(copy.schema) - : this.contents; - if (this.range) - copy.range = this.range.slice(); - return copy; - } - /** Adds a value to the document. */ - add(value) { - if (assertCollection(this.contents)) - this.contents.add(value); - } - /** Adds a value to the document. */ - addIn(path, value) { - if (assertCollection(this.contents)) - this.contents.addIn(path, value); - } - /** - * Create a new `Alias` node, ensuring that the target `node` has the required anchor. - * - * If `node` already has an anchor, `name` is ignored. - * Otherwise, the `node.anchor` value will be set to `name`, - * or if an anchor with that name is already present in the document, - * `name` will be used as a prefix for a new unique anchor. 
- * If `name` is undefined, the generated anchor will use 'a' as a prefix. - */ - createAlias(node, name) { - if (!node.anchor) { - const prev = anchors.anchorNames(this); - node.anchor = - // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing - !name || prev.has(name) ? anchors.findNewAnchor(name || 'a', prev) : name; - } - return new Alias.Alias(node.anchor); - } - createNode(value, replacer, options) { - let _replacer = undefined; - if (typeof replacer === 'function') { - value = replacer.call({ '': value }, '', value); - _replacer = replacer; - } - else if (Array.isArray(replacer)) { - const keyToStr = (v) => typeof v === 'number' || v instanceof String || v instanceof Number; - const asStr = replacer.filter(keyToStr).map(String); - if (asStr.length > 0) - replacer = replacer.concat(asStr); - _replacer = replacer; - } - else if (options === undefined && replacer) { - options = replacer; - replacer = undefined; - } - const { aliasDuplicateObjects, anchorPrefix, flow, keepUndefined, onTagObj, tag } = options ?? {}; - const { onAnchor, setAnchors, sourceObjects } = anchors.createNodeAnchors(this, - // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing - anchorPrefix || 'a'); - const ctx = { - aliasDuplicateObjects: aliasDuplicateObjects ?? true, - keepUndefined: keepUndefined ?? false, - onAnchor, - onTagObj, - replacer: _replacer, - schema: this.schema, - sourceObjects - }; - const node = createNode.createNode(value, tag, ctx); - if (flow && Node.isCollection(node)) - node.flow = true; - setAnchors(); - return node; - } - /** - * Convert a key and a value into a `Pair` using the current schema, - * recursively wrapping all values as `Scalar` or `Collection` nodes. - */ - createPair(key, value, options = {}) { - const k = this.createNode(key, null, options); - const v = this.createNode(value, null, options); - return new Pair.Pair(k, v); - } - /** - * Removes a value from the document. - * @returns `true` if the item was found and removed. - */ - delete(key) { - return assertCollection(this.contents) ? this.contents.delete(key) : false; - } - /** - * Removes a value from the document. - * @returns `true` if the item was found and removed. - */ - deleteIn(path) { - if (Collection.isEmptyPath(path)) { - if (this.contents == null) - return false; - this.contents = null; - return true; - } - return assertCollection(this.contents) - ? this.contents.deleteIn(path) - : false; - } - /** - * Returns item at `key`, or `undefined` if not found. By default unwraps - * scalar values from their surrounding node; to disable set `keepScalar` to - * `true` (collections are always returned intact). - */ - get(key, keepScalar) { - return Node.isCollection(this.contents) - ? this.contents.get(key, keepScalar) - : undefined; - } - /** - * Returns item at `path`, or `undefined` if not found. By default unwraps - * scalar values from their surrounding node; to disable set `keepScalar` to - * `true` (collections are always returned intact). - */ - getIn(path, keepScalar) { - if (Collection.isEmptyPath(path)) - return !keepScalar && Node.isScalar(this.contents) - ? this.contents.value - : this.contents; - return Node.isCollection(this.contents) - ? this.contents.getIn(path, keepScalar) - : undefined; - } - /** - * Checks if the document includes a value with the key `key`. - */ - has(key) { - return Node.isCollection(this.contents) ? this.contents.has(key) : false; - } - /** - * Checks if the document includes a value at `path`. 
- */ - hasIn(path) { - if (Collection.isEmptyPath(path)) - return this.contents !== undefined; - return Node.isCollection(this.contents) ? this.contents.hasIn(path) : false; - } - /** - * Sets a value in this document. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. - */ - set(key, value) { - if (this.contents == null) { - this.contents = Collection.collectionFromPath(this.schema, [key], value); - } - else if (assertCollection(this.contents)) { - this.contents.set(key, value); - } - } - /** - * Sets a value in this document. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. - */ - setIn(path, value) { - if (Collection.isEmptyPath(path)) - this.contents = value; - else if (this.contents == null) { - this.contents = Collection.collectionFromPath(this.schema, Array.from(path), value); - } - else if (assertCollection(this.contents)) { - this.contents.setIn(path, value); - } - } - /** - * Change the YAML version and schema used by the document. - * A `null` version disables support for directives, explicit tags, anchors, and aliases. - * It also requires the `schema` option to be given as a `Schema` instance value. - * - * Overrides all previously set schema options. - */ - setSchema(version, options = {}) { - if (typeof version === 'number') - version = String(version); - let opt; - switch (version) { - case '1.1': - if (this.directives) - this.directives.yaml.version = '1.1'; - else - this.directives = new directives.Directives({ version: '1.1' }); - opt = { merge: true, resolveKnownTags: false, schema: 'yaml-1.1' }; - break; - case '1.2': - case 'next': - if (this.directives) - this.directives.yaml.version = version; - else - this.directives = new directives.Directives({ version }); - opt = { merge: false, resolveKnownTags: true, schema: 'core' }; - break; - case null: - if (this.directives) - delete this.directives; - opt = null; - break; - default: { - const sv = JSON.stringify(version); - throw new Error(`Expected '1.1', '1.2' or null as first argument, but found: ${sv}`); - } - } - // Not using `instanceof Schema` to allow for duck typing - if (options.schema instanceof Object) - this.schema = options.schema; - else if (opt) - this.schema = new Schema.Schema(Object.assign(opt, options)); - else - throw new Error(`With a null YAML version, the { schema: Schema } option is required`); - } - // json & jsonArg are only used from toJSON() - toJS({ json, jsonArg, mapAsMap, maxAliasCount, onAnchor, reviver } = {}) { - const ctx = { - anchors: new Map(), - doc: this, - keep: !json, - mapAsMap: mapAsMap === true, - mapKeyWarned: false, - maxAliasCount: typeof maxAliasCount === 'number' ? maxAliasCount : 100, - stringify: stringify.stringify - }; - const res = toJS.toJS(this.contents, jsonArg ?? '', ctx); - if (typeof onAnchor === 'function') - for (const { count, res } of ctx.anchors.values()) - onAnchor(res, count); - return typeof reviver === 'function' - ? applyReviver.applyReviver(reviver, { '': res }, '', res) - : res; - } - /** - * A JSON representation of the document `contents`. - * - * @param jsonArg Used by `JSON.stringify` to indicate the array index or - * property name. - */ - toJSON(jsonArg, onAnchor) { - return this.toJS({ json: true, jsonArg, mapAsMap: false, onAnchor }); - } - /** A YAML representation of the document. 
*/ - toString(options = {}) { - if (this.errors.length > 0) - throw new Error('Document with errors cannot be stringified'); - if ('indent' in options && - (!Number.isInteger(options.indent) || Number(options.indent) <= 0)) { - const s = JSON.stringify(options.indent); - throw new Error(`"indent" option must be a positive integer, not ${s}`); - } - return stringifyDocument.stringifyDocument(this, options); - } -} -function assertCollection(contents) { - if (Node.isCollection(contents)) - return true; - throw new Error('Expected a YAML collection as document contents'); -} - -exports.Document = Document; - - -/***/ }), - -/***/ 584: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); -var visit = __nccwpck_require__(8234); - -/** - * Verify that the input string is a valid anchor. - * - * Will throw on errors. - */ -function anchorIsValid(anchor) { - if (/[\x00-\x19\s,[\]{}]/.test(anchor)) { - const sa = JSON.stringify(anchor); - const msg = `Anchor must not contain whitespace or control characters: ${sa}`; - throw new Error(msg); - } - return true; -} -function anchorNames(root) { - const anchors = new Set(); - visit.visit(root, { - Value(_key, node) { - if (node.anchor) - anchors.add(node.anchor); - } - }); - return anchors; -} -/** Find a new anchor name with the given `prefix` and a one-indexed suffix. */ -function findNewAnchor(prefix, exclude) { - for (let i = 1; true; ++i) { - const name = `${prefix}${i}`; - if (!exclude.has(name)) - return name; - } -} -function createNodeAnchors(doc, prefix) { - const aliasObjects = []; - const sourceObjects = new Map(); - let prevAnchors = null; - return { - onAnchor: (source) => { - aliasObjects.push(source); - if (!prevAnchors) - prevAnchors = anchorNames(doc); - const anchor = findNewAnchor(prefix, prevAnchors); - prevAnchors.add(anchor); - return anchor; - }, - /** - * With circular references, the source node is only resolved after all - * of its child nodes are. This is why anchors are set only after all of - * the nodes have been created. - */ - setAnchors: () => { - for (const source of aliasObjects) { - const ref = sourceObjects.get(source); - if (typeof ref === 'object' && - ref.anchor && - (Node.isScalar(ref.node) || Node.isCollection(ref.node))) { - ref.node.anchor = ref.anchor; - } - else { - const error = new Error('Failed to resolve repeated object (this should not happen)'); - error.source = source; - throw error; - } - } - }, - sourceObjects - }; -} - -exports.anchorIsValid = anchorIsValid; -exports.anchorNames = anchorNames; -exports.createNodeAnchors = createNodeAnchors; -exports.findNewAnchor = findNewAnchor; - - -/***/ }), - -/***/ 9833: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -/** - * Applies the JSON.parse reviver algorithm as defined in the ECMA-262 spec, - * in section 24.5.1.1 "Runtime Semantics: InternalizeJSONProperty" of the - * 2021 edition: https://tc39.es/ecma262/#sec-json.parse - * - * Includes extensions for handling Map and Set objects. 
- */ -function applyReviver(reviver, obj, key, val) { - if (val && typeof val === 'object') { - if (Array.isArray(val)) { - for (let i = 0, len = val.length; i < len; ++i) { - const v0 = val[i]; - const v1 = applyReviver(reviver, val, String(i), v0); - if (v1 === undefined) - delete val[i]; - else if (v1 !== v0) - val[i] = v1; - } - } - else if (val instanceof Map) { - for (const k of Array.from(val.keys())) { - const v0 = val.get(k); - const v1 = applyReviver(reviver, val, k, v0); - if (v1 === undefined) - val.delete(k); - else if (v1 !== v0) - val.set(k, v1); - } - } - else if (val instanceof Set) { - for (const v0 of Array.from(val)) { - const v1 = applyReviver(reviver, val, v0, v0); - if (v1 === undefined) - val.delete(v0); - else if (v1 !== v0) { - val.delete(v0); - val.add(v1); - } - } - } - else { - for (const [k, v0] of Object.entries(val)) { - const v1 = applyReviver(reviver, val, k, v0); - if (v1 === undefined) - delete val[k]; - else if (v1 !== v0) - val[k] = v1; - } - } - } - return reviver.call(obj, key, val); -} - -exports.applyReviver = applyReviver; - - -/***/ }), - -/***/ 9807: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Alias = __nccwpck_require__(9712); -var Node = __nccwpck_require__(9752); -var Scalar = __nccwpck_require__(8160); - -const defaultTagPrefix = 'tag:yaml.org,2002:'; -function findTagObject(value, tagName, tags) { - if (tagName) { - const match = tags.filter(t => t.tag === tagName); - const tagObj = match.find(t => !t.format) ?? match[0]; - if (!tagObj) - throw new Error(`Tag ${tagName} not found`); - return tagObj; - } - return tags.find(t => t.identify?.(value) && !t.format); -} -function createNode(value, tagName, ctx) { - if (Node.isDocument(value)) - value = value.contents; - if (Node.isNode(value)) - return value; - if (Node.isPair(value)) { - const map = ctx.schema[Node.MAP].createNode?.(ctx.schema, null, ctx); - map.items.push(value); - return map; - } - if (value instanceof String || - value instanceof Number || - value instanceof Boolean || - (typeof BigInt === 'function' && value instanceof BigInt) // not supported everywhere - ) { - // https://tc39.es/ecma262/#sec-serializejsonproperty - value = value.valueOf(); - } - const { aliasDuplicateObjects, onAnchor, onTagObj, schema, sourceObjects } = ctx; - // Detect duplicate references to the same object & use Alias nodes for all - // after first. The `ref` wrapper allows for circular references to resolve. - let ref = undefined; - if (aliasDuplicateObjects && value && typeof value === 'object') { - ref = sourceObjects.get(value); - if (ref) { - if (!ref.anchor) - ref.anchor = onAnchor(value); - return new Alias.Alias(ref.anchor); - } - else { - ref = { anchor: null, node: null }; - sourceObjects.set(value, ref); - } - } - if (tagName?.startsWith('!!')) - tagName = defaultTagPrefix + tagName.slice(2); - let tagObj = findTagObject(value, tagName, schema.tags); - if (!tagObj) { - if (value && typeof value.toJSON === 'function') { - // eslint-disable-next-line @typescript-eslint/no-unsafe-call - value = value.toJSON(); - } - if (!value || typeof value !== 'object') { - const node = new Scalar.Scalar(value); - if (ref) - ref.node = node; - return node; - } - tagObj = - value instanceof Map - ? schema[Node.MAP] - : Symbol.iterator in Object(value) - ? schema[Node.SEQ] - : schema[Node.MAP]; - } - if (onTagObj) { - onTagObj(tagObj); - delete ctx.onTagObj; - } - const node = tagObj?.createNode - ? 
tagObj.createNode(ctx.schema, value, ctx) - : new Scalar.Scalar(value); - if (tagName) - node.tag = tagName; - if (ref) - ref.node = node; - return node; -} - -exports.createNode = createNode; - - -/***/ }), - -/***/ 8729: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); -var visit = __nccwpck_require__(8234); - -const escapeChars = { - '!': '%21', - ',': '%2C', - '[': '%5B', - ']': '%5D', - '{': '%7B', - '}': '%7D' -}; -const escapeTagName = (tn) => tn.replace(/[!,[\]{}]/g, ch => escapeChars[ch]); -class Directives { - constructor(yaml, tags) { - /** - * The directives-end/doc-start marker `---`. If `null`, a marker may still be - * included in the document's stringified representation. - */ - this.docStart = null; - /** The doc-end marker `...`. */ - this.docEnd = false; - this.yaml = Object.assign({}, Directives.defaultYaml, yaml); - this.tags = Object.assign({}, Directives.defaultTags, tags); - } - clone() { - const copy = new Directives(this.yaml, this.tags); - copy.docStart = this.docStart; - return copy; - } - /** - * During parsing, get a Directives instance for the current document and - * update the stream state according to the current version's spec. - */ - atDocument() { - const res = new Directives(this.yaml, this.tags); - switch (this.yaml.version) { - case '1.1': - this.atNextDocument = true; - break; - case '1.2': - this.atNextDocument = false; - this.yaml = { - explicit: Directives.defaultYaml.explicit, - version: '1.2' - }; - this.tags = Object.assign({}, Directives.defaultTags); - break; - } - return res; - } - /** - * @param onError - May be called even if the action was successful - * @returns `true` on success - */ - add(line, onError) { - if (this.atNextDocument) { - this.yaml = { explicit: Directives.defaultYaml.explicit, version: '1.1' }; - this.tags = Object.assign({}, Directives.defaultTags); - this.atNextDocument = false; - } - const parts = line.trim().split(/[ \t]+/); - const name = parts.shift(); - switch (name) { - case '%TAG': { - if (parts.length !== 2) { - onError(0, '%TAG directive should contain exactly two parts'); - if (parts.length < 2) - return false; - } - const [handle, prefix] = parts; - this.tags[handle] = prefix; - return true; - } - case '%YAML': { - this.yaml.explicit = true; - if (parts.length !== 1) { - onError(0, '%YAML directive should contain exactly one part'); - return false; - } - const [version] = parts; - if (version === '1.1' || version === '1.2') { - this.yaml.version = version; - return true; - } - else { - const isValid = /^\d+\.\d+$/.test(version); - onError(6, `Unsupported YAML version ${version}`, isValid); - return false; - } - } - default: - onError(0, `Unknown directive ${name}`, true); - return false; - } - } - /** - * Resolves a tag, matching handles to those defined in %TAG directives. - * - * @returns Resolved tag, which may also be the non-specific tag `'!'` or a - * `'!local'` tag, or `null` if unresolvable. - */ - tagName(source, onError) { - if (source === '!') - return '!'; // non-specific tag - if (source[0] !== '!') { - onError(`Not a valid tag: ${source}`); - return null; - } - if (source[1] === '<') { - const verbatim = source.slice(2, -1); - if (verbatim === '!' 
|| verbatim === '!!') { - onError(`Verbatim tags aren't resolved, so ${source} is invalid.`); - return null; - } - if (source[source.length - 1] !== '>') - onError('Verbatim tags must end with a >'); - return verbatim; - } - const [, handle, suffix] = source.match(/^(.*!)([^!]*)$/); - if (!suffix) - onError(`The ${source} tag has no suffix`); - const prefix = this.tags[handle]; - if (prefix) - return prefix + decodeURIComponent(suffix); - if (handle === '!') - return source; // local tag - onError(`Could not resolve tag: ${source}`); - return null; - } - /** - * Given a fully resolved tag, returns its printable string form, - * taking into account current tag prefixes and defaults. - */ - tagString(tag) { - for (const [handle, prefix] of Object.entries(this.tags)) { - if (tag.startsWith(prefix)) - return handle + escapeTagName(tag.substring(prefix.length)); - } - return tag[0] === '!' ? tag : `!<${tag}>`; - } - toString(doc) { - const lines = this.yaml.explicit - ? [`%YAML ${this.yaml.version || '1.2'}`] - : []; - const tagEntries = Object.entries(this.tags); - let tagNames; - if (doc && tagEntries.length > 0 && Node.isNode(doc.contents)) { - const tags = {}; - visit.visit(doc.contents, (_key, node) => { - if (Node.isNode(node) && node.tag) - tags[node.tag] = true; - }); - tagNames = Object.keys(tags); - } - else - tagNames = []; - for (const [handle, prefix] of tagEntries) { - if (handle === '!!' && prefix === 'tag:yaml.org,2002:') - continue; - if (!doc || tagNames.some(tn => tn.startsWith(prefix))) - lines.push(`%TAG ${handle} ${prefix}`); - } - return lines.join('\n'); - } -} -Directives.defaultYaml = { explicit: false, version: '1.2' }; -Directives.defaultTags = { '!!': 'tag:yaml.org,2002:' }; - -exports.Directives = Directives; - - -/***/ }), - -/***/ 3305: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -class YAMLError extends Error { - constructor(name, pos, code, message) { - super(); - this.name = name; - this.code = code; - this.message = message; - this.pos = pos; - } -} -class YAMLParseError extends YAMLError { - constructor(pos, code, message) { - super('YAMLParseError', pos, code, message); - } -} -class YAMLWarning extends YAMLError { - constructor(pos, code, message) { - super('YAMLWarning', pos, code, message); - } -} -const prettifyError = (src, lc) => (error) => { - if (error.pos[0] === -1) - return; - error.linePos = error.pos.map(pos => lc.linePos(pos)); - const { line, col } = error.linePos[0]; - error.message += ` at line ${line}, column ${col}`; - let ci = col - 1; - let lineStr = src - .substring(lc.lineStarts[line - 1], lc.lineStarts[line]) - .replace(/[\n\r]+$/, ''); - // Trim to max 80 chars, keeping col position near the middle - if (ci >= 60 && lineStr.length > 80) { - const trimStart = Math.min(ci - 39, lineStr.length - 79); - lineStr = '…' + lineStr.substring(trimStart); - ci -= trimStart - 1; - } - if (lineStr.length > 80) - lineStr = lineStr.substring(0, 79) + '…'; - // Include previous line in context if pointing at line start - if (line > 1 && /^ *$/.test(lineStr.substring(0, ci))) { - // Regexp won't match if start is trimmed - let prev = src.substring(lc.lineStarts[line - 2], lc.lineStarts[line - 1]); - if (prev.length > 80) - prev = prev.substring(0, 79) + '…\n'; - lineStr = prev + lineStr; - } - if (/[^ ]/.test(lineStr)) { - let count = 1; - const end = error.linePos[1]; - if (end && end.line === line && end.col > col) { - count = Math.min(end.col - col, 80 - ci); - } - const pointer = ' '.repeat(ci) + '^'.repeat(count); - 
error.message += `:\n\n${lineStr}\n${pointer}\n`; - } -}; - -exports.YAMLError = YAMLError; -exports.YAMLParseError = YAMLParseError; -exports.YAMLWarning = YAMLWarning; -exports.prettifyError = prettifyError; - - -/***/ }), - -/***/ 5065: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var composer = __nccwpck_require__(9386); -var Document = __nccwpck_require__(1785); -var Schema = __nccwpck_require__(7425); -var errors = __nccwpck_require__(3305); -var Alias = __nccwpck_require__(9712); -var Node = __nccwpck_require__(9752); -var Pair = __nccwpck_require__(3497); -var Scalar = __nccwpck_require__(8160); -var YAMLMap = __nccwpck_require__(6761); -var YAMLSeq = __nccwpck_require__(3810); -var cst = __nccwpck_require__(9726); -var lexer = __nccwpck_require__(7263); -var lineCounter = __nccwpck_require__(5748); -var parser = __nccwpck_require__(6466); -var publicApi = __nccwpck_require__(9191); -var visit = __nccwpck_require__(8234); - - - -exports.Composer = composer.Composer; -exports.Document = Document.Document; -exports.Schema = Schema.Schema; -exports.YAMLError = errors.YAMLError; -exports.YAMLParseError = errors.YAMLParseError; -exports.YAMLWarning = errors.YAMLWarning; -exports.Alias = Alias.Alias; -exports.isAlias = Node.isAlias; -exports.isCollection = Node.isCollection; -exports.isDocument = Node.isDocument; -exports.isMap = Node.isMap; -exports.isNode = Node.isNode; -exports.isPair = Node.isPair; -exports.isScalar = Node.isScalar; -exports.isSeq = Node.isSeq; -exports.Pair = Pair.Pair; -exports.Scalar = Scalar.Scalar; -exports.YAMLMap = YAMLMap.YAMLMap; -exports.YAMLSeq = YAMLSeq.YAMLSeq; -exports.CST = cst; -exports.Lexer = lexer.Lexer; -exports.LineCounter = lineCounter.LineCounter; -exports.Parser = parser.Parser; -exports.parse = publicApi.parse; -exports.parseAllDocuments = publicApi.parseAllDocuments; -exports.parseDocument = publicApi.parseDocument; -exports.stringify = publicApi.stringify; -exports.visit = visit.visit; -exports.visitAsync = visit.visitAsync; - - -/***/ }), - -/***/ 469: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -function debug(logLevel, ...messages) { - if (logLevel === 'debug') - console.log(...messages); -} -function warn(logLevel, warning) { - if (logLevel === 'debug' || logLevel === 'warn') { - if (typeof process !== 'undefined' && process.emitWarning) - process.emitWarning(warning); - else - console.warn(warning); - } -} - -exports.debug = debug; -exports.warn = warn; - - -/***/ }), - -/***/ 9712: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var anchors = __nccwpck_require__(584); -var visit = __nccwpck_require__(8234); -var Node = __nccwpck_require__(9752); - -class Alias extends Node.NodeBase { - constructor(source) { - super(Node.ALIAS); - this.source = source; - Object.defineProperty(this, 'tag', { - set() { - throw new Error('Alias nodes cannot have tags'); - } - }); - } - /** - * Resolve the value of this alias within `doc`, finding the last - * instance of the `source` anchor before this node. 
- */ - resolve(doc) { - let found = undefined; - visit.visit(doc, { - Node: (_key, node) => { - if (node === this) - return visit.visit.BREAK; - if (node.anchor === this.source) - found = node; - } - }); - return found; - } - toJSON(_arg, ctx) { - if (!ctx) - return { source: this.source }; - const { anchors, doc, maxAliasCount } = ctx; - const source = this.resolve(doc); - if (!source) { - const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`; - throw new ReferenceError(msg); - } - const data = anchors.get(source); - /* istanbul ignore if */ - if (!data || data.res === undefined) { - const msg = 'This should not happen: Alias anchor was not resolved?'; - throw new ReferenceError(msg); - } - if (maxAliasCount >= 0) { - data.count += 1; - if (data.aliasCount === 0) - data.aliasCount = getAliasCount(doc, source, anchors); - if (data.count * data.aliasCount > maxAliasCount) { - const msg = 'Excessive alias count indicates a resource exhaustion attack'; - throw new ReferenceError(msg); - } - } - return data.res; - } - toString(ctx, _onComment, _onChompKeep) { - const src = `*${this.source}`; - if (ctx) { - anchors.anchorIsValid(this.source); - if (ctx.options.verifyAliasOrder && !ctx.anchors.has(this.source)) { - const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`; - throw new Error(msg); - } - if (ctx.implicitKey) - return `${src} `; - } - return src; - } -} -function getAliasCount(doc, node, anchors) { - if (Node.isAlias(node)) { - const source = node.resolve(doc); - const anchor = anchors && source && anchors.get(source); - return anchor ? anchor.count * anchor.aliasCount : 0; - } - else if (Node.isCollection(node)) { - let count = 0; - for (const item of node.items) { - const c = getAliasCount(doc, item, anchors); - if (c > count) - count = c; - } - return count; - } - else if (Node.isPair(node)) { - const kc = getAliasCount(doc, node.key, anchors); - const vc = getAliasCount(doc, node.value, anchors); - return Math.max(kc, vc); - } - return 1; -} - -exports.Alias = Alias; - - -/***/ }), - -/***/ 9797: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var createNode = __nccwpck_require__(9807); -var Node = __nccwpck_require__(9752); - -function collectionFromPath(schema, path, value) { - let v = value; - for (let i = path.length - 1; i >= 0; --i) { - const k = path[i]; - if (typeof k === 'number' && Number.isInteger(k) && k >= 0) { - const a = []; - a[k] = v; - v = a; - } - else { - v = new Map([[k, v]]); - } - } - return createNode.createNode(v, undefined, { - aliasDuplicateObjects: false, - keepUndefined: false, - onAnchor: () => { - throw new Error('This should not happen, please report a bug.'); - }, - schema, - sourceObjects: new Map() - }); -} -// Type guard is intentionally a little wrong so as to be more useful, -// as it does not cover untypable empty non-string iterables (e.g. []). -const isEmptyPath = (path) => path == null || - (typeof path === 'object' && !!path[Symbol.iterator]().next().done); -class Collection extends Node.NodeBase { - constructor(type, schema) { - super(type); - Object.defineProperty(this, 'schema', { - value: schema, - configurable: true, - enumerable: false, - writable: true - }); - } - /** - * Create a copy of this collection. 
- * - * @param schema - If defined, overwrites the original's schema - */ - clone(schema) { - const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this)); - if (schema) - copy.schema = schema; - copy.items = copy.items.map(it => Node.isNode(it) || Node.isPair(it) ? it.clone(schema) : it); - if (this.range) - copy.range = this.range.slice(); - return copy; - } - /** - * Adds a value to the collection. For `!!map` and `!!omap` the value must - * be a Pair instance or a `{ key, value }` object, which may not have a key - * that already exists in the map. - */ - addIn(path, value) { - if (isEmptyPath(path)) - this.add(value); - else { - const [key, ...rest] = path; - const node = this.get(key, true); - if (Node.isCollection(node)) - node.addIn(rest, value); - else if (node === undefined && this.schema) - this.set(key, collectionFromPath(this.schema, rest, value)); - else - throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`); - } - } - /** - * Removes a value from the collection. - * @returns `true` if the item was found and removed. - */ - deleteIn(path) { - const [key, ...rest] = path; - if (rest.length === 0) - return this.delete(key); - const node = this.get(key, true); - if (Node.isCollection(node)) - return node.deleteIn(rest); - else - throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`); - } - /** - * Returns item at `key`, or `undefined` if not found. By default unwraps - * scalar values from their surrounding node; to disable set `keepScalar` to - * `true` (collections are always returned intact). - */ - getIn(path, keepScalar) { - const [key, ...rest] = path; - const node = this.get(key, true); - if (rest.length === 0) - return !keepScalar && Node.isScalar(node) ? node.value : node; - else - return Node.isCollection(node) ? node.getIn(rest, keepScalar) : undefined; - } - hasAllNullValues(allowScalar) { - return this.items.every(node => { - if (!Node.isPair(node)) - return false; - const n = node.value; - return (n == null || - (allowScalar && - Node.isScalar(n) && - n.value == null && - !n.commentBefore && - !n.comment && - !n.tag)); - }); - } - /** - * Checks if the collection includes a value with the key `key`. - */ - hasIn(path) { - const [key, ...rest] = path; - if (rest.length === 0) - return this.has(key); - const node = this.get(key, true); - return Node.isCollection(node) ? node.hasIn(rest) : false; - } - /** - * Sets a value in this collection. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. - */ - setIn(path, value) { - const [key, ...rest] = path; - if (rest.length === 0) { - this.set(key, value); - } - else { - const node = this.get(key, true); - if (Node.isCollection(node)) - node.setIn(rest, value); - else if (node === undefined && this.schema) - this.set(key, collectionFromPath(this.schema, rest, value)); - else - throw new Error(`Expected YAML collection at ${key}. 
Remaining path: ${rest}`); - } - } -} -Collection.maxFlowStringSingleLineLength = 60; - -exports.Collection = Collection; -exports.collectionFromPath = collectionFromPath; -exports.isEmptyPath = isEmptyPath; - - -/***/ }), - -/***/ 9752: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -const ALIAS = Symbol.for('yaml.alias'); -const DOC = Symbol.for('yaml.document'); -const MAP = Symbol.for('yaml.map'); -const PAIR = Symbol.for('yaml.pair'); -const SCALAR = Symbol.for('yaml.scalar'); -const SEQ = Symbol.for('yaml.seq'); -const NODE_TYPE = Symbol.for('yaml.node.type'); -const isAlias = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === ALIAS; -const isDocument = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === DOC; -const isMap = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === MAP; -const isPair = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === PAIR; -const isScalar = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SCALAR; -const isSeq = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SEQ; -function isCollection(node) { - if (node && typeof node === 'object') - switch (node[NODE_TYPE]) { - case MAP: - case SEQ: - return true; - } - return false; -} -function isNode(node) { - if (node && typeof node === 'object') - switch (node[NODE_TYPE]) { - case ALIAS: - case MAP: - case SCALAR: - case SEQ: - return true; - } - return false; -} -const hasAnchor = (node) => (isScalar(node) || isCollection(node)) && !!node.anchor; -class NodeBase { - constructor(type) { - Object.defineProperty(this, NODE_TYPE, { value: type }); - } - /** Create a copy of this node. */ - clone() { - const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this)); - if (this.range) - copy.range = this.range.slice(); - return copy; - } -} - -exports.ALIAS = ALIAS; -exports.DOC = DOC; -exports.MAP = MAP; -exports.NODE_TYPE = NODE_TYPE; -exports.NodeBase = NodeBase; -exports.PAIR = PAIR; -exports.SCALAR = SCALAR; -exports.SEQ = SEQ; -exports.hasAnchor = hasAnchor; -exports.isAlias = isAlias; -exports.isCollection = isCollection; -exports.isDocument = isDocument; -exports.isMap = isMap; -exports.isNode = isNode; -exports.isPair = isPair; -exports.isScalar = isScalar; -exports.isSeq = isSeq; - - -/***/ }), - -/***/ 3497: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var createNode = __nccwpck_require__(9807); -var stringifyPair = __nccwpck_require__(10); -var addPairToJSMap = __nccwpck_require__(5193); -var Node = __nccwpck_require__(9752); - -function createPair(key, value, ctx) { - const k = createNode.createNode(key, undefined, ctx); - const v = createNode.createNode(value, undefined, ctx); - return new Pair(k, v); -} -class Pair { - constructor(key, value = null) { - Object.defineProperty(this, Node.NODE_TYPE, { value: Node.PAIR }); - this.key = key; - this.value = value; - } - clone(schema) { - let { key, value } = this; - if (Node.isNode(key)) - key = key.clone(schema); - if (Node.isNode(value)) - value = value.clone(schema); - return new Pair(key, value); - } - toJSON(_, ctx) { - const pair = ctx?.mapAsMap ? new Map() : {}; - return addPairToJSMap.addPairToJSMap(ctx, pair, this); - } - toString(ctx, onComment, onChompKeep) { - return ctx?.doc - ? 
stringifyPair.stringifyPair(this, ctx, onComment, onChompKeep) - : JSON.stringify(this); - } -} - -exports.Pair = Pair; -exports.createPair = createPair; - - -/***/ }), - -/***/ 8160: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); -var toJS = __nccwpck_require__(8842); - -const isScalarValue = (value) => !value || (typeof value !== 'function' && typeof value !== 'object'); -class Scalar extends Node.NodeBase { - constructor(value) { - super(Node.SCALAR); - this.value = value; - } - toJSON(arg, ctx) { - return ctx?.keep ? this.value : toJS.toJS(this.value, arg, ctx); - } - toString() { - return String(this.value); - } -} -Scalar.BLOCK_FOLDED = 'BLOCK_FOLDED'; -Scalar.BLOCK_LITERAL = 'BLOCK_LITERAL'; -Scalar.PLAIN = 'PLAIN'; -Scalar.QUOTE_DOUBLE = 'QUOTE_DOUBLE'; -Scalar.QUOTE_SINGLE = 'QUOTE_SINGLE'; - -exports.Scalar = Scalar; -exports.isScalarValue = isScalarValue; - - -/***/ }), - -/***/ 6761: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var stringifyCollection = __nccwpck_require__(3541); -var addPairToJSMap = __nccwpck_require__(5193); -var Collection = __nccwpck_require__(9797); -var Node = __nccwpck_require__(9752); -var Pair = __nccwpck_require__(3497); -var Scalar = __nccwpck_require__(8160); - -function findPair(items, key) { - const k = Node.isScalar(key) ? key.value : key; - for (const it of items) { - if (Node.isPair(it)) { - if (it.key === key || it.key === k) - return it; - if (Node.isScalar(it.key) && it.key.value === k) - return it; - } - } - return undefined; -} -class YAMLMap extends Collection.Collection { - constructor(schema) { - super(Node.MAP, schema); - this.items = []; - } - static get tagName() { - return 'tag:yaml.org,2002:map'; - } - /** - * Adds a value to the collection. - * - * @param overwrite - If not set `true`, using a key that is already in the - * collection will throw. Otherwise, overwrites the previous value. - */ - add(pair, overwrite) { - let _pair; - if (Node.isPair(pair)) - _pair = pair; - else if (!pair || typeof pair !== 'object' || !('key' in pair)) { - // In TypeScript, this never happens. - _pair = new Pair.Pair(pair, pair?.value); - } - else - _pair = new Pair.Pair(pair.key, pair.value); - const prev = findPair(this.items, _pair.key); - const sortEntries = this.schema?.sortMapEntries; - if (prev) { - if (!overwrite) - throw new Error(`Key ${_pair.key} already set`); - // For scalars, keep the old node & its comments and anchors - if (Node.isScalar(prev.value) && Scalar.isScalarValue(_pair.value)) - prev.value.value = _pair.value; - else - prev.value = _pair.value; - } - else if (sortEntries) { - const i = this.items.findIndex(item => sortEntries(_pair, item) < 0); - if (i === -1) - this.items.push(_pair); - else - this.items.splice(i, 0, _pair); - } - else { - this.items.push(_pair); - } - } - delete(key) { - const it = findPair(this.items, key); - if (!it) - return false; - const del = this.items.splice(this.items.indexOf(it), 1); - return del.length > 0; - } - get(key, keepScalar) { - const it = findPair(this.items, key); - const node = it?.value; - return (!keepScalar && Node.isScalar(node) ? node.value : node) ?? 
undefined; - } - has(key) { - return !!findPair(this.items, key); - } - set(key, value) { - this.add(new Pair.Pair(key, value), true); - } - /** - * @param ctx - Conversion context, originally set in Document#toJS() - * @param {Class} Type - If set, forces the returned collection type - * @returns Instance of Type, Map, or Object - */ - toJSON(_, ctx, Type) { - const map = Type ? new Type() : ctx?.mapAsMap ? new Map() : {}; - if (ctx?.onCreate) - ctx.onCreate(map); - for (const item of this.items) - addPairToJSMap.addPairToJSMap(ctx, map, item); - return map; - } - toString(ctx, onComment, onChompKeep) { - if (!ctx) - return JSON.stringify(this); - for (const item of this.items) { - if (!Node.isPair(item)) - throw new Error(`Map items must all be pairs; found ${JSON.stringify(item)} instead`); - } - if (!ctx.allNullValues && this.hasAllNullValues(false)) - ctx = Object.assign({}, ctx, { allNullValues: true }); - return stringifyCollection.stringifyCollection(this, ctx, { - blockItemPrefix: '', - flowChars: { start: '{', end: '}' }, - itemIndent: ctx.indent || '', - onChompKeep, - onComment - }); - } -} - -exports.YAMLMap = YAMLMap; -exports.findPair = findPair; - - -/***/ }), - -/***/ 3810: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var stringifyCollection = __nccwpck_require__(3541); -var Collection = __nccwpck_require__(9797); -var Node = __nccwpck_require__(9752); -var Scalar = __nccwpck_require__(8160); -var toJS = __nccwpck_require__(8842); - -class YAMLSeq extends Collection.Collection { - constructor(schema) { - super(Node.SEQ, schema); - this.items = []; - } - static get tagName() { - return 'tag:yaml.org,2002:seq'; - } - add(value) { - this.items.push(value); - } - /** - * Removes a value from the collection. - * - * `key` must contain a representation of an integer for this to succeed. - * It may be wrapped in a `Scalar`. - * - * @returns `true` if the item was found and removed. - */ - delete(key) { - const idx = asItemIndex(key); - if (typeof idx !== 'number') - return false; - const del = this.items.splice(idx, 1); - return del.length > 0; - } - get(key, keepScalar) { - const idx = asItemIndex(key); - if (typeof idx !== 'number') - return undefined; - const it = this.items[idx]; - return !keepScalar && Node.isScalar(it) ? it.value : it; - } - /** - * Checks if the collection includes a value with the key `key`. - * - * `key` must contain a representation of an integer for this to succeed. - * It may be wrapped in a `Scalar`. - */ - has(key) { - const idx = asItemIndex(key); - return typeof idx === 'number' && idx < this.items.length; - } - /** - * Sets a value in this collection. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. - * - * If `key` does not contain a representation of an integer, this will throw. - * It may be wrapped in a `Scalar`. 
- */ - set(key, value) { - const idx = asItemIndex(key); - if (typeof idx !== 'number') - throw new Error(`Expected a valid index, not ${key}.`); - const prev = this.items[idx]; - if (Node.isScalar(prev) && Scalar.isScalarValue(value)) - prev.value = value; - else - this.items[idx] = value; - } - toJSON(_, ctx) { - const seq = []; - if (ctx?.onCreate) - ctx.onCreate(seq); - let i = 0; - for (const item of this.items) - seq.push(toJS.toJS(item, String(i++), ctx)); - return seq; - } - toString(ctx, onComment, onChompKeep) { - if (!ctx) - return JSON.stringify(this); - return stringifyCollection.stringifyCollection(this, ctx, { - blockItemPrefix: '- ', - flowChars: { start: '[', end: ']' }, - itemIndent: (ctx.indent || '') + ' ', - onChompKeep, - onComment - }); - } -} -function asItemIndex(key) { - let idx = Node.isScalar(key) ? key.value : key; - if (idx && typeof idx === 'string') - idx = Number(idx); - return typeof idx === 'number' && Number.isInteger(idx) && idx >= 0 - ? idx - : null; -} - -exports.YAMLSeq = YAMLSeq; - - -/***/ }), - -/***/ 5193: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var log = __nccwpck_require__(469); -var stringify = __nccwpck_require__(1922); -var Node = __nccwpck_require__(9752); -var Scalar = __nccwpck_require__(8160); -var toJS = __nccwpck_require__(8842); - -const MERGE_KEY = '<<'; -function addPairToJSMap(ctx, map, { key, value }) { - if (ctx?.doc.schema.merge && isMergeKey(key)) { - value = Node.isAlias(value) ? value.resolve(ctx.doc) : value; - if (Node.isSeq(value)) - for (const it of value.items) - mergeToJSMap(ctx, map, it); - else if (Array.isArray(value)) - for (const it of value) - mergeToJSMap(ctx, map, it); - else - mergeToJSMap(ctx, map, value); - } - else { - const jsKey = toJS.toJS(key, '', ctx); - if (map instanceof Map) { - map.set(jsKey, toJS.toJS(value, jsKey, ctx)); - } - else if (map instanceof Set) { - map.add(jsKey); - } - else { - const stringKey = stringifyKey(key, jsKey, ctx); - const jsValue = toJS.toJS(value, stringKey, ctx); - if (stringKey in map) - Object.defineProperty(map, stringKey, { - value: jsValue, - writable: true, - enumerable: true, - configurable: true - }); - else - map[stringKey] = jsValue; - } - } - return map; -} -const isMergeKey = (key) => key === MERGE_KEY || - (Node.isScalar(key) && - key.value === MERGE_KEY && - (!key.type || key.type === Scalar.Scalar.PLAIN)); -// If the value associated with a merge key is a single mapping node, each of -// its key/value pairs is inserted into the current mapping, unless the key -// already exists in it. If the value associated with the merge key is a -// sequence, then this sequence is expected to contain mapping nodes and each -// of these nodes is merged in turn according to its order in the sequence. -// Keys in mapping nodes earlier in the sequence override keys specified in -// later mapping nodes. -- http://yaml.org/type/merge.html -function mergeToJSMap(ctx, map, value) { - const source = ctx && Node.isAlias(value) ? 
value.resolve(ctx.doc) : value; - if (!Node.isMap(source)) - throw new Error('Merge sources must be maps or map aliases'); - const srcMap = source.toJSON(null, ctx, Map); - for (const [key, value] of srcMap) { - if (map instanceof Map) { - if (!map.has(key)) - map.set(key, value); - } - else if (map instanceof Set) { - map.add(key); - } - else if (!Object.prototype.hasOwnProperty.call(map, key)) { - Object.defineProperty(map, key, { - value, - writable: true, - enumerable: true, - configurable: true - }); - } - } - return map; -} -function stringifyKey(key, jsKey, ctx) { - if (jsKey === null) - return ''; - if (typeof jsKey !== 'object') - return String(jsKey); - if (Node.isNode(key) && ctx && ctx.doc) { - const strCtx = stringify.createStringifyContext(ctx.doc, {}); - strCtx.anchors = new Set(); - for (const node of ctx.anchors.keys()) - strCtx.anchors.add(node.anchor); - strCtx.inFlow = true; - strCtx.inStringifyKey = true; - const strKey = key.toString(strCtx); - if (!ctx.mapKeyWarned) { - let jsonStr = JSON.stringify(strKey); - if (jsonStr.length > 40) - jsonStr = jsonStr.substring(0, 36) + '..."'; - log.warn(ctx.doc.options.logLevel, `Keys with collection values will be stringified due to JS Object restrictions: ${jsonStr}. Set mapAsMap: true to use object keys.`); - ctx.mapKeyWarned = true; - } - return strKey; - } - return JSON.stringify(jsKey); -} - -exports.addPairToJSMap = addPairToJSMap; - - -/***/ }), - -/***/ 8842: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); - -/** - * Recursively convert any node or its contents to native JavaScript - * - * @param value - The input value - * @param arg - If `value` defines a `toJSON()` method, use this - * as its first argument - * @param ctx - Conversion context, originally set in Document#toJS(). If - * `{ keep: true }` is not set, output should be suitable for JSON - * stringification. - */ -function toJS(value, arg, ctx) { - // eslint-disable-next-line @typescript-eslint/no-unsafe-return - if (Array.isArray(value)) - return value.map((v, i) => toJS(v, String(i), ctx)); - if (value && typeof value.toJSON === 'function') { - // eslint-disable-next-line @typescript-eslint/no-unsafe-call - if (!ctx || !Node.hasAnchor(value)) - return value.toJSON(arg, ctx); - const data = { aliasCount: 0, count: 1, res: undefined }; - ctx.anchors.set(value, data); - ctx.onCreate = res => { - data.res = res; - delete ctx.onCreate; - }; - const res = value.toJSON(arg, ctx); - if (ctx.onCreate) - ctx.onCreate(res); - return res; - } - if (typeof value === 'bigint' && !ctx?.keep) - return Number(value); - return value; -} - -exports.toJS = toJS; - - -/***/ }), - -/***/ 4000: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var resolveBlockScalar = __nccwpck_require__(8172); -var resolveFlowScalar = __nccwpck_require__(6390); -var errors = __nccwpck_require__(3305); -var stringifyString = __nccwpck_require__(6084); - -function resolveAsScalar(token, strict = true, onError) { - if (token) { - const _onError = (pos, code, message) => { - const offset = typeof pos === 'number' ? pos : Array.isArray(pos) ? 
pos[0] : pos.offset; - if (onError) - onError(offset, code, message); - else - throw new errors.YAMLParseError([offset, offset + 1], code, message); - }; - switch (token.type) { - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': - return resolveFlowScalar.resolveFlowScalar(token, strict, _onError); - case 'block-scalar': - return resolveBlockScalar.resolveBlockScalar(token, strict, _onError); - } - } - return null; -} -/** - * Create a new scalar token with `value` - * - * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`, - * as this function does not support any schema operations and won't check for such conflicts. - * - * @param value The string representation of the value, which will have its content properly indented. - * @param context.end Comments and whitespace after the end of the value, or after the block scalar header. If undefined, a newline will be added. - * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value. - * @param context.indent The indent level of the token. - * @param context.inFlow Is this scalar within a flow collection? This may affect the resolved type of the token's value. - * @param context.offset The offset position of the token. - * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`. - */ -function createScalarToken(value, context) { - const { implicitKey = false, indent, inFlow = false, offset = -1, type = 'PLAIN' } = context; - const source = stringifyString.stringifyString({ type, value }, { - implicitKey, - indent: indent > 0 ? ' '.repeat(indent) : '', - inFlow, - options: { blockQuote: true, lineWidth: -1 } - }); - const end = context.end ?? [ - { type: 'newline', offset: -1, indent, source: '\n' } - ]; - switch (source[0]) { - case '|': - case '>': { - const he = source.indexOf('\n'); - const head = source.substring(0, he); - const body = source.substring(he + 1) + '\n'; - const props = [ - { type: 'block-scalar-header', offset, indent, source: head } - ]; - if (!addEndtoBlockProps(props, end)) - props.push({ type: 'newline', offset: -1, indent, source: '\n' }); - return { type: 'block-scalar', offset, indent, props, source: body }; - } - case '"': - return { type: 'double-quoted-scalar', offset, indent, source, end }; - case "'": - return { type: 'single-quoted-scalar', offset, indent, source, end }; - default: - return { type: 'scalar', offset, indent, source, end }; - } -} -/** - * Set the value of `token` to the given string `value`, overwriting any previous contents and type that it may have. - * - * Best efforts are made to retain any comments previously associated with the `token`, - * though all contents within a collection's `items` will be overwritten. - * - * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`, - * as this function does not support any schema operations and won't check for such conflicts. - * - * @param token Any token. If it does not include an `indent` value, the value will be stringified as if it were an implicit key. - * @param value The string representation of the value, which will have its content properly indented. - * @param context.afterKey In most cases, values after a key should have an additional level of indentation. 
- * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value. - * @param context.inFlow Being within a flow collection may affect the resolved type of the token's value. - * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`. - */ -function setScalarValue(token, value, context = {}) { - let { afterKey = false, implicitKey = false, inFlow = false, type } = context; - let indent = 'indent' in token ? token.indent : null; - if (afterKey && typeof indent === 'number') - indent += 2; - if (!type) - switch (token.type) { - case 'single-quoted-scalar': - type = 'QUOTE_SINGLE'; - break; - case 'double-quoted-scalar': - type = 'QUOTE_DOUBLE'; - break; - case 'block-scalar': { - const header = token.props[0]; - if (header.type !== 'block-scalar-header') - throw new Error('Invalid block scalar header'); - type = header.source[0] === '>' ? 'BLOCK_FOLDED' : 'BLOCK_LITERAL'; - break; - } - default: - type = 'PLAIN'; - } - const source = stringifyString.stringifyString({ type, value }, { - implicitKey: implicitKey || indent === null, - indent: indent !== null && indent > 0 ? ' '.repeat(indent) : '', - inFlow, - options: { blockQuote: true, lineWidth: -1 } - }); - switch (source[0]) { - case '|': - case '>': - setBlockScalarValue(token, source); - break; - case '"': - setFlowScalarValue(token, source, 'double-quoted-scalar'); - break; - case "'": - setFlowScalarValue(token, source, 'single-quoted-scalar'); - break; - default: - setFlowScalarValue(token, source, 'scalar'); - } -} -function setBlockScalarValue(token, source) { - const he = source.indexOf('\n'); - const head = source.substring(0, he); - const body = source.substring(he + 1) + '\n'; - if (token.type === 'block-scalar') { - const header = token.props[0]; - if (header.type !== 'block-scalar-header') - throw new Error('Invalid block scalar header'); - header.source = head; - token.source = body; - } - else { - const { offset } = token; - const indent = 'indent' in token ? token.indent : -1; - const props = [ - { type: 'block-scalar-header', offset, indent, source: head } - ]; - if (!addEndtoBlockProps(props, 'end' in token ? 
token.end : undefined)) - props.push({ type: 'newline', offset: -1, indent, source: '\n' }); - for (const key of Object.keys(token)) - if (key !== 'type' && key !== 'offset') - delete token[key]; - Object.assign(token, { type: 'block-scalar', indent, props, source: body }); - } -} -/** @returns `true` if last token is a newline */ -function addEndtoBlockProps(props, end) { - if (end) - for (const st of end) - switch (st.type) { - case 'space': - case 'comment': - props.push(st); - break; - case 'newline': - props.push(st); - return true; - } - return false; -} -function setFlowScalarValue(token, source, type) { - switch (token.type) { - case 'scalar': - case 'double-quoted-scalar': - case 'single-quoted-scalar': - token.type = type; - token.source = source; - break; - case 'block-scalar': { - const end = token.props.slice(1); - let oa = source.length; - if (token.props[0].type === 'block-scalar-header') - oa -= token.props[0].source.length; - for (const tok of end) - tok.offset += oa; - delete token.props; - Object.assign(token, { type, source, end }); - break; - } - case 'block-map': - case 'block-seq': { - const offset = token.offset + source.length; - const nl = { type: 'newline', offset, indent: token.indent, source: '\n' }; - delete token.items; - Object.assign(token, { type, source, end: [nl] }); - break; - } - default: { - const indent = 'indent' in token ? token.indent : -1; - const end = 'end' in token && Array.isArray(token.end) - ? token.end.filter(st => st.type === 'space' || - st.type === 'comment' || - st.type === 'newline') - : []; - for (const key of Object.keys(token)) - if (key !== 'type' && key !== 'offset') - delete token[key]; - Object.assign(token, { type, indent, source, end }); - } - } -} - -exports.createScalarToken = createScalarToken; -exports.resolveAsScalar = resolveAsScalar; -exports.setScalarValue = setScalarValue; - - -/***/ }), - -/***/ 28: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -/** - * Stringify a CST document, token, or collection item - * - * Fair warning: This applies no validation whatsoever, and - * simply concatenates the sources in their logical order. - */ -const stringify = (cst) => 'type' in cst ? 
stringifyToken(cst) : stringifyItem(cst); -function stringifyToken(token) { - switch (token.type) { - case 'block-scalar': { - let res = ''; - for (const tok of token.props) - res += stringifyToken(tok); - return res + token.source; - } - case 'block-map': - case 'block-seq': { - let res = ''; - for (const item of token.items) - res += stringifyItem(item); - return res; - } - case 'flow-collection': { - let res = token.start.source; - for (const item of token.items) - res += stringifyItem(item); - for (const st of token.end) - res += st.source; - return res; - } - case 'document': { - let res = stringifyItem(token); - if (token.end) - for (const st of token.end) - res += st.source; - return res; - } - default: { - let res = token.source; - if ('end' in token && token.end) - for (const st of token.end) - res += st.source; - return res; - } - } -} -function stringifyItem({ start, key, sep, value }) { - let res = ''; - for (const st of start) - res += st.source; - if (key) - res += stringifyToken(key); - if (sep) - for (const st of sep) - res += st.source; - if (value) - res += stringifyToken(value); - return res; -} - -exports.stringify = stringify; - - -/***/ }), - -/***/ 1621: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -const BREAK = Symbol('break visit'); -const SKIP = Symbol('skip children'); -const REMOVE = Symbol('remove item'); -/** - * Apply a visitor to a CST document or item. - * - * Walks through the tree (depth-first) starting from the root, calling a - * `visitor` function with two arguments when entering each item: - * - `item`: The current item, which included the following members: - * - `start: SourceToken[]` – Source tokens before the key or value, - * possibly including its anchor or tag. - * - `key?: Token | null` – Set for pair values. May then be `null`, if - * the key before the `:` separator is empty. - * - `sep?: SourceToken[]` – Source tokens between the key and the value, - * which should include the `:` map value indicator if `value` is set. - * - `value?: Token` – The value of a sequence item, or of a map pair. - * - `path`: The steps from the root to the current node, as an array of - * `['key' | 'value', number]` tuples. - * - * The return value of the visitor may be used to control the traversal: - * - `undefined` (default): Do nothing and continue - * - `visit.SKIP`: Do not visit the children of this token, continue with - * next sibling - * - `visit.BREAK`: Terminate traversal completely - * - `visit.REMOVE`: Remove the current item, then continue with the next one - * - `number`: Set the index of the next step. This is useful especially if - * the index of the current token has changed. - * - `function`: Define the next visitor for this item. After the original - * visitor is called on item entry, next visitors are called after handling - * a non-empty `key` and when exiting the item. - */ -function visit(cst, visitor) { - if ('type' in cst && cst.type === 'document') - cst = { start: cst.start, value: cst.value }; - _visit(Object.freeze([]), cst, visitor); -} -// Without the `as symbol` casts, TS declares these in the `visit` -// namespace using `var`, but then complains about that because -// `unique symbol` must be `const`. 
-/** Terminate visit traversal completely */ -visit.BREAK = BREAK; -/** Do not visit the children of the current item */ -visit.SKIP = SKIP; -/** Remove the current item */ -visit.REMOVE = REMOVE; -/** Find the item at `path` from `cst` as the root */ -visit.itemAtPath = (cst, path) => { - let item = cst; - for (const [field, index] of path) { - const tok = item?.[field]; - if (tok && 'items' in tok) { - item = tok.items[index]; - } - else - return undefined; - } - return item; -}; -/** - * Get the immediate parent collection of the item at `path` from `cst` as the root. - * - * Throws an error if the collection is not found, which should never happen if the item itself exists. - */ -visit.parentCollection = (cst, path) => { - const parent = visit.itemAtPath(cst, path.slice(0, -1)); - const field = path[path.length - 1][0]; - const coll = parent?.[field]; - if (coll && 'items' in coll) - return coll; - throw new Error('Parent collection not found'); -}; -function _visit(path, item, visitor) { - let ctrl = visitor(item, path); - if (typeof ctrl === 'symbol') - return ctrl; - for (const field of ['key', 'value']) { - const token = item[field]; - if (token && 'items' in token) { - for (let i = 0; i < token.items.length; ++i) { - const ci = _visit(Object.freeze(path.concat([[field, i]])), token.items[i], visitor); - if (typeof ci === 'number') - i = ci - 1; - else if (ci === BREAK) - return BREAK; - else if (ci === REMOVE) { - token.items.splice(i, 1); - i -= 1; - } - } - if (typeof ctrl === 'function' && field === 'key') - ctrl = ctrl(item, path); - } - } - return typeof ctrl === 'function' ? ctrl(item, path) : ctrl; -} - -exports.visit = visit; - - -/***/ }), - -/***/ 9726: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var cstScalar = __nccwpck_require__(4000); -var cstStringify = __nccwpck_require__(28); -var cstVisit = __nccwpck_require__(1621); - -/** The byte order mark */ -const BOM = '\u{FEFF}'; -/** Start of doc-mode */ -const DOCUMENT = '\x02'; // C0: Start of Text -/** Unexpected end of flow-mode */ -const FLOW_END = '\x18'; // C0: Cancel -/** Next token is a scalar value */ -const SCALAR = '\x1f'; // C0: Unit Separator -/** @returns `true` if `token` is a flow or block collection */ -const isCollection = (token) => !!token && 'items' in token; -/** @returns `true` if `token` is a flow or block scalar; not an alias */ -const isScalar = (token) => !!token && - (token.type === 'scalar' || - token.type === 'single-quoted-scalar' || - token.type === 'double-quoted-scalar' || - token.type === 'block-scalar'); -/* istanbul ignore next */ -/** Get a printable representation of a lexer token */ -function prettyToken(token) { - switch (token) { - case BOM: - return ''; - case DOCUMENT: - return ''; - case FLOW_END: - return ''; - case SCALAR: - return ''; - default: - return JSON.stringify(token); - } -} -/** Identify the type of a lexer token. May return `null` for unknown tokens. 
*/ -function tokenType(source) { - switch (source) { - case BOM: - return 'byte-order-mark'; - case DOCUMENT: - return 'doc-mode'; - case FLOW_END: - return 'flow-error-end'; - case SCALAR: - return 'scalar'; - case '---': - return 'doc-start'; - case '...': - return 'doc-end'; - case '': - case '\n': - case '\r\n': - return 'newline'; - case '-': - return 'seq-item-ind'; - case '?': - return 'explicit-key-ind'; - case ':': - return 'map-value-ind'; - case '{': - return 'flow-map-start'; - case '}': - return 'flow-map-end'; - case '[': - return 'flow-seq-start'; - case ']': - return 'flow-seq-end'; - case ',': - return 'comma'; - } - switch (source[0]) { - case ' ': - case '\t': - return 'space'; - case '#': - return 'comment'; - case '%': - return 'directive-line'; - case '*': - return 'alias'; - case '&': - return 'anchor'; - case '!': - return 'tag'; - case "'": - return 'single-quoted-scalar'; - case '"': - return 'double-quoted-scalar'; - case '|': - case '>': - return 'block-scalar-header'; - } - return null; -} - -exports.createScalarToken = cstScalar.createScalarToken; -exports.resolveAsScalar = cstScalar.resolveAsScalar; -exports.setScalarValue = cstScalar.setScalarValue; -exports.stringify = cstStringify.stringify; -exports.visit = cstVisit.visit; -exports.BOM = BOM; -exports.DOCUMENT = DOCUMENT; -exports.FLOW_END = FLOW_END; -exports.SCALAR = SCALAR; -exports.isCollection = isCollection; -exports.isScalar = isScalar; -exports.prettyToken = prettyToken; -exports.tokenType = tokenType; - - -/***/ }), - -/***/ 7263: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var cst = __nccwpck_require__(9726); - -/* -START -> stream - -stream - directive -> line-end -> stream - indent + line-end -> stream - [else] -> line-start - -line-end - comment -> line-end - newline -> . - input-end -> END - -line-start - doc-start -> doc - doc-end -> stream - [else] -> indent -> block-start - -block-start - seq-item-start -> block-start - explicit-key-start -> block-start - map-value-start -> block-start - [else] -> doc - -doc - line-end -> line-start - spaces -> doc - anchor -> doc - tag -> doc - flow-start -> flow -> doc - flow-end -> error -> doc - seq-item-start -> error -> doc - explicit-key-start -> error -> doc - map-value-start -> doc - alias -> doc - quote-start -> quoted-scalar -> doc - block-scalar-header -> line-end -> block-scalar(min) -> line-start - [else] -> plain-scalar(false, min) -> doc - -flow - line-end -> flow - spaces -> flow - anchor -> flow - tag -> flow - flow-start -> flow -> flow - flow-end -> . - seq-item-start -> error -> flow - explicit-key-start -> flow - map-value-start -> flow - alias -> flow - quote-start -> quoted-scalar -> flow - comma -> flow - [else] -> plain-scalar(true, 0) -> flow - -quoted-scalar - quote-end -> . - [else] -> quoted-scalar - -block-scalar(min) - newline + peek(indent < min) -> . - [else] -> block-scalar(min) - -plain-scalar(is-flow, min) - scalar-end(is-flow) -> . - peek(newline + (indent < min)) -> . 
- [else] -> plain-scalar(min) -*/ -function isEmpty(ch) { - switch (ch) { - case undefined: - case ' ': - case '\n': - case '\r': - case '\t': - return true; - default: - return false; - } -} -const hexDigits = '0123456789ABCDEFabcdef'.split(''); -const tagChars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-#;/?:@&=+$_.!~*'()".split(''); -const invalidFlowScalarChars = ',[]{}'.split(''); -const invalidAnchorChars = ' ,[]{}\n\r\t'.split(''); -const isNotAnchorChar = (ch) => !ch || invalidAnchorChars.includes(ch); -/** - * Splits an input string into lexical tokens, i.e. smaller strings that are - * easily identifiable by `tokens.tokenType()`. - * - * Lexing starts always in a "stream" context. Incomplete input may be buffered - * until a complete token can be emitted. - * - * In addition to slices of the original input, the following control characters - * may also be emitted: - * - * - `\x02` (Start of Text): A document starts with the next token - * - `\x18` (Cancel): Unexpected end of flow-mode (indicates an error) - * - `\x1f` (Unit Separator): Next token is a scalar value - * - `\u{FEFF}` (Byte order mark): Emitted separately outside documents - */ -class Lexer { - constructor() { - /** - * Flag indicating whether the end of the current buffer marks the end of - * all input - */ - this.atEnd = false; - /** - * Explicit indent set in block scalar header, as an offset from the current - * minimum indent, so e.g. set to 1 from a header `|2+`. Set to -1 if not - * explicitly set. - */ - this.blockScalarIndent = -1; - /** - * Block scalars that include a + (keep) chomping indicator in their header - * include trailing empty lines, which are otherwise excluded from the - * scalar's contents. - */ - this.blockScalarKeep = false; - /** Current input */ - this.buffer = ''; - /** - * Flag noting whether the map value indicator : can immediately follow this - * node within a flow context. - */ - this.flowKey = false; - /** Count of surrounding flow collection levels. */ - this.flowLevel = 0; - /** - * Minimum level of indentation required for next lines to be parsed as a - * part of the current scalar value. - */ - this.indentNext = 0; - /** Indentation level of the current line. */ - this.indentValue = 0; - /** Position of the next \n character. */ - this.lineEndPos = null; - /** Stores the state of the lexer if reaching the end of incpomplete input */ - this.next = null; - /** A pointer to `buffer`; the current position of the lexer. */ - this.pos = 0; - } - /** - * Generate YAML tokens from the `source` string. If `incomplete`, - * a part of the last line may be left as a buffer for the next call. - * - * @returns A generator of lexical tokens - */ - *lex(source, incomplete = false) { - if (source) { - this.buffer = this.buffer ? this.buffer + source : source; - this.lineEndPos = null; - } - this.atEnd = !incomplete; - let next = this.next ?? 
'stream'; - while (next && (incomplete || this.hasChars(1))) - next = yield* this.parseNext(next); - } - atLineEnd() { - let i = this.pos; - let ch = this.buffer[i]; - while (ch === ' ' || ch === '\t') - ch = this.buffer[++i]; - if (!ch || ch === '#' || ch === '\n') - return true; - if (ch === '\r') - return this.buffer[i + 1] === '\n'; - return false; - } - charAt(n) { - return this.buffer[this.pos + n]; - } - continueScalar(offset) { - let ch = this.buffer[offset]; - if (this.indentNext > 0) { - let indent = 0; - while (ch === ' ') - ch = this.buffer[++indent + offset]; - if (ch === '\r') { - const next = this.buffer[indent + offset + 1]; - if (next === '\n' || (!next && !this.atEnd)) - return offset + indent + 1; - } - return ch === '\n' || indent >= this.indentNext || (!ch && !this.atEnd) - ? offset + indent - : -1; - } - if (ch === '-' || ch === '.') { - const dt = this.buffer.substr(offset, 3); - if ((dt === '---' || dt === '...') && isEmpty(this.buffer[offset + 3])) - return -1; - } - return offset; - } - getLine() { - let end = this.lineEndPos; - if (typeof end !== 'number' || (end !== -1 && end < this.pos)) { - end = this.buffer.indexOf('\n', this.pos); - this.lineEndPos = end; - } - if (end === -1) - return this.atEnd ? this.buffer.substring(this.pos) : null; - if (this.buffer[end - 1] === '\r') - end -= 1; - return this.buffer.substring(this.pos, end); - } - hasChars(n) { - return this.pos + n <= this.buffer.length; - } - setNext(state) { - this.buffer = this.buffer.substring(this.pos); - this.pos = 0; - this.lineEndPos = null; - this.next = state; - return null; - } - peek(n) { - return this.buffer.substr(this.pos, n); - } - *parseNext(next) { - switch (next) { - case 'stream': - return yield* this.parseStream(); - case 'line-start': - return yield* this.parseLineStart(); - case 'block-start': - return yield* this.parseBlockStart(); - case 'doc': - return yield* this.parseDocument(); - case 'flow': - return yield* this.parseFlowCollection(); - case 'quoted-scalar': - return yield* this.parseQuotedScalar(); - case 'block-scalar': - return yield* this.parseBlockScalar(); - case 'plain-scalar': - return yield* this.parsePlainScalar(); - } - } - *parseStream() { - let line = this.getLine(); - if (line === null) - return this.setNext('stream'); - if (line[0] === cst.BOM) { - yield* this.pushCount(1); - line = line.substring(1); - } - if (line[0] === '%') { - let dirEnd = line.length; - const cs = line.indexOf('#'); - if (cs !== -1) { - const ch = line[cs - 1]; - if (ch === ' ' || ch === '\t') - dirEnd = cs - 1; - } - while (true) { - const ch = line[dirEnd - 1]; - if (ch === ' ' || ch === '\t') - dirEnd -= 1; - else - break; - } - const n = (yield* this.pushCount(dirEnd)) + (yield* this.pushSpaces(true)); - yield* this.pushCount(line.length - n); // possible comment - this.pushNewline(); - return 'stream'; - } - if (this.atLineEnd()) { - const sp = yield* this.pushSpaces(true); - yield* this.pushCount(line.length - sp); - yield* this.pushNewline(); - return 'stream'; - } - yield cst.DOCUMENT; - return yield* this.parseLineStart(); - } - *parseLineStart() { - const ch = this.charAt(0); - if (!ch && !this.atEnd) - return this.setNext('line-start'); - if (ch === '-' || ch === '.') { - if (!this.atEnd && !this.hasChars(4)) - return this.setNext('line-start'); - const s = this.peek(3); - if (s === '---' && isEmpty(this.charAt(3))) { - yield* this.pushCount(3); - this.indentValue = 0; - this.indentNext = 0; - return 'doc'; - } - else if (s === '...' 
&& isEmpty(this.charAt(3))) { - yield* this.pushCount(3); - return 'stream'; - } - } - this.indentValue = yield* this.pushSpaces(false); - if (this.indentNext > this.indentValue && !isEmpty(this.charAt(1))) - this.indentNext = this.indentValue; - return yield* this.parseBlockStart(); - } - *parseBlockStart() { - const [ch0, ch1] = this.peek(2); - if (!ch1 && !this.atEnd) - return this.setNext('block-start'); - if ((ch0 === '-' || ch0 === '?' || ch0 === ':') && isEmpty(ch1)) { - const n = (yield* this.pushCount(1)) + (yield* this.pushSpaces(true)); - this.indentNext = this.indentValue + 1; - this.indentValue += n; - return yield* this.parseBlockStart(); - } - return 'doc'; - } - *parseDocument() { - yield* this.pushSpaces(true); - const line = this.getLine(); - if (line === null) - return this.setNext('doc'); - let n = yield* this.pushIndicators(); - switch (line[n]) { - case '#': - yield* this.pushCount(line.length - n); - // fallthrough - case undefined: - yield* this.pushNewline(); - return yield* this.parseLineStart(); - case '{': - case '[': - yield* this.pushCount(1); - this.flowKey = false; - this.flowLevel = 1; - return 'flow'; - case '}': - case ']': - // this is an error - yield* this.pushCount(1); - return 'doc'; - case '*': - yield* this.pushUntil(isNotAnchorChar); - return 'doc'; - case '"': - case "'": - return yield* this.parseQuotedScalar(); - case '|': - case '>': - n += yield* this.parseBlockScalarHeader(); - n += yield* this.pushSpaces(true); - yield* this.pushCount(line.length - n); - yield* this.pushNewline(); - return yield* this.parseBlockScalar(); - default: - return yield* this.parsePlainScalar(); - } - } - *parseFlowCollection() { - let nl, sp; - let indent = -1; - do { - nl = yield* this.pushNewline(); - if (nl > 0) { - sp = yield* this.pushSpaces(false); - this.indentValue = indent = sp; - } - else { - sp = 0; - } - sp += yield* this.pushSpaces(true); - } while (nl + sp > 0); - const line = this.getLine(); - if (line === null) - return this.setNext('flow'); - if ((indent !== -1 && indent < this.indentNext && line[0] !== '#') || - (indent === 0 && - (line.startsWith('---') || line.startsWith('...')) && - isEmpty(line[3]))) { - // Allowing for the terminal ] or } at the same (rather than greater) - // indent level as the initial [ or { is technically invalid, but - // failing here would be surprising to users. - const atFlowEndMarker = indent === this.indentNext - 1 && - this.flowLevel === 1 && - (line[0] === ']' || line[0] === '}'); - if (!atFlowEndMarker) { - // this is an error - this.flowLevel = 0; - yield cst.FLOW_END; - return yield* this.parseLineStart(); - } - } - let n = 0; - while (line[n] === ',') { - n += yield* this.pushCount(1); - n += yield* this.pushSpaces(true); - this.flowKey = false; - } - n += yield* this.pushIndicators(); - switch (line[n]) { - case undefined: - return 'flow'; - case '#': - yield* this.pushCount(line.length - n); - return 'flow'; - case '{': - case '[': - yield* this.pushCount(1); - this.flowKey = false; - this.flowLevel += 1; - return 'flow'; - case '}': - case ']': - yield* this.pushCount(1); - this.flowKey = true; - this.flowLevel -= 1; - return this.flowLevel ? 
'flow' : 'doc'; - case '*': - yield* this.pushUntil(isNotAnchorChar); - return 'flow'; - case '"': - case "'": - this.flowKey = true; - return yield* this.parseQuotedScalar(); - case ':': { - const next = this.charAt(1); - if (this.flowKey || isEmpty(next) || next === ',') { - this.flowKey = false; - yield* this.pushCount(1); - yield* this.pushSpaces(true); - return 'flow'; - } - } - // fallthrough - default: - this.flowKey = false; - return yield* this.parsePlainScalar(); - } - } - *parseQuotedScalar() { - const quote = this.charAt(0); - let end = this.buffer.indexOf(quote, this.pos + 1); - if (quote === "'") { - while (end !== -1 && this.buffer[end + 1] === "'") - end = this.buffer.indexOf("'", end + 2); - } - else { - // double-quote - while (end !== -1) { - let n = 0; - while (this.buffer[end - 1 - n] === '\\') - n += 1; - if (n % 2 === 0) - break; - end = this.buffer.indexOf('"', end + 1); - } - } - // Only looking for newlines within the quotes - const qb = this.buffer.substring(0, end); - let nl = qb.indexOf('\n', this.pos); - if (nl !== -1) { - while (nl !== -1) { - const cs = this.continueScalar(nl + 1); - if (cs === -1) - break; - nl = qb.indexOf('\n', cs); - } - if (nl !== -1) { - // this is an error caused by an unexpected unindent - end = nl - (qb[nl - 1] === '\r' ? 2 : 1); - } - } - if (end === -1) { - if (!this.atEnd) - return this.setNext('quoted-scalar'); - end = this.buffer.length; - } - yield* this.pushToIndex(end + 1, false); - return this.flowLevel ? 'flow' : 'doc'; - } - *parseBlockScalarHeader() { - this.blockScalarIndent = -1; - this.blockScalarKeep = false; - let i = this.pos; - while (true) { - const ch = this.buffer[++i]; - if (ch === '+') - this.blockScalarKeep = true; - else if (ch > '0' && ch <= '9') - this.blockScalarIndent = Number(ch) - 1; - else if (ch !== '-') - break; - } - return yield* this.pushUntil(ch => isEmpty(ch) || ch === '#'); - } - *parseBlockScalar() { - let nl = this.pos - 1; // may be -1 if this.pos === 0 - let indent = 0; - let ch; - loop: for (let i = this.pos; (ch = this.buffer[i]); ++i) { - switch (ch) { - case ' ': - indent += 1; - break; - case '\n': - nl = i; - indent = 0; - break; - case '\r': { - const next = this.buffer[i + 1]; - if (!next && !this.atEnd) - return this.setNext('block-scalar'); - if (next === '\n') - break; - } // fallthrough - default: - break loop; - } - } - if (!ch && !this.atEnd) - return this.setNext('block-scalar'); - if (indent >= this.indentNext) { - if (this.blockScalarIndent === -1) - this.indentNext = indent; - else - this.indentNext += this.blockScalarIndent; - do { - const cs = this.continueScalar(nl + 1); - if (cs === -1) - break; - nl = this.buffer.indexOf('\n', cs); - } while (nl !== -1); - if (nl === -1) { - if (!this.atEnd) - return this.setNext('block-scalar'); - nl = this.buffer.length; - } - } - if (!this.blockScalarKeep) { - do { - let i = nl - 1; - let ch = this.buffer[i]; - if (ch === '\r') - ch = this.buffer[--i]; - const lastChar = i; // Drop the line if last char not more indented - while (ch === ' ' || ch === '\t') - ch = this.buffer[--i]; - if (ch === '\n' && i >= this.pos && i + 1 + indent > lastChar) - nl = i; - else - break; - } while (true); - } - yield cst.SCALAR; - yield* this.pushToIndex(nl + 1, true); - return yield* this.parseLineStart(); - } - *parsePlainScalar() { - const inFlow = this.flowLevel > 0; - let end = this.pos - 1; - let i = this.pos - 1; - let ch; - while ((ch = this.buffer[++i])) { - if (ch === ':') { - const next = this.buffer[i + 1]; - if (isEmpty(next) || 
(inFlow && next === ',')) - break; - end = i; - } - else if (isEmpty(ch)) { - let next = this.buffer[i + 1]; - if (ch === '\r') { - if (next === '\n') { - i += 1; - ch = '\n'; - next = this.buffer[i + 1]; - } - else - end = i; - } - if (next === '#' || (inFlow && invalidFlowScalarChars.includes(next))) - break; - if (ch === '\n') { - const cs = this.continueScalar(i + 1); - if (cs === -1) - break; - i = Math.max(i, cs - 2); // to advance, but still account for ' #' - } - } - else { - if (inFlow && invalidFlowScalarChars.includes(ch)) - break; - end = i; - } - } - if (!ch && !this.atEnd) - return this.setNext('plain-scalar'); - yield cst.SCALAR; - yield* this.pushToIndex(end + 1, true); - return inFlow ? 'flow' : 'doc'; - } - *pushCount(n) { - if (n > 0) { - yield this.buffer.substr(this.pos, n); - this.pos += n; - return n; - } - return 0; - } - *pushToIndex(i, allowEmpty) { - const s = this.buffer.slice(this.pos, i); - if (s) { - yield s; - this.pos += s.length; - return s.length; - } - else if (allowEmpty) - yield ''; - return 0; - } - *pushIndicators() { - switch (this.charAt(0)) { - case '!': - return ((yield* this.pushTag()) + - (yield* this.pushSpaces(true)) + - (yield* this.pushIndicators())); - case '&': - return ((yield* this.pushUntil(isNotAnchorChar)) + - (yield* this.pushSpaces(true)) + - (yield* this.pushIndicators())); - case '-': // this is an error - case '?': // this is an error outside flow collections - case ':': { - const inFlow = this.flowLevel > 0; - const ch1 = this.charAt(1); - if (isEmpty(ch1) || (inFlow && invalidFlowScalarChars.includes(ch1))) { - if (!inFlow) - this.indentNext = this.indentValue + 1; - else if (this.flowKey) - this.flowKey = false; - return ((yield* this.pushCount(1)) + - (yield* this.pushSpaces(true)) + - (yield* this.pushIndicators())); - } - } - } - return 0; - } - *pushTag() { - if (this.charAt(1) === '<') { - let i = this.pos + 2; - let ch = this.buffer[i]; - while (!isEmpty(ch) && ch !== '>') - ch = this.buffer[++i]; - return yield* this.pushToIndex(ch === '>' ? i + 1 : i, false); - } - else { - let i = this.pos + 1; - let ch = this.buffer[i]; - while (ch) { - if (tagChars.includes(ch)) - ch = this.buffer[++i]; - else if (ch === '%' && - hexDigits.includes(this.buffer[i + 1]) && - hexDigits.includes(this.buffer[i + 2])) { - ch = this.buffer[(i += 3)]; - } - else - break; - } - return yield* this.pushToIndex(i, false); - } - } - *pushNewline() { - const ch = this.buffer[this.pos]; - if (ch === '\n') - return yield* this.pushCount(1); - else if (ch === '\r' && this.charAt(1) === '\n') - return yield* this.pushCount(2); - else - return 0; - } - *pushSpaces(allowTabs) { - let i = this.pos - 1; - let ch; - do { - ch = this.buffer[++i]; - } while (ch === ' ' || (allowTabs && ch === '\t')); - const n = i - this.pos; - if (n > 0) { - yield this.buffer.substr(this.pos, n); - this.pos = i; - } - return n; - } - *pushUntil(test) { - let i = this.pos; - let ch = this.buffer[i]; - while (!test(ch)) - ch = this.buffer[++i]; - return yield* this.pushToIndex(i, false); - } -} - -exports.Lexer = Lexer; - - -/***/ }), - -/***/ 5748: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -/** - * Tracks newlines during parsing in order to provide an efficient API for - * determining the one-indexed `{ line, col }` position for any offset - * within the input. - */ -class LineCounter { - constructor() { - this.lineStarts = []; - /** - * Should be called in ascending order. 
Otherwise, call - * `lineCounter.lineStarts.sort()` before calling `linePos()`. - */ - this.addNewLine = (offset) => this.lineStarts.push(offset); - /** - * Performs a binary search and returns the 1-indexed { line, col } - * position of `offset`. If `line === 0`, `addNewLine` has never been - * called or `offset` is before the first known newline. - */ - this.linePos = (offset) => { - let low = 0; - let high = this.lineStarts.length; - while (low < high) { - const mid = (low + high) >> 1; // Math.floor((low + high) / 2) - if (this.lineStarts[mid] < offset) - low = mid + 1; - else - high = mid; - } - if (this.lineStarts[low] === offset) - return { line: low + 1, col: 1 }; - if (low === 0) - return { line: 0, col: offset }; - const start = this.lineStarts[low - 1]; - return { line: low, col: offset - start + 1 }; - }; - } -} - -exports.LineCounter = LineCounter; - - -/***/ }), - -/***/ 6466: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var cst = __nccwpck_require__(9726); -var lexer = __nccwpck_require__(7263); - -function includesToken(list, type) { - for (let i = 0; i < list.length; ++i) - if (list[i].type === type) - return true; - return false; -} -function findNonEmptyIndex(list) { - for (let i = 0; i < list.length; ++i) { - switch (list[i].type) { - case 'space': - case 'comment': - case 'newline': - break; - default: - return i; - } - } - return -1; -} -function isFlowToken(token) { - switch (token?.type) { - case 'alias': - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': - case 'flow-collection': - return true; - default: - return false; - } -} -function getPrevProps(parent) { - switch (parent.type) { - case 'document': - return parent.start; - case 'block-map': { - const it = parent.items[parent.items.length - 1]; - return it.sep ?? it.start; - } - case 'block-seq': - return parent.items[parent.items.length - 1].start; - /* istanbul ignore next should not happen */ - default: - return []; - } -} -/** Note: May modify input array */ -function getFirstKeyStartProps(prev) { - if (prev.length === 0) - return []; - let i = prev.length; - loop: while (--i >= 0) { - switch (prev[i].type) { - case 'doc-start': - case 'explicit-key-ind': - case 'map-value-ind': - case 'seq-item-ind': - case 'newline': - break loop; - } - } - while (prev[++i]?.type === 'space') { - /* loop */ - } - return prev.splice(i, prev.length); -} -function fixFlowSeqItems(fc) { - if (fc.start.type === 'flow-seq-start') { - for (const it of fc.items) { - if (it.sep && - !it.value && - !includesToken(it.start, 'explicit-key-ind') && - !includesToken(it.sep, 'map-value-ind')) { - if (it.key) - it.value = it.key; - delete it.key; - if (isFlowToken(it.value)) { - if (it.value.end) - Array.prototype.push.apply(it.value.end, it.sep); - else - it.value.end = it.sep; - } - else - Array.prototype.push.apply(it.start, it.sep); - delete it.sep; - } - } - } -} -/** - * A YAML concrete syntax tree (CST) parser - * - * ```ts - * const src: string = ... - * for (const token of new Parser().parse(src)) { - * // token: Token - * } - * ``` - * - * To use the parser with a user-provided lexer: - * - * ```ts - * function* parse(source: string, lexer: Lexer) { - * const parser = new Parser() - * for (const lexeme of lexer.lex(source)) - * yield* parser.next(lexeme) - * yield* parser.end() - * } - * - * const src: string = ... 
- * const lexer = new Lexer() - * for (const token of parse(src, lexer)) { - * // token: Token - * } - * ``` - */ -class Parser { - /** - * @param onNewLine - If defined, called separately with the start position of - * each new line (in `parse()`, including the start of input). - */ - constructor(onNewLine) { - /** If true, space and sequence indicators count as indentation */ - this.atNewLine = true; - /** If true, next token is a scalar value */ - this.atScalar = false; - /** Current indentation level */ - this.indent = 0; - /** Current offset since the start of parsing */ - this.offset = 0; - /** On the same line with a block map key */ - this.onKeyLine = false; - /** Top indicates the node that's currently being built */ - this.stack = []; - /** The source of the current token, set in parse() */ - this.source = ''; - /** The type of the current token, set in parse() */ - this.type = ''; - // Must be defined after `next()` - this.lexer = new lexer.Lexer(); - this.onNewLine = onNewLine; - } - /** - * Parse `source` as a YAML stream. - * If `incomplete`, a part of the last line may be left as a buffer for the next call. - * - * Errors are not thrown, but yielded as `{ type: 'error', message }` tokens. - * - * @returns A generator of tokens representing each directive, document, and other structure. - */ - *parse(source, incomplete = false) { - if (this.onNewLine && this.offset === 0) - this.onNewLine(0); - for (const lexeme of this.lexer.lex(source, incomplete)) - yield* this.next(lexeme); - if (!incomplete) - yield* this.end(); - } - /** - * Advance the parser by the `source` of one lexical token. - */ - *next(source) { - this.source = source; - if (process.env.LOG_TOKENS) - console.log('|', cst.prettyToken(source)); - if (this.atScalar) { - this.atScalar = false; - yield* this.step(); - this.offset += source.length; - return; - } - const type = cst.tokenType(source); - if (!type) { - const message = `Not a YAML token: ${source}`; - yield* this.pop({ type: 'error', offset: this.offset, message, source }); - this.offset += source.length; - } - else if (type === 'scalar') { - this.atNewLine = false; - this.atScalar = true; - this.type = 'scalar'; - } - else { - this.type = type; - yield* this.step(); - switch (type) { - case 'newline': - this.atNewLine = true; - this.indent = 0; - if (this.onNewLine) - this.onNewLine(this.offset + source.length); - break; - case 'space': - if (this.atNewLine && source[0] === ' ') - this.indent += source.length; - break; - case 'explicit-key-ind': - case 'map-value-ind': - case 'seq-item-ind': - if (this.atNewLine) - this.indent += source.length; - break; - case 'doc-mode': - case 'flow-error-end': - return; - default: - this.atNewLine = false; - } - this.offset += source.length; - } - } - /** Call at end of input to push out any remaining constructions */ - *end() { - while (this.stack.length > 0) - yield* this.pop(); - } - get sourceToken() { - const st = { - type: this.type, - offset: this.offset, - indent: this.indent, - source: this.source - }; - return st; - } - *step() { - const top = this.peek(1); - if (this.type === 'doc-end' && (!top || top.type !== 'doc-end')) { - while (this.stack.length > 0) - yield* this.pop(); - this.stack.push({ - type: 'doc-end', - offset: this.offset, - source: this.source - }); - return; - } - if (!top) - return yield* this.stream(); - switch (top.type) { - case 'document': - return yield* this.document(top); - case 'alias': - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': - return yield* 
this.scalar(top); - case 'block-scalar': - return yield* this.blockScalar(top); - case 'block-map': - return yield* this.blockMap(top); - case 'block-seq': - return yield* this.blockSequence(top); - case 'flow-collection': - return yield* this.flowCollection(top); - case 'doc-end': - return yield* this.documentEnd(top); - } - /* istanbul ignore next should not happen */ - yield* this.pop(); - } - peek(n) { - return this.stack[this.stack.length - n]; - } - *pop(error) { - const token = error ?? this.stack.pop(); - /* istanbul ignore if should not happen */ - if (!token) { - const message = 'Tried to pop an empty stack'; - yield { type: 'error', offset: this.offset, source: '', message }; - } - else if (this.stack.length === 0) { - yield token; - } - else { - const top = this.peek(1); - if (token.type === 'block-scalar') { - // Block scalars use their parent rather than header indent - token.indent = 'indent' in top ? top.indent : 0; - } - else if (token.type === 'flow-collection' && top.type === 'document') { - // Ignore all indent for top-level flow collections - token.indent = 0; - } - if (token.type === 'flow-collection') - fixFlowSeqItems(token); - switch (top.type) { - case 'document': - top.value = token; - break; - case 'block-scalar': - top.props.push(token); // error - break; - case 'block-map': { - const it = top.items[top.items.length - 1]; - if (it.value) { - top.items.push({ start: [], key: token, sep: [] }); - this.onKeyLine = true; - return; - } - else if (it.sep) { - it.value = token; - } - else { - Object.assign(it, { key: token, sep: [] }); - this.onKeyLine = !includesToken(it.start, 'explicit-key-ind'); - return; - } - break; - } - case 'block-seq': { - const it = top.items[top.items.length - 1]; - if (it.value) - top.items.push({ start: [], value: token }); - else - it.value = token; - break; - } - case 'flow-collection': { - const it = top.items[top.items.length - 1]; - if (!it || it.value) - top.items.push({ start: [], key: token, sep: [] }); - else if (it.sep) - it.value = token; - else - Object.assign(it, { key: token, sep: [] }); - return; - } - /* istanbul ignore next should not happen */ - default: - yield* this.pop(); - yield* this.pop(token); - } - if ((top.type === 'document' || - top.type === 'block-map' || - top.type === 'block-seq') && - (token.type === 'block-map' || token.type === 'block-seq')) { - const last = token.items[token.items.length - 1]; - if (last && - !last.sep && - !last.value && - last.start.length > 0 && - findNonEmptyIndex(last.start) === -1 && - (token.indent === 0 || - last.start.every(st => st.type !== 'comment' || st.indent < token.indent))) { - if (top.type === 'document') - top.end = last.start; - else - top.items.push({ start: last.start }); - token.items.splice(-1, 1); - } - } - } - } - *stream() { - switch (this.type) { - case 'directive-line': - yield { type: 'directive', offset: this.offset, source: this.source }; - return; - case 'byte-order-mark': - case 'space': - case 'comment': - case 'newline': - yield this.sourceToken; - return; - case 'doc-mode': - case 'doc-start': { - const doc = { - type: 'document', - offset: this.offset, - start: [] - }; - if (this.type === 'doc-start') - doc.start.push(this.sourceToken); - this.stack.push(doc); - return; - } - } - yield { - type: 'error', - offset: this.offset, - message: `Unexpected ${this.type} token in YAML stream`, - source: this.source - }; - } - *document(doc) { - if (doc.value) - return yield* this.lineEnd(doc); - switch (this.type) { - case 'doc-start': { - if 
(findNonEmptyIndex(doc.start) !== -1) { - yield* this.pop(); - yield* this.step(); - } - else - doc.start.push(this.sourceToken); - return; - } - case 'anchor': - case 'tag': - case 'space': - case 'comment': - case 'newline': - doc.start.push(this.sourceToken); - return; - } - const bv = this.startBlockValue(doc); - if (bv) - this.stack.push(bv); - else { - yield { - type: 'error', - offset: this.offset, - message: `Unexpected ${this.type} token in YAML document`, - source: this.source - }; - } - } - *scalar(scalar) { - if (this.type === 'map-value-ind') { - const prev = getPrevProps(this.peek(2)); - const start = getFirstKeyStartProps(prev); - let sep; - if (scalar.end) { - sep = scalar.end; - sep.push(this.sourceToken); - delete scalar.end; - } - else - sep = [this.sourceToken]; - const map = { - type: 'block-map', - offset: scalar.offset, - indent: scalar.indent, - items: [{ start, key: scalar, sep }] - }; - this.onKeyLine = true; - this.stack[this.stack.length - 1] = map; - } - else - yield* this.lineEnd(scalar); - } - *blockScalar(scalar) { - switch (this.type) { - case 'space': - case 'comment': - case 'newline': - scalar.props.push(this.sourceToken); - return; - case 'scalar': - scalar.source = this.source; - // block-scalar source includes trailing newline - this.atNewLine = true; - this.indent = 0; - if (this.onNewLine) { - let nl = this.source.indexOf('\n') + 1; - while (nl !== 0) { - this.onNewLine(this.offset + nl); - nl = this.source.indexOf('\n', nl) + 1; - } - } - yield* this.pop(); - break; - /* istanbul ignore next should not happen */ - default: - yield* this.pop(); - yield* this.step(); - } - } - *blockMap(map) { - const it = map.items[map.items.length - 1]; - // it.sep is true-ish if pair already has key or : separator - switch (this.type) { - case 'newline': - this.onKeyLine = false; - if (it.value) { - const end = 'end' in it.value ? it.value.end : undefined; - const last = Array.isArray(end) ? 
end[end.length - 1] : undefined; - if (last?.type === 'comment') - end?.push(this.sourceToken); - else - map.items.push({ start: [this.sourceToken] }); - } - else if (it.sep) { - it.sep.push(this.sourceToken); - } - else { - it.start.push(this.sourceToken); - } - return; - case 'space': - case 'comment': - if (it.value) { - map.items.push({ start: [this.sourceToken] }); - } - else if (it.sep) { - it.sep.push(this.sourceToken); - } - else { - if (this.atIndentedComment(it.start, map.indent)) { - const prev = map.items[map.items.length - 2]; - const end = prev?.value?.end; - if (Array.isArray(end)) { - Array.prototype.push.apply(end, it.start); - end.push(this.sourceToken); - map.items.pop(); - return; - } - } - it.start.push(this.sourceToken); - } - return; - } - if (this.indent >= map.indent) { - const atNextItem = !this.onKeyLine && this.indent === map.indent && it.sep; - // For empty nodes, assign newline-separated not indented empty tokens to following node - let start = []; - if (atNextItem && it.sep && !it.value) { - const nl = []; - for (let i = 0; i < it.sep.length; ++i) { - const st = it.sep[i]; - switch (st.type) { - case 'newline': - nl.push(i); - break; - case 'space': - break; - case 'comment': - if (st.indent > map.indent) - nl.length = 0; - break; - default: - nl.length = 0; - } - } - if (nl.length >= 2) - start = it.sep.splice(nl[1]); - } - switch (this.type) { - case 'anchor': - case 'tag': - if (atNextItem || it.value) { - start.push(this.sourceToken); - map.items.push({ start }); - this.onKeyLine = true; - } - else if (it.sep) { - it.sep.push(this.sourceToken); - } - else { - it.start.push(this.sourceToken); - } - return; - case 'explicit-key-ind': - if (!it.sep && !includesToken(it.start, 'explicit-key-ind')) { - it.start.push(this.sourceToken); - } - else if (atNextItem || it.value) { - start.push(this.sourceToken); - map.items.push({ start }); - } - else { - this.stack.push({ - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start: [this.sourceToken] }] - }); - } - this.onKeyLine = true; - return; - case 'map-value-ind': - if (includesToken(it.start, 'explicit-key-ind')) { - if (!it.sep) { - if (includesToken(it.start, 'newline')) { - Object.assign(it, { key: null, sep: [this.sourceToken] }); - } - else { - const start = getFirstKeyStartProps(it.start); - this.stack.push({ - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start, key: null, sep: [this.sourceToken] }] - }); - } - } - else if (it.value) { - map.items.push({ start: [], key: null, sep: [this.sourceToken] }); - } - else if (includesToken(it.sep, 'map-value-ind')) { - this.stack.push({ - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start, key: null, sep: [this.sourceToken] }] - }); - } - else if (isFlowToken(it.key) && - !includesToken(it.sep, 'newline')) { - const start = getFirstKeyStartProps(it.start); - const key = it.key; - const sep = it.sep; - sep.push(this.sourceToken); - // @ts-expect-error type guard is wrong here - delete it.key, delete it.sep; - this.stack.push({ - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start, key, sep }] - }); - } - else if (start.length > 0) { - // Not actually at next item - it.sep = it.sep.concat(start, this.sourceToken); - } - else { - it.sep.push(this.sourceToken); - } - } - else { - if (!it.sep) { - Object.assign(it, { key: null, sep: [this.sourceToken] }); - } - else if (it.value || atNextItem) { - map.items.push({ start, key: null, sep: 
[this.sourceToken] }); - } - else if (includesToken(it.sep, 'map-value-ind')) { - this.stack.push({ - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start: [], key: null, sep: [this.sourceToken] }] - }); - } - else { - it.sep.push(this.sourceToken); - } - } - this.onKeyLine = true; - return; - case 'alias': - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': { - const fs = this.flowScalar(this.type); - if (atNextItem || it.value) { - map.items.push({ start, key: fs, sep: [] }); - this.onKeyLine = true; - } - else if (it.sep) { - this.stack.push(fs); - } - else { - Object.assign(it, { key: fs, sep: [] }); - this.onKeyLine = true; - } - return; - } - default: { - const bv = this.startBlockValue(map); - if (bv) { - if (atNextItem && - bv.type !== 'block-seq' && - includesToken(it.start, 'explicit-key-ind')) { - map.items.push({ start }); - } - this.stack.push(bv); - return; - } - } - } - } - yield* this.pop(); - yield* this.step(); - } - *blockSequence(seq) { - const it = seq.items[seq.items.length - 1]; - switch (this.type) { - case 'newline': - if (it.value) { - const end = 'end' in it.value ? it.value.end : undefined; - const last = Array.isArray(end) ? end[end.length - 1] : undefined; - if (last?.type === 'comment') - end?.push(this.sourceToken); - else - seq.items.push({ start: [this.sourceToken] }); - } - else - it.start.push(this.sourceToken); - return; - case 'space': - case 'comment': - if (it.value) - seq.items.push({ start: [this.sourceToken] }); - else { - if (this.atIndentedComment(it.start, seq.indent)) { - const prev = seq.items[seq.items.length - 2]; - const end = prev?.value?.end; - if (Array.isArray(end)) { - Array.prototype.push.apply(end, it.start); - end.push(this.sourceToken); - seq.items.pop(); - return; - } - } - it.start.push(this.sourceToken); - } - return; - case 'anchor': - case 'tag': - if (it.value || this.indent <= seq.indent) - break; - it.start.push(this.sourceToken); - return; - case 'seq-item-ind': - if (this.indent !== seq.indent) - break; - if (it.value || includesToken(it.start, 'seq-item-ind')) - seq.items.push({ start: [this.sourceToken] }); - else - it.start.push(this.sourceToken); - return; - } - if (this.indent > seq.indent) { - const bv = this.startBlockValue(seq); - if (bv) { - this.stack.push(bv); - return; - } - } - yield* this.pop(); - yield* this.step(); - } - *flowCollection(fc) { - const it = fc.items[fc.items.length - 1]; - if (this.type === 'flow-error-end') { - let top; - do { - yield* this.pop(); - top = this.peek(1); - } while (top && top.type === 'flow-collection'); - } - else if (fc.end.length === 0) { - switch (this.type) { - case 'comma': - case 'explicit-key-ind': - if (!it || it.sep) - fc.items.push({ start: [this.sourceToken] }); - else - it.start.push(this.sourceToken); - return; - case 'map-value-ind': - if (!it || it.value) - fc.items.push({ start: [], key: null, sep: [this.sourceToken] }); - else if (it.sep) - it.sep.push(this.sourceToken); - else - Object.assign(it, { key: null, sep: [this.sourceToken] }); - return; - case 'space': - case 'comment': - case 'newline': - case 'anchor': - case 'tag': - if (!it || it.value) - fc.items.push({ start: [this.sourceToken] }); - else if (it.sep) - it.sep.push(this.sourceToken); - else - it.start.push(this.sourceToken); - return; - case 'alias': - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': { - const fs = this.flowScalar(this.type); - if (!it || it.value) - fc.items.push({ start: [], key: fs, sep: 
[] }); - else if (it.sep) - this.stack.push(fs); - else - Object.assign(it, { key: fs, sep: [] }); - return; - } - case 'flow-map-end': - case 'flow-seq-end': - fc.end.push(this.sourceToken); - return; - } - const bv = this.startBlockValue(fc); - /* istanbul ignore else should not happen */ - if (bv) - this.stack.push(bv); - else { - yield* this.pop(); - yield* this.step(); - } - } - else { - const parent = this.peek(2); - if (parent.type === 'block-map' && - ((this.type === 'map-value-ind' && parent.indent === fc.indent) || - (this.type === 'newline' && - !parent.items[parent.items.length - 1].sep))) { - yield* this.pop(); - yield* this.step(); - } - else if (this.type === 'map-value-ind' && - parent.type !== 'flow-collection') { - const prev = getPrevProps(parent); - const start = getFirstKeyStartProps(prev); - fixFlowSeqItems(fc); - const sep = fc.end.splice(1, fc.end.length); - sep.push(this.sourceToken); - const map = { - type: 'block-map', - offset: fc.offset, - indent: fc.indent, - items: [{ start, key: fc, sep }] - }; - this.onKeyLine = true; - this.stack[this.stack.length - 1] = map; - } - else { - yield* this.lineEnd(fc); - } - } - } - flowScalar(type) { - if (this.onNewLine) { - let nl = this.source.indexOf('\n') + 1; - while (nl !== 0) { - this.onNewLine(this.offset + nl); - nl = this.source.indexOf('\n', nl) + 1; - } - } - return { - type, - offset: this.offset, - indent: this.indent, - source: this.source - }; - } - startBlockValue(parent) { - switch (this.type) { - case 'alias': - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': - return this.flowScalar(this.type); - case 'block-scalar-header': - return { - type: 'block-scalar', - offset: this.offset, - indent: this.indent, - props: [this.sourceToken], - source: '' - }; - case 'flow-map-start': - case 'flow-seq-start': - return { - type: 'flow-collection', - offset: this.offset, - indent: this.indent, - start: this.sourceToken, - items: [], - end: [] - }; - case 'seq-item-ind': - return { - type: 'block-seq', - offset: this.offset, - indent: this.indent, - items: [{ start: [this.sourceToken] }] - }; - case 'explicit-key-ind': { - this.onKeyLine = true; - const prev = getPrevProps(parent); - const start = getFirstKeyStartProps(prev); - start.push(this.sourceToken); - return { - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start }] - }; - } - case 'map-value-ind': { - this.onKeyLine = true; - const prev = getPrevProps(parent); - const start = getFirstKeyStartProps(prev); - return { - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start, key: null, sep: [this.sourceToken] }] - }; - } - } - return null; - } - atIndentedComment(start, indent) { - if (this.type !== 'comment') - return false; - if (this.indent <= indent) - return false; - return start.every(st => st.type === 'newline' || st.type === 'space'); - } - *documentEnd(docEnd) { - if (this.type !== 'doc-mode') { - if (docEnd.end) - docEnd.end.push(this.sourceToken); - else - docEnd.end = [this.sourceToken]; - if (this.type === 'newline') - yield* this.pop(); - } - } - *lineEnd(token) { - switch (this.type) { - case 'comma': - case 'doc-start': - case 'doc-end': - case 'flow-seq-end': - case 'flow-map-end': - case 'map-value-ind': - yield* this.pop(); - yield* this.step(); - break; - case 'newline': - this.onKeyLine = false; - // fallthrough - case 'space': - case 'comment': - default: - // all other values are errors - if (token.end) - token.end.push(this.sourceToken); - else - 
token.end = [this.sourceToken]; - if (this.type === 'newline') - yield* this.pop(); - } - } -} - -exports.Parser = Parser; - - -/***/ }), - -/***/ 9191: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var composer = __nccwpck_require__(9386); -var Document = __nccwpck_require__(1785); -var errors = __nccwpck_require__(3305); -var log = __nccwpck_require__(469); -var lineCounter = __nccwpck_require__(5748); -var parser = __nccwpck_require__(6466); - -function parseOptions(options) { - const prettyErrors = options.prettyErrors !== false; - const lineCounter$1 = options.lineCounter || (prettyErrors && new lineCounter.LineCounter()) || null; - return { lineCounter: lineCounter$1, prettyErrors }; -} -/** - * Parse the input as a stream of YAML documents. - * - * Documents should be separated from each other by `...` or `---` marker lines. - * - * @returns If an empty `docs` array is returned, it will be of type - * EmptyStream and contain additional stream information. In - * TypeScript, you should use `'empty' in docs` as a type guard for it. - */ -function parseAllDocuments(source, options = {}) { - const { lineCounter, prettyErrors } = parseOptions(options); - const parser$1 = new parser.Parser(lineCounter?.addNewLine); - const composer$1 = new composer.Composer(options); - const docs = Array.from(composer$1.compose(parser$1.parse(source))); - if (prettyErrors && lineCounter) - for (const doc of docs) { - doc.errors.forEach(errors.prettifyError(source, lineCounter)); - doc.warnings.forEach(errors.prettifyError(source, lineCounter)); - } - if (docs.length > 0) - return docs; - return Object.assign([], { empty: true }, composer$1.streamInfo()); -} -/** Parse an input string into a single YAML.Document */ -function parseDocument(source, options = {}) { - const { lineCounter, prettyErrors } = parseOptions(options); - const parser$1 = new parser.Parser(lineCounter?.addNewLine); - const composer$1 = new composer.Composer(options); - // `doc` is always set by compose.end(true) at the very latest - let doc = null; - for (const _doc of composer$1.compose(parser$1.parse(source), true, source.length)) { - if (!doc) - doc = _doc; - else if (doc.options.logLevel !== 'silent') { - doc.errors.push(new errors.YAMLParseError(_doc.range.slice(0, 2), 'MULTIPLE_DOCS', 'Source contains multiple documents; please use YAML.parseAllDocuments()')); - break; - } - } - if (prettyErrors && lineCounter) { - doc.errors.forEach(errors.prettifyError(source, lineCounter)); - doc.warnings.forEach(errors.prettifyError(source, lineCounter)); - } - return doc; -} -function parse(src, reviver, options) { - let _reviver = undefined; - if (typeof reviver === 'function') { - _reviver = reviver; - } - else if (options === undefined && reviver && typeof reviver === 'object') { - options = reviver; - } - const doc = parseDocument(src, options); - if (!doc) - return null; - doc.warnings.forEach(warning => log.warn(doc.options.logLevel, warning)); - if (doc.errors.length > 0) { - if (doc.options.logLevel !== 'silent') - throw doc.errors[0]; - else - doc.errors = []; - } - return doc.toJS(Object.assign({ reviver: _reviver }, options)); -} -function stringify(value, replacer, options) { - let _replacer = null; - if (typeof replacer === 'function' || Array.isArray(replacer)) { - _replacer = replacer; - } - else if (options === undefined && replacer) { - options = replacer; - } - if (typeof options === 'string') - options = options.length; - if (typeof options === 'number') { - const indent = 
Math.round(options); - options = indent < 1 ? undefined : indent > 8 ? { indent: 8 } : { indent }; - } - if (value === undefined) { - const { keepUndefined } = options ?? replacer ?? {}; - if (!keepUndefined) - return undefined; - } - return new Document.Document(value, _replacer, options).toString(options); -} - -exports.parse = parse; -exports.parseAllDocuments = parseAllDocuments; -exports.parseDocument = parseDocument; -exports.stringify = stringify; - - -/***/ }), - -/***/ 7425: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); -var map = __nccwpck_require__(1400); -var seq = __nccwpck_require__(6698); -var string = __nccwpck_require__(1576); -var tags = __nccwpck_require__(2102); - -const sortMapEntriesByKey = (a, b) => a.key < b.key ? -1 : a.key > b.key ? 1 : 0; -class Schema { - constructor({ compat, customTags, merge, resolveKnownTags, schema, sortMapEntries, toStringDefaults }) { - this.compat = Array.isArray(compat) - ? tags.getTags(compat, 'compat') - : compat - ? tags.getTags(null, compat) - : null; - this.merge = !!merge; - this.name = (typeof schema === 'string' && schema) || 'core'; - this.knownTags = resolveKnownTags ? tags.coreKnownTags : {}; - this.tags = tags.getTags(customTags, this.name); - this.toStringOptions = toStringDefaults ?? null; - Object.defineProperty(this, Node.MAP, { value: map.map }); - Object.defineProperty(this, Node.SCALAR, { value: string.string }); - Object.defineProperty(this, Node.SEQ, { value: seq.seq }); - // Used by createMap() - this.sortMapEntries = - typeof sortMapEntries === 'function' - ? sortMapEntries - : sortMapEntries === true - ? sortMapEntriesByKey - : null; - } - clone() { - const copy = Object.create(Schema.prototype, Object.getOwnPropertyDescriptors(this)); - copy.tags = this.tags.slice(); - return copy; - } -} - -exports.Schema = Schema; - - -/***/ }), - -/***/ 1400: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); -var Pair = __nccwpck_require__(3497); -var YAMLMap = __nccwpck_require__(6761); - -function createMap(schema, obj, ctx) { - const { keepUndefined, replacer } = ctx; - const map = new YAMLMap.YAMLMap(schema); - const add = (key, value) => { - if (typeof replacer === 'function') - value = replacer.call(obj, key, value); - else if (Array.isArray(replacer) && !replacer.includes(key)) - return; - if (value !== undefined || keepUndefined) - map.items.push(Pair.createPair(key, value, ctx)); - }; - if (obj instanceof Map) { - for (const [key, value] of obj) - add(key, value); - } - else if (obj && typeof obj === 'object') { - for (const key of Object.keys(obj)) - add(key, obj[key]); - } - if (typeof schema.sortMapEntries === 'function') { - map.items.sort(schema.sortMapEntries); - } - return map; -} -const map = { - collection: 'map', - createNode: createMap, - default: true, - nodeClass: YAMLMap.YAMLMap, - tag: 'tag:yaml.org,2002:map', - resolve(map, onError) { - if (!Node.isMap(map)) - onError('Expected a mapping for this tag'); - return map; - } -}; - -exports.map = map; - - -/***/ }), - -/***/ 7556: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Scalar = __nccwpck_require__(8160); - -const nullTag = { - identify: value => value == null, - createNode: () => new Scalar.Scalar(null), - default: true, - tag: 'tag:yaml.org,2002:null', - test: /^(?:~|[Nn]ull|NULL)?$/, - resolve: () => new Scalar.Scalar(null), - stringify: ({ 
source }, ctx) => typeof source === 'string' && nullTag.test.test(source) - ? source - : ctx.options.nullStr -}; - -exports.nullTag = nullTag; - - -/***/ }), - -/***/ 6698: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var createNode = __nccwpck_require__(9807); -var Node = __nccwpck_require__(9752); -var YAMLSeq = __nccwpck_require__(3810); - -function createSeq(schema, obj, ctx) { - const { replacer } = ctx; - const seq = new YAMLSeq.YAMLSeq(schema); - if (obj && Symbol.iterator in Object(obj)) { - let i = 0; - for (let it of obj) { - if (typeof replacer === 'function') { - const key = obj instanceof Set ? it : String(i++); - it = replacer.call(obj, key, it); - } - seq.items.push(createNode.createNode(it, undefined, ctx)); - } - } - return seq; -} -const seq = { - collection: 'seq', - createNode: createSeq, - default: true, - nodeClass: YAMLSeq.YAMLSeq, - tag: 'tag:yaml.org,2002:seq', - resolve(seq, onError) { - if (!Node.isSeq(seq)) - onError('Expected a sequence for this tag'); - return seq; - } -}; - -exports.seq = seq; - - -/***/ }), - -/***/ 1576: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var stringifyString = __nccwpck_require__(6084); - -const string = { - identify: value => typeof value === 'string', - default: true, - tag: 'tag:yaml.org,2002:str', - resolve: str => str, - stringify(item, ctx, onComment, onChompKeep) { - ctx = Object.assign({ actualString: true }, ctx); - return stringifyString.stringifyString(item, ctx, onComment, onChompKeep); - } -}; - -exports.string = string; - - -/***/ }), - -/***/ 7369: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Scalar = __nccwpck_require__(8160); - -const boolTag = { - identify: value => typeof value === 'boolean', - default: true, - tag: 'tag:yaml.org,2002:bool', - test: /^(?:[Tt]rue|TRUE|[Ff]alse|FALSE)$/, - resolve: str => new Scalar.Scalar(str[0] === 't' || str[0] === 'T'), - stringify({ source, value }, ctx) { - if (source && boolTag.test.test(source)) { - const sv = source[0] === 't' || source[0] === 'T'; - if (value === sv) - return source; - } - return value ? ctx.options.trueStr : ctx.options.falseStr; - } -}; - -exports.boolTag = boolTag; - - -/***/ }), - -/***/ 616: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Scalar = __nccwpck_require__(8160); -var stringifyNumber = __nccwpck_require__(780); - -const floatNaN = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - test: /^(?:[-+]?\.(?:inf|Inf|INF|nan|NaN|NAN))$/, - resolve: str => str.slice(-3).toLowerCase() === 'nan' - ? NaN - : str[0] === '-' - ? Number.NEGATIVE_INFINITY - : Number.POSITIVE_INFINITY, - stringify: stringifyNumber.stringifyNumber -}; -const floatExp = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - format: 'EXP', - test: /^[-+]?(?:\.[0-9]+|[0-9]+(?:\.[0-9]*)?)[eE][-+]?[0-9]+$/, - resolve: str => parseFloat(str), - stringify(node) { - const num = Number(node.value); - return isFinite(num) ? 
num.toExponential() : stringifyNumber.stringifyNumber(node); - } -}; -const float = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - test: /^[-+]?(?:\.[0-9]+|[0-9]+\.[0-9]*)$/, - resolve(str) { - const node = new Scalar.Scalar(parseFloat(str)); - const dot = str.indexOf('.'); - if (dot !== -1 && str[str.length - 1] === '0') - node.minFractionDigits = str.length - dot - 1; - return node; - }, - stringify: stringifyNumber.stringifyNumber -}; - -exports.float = float; -exports.floatExp = floatExp; -exports.floatNaN = floatNaN; - - -/***/ }), - -/***/ 2521: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var stringifyNumber = __nccwpck_require__(780); - -const intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value); -const intResolve = (str, offset, radix, { intAsBigInt }) => (intAsBigInt ? BigInt(str) : parseInt(str.substring(offset), radix)); -function intStringify(node, radix, prefix) { - const { value } = node; - if (intIdentify(value) && value >= 0) - return prefix + value.toString(radix); - return stringifyNumber.stringifyNumber(node); -} -const intOct = { - identify: value => intIdentify(value) && value >= 0, - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'OCT', - test: /^0o[0-7]+$/, - resolve: (str, _onError, opt) => intResolve(str, 2, 8, opt), - stringify: node => intStringify(node, 8, '0o') -}; -const int = { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - test: /^[-+]?[0-9]+$/, - resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt), - stringify: stringifyNumber.stringifyNumber -}; -const intHex = { - identify: value => intIdentify(value) && value >= 0, - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'HEX', - test: /^0x[0-9a-fA-F]+$/, - resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt), - stringify: node => intStringify(node, 16, '0x') -}; - -exports.int = int; -exports.intHex = intHex; -exports.intOct = intOct; - - -/***/ }), - -/***/ 7504: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var map = __nccwpck_require__(1400); -var _null = __nccwpck_require__(7556); -var seq = __nccwpck_require__(6698); -var string = __nccwpck_require__(1576); -var bool = __nccwpck_require__(7369); -var float = __nccwpck_require__(616); -var int = __nccwpck_require__(2521); - -const schema = [ - map.map, - seq.seq, - string.string, - _null.nullTag, - bool.boolTag, - int.intOct, - int.int, - int.intHex, - float.floatNaN, - float.floatExp, - float.float -]; - -exports.schema = schema; - - -/***/ }), - -/***/ 8553: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Scalar = __nccwpck_require__(8160); -var map = __nccwpck_require__(1400); -var seq = __nccwpck_require__(6698); - -function intIdentify(value) { - return typeof value === 'bigint' || Number.isInteger(value); -} -const stringifyJSON = ({ value }) => JSON.stringify(value); -const jsonScalars = [ - { - identify: value => typeof value === 'string', - default: true, - tag: 'tag:yaml.org,2002:str', - resolve: str => str, - stringify: stringifyJSON - }, - { - identify: value => value == null, - createNode: () => new Scalar.Scalar(null), - default: true, - tag: 'tag:yaml.org,2002:null', - test: /^null$/, - resolve: () => null, - stringify: stringifyJSON - }, - { - identify: value => typeof value === 'boolean', - default: true, - tag: 'tag:yaml.org,2002:bool', - test: 
/^true|false$/, - resolve: str => str === 'true', - stringify: stringifyJSON - }, - { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - test: /^-?(?:0|[1-9][0-9]*)$/, - resolve: (str, _onError, { intAsBigInt }) => intAsBigInt ? BigInt(str) : parseInt(str, 10), - stringify: ({ value }) => intIdentify(value) ? value.toString() : JSON.stringify(value) - }, - { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - test: /^-?(?:0|[1-9][0-9]*)(?:\.[0-9]*)?(?:[eE][-+]?[0-9]+)?$/, - resolve: str => parseFloat(str), - stringify: stringifyJSON - } -]; -const jsonError = { - default: true, - tag: '', - test: /^/, - resolve(str, onError) { - onError(`Unresolved plain scalar ${JSON.stringify(str)}`); - return str; - } -}; -const schema = [map.map, seq.seq].concat(jsonScalars, jsonError); - -exports.schema = schema; - - -/***/ }), - -/***/ 2102: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var map = __nccwpck_require__(1400); -var _null = __nccwpck_require__(7556); -var seq = __nccwpck_require__(6698); -var string = __nccwpck_require__(1576); -var bool = __nccwpck_require__(7369); -var float = __nccwpck_require__(616); -var int = __nccwpck_require__(2521); -var schema = __nccwpck_require__(7504); -var schema$1 = __nccwpck_require__(8553); -var binary = __nccwpck_require__(5758); -var omap = __nccwpck_require__(4486); -var pairs = __nccwpck_require__(5169); -var schema$2 = __nccwpck_require__(8536); -var set = __nccwpck_require__(9549); -var timestamp = __nccwpck_require__(7717); - -const schemas = new Map([ - ['core', schema.schema], - ['failsafe', [map.map, seq.seq, string.string]], - ['json', schema$1.schema], - ['yaml11', schema$2.schema], - ['yaml-1.1', schema$2.schema] -]); -const tagsByName = { - binary: binary.binary, - bool: bool.boolTag, - float: float.float, - floatExp: float.floatExp, - floatNaN: float.floatNaN, - floatTime: timestamp.floatTime, - int: int.int, - intHex: int.intHex, - intOct: int.intOct, - intTime: timestamp.intTime, - map: map.map, - null: _null.nullTag, - omap: omap.omap, - pairs: pairs.pairs, - seq: seq.seq, - set: set.set, - timestamp: timestamp.timestamp -}; -const coreKnownTags = { - 'tag:yaml.org,2002:binary': binary.binary, - 'tag:yaml.org,2002:omap': omap.omap, - 'tag:yaml.org,2002:pairs': pairs.pairs, - 'tag:yaml.org,2002:set': set.set, - 'tag:yaml.org,2002:timestamp': timestamp.timestamp -}; -function getTags(customTags, schemaName) { - let tags = schemas.get(schemaName); - if (!tags) { - if (Array.isArray(customTags)) - tags = []; - else { - const keys = Array.from(schemas.keys()) - .filter(key => key !== 'yaml11') - .map(key => JSON.stringify(key)) - .join(', '); - throw new Error(`Unknown schema "${schemaName}"; use one of ${keys} or define customTags array`); - } - } - if (Array.isArray(customTags)) { - for (const tag of customTags) - tags = tags.concat(tag); - } - else if (typeof customTags === 'function') { - tags = customTags(tags.slice()); - } - return tags.map(tag => { - if (typeof tag !== 'string') - return tag; - const tagObj = tagsByName[tag]; - if (tagObj) - return tagObj; - const keys = Object.keys(tagsByName) - .map(key => JSON.stringify(key)) - .join(', '); - throw new Error(`Unknown custom tag "${tag}"; use one of ${keys}`); - }); -} - -exports.coreKnownTags = coreKnownTags; -exports.getTags = getTags; - - -/***/ }), - -/***/ 5758: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Scalar 
= __nccwpck_require__(8160); -var stringifyString = __nccwpck_require__(6084); - -const binary = { - identify: value => value instanceof Uint8Array, - default: false, - tag: 'tag:yaml.org,2002:binary', - /** - * Returns a Buffer in node and an Uint8Array in browsers - * - * To use the resulting buffer as an image, you'll want to do something like: - * - * const blob = new Blob([buffer], { type: 'image/jpeg' }) - * document.querySelector('#photo').src = URL.createObjectURL(blob) - */ - resolve(src, onError) { - if (typeof Buffer === 'function') { - return Buffer.from(src, 'base64'); - } - else if (typeof atob === 'function') { - // On IE 11, atob() can't handle newlines - const str = atob(src.replace(/[\n\r]/g, '')); - const buffer = new Uint8Array(str.length); - for (let i = 0; i < str.length; ++i) - buffer[i] = str.charCodeAt(i); - return buffer; - } - else { - onError('This environment does not support reading binary tags; either Buffer or atob is required'); - return src; - } - }, - stringify({ comment, type, value }, ctx, onComment, onChompKeep) { - const buf = value; // checked earlier by binary.identify() - let str; - if (typeof Buffer === 'function') { - str = - buf instanceof Buffer - ? buf.toString('base64') - : Buffer.from(buf.buffer).toString('base64'); - } - else if (typeof btoa === 'function') { - let s = ''; - for (let i = 0; i < buf.length; ++i) - s += String.fromCharCode(buf[i]); - str = btoa(s); - } - else { - throw new Error('This environment does not support writing binary tags; either Buffer or btoa is required'); - } - if (!type) - type = Scalar.Scalar.BLOCK_LITERAL; - if (type !== Scalar.Scalar.QUOTE_DOUBLE) { - const lineWidth = Math.max(ctx.options.lineWidth - ctx.indent.length, ctx.options.minContentWidth); - const n = Math.ceil(str.length / lineWidth); - const lines = new Array(n); - for (let i = 0, o = 0; i < n; ++i, o += lineWidth) { - lines[i] = str.substr(o, lineWidth); - } - str = lines.join(type === Scalar.Scalar.BLOCK_LITERAL ? '\n' : ' '); - } - return stringifyString.stringifyString({ comment, type, value: str }, ctx, onComment, onChompKeep); - } -}; - -exports.binary = binary; - - -/***/ }), - -/***/ 2684: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Scalar = __nccwpck_require__(8160); - -function boolStringify({ value, source }, ctx) { - const boolObj = value ? trueTag : falseTag; - if (source && boolObj.test.test(source)) - return source; - return value ? ctx.options.trueStr : ctx.options.falseStr; -} -const trueTag = { - identify: value => value === true, - default: true, - tag: 'tag:yaml.org,2002:bool', - test: /^(?:Y|y|[Yy]es|YES|[Tt]rue|TRUE|[Oo]n|ON)$/, - resolve: () => new Scalar.Scalar(true), - stringify: boolStringify -}; -const falseTag = { - identify: value => value === false, - default: true, - tag: 'tag:yaml.org,2002:bool', - test: /^(?:N|n|[Nn]o|NO|[Ff]alse|FALSE|[Oo]ff|OFF)$/i, - resolve: () => new Scalar.Scalar(false), - stringify: boolStringify -}; - -exports.falseTag = falseTag; -exports.trueTag = trueTag; - - -/***/ }), - -/***/ 8090: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Scalar = __nccwpck_require__(8160); -var stringifyNumber = __nccwpck_require__(780); - -const floatNaN = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - test: /^[-+]?\.(?:inf|Inf|INF|nan|NaN|NAN)$/, - resolve: (str) => str.slice(-3).toLowerCase() === 'nan' - ? NaN - : str[0] === '-' - ? 
Number.NEGATIVE_INFINITY - : Number.POSITIVE_INFINITY, - stringify: stringifyNumber.stringifyNumber -}; -const floatExp = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - format: 'EXP', - test: /^[-+]?(?:[0-9][0-9_]*)?(?:\.[0-9_]*)?[eE][-+]?[0-9]+$/, - resolve: (str) => parseFloat(str.replace(/_/g, '')), - stringify(node) { - const num = Number(node.value); - return isFinite(num) ? num.toExponential() : stringifyNumber.stringifyNumber(node); - } -}; -const float = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - test: /^[-+]?(?:[0-9][0-9_]*)?\.[0-9_]*$/, - resolve(str) { - const node = new Scalar.Scalar(parseFloat(str.replace(/_/g, ''))); - const dot = str.indexOf('.'); - if (dot !== -1) { - const f = str.substring(dot + 1).replace(/_/g, ''); - if (f[f.length - 1] === '0') - node.minFractionDigits = f.length; - } - return node; - }, - stringify: stringifyNumber.stringifyNumber -}; - -exports.float = float; -exports.floatExp = floatExp; -exports.floatNaN = floatNaN; - - -/***/ }), - -/***/ 7254: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var stringifyNumber = __nccwpck_require__(780); - -const intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value); -function intResolve(str, offset, radix, { intAsBigInt }) { - const sign = str[0]; - if (sign === '-' || sign === '+') - offset += 1; - str = str.substring(offset).replace(/_/g, ''); - if (intAsBigInt) { - switch (radix) { - case 2: - str = `0b${str}`; - break; - case 8: - str = `0o${str}`; - break; - case 16: - str = `0x${str}`; - break; - } - const n = BigInt(str); - return sign === '-' ? BigInt(-1) * n : n; - } - const n = parseInt(str, radix); - return sign === '-' ? -1 * n : n; -} -function intStringify(node, radix, prefix) { - const { value } = node; - if (intIdentify(value)) { - const str = value.toString(radix); - return value < 0 ? 
'-' + prefix + str.substr(1) : prefix + str; - } - return stringifyNumber.stringifyNumber(node); -} -const intBin = { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'BIN', - test: /^[-+]?0b[0-1_]+$/, - resolve: (str, _onError, opt) => intResolve(str, 2, 2, opt), - stringify: node => intStringify(node, 2, '0b') -}; -const intOct = { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'OCT', - test: /^[-+]?0[0-7_]+$/, - resolve: (str, _onError, opt) => intResolve(str, 1, 8, opt), - stringify: node => intStringify(node, 8, '0') -}; -const int = { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - test: /^[-+]?[0-9][0-9_]*$/, - resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt), - stringify: stringifyNumber.stringifyNumber -}; -const intHex = { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'HEX', - test: /^[-+]?0x[0-9a-fA-F_]+$/, - resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt), - stringify: node => intStringify(node, 16, '0x') -}; - -exports.int = int; -exports.intBin = intBin; -exports.intHex = intHex; -exports.intOct = intOct; - - -/***/ }), - -/***/ 4486: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var YAMLSeq = __nccwpck_require__(3810); -var toJS = __nccwpck_require__(8842); -var Node = __nccwpck_require__(9752); -var YAMLMap = __nccwpck_require__(6761); -var pairs = __nccwpck_require__(5169); - -class YAMLOMap extends YAMLSeq.YAMLSeq { - constructor() { - super(); - this.add = YAMLMap.YAMLMap.prototype.add.bind(this); - this.delete = YAMLMap.YAMLMap.prototype.delete.bind(this); - this.get = YAMLMap.YAMLMap.prototype.get.bind(this); - this.has = YAMLMap.YAMLMap.prototype.has.bind(this); - this.set = YAMLMap.YAMLMap.prototype.set.bind(this); - this.tag = YAMLOMap.tag; - } - /** - * If `ctx` is given, the return type is actually `Map`, - * but TypeScript won't allow widening the signature of a child method. 
- */ - toJSON(_, ctx) { - if (!ctx) - return super.toJSON(_); - const map = new Map(); - if (ctx?.onCreate) - ctx.onCreate(map); - for (const pair of this.items) { - let key, value; - if (Node.isPair(pair)) { - key = toJS.toJS(pair.key, '', ctx); - value = toJS.toJS(pair.value, key, ctx); - } - else { - key = toJS.toJS(pair, '', ctx); - } - if (map.has(key)) - throw new Error('Ordered maps must not include duplicate keys'); - map.set(key, value); - } - return map; - } -} -YAMLOMap.tag = 'tag:yaml.org,2002:omap'; -const omap = { - collection: 'seq', - identify: value => value instanceof Map, - nodeClass: YAMLOMap, - default: false, - tag: 'tag:yaml.org,2002:omap', - resolve(seq, onError) { - const pairs$1 = pairs.resolvePairs(seq, onError); - const seenKeys = []; - for (const { key } of pairs$1.items) { - if (Node.isScalar(key)) { - if (seenKeys.includes(key.value)) { - onError(`Ordered maps must not include duplicate keys: ${key.value}`); - } - else { - seenKeys.push(key.value); - } - } - } - return Object.assign(new YAMLOMap(), pairs$1); - }, - createNode(schema, iterable, ctx) { - const pairs$1 = pairs.createPairs(schema, iterable, ctx); - const omap = new YAMLOMap(); - omap.items = pairs$1.items; - return omap; - } -}; - -exports.YAMLOMap = YAMLOMap; -exports.omap = omap; - - -/***/ }), - -/***/ 5169: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); -var Pair = __nccwpck_require__(3497); -var Scalar = __nccwpck_require__(8160); -var YAMLSeq = __nccwpck_require__(3810); - -function resolvePairs(seq, onError) { - if (Node.isSeq(seq)) { - for (let i = 0; i < seq.items.length; ++i) { - let item = seq.items[i]; - if (Node.isPair(item)) - continue; - else if (Node.isMap(item)) { - if (item.items.length > 1) - onError('Each pair must have its own sequence indicator'); - const pair = item.items[0] || new Pair.Pair(new Scalar.Scalar(null)); - if (item.commentBefore) - pair.key.commentBefore = pair.key.commentBefore - ? `${item.commentBefore}\n${pair.key.commentBefore}` - : item.commentBefore; - if (item.comment) { - const cn = pair.value ?? pair.key; - cn.comment = cn.comment - ? `${item.comment}\n${cn.comment}` - : item.comment; - } - item = pair; - } - seq.items[i] = Node.isPair(item) ? 
item : new Pair.Pair(item); - } - } - else - onError('Expected a sequence for this tag'); - return seq; -} -function createPairs(schema, iterable, ctx) { - const { replacer } = ctx; - const pairs = new YAMLSeq.YAMLSeq(schema); - pairs.tag = 'tag:yaml.org,2002:pairs'; - let i = 0; - if (iterable && Symbol.iterator in Object(iterable)) - for (let it of iterable) { - if (typeof replacer === 'function') - it = replacer.call(iterable, String(i++), it); - let key, value; - if (Array.isArray(it)) { - if (it.length === 2) { - key = it[0]; - value = it[1]; - } - else - throw new TypeError(`Expected [key, value] tuple: ${it}`); - } - else if (it && it instanceof Object) { - const keys = Object.keys(it); - if (keys.length === 1) { - key = keys[0]; - value = it[key]; - } - else - throw new TypeError(`Expected { key: value } tuple: ${it}`); - } - else { - key = it; - } - pairs.items.push(Pair.createPair(key, value, ctx)); - } - return pairs; -} -const pairs = { - collection: 'seq', - default: false, - tag: 'tag:yaml.org,2002:pairs', - resolve: resolvePairs, - createNode: createPairs -}; - -exports.createPairs = createPairs; -exports.pairs = pairs; -exports.resolvePairs = resolvePairs; - - -/***/ }), - -/***/ 8536: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var map = __nccwpck_require__(1400); -var _null = __nccwpck_require__(7556); -var seq = __nccwpck_require__(6698); -var string = __nccwpck_require__(1576); -var binary = __nccwpck_require__(5758); -var bool = __nccwpck_require__(2684); -var float = __nccwpck_require__(8090); -var int = __nccwpck_require__(7254); -var omap = __nccwpck_require__(4486); -var pairs = __nccwpck_require__(5169); -var set = __nccwpck_require__(9549); -var timestamp = __nccwpck_require__(7717); - -const schema = [ - map.map, - seq.seq, - string.string, - _null.nullTag, - bool.trueTag, - bool.falseTag, - int.intBin, - int.intOct, - int.int, - int.intHex, - float.floatNaN, - float.floatExp, - float.float, - binary.binary, - omap.omap, - pairs.pairs, - set.set, - timestamp.intTime, - timestamp.floatTime, - timestamp.timestamp -]; - -exports.schema = schema; - - -/***/ }), - -/***/ 9549: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); -var Pair = __nccwpck_require__(3497); -var YAMLMap = __nccwpck_require__(6761); - -class YAMLSet extends YAMLMap.YAMLMap { - constructor(schema) { - super(schema); - this.tag = YAMLSet.tag; - } - add(key) { - let pair; - if (Node.isPair(key)) - pair = key; - else if (typeof key === 'object' && - 'key' in key && - 'value' in key && - key.value === null) - pair = new Pair.Pair(key.key, null); - else - pair = new Pair.Pair(key, null); - const prev = YAMLMap.findPair(this.items, pair.key); - if (!prev) - this.items.push(pair); - } - /** - * If `keepPair` is `true`, returns the Pair matching `key`. - * Otherwise, returns the value of that Pair's key. - */ - get(key, keepPair) { - const pair = YAMLMap.findPair(this.items, key); - return !keepPair && Node.isPair(pair) - ? Node.isScalar(pair.key) - ? 
pair.key.value - : pair.key - : pair; - } - set(key, value) { - if (typeof value !== 'boolean') - throw new Error(`Expected boolean value for set(key, value) in a YAML set, not ${typeof value}`); - const prev = YAMLMap.findPair(this.items, key); - if (prev && !value) { - this.items.splice(this.items.indexOf(prev), 1); - } - else if (!prev && value) { - this.items.push(new Pair.Pair(key)); - } - } - toJSON(_, ctx) { - return super.toJSON(_, ctx, Set); - } - toString(ctx, onComment, onChompKeep) { - if (!ctx) - return JSON.stringify(this); - if (this.hasAllNullValues(true)) - return super.toString(Object.assign({}, ctx, { allNullValues: true }), onComment, onChompKeep); - else - throw new Error('Set items must all have null values'); - } -} -YAMLSet.tag = 'tag:yaml.org,2002:set'; -const set = { - collection: 'map', - identify: value => value instanceof Set, - nodeClass: YAMLSet, - default: false, - tag: 'tag:yaml.org,2002:set', - resolve(map, onError) { - if (Node.isMap(map)) { - if (map.hasAllNullValues(true)) - return Object.assign(new YAMLSet(), map); - else - onError('Set items must all have null values'); - } - else - onError('Expected a mapping for this tag'); - return map; - }, - createNode(schema, iterable, ctx) { - const { replacer } = ctx; - const set = new YAMLSet(schema); - if (iterable && Symbol.iterator in Object(iterable)) - for (let value of iterable) { - if (typeof replacer === 'function') - value = replacer.call(iterable, value, value); - set.items.push(Pair.createPair(value, null, ctx)); - } - return set; - } -}; - -exports.YAMLSet = YAMLSet; -exports.set = set; - - -/***/ }), - -/***/ 7717: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var stringifyNumber = __nccwpck_require__(780); - -/** Internal types handle bigint as number, because TS can't figure it out. */ -function parseSexagesimal(str, asBigInt) { - const sign = str[0]; - const parts = sign === '-' || sign === '+' ? str.substring(1) : str; - const num = (n) => asBigInt ? BigInt(n) : Number(n); - const res = parts - .replace(/_/g, '') - .split(':') - .reduce((res, p) => res * num(60) + num(p), num(0)); - return (sign === '-' ? num(-1) * res : res); -} -/** - * hhhh:mm:ss.sss - * - * Internal types handle bigint as number, because TS can't figure it out. - */ -function stringifySexagesimal(node) { - let { value } = node; - let num = (n) => n; - if (typeof value === 'bigint') - num = n => BigInt(n); - else if (isNaN(value) || !isFinite(value)) - return stringifyNumber.stringifyNumber(node); - let sign = ''; - if (value < 0) { - sign = '-'; - value *= num(-1); - } - const _60 = num(60); - const parts = [value % _60]; // seconds, including ms - if (value < 60) { - parts.unshift(0); // at least one : is required - } - else { - value = (value - parts[0]) / _60; - parts.unshift(value % _60); // minutes - if (value >= 60) { - value = (value - parts[0]) / _60; - parts.unshift(value); // hours - } - } - return (sign + - parts - .map(n => (n < 10 ? 
'0' + String(n) : String(n))) - .join(':') - .replace(/000000\d*$/, '') // % 60 may introduce error - ); -} -const intTime = { - identify: value => typeof value === 'bigint' || Number.isInteger(value), - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'TIME', - test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+$/, - resolve: (str, _onError, { intAsBigInt }) => parseSexagesimal(str, intAsBigInt), - stringify: stringifySexagesimal -}; -const floatTime = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - format: 'TIME', - test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]*$/, - resolve: str => parseSexagesimal(str, false), - stringify: stringifySexagesimal -}; -const timestamp = { - identify: value => value instanceof Date, - default: true, - tag: 'tag:yaml.org,2002:timestamp', - // If the time zone is omitted, the timestamp is assumed to be specified in UTC. The time part - // may be omitted altogether, resulting in a date format. In such a case, the time part is - // assumed to be 00:00:00Z (start of day, UTC). - test: RegExp('^([0-9]{4})-([0-9]{1,2})-([0-9]{1,2})' + // YYYY-Mm-Dd - '(?:' + // time is optional - '(?:t|T|[ \\t]+)' + // t | T | whitespace - '([0-9]{1,2}):([0-9]{1,2}):([0-9]{1,2}(\\.[0-9]+)?)' + // Hh:Mm:Ss(.ss)? - '(?:[ \\t]*(Z|[-+][012]?[0-9](?::[0-9]{2})?))?' + // Z | +5 | -03:30 - ')?$'), - resolve(str) { - const match = str.match(timestamp.test); - if (!match) - throw new Error('!!timestamp expects a date, starting with yyyy-mm-dd'); - const [, year, month, day, hour, minute, second] = match.map(Number); - const millisec = match[7] ? Number((match[7] + '00').substr(1, 3)) : 0; - let date = Date.UTC(year, month - 1, day, hour || 0, minute || 0, second || 0, millisec); - const tz = match[8]; - if (tz && tz !== 'Z') { - let d = parseSexagesimal(tz, false); - if (Math.abs(d) < 30) - d *= 60; - date -= 60000 * d; - } - return new Date(date); - }, - stringify: ({ value }) => value.toISOString().replace(/((T00:00)?:00)?\.000Z$/, '') -}; - -exports.floatTime = floatTime; -exports.intTime = intTime; -exports.timestamp = timestamp; - - -/***/ }), - -/***/ 1883: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -const FOLD_FLOW = 'flow'; -const FOLD_BLOCK = 'block'; -const FOLD_QUOTED = 'quoted'; -/** - * Tries to keep input at up to `lineWidth` characters, splitting only on spaces - * not followed by newlines or spaces unless `mode` is `'quoted'`. Lines are - * terminated with `\n` and started with `indent`. 
- */ -function foldFlowLines(text, indent, mode = 'flow', { indentAtStart, lineWidth = 80, minContentWidth = 20, onFold, onOverflow } = {}) { - if (!lineWidth || lineWidth < 0) - return text; - const endStep = Math.max(1 + minContentWidth, 1 + lineWidth - indent.length); - if (text.length <= endStep) - return text; - const folds = []; - const escapedFolds = {}; - let end = lineWidth - indent.length; - if (typeof indentAtStart === 'number') { - if (indentAtStart > lineWidth - Math.max(2, minContentWidth)) - folds.push(0); - else - end = lineWidth - indentAtStart; - } - let split = undefined; - let prev = undefined; - let overflow = false; - let i = -1; - let escStart = -1; - let escEnd = -1; - if (mode === FOLD_BLOCK) { - i = consumeMoreIndentedLines(text, i); - if (i !== -1) - end = i + endStep; - } - for (let ch; (ch = text[(i += 1)]);) { - if (mode === FOLD_QUOTED && ch === '\\') { - escStart = i; - switch (text[i + 1]) { - case 'x': - i += 3; - break; - case 'u': - i += 5; - break; - case 'U': - i += 9; - break; - default: - i += 1; - } - escEnd = i; - } - if (ch === '\n') { - if (mode === FOLD_BLOCK) - i = consumeMoreIndentedLines(text, i); - end = i + endStep; - split = undefined; - } - else { - if (ch === ' ' && - prev && - prev !== ' ' && - prev !== '\n' && - prev !== '\t') { - // space surrounded by non-space can be replaced with newline + indent - const next = text[i + 1]; - if (next && next !== ' ' && next !== '\n' && next !== '\t') - split = i; - } - if (i >= end) { - if (split) { - folds.push(split); - end = split + endStep; - split = undefined; - } - else if (mode === FOLD_QUOTED) { - // white-space collected at end may stretch past lineWidth - while (prev === ' ' || prev === '\t') { - prev = ch; - ch = text[(i += 1)]; - overflow = true; - } - // Account for newline escape, but don't break preceding escape - const j = i > escEnd + 1 ? 
i - 2 : escStart - 1; - // Bail out if lineWidth & minContentWidth are shorter than an escape string - if (escapedFolds[j]) - return text; - folds.push(j); - escapedFolds[j] = true; - end = j + endStep; - split = undefined; - } - else { - overflow = true; - } - } - } - prev = ch; - } - if (overflow && onOverflow) - onOverflow(); - if (folds.length === 0) - return text; - if (onFold) - onFold(); - let res = text.slice(0, folds[0]); - for (let i = 0; i < folds.length; ++i) { - const fold = folds[i]; - const end = folds[i + 1] || text.length; - if (fold === 0) - res = `\n${indent}${text.slice(0, end)}`; - else { - if (mode === FOLD_QUOTED && escapedFolds[fold]) - res += `${text[fold]}\\`; - res += `\n${indent}${text.slice(fold + 1, end)}`; - } - } - return res; -} -/** - * Presumes `i + 1` is at the start of a line - * @returns index of last newline in more-indented block - */ -function consumeMoreIndentedLines(text, i) { - let ch = text[i + 1]; - while (ch === ' ' || ch === '\t') { - do { - ch = text[(i += 1)]; - } while (ch && ch !== '\n'); - ch = text[i + 1]; - } - return i; -} - -exports.FOLD_BLOCK = FOLD_BLOCK; -exports.FOLD_FLOW = FOLD_FLOW; -exports.FOLD_QUOTED = FOLD_QUOTED; -exports.foldFlowLines = foldFlowLines; - - -/***/ }), - -/***/ 1922: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var anchors = __nccwpck_require__(584); -var Node = __nccwpck_require__(9752); -var stringifyComment = __nccwpck_require__(5577); -var stringifyString = __nccwpck_require__(6084); - -function createStringifyContext(doc, options) { - const opt = Object.assign({ - blockQuote: true, - commentString: stringifyComment.stringifyComment, - defaultKeyType: null, - defaultStringType: 'PLAIN', - directives: null, - doubleQuotedAsJSON: false, - doubleQuotedMinMultiLineLength: 40, - falseStr: 'false', - indentSeq: true, - lineWidth: 80, - minContentWidth: 20, - nullStr: 'null', - simpleKeys: false, - singleQuote: null, - trueStr: 'true', - verifyAliasOrder: true - }, doc.schema.toStringOptions, options); - let inFlow; - switch (opt.collectionStyle) { - case 'block': - inFlow = false; - break; - case 'flow': - inFlow = true; - break; - default: - inFlow = null; - } - return { - anchors: new Set(), - doc, - indent: '', - indentStep: typeof opt.indent === 'number' ? ' '.repeat(opt.indent) : ' ', - inFlow, - options: opt - }; -} -function getTagObject(tags, item) { - if (item.tag) { - const match = tags.filter(t => t.tag === item.tag); - if (match.length > 0) - return match.find(t => t.format === item.format) ?? match[0]; - } - let tagObj = undefined; - let obj; - if (Node.isScalar(item)) { - obj = item.value; - const match = tags.filter(t => t.identify?.(obj)); - tagObj = - match.find(t => t.format === item.format) ?? match.find(t => !t.format); - } - else { - obj = item; - tagObj = tags.find(t => t.nodeClass && obj instanceof t.nodeClass); - } - if (!tagObj) { - const name = obj?.constructor?.name ?? typeof obj; - throw new Error(`Tag not resolved for ${name} value`); - } - return tagObj; -} -// needs to be called before value stringifier to allow for circular anchor refs -function stringifyProps(node, tagObj, { anchors: anchors$1, doc }) { - if (!doc.directives) - return ''; - const props = []; - const anchor = (Node.isScalar(node) || Node.isCollection(node)) && node.anchor; - if (anchor && anchors.anchorIsValid(anchor)) { - anchors$1.add(anchor); - props.push(`&${anchor}`); - } - const tag = node.tag ? node.tag : tagObj.default ? 
null : tagObj.tag; - if (tag) - props.push(doc.directives.tagString(tag)); - return props.join(' '); -} -function stringify(item, ctx, onComment, onChompKeep) { - if (Node.isPair(item)) - return item.toString(ctx, onComment, onChompKeep); - if (Node.isAlias(item)) { - if (ctx.doc.directives) - return item.toString(ctx); - if (ctx.resolvedAliases?.has(item)) { - throw new TypeError(`Cannot stringify circular structure without alias nodes`); - } - else { - if (ctx.resolvedAliases) - ctx.resolvedAliases.add(item); - else - ctx.resolvedAliases = new Set([item]); - item = item.resolve(ctx.doc); - } - } - let tagObj = undefined; - const node = Node.isNode(item) - ? item - : ctx.doc.createNode(item, { onTagObj: o => (tagObj = o) }); - if (!tagObj) - tagObj = getTagObject(ctx.doc.schema.tags, node); - const props = stringifyProps(node, tagObj, ctx); - if (props.length > 0) - ctx.indentAtStart = (ctx.indentAtStart ?? 0) + props.length + 1; - const str = typeof tagObj.stringify === 'function' - ? tagObj.stringify(node, ctx, onComment, onChompKeep) - : Node.isScalar(node) - ? stringifyString.stringifyString(node, ctx, onComment, onChompKeep) - : node.toString(ctx, onComment, onChompKeep); - if (!props) - return str; - return Node.isScalar(node) || str[0] === '{' || str[0] === '[' - ? `${props} ${str}` - : `${props}\n${ctx.indent}${str}`; -} - -exports.createStringifyContext = createStringifyContext; -exports.stringify = stringify; - - -/***/ }), - -/***/ 3541: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Collection = __nccwpck_require__(9797); -var Node = __nccwpck_require__(9752); -var stringify = __nccwpck_require__(1922); -var stringifyComment = __nccwpck_require__(5577); - -function stringifyCollection(collection, ctx, options) { - const flow = ctx.inFlow ?? collection.flow; - const stringify = flow ? stringifyFlowCollection : stringifyBlockCollection; - return stringify(collection, ctx, options); -} -function stringifyBlockCollection({ comment, items }, ctx, { blockItemPrefix, flowChars, itemIndent, onChompKeep, onComment }) { - const { indent, options: { commentString } } = ctx; - const itemCtx = Object.assign({}, ctx, { indent: itemIndent, type: null }); - let chompKeep = false; // flag for the preceding node's status - const lines = []; - for (let i = 0; i < items.length; ++i) { - const item = items[i]; - let comment = null; - if (Node.isNode(item)) { - if (!chompKeep && item.spaceBefore) - lines.push(''); - addCommentBefore(ctx, lines, item.commentBefore, chompKeep); - if (item.comment) - comment = item.comment; - } - else if (Node.isPair(item)) { - const ik = Node.isNode(item.key) ? item.key : null; - if (ik) { - if (!chompKeep && ik.spaceBefore) - lines.push(''); - addCommentBefore(ctx, lines, ik.commentBefore, chompKeep); - } - } - chompKeep = false; - let str = stringify.stringify(item, itemCtx, () => (comment = null), () => (chompKeep = true)); - if (comment) - str += stringifyComment.lineComment(str, itemIndent, commentString(comment)); - if (chompKeep && comment) - chompKeep = false; - lines.push(blockItemPrefix + str); - } - let str; - if (lines.length === 0) { - str = flowChars.start + flowChars.end; - } - else { - str = lines[0]; - for (let i = 1; i < lines.length; ++i) { - const line = lines[i]; - str += line ? 
`\n${indent}${line}` : '\n'; - } - } - if (comment) { - str += '\n' + stringifyComment.indentComment(commentString(comment), indent); - if (onComment) - onComment(); - } - else if (chompKeep && onChompKeep) - onChompKeep(); - return str; -} -function stringifyFlowCollection({ comment, items }, ctx, { flowChars, itemIndent, onComment }) { - const { indent, indentStep, options: { commentString } } = ctx; - itemIndent += indentStep; - const itemCtx = Object.assign({}, ctx, { - indent: itemIndent, - inFlow: true, - type: null - }); - let reqNewline = false; - let linesAtValue = 0; - const lines = []; - for (let i = 0; i < items.length; ++i) { - const item = items[i]; - let comment = null; - if (Node.isNode(item)) { - if (item.spaceBefore) - lines.push(''); - addCommentBefore(ctx, lines, item.commentBefore, false); - if (item.comment) - comment = item.comment; - } - else if (Node.isPair(item)) { - const ik = Node.isNode(item.key) ? item.key : null; - if (ik) { - if (ik.spaceBefore) - lines.push(''); - addCommentBefore(ctx, lines, ik.commentBefore, false); - if (ik.comment) - reqNewline = true; - } - const iv = Node.isNode(item.value) ? item.value : null; - if (iv) { - if (iv.comment) - comment = iv.comment; - if (iv.commentBefore) - reqNewline = true; - } - else if (item.value == null && ik && ik.comment) { - comment = ik.comment; - } - } - if (comment) - reqNewline = true; - let str = stringify.stringify(item, itemCtx, () => (comment = null)); - if (i < items.length - 1) - str += ','; - if (comment) - str += stringifyComment.lineComment(str, itemIndent, commentString(comment)); - if (!reqNewline && (lines.length > linesAtValue || str.includes('\n'))) - reqNewline = true; - lines.push(str); - linesAtValue = lines.length; - } - let str; - const { start, end } = flowChars; - if (lines.length === 0) { - str = start + end; - } - else { - if (!reqNewline) { - const len = lines.reduce((sum, line) => sum + line.length + 2, 2); - reqNewline = len > Collection.Collection.maxFlowStringSingleLineLength; - } - if (reqNewline) { - str = start; - for (const line of lines) - str += line ? `\n${indentStep}${indent}${line}` : '\n'; - str += `\n${indent}${end}`; - } - else { - str = `${start} ${lines.join(' ')} ${end}`; - } - } - if (comment) { - str += stringifyComment.lineComment(str, commentString(comment), indent); - if (onComment) - onComment(); - } - return str; -} -function addCommentBefore({ indent, options: { commentString } }, lines, comment, chompKeep) { - if (comment && chompKeep) - comment = comment.replace(/^\n+/, ''); - if (comment) { - const ic = stringifyComment.indentComment(commentString(comment), indent); - lines.push(ic.trimStart()); // Avoid double indent on first line - } -} - -exports.stringifyCollection = stringifyCollection; - - -/***/ }), - -/***/ 5577: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -/** - * Stringifies a comment. - * - * Empty comment lines are left empty, - * lines consisting of a single space are replaced by `#`, - * and all other lines are prefixed with a `#`. - */ -const stringifyComment = (str) => str.replace(/^(?!$)(?: $)?/gm, '#'); -function indentComment(comment, indent) { - if (/^\n+$/.test(comment)) - return comment.substring(1); - return indent ? comment.replace(/^(?! *$)/gm, indent) : comment; -} -const lineComment = (str, indent, comment) => str.endsWith('\n') - ? indentComment(comment, indent) - : comment.includes('\n') - ? '\n' + indentComment(comment, indent) - : (str.endsWith(' ') ? 
'' : ' ') + comment; - -exports.indentComment = indentComment; -exports.lineComment = lineComment; -exports.stringifyComment = stringifyComment; - - -/***/ }), - -/***/ 9615: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); -var stringify = __nccwpck_require__(1922); -var stringifyComment = __nccwpck_require__(5577); - -function stringifyDocument(doc, options) { - const lines = []; - let hasDirectives = options.directives === true; - if (options.directives !== false && doc.directives) { - const dir = doc.directives.toString(doc); - if (dir) { - lines.push(dir); - hasDirectives = true; - } - else if (doc.directives.docStart) - hasDirectives = true; - } - if (hasDirectives) - lines.push('---'); - const ctx = stringify.createStringifyContext(doc, options); - const { commentString } = ctx.options; - if (doc.commentBefore) { - if (lines.length !== 1) - lines.unshift(''); - const cs = commentString(doc.commentBefore); - lines.unshift(stringifyComment.indentComment(cs, '')); - } - let chompKeep = false; - let contentComment = null; - if (doc.contents) { - if (Node.isNode(doc.contents)) { - if (doc.contents.spaceBefore && hasDirectives) - lines.push(''); - if (doc.contents.commentBefore) { - const cs = commentString(doc.contents.commentBefore); - lines.push(stringifyComment.indentComment(cs, '')); - } - // top-level block scalars need to be indented if followed by a comment - ctx.forceBlockIndent = !!doc.comment; - contentComment = doc.contents.comment; - } - const onChompKeep = contentComment ? undefined : () => (chompKeep = true); - let body = stringify.stringify(doc.contents, ctx, () => (contentComment = null), onChompKeep); - if (contentComment) - body += stringifyComment.lineComment(body, '', commentString(contentComment)); - if ((body[0] === '|' || body[0] === '>') && - lines[lines.length - 1] === '---') { - // Top-level block scalars with a preceding doc marker ought to use the - // same line for their header. - lines[lines.length - 1] = `--- ${body}`; - } - else - lines.push(body); - } - else { - lines.push(stringify.stringify(doc.contents, ctx)); - } - if (doc.directives?.docEnd) { - if (doc.comment) { - const cs = commentString(doc.comment); - if (cs.includes('\n')) { - lines.push('...'); - lines.push(stringifyComment.indentComment(cs, '')); - } - else { - lines.push(`... ${cs}`); - } - } - else { - lines.push('...'); - } - } - else { - let dc = doc.comment; - if (dc && chompKeep) - dc = dc.replace(/^\n+/, ''); - if (dc) { - if ((!chompKeep || contentComment) && lines[lines.length - 1] !== '') - lines.push(''); - lines.push(stringifyComment.indentComment(commentString(dc), '')); - } - } - return lines.join('\n') + '\n'; -} - -exports.stringifyDocument = stringifyDocument; - - -/***/ }), - -/***/ 780: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -function stringifyNumber({ format, minFractionDigits, tag, value }) { - if (typeof value === 'bigint') - return String(value); - const num = typeof value === 'number' ? value : Number(value); - if (!isFinite(num)) - return isNaN(num) ? '.nan' : num < 0 ? 
'-.inf' : '.inf'; - let n = JSON.stringify(value); - if (!format && - minFractionDigits && - (!tag || tag === 'tag:yaml.org,2002:float') && - /^\d/.test(n)) { - let i = n.indexOf('.'); - if (i < 0) { - i = n.length; - n += '.'; - } - let d = minFractionDigits - (n.length - i - 1); - while (d-- > 0) - n += '0'; - } - return n; -} - -exports.stringifyNumber = stringifyNumber; - - -/***/ }), - -/***/ 10: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); -var Scalar = __nccwpck_require__(8160); -var stringify = __nccwpck_require__(1922); -var stringifyComment = __nccwpck_require__(5577); - -function stringifyPair({ key, value }, ctx, onComment, onChompKeep) { - const { allNullValues, doc, indent, indentStep, options: { commentString, indentSeq, simpleKeys } } = ctx; - let keyComment = (Node.isNode(key) && key.comment) || null; - if (simpleKeys) { - if (keyComment) { - throw new Error('With simple keys, key nodes cannot have comments'); - } - if (Node.isCollection(key)) { - const msg = 'With simple keys, collection cannot be used as a key value'; - throw new Error(msg); - } - } - let explicitKey = !simpleKeys && - (!key || - (keyComment && value == null && !ctx.inFlow) || - Node.isCollection(key) || - (Node.isScalar(key) - ? key.type === Scalar.Scalar.BLOCK_FOLDED || key.type === Scalar.Scalar.BLOCK_LITERAL - : typeof key === 'object')); - ctx = Object.assign({}, ctx, { - allNullValues: false, - implicitKey: !explicitKey && (simpleKeys || !allNullValues), - indent: indent + indentStep - }); - let keyCommentDone = false; - let chompKeep = false; - let str = stringify.stringify(key, ctx, () => (keyCommentDone = true), () => (chompKeep = true)); - if (!explicitKey && !ctx.inFlow && str.length > 1024) { - if (simpleKeys) - throw new Error('With simple keys, single line scalar must not span more than 1024 characters'); - explicitKey = true; - } - if (ctx.inFlow) { - if (allNullValues || value == null) { - if (keyCommentDone && onComment) - onComment(); - return str === '' ? '?' : explicitKey ? `? ${str}` : str; - } - } - else if ((allNullValues && !simpleKeys) || (value == null && explicitKey)) { - str = `? ${str}`; - if (keyComment && !keyCommentDone) { - str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment)); - } - else if (chompKeep && onChompKeep) - onChompKeep(); - return str; - } - if (keyCommentDone) - keyComment = null; - if (explicitKey) { - if (keyComment) - str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment)); - str = `? 
${str}\n${indent}:`; - } - else { - str = `${str}:`; - if (keyComment) - str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment)); - } - let vcb = ''; - let valueComment = null; - if (Node.isNode(value)) { - if (value.spaceBefore) - vcb = '\n'; - if (value.commentBefore) { - const cs = commentString(value.commentBefore); - vcb += `\n${stringifyComment.indentComment(cs, ctx.indent)}`; - } - valueComment = value.comment; - } - else if (value && typeof value === 'object') { - value = doc.createNode(value); - } - ctx.implicitKey = false; - if (!explicitKey && !keyComment && Node.isScalar(value)) - ctx.indentAtStart = str.length + 1; - chompKeep = false; - if (!indentSeq && - indentStep.length >= 2 && - !ctx.inFlow && - !explicitKey && - Node.isSeq(value) && - !value.flow && - !value.tag && - !value.anchor) { - // If indentSeq === false, consider '- ' as part of indentation where possible - ctx.indent = ctx.indent.substr(2); - } - let valueCommentDone = false; - const valueStr = stringify.stringify(value, ctx, () => (valueCommentDone = true), () => (chompKeep = true)); - let ws = ' '; - if (vcb || keyComment) { - if (valueStr === '' && !ctx.inFlow) - ws = vcb === '\n' ? '\n\n' : vcb; - else - ws = `${vcb}\n${ctx.indent}`; - } - else if (!explicitKey && Node.isCollection(value)) { - const flow = valueStr[0] === '[' || valueStr[0] === '{'; - if (!flow || valueStr.includes('\n')) - ws = `\n${ctx.indent}`; - } - else if (valueStr === '' || valueStr[0] === '\n') - ws = ''; - str += ws + valueStr; - if (ctx.inFlow) { - if (valueCommentDone && onComment) - onComment(); - } - else if (valueComment && !valueCommentDone) { - str += stringifyComment.lineComment(str, ctx.indent, commentString(valueComment)); - } - else if (chompKeep && onChompKeep) { - onChompKeep(); - } - return str; -} - -exports.stringifyPair = stringifyPair; - - -/***/ }), - -/***/ 6084: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Scalar = __nccwpck_require__(8160); -var foldFlowLines = __nccwpck_require__(1883); - -const getFoldOptions = (ctx) => ({ - indentAtStart: ctx.indentAtStart, - lineWidth: ctx.options.lineWidth, - minContentWidth: ctx.options.minContentWidth -}); -// Also checks for lines starting with %, as parsing the output as YAML 1.1 will -// presume that's starting a new document. -const containsDocumentMarker = (str) => /^(%|---|\.\.\.)/m.test(str); -function lineLengthOverLimit(str, lineWidth, indentLength) { - if (!lineWidth || lineWidth < 0) - return false; - const limit = lineWidth - indentLength; - const strLen = str.length; - if (strLen <= limit) - return false; - for (let i = 0, start = 0; i < strLen; ++i) { - if (str[i] === '\n') { - if (i - start > limit) - return true; - start = i + 1; - if (strLen - start <= limit) - return false; - } - } - return true; -} -function doubleQuotedString(value, ctx) { - const json = JSON.stringify(value); - if (ctx.options.doubleQuotedAsJSON) - return json; - const { implicitKey } = ctx; - const minMultiLineLength = ctx.options.doubleQuotedMinMultiLineLength; - const indent = ctx.indent || (containsDocumentMarker(value) ? 
' ' : ''); - let str = ''; - let start = 0; - for (let i = 0, ch = json[i]; ch; ch = json[++i]) { - if (ch === ' ' && json[i + 1] === '\\' && json[i + 2] === 'n') { - // space before newline needs to be escaped to not be folded - str += json.slice(start, i) + '\\ '; - i += 1; - start = i; - ch = '\\'; - } - if (ch === '\\') - switch (json[i + 1]) { - case 'u': - { - str += json.slice(start, i); - const code = json.substr(i + 2, 4); - switch (code) { - case '0000': - str += '\\0'; - break; - case '0007': - str += '\\a'; - break; - case '000b': - str += '\\v'; - break; - case '001b': - str += '\\e'; - break; - case '0085': - str += '\\N'; - break; - case '00a0': - str += '\\_'; - break; - case '2028': - str += '\\L'; - break; - case '2029': - str += '\\P'; - break; - default: - if (code.substr(0, 2) === '00') - str += '\\x' + code.substr(2); - else - str += json.substr(i, 6); - } - i += 5; - start = i + 1; - } - break; - case 'n': - if (implicitKey || - json[i + 2] === '"' || - json.length < minMultiLineLength) { - i += 1; - } - else { - // folding will eat first newline - str += json.slice(start, i) + '\n\n'; - while (json[i + 2] === '\\' && - json[i + 3] === 'n' && - json[i + 4] !== '"') { - str += '\n'; - i += 2; - } - str += indent; - // space after newline needs to be escaped to not be folded - if (json[i + 2] === ' ') - str += '\\'; - i += 1; - start = i + 1; - } - break; - default: - i += 1; - } - } - str = start ? str + json.slice(start) : json; - return implicitKey - ? str - : foldFlowLines.foldFlowLines(str, indent, foldFlowLines.FOLD_QUOTED, getFoldOptions(ctx)); -} -function singleQuotedString(value, ctx) { - if (ctx.options.singleQuote === false || - (ctx.implicitKey && value.includes('\n')) || - /[ \t]\n|\n[ \t]/.test(value) // single quoted string can't have leading or trailing whitespace around newline - ) - return doubleQuotedString(value, ctx); - const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : ''); - const res = "'" + value.replace(/'/g, "''").replace(/\n+/g, `$&\n${indent}`) + "'"; - return ctx.implicitKey - ? res - : foldFlowLines.foldFlowLines(res, indent, foldFlowLines.FOLD_FLOW, getFoldOptions(ctx)); -} -function quotedString(value, ctx) { - const { singleQuote } = ctx.options; - let qs; - if (singleQuote === false) - qs = doubleQuotedString; - else { - const hasDouble = value.includes('"'); - const hasSingle = value.includes("'"); - if (hasDouble && !hasSingle) - qs = singleQuotedString; - else if (hasSingle && !hasDouble) - qs = doubleQuotedString; - else - qs = singleQuote ? singleQuotedString : doubleQuotedString; - } - return qs(value, ctx); -} -function blockString({ comment, type, value }, ctx, onComment, onChompKeep) { - const { blockQuote, commentString, lineWidth } = ctx.options; - // 1. Block can't end in whitespace unless the last line is non-empty. - // 2. Strings consisting of only whitespace are best rendered explicitly. - if (!blockQuote || /\n[\t ]+$/.test(value) || /^\s*$/.test(value)) { - return quotedString(value, ctx); - } - const indent = ctx.indent || - (ctx.forceBlockIndent || containsDocumentMarker(value) ? ' ' : ''); - const literal = blockQuote === 'literal' - ? true - : blockQuote === 'folded' || type === Scalar.Scalar.BLOCK_FOLDED - ? false - : type === Scalar.Scalar.BLOCK_LITERAL - ? true - : !lineLengthOverLimit(value, lineWidth, indent.length); - if (!value) - return literal ? 
'|\n' : '>\n'; - // determine chomping from whitespace at value end - let chomp; - let endStart; - for (endStart = value.length; endStart > 0; --endStart) { - const ch = value[endStart - 1]; - if (ch !== '\n' && ch !== '\t' && ch !== ' ') - break; - } - let end = value.substring(endStart); - const endNlPos = end.indexOf('\n'); - if (endNlPos === -1) { - chomp = '-'; // strip - } - else if (value === end || endNlPos !== end.length - 1) { - chomp = '+'; // keep - if (onChompKeep) - onChompKeep(); - } - else { - chomp = ''; // clip - } - if (end) { - value = value.slice(0, -end.length); - if (end[end.length - 1] === '\n') - end = end.slice(0, -1); - end = end.replace(/\n+(?!\n|$)/g, `$&${indent}`); - } - // determine indent indicator from whitespace at value start - let startWithSpace = false; - let startEnd; - let startNlPos = -1; - for (startEnd = 0; startEnd < value.length; ++startEnd) { - const ch = value[startEnd]; - if (ch === ' ') - startWithSpace = true; - else if (ch === '\n') - startNlPos = startEnd; - else - break; - } - let start = value.substring(0, startNlPos < startEnd ? startNlPos + 1 : startEnd); - if (start) { - value = value.substring(start.length); - start = start.replace(/\n+/g, `$&${indent}`); - } - const indentSize = indent ? '2' : '1'; // root is at -1 - let header = (literal ? '|' : '>') + (startWithSpace ? indentSize : '') + chomp; - if (comment) { - header += ' ' + commentString(comment.replace(/ ?[\r\n]+/g, ' ')); - if (onComment) - onComment(); - } - if (literal) { - value = value.replace(/\n+/g, `$&${indent}`); - return `${header}\n${indent}${start}${value}${end}`; - } - value = value - .replace(/\n+/g, '\n$&') - .replace(/(?:^|\n)([\t ].*)(?:([\n\t ]*)\n(?![\n\t ]))?/g, '$1$2') // more-indented lines aren't folded - // ^ more-ind. ^ empty ^ capture next empty lines only at end of indent - .replace(/\n+/g, `$&${indent}`); - const body = foldFlowLines.foldFlowLines(`${start}${value}${end}`, indent, foldFlowLines.FOLD_BLOCK, getFoldOptions(ctx)); - return `${header}\n${indent}${body}`; -} -function plainString(item, ctx, onComment, onChompKeep) { - const { type, value } = item; - const { actualString, implicitKey, indent, inFlow } = ctx; - if ((implicitKey && /[\n[\]{},]/.test(value)) || - (inFlow && /[[\]{},]/.test(value))) { - return quotedString(value, ctx); - } - if (!value || - /^[\n\t ,[\]{}#&*!|>'"%@`]|^[?-]$|^[?-][ \t]|[\n:][ \t]|[ \t]\n|[\n\t ]#|[\n\t :]$/.test(value)) { - // not allowed: - // - empty string, '-' or '?' - // - start with an indicator character (except [?:-]) or /[?-] / - // - '\n ', ': ' or ' \n' anywhere - // - '#' not preceded by a non-space char - // - end with ' ' or ':' - return implicitKey || inFlow || !value.includes('\n') - ? quotedString(value, ctx) - : blockString(item, ctx, onComment, onChompKeep); - } - if (!implicitKey && - !inFlow && - type !== Scalar.Scalar.PLAIN && - value.includes('\n')) { - // Where allowed & type not set explicitly, prefer block style for multiline strings - return blockString(item, ctx, onComment, onChompKeep); - } - if (indent === '' && containsDocumentMarker(value)) { - ctx.forceBlockIndent = true; - return blockString(item, ctx, onComment, onChompKeep); - } - const str = value.replace(/\n+/g, `$&\n${indent}`); - // Verify that output will be parsed as a string, as e.g. plain numbers and - // booleans get parsed with those types in v1.2 (e.g. '42', 'true' & '0.9e-3'), - // and others in v1.1. 
- if (actualString) { - const test = (tag) => tag.default && tag.tag !== 'tag:yaml.org,2002:str' && tag.test?.test(str); - const { compat, tags } = ctx.doc.schema; - if (tags.some(test) || compat?.some(test)) - return quotedString(value, ctx); - } - return implicitKey - ? str - : foldFlowLines.foldFlowLines(str, indent, foldFlowLines.FOLD_FLOW, getFoldOptions(ctx)); -} -function stringifyString(item, ctx, onComment, onChompKeep) { - const { implicitKey, inFlow } = ctx; - const ss = typeof item.value === 'string' - ? item - : Object.assign({}, item, { value: String(item.value) }); - let { type } = item; - if (type !== Scalar.Scalar.QUOTE_DOUBLE) { - // force double quotes on control characters & unpaired surrogates - if (/[\x00-\x08\x0b-\x1f\x7f-\x9f\u{D800}-\u{DFFF}]/u.test(ss.value)) - type = Scalar.Scalar.QUOTE_DOUBLE; - } - const _stringify = (_type) => { - switch (_type) { - case Scalar.Scalar.BLOCK_FOLDED: - case Scalar.Scalar.BLOCK_LITERAL: - return implicitKey || inFlow - ? quotedString(ss.value, ctx) // blocks are not valid inside flow containers - : blockString(ss, ctx, onComment, onChompKeep); - case Scalar.Scalar.QUOTE_DOUBLE: - return doubleQuotedString(ss.value, ctx); - case Scalar.Scalar.QUOTE_SINGLE: - return singleQuotedString(ss.value, ctx); - case Scalar.Scalar.PLAIN: - return plainString(ss, ctx, onComment, onChompKeep); - default: - return null; - } - }; - let res = _stringify(type); - if (res === null) { - const { defaultKeyType, defaultStringType } = ctx.options; - const t = (implicitKey && defaultKeyType) || defaultStringType; - res = _stringify(t); - if (res === null) - throw new Error(`Unsupported default string type ${t}`); - } - return res; -} - -exports.stringifyString = stringifyString; - - -/***/ }), - -/***/ 8234: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); - -const BREAK = Symbol('break visit'); -const SKIP = Symbol('skip children'); -const REMOVE = Symbol('remove node'); -/** - * Apply a visitor to an AST node or document. - * - * Walks through the tree (depth-first) starting from `node`, calling a - * `visitor` function with three arguments: - * - `key`: For sequence values and map `Pair`, the node's index in the - * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly. - * `null` for the root node. - * - `node`: The current node. - * - `path`: The ancestry of the current node. - * - * The return value of the visitor may be used to control the traversal: - * - `undefined` (default): Do nothing and continue - * - `visit.SKIP`: Do not visit the children of this node, continue with next - * sibling - * - `visit.BREAK`: Terminate traversal completely - * - `visit.REMOVE`: Remove the current node, then continue with the next one - * - `Node`: Replace the current node, then continue by visiting it - * - `number`: While iterating the items of a sequence or map, set the index - * of the next step. This is useful especially if the index of the current - * node has changed. - * - * If `visitor` is a single function, it will be called with all values - * encountered in the tree, including e.g. `null` values. Alternatively, - * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`, - * `Alias` and `Scalar` node. To define the same visitor function for more than - * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar) - * and `Node` (alias, map, seq & scalar) targets. 
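The string-stringification helpers above are driven by documented stringify options of the bundled yaml package (blockQuote, singleQuote, doubleQuotedAsJSON, defaultStringType). A minimal TypeScript sketch of how those options select between the block and quoted styles; the option names are taken from the code above, and the exact output may vary by yaml version.

import { stringify } from 'yaml';

const data = { msg: 'first line\nsecond line\n' };

// Multiline strings default to a block scalar; blockQuote: 'literal' forces
// the '|' style emitted by blockString() above.
console.log(stringify(data, { blockQuote: 'literal' }));

// Disabling block scalars and single quotes falls back to the
// doubleQuotedString() path.
console.log(stringify(data, { blockQuote: false, singleQuote: false }));

// doubleQuotedAsJSON skips the YAML-specific escapes (\e, \N, \_, ...) and
// keeps plain JSON string escaping.
console.log(stringify(data, { blockQuote: false, doubleQuotedAsJSON: true }));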
Of all these, only the most - * specific defined one will be used for each node. - */ -function visit(node, visitor) { - const visitor_ = initVisitor(visitor); - if (Node.isDocument(node)) { - const cd = visit_(null, node.contents, visitor_, Object.freeze([node])); - if (cd === REMOVE) - node.contents = null; - } - else - visit_(null, node, visitor_, Object.freeze([])); -} -// Without the `as symbol` casts, TS declares these in the `visit` -// namespace using `var`, but then complains about that because -// `unique symbol` must be `const`. -/** Terminate visit traversal completely */ -visit.BREAK = BREAK; -/** Do not visit the children of the current node */ -visit.SKIP = SKIP; -/** Remove the current node */ -visit.REMOVE = REMOVE; -function visit_(key, node, visitor, path) { - const ctrl = callVisitor(key, node, visitor, path); - if (Node.isNode(ctrl) || Node.isPair(ctrl)) { - replaceNode(key, path, ctrl); - return visit_(key, ctrl, visitor, path); - } - if (typeof ctrl !== 'symbol') { - if (Node.isCollection(node)) { - path = Object.freeze(path.concat(node)); - for (let i = 0; i < node.items.length; ++i) { - const ci = visit_(i, node.items[i], visitor, path); - if (typeof ci === 'number') - i = ci - 1; - else if (ci === BREAK) - return BREAK; - else if (ci === REMOVE) { - node.items.splice(i, 1); - i -= 1; - } - } - } - else if (Node.isPair(node)) { - path = Object.freeze(path.concat(node)); - const ck = visit_('key', node.key, visitor, path); - if (ck === BREAK) - return BREAK; - else if (ck === REMOVE) - node.key = null; - const cv = visit_('value', node.value, visitor, path); - if (cv === BREAK) - return BREAK; - else if (cv === REMOVE) - node.value = null; - } - } - return ctrl; -} -/** - * Apply an async visitor to an AST node or document. - * - * Walks through the tree (depth-first) starting from `node`, calling a - * `visitor` function with three arguments: - * - `key`: For sequence values and map `Pair`, the node's index in the - * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly. - * `null` for the root node. - * - `node`: The current node. - * - `path`: The ancestry of the current node. - * - * The return value of the visitor may be used to control the traversal: - * - `Promise`: Must resolve to one of the following values - * - `undefined` (default): Do nothing and continue - * - `visit.SKIP`: Do not visit the children of this node, continue with next - * sibling - * - `visit.BREAK`: Terminate traversal completely - * - `visit.REMOVE`: Remove the current node, then continue with the next one - * - `Node`: Replace the current node, then continue by visiting it - * - `number`: While iterating the items of a sequence or map, set the index - * of the next step. This is useful especially if the index of the current - * node has changed. - * - * If `visitor` is a single function, it will be called with all values - * encountered in the tree, including e.g. `null` values. Alternatively, - * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`, - * `Alias` and `Scalar` node. To define the same visitor function for more than - * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar) - * and `Node` (alias, map, seq & scalar) targets. Of all these, only the most - * specific defined one will be used for each node. 
- */ -async function visitAsync(node, visitor) { - const visitor_ = initVisitor(visitor); - if (Node.isDocument(node)) { - const cd = await visitAsync_(null, node.contents, visitor_, Object.freeze([node])); - if (cd === REMOVE) - node.contents = null; - } - else - await visitAsync_(null, node, visitor_, Object.freeze([])); -} -// Without the `as symbol` casts, TS declares these in the `visit` -// namespace using `var`, but then complains about that because -// `unique symbol` must be `const`. -/** Terminate visit traversal completely */ -visitAsync.BREAK = BREAK; -/** Do not visit the children of the current node */ -visitAsync.SKIP = SKIP; -/** Remove the current node */ -visitAsync.REMOVE = REMOVE; -async function visitAsync_(key, node, visitor, path) { - const ctrl = await callVisitor(key, node, visitor, path); - if (Node.isNode(ctrl) || Node.isPair(ctrl)) { - replaceNode(key, path, ctrl); - return visitAsync_(key, ctrl, visitor, path); - } - if (typeof ctrl !== 'symbol') { - if (Node.isCollection(node)) { - path = Object.freeze(path.concat(node)); - for (let i = 0; i < node.items.length; ++i) { - const ci = await visitAsync_(i, node.items[i], visitor, path); - if (typeof ci === 'number') - i = ci - 1; - else if (ci === BREAK) - return BREAK; - else if (ci === REMOVE) { - node.items.splice(i, 1); - i -= 1; - } - } - } - else if (Node.isPair(node)) { - path = Object.freeze(path.concat(node)); - const ck = await visitAsync_('key', node.key, visitor, path); - if (ck === BREAK) - return BREAK; - else if (ck === REMOVE) - node.key = null; - const cv = await visitAsync_('value', node.value, visitor, path); - if (cv === BREAK) - return BREAK; - else if (cv === REMOVE) - node.value = null; - } - } - return ctrl; -} -function initVisitor(visitor) { - if (typeof visitor === 'object' && - (visitor.Collection || visitor.Node || visitor.Value)) { - return Object.assign({ - Alias: visitor.Node, - Map: visitor.Node, - Scalar: visitor.Node, - Seq: visitor.Node - }, visitor.Value && { - Map: visitor.Value, - Scalar: visitor.Value, - Seq: visitor.Value - }, visitor.Collection && { - Map: visitor.Collection, - Seq: visitor.Collection - }, visitor); - } - return visitor; -} -function callVisitor(key, node, visitor, path) { - if (typeof visitor === 'function') - return visitor(key, node, path); - if (Node.isMap(node)) - return visitor.Map?.(key, node, path); - if (Node.isSeq(node)) - return visitor.Seq?.(key, node, path); - if (Node.isPair(node)) - return visitor.Pair?.(key, node, path); - if (Node.isScalar(node)) - return visitor.Scalar?.(key, node, path); - if (Node.isAlias(node)) - return visitor.Alias?.(key, node, path); - return undefined; -} -function replaceNode(key, path, node) { - const parent = path[path.length - 1]; - if (Node.isCollection(parent)) { - parent.items[key] = node; - } - else if (Node.isPair(parent)) { - if (key === 'key') - parent.key = node; - else - parent.value = node; - } - else if (Node.isDocument(parent)) { - parent.contents = node; - } - else { - const pt = Node.isAlias(parent) ? 
'alias' : 'scalar'; - throw new Error(`Cannot replace node with ${pt} parent`); - } -} - -exports.visit = visit; -exports.visitAsync = visitAsync; - - -/***/ }), - -/***/ 2877: -/***/ ((module) => { - -module.exports = eval("require")("encoding"); - - -/***/ }), - -/***/ 2228: -/***/ ((module) => { - -"use strict"; -module.exports = JSON.parse('{"$id":"https://raw.githubusercontent.com/ajv-validator/ajv/master/lib/refs/data.json#","description":"Meta-schema for $data reference (JSON AnySchema extension proposal)","type":"object","required":["$data"],"properties":{"$data":{"type":"string","anyOf":[{"format":"relative-json-pointer"},{"format":"json-pointer"}]}},"additionalProperties":false}'); - -/***/ }), - -/***/ 278: -/***/ ((module) => { - -"use strict"; -module.exports = JSON.parse('{"$schema":"http://json-schema.org/draft-07/schema#","$id":"http://json-schema.org/draft-07/schema#","title":"Core schema meta-schema","definitions":{"schemaArray":{"type":"array","minItems":1,"items":{"$ref":"#"}},"nonNegativeInteger":{"type":"integer","minimum":0},"nonNegativeIntegerDefault0":{"allOf":[{"$ref":"#/definitions/nonNegativeInteger"},{"default":0}]},"simpleTypes":{"enum":["array","boolean","integer","null","number","object","string"]},"stringArray":{"type":"array","items":{"type":"string"},"uniqueItems":true,"default":[]}},"type":["object","boolean"],"properties":{"$id":{"type":"string","format":"uri-reference"},"$schema":{"type":"string","format":"uri"},"$ref":{"type":"string","format":"uri-reference"},"$comment":{"type":"string"},"title":{"type":"string"},"description":{"type":"string"},"default":true,"readOnly":{"type":"boolean","default":false},"examples":{"type":"array","items":true},"multipleOf":{"type":"number","exclusiveMinimum":0},"maximum":{"type":"number"},"exclusiveMaximum":{"type":"number"},"minimum":{"type":"number"},"exclusiveMinimum":{"type":"number"},"maxLength":{"$ref":"#/definitions/nonNegativeInteger"},"minLength":{"$ref":"#/definitions/nonNegativeIntegerDefault0"},"pattern":{"type":"string","format":"regex"},"additionalItems":{"$ref":"#"},"items":{"anyOf":[{"$ref":"#"},{"$ref":"#/definitions/schemaArray"}],"default":true},"maxItems":{"$ref":"#/definitions/nonNegativeInteger"},"minItems":{"$ref":"#/definitions/nonNegativeIntegerDefault0"},"uniqueItems":{"type":"boolean","default":false},"contains":{"$ref":"#"},"maxProperties":{"$ref":"#/definitions/nonNegativeInteger"},"minProperties":{"$ref":"#/definitions/nonNegativeIntegerDefault0"},"required":{"$ref":"#/definitions/stringArray"},"additionalProperties":{"$ref":"#"},"definitions":{"type":"object","additionalProperties":{"$ref":"#"},"default":{}},"properties":{"type":"object","additionalProperties":{"$ref":"#"},"default":{}},"patternProperties":{"type":"object","additionalProperties":{"$ref":"#"},"propertyNames":{"format":"regex"},"default":{}},"dependencies":{"type":"object","additionalProperties":{"anyOf":[{"$ref":"#"},{"$ref":"#/definitions/stringArray"}]}},"propertyNames":{"$ref":"#"},"const":true,"enum":{"type":"array","items":true,"minItems":1,"uniqueItems":true},"type":{"anyOf":[{"$ref":"#/definitions/simpleTypes"},{"type":"array","items":{"$ref":"#/definitions/simpleTypes"},"minItems":1,"uniqueItems":true}]},"format":{"type":"string"},"contentMediaType":{"type":"string"},"contentEncoding":{"type":"string"},"if":{"$ref":"#"},"then":{"$ref":"#"},"else":{"$ref":"#"},"allOf":{"$ref":"#/definitions/schemaArray"},"anyOf":{"$ref":"#/definitions/schemaArray"},"oneOf":{"$ref":"#/definitions/schemaArray"},"not":{"$ref":"#"}},"
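The visitor API whose implementation is deleted above (visit / visitAsync with the BREAK, SKIP and REMOVE controls) is also exposed by the yaml package itself. A small TypeScript usage sketch, assuming yaml v2 as bundled here; the document contents and field names are made up for illustration.

import { parseDocument, visit, isScalar } from 'yaml';

const doc = parseDocument('a: 1\nb: [2, 3]\nsecret: hunter2\n');

visit(doc, {
  // Returning visit.REMOVE drops the current pair and continues with the next.
  Pair(_key, pair) {
    if (isScalar(pair.key) && pair.key.value === 'secret') return visit.REMOVE;
  },
  // Mutating a node in place and returning nothing continues the walk.
  Scalar(_key, node) {
    if (typeof node.value === 'number') node.value = node.value * 10;
  },
});

console.log(String(doc)); // numeric scalars scaled, 'secret' pair removed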
default":true}'); - -/***/ }), - -/***/ 68: -/***/ ((module) => { - -"use strict"; -module.exports = JSON.parse('[[[0,44],"disallowed_STD3_valid"],[[45,46],"valid"],[[47,47],"disallowed_STD3_valid"],[[48,57],"valid"],[[58,64],"disallowed_STD3_valid"],[[65,65],"mapped",[97]],[[66,66],"mapped",[98]],[[67,67],"mapped",[99]],[[68,68],"mapped",[100]],[[69,69],"mapped",[101]],[[70,70],"mapped",[102]],[[71,71],"mapped",[103]],[[72,72],"mapped",[104]],[[73,73],"mapped",[105]],[[74,74],"mapped",[106]],[[75,75],"mapped",[107]],[[76,76],"mapped",[108]],[[77,77],"mapped",[109]],[[78,78],"mapped",[110]],[[79,79],"mapped",[111]],[[80,80],"mapped",[112]],[[81,81],"mapped",[113]],[[82,82],"mapped",[114]],[[83,83],"mapped",[115]],[[84,84],"mapped",[116]],[[85,85],"mapped",[117]],[[86,86],"mapped",[118]],[[87,87],"mapped",[119]],[[88,88],"mapped",[120]],[[89,89],"mapped",[121]],[[90,90],"mapped",[122]],[[91,96],"disallowed_STD3_valid"],[[97,122],"valid"],[[123,127],"disallowed_STD3_valid"],[[128,159],"disallowed"],[[160,160],"disallowed_STD3_mapped",[32]],[[161,167],"valid",[],"NV8"],[[168,168],"disallowed_STD3_mapped",[32,776]],[[169,169],"valid",[],"NV8"],[[170,170],"mapped",[97]],[[171,172],"valid",[],"NV8"],[[173,173],"ignored"],[[174,174],"valid",[],"NV8"],[[175,175],"disallowed_STD3_mapped",[32,772]],[[176,177],"valid",[],"NV8"],[[178,178],"mapped",[50]],[[179,179],"mapped",[51]],[[180,180],"disallowed_STD3_mapped",[32,769]],[[181,181],"mapped",[956]],[[182,182],"valid",[],"NV8"],[[183,183],"valid"],[[184,184],"disallowed_STD3_mapped",[32,807]],[[185,185],"mapped",[49]],[[186,186],"mapped",[111]],[[187,187],"valid",[],"NV8"],[[188,188],"mapped",[49,8260,52]],[[189,189],"mapped",[49,8260,50]],[[190,190],"mapped",[51,8260,52]],[[191,191],"valid",[],"NV8"],[[192,192],"mapped",[224]],[[193,193],"mapped",[225]],[[194,194],"mapped",[226]],[[195,195],"mapped",[227]],[[196,196],"mapped",[228]],[[197,197],"mapped",[229]],[[198,198],"mapped",[230]],[[199,199],"mapped",[231]],[[200,200],"mapped",[232]],[[201,201],"mapped",[233]],[[202,202],"mapped",[234]],[[203,203],"mapped",[235]],[[204,204],"mapped",[236]],[[205,205],"mapped",[237]],[[206,206],"mapped",[238]],[[207,207],"mapped",[239]],[[208,208],"mapped",[240]],[[209,209],"mapped",[241]],[[210,210],"mapped",[242]],[[211,211],"mapped",[243]],[[212,212],"mapped",[244]],[[213,213],"mapped",[245]],[[214,214],"mapped",[246]],[[215,215],"valid",[],"NV8"],[[216,216],"mapped",[248]],[[217,217],"mapped",[249]],[[218,218],"mapped",[250]],[[219,219],"mapped",[251]],[[220,220],"mapped",[252]],[[221,221],"mapped",[253]],[[222,222],"mapped",[254]],[[223,223],"deviation",[115,115]],[[224,246],"valid"],[[247,247],"valid",[],"NV8"],[[248,255],"valid"],[[256,256],"mapped",[257]],[[257,257],"valid"],[[258,258],"mapped",[259]],[[259,259],"valid"],[[260,260],"mapped",[261]],[[261,261],"valid"],[[262,262],"mapped",[263]],[[263,263],"valid"],[[264,264],"mapped",[265]],[[265,265],"valid"],[[266,266],"mapped",[267]],[[267,267],"valid"],[[268,268],"mapped",[269]],[[269,269],"valid"],[[270,270],"mapped",[271]],[[271,271],"valid"],[[272,272],"mapped",[273]],[[273,273],"valid"],[[274,274],"mapped",[275]],[[275,275],"valid"],[[276,276],"mapped",[277]],[[277,277],"valid"],[[278,278],"mapped",[279]],[[279,279],"valid"],[[280,280],"mapped",[281]],[[281,281],"valid"],[[282,282],"mapped",[283]],[[283,283],"valid"],[[284,284],"mapped",[285]],[[285,285],"valid"],[[286,286],"mapped",[287]],[[287,287],"valid"],[[288,288],"mapped",[289]],[[289,289],"valid"],[[290,290],"mapped",[291]],[[291,291],"val
id"],[[292,292],"mapped",[293]],[[293,293],"valid"],[[294,294],"mapped",[295]],[[295,295],"valid"],[[296,296],"mapped",[297]],[[297,297],"valid"],[[298,298],"mapped",[299]],[[299,299],"valid"],[[300,300],"mapped",[301]],[[301,301],"valid"],[[302,302],"mapped",[303]],[[303,303],"valid"],[[304,304],"mapped",[105,775]],[[305,305],"valid"],[[306,307],"mapped",[105,106]],[[308,308],"mapped",[309]],[[309,309],"valid"],[[310,310],"mapped",[311]],[[311,312],"valid"],[[313,313],"mapped",[314]],[[314,314],"valid"],[[315,315],"mapped",[316]],[[316,316],"valid"],[[317,317],"mapped",[318]],[[318,318],"valid"],[[319,320],"mapped",[108,183]],[[321,321],"mapped",[322]],[[322,322],"valid"],[[323,323],"mapped",[324]],[[324,324],"valid"],[[325,325],"mapped",[326]],[[326,326],"valid"],[[327,327],"mapped",[328]],[[328,328],"valid"],[[329,329],"mapped",[700,110]],[[330,330],"mapped",[331]],[[331,331],"valid"],[[332,332],"mapped",[333]],[[333,333],"valid"],[[334,334],"mapped",[335]],[[335,335],"valid"],[[336,336],"mapped",[337]],[[337,337],"valid"],[[338,338],"mapped",[339]],[[339,339],"valid"],[[340,340],"mapped",[341]],[[341,341],"valid"],[[342,342],"mapped",[343]],[[343,343],"valid"],[[344,344],"mapped",[345]],[[345,345],"valid"],[[346,346],"mapped",[347]],[[347,347],"valid"],[[348,348],"mapped",[349]],[[349,349],"valid"],[[350,350],"mapped",[351]],[[351,351],"valid"],[[352,352],"mapped",[353]],[[353,353],"valid"],[[354,354],"mapped",[355]],[[355,355],"valid"],[[356,356],"mapped",[357]],[[357,357],"valid"],[[358,358],"mapped",[359]],[[359,359],"valid"],[[360,360],"mapped",[361]],[[361,361],"valid"],[[362,362],"mapped",[363]],[[363,363],"valid"],[[364,364],"mapped",[365]],[[365,365],"valid"],[[366,366],"mapped",[367]],[[367,367],"valid"],[[368,368],"mapped",[369]],[[369,369],"valid"],[[370,370],"mapped",[371]],[[371,371],"valid"],[[372,372],"mapped",[373]],[[373,373],"valid"],[[374,374],"mapped",[375]],[[375,375],"valid"],[[376,376],"mapped",[255]],[[377,377],"mapped",[378]],[[378,378],"valid"],[[379,379],"mapped",[380]],[[380,380],"valid"],[[381,381],"mapped",[382]],[[382,382],"valid"],[[383,383],"mapped",[115]],[[384,384],"valid"],[[385,385],"mapped",[595]],[[386,386],"mapped",[387]],[[387,387],"valid"],[[388,388],"mapped",[389]],[[389,389],"valid"],[[390,390],"mapped",[596]],[[391,391],"mapped",[392]],[[392,392],"valid"],[[393,393],"mapped",[598]],[[394,394],"mapped",[599]],[[395,395],"mapped",[396]],[[396,397],"valid"],[[398,398],"mapped",[477]],[[399,399],"mapped",[601]],[[400,400],"mapped",[603]],[[401,401],"mapped",[402]],[[402,402],"valid"],[[403,403],"mapped",[608]],[[404,404],"mapped",[611]],[[405,405],"valid"],[[406,406],"mapped",[617]],[[407,407],"mapped",[616]],[[408,408],"mapped",[409]],[[409,411],"valid"],[[412,412],"mapped",[623]],[[413,413],"mapped",[626]],[[414,414],"valid"],[[415,415],"mapped",[629]],[[416,416],"mapped",[417]],[[417,417],"valid"],[[418,418],"mapped",[419]],[[419,419],"valid"],[[420,420],"mapped",[421]],[[421,421],"valid"],[[422,422],"mapped",[640]],[[423,423],"mapped",[424]],[[424,424],"valid"],[[425,425],"mapped",[643]],[[426,427],"valid"],[[428,428],"mapped",[429]],[[429,429],"valid"],[[430,430],"mapped",[648]],[[431,431],"mapped",[432]],[[432,432],"valid"],[[433,433],"mapped",[650]],[[434,434],"mapped",[651]],[[435,435],"mapped",[436]],[[436,436],"valid"],[[437,437],"mapped",[438]],[[438,438],"valid"],[[439,439],"mapped",[658]],[[440,440],"mapped",[441]],[[441,443],"valid"],[[444,444],"mapped",[445]],[[445,451],"valid"],[[452,454],"mapped",[100,382]],[[455,457],"mapped",[1
08,106]],[[458,460],"mapped",[110,106]],[[461,461],"mapped",[462]],[[462,462],"valid"],[[463,463],"mapped",[464]],[[464,464],"valid"],[[465,465],"mapped",[466]],[[466,466],"valid"],[[467,467],"mapped",[468]],[[468,468],"valid"],[[469,469],"mapped",[470]],[[470,470],"valid"],[[471,471],"mapped",[472]],[[472,472],"valid"],[[473,473],"mapped",[474]],[[474,474],"valid"],[[475,475],"mapped",[476]],[[476,477],"valid"],[[478,478],"mapped",[479]],[[479,479],"valid"],[[480,480],"mapped",[481]],[[481,481],"valid"],[[482,482],"mapped",[483]],[[483,483],"valid"],[[484,484],"mapped",[485]],[[485,485],"valid"],[[486,486],"mapped",[487]],[[487,487],"valid"],[[488,488],"mapped",[489]],[[489,489],"valid"],[[490,490],"mapped",[491]],[[491,491],"valid"],[[492,492],"mapped",[493]],[[493,493],"valid"],[[494,494],"mapped",[495]],[[495,496],"valid"],[[497,499],"mapped",[100,122]],[[500,500],"mapped",[501]],[[501,501],"valid"],[[502,502],"mapped",[405]],[[503,503],"mapped",[447]],[[504,504],"mapped",[505]],[[505,505],"valid"],[[506,506],"mapped",[507]],[[507,507],"valid"],[[508,508],"mapped",[509]],[[509,509],"valid"],[[510,510],"mapped",[511]],[[511,511],"valid"],[[512,512],"mapped",[513]],[[513,513],"valid"],[[514,514],"mapped",[515]],[[515,515],"valid"],[[516,516],"mapped",[517]],[[517,517],"valid"],[[518,518],"mapped",[519]],[[519,519],"valid"],[[520,520],"mapped",[521]],[[521,521],"valid"],[[522,522],"mapped",[523]],[[523,523],"valid"],[[524,524],"mapped",[525]],[[525,525],"valid"],[[526,526],"mapped",[527]],[[527,527],"valid"],[[528,528],"mapped",[529]],[[529,529],"valid"],[[530,530],"mapped",[531]],[[531,531],"valid"],[[532,532],"mapped",[533]],[[533,533],"valid"],[[534,534],"mapped",[535]],[[535,535],"valid"],[[536,536],"mapped",[537]],[[537,537],"valid"],[[538,538],"mapped",[539]],[[539,539],"valid"],[[540,540],"mapped",[541]],[[541,541],"valid"],[[542,542],"mapped",[543]],[[543,543],"valid"],[[544,544],"mapped",[414]],[[545,545],"valid"],[[546,546],"mapped",[547]],[[547,547],"valid"],[[548,548],"mapped",[549]],[[549,549],"valid"],[[550,550],"mapped",[551]],[[551,551],"valid"],[[552,552],"mapped",[553]],[[553,553],"valid"],[[554,554],"mapped",[555]],[[555,555],"valid"],[[556,556],"mapped",[557]],[[557,557],"valid"],[[558,558],"mapped",[559]],[[559,559],"valid"],[[560,560],"mapped",[561]],[[561,561],"valid"],[[562,562],"mapped",[563]],[[563,563],"valid"],[[564,566],"valid"],[[567,569],"valid"],[[570,570],"mapped",[11365]],[[571,571],"mapped",[572]],[[572,572],"valid"],[[573,573],"mapped",[410]],[[574,574],"mapped",[11366]],[[575,576],"valid"],[[577,577],"mapped",[578]],[[578,578],"valid"],[[579,579],"mapped",[384]],[[580,580],"mapped",[649]],[[581,581],"mapped",[652]],[[582,582],"mapped",[583]],[[583,583],"valid"],[[584,584],"mapped",[585]],[[585,585],"valid"],[[586,586],"mapped",[587]],[[587,587],"valid"],[[588,588],"mapped",[589]],[[589,589],"valid"],[[590,590],"mapped",[591]],[[591,591],"valid"],[[592,680],"valid"],[[681,685],"valid"],[[686,687],"valid"],[[688,688],"mapped",[104]],[[689,689],"mapped",[614]],[[690,690],"mapped",[106]],[[691,691],"mapped",[114]],[[692,692],"mapped",[633]],[[693,693],"mapped",[635]],[[694,694],"mapped",[641]],[[695,695],"mapped",[119]],[[696,696],"mapped",[121]],[[697,705],"valid"],[[706,709],"valid",[],"NV8"],[[710,721],"valid"],[[722,727],"valid",[],"NV8"],[[728,728],"disallowed_STD3_mapped",[32,774]],[[729,729],"disallowed_STD3_mapped",[32,775]],[[730,730],"disallowed_STD3_mapped",[32,778]],[[731,731],"disallowed_STD3_mapped",[32,808]],[[732,732],"disallowed_STD3_mapped
",[32,771]],[[733,733],"disallowed_STD3_mapped",[32,779]],[[734,734],"valid",[],"NV8"],[[735,735],"valid",[],"NV8"],[[736,736],"mapped",[611]],[[737,737],"mapped",[108]],[[738,738],"mapped",[115]],[[739,739],"mapped",[120]],[[740,740],"mapped",[661]],[[741,745],"valid",[],"NV8"],[[746,747],"valid",[],"NV8"],[[748,748],"valid"],[[749,749],"valid",[],"NV8"],[[750,750],"valid"],[[751,767],"valid",[],"NV8"],[[768,831],"valid"],[[832,832],"mapped",[768]],[[833,833],"mapped",[769]],[[834,834],"valid"],[[835,835],"mapped",[787]],[[836,836],"mapped",[776,769]],[[837,837],"mapped",[953]],[[838,846],"valid"],[[847,847],"ignored"],[[848,855],"valid"],[[856,860],"valid"],[[861,863],"valid"],[[864,865],"valid"],[[866,866],"valid"],[[867,879],"valid"],[[880,880],"mapped",[881]],[[881,881],"valid"],[[882,882],"mapped",[883]],[[883,883],"valid"],[[884,884],"mapped",[697]],[[885,885],"valid"],[[886,886],"mapped",[887]],[[887,887],"valid"],[[888,889],"disallowed"],[[890,890],"disallowed_STD3_mapped",[32,953]],[[891,893],"valid"],[[894,894],"disallowed_STD3_mapped",[59]],[[895,895],"mapped",[1011]],[[896,899],"disallowed"],[[900,900],"disallowed_STD3_mapped",[32,769]],[[901,901],"disallowed_STD3_mapped",[32,776,769]],[[902,902],"mapped",[940]],[[903,903],"mapped",[183]],[[904,904],"mapped",[941]],[[905,905],"mapped",[942]],[[906,906],"mapped",[943]],[[907,907],"disallowed"],[[908,908],"mapped",[972]],[[909,909],"disallowed"],[[910,910],"mapped",[973]],[[911,911],"mapped",[974]],[[912,912],"valid"],[[913,913],"mapped",[945]],[[914,914],"mapped",[946]],[[915,915],"mapped",[947]],[[916,916],"mapped",[948]],[[917,917],"mapped",[949]],[[918,918],"mapped",[950]],[[919,919],"mapped",[951]],[[920,920],"mapped",[952]],[[921,921],"mapped",[953]],[[922,922],"mapped",[954]],[[923,923],"mapped",[955]],[[924,924],"mapped",[956]],[[925,925],"mapped",[957]],[[926,926],"mapped",[958]],[[927,927],"mapped",[959]],[[928,928],"mapped",[960]],[[929,929],"mapped",[961]],[[930,930],"disallowed"],[[931,931],"mapped",[963]],[[932,932],"mapped",[964]],[[933,933],"mapped",[965]],[[934,934],"mapped",[966]],[[935,935],"mapped",[967]],[[936,936],"mapped",[968]],[[937,937],"mapped",[969]],[[938,938],"mapped",[970]],[[939,939],"mapped",[971]],[[940,961],"valid"],[[962,962],"deviation",[963]],[[963,974],"valid"],[[975,975],"mapped",[983]],[[976,976],"mapped",[946]],[[977,977],"mapped",[952]],[[978,978],"mapped",[965]],[[979,979],"mapped",[973]],[[980,980],"mapped",[971]],[[981,981],"mapped",[966]],[[982,982],"mapped",[960]],[[983,983],"valid"],[[984,984],"mapped",[985]],[[985,985],"valid"],[[986,986],"mapped",[987]],[[987,987],"valid"],[[988,988],"mapped",[989]],[[989,989],"valid"],[[990,990],"mapped",[991]],[[991,991],"valid"],[[992,992],"mapped",[993]],[[993,993],"valid"],[[994,994],"mapped",[995]],[[995,995],"valid"],[[996,996],"mapped",[997]],[[997,997],"valid"],[[998,998],"mapped",[999]],[[999,999],"valid"],[[1000,1000],"mapped",[1001]],[[1001,1001],"valid"],[[1002,1002],"mapped",[1003]],[[1003,1003],"valid"],[[1004,1004],"mapped",[1005]],[[1005,1005],"valid"],[[1006,1006],"mapped",[1007]],[[1007,1007],"valid"],[[1008,1008],"mapped",[954]],[[1009,1009],"mapped",[961]],[[1010,1010],"mapped",[963]],[[1011,1011],"valid"],[[1012,1012],"mapped",[952]],[[1013,1013],"mapped",[949]],[[1014,1014],"valid",[],"NV8"],[[1015,1015],"mapped",[1016]],[[1016,1016],"valid"],[[1017,1017],"mapped",[963]],[[1018,1018],"mapped",[1019]],[[1019,1019],"valid"],[[1020,1020],"valid"],[[1021,1021],"mapped",[891]],[[1022,1022],"mapped",[892]],[[1023,1023],"mapped",[
893]],[[1024,1024],"mapped",[1104]],[[1025,1025],"mapped",[1105]],[[1026,1026],"mapped",[1106]],[[1027,1027],"mapped",[1107]],[[1028,1028],"mapped",[1108]],[[1029,1029],"mapped",[1109]],[[1030,1030],"mapped",[1110]],[[1031,1031],"mapped",[1111]],[[1032,1032],"mapped",[1112]],[[1033,1033],"mapped",[1113]],[[1034,1034],"mapped",[1114]],[[1035,1035],"mapped",[1115]],[[1036,1036],"mapped",[1116]],[[1037,1037],"mapped",[1117]],[[1038,1038],"mapped",[1118]],[[1039,1039],"mapped",[1119]],[[1040,1040],"mapped",[1072]],[[1041,1041],"mapped",[1073]],[[1042,1042],"mapped",[1074]],[[1043,1043],"mapped",[1075]],[[1044,1044],"mapped",[1076]],[[1045,1045],"mapped",[1077]],[[1046,1046],"mapped",[1078]],[[1047,1047],"mapped",[1079]],[[1048,1048],"mapped",[1080]],[[1049,1049],"mapped",[1081]],[[1050,1050],"mapped",[1082]],[[1051,1051],"mapped",[1083]],[[1052,1052],"mapped",[1084]],[[1053,1053],"mapped",[1085]],[[1054,1054],"mapped",[1086]],[[1055,1055],"mapped",[1087]],[[1056,1056],"mapped",[1088]],[[1057,1057],"mapped",[1089]],[[1058,1058],"mapped",[1090]],[[1059,1059],"mapped",[1091]],[[1060,1060],"mapped",[1092]],[[1061,1061],"mapped",[1093]],[[1062,1062],"mapped",[1094]],[[1063,1063],"mapped",[1095]],[[1064,1064],"mapped",[1096]],[[1065,1065],"mapped",[1097]],[[1066,1066],"mapped",[1098]],[[1067,1067],"mapped",[1099]],[[1068,1068],"mapped",[1100]],[[1069,1069],"mapped",[1101]],[[1070,1070],"mapped",[1102]],[[1071,1071],"mapped",[1103]],[[1072,1103],"valid"],[[1104,1104],"valid"],[[1105,1116],"valid"],[[1117,1117],"valid"],[[1118,1119],"valid"],[[1120,1120],"mapped",[1121]],[[1121,1121],"valid"],[[1122,1122],"mapped",[1123]],[[1123,1123],"valid"],[[1124,1124],"mapped",[1125]],[[1125,1125],"valid"],[[1126,1126],"mapped",[1127]],[[1127,1127],"valid"],[[1128,1128],"mapped",[1129]],[[1129,1129],"valid"],[[1130,1130],"mapped",[1131]],[[1131,1131],"valid"],[[1132,1132],"mapped",[1133]],[[1133,1133],"valid"],[[1134,1134],"mapped",[1135]],[[1135,1135],"valid"],[[1136,1136],"mapped",[1137]],[[1137,1137],"valid"],[[1138,1138],"mapped",[1139]],[[1139,1139],"valid"],[[1140,1140],"mapped",[1141]],[[1141,1141],"valid"],[[1142,1142],"mapped",[1143]],[[1143,1143],"valid"],[[1144,1144],"mapped",[1145]],[[1145,1145],"valid"],[[1146,1146],"mapped",[1147]],[[1147,1147],"valid"],[[1148,1148],"mapped",[1149]],[[1149,1149],"valid"],[[1150,1150],"mapped",[1151]],[[1151,1151],"valid"],[[1152,1152],"mapped",[1153]],[[1153,1153],"valid"],[[1154,1154],"valid",[],"NV8"],[[1155,1158],"valid"],[[1159,1159],"valid"],[[1160,1161],"valid",[],"NV8"],[[1162,1162],"mapped",[1163]],[[1163,1163],"valid"],[[1164,1164],"mapped",[1165]],[[1165,1165],"valid"],[[1166,1166],"mapped",[1167]],[[1167,1167],"valid"],[[1168,1168],"mapped",[1169]],[[1169,1169],"valid"],[[1170,1170],"mapped",[1171]],[[1171,1171],"valid"],[[1172,1172],"mapped",[1173]],[[1173,1173],"valid"],[[1174,1174],"mapped",[1175]],[[1175,1175],"valid"],[[1176,1176],"mapped",[1177]],[[1177,1177],"valid"],[[1178,1178],"mapped",[1179]],[[1179,1179],"valid"],[[1180,1180],"mapped",[1181]],[[1181,1181],"valid"],[[1182,1182],"mapped",[1183]],[[1183,1183],"valid"],[[1184,1184],"mapped",[1185]],[[1185,1185],"valid"],[[1186,1186],"mapped",[1187]],[[1187,1187],"valid"],[[1188,1188],"mapped",[1189]],[[1189,1189],"valid"],[[1190,1190],"mapped",[1191]],[[1191,1191],"valid"],[[1192,1192],"mapped",[1193]],[[1193,1193],"valid"],[[1194,1194],"mapped",[1195]],[[1195,1195],"valid"],[[1196,1196],"mapped",[1197]],[[1197,1197],"valid"],[[1198,1198],"mapped",[1199]],[[1199,1199],"valid"],[[1200,1200],"mapped"
,[1201]],[[1201,1201],"valid"],[[1202,1202],"mapped",[1203]],[[1203,1203],"valid"],[[1204,1204],"mapped",[1205]],[[1205,1205],"valid"],[[1206,1206],"mapped",[1207]],[[1207,1207],"valid"],[[1208,1208],"mapped",[1209]],[[1209,1209],"valid"],[[1210,1210],"mapped",[1211]],[[1211,1211],"valid"],[[1212,1212],"mapped",[1213]],[[1213,1213],"valid"],[[1214,1214],"mapped",[1215]],[[1215,1215],"valid"],[[1216,1216],"disallowed"],[[1217,1217],"mapped",[1218]],[[1218,1218],"valid"],[[1219,1219],"mapped",[1220]],[[1220,1220],"valid"],[[1221,1221],"mapped",[1222]],[[1222,1222],"valid"],[[1223,1223],"mapped",[1224]],[[1224,1224],"valid"],[[1225,1225],"mapped",[1226]],[[1226,1226],"valid"],[[1227,1227],"mapped",[1228]],[[1228,1228],"valid"],[[1229,1229],"mapped",[1230]],[[1230,1230],"valid"],[[1231,1231],"valid"],[[1232,1232],"mapped",[1233]],[[1233,1233],"valid"],[[1234,1234],"mapped",[1235]],[[1235,1235],"valid"],[[1236,1236],"mapped",[1237]],[[1237,1237],"valid"],[[1238,1238],"mapped",[1239]],[[1239,1239],"valid"],[[1240,1240],"mapped",[1241]],[[1241,1241],"valid"],[[1242,1242],"mapped",[1243]],[[1243,1243],"valid"],[[1244,1244],"mapped",[1245]],[[1245,1245],"valid"],[[1246,1246],"mapped",[1247]],[[1247,1247],"valid"],[[1248,1248],"mapped",[1249]],[[1249,1249],"valid"],[[1250,1250],"mapped",[1251]],[[1251,1251],"valid"],[[1252,1252],"mapped",[1253]],[[1253,1253],"valid"],[[1254,1254],"mapped",[1255]],[[1255,1255],"valid"],[[1256,1256],"mapped",[1257]],[[1257,1257],"valid"],[[1258,1258],"mapped",[1259]],[[1259,1259],"valid"],[[1260,1260],"mapped",[1261]],[[1261,1261],"valid"],[[1262,1262],"mapped",[1263]],[[1263,1263],"valid"],[[1264,1264],"mapped",[1265]],[[1265,1265],"valid"],[[1266,1266],"mapped",[1267]],[[1267,1267],"valid"],[[1268,1268],"mapped",[1269]],[[1269,1269],"valid"],[[1270,1270],"mapped",[1271]],[[1271,1271],"valid"],[[1272,1272],"mapped",[1273]],[[1273,1273],"valid"],[[1274,1274],"mapped",[1275]],[[1275,1275],"valid"],[[1276,1276],"mapped",[1277]],[[1277,1277],"valid"],[[1278,1278],"mapped",[1279]],[[1279,1279],"valid"],[[1280,1280],"mapped",[1281]],[[1281,1281],"valid"],[[1282,1282],"mapped",[1283]],[[1283,1283],"valid"],[[1284,1284],"mapped",[1285]],[[1285,1285],"valid"],[[1286,1286],"mapped",[1287]],[[1287,1287],"valid"],[[1288,1288],"mapped",[1289]],[[1289,1289],"valid"],[[1290,1290],"mapped",[1291]],[[1291,1291],"valid"],[[1292,1292],"mapped",[1293]],[[1293,1293],"valid"],[[1294,1294],"mapped",[1295]],[[1295,1295],"valid"],[[1296,1296],"mapped",[1297]],[[1297,1297],"valid"],[[1298,1298],"mapped",[1299]],[[1299,1299],"valid"],[[1300,1300],"mapped",[1301]],[[1301,1301],"valid"],[[1302,1302],"mapped",[1303]],[[1303,1303],"valid"],[[1304,1304],"mapped",[1305]],[[1305,1305],"valid"],[[1306,1306],"mapped",[1307]],[[1307,1307],"valid"],[[1308,1308],"mapped",[1309]],[[1309,1309],"valid"],[[1310,1310],"mapped",[1311]],[[1311,1311],"valid"],[[1312,1312],"mapped",[1313]],[[1313,1313],"valid"],[[1314,1314],"mapped",[1315]],[[1315,1315],"valid"],[[1316,1316],"mapped",[1317]],[[1317,1317],"valid"],[[1318,1318],"mapped",[1319]],[[1319,1319],"valid"],[[1320,1320],"mapped",[1321]],[[1321,1321],"valid"],[[1322,1322],"mapped",[1323]],[[1323,1323],"valid"],[[1324,1324],"mapped",[1325]],[[1325,1325],"valid"],[[1326,1326],"mapped",[1327]],[[1327,1327],"valid"],[[1328,1328],"disallowed"],[[1329,1329],"mapped",[1377]],[[1330,1330],"mapped",[1378]],[[1331,1331],"mapped",[1379]],[[1332,1332],"mapped",[1380]],[[1333,1333],"mapped",[1381]],[[1334,1334],"mapped",[1382]],[[1335,1335],"mapped",[1383]],[[1336,1336],"
mapped",[1384]],[[1337,1337],"mapped",[1385]],[[1338,1338],"mapped",[1386]],[[1339,1339],"mapped",[1387]],[[1340,1340],"mapped",[1388]],[[1341,1341],"mapped",[1389]],[[1342,1342],"mapped",[1390]],[[1343,1343],"mapped",[1391]],[[1344,1344],"mapped",[1392]],[[1345,1345],"mapped",[1393]],[[1346,1346],"mapped",[1394]],[[1347,1347],"mapped",[1395]],[[1348,1348],"mapped",[1396]],[[1349,1349],"mapped",[1397]],[[1350,1350],"mapped",[1398]],[[1351,1351],"mapped",[1399]],[[1352,1352],"mapped",[1400]],[[1353,1353],"mapped",[1401]],[[1354,1354],"mapped",[1402]],[[1355,1355],"mapped",[1403]],[[1356,1356],"mapped",[1404]],[[1357,1357],"mapped",[1405]],[[1358,1358],"mapped",[1406]],[[1359,1359],"mapped",[1407]],[[1360,1360],"mapped",[1408]],[[1361,1361],"mapped",[1409]],[[1362,1362],"mapped",[1410]],[[1363,1363],"mapped",[1411]],[[1364,1364],"mapped",[1412]],[[1365,1365],"mapped",[1413]],[[1366,1366],"mapped",[1414]],[[1367,1368],"disallowed"],[[1369,1369],"valid"],[[1370,1375],"valid",[],"NV8"],[[1376,1376],"disallowed"],[[1377,1414],"valid"],[[1415,1415],"mapped",[1381,1410]],[[1416,1416],"disallowed"],[[1417,1417],"valid",[],"NV8"],[[1418,1418],"valid",[],"NV8"],[[1419,1420],"disallowed"],[[1421,1422],"valid",[],"NV8"],[[1423,1423],"valid",[],"NV8"],[[1424,1424],"disallowed"],[[1425,1441],"valid"],[[1442,1442],"valid"],[[1443,1455],"valid"],[[1456,1465],"valid"],[[1466,1466],"valid"],[[1467,1469],"valid"],[[1470,1470],"valid",[],"NV8"],[[1471,1471],"valid"],[[1472,1472],"valid",[],"NV8"],[[1473,1474],"valid"],[[1475,1475],"valid",[],"NV8"],[[1476,1476],"valid"],[[1477,1477],"valid"],[[1478,1478],"valid",[],"NV8"],[[1479,1479],"valid"],[[1480,1487],"disallowed"],[[1488,1514],"valid"],[[1515,1519],"disallowed"],[[1520,1524],"valid"],[[1525,1535],"disallowed"],[[1536,1539],"disallowed"],[[1540,1540],"disallowed"],[[1541,1541],"disallowed"],[[1542,1546],"valid",[],"NV8"],[[1547,1547],"valid",[],"NV8"],[[1548,1548],"valid",[],"NV8"],[[1549,1551],"valid",[],"NV8"],[[1552,1557],"valid"],[[1558,1562],"valid"],[[1563,1563],"valid",[],"NV8"],[[1564,1564],"disallowed"],[[1565,1565],"disallowed"],[[1566,1566],"valid",[],"NV8"],[[1567,1567],"valid",[],"NV8"],[[1568,1568],"valid"],[[1569,1594],"valid"],[[1595,1599],"valid"],[[1600,1600],"valid",[],"NV8"],[[1601,1618],"valid"],[[1619,1621],"valid"],[[1622,1624],"valid"],[[1625,1630],"valid"],[[1631,1631],"valid"],[[1632,1641],"valid"],[[1642,1645],"valid",[],"NV8"],[[1646,1647],"valid"],[[1648,1652],"valid"],[[1653,1653],"mapped",[1575,1652]],[[1654,1654],"mapped",[1608,1652]],[[1655,1655],"mapped",[1735,1652]],[[1656,1656],"mapped",[1610,1652]],[[1657,1719],"valid"],[[1720,1721],"valid"],[[1722,1726],"valid"],[[1727,1727],"valid"],[[1728,1742],"valid"],[[1743,1743],"valid"],[[1744,1747],"valid"],[[1748,1748],"valid",[],"NV8"],[[1749,1756],"valid"],[[1757,1757],"disallowed"],[[1758,1758],"valid",[],"NV8"],[[1759,1768],"valid"],[[1769,1769],"valid",[],"NV8"],[[1770,1773],"valid"],[[1774,1775],"valid"],[[1776,1785],"valid"],[[1786,1790],"valid"],[[1791,1791],"valid"],[[1792,1805],"valid",[],"NV8"],[[1806,1806],"disallowed"],[[1807,1807],"disallowed"],[[1808,1836],"valid"],[[1837,1839],"valid"],[[1840,1866],"valid"],[[1867,1868],"disallowed"],[[1869,1871],"valid"],[[1872,1901],"valid"],[[1902,1919],"valid"],[[1920,1968],"valid"],[[1969,1969],"valid"],[[1970,1983],"disallowed"],[[1984,2037],"valid"],[[2038,2042],"valid",[],"NV8"],[[2043,2047],"disallowed"],[[2048,2093],"valid"],[[2094,2095],"disallowed"],[[2096,2110],"valid",[],"NV8"],[[2111,2111],"disallowed"],[[2112,21
39],"valid"],[[2140,2141],"disallowed"],[[2142,2142],"valid",[],"NV8"],[[2143,2207],"disallowed"],[[2208,2208],"valid"],[[2209,2209],"valid"],[[2210,2220],"valid"],[[2221,2226],"valid"],[[2227,2228],"valid"],[[2229,2274],"disallowed"],[[2275,2275],"valid"],[[2276,2302],"valid"],[[2303,2303],"valid"],[[2304,2304],"valid"],[[2305,2307],"valid"],[[2308,2308],"valid"],[[2309,2361],"valid"],[[2362,2363],"valid"],[[2364,2381],"valid"],[[2382,2382],"valid"],[[2383,2383],"valid"],[[2384,2388],"valid"],[[2389,2389],"valid"],[[2390,2391],"valid"],[[2392,2392],"mapped",[2325,2364]],[[2393,2393],"mapped",[2326,2364]],[[2394,2394],"mapped",[2327,2364]],[[2395,2395],"mapped",[2332,2364]],[[2396,2396],"mapped",[2337,2364]],[[2397,2397],"mapped",[2338,2364]],[[2398,2398],"mapped",[2347,2364]],[[2399,2399],"mapped",[2351,2364]],[[2400,2403],"valid"],[[2404,2405],"valid",[],"NV8"],[[2406,2415],"valid"],[[2416,2416],"valid",[],"NV8"],[[2417,2418],"valid"],[[2419,2423],"valid"],[[2424,2424],"valid"],[[2425,2426],"valid"],[[2427,2428],"valid"],[[2429,2429],"valid"],[[2430,2431],"valid"],[[2432,2432],"valid"],[[2433,2435],"valid"],[[2436,2436],"disallowed"],[[2437,2444],"valid"],[[2445,2446],"disallowed"],[[2447,2448],"valid"],[[2449,2450],"disallowed"],[[2451,2472],"valid"],[[2473,2473],"disallowed"],[[2474,2480],"valid"],[[2481,2481],"disallowed"],[[2482,2482],"valid"],[[2483,2485],"disallowed"],[[2486,2489],"valid"],[[2490,2491],"disallowed"],[[2492,2492],"valid"],[[2493,2493],"valid"],[[2494,2500],"valid"],[[2501,2502],"disallowed"],[[2503,2504],"valid"],[[2505,2506],"disallowed"],[[2507,2509],"valid"],[[2510,2510],"valid"],[[2511,2518],"disallowed"],[[2519,2519],"valid"],[[2520,2523],"disallowed"],[[2524,2524],"mapped",[2465,2492]],[[2525,2525],"mapped",[2466,2492]],[[2526,2526],"disallowed"],[[2527,2527],"mapped",[2479,2492]],[[2528,2531],"valid"],[[2532,2533],"disallowed"],[[2534,2545],"valid"],[[2546,2554],"valid",[],"NV8"],[[2555,2555],"valid",[],"NV8"],[[2556,2560],"disallowed"],[[2561,2561],"valid"],[[2562,2562],"valid"],[[2563,2563],"valid"],[[2564,2564],"disallowed"],[[2565,2570],"valid"],[[2571,2574],"disallowed"],[[2575,2576],"valid"],[[2577,2578],"disallowed"],[[2579,2600],"valid"],[[2601,2601],"disallowed"],[[2602,2608],"valid"],[[2609,2609],"disallowed"],[[2610,2610],"valid"],[[2611,2611],"mapped",[2610,2620]],[[2612,2612],"disallowed"],[[2613,2613],"valid"],[[2614,2614],"mapped",[2616,2620]],[[2615,2615],"disallowed"],[[2616,2617],"valid"],[[2618,2619],"disallowed"],[[2620,2620],"valid"],[[2621,2621],"disallowed"],[[2622,2626],"valid"],[[2627,2630],"disallowed"],[[2631,2632],"valid"],[[2633,2634],"disallowed"],[[2635,2637],"valid"],[[2638,2640],"disallowed"],[[2641,2641],"valid"],[[2642,2648],"disallowed"],[[2649,2649],"mapped",[2582,2620]],[[2650,2650],"mapped",[2583,2620]],[[2651,2651],"mapped",[2588,2620]],[[2652,2652],"valid"],[[2653,2653],"disallowed"],[[2654,2654],"mapped",[2603,2620]],[[2655,2661],"disallowed"],[[2662,2676],"valid"],[[2677,2677],"valid"],[[2678,2688],"disallowed"],[[2689,2691],"valid"],[[2692,2692],"disallowed"],[[2693,2699],"valid"],[[2700,2700],"valid"],[[2701,2701],"valid"],[[2702,2702],"disallowed"],[[2703,2705],"valid"],[[2706,2706],"disallowed"],[[2707,2728],"valid"],[[2729,2729],"disallowed"],[[2730,2736],"valid"],[[2737,2737],"disallowed"],[[2738,2739],"valid"],[[2740,2740],"disallowed"],[[2741,2745],"valid"],[[2746,2747],"disallowed"],[[2748,2757],"valid"],[[2758,2758],"disallowed"],[[2759,2761],"valid"],[[2762,2762],"disallowed"],[[2763,2765],"valid"],[[2766,2
767],"disallowed"],[[2768,2768],"valid"],[[2769,2783],"disallowed"],[[2784,2784],"valid"],[[2785,2787],"valid"],[[2788,2789],"disallowed"],[[2790,2799],"valid"],[[2800,2800],"valid",[],"NV8"],[[2801,2801],"valid",[],"NV8"],[[2802,2808],"disallowed"],[[2809,2809],"valid"],[[2810,2816],"disallowed"],[[2817,2819],"valid"],[[2820,2820],"disallowed"],[[2821,2828],"valid"],[[2829,2830],"disallowed"],[[2831,2832],"valid"],[[2833,2834],"disallowed"],[[2835,2856],"valid"],[[2857,2857],"disallowed"],[[2858,2864],"valid"],[[2865,2865],"disallowed"],[[2866,2867],"valid"],[[2868,2868],"disallowed"],[[2869,2869],"valid"],[[2870,2873],"valid"],[[2874,2875],"disallowed"],[[2876,2883],"valid"],[[2884,2884],"valid"],[[2885,2886],"disallowed"],[[2887,2888],"valid"],[[2889,2890],"disallowed"],[[2891,2893],"valid"],[[2894,2901],"disallowed"],[[2902,2903],"valid"],[[2904,2907],"disallowed"],[[2908,2908],"mapped",[2849,2876]],[[2909,2909],"mapped",[2850,2876]],[[2910,2910],"disallowed"],[[2911,2913],"valid"],[[2914,2915],"valid"],[[2916,2917],"disallowed"],[[2918,2927],"valid"],[[2928,2928],"valid",[],"NV8"],[[2929,2929],"valid"],[[2930,2935],"valid",[],"NV8"],[[2936,2945],"disallowed"],[[2946,2947],"valid"],[[2948,2948],"disallowed"],[[2949,2954],"valid"],[[2955,2957],"disallowed"],[[2958,2960],"valid"],[[2961,2961],"disallowed"],[[2962,2965],"valid"],[[2966,2968],"disallowed"],[[2969,2970],"valid"],[[2971,2971],"disallowed"],[[2972,2972],"valid"],[[2973,2973],"disallowed"],[[2974,2975],"valid"],[[2976,2978],"disallowed"],[[2979,2980],"valid"],[[2981,2983],"disallowed"],[[2984,2986],"valid"],[[2987,2989],"disallowed"],[[2990,2997],"valid"],[[2998,2998],"valid"],[[2999,3001],"valid"],[[3002,3005],"disallowed"],[[3006,3010],"valid"],[[3011,3013],"disallowed"],[[3014,3016],"valid"],[[3017,3017],"disallowed"],[[3018,3021],"valid"],[[3022,3023],"disallowed"],[[3024,3024],"valid"],[[3025,3030],"disallowed"],[[3031,3031],"valid"],[[3032,3045],"disallowed"],[[3046,3046],"valid"],[[3047,3055],"valid"],[[3056,3058],"valid",[],"NV8"],[[3059,3066],"valid",[],"NV8"],[[3067,3071],"disallowed"],[[3072,3072],"valid"],[[3073,3075],"valid"],[[3076,3076],"disallowed"],[[3077,3084],"valid"],[[3085,3085],"disallowed"],[[3086,3088],"valid"],[[3089,3089],"disallowed"],[[3090,3112],"valid"],[[3113,3113],"disallowed"],[[3114,3123],"valid"],[[3124,3124],"valid"],[[3125,3129],"valid"],[[3130,3132],"disallowed"],[[3133,3133],"valid"],[[3134,3140],"valid"],[[3141,3141],"disallowed"],[[3142,3144],"valid"],[[3145,3145],"disallowed"],[[3146,3149],"valid"],[[3150,3156],"disallowed"],[[3157,3158],"valid"],[[3159,3159],"disallowed"],[[3160,3161],"valid"],[[3162,3162],"valid"],[[3163,3167],"disallowed"],[[3168,3169],"valid"],[[3170,3171],"valid"],[[3172,3173],"disallowed"],[[3174,3183],"valid"],[[3184,3191],"disallowed"],[[3192,3199],"valid",[],"NV8"],[[3200,3200],"disallowed"],[[3201,3201],"valid"],[[3202,3203],"valid"],[[3204,3204],"disallowed"],[[3205,3212],"valid"],[[3213,3213],"disallowed"],[[3214,3216],"valid"],[[3217,3217],"disallowed"],[[3218,3240],"valid"],[[3241,3241],"disallowed"],[[3242,3251],"valid"],[[3252,3252],"disallowed"],[[3253,3257],"valid"],[[3258,3259],"disallowed"],[[3260,3261],"valid"],[[3262,3268],"valid"],[[3269,3269],"disallowed"],[[3270,3272],"valid"],[[3273,3273],"disallowed"],[[3274,3277],"valid"],[[3278,3284],"disallowed"],[[3285,3286],"valid"],[[3287,3293],"disallowed"],[[3294,3294],"valid"],[[3295,3295],"disallowed"],[[3296,3297],"valid"],[[3298,3299],"valid"],[[3300,3301],"disallowed"],[[3302,3311],"valid"],[[3312
,3312],"disallowed"],[[3313,3314],"valid"],[[3315,3328],"disallowed"],[[3329,3329],"valid"],[[3330,3331],"valid"],[[3332,3332],"disallowed"],[[3333,3340],"valid"],[[3341,3341],"disallowed"],[[3342,3344],"valid"],[[3345,3345],"disallowed"],[[3346,3368],"valid"],[[3369,3369],"valid"],[[3370,3385],"valid"],[[3386,3386],"valid"],[[3387,3388],"disallowed"],[[3389,3389],"valid"],[[3390,3395],"valid"],[[3396,3396],"valid"],[[3397,3397],"disallowed"],[[3398,3400],"valid"],[[3401,3401],"disallowed"],[[3402,3405],"valid"],[[3406,3406],"valid"],[[3407,3414],"disallowed"],[[3415,3415],"valid"],[[3416,3422],"disallowed"],[[3423,3423],"valid"],[[3424,3425],"valid"],[[3426,3427],"valid"],[[3428,3429],"disallowed"],[[3430,3439],"valid"],[[3440,3445],"valid",[],"NV8"],[[3446,3448],"disallowed"],[[3449,3449],"valid",[],"NV8"],[[3450,3455],"valid"],[[3456,3457],"disallowed"],[[3458,3459],"valid"],[[3460,3460],"disallowed"],[[3461,3478],"valid"],[[3479,3481],"disallowed"],[[3482,3505],"valid"],[[3506,3506],"disallowed"],[[3507,3515],"valid"],[[3516,3516],"disallowed"],[[3517,3517],"valid"],[[3518,3519],"disallowed"],[[3520,3526],"valid"],[[3527,3529],"disallowed"],[[3530,3530],"valid"],[[3531,3534],"disallowed"],[[3535,3540],"valid"],[[3541,3541],"disallowed"],[[3542,3542],"valid"],[[3543,3543],"disallowed"],[[3544,3551],"valid"],[[3552,3557],"disallowed"],[[3558,3567],"valid"],[[3568,3569],"disallowed"],[[3570,3571],"valid"],[[3572,3572],"valid",[],"NV8"],[[3573,3584],"disallowed"],[[3585,3634],"valid"],[[3635,3635],"mapped",[3661,3634]],[[3636,3642],"valid"],[[3643,3646],"disallowed"],[[3647,3647],"valid",[],"NV8"],[[3648,3662],"valid"],[[3663,3663],"valid",[],"NV8"],[[3664,3673],"valid"],[[3674,3675],"valid",[],"NV8"],[[3676,3712],"disallowed"],[[3713,3714],"valid"],[[3715,3715],"disallowed"],[[3716,3716],"valid"],[[3717,3718],"disallowed"],[[3719,3720],"valid"],[[3721,3721],"disallowed"],[[3722,3722],"valid"],[[3723,3724],"disallowed"],[[3725,3725],"valid"],[[3726,3731],"disallowed"],[[3732,3735],"valid"],[[3736,3736],"disallowed"],[[3737,3743],"valid"],[[3744,3744],"disallowed"],[[3745,3747],"valid"],[[3748,3748],"disallowed"],[[3749,3749],"valid"],[[3750,3750],"disallowed"],[[3751,3751],"valid"],[[3752,3753],"disallowed"],[[3754,3755],"valid"],[[3756,3756],"disallowed"],[[3757,3762],"valid"],[[3763,3763],"mapped",[3789,3762]],[[3764,3769],"valid"],[[3770,3770],"disallowed"],[[3771,3773],"valid"],[[3774,3775],"disallowed"],[[3776,3780],"valid"],[[3781,3781],"disallowed"],[[3782,3782],"valid"],[[3783,3783],"disallowed"],[[3784,3789],"valid"],[[3790,3791],"disallowed"],[[3792,3801],"valid"],[[3802,3803],"disallowed"],[[3804,3804],"mapped",[3755,3737]],[[3805,3805],"mapped",[3755,3745]],[[3806,3807],"valid"],[[3808,3839],"disallowed"],[[3840,3840],"valid"],[[3841,3850],"valid",[],"NV8"],[[3851,3851],"valid"],[[3852,3852],"mapped",[3851]],[[3853,3863],"valid",[],"NV8"],[[3864,3865],"valid"],[[3866,3871],"valid",[],"NV8"],[[3872,3881],"valid"],[[3882,3892],"valid",[],"NV8"],[[3893,3893],"valid"],[[3894,3894],"valid",[],"NV8"],[[3895,3895],"valid"],[[3896,3896],"valid",[],"NV8"],[[3897,3897],"valid"],[[3898,3901],"valid",[],"NV8"],[[3902,3906],"valid"],[[3907,3907],"mapped",[3906,4023]],[[3908,3911],"valid"],[[3912,3912],"disallowed"],[[3913,3916],"valid"],[[3917,3917],"mapped",[3916,4023]],[[3918,3921],"valid"],[[3922,3922],"mapped",[3921,4023]],[[3923,3926],"valid"],[[3927,3927],"mapped",[3926,4023]],[[3928,3931],"valid"],[[3932,3932],"mapped",[3931,4023]],[[3933,3944],"valid"],[[3945,3945],"mapped",[3904,402
1]],[[3946,3946],"valid"],[[3947,3948],"valid"],[[3949,3952],"disallowed"],[[3953,3954],"valid"],[[3955,3955],"mapped",[3953,3954]],[[3956,3956],"valid"],[[3957,3957],"mapped",[3953,3956]],[[3958,3958],"mapped",[4018,3968]],[[3959,3959],"mapped",[4018,3953,3968]],[[3960,3960],"mapped",[4019,3968]],[[3961,3961],"mapped",[4019,3953,3968]],[[3962,3968],"valid"],[[3969,3969],"mapped",[3953,3968]],[[3970,3972],"valid"],[[3973,3973],"valid",[],"NV8"],[[3974,3979],"valid"],[[3980,3983],"valid"],[[3984,3986],"valid"],[[3987,3987],"mapped",[3986,4023]],[[3988,3989],"valid"],[[3990,3990],"valid"],[[3991,3991],"valid"],[[3992,3992],"disallowed"],[[3993,3996],"valid"],[[3997,3997],"mapped",[3996,4023]],[[3998,4001],"valid"],[[4002,4002],"mapped",[4001,4023]],[[4003,4006],"valid"],[[4007,4007],"mapped",[4006,4023]],[[4008,4011],"valid"],[[4012,4012],"mapped",[4011,4023]],[[4013,4013],"valid"],[[4014,4016],"valid"],[[4017,4023],"valid"],[[4024,4024],"valid"],[[4025,4025],"mapped",[3984,4021]],[[4026,4028],"valid"],[[4029,4029],"disallowed"],[[4030,4037],"valid",[],"NV8"],[[4038,4038],"valid"],[[4039,4044],"valid",[],"NV8"],[[4045,4045],"disallowed"],[[4046,4046],"valid",[],"NV8"],[[4047,4047],"valid",[],"NV8"],[[4048,4049],"valid",[],"NV8"],[[4050,4052],"valid",[],"NV8"],[[4053,4056],"valid",[],"NV8"],[[4057,4058],"valid",[],"NV8"],[[4059,4095],"disallowed"],[[4096,4129],"valid"],[[4130,4130],"valid"],[[4131,4135],"valid"],[[4136,4136],"valid"],[[4137,4138],"valid"],[[4139,4139],"valid"],[[4140,4146],"valid"],[[4147,4149],"valid"],[[4150,4153],"valid"],[[4154,4159],"valid"],[[4160,4169],"valid"],[[4170,4175],"valid",[],"NV8"],[[4176,4185],"valid"],[[4186,4249],"valid"],[[4250,4253],"valid"],[[4254,4255],"valid",[],"NV8"],[[4256,4293],"disallowed"],[[4294,4294],"disallowed"],[[4295,4295],"mapped",[11559]],[[4296,4300],"disallowed"],[[4301,4301],"mapped",[11565]],[[4302,4303],"disallowed"],[[4304,4342],"valid"],[[4343,4344],"valid"],[[4345,4346],"valid"],[[4347,4347],"valid",[],"NV8"],[[4348,4348],"mapped",[4316]],[[4349,4351],"valid"],[[4352,4441],"valid",[],"NV8"],[[4442,4446],"valid",[],"NV8"],[[4447,4448],"disallowed"],[[4449,4514],"valid",[],"NV8"],[[4515,4519],"valid",[],"NV8"],[[4520,4601],"valid",[],"NV8"],[[4602,4607],"valid",[],"NV8"],[[4608,4614],"valid"],[[4615,4615],"valid"],[[4616,4678],"valid"],[[4679,4679],"valid"],[[4680,4680],"valid"],[[4681,4681],"disallowed"],[[4682,4685],"valid"],[[4686,4687],"disallowed"],[[4688,4694],"valid"],[[4695,4695],"disallowed"],[[4696,4696],"valid"],[[4697,4697],"disallowed"],[[4698,4701],"valid"],[[4702,4703],"disallowed"],[[4704,4742],"valid"],[[4743,4743],"valid"],[[4744,4744],"valid"],[[4745,4745],"disallowed"],[[4746,4749],"valid"],[[4750,4751],"disallowed"],[[4752,4782],"valid"],[[4783,4783],"valid"],[[4784,4784],"valid"],[[4785,4785],"disallowed"],[[4786,4789],"valid"],[[4790,4791],"disallowed"],[[4792,4798],"valid"],[[4799,4799],"disallowed"],[[4800,4800],"valid"],[[4801,4801],"disallowed"],[[4802,4805],"valid"],[[4806,4807],"disallowed"],[[4808,4814],"valid"],[[4815,4815],"valid"],[[4816,4822],"valid"],[[4823,4823],"disallowed"],[[4824,4846],"valid"],[[4847,4847],"valid"],[[4848,4878],"valid"],[[4879,4879],"valid"],[[4880,4880],"valid"],[[4881,4881],"disallowed"],[[4882,4885],"valid"],[[4886,4887],"disallowed"],[[4888,4894],"valid"],[[4895,4895],"valid"],[[4896,4934],"valid"],[[4935,4935],"valid"],[[4936,4954],"valid"],[[4955,4956],"disallowed"],[[4957,4958],"valid"],[[4959,4959],"valid"],[[4960,4960],"valid",[],"NV8"],[[4961,4988],"valid",[],"NV8"],
[[4989,4991],"disallowed"],[[4992,5007],"valid"],[[5008,5017],"valid",[],"NV8"],[[5018,5023],"disallowed"],[[5024,5108],"valid"],[[5109,5109],"valid"],[[5110,5111],"disallowed"],[[5112,5112],"mapped",[5104]],[[5113,5113],"mapped",[5105]],[[5114,5114],"mapped",[5106]],[[5115,5115],"mapped",[5107]],[[5116,5116],"mapped",[5108]],[[5117,5117],"mapped",[5109]],[[5118,5119],"disallowed"],[[5120,5120],"valid",[],"NV8"],[[5121,5740],"valid"],[[5741,5742],"valid",[],"NV8"],[[5743,5750],"valid"],[[5751,5759],"valid"],[[5760,5760],"disallowed"],[[5761,5786],"valid"],[[5787,5788],"valid",[],"NV8"],[[5789,5791],"disallowed"],[[5792,5866],"valid"],[[5867,5872],"valid",[],"NV8"],[[5873,5880],"valid"],[[5881,5887],"disallowed"],[[5888,5900],"valid"],[[5901,5901],"disallowed"],[[5902,5908],"valid"],[[5909,5919],"disallowed"],[[5920,5940],"valid"],[[5941,5942],"valid",[],"NV8"],[[5943,5951],"disallowed"],[[5952,5971],"valid"],[[5972,5983],"disallowed"],[[5984,5996],"valid"],[[5997,5997],"disallowed"],[[5998,6000],"valid"],[[6001,6001],"disallowed"],[[6002,6003],"valid"],[[6004,6015],"disallowed"],[[6016,6067],"valid"],[[6068,6069],"disallowed"],[[6070,6099],"valid"],[[6100,6102],"valid",[],"NV8"],[[6103,6103],"valid"],[[6104,6107],"valid",[],"NV8"],[[6108,6108],"valid"],[[6109,6109],"valid"],[[6110,6111],"disallowed"],[[6112,6121],"valid"],[[6122,6127],"disallowed"],[[6128,6137],"valid",[],"NV8"],[[6138,6143],"disallowed"],[[6144,6149],"valid",[],"NV8"],[[6150,6150],"disallowed"],[[6151,6154],"valid",[],"NV8"],[[6155,6157],"ignored"],[[6158,6158],"disallowed"],[[6159,6159],"disallowed"],[[6160,6169],"valid"],[[6170,6175],"disallowed"],[[6176,6263],"valid"],[[6264,6271],"disallowed"],[[6272,6313],"valid"],[[6314,6314],"valid"],[[6315,6319],"disallowed"],[[6320,6389],"valid"],[[6390,6399],"disallowed"],[[6400,6428],"valid"],[[6429,6430],"valid"],[[6431,6431],"disallowed"],[[6432,6443],"valid"],[[6444,6447],"disallowed"],[[6448,6459],"valid"],[[6460,6463],"disallowed"],[[6464,6464],"valid",[],"NV8"],[[6465,6467],"disallowed"],[[6468,6469],"valid",[],"NV8"],[[6470,6509],"valid"],[[6510,6511],"disallowed"],[[6512,6516],"valid"],[[6517,6527],"disallowed"],[[6528,6569],"valid"],[[6570,6571],"valid"],[[6572,6575],"disallowed"],[[6576,6601],"valid"],[[6602,6607],"disallowed"],[[6608,6617],"valid"],[[6618,6618],"valid",[],"XV8"],[[6619,6621],"disallowed"],[[6622,6623],"valid",[],"NV8"],[[6624,6655],"valid",[],"NV8"],[[6656,6683],"valid"],[[6684,6685],"disallowed"],[[6686,6687],"valid",[],"NV8"],[[6688,6750],"valid"],[[6751,6751],"disallowed"],[[6752,6780],"valid"],[[6781,6782],"disallowed"],[[6783,6793],"valid"],[[6794,6799],"disallowed"],[[6800,6809],"valid"],[[6810,6815],"disallowed"],[[6816,6822],"valid",[],"NV8"],[[6823,6823],"valid"],[[6824,6829],"valid",[],"NV8"],[[6830,6831],"disallowed"],[[6832,6845],"valid"],[[6846,6846],"valid",[],"NV8"],[[6847,6911],"disallowed"],[[6912,6987],"valid"],[[6988,6991],"disallowed"],[[6992,7001],"valid"],[[7002,7018],"valid",[],"NV8"],[[7019,7027],"valid"],[[7028,7036],"valid",[],"NV8"],[[7037,7039],"disallowed"],[[7040,7082],"valid"],[[7083,7085],"valid"],[[7086,7097],"valid"],[[7098,7103],"valid"],[[7104,7155],"valid"],[[7156,7163],"disallowed"],[[7164,7167],"valid",[],"NV8"],[[7168,7223],"valid"],[[7224,7226],"disallowed"],[[7227,7231],"valid",[],"NV8"],[[7232,7241],"valid"],[[7242,7244],"disallowed"],[[7245,7293],"valid"],[[7294,7295],"valid",[],"NV8"],[[7296,7359],"disallowed"],[[7360,7367],"valid",[],"NV8"],[[7368,7375],"disallowed"],[[7376,7378],"valid"],[[7379,7379],"val
id",[],"NV8"],[[7380,7410],"valid"],[[7411,7414],"valid"],[[7415,7415],"disallowed"],[[7416,7417],"valid"],[[7418,7423],"disallowed"],[[7424,7467],"valid"],[[7468,7468],"mapped",[97]],[[7469,7469],"mapped",[230]],[[7470,7470],"mapped",[98]],[[7471,7471],"valid"],[[7472,7472],"mapped",[100]],[[7473,7473],"mapped",[101]],[[7474,7474],"mapped",[477]],[[7475,7475],"mapped",[103]],[[7476,7476],"mapped",[104]],[[7477,7477],"mapped",[105]],[[7478,7478],"mapped",[106]],[[7479,7479],"mapped",[107]],[[7480,7480],"mapped",[108]],[[7481,7481],"mapped",[109]],[[7482,7482],"mapped",[110]],[[7483,7483],"valid"],[[7484,7484],"mapped",[111]],[[7485,7485],"mapped",[547]],[[7486,7486],"mapped",[112]],[[7487,7487],"mapped",[114]],[[7488,7488],"mapped",[116]],[[7489,7489],"mapped",[117]],[[7490,7490],"mapped",[119]],[[7491,7491],"mapped",[97]],[[7492,7492],"mapped",[592]],[[7493,7493],"mapped",[593]],[[7494,7494],"mapped",[7426]],[[7495,7495],"mapped",[98]],[[7496,7496],"mapped",[100]],[[7497,7497],"mapped",[101]],[[7498,7498],"mapped",[601]],[[7499,7499],"mapped",[603]],[[7500,7500],"mapped",[604]],[[7501,7501],"mapped",[103]],[[7502,7502],"valid"],[[7503,7503],"mapped",[107]],[[7504,7504],"mapped",[109]],[[7505,7505],"mapped",[331]],[[7506,7506],"mapped",[111]],[[7507,7507],"mapped",[596]],[[7508,7508],"mapped",[7446]],[[7509,7509],"mapped",[7447]],[[7510,7510],"mapped",[112]],[[7511,7511],"mapped",[116]],[[7512,7512],"mapped",[117]],[[7513,7513],"mapped",[7453]],[[7514,7514],"mapped",[623]],[[7515,7515],"mapped",[118]],[[7516,7516],"mapped",[7461]],[[7517,7517],"mapped",[946]],[[7518,7518],"mapped",[947]],[[7519,7519],"mapped",[948]],[[7520,7520],"mapped",[966]],[[7521,7521],"mapped",[967]],[[7522,7522],"mapped",[105]],[[7523,7523],"mapped",[114]],[[7524,7524],"mapped",[117]],[[7525,7525],"mapped",[118]],[[7526,7526],"mapped",[946]],[[7527,7527],"mapped",[947]],[[7528,7528],"mapped",[961]],[[7529,7529],"mapped",[966]],[[7530,7530],"mapped",[967]],[[7531,7531],"valid"],[[7532,7543],"valid"],[[7544,7544],"mapped",[1085]],[[7545,7578],"valid"],[[7579,7579],"mapped",[594]],[[7580,7580],"mapped",[99]],[[7581,7581],"mapped",[597]],[[7582,7582],"mapped",[240]],[[7583,7583],"mapped",[604]],[[7584,7584],"mapped",[102]],[[7585,7585],"mapped",[607]],[[7586,7586],"mapped",[609]],[[7587,7587],"mapped",[613]],[[7588,7588],"mapped",[616]],[[7589,7589],"mapped",[617]],[[7590,7590],"mapped",[618]],[[7591,7591],"mapped",[7547]],[[7592,7592],"mapped",[669]],[[7593,7593],"mapped",[621]],[[7594,7594],"mapped",[7557]],[[7595,7595],"mapped",[671]],[[7596,7596],"mapped",[625]],[[7597,7597],"mapped",[624]],[[7598,7598],"mapped",[626]],[[7599,7599],"mapped",[627]],[[7600,7600],"mapped",[628]],[[7601,7601],"mapped",[629]],[[7602,7602],"mapped",[632]],[[7603,7603],"mapped",[642]],[[7604,7604],"mapped",[643]],[[7605,7605],"mapped",[427]],[[7606,7606],"mapped",[649]],[[7607,7607],"mapped",[650]],[[7608,7608],"mapped",[7452]],[[7609,7609],"mapped",[651]],[[7610,7610],"mapped",[652]],[[7611,7611],"mapped",[122]],[[7612,7612],"mapped",[656]],[[7613,7613],"mapped",[657]],[[7614,7614],"mapped",[658]],[[7615,7615],"mapped",[952]],[[7616,7619],"valid"],[[7620,7626],"valid"],[[7627,7654],"valid"],[[7655,7669],"valid"],[[7670,7675],"disallowed"],[[7676,7676],"valid"],[[7677,7677],"valid"],[[7678,7679],"valid"],[[7680,7680],"mapped",[7681]],[[7681,7681],"valid"],[[7682,7682],"mapped",[7683]],[[7683,7683],"valid"],[[7684,7684],"mapped",[7685]],[[7685,7685],"valid"],[[7686,7686],"mapped",[7687]],[[7687,7687],"valid"],[[7688,7688],"mapped",[7689]],[[
7689,7689],"valid"],[[7690,7690],"mapped",[7691]],[[7691,7691],"valid"],[[7692,7692],"mapped",[7693]],[[7693,7693],"valid"],[[7694,7694],"mapped",[7695]],[[7695,7695],"valid"],[[7696,7696],"mapped",[7697]],[[7697,7697],"valid"],[[7698,7698],"mapped",[7699]],[[7699,7699],"valid"],[[7700,7700],"mapped",[7701]],[[7701,7701],"valid"],[[7702,7702],"mapped",[7703]],[[7703,7703],"valid"],[[7704,7704],"mapped",[7705]],[[7705,7705],"valid"],[[7706,7706],"mapped",[7707]],[[7707,7707],"valid"],[[7708,7708],"mapped",[7709]],[[7709,7709],"valid"],[[7710,7710],"mapped",[7711]],[[7711,7711],"valid"],[[7712,7712],"mapped",[7713]],[[7713,7713],"valid"],[[7714,7714],"mapped",[7715]],[[7715,7715],"valid"],[[7716,7716],"mapped",[7717]],[[7717,7717],"valid"],[[7718,7718],"mapped",[7719]],[[7719,7719],"valid"],[[7720,7720],"mapped",[7721]],[[7721,7721],"valid"],[[7722,7722],"mapped",[7723]],[[7723,7723],"valid"],[[7724,7724],"mapped",[7725]],[[7725,7725],"valid"],[[7726,7726],"mapped",[7727]],[[7727,7727],"valid"],[[7728,7728],"mapped",[7729]],[[7729,7729],"valid"],[[7730,7730],"mapped",[7731]],[[7731,7731],"valid"],[[7732,7732],"mapped",[7733]],[[7733,7733],"valid"],[[7734,7734],"mapped",[7735]],[[7735,7735],"valid"],[[7736,7736],"mapped",[7737]],[[7737,7737],"valid"],[[7738,7738],"mapped",[7739]],[[7739,7739],"valid"],[[7740,7740],"mapped",[7741]],[[7741,7741],"valid"],[[7742,7742],"mapped",[7743]],[[7743,7743],"valid"],[[7744,7744],"mapped",[7745]],[[7745,7745],"valid"],[[7746,7746],"mapped",[7747]],[[7747,7747],"valid"],[[7748,7748],"mapped",[7749]],[[7749,7749],"valid"],[[7750,7750],"mapped",[7751]],[[7751,7751],"valid"],[[7752,7752],"mapped",[7753]],[[7753,7753],"valid"],[[7754,7754],"mapped",[7755]],[[7755,7755],"valid"],[[7756,7756],"mapped",[7757]],[[7757,7757],"valid"],[[7758,7758],"mapped",[7759]],[[7759,7759],"valid"],[[7760,7760],"mapped",[7761]],[[7761,7761],"valid"],[[7762,7762],"mapped",[7763]],[[7763,7763],"valid"],[[7764,7764],"mapped",[7765]],[[7765,7765],"valid"],[[7766,7766],"mapped",[7767]],[[7767,7767],"valid"],[[7768,7768],"mapped",[7769]],[[7769,7769],"valid"],[[7770,7770],"mapped",[7771]],[[7771,7771],"valid"],[[7772,7772],"mapped",[7773]],[[7773,7773],"valid"],[[7774,7774],"mapped",[7775]],[[7775,7775],"valid"],[[7776,7776],"mapped",[7777]],[[7777,7777],"valid"],[[7778,7778],"mapped",[7779]],[[7779,7779],"valid"],[[7780,7780],"mapped",[7781]],[[7781,7781],"valid"],[[7782,7782],"mapped",[7783]],[[7783,7783],"valid"],[[7784,7784],"mapped",[7785]],[[7785,7785],"valid"],[[7786,7786],"mapped",[7787]],[[7787,7787],"valid"],[[7788,7788],"mapped",[7789]],[[7789,7789],"valid"],[[7790,7790],"mapped",[7791]],[[7791,7791],"valid"],[[7792,7792],"mapped",[7793]],[[7793,7793],"valid"],[[7794,7794],"mapped",[7795]],[[7795,7795],"valid"],[[7796,7796],"mapped",[7797]],[[7797,7797],"valid"],[[7798,7798],"mapped",[7799]],[[7799,7799],"valid"],[[7800,7800],"mapped",[7801]],[[7801,7801],"valid"],[[7802,7802],"mapped",[7803]],[[7803,7803],"valid"],[[7804,7804],"mapped",[7805]],[[7805,7805],"valid"],[[7806,7806],"mapped",[7807]],[[7807,7807],"valid"],[[7808,7808],"mapped",[7809]],[[7809,7809],"valid"],[[7810,7810],"mapped",[7811]],[[7811,7811],"valid"],[[7812,7812],"mapped",[7813]],[[7813,7813],"valid"],[[7814,7814],"mapped",[7815]],[[7815,7815],"valid"],[[7816,7816],"mapped",[7817]],[[7817,7817],"valid"],[[7818,7818],"mapped",[7819]],[[7819,7819],"valid"],[[7820,7820],"mapped",[7821]],[[7821,7821],"valid"],[[7822,7822],"mapped",[7823]],[[7823,7823],"valid"],[[7824,7824],"mapped",[7825]],[[7825,7825],"valid"]
,[[7826,7826],"mapped",[7827]],[[7827,7827],"valid"],[[7828,7828],"mapped",[7829]],[[7829,7833],"valid"],[[7834,7834],"mapped",[97,702]],[[7835,7835],"mapped",[7777]],[[7836,7837],"valid"],[[7838,7838],"mapped",[115,115]],[[7839,7839],"valid"],[[7840,7840],"mapped",[7841]],[[7841,7841],"valid"],[[7842,7842],"mapped",[7843]],[[7843,7843],"valid"],[[7844,7844],"mapped",[7845]],[[7845,7845],"valid"],[[7846,7846],"mapped",[7847]],[[7847,7847],"valid"],[[7848,7848],"mapped",[7849]],[[7849,7849],"valid"],[[7850,7850],"mapped",[7851]],[[7851,7851],"valid"],[[7852,7852],"mapped",[7853]],[[7853,7853],"valid"],[[7854,7854],"mapped",[7855]],[[7855,7855],"valid"],[[7856,7856],"mapped",[7857]],[[7857,7857],"valid"],[[7858,7858],"mapped",[7859]],[[7859,7859],"valid"],[[7860,7860],"mapped",[7861]],[[7861,7861],"valid"],[[7862,7862],"mapped",[7863]],[[7863,7863],"valid"],[[7864,7864],"mapped",[7865]],[[7865,7865],"valid"],[[7866,7866],"mapped",[7867]],[[7867,7867],"valid"],[[7868,7868],"mapped",[7869]],[[7869,7869],"valid"],[[7870,7870],"mapped",[7871]],[[7871,7871],"valid"],[[7872,7872],"mapped",[7873]],[[7873,7873],"valid"],[[7874,7874],"mapped",[7875]],[[7875,7875],"valid"],[[7876,7876],"mapped",[7877]],[[7877,7877],"valid"],[[7878,7878],"mapped",[7879]],[[7879,7879],"valid"],[[7880,7880],"mapped",[7881]],[[7881,7881],"valid"],[[7882,7882],"mapped",[7883]],[[7883,7883],"valid"],[[7884,7884],"mapped",[7885]],[[7885,7885],"valid"],[[7886,7886],"mapped",[7887]],[[7887,7887],"valid"],[[7888,7888],"mapped",[7889]],[[7889,7889],"valid"],[[7890,7890],"mapped",[7891]],[[7891,7891],"valid"],[[7892,7892],"mapped",[7893]],[[7893,7893],"valid"],[[7894,7894],"mapped",[7895]],[[7895,7895],"valid"],[[7896,7896],"mapped",[7897]],[[7897,7897],"valid"],[[7898,7898],"mapped",[7899]],[[7899,7899],"valid"],[[7900,7900],"mapped",[7901]],[[7901,7901],"valid"],[[7902,7902],"mapped",[7903]],[[7903,7903],"valid"],[[7904,7904],"mapped",[7905]],[[7905,7905],"valid"],[[7906,7906],"mapped",[7907]],[[7907,7907],"valid"],[[7908,7908],"mapped",[7909]],[[7909,7909],"valid"],[[7910,7910],"mapped",[7911]],[[7911,7911],"valid"],[[7912,7912],"mapped",[7913]],[[7913,7913],"valid"],[[7914,7914],"mapped",[7915]],[[7915,7915],"valid"],[[7916,7916],"mapped",[7917]],[[7917,7917],"valid"],[[7918,7918],"mapped",[7919]],[[7919,7919],"valid"],[[7920,7920],"mapped",[7921]],[[7921,7921],"valid"],[[7922,7922],"mapped",[7923]],[[7923,7923],"valid"],[[7924,7924],"mapped",[7925]],[[7925,7925],"valid"],[[7926,7926],"mapped",[7927]],[[7927,7927],"valid"],[[7928,7928],"mapped",[7929]],[[7929,7929],"valid"],[[7930,7930],"mapped",[7931]],[[7931,7931],"valid"],[[7932,7932],"mapped",[7933]],[[7933,7933],"valid"],[[7934,7934],"mapped",[7935]],[[7935,7935],"valid"],[[7936,7943],"valid"],[[7944,7944],"mapped",[7936]],[[7945,7945],"mapped",[7937]],[[7946,7946],"mapped",[7938]],[[7947,7947],"mapped",[7939]],[[7948,7948],"mapped",[7940]],[[7949,7949],"mapped",[7941]],[[7950,7950],"mapped",[7942]],[[7951,7951],"mapped",[7943]],[[7952,7957],"valid"],[[7958,7959],"disallowed"],[[7960,7960],"mapped",[7952]],[[7961,7961],"mapped",[7953]],[[7962,7962],"mapped",[7954]],[[7963,7963],"mapped",[7955]],[[7964,7964],"mapped",[7956]],[[7965,7965],"mapped",[7957]],[[7966,7967],"disallowed"],[[7968,7975],"valid"],[[7976,7976],"mapped",[7968]],[[7977,7977],"mapped",[7969]],[[7978,7978],"mapped",[7970]],[[7979,7979],"mapped",[7971]],[[7980,7980],"mapped",[7972]],[[7981,7981],"mapped",[7973]],[[7982,7982],"mapped",[7974]],[[7983,7983],"mapped",[7975]],[[7984,7991],"valid"],[[7992,7992],
"mapped",[7984]],[[7993,7993],"mapped",[7985]],[[7994,7994],"mapped",[7986]],[[7995,7995],"mapped",[7987]],[[7996,7996],"mapped",[7988]],[[7997,7997],"mapped",[7989]],[[7998,7998],"mapped",[7990]],[[7999,7999],"mapped",[7991]],[[8000,8005],"valid"],[[8006,8007],"disallowed"],[[8008,8008],"mapped",[8000]],[[8009,8009],"mapped",[8001]],[[8010,8010],"mapped",[8002]],[[8011,8011],"mapped",[8003]],[[8012,8012],"mapped",[8004]],[[8013,8013],"mapped",[8005]],[[8014,8015],"disallowed"],[[8016,8023],"valid"],[[8024,8024],"disallowed"],[[8025,8025],"mapped",[8017]],[[8026,8026],"disallowed"],[[8027,8027],"mapped",[8019]],[[8028,8028],"disallowed"],[[8029,8029],"mapped",[8021]],[[8030,8030],"disallowed"],[[8031,8031],"mapped",[8023]],[[8032,8039],"valid"],[[8040,8040],"mapped",[8032]],[[8041,8041],"mapped",[8033]],[[8042,8042],"mapped",[8034]],[[8043,8043],"mapped",[8035]],[[8044,8044],"mapped",[8036]],[[8045,8045],"mapped",[8037]],[[8046,8046],"mapped",[8038]],[[8047,8047],"mapped",[8039]],[[8048,8048],"valid"],[[8049,8049],"mapped",[940]],[[8050,8050],"valid"],[[8051,8051],"mapped",[941]],[[8052,8052],"valid"],[[8053,8053],"mapped",[942]],[[8054,8054],"valid"],[[8055,8055],"mapped",[943]],[[8056,8056],"valid"],[[8057,8057],"mapped",[972]],[[8058,8058],"valid"],[[8059,8059],"mapped",[973]],[[8060,8060],"valid"],[[8061,8061],"mapped",[974]],[[8062,8063],"disallowed"],[[8064,8064],"mapped",[7936,953]],[[8065,8065],"mapped",[7937,953]],[[8066,8066],"mapped",[7938,953]],[[8067,8067],"mapped",[7939,953]],[[8068,8068],"mapped",[7940,953]],[[8069,8069],"mapped",[7941,953]],[[8070,8070],"mapped",[7942,953]],[[8071,8071],"mapped",[7943,953]],[[8072,8072],"mapped",[7936,953]],[[8073,8073],"mapped",[7937,953]],[[8074,8074],"mapped",[7938,953]],[[8075,8075],"mapped",[7939,953]],[[8076,8076],"mapped",[7940,953]],[[8077,8077],"mapped",[7941,953]],[[8078,8078],"mapped",[7942,953]],[[8079,8079],"mapped",[7943,953]],[[8080,8080],"mapped",[7968,953]],[[8081,8081],"mapped",[7969,953]],[[8082,8082],"mapped",[7970,953]],[[8083,8083],"mapped",[7971,953]],[[8084,8084],"mapped",[7972,953]],[[8085,8085],"mapped",[7973,953]],[[8086,8086],"mapped",[7974,953]],[[8087,8087],"mapped",[7975,953]],[[8088,8088],"mapped",[7968,953]],[[8089,8089],"mapped",[7969,953]],[[8090,8090],"mapped",[7970,953]],[[8091,8091],"mapped",[7971,953]],[[8092,8092],"mapped",[7972,953]],[[8093,8093],"mapped",[7973,953]],[[8094,8094],"mapped",[7974,953]],[[8095,8095],"mapped",[7975,953]],[[8096,8096],"mapped",[8032,953]],[[8097,8097],"mapped",[8033,953]],[[8098,8098],"mapped",[8034,953]],[[8099,8099],"mapped",[8035,953]],[[8100,8100],"mapped",[8036,953]],[[8101,8101],"mapped",[8037,953]],[[8102,8102],"mapped",[8038,953]],[[8103,8103],"mapped",[8039,953]],[[8104,8104],"mapped",[8032,953]],[[8105,8105],"mapped",[8033,953]],[[8106,8106],"mapped",[8034,953]],[[8107,8107],"mapped",[8035,953]],[[8108,8108],"mapped",[8036,953]],[[8109,8109],"mapped",[8037,953]],[[8110,8110],"mapped",[8038,953]],[[8111,8111],"mapped",[8039,953]],[[8112,8113],"valid"],[[8114,8114],"mapped",[8048,953]],[[8115,8115],"mapped",[945,953]],[[8116,8116],"mapped",[940,953]],[[8117,8117],"disallowed"],[[8118,8118],"valid"],[[8119,8119],"mapped",[8118,953]],[[8120,8120],"mapped",[8112]],[[8121,8121],"mapped",[8113]],[[8122,8122],"mapped",[8048]],[[8123,8123],"mapped",[940]],[[8124,8124],"mapped",[945,953]],[[8125,8125],"disallowed_STD3_mapped",[32,787]],[[8126,8126],"mapped",[953]],[[8127,8127],"disallowed_STD3_mapped",[32,787]],[[8128,8128],"disallowed_STD3_mapped",[32,834]],[[8129,8129],"
disallowed_STD3_mapped",[32,776,834]],[[8130,8130],"mapped",[8052,953]],[[8131,8131],"mapped",[951,953]],[[8132,8132],"mapped",[942,953]],[[8133,8133],"disallowed"],[[8134,8134],"valid"],[[8135,8135],"mapped",[8134,953]],[[8136,8136],"mapped",[8050]],[[8137,8137],"mapped",[941]],[[8138,8138],"mapped",[8052]],[[8139,8139],"mapped",[942]],[[8140,8140],"mapped",[951,953]],[[8141,8141],"disallowed_STD3_mapped",[32,787,768]],[[8142,8142],"disallowed_STD3_mapped",[32,787,769]],[[8143,8143],"disallowed_STD3_mapped",[32,787,834]],[[8144,8146],"valid"],[[8147,8147],"mapped",[912]],[[8148,8149],"disallowed"],[[8150,8151],"valid"],[[8152,8152],"mapped",[8144]],[[8153,8153],"mapped",[8145]],[[8154,8154],"mapped",[8054]],[[8155,8155],"mapped",[943]],[[8156,8156],"disallowed"],[[8157,8157],"disallowed_STD3_mapped",[32,788,768]],[[8158,8158],"disallowed_STD3_mapped",[32,788,769]],[[8159,8159],"disallowed_STD3_mapped",[32,788,834]],[[8160,8162],"valid"],[[8163,8163],"mapped",[944]],[[8164,8167],"valid"],[[8168,8168],"mapped",[8160]],[[8169,8169],"mapped",[8161]],[[8170,8170],"mapped",[8058]],[[8171,8171],"mapped",[973]],[[8172,8172],"mapped",[8165]],[[8173,8173],"disallowed_STD3_mapped",[32,776,768]],[[8174,8174],"disallowed_STD3_mapped",[32,776,769]],[[8175,8175],"disallowed_STD3_mapped",[96]],[[8176,8177],"disallowed"],[[8178,8178],"mapped",[8060,953]],[[8179,8179],"mapped",[969,953]],[[8180,8180],"mapped",[974,953]],[[8181,8181],"disallowed"],[[8182,8182],"valid"],[[8183,8183],"mapped",[8182,953]],[[8184,8184],"mapped",[8056]],[[8185,8185],"mapped",[972]],[[8186,8186],"mapped",[8060]],[[8187,8187],"mapped",[974]],[[8188,8188],"mapped",[969,953]],[[8189,8189],"disallowed_STD3_mapped",[32,769]],[[8190,8190],"disallowed_STD3_mapped",[32,788]],[[8191,8191],"disallowed"],[[8192,8202],"disallowed_STD3_mapped",[32]],[[8203,8203],"ignored"],[[8204,8205],"deviation",[]],[[8206,8207],"disallowed"],[[8208,8208],"valid",[],"NV8"],[[8209,8209],"mapped",[8208]],[[8210,8214],"valid",[],"NV8"],[[8215,8215],"disallowed_STD3_mapped",[32,819]],[[8216,8227],"valid",[],"NV8"],[[8228,8230],"disallowed"],[[8231,8231],"valid",[],"NV8"],[[8232,8238],"disallowed"],[[8239,8239],"disallowed_STD3_mapped",[32]],[[8240,8242],"valid",[],"NV8"],[[8243,8243],"mapped",[8242,8242]],[[8244,8244],"mapped",[8242,8242,8242]],[[8245,8245],"valid",[],"NV8"],[[8246,8246],"mapped",[8245,8245]],[[8247,8247],"mapped",[8245,8245,8245]],[[8248,8251],"valid",[],"NV8"],[[8252,8252],"disallowed_STD3_mapped",[33,33]],[[8253,8253],"valid",[],"NV8"],[[8254,8254],"disallowed_STD3_mapped",[32,773]],[[8255,8262],"valid",[],"NV8"],[[8263,8263],"disallowed_STD3_mapped",[63,63]],[[8264,8264],"disallowed_STD3_mapped",[63,33]],[[8265,8265],"disallowed_STD3_mapped",[33,63]],[[8266,8269],"valid",[],"NV8"],[[8270,8274],"valid",[],"NV8"],[[8275,8276],"valid",[],"NV8"],[[8277,8278],"valid",[],"NV8"],[[8279,8279],"mapped",[8242,8242,8242,8242]],[[8280,8286],"valid",[],"NV8"],[[8287,8287],"disallowed_STD3_mapped",[32]],[[8288,8288],"ignored"],[[8289,8291],"disallowed"],[[8292,8292],"ignored"],[[8293,8293],"disallowed"],[[8294,8297],"disallowed"],[[8298,8303],"disallowed"],[[8304,8304],"mapped",[48]],[[8305,8305],"mapped",[105]],[[8306,8307],"disallowed"],[[8308,8308],"mapped",[52]],[[8309,8309],"mapped",[53]],[[8310,8310],"mapped",[54]],[[8311,8311],"mapped",[55]],[[8312,8312],"mapped",[56]],[[8313,8313],"mapped",[57]],[[8314,8314],"disallowed_STD3_mapped",[43]],[[8315,8315],"mapped",[8722]],[[8316,8316],"disallowed_STD3_mapped",[61]],[[8317,8317],"disallowed_STD3_mapped
",[40]],[[8318,8318],"disallowed_STD3_mapped",[41]],[[8319,8319],"mapped",[110]],[[8320,8320],"mapped",[48]],[[8321,8321],"mapped",[49]],[[8322,8322],"mapped",[50]],[[8323,8323],"mapped",[51]],[[8324,8324],"mapped",[52]],[[8325,8325],"mapped",[53]],[[8326,8326],"mapped",[54]],[[8327,8327],"mapped",[55]],[[8328,8328],"mapped",[56]],[[8329,8329],"mapped",[57]],[[8330,8330],"disallowed_STD3_mapped",[43]],[[8331,8331],"mapped",[8722]],[[8332,8332],"disallowed_STD3_mapped",[61]],[[8333,8333],"disallowed_STD3_mapped",[40]],[[8334,8334],"disallowed_STD3_mapped",[41]],[[8335,8335],"disallowed"],[[8336,8336],"mapped",[97]],[[8337,8337],"mapped",[101]],[[8338,8338],"mapped",[111]],[[8339,8339],"mapped",[120]],[[8340,8340],"mapped",[601]],[[8341,8341],"mapped",[104]],[[8342,8342],"mapped",[107]],[[8343,8343],"mapped",[108]],[[8344,8344],"mapped",[109]],[[8345,8345],"mapped",[110]],[[8346,8346],"mapped",[112]],[[8347,8347],"mapped",[115]],[[8348,8348],"mapped",[116]],[[8349,8351],"disallowed"],[[8352,8359],"valid",[],"NV8"],[[8360,8360],"mapped",[114,115]],[[8361,8362],"valid",[],"NV8"],[[8363,8363],"valid",[],"NV8"],[[8364,8364],"valid",[],"NV8"],[[8365,8367],"valid",[],"NV8"],[[8368,8369],"valid",[],"NV8"],[[8370,8373],"valid",[],"NV8"],[[8374,8376],"valid",[],"NV8"],[[8377,8377],"valid",[],"NV8"],[[8378,8378],"valid",[],"NV8"],[[8379,8381],"valid",[],"NV8"],[[8382,8382],"valid",[],"NV8"],[[8383,8399],"disallowed"],[[8400,8417],"valid",[],"NV8"],[[8418,8419],"valid",[],"NV8"],[[8420,8426],"valid",[],"NV8"],[[8427,8427],"valid",[],"NV8"],[[8428,8431],"valid",[],"NV8"],[[8432,8432],"valid",[],"NV8"],[[8433,8447],"disallowed"],[[8448,8448],"disallowed_STD3_mapped",[97,47,99]],[[8449,8449],"disallowed_STD3_mapped",[97,47,115]],[[8450,8450],"mapped",[99]],[[8451,8451],"mapped",[176,99]],[[8452,8452],"valid",[],"NV8"],[[8453,8453],"disallowed_STD3_mapped",[99,47,111]],[[8454,8454],"disallowed_STD3_mapped",[99,47,117]],[[8455,8455],"mapped",[603]],[[8456,8456],"valid",[],"NV8"],[[8457,8457],"mapped",[176,102]],[[8458,8458],"mapped",[103]],[[8459,8462],"mapped",[104]],[[8463,8463],"mapped",[295]],[[8464,8465],"mapped",[105]],[[8466,8467],"mapped",[108]],[[8468,8468],"valid",[],"NV8"],[[8469,8469],"mapped",[110]],[[8470,8470],"mapped",[110,111]],[[8471,8472],"valid",[],"NV8"],[[8473,8473],"mapped",[112]],[[8474,8474],"mapped",[113]],[[8475,8477],"mapped",[114]],[[8478,8479],"valid",[],"NV8"],[[8480,8480],"mapped",[115,109]],[[8481,8481],"mapped",[116,101,108]],[[8482,8482],"mapped",[116,109]],[[8483,8483],"valid",[],"NV8"],[[8484,8484],"mapped",[122]],[[8485,8485],"valid",[],"NV8"],[[8486,8486],"mapped",[969]],[[8487,8487],"valid",[],"NV8"],[[8488,8488],"mapped",[122]],[[8489,8489],"valid",[],"NV8"],[[8490,8490],"mapped",[107]],[[8491,8491],"mapped",[229]],[[8492,8492],"mapped",[98]],[[8493,8493],"mapped",[99]],[[8494,8494],"valid",[],"NV8"],[[8495,8496],"mapped",[101]],[[8497,8497],"mapped",[102]],[[8498,8498],"disallowed"],[[8499,8499],"mapped",[109]],[[8500,8500],"mapped",[111]],[[8501,8501],"mapped",[1488]],[[8502,8502],"mapped",[1489]],[[8503,8503],"mapped",[1490]],[[8504,8504],"mapped",[1491]],[[8505,8505],"mapped",[105]],[[8506,8506],"valid",[],"NV8"],[[8507,8507],"mapped",[102,97,120]],[[8508,8508],"mapped",[960]],[[8509,8510],"mapped",[947]],[[8511,8511],"mapped",[960]],[[8512,8512],"mapped",[8721]],[[8513,8516],"valid",[],"NV8"],[[8517,8518],"mapped",[100]],[[8519,8519],"mapped",[101]],[[8520,8520],"mapped",[105]],[[8521,8521],"mapped",[106]],[[8522,8523],"valid",[],"NV8"],[[8524,8524],"valid",[],"N
V8"],[[8525,8525],"valid",[],"NV8"],[[8526,8526],"valid"],[[8527,8527],"valid",[],"NV8"],[[8528,8528],"mapped",[49,8260,55]],[[8529,8529],"mapped",[49,8260,57]],[[8530,8530],"mapped",[49,8260,49,48]],[[8531,8531],"mapped",[49,8260,51]],[[8532,8532],"mapped",[50,8260,51]],[[8533,8533],"mapped",[49,8260,53]],[[8534,8534],"mapped",[50,8260,53]],[[8535,8535],"mapped",[51,8260,53]],[[8536,8536],"mapped",[52,8260,53]],[[8537,8537],"mapped",[49,8260,54]],[[8538,8538],"mapped",[53,8260,54]],[[8539,8539],"mapped",[49,8260,56]],[[8540,8540],"mapped",[51,8260,56]],[[8541,8541],"mapped",[53,8260,56]],[[8542,8542],"mapped",[55,8260,56]],[[8543,8543],"mapped",[49,8260]],[[8544,8544],"mapped",[105]],[[8545,8545],"mapped",[105,105]],[[8546,8546],"mapped",[105,105,105]],[[8547,8547],"mapped",[105,118]],[[8548,8548],"mapped",[118]],[[8549,8549],"mapped",[118,105]],[[8550,8550],"mapped",[118,105,105]],[[8551,8551],"mapped",[118,105,105,105]],[[8552,8552],"mapped",[105,120]],[[8553,8553],"mapped",[120]],[[8554,8554],"mapped",[120,105]],[[8555,8555],"mapped",[120,105,105]],[[8556,8556],"mapped",[108]],[[8557,8557],"mapped",[99]],[[8558,8558],"mapped",[100]],[[8559,8559],"mapped",[109]],[[8560,8560],"mapped",[105]],[[8561,8561],"mapped",[105,105]],[[8562,8562],"mapped",[105,105,105]],[[8563,8563],"mapped",[105,118]],[[8564,8564],"mapped",[118]],[[8565,8565],"mapped",[118,105]],[[8566,8566],"mapped",[118,105,105]],[[8567,8567],"mapped",[118,105,105,105]],[[8568,8568],"mapped",[105,120]],[[8569,8569],"mapped",[120]],[[8570,8570],"mapped",[120,105]],[[8571,8571],"mapped",[120,105,105]],[[8572,8572],"mapped",[108]],[[8573,8573],"mapped",[99]],[[8574,8574],"mapped",[100]],[[8575,8575],"mapped",[109]],[[8576,8578],"valid",[],"NV8"],[[8579,8579],"disallowed"],[[8580,8580],"valid"],[[8581,8584],"valid",[],"NV8"],[[8585,8585],"mapped",[48,8260,51]],[[8586,8587],"valid",[],"NV8"],[[8588,8591],"disallowed"],[[8592,8682],"valid",[],"NV8"],[[8683,8691],"valid",[],"NV8"],[[8692,8703],"valid",[],"NV8"],[[8704,8747],"valid",[],"NV8"],[[8748,8748],"mapped",[8747,8747]],[[8749,8749],"mapped",[8747,8747,8747]],[[8750,8750],"valid",[],"NV8"],[[8751,8751],"mapped",[8750,8750]],[[8752,8752],"mapped",[8750,8750,8750]],[[8753,8799],"valid",[],"NV8"],[[8800,8800],"disallowed_STD3_valid"],[[8801,8813],"valid",[],"NV8"],[[8814,8815],"disallowed_STD3_valid"],[[8816,8945],"valid",[],"NV8"],[[8946,8959],"valid",[],"NV8"],[[8960,8960],"valid",[],"NV8"],[[8961,8961],"valid",[],"NV8"],[[8962,9000],"valid",[],"NV8"],[[9001,9001],"mapped",[12296]],[[9002,9002],"mapped",[12297]],[[9003,9082],"valid",[],"NV8"],[[9083,9083],"valid",[],"NV8"],[[9084,9084],"valid",[],"NV8"],[[9085,9114],"valid",[],"NV8"],[[9115,9166],"valid",[],"NV8"],[[9167,9168],"valid",[],"NV8"],[[9169,9179],"valid",[],"NV8"],[[9180,9191],"valid",[],"NV8"],[[9192,9192],"valid",[],"NV8"],[[9193,9203],"valid",[],"NV8"],[[9204,9210],"valid",[],"NV8"],[[9211,9215],"disallowed"],[[9216,9252],"valid",[],"NV8"],[[9253,9254],"valid",[],"NV8"],[[9255,9279],"disallowed"],[[9280,9290],"valid",[],"NV8"],[[9291,9311],"disallowed"],[[9312,9312],"mapped",[49]],[[9313,9313],"mapped",[50]],[[9314,9314],"mapped",[51]],[[9315,9315],"mapped",[52]],[[9316,9316],"mapped",[53]],[[9317,9317],"mapped",[54]],[[9318,9318],"mapped",[55]],[[9319,9319],"mapped",[56]],[[9320,9320],"mapped",[57]],[[9321,9321],"mapped",[49,48]],[[9322,9322],"mapped",[49,49]],[[9323,9323],"mapped",[49,50]],[[9324,9324],"mapped",[49,51]],[[9325,9325],"mapped",[49,52]],[[9326,9326],"mapped",[49,53]],[[9327,9327],"mapped",[49,54]],[[9
328,9328],"mapped",[49,55]],[[9329,9329],"mapped",[49,56]],[[9330,9330],"mapped",[49,57]],[[9331,9331],"mapped",[50,48]],[[9332,9332],"disallowed_STD3_mapped",[40,49,41]],[[9333,9333],"disallowed_STD3_mapped",[40,50,41]],[[9334,9334],"disallowed_STD3_mapped",[40,51,41]],[[9335,9335],"disallowed_STD3_mapped",[40,52,41]],[[9336,9336],"disallowed_STD3_mapped",[40,53,41]],[[9337,9337],"disallowed_STD3_mapped",[40,54,41]],[[9338,9338],"disallowed_STD3_mapped",[40,55,41]],[[9339,9339],"disallowed_STD3_mapped",[40,56,41]],[[9340,9340],"disallowed_STD3_mapped",[40,57,41]],[[9341,9341],"disallowed_STD3_mapped",[40,49,48,41]],[[9342,9342],"disallowed_STD3_mapped",[40,49,49,41]],[[9343,9343],"disallowed_STD3_mapped",[40,49,50,41]],[[9344,9344],"disallowed_STD3_mapped",[40,49,51,41]],[[9345,9345],"disallowed_STD3_mapped",[40,49,52,41]],[[9346,9346],"disallowed_STD3_mapped",[40,49,53,41]],[[9347,9347],"disallowed_STD3_mapped",[40,49,54,41]],[[9348,9348],"disallowed_STD3_mapped",[40,49,55,41]],[[9349,9349],"disallowed_STD3_mapped",[40,49,56,41]],[[9350,9350],"disallowed_STD3_mapped",[40,49,57,41]],[[9351,9351],"disallowed_STD3_mapped",[40,50,48,41]],[[9352,9371],"disallowed"],[[9372,9372],"disallowed_STD3_mapped",[40,97,41]],[[9373,9373],"disallowed_STD3_mapped",[40,98,41]],[[9374,9374],"disallowed_STD3_mapped",[40,99,41]],[[9375,9375],"disallowed_STD3_mapped",[40,100,41]],[[9376,9376],"disallowed_STD3_mapped",[40,101,41]],[[9377,9377],"disallowed_STD3_mapped",[40,102,41]],[[9378,9378],"disallowed_STD3_mapped",[40,103,41]],[[9379,9379],"disallowed_STD3_mapped",[40,104,41]],[[9380,9380],"disallowed_STD3_mapped",[40,105,41]],[[9381,9381],"disallowed_STD3_mapped",[40,106,41]],[[9382,9382],"disallowed_STD3_mapped",[40,107,41]],[[9383,9383],"disallowed_STD3_mapped",[40,108,41]],[[9384,9384],"disallowed_STD3_mapped",[40,109,41]],[[9385,9385],"disallowed_STD3_mapped",[40,110,41]],[[9386,9386],"disallowed_STD3_mapped",[40,111,41]],[[9387,9387],"disallowed_STD3_mapped",[40,112,41]],[[9388,9388],"disallowed_STD3_mapped",[40,113,41]],[[9389,9389],"disallowed_STD3_mapped",[40,114,41]],[[9390,9390],"disallowed_STD3_mapped",[40,115,41]],[[9391,9391],"disallowed_STD3_mapped",[40,116,41]],[[9392,9392],"disallowed_STD3_mapped",[40,117,41]],[[9393,9393],"disallowed_STD3_mapped",[40,118,41]],[[9394,9394],"disallowed_STD3_mapped",[40,119,41]],[[9395,9395],"disallowed_STD3_mapped",[40,120,41]],[[9396,9396],"disallowed_STD3_mapped",[40,121,41]],[[9397,9397],"disallowed_STD3_mapped",[40,122,41]],[[9398,9398],"mapped",[97]],[[9399,9399],"mapped",[98]],[[9400,9400],"mapped",[99]],[[9401,9401],"mapped",[100]],[[9402,9402],"mapped",[101]],[[9403,9403],"mapped",[102]],[[9404,9404],"mapped",[103]],[[9405,9405],"mapped",[104]],[[9406,9406],"mapped",[105]],[[9407,9407],"mapped",[106]],[[9408,9408],"mapped",[107]],[[9409,9409],"mapped",[108]],[[9410,9410],"mapped",[109]],[[9411,9411],"mapped",[110]],[[9412,9412],"mapped",[111]],[[9413,9413],"mapped",[112]],[[9414,9414],"mapped",[113]],[[9415,9415],"mapped",[114]],[[9416,9416],"mapped",[115]],[[9417,9417],"mapped",[116]],[[9418,9418],"mapped",[117]],[[9419,9419],"mapped",[118]],[[9420,9420],"mapped",[119]],[[9421,9421],"mapped",[120]],[[9422,9422],"mapped",[121]],[[9423,9423],"mapped",[122]],[[9424,9424],"mapped",[97]],[[9425,9425],"mapped",[98]],[[9426,9426],"mapped",[99]],[[9427,9427],"mapped",[100]],[[9428,9428],"mapped",[101]],[[9429,9429],"mapped",[102]],[[9430,9430],"mapped",[103]],[[9431,9431],"mapped",[104]],[[9432,9432],"mapped",[105]],[[9433,9433],"mapped",[106]],[[9434,9434],
"mapped",[107]],[[9435,9435],"mapped",[108]],[[9436,9436],"mapped",[109]],[[9437,9437],"mapped",[110]],[[9438,9438],"mapped",[111]],[[9439,9439],"mapped",[112]],[[9440,9440],"mapped",[113]],[[9441,9441],"mapped",[114]],[[9442,9442],"mapped",[115]],[[9443,9443],"mapped",[116]],[[9444,9444],"mapped",[117]],[[9445,9445],"mapped",[118]],[[9446,9446],"mapped",[119]],[[9447,9447],"mapped",[120]],[[9448,9448],"mapped",[121]],[[9449,9449],"mapped",[122]],[[9450,9450],"mapped",[48]],[[9451,9470],"valid",[],"NV8"],[[9471,9471],"valid",[],"NV8"],[[9472,9621],"valid",[],"NV8"],[[9622,9631],"valid",[],"NV8"],[[9632,9711],"valid",[],"NV8"],[[9712,9719],"valid",[],"NV8"],[[9720,9727],"valid",[],"NV8"],[[9728,9747],"valid",[],"NV8"],[[9748,9749],"valid",[],"NV8"],[[9750,9751],"valid",[],"NV8"],[[9752,9752],"valid",[],"NV8"],[[9753,9753],"valid",[],"NV8"],[[9754,9839],"valid",[],"NV8"],[[9840,9841],"valid",[],"NV8"],[[9842,9853],"valid",[],"NV8"],[[9854,9855],"valid",[],"NV8"],[[9856,9865],"valid",[],"NV8"],[[9866,9873],"valid",[],"NV8"],[[9874,9884],"valid",[],"NV8"],[[9885,9885],"valid",[],"NV8"],[[9886,9887],"valid",[],"NV8"],[[9888,9889],"valid",[],"NV8"],[[9890,9905],"valid",[],"NV8"],[[9906,9906],"valid",[],"NV8"],[[9907,9916],"valid",[],"NV8"],[[9917,9919],"valid",[],"NV8"],[[9920,9923],"valid",[],"NV8"],[[9924,9933],"valid",[],"NV8"],[[9934,9934],"valid",[],"NV8"],[[9935,9953],"valid",[],"NV8"],[[9954,9954],"valid",[],"NV8"],[[9955,9955],"valid",[],"NV8"],[[9956,9959],"valid",[],"NV8"],[[9960,9983],"valid",[],"NV8"],[[9984,9984],"valid",[],"NV8"],[[9985,9988],"valid",[],"NV8"],[[9989,9989],"valid",[],"NV8"],[[9990,9993],"valid",[],"NV8"],[[9994,9995],"valid",[],"NV8"],[[9996,10023],"valid",[],"NV8"],[[10024,10024],"valid",[],"NV8"],[[10025,10059],"valid",[],"NV8"],[[10060,10060],"valid",[],"NV8"],[[10061,10061],"valid",[],"NV8"],[[10062,10062],"valid",[],"NV8"],[[10063,10066],"valid",[],"NV8"],[[10067,10069],"valid",[],"NV8"],[[10070,10070],"valid",[],"NV8"],[[10071,10071],"valid",[],"NV8"],[[10072,10078],"valid",[],"NV8"],[[10079,10080],"valid",[],"NV8"],[[10081,10087],"valid",[],"NV8"],[[10088,10101],"valid",[],"NV8"],[[10102,10132],"valid",[],"NV8"],[[10133,10135],"valid",[],"NV8"],[[10136,10159],"valid",[],"NV8"],[[10160,10160],"valid",[],"NV8"],[[10161,10174],"valid",[],"NV8"],[[10175,10175],"valid",[],"NV8"],[[10176,10182],"valid",[],"NV8"],[[10183,10186],"valid",[],"NV8"],[[10187,10187],"valid",[],"NV8"],[[10188,10188],"valid",[],"NV8"],[[10189,10189],"valid",[],"NV8"],[[10190,10191],"valid",[],"NV8"],[[10192,10219],"valid",[],"NV8"],[[10220,10223],"valid",[],"NV8"],[[10224,10239],"valid",[],"NV8"],[[10240,10495],"valid",[],"NV8"],[[10496,10763],"valid",[],"NV8"],[[10764,10764],"mapped",[8747,8747,8747,8747]],[[10765,10867],"valid",[],"NV8"],[[10868,10868],"disallowed_STD3_mapped",[58,58,61]],[[10869,10869],"disallowed_STD3_mapped",[61,61]],[[10870,10870],"disallowed_STD3_mapped",[61,61,61]],[[10871,10971],"valid",[],"NV8"],[[10972,10972],"mapped",[10973,824]],[[10973,11007],"valid",[],"NV8"],[[11008,11021],"valid",[],"NV8"],[[11022,11027],"valid",[],"NV8"],[[11028,11034],"valid",[],"NV8"],[[11035,11039],"valid",[],"NV8"],[[11040,11043],"valid",[],"NV8"],[[11044,11084],"valid",[],"NV8"],[[11085,11087],"valid",[],"NV8"],[[11088,11092],"valid",[],"NV8"],[[11093,11097],"valid",[],"NV8"],[[11098,11123],"valid",[],"NV8"],[[11124,11125],"disallowed"],[[11126,11157],"valid",[],"NV8"],[[11158,11159],"disallowed"],[[11160,11193],"valid",[],"NV8"],[[11194,11196],"disallowed"],[[11197,11208],"valid",[],
"NV8"],[[11209,11209],"disallowed"],[[11210,11217],"valid",[],"NV8"],[[11218,11243],"disallowed"],[[11244,11247],"valid",[],"NV8"],[[11248,11263],"disallowed"],[[11264,11264],"mapped",[11312]],[[11265,11265],"mapped",[11313]],[[11266,11266],"mapped",[11314]],[[11267,11267],"mapped",[11315]],[[11268,11268],"mapped",[11316]],[[11269,11269],"mapped",[11317]],[[11270,11270],"mapped",[11318]],[[11271,11271],"mapped",[11319]],[[11272,11272],"mapped",[11320]],[[11273,11273],"mapped",[11321]],[[11274,11274],"mapped",[11322]],[[11275,11275],"mapped",[11323]],[[11276,11276],"mapped",[11324]],[[11277,11277],"mapped",[11325]],[[11278,11278],"mapped",[11326]],[[11279,11279],"mapped",[11327]],[[11280,11280],"mapped",[11328]],[[11281,11281],"mapped",[11329]],[[11282,11282],"mapped",[11330]],[[11283,11283],"mapped",[11331]],[[11284,11284],"mapped",[11332]],[[11285,11285],"mapped",[11333]],[[11286,11286],"mapped",[11334]],[[11287,11287],"mapped",[11335]],[[11288,11288],"mapped",[11336]],[[11289,11289],"mapped",[11337]],[[11290,11290],"mapped",[11338]],[[11291,11291],"mapped",[11339]],[[11292,11292],"mapped",[11340]],[[11293,11293],"mapped",[11341]],[[11294,11294],"mapped",[11342]],[[11295,11295],"mapped",[11343]],[[11296,11296],"mapped",[11344]],[[11297,11297],"mapped",[11345]],[[11298,11298],"mapped",[11346]],[[11299,11299],"mapped",[11347]],[[11300,11300],"mapped",[11348]],[[11301,11301],"mapped",[11349]],[[11302,11302],"mapped",[11350]],[[11303,11303],"mapped",[11351]],[[11304,11304],"mapped",[11352]],[[11305,11305],"mapped",[11353]],[[11306,11306],"mapped",[11354]],[[11307,11307],"mapped",[11355]],[[11308,11308],"mapped",[11356]],[[11309,11309],"mapped",[11357]],[[11310,11310],"mapped",[11358]],[[11311,11311],"disallowed"],[[11312,11358],"valid"],[[11359,11359],"disallowed"],[[11360,11360],"mapped",[11361]],[[11361,11361],"valid"],[[11362,11362],"mapped",[619]],[[11363,11363],"mapped",[7549]],[[11364,11364],"mapped",[637]],[[11365,11366],"valid"],[[11367,11367],"mapped",[11368]],[[11368,11368],"valid"],[[11369,11369],"mapped",[11370]],[[11370,11370],"valid"],[[11371,11371],"mapped",[11372]],[[11372,11372],"valid"],[[11373,11373],"mapped",[593]],[[11374,11374],"mapped",[625]],[[11375,11375],"mapped",[592]],[[11376,11376],"mapped",[594]],[[11377,11377],"valid"],[[11378,11378],"mapped",[11379]],[[11379,11379],"valid"],[[11380,11380],"valid"],[[11381,11381],"mapped",[11382]],[[11382,11383],"valid"],[[11384,11387],"valid"],[[11388,11388],"mapped",[106]],[[11389,11389],"mapped",[118]],[[11390,11390],"mapped",[575]],[[11391,11391],"mapped",[576]],[[11392,11392],"mapped",[11393]],[[11393,11393],"valid"],[[11394,11394],"mapped",[11395]],[[11395,11395],"valid"],[[11396,11396],"mapped",[11397]],[[11397,11397],"valid"],[[11398,11398],"mapped",[11399]],[[11399,11399],"valid"],[[11400,11400],"mapped",[11401]],[[11401,11401],"valid"],[[11402,11402],"mapped",[11403]],[[11403,11403],"valid"],[[11404,11404],"mapped",[11405]],[[11405,11405],"valid"],[[11406,11406],"mapped",[11407]],[[11407,11407],"valid"],[[11408,11408],"mapped",[11409]],[[11409,11409],"valid"],[[11410,11410],"mapped",[11411]],[[11411,11411],"valid"],[[11412,11412],"mapped",[11413]],[[11413,11413],"valid"],[[11414,11414],"mapped",[11415]],[[11415,11415],"valid"],[[11416,11416],"mapped",[11417]],[[11417,11417],"valid"],[[11418,11418],"mapped",[11419]],[[11419,11419],"valid"],[[11420,11420],"mapped",[11421]],[[11421,11421],"valid"],[[11422,11422],"mapped",[11423]],[[11423,11423],"valid"],[[11424,11424],"mapped",[11425]],[[11425,11425],"valid"],[[11426,11426
],"mapped",[11427]],[[11427,11427],"valid"],[[11428,11428],"mapped",[11429]],[[11429,11429],"valid"],[[11430,11430],"mapped",[11431]],[[11431,11431],"valid"],[[11432,11432],"mapped",[11433]],[[11433,11433],"valid"],[[11434,11434],"mapped",[11435]],[[11435,11435],"valid"],[[11436,11436],"mapped",[11437]],[[11437,11437],"valid"],[[11438,11438],"mapped",[11439]],[[11439,11439],"valid"],[[11440,11440],"mapped",[11441]],[[11441,11441],"valid"],[[11442,11442],"mapped",[11443]],[[11443,11443],"valid"],[[11444,11444],"mapped",[11445]],[[11445,11445],"valid"],[[11446,11446],"mapped",[11447]],[[11447,11447],"valid"],[[11448,11448],"mapped",[11449]],[[11449,11449],"valid"],[[11450,11450],"mapped",[11451]],[[11451,11451],"valid"],[[11452,11452],"mapped",[11453]],[[11453,11453],"valid"],[[11454,11454],"mapped",[11455]],[[11455,11455],"valid"],[[11456,11456],"mapped",[11457]],[[11457,11457],"valid"],[[11458,11458],"mapped",[11459]],[[11459,11459],"valid"],[[11460,11460],"mapped",[11461]],[[11461,11461],"valid"],[[11462,11462],"mapped",[11463]],[[11463,11463],"valid"],[[11464,11464],"mapped",[11465]],[[11465,11465],"valid"],[[11466,11466],"mapped",[11467]],[[11467,11467],"valid"],[[11468,11468],"mapped",[11469]],[[11469,11469],"valid"],[[11470,11470],"mapped",[11471]],[[11471,11471],"valid"],[[11472,11472],"mapped",[11473]],[[11473,11473],"valid"],[[11474,11474],"mapped",[11475]],[[11475,11475],"valid"],[[11476,11476],"mapped",[11477]],[[11477,11477],"valid"],[[11478,11478],"mapped",[11479]],[[11479,11479],"valid"],[[11480,11480],"mapped",[11481]],[[11481,11481],"valid"],[[11482,11482],"mapped",[11483]],[[11483,11483],"valid"],[[11484,11484],"mapped",[11485]],[[11485,11485],"valid"],[[11486,11486],"mapped",[11487]],[[11487,11487],"valid"],[[11488,11488],"mapped",[11489]],[[11489,11489],"valid"],[[11490,11490],"mapped",[11491]],[[11491,11492],"valid"],[[11493,11498],"valid",[],"NV8"],[[11499,11499],"mapped",[11500]],[[11500,11500],"valid"],[[11501,11501],"mapped",[11502]],[[11502,11505],"valid"],[[11506,11506],"mapped",[11507]],[[11507,11507],"valid"],[[11508,11512],"disallowed"],[[11513,11519],"valid",[],"NV8"],[[11520,11557],"valid"],[[11558,11558],"disallowed"],[[11559,11559],"valid"],[[11560,11564],"disallowed"],[[11565,11565],"valid"],[[11566,11567],"disallowed"],[[11568,11621],"valid"],[[11622,11623],"valid"],[[11624,11630],"disallowed"],[[11631,11631],"mapped",[11617]],[[11632,11632],"valid",[],"NV8"],[[11633,11646],"disallowed"],[[11647,11647],"valid"],[[11648,11670],"valid"],[[11671,11679],"disallowed"],[[11680,11686],"valid"],[[11687,11687],"disallowed"],[[11688,11694],"valid"],[[11695,11695],"disallowed"],[[11696,11702],"valid"],[[11703,11703],"disallowed"],[[11704,11710],"valid"],[[11711,11711],"disallowed"],[[11712,11718],"valid"],[[11719,11719],"disallowed"],[[11720,11726],"valid"],[[11727,11727],"disallowed"],[[11728,11734],"valid"],[[11735,11735],"disallowed"],[[11736,11742],"valid"],[[11743,11743],"disallowed"],[[11744,11775],"valid"],[[11776,11799],"valid",[],"NV8"],[[11800,11803],"valid",[],"NV8"],[[11804,11805],"valid",[],"NV8"],[[11806,11822],"valid",[],"NV8"],[[11823,11823],"valid"],[[11824,11824],"valid",[],"NV8"],[[11825,11825],"valid",[],"NV8"],[[11826,11835],"valid",[],"NV8"],[[11836,11842],"valid",[],"NV8"],[[11843,11903],"disallowed"],[[11904,11929],"valid",[],"NV8"],[[11930,11930],"disallowed"],[[11931,11934],"valid",[],"NV8"],[[11935,11935],"mapped",[27597]],[[11936,12018],"valid",[],"NV8"],[[12019,12019],"mapped",[40863]],[[12020,12031],"disallowed"],[[12032,12032],"mapped",[
19968]],[[12033,12033],"mapped",[20008]],[[12034,12034],"mapped",[20022]],[[12035,12035],"mapped",[20031]],[[12036,12036],"mapped",[20057]],[[12037,12037],"mapped",[20101]],[[12038,12038],"mapped",[20108]],[[12039,12039],"mapped",[20128]],[[12040,12040],"mapped",[20154]],[[12041,12041],"mapped",[20799]],[[12042,12042],"mapped",[20837]],[[12043,12043],"mapped",[20843]],[[12044,12044],"mapped",[20866]],[[12045,12045],"mapped",[20886]],[[12046,12046],"mapped",[20907]],[[12047,12047],"mapped",[20960]],[[12048,12048],"mapped",[20981]],[[12049,12049],"mapped",[20992]],[[12050,12050],"mapped",[21147]],[[12051,12051],"mapped",[21241]],[[12052,12052],"mapped",[21269]],[[12053,12053],"mapped",[21274]],[[12054,12054],"mapped",[21304]],[[12055,12055],"mapped",[21313]],[[12056,12056],"mapped",[21340]],[[12057,12057],"mapped",[21353]],[[12058,12058],"mapped",[21378]],[[12059,12059],"mapped",[21430]],[[12060,12060],"mapped",[21448]],[[12061,12061],"mapped",[21475]],[[12062,12062],"mapped",[22231]],[[12063,12063],"mapped",[22303]],[[12064,12064],"mapped",[22763]],[[12065,12065],"mapped",[22786]],[[12066,12066],"mapped",[22794]],[[12067,12067],"mapped",[22805]],[[12068,12068],"mapped",[22823]],[[12069,12069],"mapped",[22899]],[[12070,12070],"mapped",[23376]],[[12071,12071],"mapped",[23424]],[[12072,12072],"mapped",[23544]],[[12073,12073],"mapped",[23567]],[[12074,12074],"mapped",[23586]],[[12075,12075],"mapped",[23608]],[[12076,12076],"mapped",[23662]],[[12077,12077],"mapped",[23665]],[[12078,12078],"mapped",[24027]],[[12079,12079],"mapped",[24037]],[[12080,12080],"mapped",[24049]],[[12081,12081],"mapped",[24062]],[[12082,12082],"mapped",[24178]],[[12083,12083],"mapped",[24186]],[[12084,12084],"mapped",[24191]],[[12085,12085],"mapped",[24308]],[[12086,12086],"mapped",[24318]],[[12087,12087],"mapped",[24331]],[[12088,12088],"mapped",[24339]],[[12089,12089],"mapped",[24400]],[[12090,12090],"mapped",[24417]],[[12091,12091],"mapped",[24435]],[[12092,12092],"mapped",[24515]],[[12093,12093],"mapped",[25096]],[[12094,12094],"mapped",[25142]],[[12095,12095],"mapped",[25163]],[[12096,12096],"mapped",[25903]],[[12097,12097],"mapped",[25908]],[[12098,12098],"mapped",[25991]],[[12099,12099],"mapped",[26007]],[[12100,12100],"mapped",[26020]],[[12101,12101],"mapped",[26041]],[[12102,12102],"mapped",[26080]],[[12103,12103],"mapped",[26085]],[[12104,12104],"mapped",[26352]],[[12105,12105],"mapped",[26376]],[[12106,12106],"mapped",[26408]],[[12107,12107],"mapped",[27424]],[[12108,12108],"mapped",[27490]],[[12109,12109],"mapped",[27513]],[[12110,12110],"mapped",[27571]],[[12111,12111],"mapped",[27595]],[[12112,12112],"mapped",[27604]],[[12113,12113],"mapped",[27611]],[[12114,12114],"mapped",[27663]],[[12115,12115],"mapped",[27668]],[[12116,12116],"mapped",[27700]],[[12117,12117],"mapped",[28779]],[[12118,12118],"mapped",[29226]],[[12119,12119],"mapped",[29238]],[[12120,12120],"mapped",[29243]],[[12121,12121],"mapped",[29247]],[[12122,12122],"mapped",[29255]],[[12123,12123],"mapped",[29273]],[[12124,12124],"mapped",[29275]],[[12125,12125],"mapped",[29356]],[[12126,12126],"mapped",[29572]],[[12127,12127],"mapped",[29577]],[[12128,12128],"mapped",[29916]],[[12129,12129],"mapped",[29926]],[[12130,12130],"mapped",[29976]],[[12131,12131],"mapped",[29983]],[[12132,12132],"mapped",[29992]],[[12133,12133],"mapped",[30000]],[[12134,12134],"mapped",[30091]],[[12135,12135],"mapped",[30098]],[[12136,12136],"mapped",[30326]],[[12137,12137],"mapped",[30333]],[[12138,12138],"mapped",[30382]],[[12139,12139],"mapped",[30399]],[[12140,12140],"
mapped",[30446]],[[12141,12141],"mapped",[30683]],[[12142,12142],"mapped",[30690]],[[12143,12143],"mapped",[30707]],[[12144,12144],"mapped",[31034]],[[12145,12145],"mapped",[31160]],[[12146,12146],"mapped",[31166]],[[12147,12147],"mapped",[31348]],[[12148,12148],"mapped",[31435]],[[12149,12149],"mapped",[31481]],[[12150,12150],"mapped",[31859]],[[12151,12151],"mapped",[31992]],[[12152,12152],"mapped",[32566]],[[12153,12153],"mapped",[32593]],[[12154,12154],"mapped",[32650]],[[12155,12155],"mapped",[32701]],[[12156,12156],"mapped",[32769]],[[12157,12157],"mapped",[32780]],[[12158,12158],"mapped",[32786]],[[12159,12159],"mapped",[32819]],[[12160,12160],"mapped",[32895]],[[12161,12161],"mapped",[32905]],[[12162,12162],"mapped",[33251]],[[12163,12163],"mapped",[33258]],[[12164,12164],"mapped",[33267]],[[12165,12165],"mapped",[33276]],[[12166,12166],"mapped",[33292]],[[12167,12167],"mapped",[33307]],[[12168,12168],"mapped",[33311]],[[12169,12169],"mapped",[33390]],[[12170,12170],"mapped",[33394]],[[12171,12171],"mapped",[33400]],[[12172,12172],"mapped",[34381]],[[12173,12173],"mapped",[34411]],[[12174,12174],"mapped",[34880]],[[12175,12175],"mapped",[34892]],[[12176,12176],"mapped",[34915]],[[12177,12177],"mapped",[35198]],[[12178,12178],"mapped",[35211]],[[12179,12179],"mapped",[35282]],[[12180,12180],"mapped",[35328]],[[12181,12181],"mapped",[35895]],[[12182,12182],"mapped",[35910]],[[12183,12183],"mapped",[35925]],[[12184,12184],"mapped",[35960]],[[12185,12185],"mapped",[35997]],[[12186,12186],"mapped",[36196]],[[12187,12187],"mapped",[36208]],[[12188,12188],"mapped",[36275]],[[12189,12189],"mapped",[36523]],[[12190,12190],"mapped",[36554]],[[12191,12191],"mapped",[36763]],[[12192,12192],"mapped",[36784]],[[12193,12193],"mapped",[36789]],[[12194,12194],"mapped",[37009]],[[12195,12195],"mapped",[37193]],[[12196,12196],"mapped",[37318]],[[12197,12197],"mapped",[37324]],[[12198,12198],"mapped",[37329]],[[12199,12199],"mapped",[38263]],[[12200,12200],"mapped",[38272]],[[12201,12201],"mapped",[38428]],[[12202,12202],"mapped",[38582]],[[12203,12203],"mapped",[38585]],[[12204,12204],"mapped",[38632]],[[12205,12205],"mapped",[38737]],[[12206,12206],"mapped",[38750]],[[12207,12207],"mapped",[38754]],[[12208,12208],"mapped",[38761]],[[12209,12209],"mapped",[38859]],[[12210,12210],"mapped",[38893]],[[12211,12211],"mapped",[38899]],[[12212,12212],"mapped",[38913]],[[12213,12213],"mapped",[39080]],[[12214,12214],"mapped",[39131]],[[12215,12215],"mapped",[39135]],[[12216,12216],"mapped",[39318]],[[12217,12217],"mapped",[39321]],[[12218,12218],"mapped",[39340]],[[12219,12219],"mapped",[39592]],[[12220,12220],"mapped",[39640]],[[12221,12221],"mapped",[39647]],[[12222,12222],"mapped",[39717]],[[12223,12223],"mapped",[39727]],[[12224,12224],"mapped",[39730]],[[12225,12225],"mapped",[39740]],[[12226,12226],"mapped",[39770]],[[12227,12227],"mapped",[40165]],[[12228,12228],"mapped",[40565]],[[12229,12229],"mapped",[40575]],[[12230,12230],"mapped",[40613]],[[12231,12231],"mapped",[40635]],[[12232,12232],"mapped",[40643]],[[12233,12233],"mapped",[40653]],[[12234,12234],"mapped",[40657]],[[12235,12235],"mapped",[40697]],[[12236,12236],"mapped",[40701]],[[12237,12237],"mapped",[40718]],[[12238,12238],"mapped",[40723]],[[12239,12239],"mapped",[40736]],[[12240,12240],"mapped",[40763]],[[12241,12241],"mapped",[40778]],[[12242,12242],"mapped",[40786]],[[12243,12243],"mapped",[40845]],[[12244,12244],"mapped",[40860]],[[12245,12245],"mapped",[40864]],[[12246,12271],"disallowed"],[[12272,12283],"disallowed"],[[12284,12287],
"disallowed"],[[12288,12288],"disallowed_STD3_mapped",[32]],[[12289,12289],"valid",[],"NV8"],[[12290,12290],"mapped",[46]],[[12291,12292],"valid",[],"NV8"],[[12293,12295],"valid"],[[12296,12329],"valid",[],"NV8"],[[12330,12333],"valid"],[[12334,12341],"valid",[],"NV8"],[[12342,12342],"mapped",[12306]],[[12343,12343],"valid",[],"NV8"],[[12344,12344],"mapped",[21313]],[[12345,12345],"mapped",[21316]],[[12346,12346],"mapped",[21317]],[[12347,12347],"valid",[],"NV8"],[[12348,12348],"valid"],[[12349,12349],"valid",[],"NV8"],[[12350,12350],"valid",[],"NV8"],[[12351,12351],"valid",[],"NV8"],[[12352,12352],"disallowed"],[[12353,12436],"valid"],[[12437,12438],"valid"],[[12439,12440],"disallowed"],[[12441,12442],"valid"],[[12443,12443],"disallowed_STD3_mapped",[32,12441]],[[12444,12444],"disallowed_STD3_mapped",[32,12442]],[[12445,12446],"valid"],[[12447,12447],"mapped",[12424,12426]],[[12448,12448],"valid",[],"NV8"],[[12449,12542],"valid"],[[12543,12543],"mapped",[12467,12488]],[[12544,12548],"disallowed"],[[12549,12588],"valid"],[[12589,12589],"valid"],[[12590,12592],"disallowed"],[[12593,12593],"mapped",[4352]],[[12594,12594],"mapped",[4353]],[[12595,12595],"mapped",[4522]],[[12596,12596],"mapped",[4354]],[[12597,12597],"mapped",[4524]],[[12598,12598],"mapped",[4525]],[[12599,12599],"mapped",[4355]],[[12600,12600],"mapped",[4356]],[[12601,12601],"mapped",[4357]],[[12602,12602],"mapped",[4528]],[[12603,12603],"mapped",[4529]],[[12604,12604],"mapped",[4530]],[[12605,12605],"mapped",[4531]],[[12606,12606],"mapped",[4532]],[[12607,12607],"mapped",[4533]],[[12608,12608],"mapped",[4378]],[[12609,12609],"mapped",[4358]],[[12610,12610],"mapped",[4359]],[[12611,12611],"mapped",[4360]],[[12612,12612],"mapped",[4385]],[[12613,12613],"mapped",[4361]],[[12614,12614],"mapped",[4362]],[[12615,12615],"mapped",[4363]],[[12616,12616],"mapped",[4364]],[[12617,12617],"mapped",[4365]],[[12618,12618],"mapped",[4366]],[[12619,12619],"mapped",[4367]],[[12620,12620],"mapped",[4368]],[[12621,12621],"mapped",[4369]],[[12622,12622],"mapped",[4370]],[[12623,12623],"mapped",[4449]],[[12624,12624],"mapped",[4450]],[[12625,12625],"mapped",[4451]],[[12626,12626],"mapped",[4452]],[[12627,12627],"mapped",[4453]],[[12628,12628],"mapped",[4454]],[[12629,12629],"mapped",[4455]],[[12630,12630],"mapped",[4456]],[[12631,12631],"mapped",[4457]],[[12632,12632],"mapped",[4458]],[[12633,12633],"mapped",[4459]],[[12634,12634],"mapped",[4460]],[[12635,12635],"mapped",[4461]],[[12636,12636],"mapped",[4462]],[[12637,12637],"mapped",[4463]],[[12638,12638],"mapped",[4464]],[[12639,12639],"mapped",[4465]],[[12640,12640],"mapped",[4466]],[[12641,12641],"mapped",[4467]],[[12642,12642],"mapped",[4468]],[[12643,12643],"mapped",[4469]],[[12644,12644],"disallowed"],[[12645,12645],"mapped",[4372]],[[12646,12646],"mapped",[4373]],[[12647,12647],"mapped",[4551]],[[12648,12648],"mapped",[4552]],[[12649,12649],"mapped",[4556]],[[12650,12650],"mapped",[4558]],[[12651,12651],"mapped",[4563]],[[12652,12652],"mapped",[4567]],[[12653,12653],"mapped",[4569]],[[12654,12654],"mapped",[4380]],[[12655,12655],"mapped",[4573]],[[12656,12656],"mapped",[4575]],[[12657,12657],"mapped",[4381]],[[12658,12658],"mapped",[4382]],[[12659,12659],"mapped",[4384]],[[12660,12660],"mapped",[4386]],[[12661,12661],"mapped",[4387]],[[12662,12662],"mapped",[4391]],[[12663,12663],"mapped",[4393]],[[12664,12664],"mapped",[4395]],[[12665,12665],"mapped",[4396]],[[12666,12666],"mapped",[4397]],[[12667,12667],"mapped",[4398]],[[12668,12668],"mapped",[4399]],[[12669,12669],"mapped",[4402]],[[12
670,12670],"mapped",[4406]],[[12671,12671],"mapped",[4416]],[[12672,12672],"mapped",[4423]],[[12673,12673],"mapped",[4428]],[[12674,12674],"mapped",[4593]],[[12675,12675],"mapped",[4594]],[[12676,12676],"mapped",[4439]],[[12677,12677],"mapped",[4440]],[[12678,12678],"mapped",[4441]],[[12679,12679],"mapped",[4484]],[[12680,12680],"mapped",[4485]],[[12681,12681],"mapped",[4488]],[[12682,12682],"mapped",[4497]],[[12683,12683],"mapped",[4498]],[[12684,12684],"mapped",[4500]],[[12685,12685],"mapped",[4510]],[[12686,12686],"mapped",[4513]],[[12687,12687],"disallowed"],[[12688,12689],"valid",[],"NV8"],[[12690,12690],"mapped",[19968]],[[12691,12691],"mapped",[20108]],[[12692,12692],"mapped",[19977]],[[12693,12693],"mapped",[22235]],[[12694,12694],"mapped",[19978]],[[12695,12695],"mapped",[20013]],[[12696,12696],"mapped",[19979]],[[12697,12697],"mapped",[30002]],[[12698,12698],"mapped",[20057]],[[12699,12699],"mapped",[19993]],[[12700,12700],"mapped",[19969]],[[12701,12701],"mapped",[22825]],[[12702,12702],"mapped",[22320]],[[12703,12703],"mapped",[20154]],[[12704,12727],"valid"],[[12728,12730],"valid"],[[12731,12735],"disallowed"],[[12736,12751],"valid",[],"NV8"],[[12752,12771],"valid",[],"NV8"],[[12772,12783],"disallowed"],[[12784,12799],"valid"],[[12800,12800],"disallowed_STD3_mapped",[40,4352,41]],[[12801,12801],"disallowed_STD3_mapped",[40,4354,41]],[[12802,12802],"disallowed_STD3_mapped",[40,4355,41]],[[12803,12803],"disallowed_STD3_mapped",[40,4357,41]],[[12804,12804],"disallowed_STD3_mapped",[40,4358,41]],[[12805,12805],"disallowed_STD3_mapped",[40,4359,41]],[[12806,12806],"disallowed_STD3_mapped",[40,4361,41]],[[12807,12807],"disallowed_STD3_mapped",[40,4363,41]],[[12808,12808],"disallowed_STD3_mapped",[40,4364,41]],[[12809,12809],"disallowed_STD3_mapped",[40,4366,41]],[[12810,12810],"disallowed_STD3_mapped",[40,4367,41]],[[12811,12811],"disallowed_STD3_mapped",[40,4368,41]],[[12812,12812],"disallowed_STD3_mapped",[40,4369,41]],[[12813,12813],"disallowed_STD3_mapped",[40,4370,41]],[[12814,12814],"disallowed_STD3_mapped",[40,44032,41]],[[12815,12815],"disallowed_STD3_mapped",[40,45208,41]],[[12816,12816],"disallowed_STD3_mapped",[40,45796,41]],[[12817,12817],"disallowed_STD3_mapped",[40,46972,41]],[[12818,12818],"disallowed_STD3_mapped",[40,47560,41]],[[12819,12819],"disallowed_STD3_mapped",[40,48148,41]],[[12820,12820],"disallowed_STD3_mapped",[40,49324,41]],[[12821,12821],"disallowed_STD3_mapped",[40,50500,41]],[[12822,12822],"disallowed_STD3_mapped",[40,51088,41]],[[12823,12823],"disallowed_STD3_mapped",[40,52264,41]],[[12824,12824],"disallowed_STD3_mapped",[40,52852,41]],[[12825,12825],"disallowed_STD3_mapped",[40,53440,41]],[[12826,12826],"disallowed_STD3_mapped",[40,54028,41]],[[12827,12827],"disallowed_STD3_mapped",[40,54616,41]],[[12828,12828],"disallowed_STD3_mapped",[40,51452,41]],[[12829,12829],"disallowed_STD3_mapped",[40,50724,51204,41]],[[12830,12830],"disallowed_STD3_mapped",[40,50724,54980,41]],[[12831,12831],"disallowed"],[[12832,12832],"disallowed_STD3_mapped",[40,19968,41]],[[12833,12833],"disallowed_STD3_mapped",[40,20108,41]],[[12834,12834],"disallowed_STD3_mapped",[40,19977,41]],[[12835,12835],"disallowed_STD3_mapped",[40,22235,41]],[[12836,12836],"disallowed_STD3_mapped",[40,20116,41]],[[12837,12837],"disallowed_STD3_mapped",[40,20845,41]],[[12838,12838],"disallowed_STD3_mapped",[40,19971,41]],[[12839,12839],"disallowed_STD3_mapped",[40,20843,41]],[[12840,12840],"disallowed_STD3_mapped",[40,20061,41]],[[12841,12841],"disallowed_STD3_mapped",[40,21313,41]],[[12842,12842
],"disallowed_STD3_mapped",[40,26376,41]],[[12843,12843],"disallowed_STD3_mapped",[40,28779,41]],[[12844,12844],"disallowed_STD3_mapped",[40,27700,41]],[[12845,12845],"disallowed_STD3_mapped",[40,26408,41]],[[12846,12846],"disallowed_STD3_mapped",[40,37329,41]],[[12847,12847],"disallowed_STD3_mapped",[40,22303,41]],[[12848,12848],"disallowed_STD3_mapped",[40,26085,41]],[[12849,12849],"disallowed_STD3_mapped",[40,26666,41]],[[12850,12850],"disallowed_STD3_mapped",[40,26377,41]],[[12851,12851],"disallowed_STD3_mapped",[40,31038,41]],[[12852,12852],"disallowed_STD3_mapped",[40,21517,41]],[[12853,12853],"disallowed_STD3_mapped",[40,29305,41]],[[12854,12854],"disallowed_STD3_mapped",[40,36001,41]],[[12855,12855],"disallowed_STD3_mapped",[40,31069,41]],[[12856,12856],"disallowed_STD3_mapped",[40,21172,41]],[[12857,12857],"disallowed_STD3_mapped",[40,20195,41]],[[12858,12858],"disallowed_STD3_mapped",[40,21628,41]],[[12859,12859],"disallowed_STD3_mapped",[40,23398,41]],[[12860,12860],"disallowed_STD3_mapped",[40,30435,41]],[[12861,12861],"disallowed_STD3_mapped",[40,20225,41]],[[12862,12862],"disallowed_STD3_mapped",[40,36039,41]],[[12863,12863],"disallowed_STD3_mapped",[40,21332,41]],[[12864,12864],"disallowed_STD3_mapped",[40,31085,41]],[[12865,12865],"disallowed_STD3_mapped",[40,20241,41]],[[12866,12866],"disallowed_STD3_mapped",[40,33258,41]],[[12867,12867],"disallowed_STD3_mapped",[40,33267,41]],[[12868,12868],"mapped",[21839]],[[12869,12869],"mapped",[24188]],[[12870,12870],"mapped",[25991]],[[12871,12871],"mapped",[31631]],[[12872,12879],"valid",[],"NV8"],[[12880,12880],"mapped",[112,116,101]],[[12881,12881],"mapped",[50,49]],[[12882,12882],"mapped",[50,50]],[[12883,12883],"mapped",[50,51]],[[12884,12884],"mapped",[50,52]],[[12885,12885],"mapped",[50,53]],[[12886,12886],"mapped",[50,54]],[[12887,12887],"mapped",[50,55]],[[12888,12888],"mapped",[50,56]],[[12889,12889],"mapped",[50,57]],[[12890,12890],"mapped",[51,48]],[[12891,12891],"mapped",[51,49]],[[12892,12892],"mapped",[51,50]],[[12893,12893],"mapped",[51,51]],[[12894,12894],"mapped",[51,52]],[[12895,12895],"mapped",[51,53]],[[12896,12896],"mapped",[4352]],[[12897,12897],"mapped",[4354]],[[12898,12898],"mapped",[4355]],[[12899,12899],"mapped",[4357]],[[12900,12900],"mapped",[4358]],[[12901,12901],"mapped",[4359]],[[12902,12902],"mapped",[4361]],[[12903,12903],"mapped",[4363]],[[12904,12904],"mapped",[4364]],[[12905,12905],"mapped",[4366]],[[12906,12906],"mapped",[4367]],[[12907,12907],"mapped",[4368]],[[12908,12908],"mapped",[4369]],[[12909,12909],"mapped",[4370]],[[12910,12910],"mapped",[44032]],[[12911,12911],"mapped",[45208]],[[12912,12912],"mapped",[45796]],[[12913,12913],"mapped",[46972]],[[12914,12914],"mapped",[47560]],[[12915,12915],"mapped",[48148]],[[12916,12916],"mapped",[49324]],[[12917,12917],"mapped",[50500]],[[12918,12918],"mapped",[51088]],[[12919,12919],"mapped",[52264]],[[12920,12920],"mapped",[52852]],[[12921,12921],"mapped",[53440]],[[12922,12922],"mapped",[54028]],[[12923,12923],"mapped",[54616]],[[12924,12924],"mapped",[52280,44256]],[[12925,12925],"mapped",[51452,51032]],[[12926,12926],"mapped",[50864]],[[12927,12927],"valid",[],"NV8"],[[12928,12928],"mapped",[19968]],[[12929,12929],"mapped",[20108]],[[12930,12930],"mapped",[19977]],[[12931,12931],"mapped",[22235]],[[12932,12932],"mapped",[20116]],[[12933,12933],"mapped",[20845]],[[12934,12934],"mapped",[19971]],[[12935,12935],"mapped",[20843]],[[12936,12936],"mapped",[20061]],[[12937,12937],"mapped",[21313]],[[12938,12938],"mapped",[26376]],[[12939,12939],"mapped
",[28779]],[[12940,12940],"mapped",[27700]],[[12941,12941],"mapped",[26408]],[[12942,12942],"mapped",[37329]],[[12943,12943],"mapped",[22303]],[[12944,12944],"mapped",[26085]],[[12945,12945],"mapped",[26666]],[[12946,12946],"mapped",[26377]],[[12947,12947],"mapped",[31038]],[[12948,12948],"mapped",[21517]],[[12949,12949],"mapped",[29305]],[[12950,12950],"mapped",[36001]],[[12951,12951],"mapped",[31069]],[[12952,12952],"mapped",[21172]],[[12953,12953],"mapped",[31192]],[[12954,12954],"mapped",[30007]],[[12955,12955],"mapped",[22899]],[[12956,12956],"mapped",[36969]],[[12957,12957],"mapped",[20778]],[[12958,12958],"mapped",[21360]],[[12959,12959],"mapped",[27880]],[[12960,12960],"mapped",[38917]],[[12961,12961],"mapped",[20241]],[[12962,12962],"mapped",[20889]],[[12963,12963],"mapped",[27491]],[[12964,12964],"mapped",[19978]],[[12965,12965],"mapped",[20013]],[[12966,12966],"mapped",[19979]],[[12967,12967],"mapped",[24038]],[[12968,12968],"mapped",[21491]],[[12969,12969],"mapped",[21307]],[[12970,12970],"mapped",[23447]],[[12971,12971],"mapped",[23398]],[[12972,12972],"mapped",[30435]],[[12973,12973],"mapped",[20225]],[[12974,12974],"mapped",[36039]],[[12975,12975],"mapped",[21332]],[[12976,12976],"mapped",[22812]],[[12977,12977],"mapped",[51,54]],[[12978,12978],"mapped",[51,55]],[[12979,12979],"mapped",[51,56]],[[12980,12980],"mapped",[51,57]],[[12981,12981],"mapped",[52,48]],[[12982,12982],"mapped",[52,49]],[[12983,12983],"mapped",[52,50]],[[12984,12984],"mapped",[52,51]],[[12985,12985],"mapped",[52,52]],[[12986,12986],"mapped",[52,53]],[[12987,12987],"mapped",[52,54]],[[12988,12988],"mapped",[52,55]],[[12989,12989],"mapped",[52,56]],[[12990,12990],"mapped",[52,57]],[[12991,12991],"mapped",[53,48]],[[12992,12992],"mapped",[49,26376]],[[12993,12993],"mapped",[50,26376]],[[12994,12994],"mapped",[51,26376]],[[12995,12995],"mapped",[52,26376]],[[12996,12996],"mapped",[53,26376]],[[12997,12997],"mapped",[54,26376]],[[12998,12998],"mapped",[55,26376]],[[12999,12999],"mapped",[56,26376]],[[13000,13000],"mapped",[57,26376]],[[13001,13001],"mapped",[49,48,26376]],[[13002,13002],"mapped",[49,49,26376]],[[13003,13003],"mapped",[49,50,26376]],[[13004,13004],"mapped",[104,103]],[[13005,13005],"mapped",[101,114,103]],[[13006,13006],"mapped",[101,118]],[[13007,13007],"mapped",[108,116,100]],[[13008,13008],"mapped",[12450]],[[13009,13009],"mapped",[12452]],[[13010,13010],"mapped",[12454]],[[13011,13011],"mapped",[12456]],[[13012,13012],"mapped",[12458]],[[13013,13013],"mapped",[12459]],[[13014,13014],"mapped",[12461]],[[13015,13015],"mapped",[12463]],[[13016,13016],"mapped",[12465]],[[13017,13017],"mapped",[12467]],[[13018,13018],"mapped",[12469]],[[13019,13019],"mapped",[12471]],[[13020,13020],"mapped",[12473]],[[13021,13021],"mapped",[12475]],[[13022,13022],"mapped",[12477]],[[13023,13023],"mapped",[12479]],[[13024,13024],"mapped",[12481]],[[13025,13025],"mapped",[12484]],[[13026,13026],"mapped",[12486]],[[13027,13027],"mapped",[12488]],[[13028,13028],"mapped",[12490]],[[13029,13029],"mapped",[12491]],[[13030,13030],"mapped",[12492]],[[13031,13031],"mapped",[12493]],[[13032,13032],"mapped",[12494]],[[13033,13033],"mapped",[12495]],[[13034,13034],"mapped",[12498]],[[13035,13035],"mapped",[12501]],[[13036,13036],"mapped",[12504]],[[13037,13037],"mapped",[12507]],[[13038,13038],"mapped",[12510]],[[13039,13039],"mapped",[12511]],[[13040,13040],"mapped",[12512]],[[13041,13041],"mapped",[12513]],[[13042,13042],"mapped",[12514]],[[13043,13043],"mapped",[12516]],[[13044,13044],"mapped",[12518]],[[13045,13045],"ma
pped",[12520]],[[13046,13046],"mapped",[12521]],[[13047,13047],"mapped",[12522]],[[13048,13048],"mapped",[12523]],[[13049,13049],"mapped",[12524]],[[13050,13050],"mapped",[12525]],[[13051,13051],"mapped",[12527]],[[13052,13052],"mapped",[12528]],[[13053,13053],"mapped",[12529]],[[13054,13054],"mapped",[12530]],[[13055,13055],"disallowed"],[[13056,13056],"mapped",[12450,12497,12540,12488]],[[13057,13057],"mapped",[12450,12523,12501,12449]],[[13058,13058],"mapped",[12450,12531,12506,12450]],[[13059,13059],"mapped",[12450,12540,12523]],[[13060,13060],"mapped",[12452,12491,12531,12464]],[[13061,13061],"mapped",[12452,12531,12481]],[[13062,13062],"mapped",[12454,12457,12531]],[[13063,13063],"mapped",[12456,12473,12463,12540,12489]],[[13064,13064],"mapped",[12456,12540,12459,12540]],[[13065,13065],"mapped",[12458,12531,12473]],[[13066,13066],"mapped",[12458,12540,12512]],[[13067,13067],"mapped",[12459,12452,12522]],[[13068,13068],"mapped",[12459,12521,12483,12488]],[[13069,13069],"mapped",[12459,12525,12522,12540]],[[13070,13070],"mapped",[12460,12525,12531]],[[13071,13071],"mapped",[12460,12531,12510]],[[13072,13072],"mapped",[12462,12460]],[[13073,13073],"mapped",[12462,12491,12540]],[[13074,13074],"mapped",[12461,12517,12522,12540]],[[13075,13075],"mapped",[12462,12523,12480,12540]],[[13076,13076],"mapped",[12461,12525]],[[13077,13077],"mapped",[12461,12525,12464,12521,12512]],[[13078,13078],"mapped",[12461,12525,12513,12540,12488,12523]],[[13079,13079],"mapped",[12461,12525,12527,12483,12488]],[[13080,13080],"mapped",[12464,12521,12512]],[[13081,13081],"mapped",[12464,12521,12512,12488,12531]],[[13082,13082],"mapped",[12463,12523,12476,12452,12525]],[[13083,13083],"mapped",[12463,12525,12540,12493]],[[13084,13084],"mapped",[12465,12540,12473]],[[13085,13085],"mapped",[12467,12523,12490]],[[13086,13086],"mapped",[12467,12540,12509]],[[13087,13087],"mapped",[12469,12452,12463,12523]],[[13088,13088],"mapped",[12469,12531,12481,12540,12512]],[[13089,13089],"mapped",[12471,12522,12531,12464]],[[13090,13090],"mapped",[12475,12531,12481]],[[13091,13091],"mapped",[12475,12531,12488]],[[13092,13092],"mapped",[12480,12540,12473]],[[13093,13093],"mapped",[12487,12471]],[[13094,13094],"mapped",[12489,12523]],[[13095,13095],"mapped",[12488,12531]],[[13096,13096],"mapped",[12490,12494]],[[13097,13097],"mapped",[12494,12483,12488]],[[13098,13098],"mapped",[12495,12452,12484]],[[13099,13099],"mapped",[12497,12540,12475,12531,12488]],[[13100,13100],"mapped",[12497,12540,12484]],[[13101,13101],"mapped",[12496,12540,12524,12523]],[[13102,13102],"mapped",[12500,12450,12473,12488,12523]],[[13103,13103],"mapped",[12500,12463,12523]],[[13104,13104],"mapped",[12500,12467]],[[13105,13105],"mapped",[12499,12523]],[[13106,13106],"mapped",[12501,12449,12521,12483,12489]],[[13107,13107],"mapped",[12501,12451,12540,12488]],[[13108,13108],"mapped",[12502,12483,12471,12455,12523]],[[13109,13109],"mapped",[12501,12521,12531]],[[13110,13110],"mapped",[12504,12463,12479,12540,12523]],[[13111,13111],"mapped",[12506,12477]],[[13112,13112],"mapped",[12506,12491,12498]],[[13113,13113],"mapped",[12504,12523,12484]],[[13114,13114],"mapped",[12506,12531,12473]],[[13115,13115],"mapped",[12506,12540,12472]],[[13116,13116],"mapped",[12505,12540,12479]],[[13117,13117],"mapped",[12509,12452,12531,12488]],[[13118,13118],"mapped",[12508,12523,12488]],[[13119,13119],"mapped",[12507,12531]],[[13120,13120],"mapped",[12509,12531,12489]],[[13121,13121],"mapped",[12507,12540,12523]],[[13122,13122],"mapped",[12507,12540,12531]],[[13123,13123],"map
ped",[12510,12452,12463,12525]],[[13124,13124],"mapped",[12510,12452,12523]],[[13125,13125],"mapped",[12510,12483,12495]],[[13126,13126],"mapped",[12510,12523,12463]],[[13127,13127],"mapped",[12510,12531,12471,12519,12531]],[[13128,13128],"mapped",[12511,12463,12525,12531]],[[13129,13129],"mapped",[12511,12522]],[[13130,13130],"mapped",[12511,12522,12496,12540,12523]],[[13131,13131],"mapped",[12513,12460]],[[13132,13132],"mapped",[12513,12460,12488,12531]],[[13133,13133],"mapped",[12513,12540,12488,12523]],[[13134,13134],"mapped",[12516,12540,12489]],[[13135,13135],"mapped",[12516,12540,12523]],[[13136,13136],"mapped",[12518,12450,12531]],[[13137,13137],"mapped",[12522,12483,12488,12523]],[[13138,13138],"mapped",[12522,12521]],[[13139,13139],"mapped",[12523,12500,12540]],[[13140,13140],"mapped",[12523,12540,12502,12523]],[[13141,13141],"mapped",[12524,12512]],[[13142,13142],"mapped",[12524,12531,12488,12466,12531]],[[13143,13143],"mapped",[12527,12483,12488]],[[13144,13144],"mapped",[48,28857]],[[13145,13145],"mapped",[49,28857]],[[13146,13146],"mapped",[50,28857]],[[13147,13147],"mapped",[51,28857]],[[13148,13148],"mapped",[52,28857]],[[13149,13149],"mapped",[53,28857]],[[13150,13150],"mapped",[54,28857]],[[13151,13151],"mapped",[55,28857]],[[13152,13152],"mapped",[56,28857]],[[13153,13153],"mapped",[57,28857]],[[13154,13154],"mapped",[49,48,28857]],[[13155,13155],"mapped",[49,49,28857]],[[13156,13156],"mapped",[49,50,28857]],[[13157,13157],"mapped",[49,51,28857]],[[13158,13158],"mapped",[49,52,28857]],[[13159,13159],"mapped",[49,53,28857]],[[13160,13160],"mapped",[49,54,28857]],[[13161,13161],"mapped",[49,55,28857]],[[13162,13162],"mapped",[49,56,28857]],[[13163,13163],"mapped",[49,57,28857]],[[13164,13164],"mapped",[50,48,28857]],[[13165,13165],"mapped",[50,49,28857]],[[13166,13166],"mapped",[50,50,28857]],[[13167,13167],"mapped",[50,51,28857]],[[13168,13168],"mapped",[50,52,28857]],[[13169,13169],"mapped",[104,112,97]],[[13170,13170],"mapped",[100,97]],[[13171,13171],"mapped",[97,117]],[[13172,13172],"mapped",[98,97,114]],[[13173,13173],"mapped",[111,118]],[[13174,13174],"mapped",[112,99]],[[13175,13175],"mapped",[100,109]],[[13176,13176],"mapped",[100,109,50]],[[13177,13177],"mapped",[100,109,51]],[[13178,13178],"mapped",[105,117]],[[13179,13179],"mapped",[24179,25104]],[[13180,13180],"mapped",[26157,21644]],[[13181,13181],"mapped",[22823,27491]],[[13182,13182],"mapped",[26126,27835]],[[13183,13183],"mapped",[26666,24335,20250,31038]],[[13184,13184],"mapped",[112,97]],[[13185,13185],"mapped",[110,97]],[[13186,13186],"mapped",[956,97]],[[13187,13187],"mapped",[109,97]],[[13188,13188],"mapped",[107,97]],[[13189,13189],"mapped",[107,98]],[[13190,13190],"mapped",[109,98]],[[13191,13191],"mapped",[103,98]],[[13192,13192],"mapped",[99,97,108]],[[13193,13193],"mapped",[107,99,97,108]],[[13194,13194],"mapped",[112,102]],[[13195,13195],"mapped",[110,102]],[[13196,13196],"mapped",[956,102]],[[13197,13197],"mapped",[956,103]],[[13198,13198],"mapped",[109,103]],[[13199,13199],"mapped",[107,103]],[[13200,13200],"mapped",[104,122]],[[13201,13201],"mapped",[107,104,122]],[[13202,13202],"mapped",[109,104,122]],[[13203,13203],"mapped",[103,104,122]],[[13204,13204],"mapped",[116,104,122]],[[13205,13205],"mapped",[956,108]],[[13206,13206],"mapped",[109,108]],[[13207,13207],"mapped",[100,108]],[[13208,13208],"mapped",[107,108]],[[13209,13209],"mapped",[102,109]],[[13210,13210],"mapped",[110,109]],[[13211,13211],"mapped",[956,109]],[[13212,13212],"mapped",[109,109]],[[13213,13213],"mapped",[99,109]],[[1321
4,13214],"mapped",[107,109]],[[13215,13215],"mapped",[109,109,50]],[[13216,13216],"mapped",[99,109,50]],[[13217,13217],"mapped",[109,50]],[[13218,13218],"mapped",[107,109,50]],[[13219,13219],"mapped",[109,109,51]],[[13220,13220],"mapped",[99,109,51]],[[13221,13221],"mapped",[109,51]],[[13222,13222],"mapped",[107,109,51]],[[13223,13223],"mapped",[109,8725,115]],[[13224,13224],"mapped",[109,8725,115,50]],[[13225,13225],"mapped",[112,97]],[[13226,13226],"mapped",[107,112,97]],[[13227,13227],"mapped",[109,112,97]],[[13228,13228],"mapped",[103,112,97]],[[13229,13229],"mapped",[114,97,100]],[[13230,13230],"mapped",[114,97,100,8725,115]],[[13231,13231],"mapped",[114,97,100,8725,115,50]],[[13232,13232],"mapped",[112,115]],[[13233,13233],"mapped",[110,115]],[[13234,13234],"mapped",[956,115]],[[13235,13235],"mapped",[109,115]],[[13236,13236],"mapped",[112,118]],[[13237,13237],"mapped",[110,118]],[[13238,13238],"mapped",[956,118]],[[13239,13239],"mapped",[109,118]],[[13240,13240],"mapped",[107,118]],[[13241,13241],"mapped",[109,118]],[[13242,13242],"mapped",[112,119]],[[13243,13243],"mapped",[110,119]],[[13244,13244],"mapped",[956,119]],[[13245,13245],"mapped",[109,119]],[[13246,13246],"mapped",[107,119]],[[13247,13247],"mapped",[109,119]],[[13248,13248],"mapped",[107,969]],[[13249,13249],"mapped",[109,969]],[[13250,13250],"disallowed"],[[13251,13251],"mapped",[98,113]],[[13252,13252],"mapped",[99,99]],[[13253,13253],"mapped",[99,100]],[[13254,13254],"mapped",[99,8725,107,103]],[[13255,13255],"disallowed"],[[13256,13256],"mapped",[100,98]],[[13257,13257],"mapped",[103,121]],[[13258,13258],"mapped",[104,97]],[[13259,13259],"mapped",[104,112]],[[13260,13260],"mapped",[105,110]],[[13261,13261],"mapped",[107,107]],[[13262,13262],"mapped",[107,109]],[[13263,13263],"mapped",[107,116]],[[13264,13264],"mapped",[108,109]],[[13265,13265],"mapped",[108,110]],[[13266,13266],"mapped",[108,111,103]],[[13267,13267],"mapped",[108,120]],[[13268,13268],"mapped",[109,98]],[[13269,13269],"mapped",[109,105,108]],[[13270,13270],"mapped",[109,111,108]],[[13271,13271],"mapped",[112,104]],[[13272,13272],"disallowed"],[[13273,13273],"mapped",[112,112,109]],[[13274,13274],"mapped",[112,114]],[[13275,13275],"mapped",[115,114]],[[13276,13276],"mapped",[115,118]],[[13277,13277],"mapped",[119,98]],[[13278,13278],"mapped",[118,8725,109]],[[13279,13279],"mapped",[97,8725,109]],[[13280,13280],"mapped",[49,26085]],[[13281,13281],"mapped",[50,26085]],[[13282,13282],"mapped",[51,26085]],[[13283,13283],"mapped",[52,26085]],[[13284,13284],"mapped",[53,26085]],[[13285,13285],"mapped",[54,26085]],[[13286,13286],"mapped",[55,26085]],[[13287,13287],"mapped",[56,26085]],[[13288,13288],"mapped",[57,26085]],[[13289,13289],"mapped",[49,48,26085]],[[13290,13290],"mapped",[49,49,26085]],[[13291,13291],"mapped",[49,50,26085]],[[13292,13292],"mapped",[49,51,26085]],[[13293,13293],"mapped",[49,52,26085]],[[13294,13294],"mapped",[49,53,26085]],[[13295,13295],"mapped",[49,54,26085]],[[13296,13296],"mapped",[49,55,26085]],[[13297,13297],"mapped",[49,56,26085]],[[13298,13298],"mapped",[49,57,26085]],[[13299,13299],"mapped",[50,48,26085]],[[13300,13300],"mapped",[50,49,26085]],[[13301,13301],"mapped",[50,50,26085]],[[13302,13302],"mapped",[50,51,26085]],[[13303,13303],"mapped",[50,52,26085]],[[13304,13304],"mapped",[50,53,26085]],[[13305,13305],"mapped",[50,54,26085]],[[13306,13306],"mapped",[50,55,26085]],[[13307,13307],"mapped",[50,56,26085]],[[13308,13308],"mapped",[50,57,26085]],[[13309,13309],"mapped",[51,48,26085]],[[13310,13310],"mapped",[51,49,26085
]],[[13311,13311],"mapped",[103,97,108]],[[13312,19893],"valid"],[[19894,19903],"disallowed"],[[19904,19967],"valid",[],"NV8"],[[19968,40869],"valid"],[[40870,40891],"valid"],[[40892,40899],"valid"],[[40900,40907],"valid"],[[40908,40908],"valid"],[[40909,40917],"valid"],[[40918,40959],"disallowed"],[[40960,42124],"valid"],[[42125,42127],"disallowed"],[[42128,42145],"valid",[],"NV8"],[[42146,42147],"valid",[],"NV8"],[[42148,42163],"valid",[],"NV8"],[[42164,42164],"valid",[],"NV8"],[[42165,42176],"valid",[],"NV8"],[[42177,42177],"valid",[],"NV8"],[[42178,42180],"valid",[],"NV8"],[[42181,42181],"valid",[],"NV8"],[[42182,42182],"valid",[],"NV8"],[[42183,42191],"disallowed"],[[42192,42237],"valid"],[[42238,42239],"valid",[],"NV8"],[[42240,42508],"valid"],[[42509,42511],"valid",[],"NV8"],[[42512,42539],"valid"],[[42540,42559],"disallowed"],[[42560,42560],"mapped",[42561]],[[42561,42561],"valid"],[[42562,42562],"mapped",[42563]],[[42563,42563],"valid"],[[42564,42564],"mapped",[42565]],[[42565,42565],"valid"],[[42566,42566],"mapped",[42567]],[[42567,42567],"valid"],[[42568,42568],"mapped",[42569]],[[42569,42569],"valid"],[[42570,42570],"mapped",[42571]],[[42571,42571],"valid"],[[42572,42572],"mapped",[42573]],[[42573,42573],"valid"],[[42574,42574],"mapped",[42575]],[[42575,42575],"valid"],[[42576,42576],"mapped",[42577]],[[42577,42577],"valid"],[[42578,42578],"mapped",[42579]],[[42579,42579],"valid"],[[42580,42580],"mapped",[42581]],[[42581,42581],"valid"],[[42582,42582],"mapped",[42583]],[[42583,42583],"valid"],[[42584,42584],"mapped",[42585]],[[42585,42585],"valid"],[[42586,42586],"mapped",[42587]],[[42587,42587],"valid"],[[42588,42588],"mapped",[42589]],[[42589,42589],"valid"],[[42590,42590],"mapped",[42591]],[[42591,42591],"valid"],[[42592,42592],"mapped",[42593]],[[42593,42593],"valid"],[[42594,42594],"mapped",[42595]],[[42595,42595],"valid"],[[42596,42596],"mapped",[42597]],[[42597,42597],"valid"],[[42598,42598],"mapped",[42599]],[[42599,42599],"valid"],[[42600,42600],"mapped",[42601]],[[42601,42601],"valid"],[[42602,42602],"mapped",[42603]],[[42603,42603],"valid"],[[42604,42604],"mapped",[42605]],[[42605,42607],"valid"],[[42608,42611],"valid",[],"NV8"],[[42612,42619],"valid"],[[42620,42621],"valid"],[[42622,42622],"valid",[],"NV8"],[[42623,42623],"valid"],[[42624,42624],"mapped",[42625]],[[42625,42625],"valid"],[[42626,42626],"mapped",[42627]],[[42627,42627],"valid"],[[42628,42628],"mapped",[42629]],[[42629,42629],"valid"],[[42630,42630],"mapped",[42631]],[[42631,42631],"valid"],[[42632,42632],"mapped",[42633]],[[42633,42633],"valid"],[[42634,42634],"mapped",[42635]],[[42635,42635],"valid"],[[42636,42636],"mapped",[42637]],[[42637,42637],"valid"],[[42638,42638],"mapped",[42639]],[[42639,42639],"valid"],[[42640,42640],"mapped",[42641]],[[42641,42641],"valid"],[[42642,42642],"mapped",[42643]],[[42643,42643],"valid"],[[42644,42644],"mapped",[42645]],[[42645,42645],"valid"],[[42646,42646],"mapped",[42647]],[[42647,42647],"valid"],[[42648,42648],"mapped",[42649]],[[42649,42649],"valid"],[[42650,42650],"mapped",[42651]],[[42651,42651],"valid"],[[42652,42652],"mapped",[1098]],[[42653,42653],"mapped",[1100]],[[42654,42654],"valid"],[[42655,42655],"valid"],[[42656,42725],"valid"],[[42726,42735],"valid",[],"NV8"],[[42736,42737],"valid"],[[42738,42743],"valid",[],"NV8"],[[42744,42751],"disallowed"],[[42752,42774],"valid",[],"NV8"],[[42775,42778],"valid"],[[42779,42783],"valid"],[[42784,42785],"valid",[],"NV8"],[[42786,42786],"mapped",[42787]],[[42787,42787],"valid"],[[42788,42788],"mapped",[42789]],[[4
2789,42789],"valid"],[[42790,42790],"mapped",[42791]],[[42791,42791],"valid"],[[42792,42792],"mapped",[42793]],[[42793,42793],"valid"],[[42794,42794],"mapped",[42795]],[[42795,42795],"valid"],[[42796,42796],"mapped",[42797]],[[42797,42797],"valid"],[[42798,42798],"mapped",[42799]],[[42799,42801],"valid"],[[42802,42802],"mapped",[42803]],[[42803,42803],"valid"],[[42804,42804],"mapped",[42805]],[[42805,42805],"valid"],[[42806,42806],"mapped",[42807]],[[42807,42807],"valid"],[[42808,42808],"mapped",[42809]],[[42809,42809],"valid"],[[42810,42810],"mapped",[42811]],[[42811,42811],"valid"],[[42812,42812],"mapped",[42813]],[[42813,42813],"valid"],[[42814,42814],"mapped",[42815]],[[42815,42815],"valid"],[[42816,42816],"mapped",[42817]],[[42817,42817],"valid"],[[42818,42818],"mapped",[42819]],[[42819,42819],"valid"],[[42820,42820],"mapped",[42821]],[[42821,42821],"valid"],[[42822,42822],"mapped",[42823]],[[42823,42823],"valid"],[[42824,42824],"mapped",[42825]],[[42825,42825],"valid"],[[42826,42826],"mapped",[42827]],[[42827,42827],"valid"],[[42828,42828],"mapped",[42829]],[[42829,42829],"valid"],[[42830,42830],"mapped",[42831]],[[42831,42831],"valid"],[[42832,42832],"mapped",[42833]],[[42833,42833],"valid"],[[42834,42834],"mapped",[42835]],[[42835,42835],"valid"],[[42836,42836],"mapped",[42837]],[[42837,42837],"valid"],[[42838,42838],"mapped",[42839]],[[42839,42839],"valid"],[[42840,42840],"mapped",[42841]],[[42841,42841],"valid"],[[42842,42842],"mapped",[42843]],[[42843,42843],"valid"],[[42844,42844],"mapped",[42845]],[[42845,42845],"valid"],[[42846,42846],"mapped",[42847]],[[42847,42847],"valid"],[[42848,42848],"mapped",[42849]],[[42849,42849],"valid"],[[42850,42850],"mapped",[42851]],[[42851,42851],"valid"],[[42852,42852],"mapped",[42853]],[[42853,42853],"valid"],[[42854,42854],"mapped",[42855]],[[42855,42855],"valid"],[[42856,42856],"mapped",[42857]],[[42857,42857],"valid"],[[42858,42858],"mapped",[42859]],[[42859,42859],"valid"],[[42860,42860],"mapped",[42861]],[[42861,42861],"valid"],[[42862,42862],"mapped",[42863]],[[42863,42863],"valid"],[[42864,42864],"mapped",[42863]],[[42865,42872],"valid"],[[42873,42873],"mapped",[42874]],[[42874,42874],"valid"],[[42875,42875],"mapped",[42876]],[[42876,42876],"valid"],[[42877,42877],"mapped",[7545]],[[42878,42878],"mapped",[42879]],[[42879,42879],"valid"],[[42880,42880],"mapped",[42881]],[[42881,42881],"valid"],[[42882,42882],"mapped",[42883]],[[42883,42883],"valid"],[[42884,42884],"mapped",[42885]],[[42885,42885],"valid"],[[42886,42886],"mapped",[42887]],[[42887,42888],"valid"],[[42889,42890],"valid",[],"NV8"],[[42891,42891],"mapped",[42892]],[[42892,42892],"valid"],[[42893,42893],"mapped",[613]],[[42894,42894],"valid"],[[42895,42895],"valid"],[[42896,42896],"mapped",[42897]],[[42897,42897],"valid"],[[42898,42898],"mapped",[42899]],[[42899,42899],"valid"],[[42900,42901],"valid"],[[42902,42902],"mapped",[42903]],[[42903,42903],"valid"],[[42904,42904],"mapped",[42905]],[[42905,42905],"valid"],[[42906,42906],"mapped",[42907]],[[42907,42907],"valid"],[[42908,42908],"mapped",[42909]],[[42909,42909],"valid"],[[42910,42910],"mapped",[42911]],[[42911,42911],"valid"],[[42912,42912],"mapped",[42913]],[[42913,42913],"valid"],[[42914,42914],"mapped",[42915]],[[42915,42915],"valid"],[[42916,42916],"mapped",[42917]],[[42917,42917],"valid"],[[42918,42918],"mapped",[42919]],[[42919,42919],"valid"],[[42920,42920],"mapped",[42921]],[[42921,42921],"valid"],[[42922,42922],"mapped",[614]],[[42923,42923],"mapped",[604]],[[42924,42924],"mapped",[609]],[[42925,42925],"mapped",
[620]],[[42926,42927],"disallowed"],[[42928,42928],"mapped",[670]],[[42929,42929],"mapped",[647]],[[42930,42930],"mapped",[669]],[[42931,42931],"mapped",[43859]],[[42932,42932],"mapped",[42933]],[[42933,42933],"valid"],[[42934,42934],"mapped",[42935]],[[42935,42935],"valid"],[[42936,42998],"disallowed"],[[42999,42999],"valid"],[[43000,43000],"mapped",[295]],[[43001,43001],"mapped",[339]],[[43002,43002],"valid"],[[43003,43007],"valid"],[[43008,43047],"valid"],[[43048,43051],"valid",[],"NV8"],[[43052,43055],"disallowed"],[[43056,43065],"valid",[],"NV8"],[[43066,43071],"disallowed"],[[43072,43123],"valid"],[[43124,43127],"valid",[],"NV8"],[[43128,43135],"disallowed"],[[43136,43204],"valid"],[[43205,43213],"disallowed"],[[43214,43215],"valid",[],"NV8"],[[43216,43225],"valid"],[[43226,43231],"disallowed"],[[43232,43255],"valid"],[[43256,43258],"valid",[],"NV8"],[[43259,43259],"valid"],[[43260,43260],"valid",[],"NV8"],[[43261,43261],"valid"],[[43262,43263],"disallowed"],[[43264,43309],"valid"],[[43310,43311],"valid",[],"NV8"],[[43312,43347],"valid"],[[43348,43358],"disallowed"],[[43359,43359],"valid",[],"NV8"],[[43360,43388],"valid",[],"NV8"],[[43389,43391],"disallowed"],[[43392,43456],"valid"],[[43457,43469],"valid",[],"NV8"],[[43470,43470],"disallowed"],[[43471,43481],"valid"],[[43482,43485],"disallowed"],[[43486,43487],"valid",[],"NV8"],[[43488,43518],"valid"],[[43519,43519],"disallowed"],[[43520,43574],"valid"],[[43575,43583],"disallowed"],[[43584,43597],"valid"],[[43598,43599],"disallowed"],[[43600,43609],"valid"],[[43610,43611],"disallowed"],[[43612,43615],"valid",[],"NV8"],[[43616,43638],"valid"],[[43639,43641],"valid",[],"NV8"],[[43642,43643],"valid"],[[43644,43647],"valid"],[[43648,43714],"valid"],[[43715,43738],"disallowed"],[[43739,43741],"valid"],[[43742,43743],"valid",[],"NV8"],[[43744,43759],"valid"],[[43760,43761],"valid",[],"NV8"],[[43762,43766],"valid"],[[43767,43776],"disallowed"],[[43777,43782],"valid"],[[43783,43784],"disallowed"],[[43785,43790],"valid"],[[43791,43792],"disallowed"],[[43793,43798],"valid"],[[43799,43807],"disallowed"],[[43808,43814],"valid"],[[43815,43815],"disallowed"],[[43816,43822],"valid"],[[43823,43823],"disallowed"],[[43824,43866],"valid"],[[43867,43867],"valid",[],"NV8"],[[43868,43868],"mapped",[42791]],[[43869,43869],"mapped",[43831]],[[43870,43870],"mapped",[619]],[[43871,43871],"mapped",[43858]],[[43872,43875],"valid"],[[43876,43877],"valid"],[[43878,43887],"disallowed"],[[43888,43888],"mapped",[5024]],[[43889,43889],"mapped",[5025]],[[43890,43890],"mapped",[5026]],[[43891,43891],"mapped",[5027]],[[43892,43892],"mapped",[5028]],[[43893,43893],"mapped",[5029]],[[43894,43894],"mapped",[5030]],[[43895,43895],"mapped",[5031]],[[43896,43896],"mapped",[5032]],[[43897,43897],"mapped",[5033]],[[43898,43898],"mapped",[5034]],[[43899,43899],"mapped",[5035]],[[43900,43900],"mapped",[5036]],[[43901,43901],"mapped",[5037]],[[43902,43902],"mapped",[5038]],[[43903,43903],"mapped",[5039]],[[43904,43904],"mapped",[5040]],[[43905,43905],"mapped",[5041]],[[43906,43906],"mapped",[5042]],[[43907,43907],"mapped",[5043]],[[43908,43908],"mapped",[5044]],[[43909,43909],"mapped",[5045]],[[43910,43910],"mapped",[5046]],[[43911,43911],"mapped",[5047]],[[43912,43912],"mapped",[5048]],[[43913,43913],"mapped",[5049]],[[43914,43914],"mapped",[5050]],[[43915,43915],"mapped",[5051]],[[43916,43916],"mapped",[5052]],[[43917,43917],"mapped",[5053]],[[43918,43918],"mapped",[5054]],[[43919,43919],"mapped",[5055]],[[43920,43920],"mapped",[5056]],[[43921,43921],"mapped",[5057]],[[43922,4392
2],"mapped",[5058]],[[43923,43923],"mapped",[5059]],[[43924,43924],"mapped",[5060]],[[43925,43925],"mapped",[5061]],[[43926,43926],"mapped",[5062]],[[43927,43927],"mapped",[5063]],[[43928,43928],"mapped",[5064]],[[43929,43929],"mapped",[5065]],[[43930,43930],"mapped",[5066]],[[43931,43931],"mapped",[5067]],[[43932,43932],"mapped",[5068]],[[43933,43933],"mapped",[5069]],[[43934,43934],"mapped",[5070]],[[43935,43935],"mapped",[5071]],[[43936,43936],"mapped",[5072]],[[43937,43937],"mapped",[5073]],[[43938,43938],"mapped",[5074]],[[43939,43939],"mapped",[5075]],[[43940,43940],"mapped",[5076]],[[43941,43941],"mapped",[5077]],[[43942,43942],"mapped",[5078]],[[43943,43943],"mapped",[5079]],[[43944,43944],"mapped",[5080]],[[43945,43945],"mapped",[5081]],[[43946,43946],"mapped",[5082]],[[43947,43947],"mapped",[5083]],[[43948,43948],"mapped",[5084]],[[43949,43949],"mapped",[5085]],[[43950,43950],"mapped",[5086]],[[43951,43951],"mapped",[5087]],[[43952,43952],"mapped",[5088]],[[43953,43953],"mapped",[5089]],[[43954,43954],"mapped",[5090]],[[43955,43955],"mapped",[5091]],[[43956,43956],"mapped",[5092]],[[43957,43957],"mapped",[5093]],[[43958,43958],"mapped",[5094]],[[43959,43959],"mapped",[5095]],[[43960,43960],"mapped",[5096]],[[43961,43961],"mapped",[5097]],[[43962,43962],"mapped",[5098]],[[43963,43963],"mapped",[5099]],[[43964,43964],"mapped",[5100]],[[43965,43965],"mapped",[5101]],[[43966,43966],"mapped",[5102]],[[43967,43967],"mapped",[5103]],[[43968,44010],"valid"],[[44011,44011],"valid",[],"NV8"],[[44012,44013],"valid"],[[44014,44015],"disallowed"],[[44016,44025],"valid"],[[44026,44031],"disallowed"],[[44032,55203],"valid"],[[55204,55215],"disallowed"],[[55216,55238],"valid",[],"NV8"],[[55239,55242],"disallowed"],[[55243,55291],"valid",[],"NV8"],[[55292,55295],"disallowed"],[[55296,57343],"disallowed"],[[57344,63743],"disallowed"],[[63744,63744],"mapped",[35912]],[[63745,63745],"mapped",[26356]],[[63746,63746],"mapped",[36554]],[[63747,63747],"mapped",[36040]],[[63748,63748],"mapped",[28369]],[[63749,63749],"mapped",[20018]],[[63750,63750],"mapped",[21477]],[[63751,63752],"mapped",[40860]],[[63753,63753],"mapped",[22865]],[[63754,63754],"mapped",[37329]],[[63755,63755],"mapped",[21895]],[[63756,63756],"mapped",[22856]],[[63757,63757],"mapped",[25078]],[[63758,63758],"mapped",[30313]],[[63759,63759],"mapped",[32645]],[[63760,63760],"mapped",[34367]],[[63761,63761],"mapped",[34746]],[[63762,63762],"mapped",[35064]],[[63763,63763],"mapped",[37007]],[[63764,63764],"mapped",[27138]],[[63765,63765],"mapped",[27931]],[[63766,63766],"mapped",[28889]],[[63767,63767],"mapped",[29662]],[[63768,63768],"mapped",[33853]],[[63769,63769],"mapped",[37226]],[[63770,63770],"mapped",[39409]],[[63771,63771],"mapped",[20098]],[[63772,63772],"mapped",[21365]],[[63773,63773],"mapped",[27396]],[[63774,63774],"mapped",[29211]],[[63775,63775],"mapped",[34349]],[[63776,63776],"mapped",[40478]],[[63777,63777],"mapped",[23888]],[[63778,63778],"mapped",[28651]],[[63779,63779],"mapped",[34253]],[[63780,63780],"mapped",[35172]],[[63781,63781],"mapped",[25289]],[[63782,63782],"mapped",[33240]],[[63783,63783],"mapped",[34847]],[[63784,63784],"mapped",[24266]],[[63785,63785],"mapped",[26391]],[[63786,63786],"mapped",[28010]],[[63787,63787],"mapped",[29436]],[[63788,63788],"mapped",[37070]],[[63789,63789],"mapped",[20358]],[[63790,63790],"mapped",[20919]],[[63791,63791],"mapped",[21214]],[[63792,63792],"mapped",[25796]],[[63793,63793],"mapped",[27347]],[[63794,63794],"mapped",[29200]],[[63795,63795],"mapped",[30439]],[[63796,63796]
,"mapped",[32769]],[[63797,63797],"mapped",[34310]],[[63798,63798],"mapped",[34396]],[[63799,63799],"mapped",[36335]],[[63800,63800],"mapped",[38706]],[[63801,63801],"mapped",[39791]],[[63802,63802],"mapped",[40442]],[[63803,63803],"mapped",[30860]],[[63804,63804],"mapped",[31103]],[[63805,63805],"mapped",[32160]],[[63806,63806],"mapped",[33737]],[[63807,63807],"mapped",[37636]],[[63808,63808],"mapped",[40575]],[[63809,63809],"mapped",[35542]],[[63810,63810],"mapped",[22751]],[[63811,63811],"mapped",[24324]],[[63812,63812],"mapped",[31840]],[[63813,63813],"mapped",[32894]],[[63814,63814],"mapped",[29282]],[[63815,63815],"mapped",[30922]],[[63816,63816],"mapped",[36034]],[[63817,63817],"mapped",[38647]],[[63818,63818],"mapped",[22744]],[[63819,63819],"mapped",[23650]],[[63820,63820],"mapped",[27155]],[[63821,63821],"mapped",[28122]],[[63822,63822],"mapped",[28431]],[[63823,63823],"mapped",[32047]],[[63824,63824],"mapped",[32311]],[[63825,63825],"mapped",[38475]],[[63826,63826],"mapped",[21202]],[[63827,63827],"mapped",[32907]],[[63828,63828],"mapped",[20956]],[[63829,63829],"mapped",[20940]],[[63830,63830],"mapped",[31260]],[[63831,63831],"mapped",[32190]],[[63832,63832],"mapped",[33777]],[[63833,63833],"mapped",[38517]],[[63834,63834],"mapped",[35712]],[[63835,63835],"mapped",[25295]],[[63836,63836],"mapped",[27138]],[[63837,63837],"mapped",[35582]],[[63838,63838],"mapped",[20025]],[[63839,63839],"mapped",[23527]],[[63840,63840],"mapped",[24594]],[[63841,63841],"mapped",[29575]],[[63842,63842],"mapped",[30064]],[[63843,63843],"mapped",[21271]],[[63844,63844],"mapped",[30971]],[[63845,63845],"mapped",[20415]],[[63846,63846],"mapped",[24489]],[[63847,63847],"mapped",[19981]],[[63848,63848],"mapped",[27852]],[[63849,63849],"mapped",[25976]],[[63850,63850],"mapped",[32034]],[[63851,63851],"mapped",[21443]],[[63852,63852],"mapped",[22622]],[[63853,63853],"mapped",[30465]],[[63854,63854],"mapped",[33865]],[[63855,63855],"mapped",[35498]],[[63856,63856],"mapped",[27578]],[[63857,63857],"mapped",[36784]],[[63858,63858],"mapped",[27784]],[[63859,63859],"mapped",[25342]],[[63860,63860],"mapped",[33509]],[[63861,63861],"mapped",[25504]],[[63862,63862],"mapped",[30053]],[[63863,63863],"mapped",[20142]],[[63864,63864],"mapped",[20841]],[[63865,63865],"mapped",[20937]],[[63866,63866],"mapped",[26753]],[[63867,63867],"mapped",[31975]],[[63868,63868],"mapped",[33391]],[[63869,63869],"mapped",[35538]],[[63870,63870],"mapped",[37327]],[[63871,63871],"mapped",[21237]],[[63872,63872],"mapped",[21570]],[[63873,63873],"mapped",[22899]],[[63874,63874],"mapped",[24300]],[[63875,63875],"mapped",[26053]],[[63876,63876],"mapped",[28670]],[[63877,63877],"mapped",[31018]],[[63878,63878],"mapped",[38317]],[[63879,63879],"mapped",[39530]],[[63880,63880],"mapped",[40599]],[[63881,63881],"mapped",[40654]],[[63882,63882],"mapped",[21147]],[[63883,63883],"mapped",[26310]],[[63884,63884],"mapped",[27511]],[[63885,63885],"mapped",[36706]],[[63886,63886],"mapped",[24180]],[[63887,63887],"mapped",[24976]],[[63888,63888],"mapped",[25088]],[[63889,63889],"mapped",[25754]],[[63890,63890],"mapped",[28451]],[[63891,63891],"mapped",[29001]],[[63892,63892],"mapped",[29833]],[[63893,63893],"mapped",[31178]],[[63894,63894],"mapped",[32244]],[[63895,63895],"mapped",[32879]],[[63896,63896],"mapped",[36646]],[[63897,63897],"mapped",[34030]],[[63898,63898],"mapped",[36899]],[[63899,63899],"mapped",[37706]],[[63900,63900],"mapped",[21015]],[[63901,63901],"mapped",[21155]],[[63902,63902],"mapped",[21693]],[[63903,63903],"mapped",[28872]],[[639
04,63904],"mapped",[35010]],[[63905,63905],"mapped",[35498]],[[63906,63906],"mapped",[24265]],[[63907,63907],"mapped",[24565]],[[63908,63908],"mapped",[25467]],[[63909,63909],"mapped",[27566]],[[63910,63910],"mapped",[31806]],[[63911,63911],"mapped",[29557]],[[63912,63912],"mapped",[20196]],[[63913,63913],"mapped",[22265]],[[63914,63914],"mapped",[23527]],[[63915,63915],"mapped",[23994]],[[63916,63916],"mapped",[24604]],[[63917,63917],"mapped",[29618]],[[63918,63918],"mapped",[29801]],[[63919,63919],"mapped",[32666]],[[63920,63920],"mapped",[32838]],[[63921,63921],"mapped",[37428]],[[63922,63922],"mapped",[38646]],[[63923,63923],"mapped",[38728]],[[63924,63924],"mapped",[38936]],[[63925,63925],"mapped",[20363]],[[63926,63926],"mapped",[31150]],[[63927,63927],"mapped",[37300]],[[63928,63928],"mapped",[38584]],[[63929,63929],"mapped",[24801]],[[63930,63930],"mapped",[20102]],[[63931,63931],"mapped",[20698]],[[63932,63932],"mapped",[23534]],[[63933,63933],"mapped",[23615]],[[63934,63934],"mapped",[26009]],[[63935,63935],"mapped",[27138]],[[63936,63936],"mapped",[29134]],[[63937,63937],"mapped",[30274]],[[63938,63938],"mapped",[34044]],[[63939,63939],"mapped",[36988]],[[63940,63940],"mapped",[40845]],[[63941,63941],"mapped",[26248]],[[63942,63942],"mapped",[38446]],[[63943,63943],"mapped",[21129]],[[63944,63944],"mapped",[26491]],[[63945,63945],"mapped",[26611]],[[63946,63946],"mapped",[27969]],[[63947,63947],"mapped",[28316]],[[63948,63948],"mapped",[29705]],[[63949,63949],"mapped",[30041]],[[63950,63950],"mapped",[30827]],[[63951,63951],"mapped",[32016]],[[63952,63952],"mapped",[39006]],[[63953,63953],"mapped",[20845]],[[63954,63954],"mapped",[25134]],[[63955,63955],"mapped",[38520]],[[63956,63956],"mapped",[20523]],[[63957,63957],"mapped",[23833]],[[63958,63958],"mapped",[28138]],[[63959,63959],"mapped",[36650]],[[63960,63960],"mapped",[24459]],[[63961,63961],"mapped",[24900]],[[63962,63962],"mapped",[26647]],[[63963,63963],"mapped",[29575]],[[63964,63964],"mapped",[38534]],[[63965,63965],"mapped",[21033]],[[63966,63966],"mapped",[21519]],[[63967,63967],"mapped",[23653]],[[63968,63968],"mapped",[26131]],[[63969,63969],"mapped",[26446]],[[63970,63970],"mapped",[26792]],[[63971,63971],"mapped",[27877]],[[63972,63972],"mapped",[29702]],[[63973,63973],"mapped",[30178]],[[63974,63974],"mapped",[32633]],[[63975,63975],"mapped",[35023]],[[63976,63976],"mapped",[35041]],[[63977,63977],"mapped",[37324]],[[63978,63978],"mapped",[38626]],[[63979,63979],"mapped",[21311]],[[63980,63980],"mapped",[28346]],[[63981,63981],"mapped",[21533]],[[63982,63982],"mapped",[29136]],[[63983,63983],"mapped",[29848]],[[63984,63984],"mapped",[34298]],[[63985,63985],"mapped",[38563]],[[63986,63986],"mapped",[40023]],[[63987,63987],"mapped",[40607]],[[63988,63988],"mapped",[26519]],[[63989,63989],"mapped",[28107]],[[63990,63990],"mapped",[33256]],[[63991,63991],"mapped",[31435]],[[63992,63992],"mapped",[31520]],[[63993,63993],"mapped",[31890]],[[63994,63994],"mapped",[29376]],[[63995,63995],"mapped",[28825]],[[63996,63996],"mapped",[35672]],[[63997,63997],"mapped",[20160]],[[63998,63998],"mapped",[33590]],[[63999,63999],"mapped",[21050]],[[64000,64000],"mapped",[20999]],[[64001,64001],"mapped",[24230]],[[64002,64002],"mapped",[25299]],[[64003,64003],"mapped",[31958]],[[64004,64004],"mapped",[23429]],[[64005,64005],"mapped",[27934]],[[64006,64006],"mapped",[26292]],[[64007,64007],"mapped",[36667]],[[64008,64008],"mapped",[34892]],[[64009,64009],"mapped",[38477]],[[64010,64010],"mapped",[35211]],[[64011,64011],"mapped",[2427
5]],[[64012,64012],"mapped",[20800]],[[64013,64013],"mapped",[21952]],[[64014,64015],"valid"],[[64016,64016],"mapped",[22618]],[[64017,64017],"valid"],[[64018,64018],"mapped",[26228]],[[64019,64020],"valid"],[[64021,64021],"mapped",[20958]],[[64022,64022],"mapped",[29482]],[[64023,64023],"mapped",[30410]],[[64024,64024],"mapped",[31036]],[[64025,64025],"mapped",[31070]],[[64026,64026],"mapped",[31077]],[[64027,64027],"mapped",[31119]],[[64028,64028],"mapped",[38742]],[[64029,64029],"mapped",[31934]],[[64030,64030],"mapped",[32701]],[[64031,64031],"valid"],[[64032,64032],"mapped",[34322]],[[64033,64033],"valid"],[[64034,64034],"mapped",[35576]],[[64035,64036],"valid"],[[64037,64037],"mapped",[36920]],[[64038,64038],"mapped",[37117]],[[64039,64041],"valid"],[[64042,64042],"mapped",[39151]],[[64043,64043],"mapped",[39164]],[[64044,64044],"mapped",[39208]],[[64045,64045],"mapped",[40372]],[[64046,64046],"mapped",[37086]],[[64047,64047],"mapped",[38583]],[[64048,64048],"mapped",[20398]],[[64049,64049],"mapped",[20711]],[[64050,64050],"mapped",[20813]],[[64051,64051],"mapped",[21193]],[[64052,64052],"mapped",[21220]],[[64053,64053],"mapped",[21329]],[[64054,64054],"mapped",[21917]],[[64055,64055],"mapped",[22022]],[[64056,64056],"mapped",[22120]],[[64057,64057],"mapped",[22592]],[[64058,64058],"mapped",[22696]],[[64059,64059],"mapped",[23652]],[[64060,64060],"mapped",[23662]],[[64061,64061],"mapped",[24724]],[[64062,64062],"mapped",[24936]],[[64063,64063],"mapped",[24974]],[[64064,64064],"mapped",[25074]],[[64065,64065],"mapped",[25935]],[[64066,64066],"mapped",[26082]],[[64067,64067],"mapped",[26257]],[[64068,64068],"mapped",[26757]],[[64069,64069],"mapped",[28023]],[[64070,64070],"mapped",[28186]],[[64071,64071],"mapped",[28450]],[[64072,64072],"mapped",[29038]],[[64073,64073],"mapped",[29227]],[[64074,64074],"mapped",[29730]],[[64075,64075],"mapped",[30865]],[[64076,64076],"mapped",[31038]],[[64077,64077],"mapped",[31049]],[[64078,64078],"mapped",[31048]],[[64079,64079],"mapped",[31056]],[[64080,64080],"mapped",[31062]],[[64081,64081],"mapped",[31069]],[[64082,64082],"mapped",[31117]],[[64083,64083],"mapped",[31118]],[[64084,64084],"mapped",[31296]],[[64085,64085],"mapped",[31361]],[[64086,64086],"mapped",[31680]],[[64087,64087],"mapped",[32244]],[[64088,64088],"mapped",[32265]],[[64089,64089],"mapped",[32321]],[[64090,64090],"mapped",[32626]],[[64091,64091],"mapped",[32773]],[[64092,64092],"mapped",[33261]],[[64093,64094],"mapped",[33401]],[[64095,64095],"mapped",[33879]],[[64096,64096],"mapped",[35088]],[[64097,64097],"mapped",[35222]],[[64098,64098],"mapped",[35585]],[[64099,64099],"mapped",[35641]],[[64100,64100],"mapped",[36051]],[[64101,64101],"mapped",[36104]],[[64102,64102],"mapped",[36790]],[[64103,64103],"mapped",[36920]],[[64104,64104],"mapped",[38627]],[[64105,64105],"mapped",[38911]],[[64106,64106],"mapped",[38971]],[[64107,64107],"mapped",[24693]],[[64108,64108],"mapped",[148206]],[[64109,64109],"mapped",[33304]],[[64110,64111],"disallowed"],[[64112,64112],"mapped",[20006]],[[64113,64113],"mapped",[20917]],[[64114,64114],"mapped",[20840]],[[64115,64115],"mapped",[20352]],[[64116,64116],"mapped",[20805]],[[64117,64117],"mapped",[20864]],[[64118,64118],"mapped",[21191]],[[64119,64119],"mapped",[21242]],[[64120,64120],"mapped",[21917]],[[64121,64121],"mapped",[21845]],[[64122,64122],"mapped",[21913]],[[64123,64123],"mapped",[21986]],[[64124,64124],"mapped",[22618]],[[64125,64125],"mapped",[22707]],[[64126,64126],"mapped",[22852]],[[64127,64127],"mapped",[22868]],[[64128,64128],"mapp
ed",[23138]],[[64129,64129],"mapped",[23336]],[[64130,64130],"mapped",[24274]],[[64131,64131],"mapped",[24281]],[[64132,64132],"mapped",[24425]],[[64133,64133],"mapped",[24493]],[[64134,64134],"mapped",[24792]],[[64135,64135],"mapped",[24910]],[[64136,64136],"mapped",[24840]],[[64137,64137],"mapped",[24974]],[[64138,64138],"mapped",[24928]],[[64139,64139],"mapped",[25074]],[[64140,64140],"mapped",[25140]],[[64141,64141],"mapped",[25540]],[[64142,64142],"mapped",[25628]],[[64143,64143],"mapped",[25682]],[[64144,64144],"mapped",[25942]],[[64145,64145],"mapped",[26228]],[[64146,64146],"mapped",[26391]],[[64147,64147],"mapped",[26395]],[[64148,64148],"mapped",[26454]],[[64149,64149],"mapped",[27513]],[[64150,64150],"mapped",[27578]],[[64151,64151],"mapped",[27969]],[[64152,64152],"mapped",[28379]],[[64153,64153],"mapped",[28363]],[[64154,64154],"mapped",[28450]],[[64155,64155],"mapped",[28702]],[[64156,64156],"mapped",[29038]],[[64157,64157],"mapped",[30631]],[[64158,64158],"mapped",[29237]],[[64159,64159],"mapped",[29359]],[[64160,64160],"mapped",[29482]],[[64161,64161],"mapped",[29809]],[[64162,64162],"mapped",[29958]],[[64163,64163],"mapped",[30011]],[[64164,64164],"mapped",[30237]],[[64165,64165],"mapped",[30239]],[[64166,64166],"mapped",[30410]],[[64167,64167],"mapped",[30427]],[[64168,64168],"mapped",[30452]],[[64169,64169],"mapped",[30538]],[[64170,64170],"mapped",[30528]],[[64171,64171],"mapped",[30924]],[[64172,64172],"mapped",[31409]],[[64173,64173],"mapped",[31680]],[[64174,64174],"mapped",[31867]],[[64175,64175],"mapped",[32091]],[[64176,64176],"mapped",[32244]],[[64177,64177],"mapped",[32574]],[[64178,64178],"mapped",[32773]],[[64179,64179],"mapped",[33618]],[[64180,64180],"mapped",[33775]],[[64181,64181],"mapped",[34681]],[[64182,64182],"mapped",[35137]],[[64183,64183],"mapped",[35206]],[[64184,64184],"mapped",[35222]],[[64185,64185],"mapped",[35519]],[[64186,64186],"mapped",[35576]],[[64187,64187],"mapped",[35531]],[[64188,64188],"mapped",[35585]],[[64189,64189],"mapped",[35582]],[[64190,64190],"mapped",[35565]],[[64191,64191],"mapped",[35641]],[[64192,64192],"mapped",[35722]],[[64193,64193],"mapped",[36104]],[[64194,64194],"mapped",[36664]],[[64195,64195],"mapped",[36978]],[[64196,64196],"mapped",[37273]],[[64197,64197],"mapped",[37494]],[[64198,64198],"mapped",[38524]],[[64199,64199],"mapped",[38627]],[[64200,64200],"mapped",[38742]],[[64201,64201],"mapped",[38875]],[[64202,64202],"mapped",[38911]],[[64203,64203],"mapped",[38923]],[[64204,64204],"mapped",[38971]],[[64205,64205],"mapped",[39698]],[[64206,64206],"mapped",[40860]],[[64207,64207],"mapped",[141386]],[[64208,64208],"mapped",[141380]],[[64209,64209],"mapped",[144341]],[[64210,64210],"mapped",[15261]],[[64211,64211],"mapped",[16408]],[[64212,64212],"mapped",[16441]],[[64213,64213],"mapped",[152137]],[[64214,64214],"mapped",[154832]],[[64215,64215],"mapped",[163539]],[[64216,64216],"mapped",[40771]],[[64217,64217],"mapped",[40846]],[[64218,64255],"disallowed"],[[64256,64256],"mapped",[102,102]],[[64257,64257],"mapped",[102,105]],[[64258,64258],"mapped",[102,108]],[[64259,64259],"mapped",[102,102,105]],[[64260,64260],"mapped",[102,102,108]],[[64261,64262],"mapped",[115,116]],[[64263,64274],"disallowed"],[[64275,64275],"mapped",[1396,1398]],[[64276,64276],"mapped",[1396,1381]],[[64277,64277],"mapped",[1396,1387]],[[64278,64278],"mapped",[1406,1398]],[[64279,64279],"mapped",[1396,1389]],[[64280,64284],"disallowed"],[[64285,64285],"mapped",[1497,1460]],[[64286,64286],"valid"],[[64287,64287],"mapped",[1522,1463]],[[64288,642
88],"mapped",[1506]],[[64289,64289],"mapped",[1488]],[[64290,64290],"mapped",[1491]],[[64291,64291],"mapped",[1492]],[[64292,64292],"mapped",[1499]],[[64293,64293],"mapped",[1500]],[[64294,64294],"mapped",[1501]],[[64295,64295],"mapped",[1512]],[[64296,64296],"mapped",[1514]],[[64297,64297],"disallowed_STD3_mapped",[43]],[[64298,64298],"mapped",[1513,1473]],[[64299,64299],"mapped",[1513,1474]],[[64300,64300],"mapped",[1513,1468,1473]],[[64301,64301],"mapped",[1513,1468,1474]],[[64302,64302],"mapped",[1488,1463]],[[64303,64303],"mapped",[1488,1464]],[[64304,64304],"mapped",[1488,1468]],[[64305,64305],"mapped",[1489,1468]],[[64306,64306],"mapped",[1490,1468]],[[64307,64307],"mapped",[1491,1468]],[[64308,64308],"mapped",[1492,1468]],[[64309,64309],"mapped",[1493,1468]],[[64310,64310],"mapped",[1494,1468]],[[64311,64311],"disallowed"],[[64312,64312],"mapped",[1496,1468]],[[64313,64313],"mapped",[1497,1468]],[[64314,64314],"mapped",[1498,1468]],[[64315,64315],"mapped",[1499,1468]],[[64316,64316],"mapped",[1500,1468]],[[64317,64317],"disallowed"],[[64318,64318],"mapped",[1502,1468]],[[64319,64319],"disallowed"],[[64320,64320],"mapped",[1504,1468]],[[64321,64321],"mapped",[1505,1468]],[[64322,64322],"disallowed"],[[64323,64323],"mapped",[1507,1468]],[[64324,64324],"mapped",[1508,1468]],[[64325,64325],"disallowed"],[[64326,64326],"mapped",[1510,1468]],[[64327,64327],"mapped",[1511,1468]],[[64328,64328],"mapped",[1512,1468]],[[64329,64329],"mapped",[1513,1468]],[[64330,64330],"mapped",[1514,1468]],[[64331,64331],"mapped",[1493,1465]],[[64332,64332],"mapped",[1489,1471]],[[64333,64333],"mapped",[1499,1471]],[[64334,64334],"mapped",[1508,1471]],[[64335,64335],"mapped",[1488,1500]],[[64336,64337],"mapped",[1649]],[[64338,64341],"mapped",[1659]],[[64342,64345],"mapped",[1662]],[[64346,64349],"mapped",[1664]],[[64350,64353],"mapped",[1658]],[[64354,64357],"mapped",[1663]],[[64358,64361],"mapped",[1657]],[[64362,64365],"mapped",[1700]],[[64366,64369],"mapped",[1702]],[[64370,64373],"mapped",[1668]],[[64374,64377],"mapped",[1667]],[[64378,64381],"mapped",[1670]],[[64382,64385],"mapped",[1671]],[[64386,64387],"mapped",[1677]],[[64388,64389],"mapped",[1676]],[[64390,64391],"mapped",[1678]],[[64392,64393],"mapped",[1672]],[[64394,64395],"mapped",[1688]],[[64396,64397],"mapped",[1681]],[[64398,64401],"mapped",[1705]],[[64402,64405],"mapped",[1711]],[[64406,64409],"mapped",[1715]],[[64410,64413],"mapped",[1713]],[[64414,64415],"mapped",[1722]],[[64416,64419],"mapped",[1723]],[[64420,64421],"mapped",[1728]],[[64422,64425],"mapped",[1729]],[[64426,64429],"mapped",[1726]],[[64430,64431],"mapped",[1746]],[[64432,64433],"mapped",[1747]],[[64434,64449],"valid",[],"NV8"],[[64450,64466],"disallowed"],[[64467,64470],"mapped",[1709]],[[64471,64472],"mapped",[1735]],[[64473,64474],"mapped",[1734]],[[64475,64476],"mapped",[1736]],[[64477,64477],"mapped",[1735,1652]],[[64478,64479],"mapped",[1739]],[[64480,64481],"mapped",[1733]],[[64482,64483],"mapped",[1737]],[[64484,64487],"mapped",[1744]],[[64488,64489],"mapped",[1609]],[[64490,64491],"mapped",[1574,1575]],[[64492,64493],"mapped",[1574,1749]],[[64494,64495],"mapped",[1574,1608]],[[64496,64497],"mapped",[1574,1735]],[[64498,64499],"mapped",[1574,1734]],[[64500,64501],"mapped",[1574,1736]],[[64502,64504],"mapped",[1574,1744]],[[64505,64507],"mapped",[1574,1609]],[[64508,64511],"mapped",[1740]],[[64512,64512],"mapped",[1574,1580]],[[64513,64513],"mapped",[1574,1581]],[[64514,64514],"mapped",[1574,1605]],[[64515,64515],"mapped",[1574,1609]],[[64516,64516],"mapped",[1574,161
0]],[[64517,64517],"mapped",[1576,1580]],[[64518,64518],"mapped",[1576,1581]],[[64519,64519],"mapped",[1576,1582]],[[64520,64520],"mapped",[1576,1605]],[[64521,64521],"mapped",[1576,1609]],[[64522,64522],"mapped",[1576,1610]],[[64523,64523],"mapped",[1578,1580]],[[64524,64524],"mapped",[1578,1581]],[[64525,64525],"mapped",[1578,1582]],[[64526,64526],"mapped",[1578,1605]],[[64527,64527],"mapped",[1578,1609]],[[64528,64528],"mapped",[1578,1610]],[[64529,64529],"mapped",[1579,1580]],[[64530,64530],"mapped",[1579,1605]],[[64531,64531],"mapped",[1579,1609]],[[64532,64532],"mapped",[1579,1610]],[[64533,64533],"mapped",[1580,1581]],[[64534,64534],"mapped",[1580,1605]],[[64535,64535],"mapped",[1581,1580]],[[64536,64536],"mapped",[1581,1605]],[[64537,64537],"mapped",[1582,1580]],[[64538,64538],"mapped",[1582,1581]],[[64539,64539],"mapped",[1582,1605]],[[64540,64540],"mapped",[1587,1580]],[[64541,64541],"mapped",[1587,1581]],[[64542,64542],"mapped",[1587,1582]],[[64543,64543],"mapped",[1587,1605]],[[64544,64544],"mapped",[1589,1581]],[[64545,64545],"mapped",[1589,1605]],[[64546,64546],"mapped",[1590,1580]],[[64547,64547],"mapped",[1590,1581]],[[64548,64548],"mapped",[1590,1582]],[[64549,64549],"mapped",[1590,1605]],[[64550,64550],"mapped",[1591,1581]],[[64551,64551],"mapped",[1591,1605]],[[64552,64552],"mapped",[1592,1605]],[[64553,64553],"mapped",[1593,1580]],[[64554,64554],"mapped",[1593,1605]],[[64555,64555],"mapped",[1594,1580]],[[64556,64556],"mapped",[1594,1605]],[[64557,64557],"mapped",[1601,1580]],[[64558,64558],"mapped",[1601,1581]],[[64559,64559],"mapped",[1601,1582]],[[64560,64560],"mapped",[1601,1605]],[[64561,64561],"mapped",[1601,1609]],[[64562,64562],"mapped",[1601,1610]],[[64563,64563],"mapped",[1602,1581]],[[64564,64564],"mapped",[1602,1605]],[[64565,64565],"mapped",[1602,1609]],[[64566,64566],"mapped",[1602,1610]],[[64567,64567],"mapped",[1603,1575]],[[64568,64568],"mapped",[1603,1580]],[[64569,64569],"mapped",[1603,1581]],[[64570,64570],"mapped",[1603,1582]],[[64571,64571],"mapped",[1603,1604]],[[64572,64572],"mapped",[1603,1605]],[[64573,64573],"mapped",[1603,1609]],[[64574,64574],"mapped",[1603,1610]],[[64575,64575],"mapped",[1604,1580]],[[64576,64576],"mapped",[1604,1581]],[[64577,64577],"mapped",[1604,1582]],[[64578,64578],"mapped",[1604,1605]],[[64579,64579],"mapped",[1604,1609]],[[64580,64580],"mapped",[1604,1610]],[[64581,64581],"mapped",[1605,1580]],[[64582,64582],"mapped",[1605,1581]],[[64583,64583],"mapped",[1605,1582]],[[64584,64584],"mapped",[1605,1605]],[[64585,64585],"mapped",[1605,1609]],[[64586,64586],"mapped",[1605,1610]],[[64587,64587],"mapped",[1606,1580]],[[64588,64588],"mapped",[1606,1581]],[[64589,64589],"mapped",[1606,1582]],[[64590,64590],"mapped",[1606,1605]],[[64591,64591],"mapped",[1606,1609]],[[64592,64592],"mapped",[1606,1610]],[[64593,64593],"mapped",[1607,1580]],[[64594,64594],"mapped",[1607,1605]],[[64595,64595],"mapped",[1607,1609]],[[64596,64596],"mapped",[1607,1610]],[[64597,64597],"mapped",[1610,1580]],[[64598,64598],"mapped",[1610,1581]],[[64599,64599],"mapped",[1610,1582]],[[64600,64600],"mapped",[1610,1605]],[[64601,64601],"mapped",[1610,1609]],[[64602,64602],"mapped",[1610,1610]],[[64603,64603],"mapped",[1584,1648]],[[64604,64604],"mapped",[1585,1648]],[[64605,64605],"mapped",[1609,1648]],[[64606,64606],"disallowed_STD3_mapped",[32,1612,1617]],[[64607,64607],"disallowed_STD3_mapped",[32,1613,1617]],[[64608,64608],"disallowed_STD3_mapped",[32,1614,1617]],[[64609,64609],"disallowed_STD3_mapped",[32,1615,1617]],[[64610,64610],"disallowed_STD3_ma
pped",[32,1616,1617]],[[64611,64611],"disallowed_STD3_mapped",[32,1617,1648]],[[64612,64612],"mapped",[1574,1585]],[[64613,64613],"mapped",[1574,1586]],[[64614,64614],"mapped",[1574,1605]],[[64615,64615],"mapped",[1574,1606]],[[64616,64616],"mapped",[1574,1609]],[[64617,64617],"mapped",[1574,1610]],[[64618,64618],"mapped",[1576,1585]],[[64619,64619],"mapped",[1576,1586]],[[64620,64620],"mapped",[1576,1605]],[[64621,64621],"mapped",[1576,1606]],[[64622,64622],"mapped",[1576,1609]],[[64623,64623],"mapped",[1576,1610]],[[64624,64624],"mapped",[1578,1585]],[[64625,64625],"mapped",[1578,1586]],[[64626,64626],"mapped",[1578,1605]],[[64627,64627],"mapped",[1578,1606]],[[64628,64628],"mapped",[1578,1609]],[[64629,64629],"mapped",[1578,1610]],[[64630,64630],"mapped",[1579,1585]],[[64631,64631],"mapped",[1579,1586]],[[64632,64632],"mapped",[1579,1605]],[[64633,64633],"mapped",[1579,1606]],[[64634,64634],"mapped",[1579,1609]],[[64635,64635],"mapped",[1579,1610]],[[64636,64636],"mapped",[1601,1609]],[[64637,64637],"mapped",[1601,1610]],[[64638,64638],"mapped",[1602,1609]],[[64639,64639],"mapped",[1602,1610]],[[64640,64640],"mapped",[1603,1575]],[[64641,64641],"mapped",[1603,1604]],[[64642,64642],"mapped",[1603,1605]],[[64643,64643],"mapped",[1603,1609]],[[64644,64644],"mapped",[1603,1610]],[[64645,64645],"mapped",[1604,1605]],[[64646,64646],"mapped",[1604,1609]],[[64647,64647],"mapped",[1604,1610]],[[64648,64648],"mapped",[1605,1575]],[[64649,64649],"mapped",[1605,1605]],[[64650,64650],"mapped",[1606,1585]],[[64651,64651],"mapped",[1606,1586]],[[64652,64652],"mapped",[1606,1605]],[[64653,64653],"mapped",[1606,1606]],[[64654,64654],"mapped",[1606,1609]],[[64655,64655],"mapped",[1606,1610]],[[64656,64656],"mapped",[1609,1648]],[[64657,64657],"mapped",[1610,1585]],[[64658,64658],"mapped",[1610,1586]],[[64659,64659],"mapped",[1610,1605]],[[64660,64660],"mapped",[1610,1606]],[[64661,64661],"mapped",[1610,1609]],[[64662,64662],"mapped",[1610,1610]],[[64663,64663],"mapped",[1574,1580]],[[64664,64664],"mapped",[1574,1581]],[[64665,64665],"mapped",[1574,1582]],[[64666,64666],"mapped",[1574,1605]],[[64667,64667],"mapped",[1574,1607]],[[64668,64668],"mapped",[1576,1580]],[[64669,64669],"mapped",[1576,1581]],[[64670,64670],"mapped",[1576,1582]],[[64671,64671],"mapped",[1576,1605]],[[64672,64672],"mapped",[1576,1607]],[[64673,64673],"mapped",[1578,1580]],[[64674,64674],"mapped",[1578,1581]],[[64675,64675],"mapped",[1578,1582]],[[64676,64676],"mapped",[1578,1605]],[[64677,64677],"mapped",[1578,1607]],[[64678,64678],"mapped",[1579,1605]],[[64679,64679],"mapped",[1580,1581]],[[64680,64680],"mapped",[1580,1605]],[[64681,64681],"mapped",[1581,1580]],[[64682,64682],"mapped",[1581,1605]],[[64683,64683],"mapped",[1582,1580]],[[64684,64684],"mapped",[1582,1605]],[[64685,64685],"mapped",[1587,1580]],[[64686,64686],"mapped",[1587,1581]],[[64687,64687],"mapped",[1587,1582]],[[64688,64688],"mapped",[1587,1605]],[[64689,64689],"mapped",[1589,1581]],[[64690,64690],"mapped",[1589,1582]],[[64691,64691],"mapped",[1589,1605]],[[64692,64692],"mapped",[1590,1580]],[[64693,64693],"mapped",[1590,1581]],[[64694,64694],"mapped",[1590,1582]],[[64695,64695],"mapped",[1590,1605]],[[64696,64696],"mapped",[1591,1581]],[[64697,64697],"mapped",[1592,1605]],[[64698,64698],"mapped",[1593,1580]],[[64699,64699],"mapped",[1593,1605]],[[64700,64700],"mapped",[1594,1580]],[[64701,64701],"mapped",[1594,1605]],[[64702,64702],"mapped",[1601,1580]],[[64703,64703],"mapped",[1601,1581]],[[64704,64704],"mapped",[1601,1582]],[[64705,64705],"mapped",[1601,1605]]
,[[64706,64706],"mapped",[1602,1581]],[[64707,64707],"mapped",[1602,1605]],[[64708,64708],"mapped",[1603,1580]],[[64709,64709],"mapped",[1603,1581]],[[64710,64710],"mapped",[1603,1582]],[[64711,64711],"mapped",[1603,1604]],[[64712,64712],"mapped",[1603,1605]],[[64713,64713],"mapped",[1604,1580]],[[64714,64714],"mapped",[1604,1581]],[[64715,64715],"mapped",[1604,1582]],[[64716,64716],"mapped",[1604,1605]],[[64717,64717],"mapped",[1604,1607]],[[64718,64718],"mapped",[1605,1580]],[[64719,64719],"mapped",[1605,1581]],[[64720,64720],"mapped",[1605,1582]],[[64721,64721],"mapped",[1605,1605]],[[64722,64722],"mapped",[1606,1580]],[[64723,64723],"mapped",[1606,1581]],[[64724,64724],"mapped",[1606,1582]],[[64725,64725],"mapped",[1606,1605]],[[64726,64726],"mapped",[1606,1607]],[[64727,64727],"mapped",[1607,1580]],[[64728,64728],"mapped",[1607,1605]],[[64729,64729],"mapped",[1607,1648]],[[64730,64730],"mapped",[1610,1580]],[[64731,64731],"mapped",[1610,1581]],[[64732,64732],"mapped",[1610,1582]],[[64733,64733],"mapped",[1610,1605]],[[64734,64734],"mapped",[1610,1607]],[[64735,64735],"mapped",[1574,1605]],[[64736,64736],"mapped",[1574,1607]],[[64737,64737],"mapped",[1576,1605]],[[64738,64738],"mapped",[1576,1607]],[[64739,64739],"mapped",[1578,1605]],[[64740,64740],"mapped",[1578,1607]],[[64741,64741],"mapped",[1579,1605]],[[64742,64742],"mapped",[1579,1607]],[[64743,64743],"mapped",[1587,1605]],[[64744,64744],"mapped",[1587,1607]],[[64745,64745],"mapped",[1588,1605]],[[64746,64746],"mapped",[1588,1607]],[[64747,64747],"mapped",[1603,1604]],[[64748,64748],"mapped",[1603,1605]],[[64749,64749],"mapped",[1604,1605]],[[64750,64750],"mapped",[1606,1605]],[[64751,64751],"mapped",[1606,1607]],[[64752,64752],"mapped",[1610,1605]],[[64753,64753],"mapped",[1610,1607]],[[64754,64754],"mapped",[1600,1614,1617]],[[64755,64755],"mapped",[1600,1615,1617]],[[64756,64756],"mapped",[1600,1616,1617]],[[64757,64757],"mapped",[1591,1609]],[[64758,64758],"mapped",[1591,1610]],[[64759,64759],"mapped",[1593,1609]],[[64760,64760],"mapped",[1593,1610]],[[64761,64761],"mapped",[1594,1609]],[[64762,64762],"mapped",[1594,1610]],[[64763,64763],"mapped",[1587,1609]],[[64764,64764],"mapped",[1587,1610]],[[64765,64765],"mapped",[1588,1609]],[[64766,64766],"mapped",[1588,1610]],[[64767,64767],"mapped",[1581,1609]],[[64768,64768],"mapped",[1581,1610]],[[64769,64769],"mapped",[1580,1609]],[[64770,64770],"mapped",[1580,1610]],[[64771,64771],"mapped",[1582,1609]],[[64772,64772],"mapped",[1582,1610]],[[64773,64773],"mapped",[1589,1609]],[[64774,64774],"mapped",[1589,1610]],[[64775,64775],"mapped",[1590,1609]],[[64776,64776],"mapped",[1590,1610]],[[64777,64777],"mapped",[1588,1580]],[[64778,64778],"mapped",[1588,1581]],[[64779,64779],"mapped",[1588,1582]],[[64780,64780],"mapped",[1588,1605]],[[64781,64781],"mapped",[1588,1585]],[[64782,64782],"mapped",[1587,1585]],[[64783,64783],"mapped",[1589,1585]],[[64784,64784],"mapped",[1590,1585]],[[64785,64785],"mapped",[1591,1609]],[[64786,64786],"mapped",[1591,1610]],[[64787,64787],"mapped",[1593,1609]],[[64788,64788],"mapped",[1593,1610]],[[64789,64789],"mapped",[1594,1609]],[[64790,64790],"mapped",[1594,1610]],[[64791,64791],"mapped",[1587,1609]],[[64792,64792],"mapped",[1587,1610]],[[64793,64793],"mapped",[1588,1609]],[[64794,64794],"mapped",[1588,1610]],[[64795,64795],"mapped",[1581,1609]],[[64796,64796],"mapped",[1581,1610]],[[64797,64797],"mapped",[1580,1609]],[[64798,64798],"mapped",[1580,1610]],[[64799,64799],"mapped",[1582,1609]],[[64800,64800],"mapped",[1582,1610]],[[64801,64801],"mapped",
[1589,1609]],[[64802,64802],"mapped",[1589,1610]],[[64803,64803],"mapped",[1590,1609]],[[64804,64804],"mapped",[1590,1610]],[[64805,64805],"mapped",[1588,1580]],[[64806,64806],"mapped",[1588,1581]],[[64807,64807],"mapped",[1588,1582]],[[64808,64808],"mapped",[1588,1605]],[[64809,64809],"mapped",[1588,1585]],[[64810,64810],"mapped",[1587,1585]],[[64811,64811],"mapped",[1589,1585]],[[64812,64812],"mapped",[1590,1585]],[[64813,64813],"mapped",[1588,1580]],[[64814,64814],"mapped",[1588,1581]],[[64815,64815],"mapped",[1588,1582]],[[64816,64816],"mapped",[1588,1605]],[[64817,64817],"mapped",[1587,1607]],[[64818,64818],"mapped",[1588,1607]],[[64819,64819],"mapped",[1591,1605]],[[64820,64820],"mapped",[1587,1580]],[[64821,64821],"mapped",[1587,1581]],[[64822,64822],"mapped",[1587,1582]],[[64823,64823],"mapped",[1588,1580]],[[64824,64824],"mapped",[1588,1581]],[[64825,64825],"mapped",[1588,1582]],[[64826,64826],"mapped",[1591,1605]],[[64827,64827],"mapped",[1592,1605]],[[64828,64829],"mapped",[1575,1611]],[[64830,64831],"valid",[],"NV8"],[[64832,64847],"disallowed"],[[64848,64848],"mapped",[1578,1580,1605]],[[64849,64850],"mapped",[1578,1581,1580]],[[64851,64851],"mapped",[1578,1581,1605]],[[64852,64852],"mapped",[1578,1582,1605]],[[64853,64853],"mapped",[1578,1605,1580]],[[64854,64854],"mapped",[1578,1605,1581]],[[64855,64855],"mapped",[1578,1605,1582]],[[64856,64857],"mapped",[1580,1605,1581]],[[64858,64858],"mapped",[1581,1605,1610]],[[64859,64859],"mapped",[1581,1605,1609]],[[64860,64860],"mapped",[1587,1581,1580]],[[64861,64861],"mapped",[1587,1580,1581]],[[64862,64862],"mapped",[1587,1580,1609]],[[64863,64864],"mapped",[1587,1605,1581]],[[64865,64865],"mapped",[1587,1605,1580]],[[64866,64867],"mapped",[1587,1605,1605]],[[64868,64869],"mapped",[1589,1581,1581]],[[64870,64870],"mapped",[1589,1605,1605]],[[64871,64872],"mapped",[1588,1581,1605]],[[64873,64873],"mapped",[1588,1580,1610]],[[64874,64875],"mapped",[1588,1605,1582]],[[64876,64877],"mapped",[1588,1605,1605]],[[64878,64878],"mapped",[1590,1581,1609]],[[64879,64880],"mapped",[1590,1582,1605]],[[64881,64882],"mapped",[1591,1605,1581]],[[64883,64883],"mapped",[1591,1605,1605]],[[64884,64884],"mapped",[1591,1605,1610]],[[64885,64885],"mapped",[1593,1580,1605]],[[64886,64887],"mapped",[1593,1605,1605]],[[64888,64888],"mapped",[1593,1605,1609]],[[64889,64889],"mapped",[1594,1605,1605]],[[64890,64890],"mapped",[1594,1605,1610]],[[64891,64891],"mapped",[1594,1605,1609]],[[64892,64893],"mapped",[1601,1582,1605]],[[64894,64894],"mapped",[1602,1605,1581]],[[64895,64895],"mapped",[1602,1605,1605]],[[64896,64896],"mapped",[1604,1581,1605]],[[64897,64897],"mapped",[1604,1581,1610]],[[64898,64898],"mapped",[1604,1581,1609]],[[64899,64900],"mapped",[1604,1580,1580]],[[64901,64902],"mapped",[1604,1582,1605]],[[64903,64904],"mapped",[1604,1605,1581]],[[64905,64905],"mapped",[1605,1581,1580]],[[64906,64906],"mapped",[1605,1581,1605]],[[64907,64907],"mapped",[1605,1581,1610]],[[64908,64908],"mapped",[1605,1580,1581]],[[64909,64909],"mapped",[1605,1580,1605]],[[64910,64910],"mapped",[1605,1582,1580]],[[64911,64911],"mapped",[1605,1582,1605]],[[64912,64913],"disallowed"],[[64914,64914],"mapped",[1605,1580,1582]],[[64915,64915],"mapped",[1607,1605,1580]],[[64916,64916],"mapped",[1607,1605,1605]],[[64917,64917],"mapped",[1606,1581,1605]],[[64918,64918],"mapped",[1606,1581,1609]],[[64919,64920],"mapped",[1606,1580,1605]],[[64921,64921],"mapped",[1606,1580,1609]],[[64922,64922],"mapped",[1606,1605,1610]],[[64923,64923],"mapped",[1606,1605,1609]],[[64924,64925],"
mapped",[1610,1605,1605]],[[64926,64926],"mapped",[1576,1582,1610]],[[64927,64927],"mapped",[1578,1580,1610]],[[64928,64928],"mapped",[1578,1580,1609]],[[64929,64929],"mapped",[1578,1582,1610]],[[64930,64930],"mapped",[1578,1582,1609]],[[64931,64931],"mapped",[1578,1605,1610]],[[64932,64932],"mapped",[1578,1605,1609]],[[64933,64933],"mapped",[1580,1605,1610]],[[64934,64934],"mapped",[1580,1581,1609]],[[64935,64935],"mapped",[1580,1605,1609]],[[64936,64936],"mapped",[1587,1582,1609]],[[64937,64937],"mapped",[1589,1581,1610]],[[64938,64938],"mapped",[1588,1581,1610]],[[64939,64939],"mapped",[1590,1581,1610]],[[64940,64940],"mapped",[1604,1580,1610]],[[64941,64941],"mapped",[1604,1605,1610]],[[64942,64942],"mapped",[1610,1581,1610]],[[64943,64943],"mapped",[1610,1580,1610]],[[64944,64944],"mapped",[1610,1605,1610]],[[64945,64945],"mapped",[1605,1605,1610]],[[64946,64946],"mapped",[1602,1605,1610]],[[64947,64947],"mapped",[1606,1581,1610]],[[64948,64948],"mapped",[1602,1605,1581]],[[64949,64949],"mapped",[1604,1581,1605]],[[64950,64950],"mapped",[1593,1605,1610]],[[64951,64951],"mapped",[1603,1605,1610]],[[64952,64952],"mapped",[1606,1580,1581]],[[64953,64953],"mapped",[1605,1582,1610]],[[64954,64954],"mapped",[1604,1580,1605]],[[64955,64955],"mapped",[1603,1605,1605]],[[64956,64956],"mapped",[1604,1580,1605]],[[64957,64957],"mapped",[1606,1580,1581]],[[64958,64958],"mapped",[1580,1581,1610]],[[64959,64959],"mapped",[1581,1580,1610]],[[64960,64960],"mapped",[1605,1580,1610]],[[64961,64961],"mapped",[1601,1605,1610]],[[64962,64962],"mapped",[1576,1581,1610]],[[64963,64963],"mapped",[1603,1605,1605]],[[64964,64964],"mapped",[1593,1580,1605]],[[64965,64965],"mapped",[1589,1605,1605]],[[64966,64966],"mapped",[1587,1582,1610]],[[64967,64967],"mapped",[1606,1580,1610]],[[64968,64975],"disallowed"],[[64976,65007],"disallowed"],[[65008,65008],"mapped",[1589,1604,1746]],[[65009,65009],"mapped",[1602,1604,1746]],[[65010,65010],"mapped",[1575,1604,1604,1607]],[[65011,65011],"mapped",[1575,1603,1576,1585]],[[65012,65012],"mapped",[1605,1581,1605,1583]],[[65013,65013],"mapped",[1589,1604,1593,1605]],[[65014,65014],"mapped",[1585,1587,1608,1604]],[[65015,65015],"mapped",[1593,1604,1610,1607]],[[65016,65016],"mapped",[1608,1587,1604,1605]],[[65017,65017],"mapped",[1589,1604,1609]],[[65018,65018],"disallowed_STD3_mapped",[1589,1604,1609,32,1575,1604,1604,1607,32,1593,1604,1610,1607,32,1608,1587,1604,1605]],[[65019,65019],"disallowed_STD3_mapped",[1580,1604,32,1580,1604,1575,1604,1607]],[[65020,65020],"mapped",[1585,1740,1575,1604]],[[65021,65021],"valid",[],"NV8"],[[65022,65023],"disallowed"],[[65024,65039],"ignored"],[[65040,65040],"disallowed_STD3_mapped",[44]],[[65041,65041],"mapped",[12289]],[[65042,65042],"disallowed"],[[65043,65043],"disallowed_STD3_mapped",[58]],[[65044,65044],"disallowed_STD3_mapped",[59]],[[65045,65045],"disallowed_STD3_mapped",[33]],[[65046,65046],"disallowed_STD3_mapped",[63]],[[65047,65047],"mapped",[12310]],[[65048,65048],"mapped",[12311]],[[65049,65049],"disallowed"],[[65050,65055],"disallowed"],[[65056,65059],"valid"],[[65060,65062],"valid"],[[65063,65069],"valid"],[[65070,65071],"valid"],[[65072,65072],"disallowed"],[[65073,65073],"mapped",[8212]],[[65074,65074],"mapped",[8211]],[[65075,65076],"disallowed_STD3_mapped",[95]],[[65077,65077],"disallowed_STD3_mapped",[40]],[[65078,65078],"disallowed_STD3_mapped",[41]],[[65079,65079],"disallowed_STD3_mapped",[123]],[[65080,65080],"disallowed_STD3_mapped",[125]],[[65081,65081],"mapped",[12308]],[[65082,65082],"mapped",[12309]],[[6508
3,65083],"mapped",[12304]],[[65084,65084],"mapped",[12305]],[[65085,65085],"mapped",[12298]],[[65086,65086],"mapped",[12299]],[[65087,65087],"mapped",[12296]],[[65088,65088],"mapped",[12297]],[[65089,65089],"mapped",[12300]],[[65090,65090],"mapped",[12301]],[[65091,65091],"mapped",[12302]],[[65092,65092],"mapped",[12303]],[[65093,65094],"valid",[],"NV8"],[[65095,65095],"disallowed_STD3_mapped",[91]],[[65096,65096],"disallowed_STD3_mapped",[93]],[[65097,65100],"disallowed_STD3_mapped",[32,773]],[[65101,65103],"disallowed_STD3_mapped",[95]],[[65104,65104],"disallowed_STD3_mapped",[44]],[[65105,65105],"mapped",[12289]],[[65106,65106],"disallowed"],[[65107,65107],"disallowed"],[[65108,65108],"disallowed_STD3_mapped",[59]],[[65109,65109],"disallowed_STD3_mapped",[58]],[[65110,65110],"disallowed_STD3_mapped",[63]],[[65111,65111],"disallowed_STD3_mapped",[33]],[[65112,65112],"mapped",[8212]],[[65113,65113],"disallowed_STD3_mapped",[40]],[[65114,65114],"disallowed_STD3_mapped",[41]],[[65115,65115],"disallowed_STD3_mapped",[123]],[[65116,65116],"disallowed_STD3_mapped",[125]],[[65117,65117],"mapped",[12308]],[[65118,65118],"mapped",[12309]],[[65119,65119],"disallowed_STD3_mapped",[35]],[[65120,65120],"disallowed_STD3_mapped",[38]],[[65121,65121],"disallowed_STD3_mapped",[42]],[[65122,65122],"disallowed_STD3_mapped",[43]],[[65123,65123],"mapped",[45]],[[65124,65124],"disallowed_STD3_mapped",[60]],[[65125,65125],"disallowed_STD3_mapped",[62]],[[65126,65126],"disallowed_STD3_mapped",[61]],[[65127,65127],"disallowed"],[[65128,65128],"disallowed_STD3_mapped",[92]],[[65129,65129],"disallowed_STD3_mapped",[36]],[[65130,65130],"disallowed_STD3_mapped",[37]],[[65131,65131],"disallowed_STD3_mapped",[64]],[[65132,65135],"disallowed"],[[65136,65136],"disallowed_STD3_mapped",[32,1611]],[[65137,65137],"mapped",[1600,1611]],[[65138,65138],"disallowed_STD3_mapped",[32,1612]],[[65139,65139],"valid"],[[65140,65140],"disallowed_STD3_mapped",[32,1613]],[[65141,65141],"disallowed"],[[65142,65142],"disallowed_STD3_mapped",[32,1614]],[[65143,65143],"mapped",[1600,1614]],[[65144,65144],"disallowed_STD3_mapped",[32,1615]],[[65145,65145],"mapped",[1600,1615]],[[65146,65146],"disallowed_STD3_mapped",[32,1616]],[[65147,65147],"mapped",[1600,1616]],[[65148,65148],"disallowed_STD3_mapped",[32,1617]],[[65149,65149],"mapped",[1600,1617]],[[65150,65150],"disallowed_STD3_mapped",[32,1618]],[[65151,65151],"mapped",[1600,1618]],[[65152,65152],"mapped",[1569]],[[65153,65154],"mapped",[1570]],[[65155,65156],"mapped",[1571]],[[65157,65158],"mapped",[1572]],[[65159,65160],"mapped",[1573]],[[65161,65164],"mapped",[1574]],[[65165,65166],"mapped",[1575]],[[65167,65170],"mapped",[1576]],[[65171,65172],"mapped",[1577]],[[65173,65176],"mapped",[1578]],[[65177,65180],"mapped",[1579]],[[65181,65184],"mapped",[1580]],[[65185,65188],"mapped",[1581]],[[65189,65192],"mapped",[1582]],[[65193,65194],"mapped",[1583]],[[65195,65196],"mapped",[1584]],[[65197,65198],"mapped",[1585]],[[65199,65200],"mapped",[1586]],[[65201,65204],"mapped",[1587]],[[65205,65208],"mapped",[1588]],[[65209,65212],"mapped",[1589]],[[65213,65216],"mapped",[1590]],[[65217,65220],"mapped",[1591]],[[65221,65224],"mapped",[1592]],[[65225,65228],"mapped",[1593]],[[65229,65232],"mapped",[1594]],[[65233,65236],"mapped",[1601]],[[65237,65240],"mapped",[1602]],[[65241,65244],"mapped",[1603]],[[65245,65248],"mapped",[1604]],[[65249,65252],"mapped",[1605]],[[65253,65256],"mapped",[1606]],[[65257,65260],"mapped",[1607]],[[65261,65262],"mapped",[1608]],[[65263,65264],"mapped",[1609]],[[65265,
65268],"mapped",[1610]],[[65269,65270],"mapped",[1604,1570]],[[65271,65272],"mapped",[1604,1571]],[[65273,65274],"mapped",[1604,1573]],[[65275,65276],"mapped",[1604,1575]],[[65277,65278],"disallowed"],[[65279,65279],"ignored"],[[65280,65280],"disallowed"],[[65281,65281],"disallowed_STD3_mapped",[33]],[[65282,65282],"disallowed_STD3_mapped",[34]],[[65283,65283],"disallowed_STD3_mapped",[35]],[[65284,65284],"disallowed_STD3_mapped",[36]],[[65285,65285],"disallowed_STD3_mapped",[37]],[[65286,65286],"disallowed_STD3_mapped",[38]],[[65287,65287],"disallowed_STD3_mapped",[39]],[[65288,65288],"disallowed_STD3_mapped",[40]],[[65289,65289],"disallowed_STD3_mapped",[41]],[[65290,65290],"disallowed_STD3_mapped",[42]],[[65291,65291],"disallowed_STD3_mapped",[43]],[[65292,65292],"disallowed_STD3_mapped",[44]],[[65293,65293],"mapped",[45]],[[65294,65294],"mapped",[46]],[[65295,65295],"disallowed_STD3_mapped",[47]],[[65296,65296],"mapped",[48]],[[65297,65297],"mapped",[49]],[[65298,65298],"mapped",[50]],[[65299,65299],"mapped",[51]],[[65300,65300],"mapped",[52]],[[65301,65301],"mapped",[53]],[[65302,65302],"mapped",[54]],[[65303,65303],"mapped",[55]],[[65304,65304],"mapped",[56]],[[65305,65305],"mapped",[57]],[[65306,65306],"disallowed_STD3_mapped",[58]],[[65307,65307],"disallowed_STD3_mapped",[59]],[[65308,65308],"disallowed_STD3_mapped",[60]],[[65309,65309],"disallowed_STD3_mapped",[61]],[[65310,65310],"disallowed_STD3_mapped",[62]],[[65311,65311],"disallowed_STD3_mapped",[63]],[[65312,65312],"disallowed_STD3_mapped",[64]],[[65313,65313],"mapped",[97]],[[65314,65314],"mapped",[98]],[[65315,65315],"mapped",[99]],[[65316,65316],"mapped",[100]],[[65317,65317],"mapped",[101]],[[65318,65318],"mapped",[102]],[[65319,65319],"mapped",[103]],[[65320,65320],"mapped",[104]],[[65321,65321],"mapped",[105]],[[65322,65322],"mapped",[106]],[[65323,65323],"mapped",[107]],[[65324,65324],"mapped",[108]],[[65325,65325],"mapped",[109]],[[65326,65326],"mapped",[110]],[[65327,65327],"mapped",[111]],[[65328,65328],"mapped",[112]],[[65329,65329],"mapped",[113]],[[65330,65330],"mapped",[114]],[[65331,65331],"mapped",[115]],[[65332,65332],"mapped",[116]],[[65333,65333],"mapped",[117]],[[65334,65334],"mapped",[118]],[[65335,65335],"mapped",[119]],[[65336,65336],"mapped",[120]],[[65337,65337],"mapped",[121]],[[65338,65338],"mapped",[122]],[[65339,65339],"disallowed_STD3_mapped",[91]],[[65340,65340],"disallowed_STD3_mapped",[92]],[[65341,65341],"disallowed_STD3_mapped",[93]],[[65342,65342],"disallowed_STD3_mapped",[94]],[[65343,65343],"disallowed_STD3_mapped",[95]],[[65344,65344],"disallowed_STD3_mapped",[96]],[[65345,65345],"mapped",[97]],[[65346,65346],"mapped",[98]],[[65347,65347],"mapped",[99]],[[65348,65348],"mapped",[100]],[[65349,65349],"mapped",[101]],[[65350,65350],"mapped",[102]],[[65351,65351],"mapped",[103]],[[65352,65352],"mapped",[104]],[[65353,65353],"mapped",[105]],[[65354,65354],"mapped",[106]],[[65355,65355],"mapped",[107]],[[65356,65356],"mapped",[108]],[[65357,65357],"mapped",[109]],[[65358,65358],"mapped",[110]],[[65359,65359],"mapped",[111]],[[65360,65360],"mapped",[112]],[[65361,65361],"mapped",[113]],[[65362,65362],"mapped",[114]],[[65363,65363],"mapped",[115]],[[65364,65364],"mapped",[116]],[[65365,65365],"mapped",[117]],[[65366,65366],"mapped",[118]],[[65367,65367],"mapped",[119]],[[65368,65368],"mapped",[120]],[[65369,65369],"mapped",[121]],[[65370,65370],"mapped",[122]],[[65371,65371],"disallowed_STD3_mapped",[123]],[[65372,65372],"disallowed_STD3_mapped",[124]],[[65373,65373],"disallowed_STD3_mapped",[12
5]],[[65374,65374],"disallowed_STD3_mapped",[126]],[[65375,65375],"mapped",[10629]],[[65376,65376],"mapped",[10630]],[[65377,65377],"mapped",[46]],[[65378,65378],"mapped",[12300]],[[65379,65379],"mapped",[12301]],[[65380,65380],"mapped",[12289]],[[65381,65381],"mapped",[12539]],[[65382,65382],"mapped",[12530]],[[65383,65383],"mapped",[12449]],[[65384,65384],"mapped",[12451]],[[65385,65385],"mapped",[12453]],[[65386,65386],"mapped",[12455]],[[65387,65387],"mapped",[12457]],[[65388,65388],"mapped",[12515]],[[65389,65389],"mapped",[12517]],[[65390,65390],"mapped",[12519]],[[65391,65391],"mapped",[12483]],[[65392,65392],"mapped",[12540]],[[65393,65393],"mapped",[12450]],[[65394,65394],"mapped",[12452]],[[65395,65395],"mapped",[12454]],[[65396,65396],"mapped",[12456]],[[65397,65397],"mapped",[12458]],[[65398,65398],"mapped",[12459]],[[65399,65399],"mapped",[12461]],[[65400,65400],"mapped",[12463]],[[65401,65401],"mapped",[12465]],[[65402,65402],"mapped",[12467]],[[65403,65403],"mapped",[12469]],[[65404,65404],"mapped",[12471]],[[65405,65405],"mapped",[12473]],[[65406,65406],"mapped",[12475]],[[65407,65407],"mapped",[12477]],[[65408,65408],"mapped",[12479]],[[65409,65409],"mapped",[12481]],[[65410,65410],"mapped",[12484]],[[65411,65411],"mapped",[12486]],[[65412,65412],"mapped",[12488]],[[65413,65413],"mapped",[12490]],[[65414,65414],"mapped",[12491]],[[65415,65415],"mapped",[12492]],[[65416,65416],"mapped",[12493]],[[65417,65417],"mapped",[12494]],[[65418,65418],"mapped",[12495]],[[65419,65419],"mapped",[12498]],[[65420,65420],"mapped",[12501]],[[65421,65421],"mapped",[12504]],[[65422,65422],"mapped",[12507]],[[65423,65423],"mapped",[12510]],[[65424,65424],"mapped",[12511]],[[65425,65425],"mapped",[12512]],[[65426,65426],"mapped",[12513]],[[65427,65427],"mapped",[12514]],[[65428,65428],"mapped",[12516]],[[65429,65429],"mapped",[12518]],[[65430,65430],"mapped",[12520]],[[65431,65431],"mapped",[12521]],[[65432,65432],"mapped",[12522]],[[65433,65433],"mapped",[12523]],[[65434,65434],"mapped",[12524]],[[65435,65435],"mapped",[12525]],[[65436,65436],"mapped",[12527]],[[65437,65437],"mapped",[12531]],[[65438,65438],"mapped",[12441]],[[65439,65439],"mapped",[12442]],[[65440,65440],"disallowed"],[[65441,65441],"mapped",[4352]],[[65442,65442],"mapped",[4353]],[[65443,65443],"mapped",[4522]],[[65444,65444],"mapped",[4354]],[[65445,65445],"mapped",[4524]],[[65446,65446],"mapped",[4525]],[[65447,65447],"mapped",[4355]],[[65448,65448],"mapped",[4356]],[[65449,65449],"mapped",[4357]],[[65450,65450],"mapped",[4528]],[[65451,65451],"mapped",[4529]],[[65452,65452],"mapped",[4530]],[[65453,65453],"mapped",[4531]],[[65454,65454],"mapped",[4532]],[[65455,65455],"mapped",[4533]],[[65456,65456],"mapped",[4378]],[[65457,65457],"mapped",[4358]],[[65458,65458],"mapped",[4359]],[[65459,65459],"mapped",[4360]],[[65460,65460],"mapped",[4385]],[[65461,65461],"mapped",[4361]],[[65462,65462],"mapped",[4362]],[[65463,65463],"mapped",[4363]],[[65464,65464],"mapped",[4364]],[[65465,65465],"mapped",[4365]],[[65466,65466],"mapped",[4366]],[[65467,65467],"mapped",[4367]],[[65468,65468],"mapped",[4368]],[[65469,65469],"mapped",[4369]],[[65470,65470],"mapped",[4370]],[[65471,65473],"disallowed"],[[65474,65474],"mapped",[4449]],[[65475,65475],"mapped",[4450]],[[65476,65476],"mapped",[4451]],[[65477,65477],"mapped",[4452]],[[65478,65478],"mapped",[4453]],[[65479,65479],"mapped",[4454]],[[65480,65481],"disallowed"],[[65482,65482],"mapped",[4455]],[[65483,65483],"mapped",[4456]],[[65484,65484],"mapped",[4457]],[[65485,65485],"mapped",[44
58]],[[65486,65486],"mapped",[4459]],[[65487,65487],"mapped",[4460]],[[65488,65489],"disallowed"],[[65490,65490],"mapped",[4461]],[[65491,65491],"mapped",[4462]],[[65492,65492],"mapped",[4463]],[[65493,65493],"mapped",[4464]],[[65494,65494],"mapped",[4465]],[[65495,65495],"mapped",[4466]],[[65496,65497],"disallowed"],[[65498,65498],"mapped",[4467]],[[65499,65499],"mapped",[4468]],[[65500,65500],"mapped",[4469]],[[65501,65503],"disallowed"],[[65504,65504],"mapped",[162]],[[65505,65505],"mapped",[163]],[[65506,65506],"mapped",[172]],[[65507,65507],"disallowed_STD3_mapped",[32,772]],[[65508,65508],"mapped",[166]],[[65509,65509],"mapped",[165]],[[65510,65510],"mapped",[8361]],[[65511,65511],"disallowed"],[[65512,65512],"mapped",[9474]],[[65513,65513],"mapped",[8592]],[[65514,65514],"mapped",[8593]],[[65515,65515],"mapped",[8594]],[[65516,65516],"mapped",[8595]],[[65517,65517],"mapped",[9632]],[[65518,65518],"mapped",[9675]],[[65519,65528],"disallowed"],[[65529,65531],"disallowed"],[[65532,65532],"disallowed"],[[65533,65533],"disallowed"],[[65534,65535],"disallowed"],[[65536,65547],"valid"],[[65548,65548],"disallowed"],[[65549,65574],"valid"],[[65575,65575],"disallowed"],[[65576,65594],"valid"],[[65595,65595],"disallowed"],[[65596,65597],"valid"],[[65598,65598],"disallowed"],[[65599,65613],"valid"],[[65614,65615],"disallowed"],[[65616,65629],"valid"],[[65630,65663],"disallowed"],[[65664,65786],"valid"],[[65787,65791],"disallowed"],[[65792,65794],"valid",[],"NV8"],[[65795,65798],"disallowed"],[[65799,65843],"valid",[],"NV8"],[[65844,65846],"disallowed"],[[65847,65855],"valid",[],"NV8"],[[65856,65930],"valid",[],"NV8"],[[65931,65932],"valid",[],"NV8"],[[65933,65935],"disallowed"],[[65936,65947],"valid",[],"NV8"],[[65948,65951],"disallowed"],[[65952,65952],"valid",[],"NV8"],[[65953,65999],"disallowed"],[[66000,66044],"valid",[],"NV8"],[[66045,66045],"valid"],[[66046,66175],"disallowed"],[[66176,66204],"valid"],[[66205,66207],"disallowed"],[[66208,66256],"valid"],[[66257,66271],"disallowed"],[[66272,66272],"valid"],[[66273,66299],"valid",[],"NV8"],[[66300,66303],"disallowed"],[[66304,66334],"valid"],[[66335,66335],"valid"],[[66336,66339],"valid",[],"NV8"],[[66340,66351],"disallowed"],[[66352,66368],"valid"],[[66369,66369],"valid",[],"NV8"],[[66370,66377],"valid"],[[66378,66378],"valid",[],"NV8"],[[66379,66383],"disallowed"],[[66384,66426],"valid"],[[66427,66431],"disallowed"],[[66432,66461],"valid"],[[66462,66462],"disallowed"],[[66463,66463],"valid",[],"NV8"],[[66464,66499],"valid"],[[66500,66503],"disallowed"],[[66504,66511],"valid"],[[66512,66517],"valid",[],"NV8"],[[66518,66559],"disallowed"],[[66560,66560],"mapped",[66600]],[[66561,66561],"mapped",[66601]],[[66562,66562],"mapped",[66602]],[[66563,66563],"mapped",[66603]],[[66564,66564],"mapped",[66604]],[[66565,66565],"mapped",[66605]],[[66566,66566],"mapped",[66606]],[[66567,66567],"mapped",[66607]],[[66568,66568],"mapped",[66608]],[[66569,66569],"mapped",[66609]],[[66570,66570],"mapped",[66610]],[[66571,66571],"mapped",[66611]],[[66572,66572],"mapped",[66612]],[[66573,66573],"mapped",[66613]],[[66574,66574],"mapped",[66614]],[[66575,66575],"mapped",[66615]],[[66576,66576],"mapped",[66616]],[[66577,66577],"mapped",[66617]],[[66578,66578],"mapped",[66618]],[[66579,66579],"mapped",[66619]],[[66580,66580],"mapped",[66620]],[[66581,66581],"mapped",[66621]],[[66582,66582],"mapped",[66622]],[[66583,66583],"mapped",[66623]],[[66584,66584],"mapped",[66624]],[[66585,66585],"mapped",[66625]],[[66586,66586],"mapped",[66626]],[[66587,66587],"mapped",[6662
7]],[[66588,66588],"mapped",[66628]],[[66589,66589],"mapped",[66629]],[[66590,66590],"mapped",[66630]],[[66591,66591],"mapped",[66631]],[[66592,66592],"mapped",[66632]],[[66593,66593],"mapped",[66633]],[[66594,66594],"mapped",[66634]],[[66595,66595],"mapped",[66635]],[[66596,66596],"mapped",[66636]],[[66597,66597],"mapped",[66637]],[[66598,66598],"mapped",[66638]],[[66599,66599],"mapped",[66639]],[[66600,66637],"valid"],[[66638,66717],"valid"],[[66718,66719],"disallowed"],[[66720,66729],"valid"],[[66730,66815],"disallowed"],[[66816,66855],"valid"],[[66856,66863],"disallowed"],[[66864,66915],"valid"],[[66916,66926],"disallowed"],[[66927,66927],"valid",[],"NV8"],[[66928,67071],"disallowed"],[[67072,67382],"valid"],[[67383,67391],"disallowed"],[[67392,67413],"valid"],[[67414,67423],"disallowed"],[[67424,67431],"valid"],[[67432,67583],"disallowed"],[[67584,67589],"valid"],[[67590,67591],"disallowed"],[[67592,67592],"valid"],[[67593,67593],"disallowed"],[[67594,67637],"valid"],[[67638,67638],"disallowed"],[[67639,67640],"valid"],[[67641,67643],"disallowed"],[[67644,67644],"valid"],[[67645,67646],"disallowed"],[[67647,67647],"valid"],[[67648,67669],"valid"],[[67670,67670],"disallowed"],[[67671,67679],"valid",[],"NV8"],[[67680,67702],"valid"],[[67703,67711],"valid",[],"NV8"],[[67712,67742],"valid"],[[67743,67750],"disallowed"],[[67751,67759],"valid",[],"NV8"],[[67760,67807],"disallowed"],[[67808,67826],"valid"],[[67827,67827],"disallowed"],[[67828,67829],"valid"],[[67830,67834],"disallowed"],[[67835,67839],"valid",[],"NV8"],[[67840,67861],"valid"],[[67862,67865],"valid",[],"NV8"],[[67866,67867],"valid",[],"NV8"],[[67868,67870],"disallowed"],[[67871,67871],"valid",[],"NV8"],[[67872,67897],"valid"],[[67898,67902],"disallowed"],[[67903,67903],"valid",[],"NV8"],[[67904,67967],"disallowed"],[[67968,68023],"valid"],[[68024,68027],"disallowed"],[[68028,68029],"valid",[],"NV8"],[[68030,68031],"valid"],[[68032,68047],"valid",[],"NV8"],[[68048,68049],"disallowed"],[[68050,68095],"valid",[],"NV8"],[[68096,68099],"valid"],[[68100,68100],"disallowed"],[[68101,68102],"valid"],[[68103,68107],"disallowed"],[[68108,68115],"valid"],[[68116,68116],"disallowed"],[[68117,68119],"valid"],[[68120,68120],"disallowed"],[[68121,68147],"valid"],[[68148,68151],"disallowed"],[[68152,68154],"valid"],[[68155,68158],"disallowed"],[[68159,68159],"valid"],[[68160,68167],"valid",[],"NV8"],[[68168,68175],"disallowed"],[[68176,68184],"valid",[],"NV8"],[[68185,68191],"disallowed"],[[68192,68220],"valid"],[[68221,68223],"valid",[],"NV8"],[[68224,68252],"valid"],[[68253,68255],"valid",[],"NV8"],[[68256,68287],"disallowed"],[[68288,68295],"valid"],[[68296,68296],"valid",[],"NV8"],[[68297,68326],"valid"],[[68327,68330],"disallowed"],[[68331,68342],"valid",[],"NV8"],[[68343,68351],"disallowed"],[[68352,68405],"valid"],[[68406,68408],"disallowed"],[[68409,68415],"valid",[],"NV8"],[[68416,68437],"valid"],[[68438,68439],"disallowed"],[[68440,68447],"valid",[],"NV8"],[[68448,68466],"valid"],[[68467,68471],"disallowed"],[[68472,68479],"valid",[],"NV8"],[[68480,68497],"valid"],[[68498,68504],"disallowed"],[[68505,68508],"valid",[],"NV8"],[[68509,68520],"disallowed"],[[68521,68527],"valid",[],"NV8"],[[68528,68607],"disallowed"],[[68608,68680],"valid"],[[68681,68735],"disallowed"],[[68736,68736],"mapped",[68800]],[[68737,68737],"mapped",[68801]],[[68738,68738],"mapped",[68802]],[[68739,68739],"mapped",[68803]],[[68740,68740],"mapped",[68804]],[[68741,68741],"mapped",[68805]],[[68742,68742],"mapped",[68806]],[[68743,68743],"mapped",[68807]],[[68744
,68744],"mapped",[68808]],[[68745,68745],"mapped",[68809]],[[68746,68746],"mapped",[68810]],[[68747,68747],"mapped",[68811]],[[68748,68748],"mapped",[68812]],[[68749,68749],"mapped",[68813]],[[68750,68750],"mapped",[68814]],[[68751,68751],"mapped",[68815]],[[68752,68752],"mapped",[68816]],[[68753,68753],"mapped",[68817]],[[68754,68754],"mapped",[68818]],[[68755,68755],"mapped",[68819]],[[68756,68756],"mapped",[68820]],[[68757,68757],"mapped",[68821]],[[68758,68758],"mapped",[68822]],[[68759,68759],"mapped",[68823]],[[68760,68760],"mapped",[68824]],[[68761,68761],"mapped",[68825]],[[68762,68762],"mapped",[68826]],[[68763,68763],"mapped",[68827]],[[68764,68764],"mapped",[68828]],[[68765,68765],"mapped",[68829]],[[68766,68766],"mapped",[68830]],[[68767,68767],"mapped",[68831]],[[68768,68768],"mapped",[68832]],[[68769,68769],"mapped",[68833]],[[68770,68770],"mapped",[68834]],[[68771,68771],"mapped",[68835]],[[68772,68772],"mapped",[68836]],[[68773,68773],"mapped",[68837]],[[68774,68774],"mapped",[68838]],[[68775,68775],"mapped",[68839]],[[68776,68776],"mapped",[68840]],[[68777,68777],"mapped",[68841]],[[68778,68778],"mapped",[68842]],[[68779,68779],"mapped",[68843]],[[68780,68780],"mapped",[68844]],[[68781,68781],"mapped",[68845]],[[68782,68782],"mapped",[68846]],[[68783,68783],"mapped",[68847]],[[68784,68784],"mapped",[68848]],[[68785,68785],"mapped",[68849]],[[68786,68786],"mapped",[68850]],[[68787,68799],"disallowed"],[[68800,68850],"valid"],[[68851,68857],"disallowed"],[[68858,68863],"valid",[],"NV8"],[[68864,69215],"disallowed"],[[69216,69246],"valid",[],"NV8"],[[69247,69631],"disallowed"],[[69632,69702],"valid"],[[69703,69709],"valid",[],"NV8"],[[69710,69713],"disallowed"],[[69714,69733],"valid",[],"NV8"],[[69734,69743],"valid"],[[69744,69758],"disallowed"],[[69759,69759],"valid"],[[69760,69818],"valid"],[[69819,69820],"valid",[],"NV8"],[[69821,69821],"disallowed"],[[69822,69825],"valid",[],"NV8"],[[69826,69839],"disallowed"],[[69840,69864],"valid"],[[69865,69871],"disallowed"],[[69872,69881],"valid"],[[69882,69887],"disallowed"],[[69888,69940],"valid"],[[69941,69941],"disallowed"],[[69942,69951],"valid"],[[69952,69955],"valid",[],"NV8"],[[69956,69967],"disallowed"],[[69968,70003],"valid"],[[70004,70005],"valid",[],"NV8"],[[70006,70006],"valid"],[[70007,70015],"disallowed"],[[70016,70084],"valid"],[[70085,70088],"valid",[],"NV8"],[[70089,70089],"valid",[],"NV8"],[[70090,70092],"valid"],[[70093,70093],"valid",[],"NV8"],[[70094,70095],"disallowed"],[[70096,70105],"valid"],[[70106,70106],"valid"],[[70107,70107],"valid",[],"NV8"],[[70108,70108],"valid"],[[70109,70111],"valid",[],"NV8"],[[70112,70112],"disallowed"],[[70113,70132],"valid",[],"NV8"],[[70133,70143],"disallowed"],[[70144,70161],"valid"],[[70162,70162],"disallowed"],[[70163,70199],"valid"],[[70200,70205],"valid",[],"NV8"],[[70206,70271],"disallowed"],[[70272,70278],"valid"],[[70279,70279],"disallowed"],[[70280,70280],"valid"],[[70281,70281],"disallowed"],[[70282,70285],"valid"],[[70286,70286],"disallowed"],[[70287,70301],"valid"],[[70302,70302],"disallowed"],[[70303,70312],"valid"],[[70313,70313],"valid",[],"NV8"],[[70314,70319],"disallowed"],[[70320,70378],"valid"],[[70379,70383],"disallowed"],[[70384,70393],"valid"],[[70394,70399],"disallowed"],[[70400,70400],"valid"],[[70401,70403],"valid"],[[70404,70404],"disallowed"],[[70405,70412],"valid"],[[70413,70414],"disallowed"],[[70415,70416],"valid"],[[70417,70418],"disallowed"],[[70419,70440],"valid"],[[70441,70441],"disallowed"],[[70442,70448],"valid"],[[70449,70449],"disallowed"],[
[70450,70451],"valid"],[[70452,70452],"disallowed"],[[70453,70457],"valid"],[[70458,70459],"disallowed"],[[70460,70468],"valid"],[[70469,70470],"disallowed"],[[70471,70472],"valid"],[[70473,70474],"disallowed"],[[70475,70477],"valid"],[[70478,70479],"disallowed"],[[70480,70480],"valid"],[[70481,70486],"disallowed"],[[70487,70487],"valid"],[[70488,70492],"disallowed"],[[70493,70499],"valid"],[[70500,70501],"disallowed"],[[70502,70508],"valid"],[[70509,70511],"disallowed"],[[70512,70516],"valid"],[[70517,70783],"disallowed"],[[70784,70853],"valid"],[[70854,70854],"valid",[],"NV8"],[[70855,70855],"valid"],[[70856,70863],"disallowed"],[[70864,70873],"valid"],[[70874,71039],"disallowed"],[[71040,71093],"valid"],[[71094,71095],"disallowed"],[[71096,71104],"valid"],[[71105,71113],"valid",[],"NV8"],[[71114,71127],"valid",[],"NV8"],[[71128,71133],"valid"],[[71134,71167],"disallowed"],[[71168,71232],"valid"],[[71233,71235],"valid",[],"NV8"],[[71236,71236],"valid"],[[71237,71247],"disallowed"],[[71248,71257],"valid"],[[71258,71295],"disallowed"],[[71296,71351],"valid"],[[71352,71359],"disallowed"],[[71360,71369],"valid"],[[71370,71423],"disallowed"],[[71424,71449],"valid"],[[71450,71452],"disallowed"],[[71453,71467],"valid"],[[71468,71471],"disallowed"],[[71472,71481],"valid"],[[71482,71487],"valid",[],"NV8"],[[71488,71839],"disallowed"],[[71840,71840],"mapped",[71872]],[[71841,71841],"mapped",[71873]],[[71842,71842],"mapped",[71874]],[[71843,71843],"mapped",[71875]],[[71844,71844],"mapped",[71876]],[[71845,71845],"mapped",[71877]],[[71846,71846],"mapped",[71878]],[[71847,71847],"mapped",[71879]],[[71848,71848],"mapped",[71880]],[[71849,71849],"mapped",[71881]],[[71850,71850],"mapped",[71882]],[[71851,71851],"mapped",[71883]],[[71852,71852],"mapped",[71884]],[[71853,71853],"mapped",[71885]],[[71854,71854],"mapped",[71886]],[[71855,71855],"mapped",[71887]],[[71856,71856],"mapped",[71888]],[[71857,71857],"mapped",[71889]],[[71858,71858],"mapped",[71890]],[[71859,71859],"mapped",[71891]],[[71860,71860],"mapped",[71892]],[[71861,71861],"mapped",[71893]],[[71862,71862],"mapped",[71894]],[[71863,71863],"mapped",[71895]],[[71864,71864],"mapped",[71896]],[[71865,71865],"mapped",[71897]],[[71866,71866],"mapped",[71898]],[[71867,71867],"mapped",[71899]],[[71868,71868],"mapped",[71900]],[[71869,71869],"mapped",[71901]],[[71870,71870],"mapped",[71902]],[[71871,71871],"mapped",[71903]],[[71872,71913],"valid"],[[71914,71922],"valid",[],"NV8"],[[71923,71934],"disallowed"],[[71935,71935],"valid"],[[71936,72383],"disallowed"],[[72384,72440],"valid"],[[72441,73727],"disallowed"],[[73728,74606],"valid"],[[74607,74648],"valid"],[[74649,74649],"valid"],[[74650,74751],"disallowed"],[[74752,74850],"valid",[],"NV8"],[[74851,74862],"valid",[],"NV8"],[[74863,74863],"disallowed"],[[74864,74867],"valid",[],"NV8"],[[74868,74868],"valid",[],"NV8"],[[74869,74879],"disallowed"],[[74880,75075],"valid"],[[75076,77823],"disallowed"],[[77824,78894],"valid"],[[78895,82943],"disallowed"],[[82944,83526],"valid"],[[83527,92159],"disallowed"],[[92160,92728],"valid"],[[92729,92735],"disallowed"],[[92736,92766],"valid"],[[92767,92767],"disallowed"],[[92768,92777],"valid"],[[92778,92781],"disallowed"],[[92782,92783],"valid",[],"NV8"],[[92784,92879],"disallowed"],[[92880,92909],"valid"],[[92910,92911],"disallowed"],[[92912,92916],"valid"],[[92917,92917],"valid",[],"NV8"],[[92918,92927],"disallowed"],[[92928,92982],"valid"],[[92983,92991],"valid",[],"NV8"],[[92992,92995],"valid"],[[92996,92997],"valid",[],"NV8"],[[92998,93007],"disallowed"],[[930
08,93017],"valid"],[[93018,93018],"disallowed"],[[93019,93025],"valid",[],"NV8"],[[93026,93026],"disallowed"],[[93027,93047],"valid"],[[93048,93052],"disallowed"],[[93053,93071],"valid"],[[93072,93951],"disallowed"],[[93952,94020],"valid"],[[94021,94031],"disallowed"],[[94032,94078],"valid"],[[94079,94094],"disallowed"],[[94095,94111],"valid"],[[94112,110591],"disallowed"],[[110592,110593],"valid"],[[110594,113663],"disallowed"],[[113664,113770],"valid"],[[113771,113775],"disallowed"],[[113776,113788],"valid"],[[113789,113791],"disallowed"],[[113792,113800],"valid"],[[113801,113807],"disallowed"],[[113808,113817],"valid"],[[113818,113819],"disallowed"],[[113820,113820],"valid",[],"NV8"],[[113821,113822],"valid"],[[113823,113823],"valid",[],"NV8"],[[113824,113827],"ignored"],[[113828,118783],"disallowed"],[[118784,119029],"valid",[],"NV8"],[[119030,119039],"disallowed"],[[119040,119078],"valid",[],"NV8"],[[119079,119080],"disallowed"],[[119081,119081],"valid",[],"NV8"],[[119082,119133],"valid",[],"NV8"],[[119134,119134],"mapped",[119127,119141]],[[119135,119135],"mapped",[119128,119141]],[[119136,119136],"mapped",[119128,119141,119150]],[[119137,119137],"mapped",[119128,119141,119151]],[[119138,119138],"mapped",[119128,119141,119152]],[[119139,119139],"mapped",[119128,119141,119153]],[[119140,119140],"mapped",[119128,119141,119154]],[[119141,119154],"valid",[],"NV8"],[[119155,119162],"disallowed"],[[119163,119226],"valid",[],"NV8"],[[119227,119227],"mapped",[119225,119141]],[[119228,119228],"mapped",[119226,119141]],[[119229,119229],"mapped",[119225,119141,119150]],[[119230,119230],"mapped",[119226,119141,119150]],[[119231,119231],"mapped",[119225,119141,119151]],[[119232,119232],"mapped",[119226,119141,119151]],[[119233,119261],"valid",[],"NV8"],[[119262,119272],"valid",[],"NV8"],[[119273,119295],"disallowed"],[[119296,119365],"valid",[],"NV8"],[[119366,119551],"disallowed"],[[119552,119638],"valid",[],"NV8"],[[119639,119647],"disallowed"],[[119648,119665],"valid",[],"NV8"],[[119666,119807],"disallowed"],[[119808,119808],"mapped",[97]],[[119809,119809],"mapped",[98]],[[119810,119810],"mapped",[99]],[[119811,119811],"mapped",[100]],[[119812,119812],"mapped",[101]],[[119813,119813],"mapped",[102]],[[119814,119814],"mapped",[103]],[[119815,119815],"mapped",[104]],[[119816,119816],"mapped",[105]],[[119817,119817],"mapped",[106]],[[119818,119818],"mapped",[107]],[[119819,119819],"mapped",[108]],[[119820,119820],"mapped",[109]],[[119821,119821],"mapped",[110]],[[119822,119822],"mapped",[111]],[[119823,119823],"mapped",[112]],[[119824,119824],"mapped",[113]],[[119825,119825],"mapped",[114]],[[119826,119826],"mapped",[115]],[[119827,119827],"mapped",[116]],[[119828,119828],"mapped",[117]],[[119829,119829],"mapped",[118]],[[119830,119830],"mapped",[119]],[[119831,119831],"mapped",[120]],[[119832,119832],"mapped",[121]],[[119833,119833],"mapped",[122]],[[119834,119834],"mapped",[97]],[[119835,119835],"mapped",[98]],[[119836,119836],"mapped",[99]],[[119837,119837],"mapped",[100]],[[119838,119838],"mapped",[101]],[[119839,119839],"mapped",[102]],[[119840,119840],"mapped",[103]],[[119841,119841],"mapped",[104]],[[119842,119842],"mapped",[105]],[[119843,119843],"mapped",[106]],[[119844,119844],"mapped",[107]],[[119845,119845],"mapped",[108]],[[119846,119846],"mapped",[109]],[[119847,119847],"mapped",[110]],[[119848,119848],"mapped",[111]],[[119849,119849],"mapped",[112]],[[119850,119850],"mapped",[113]],[[119851,119851],"mapped",[114]],[[119852,119852],"mapped",[115]],[[119853,119853],"mapped",[116]],[[1
19854,119854],"mapped",[117]],[[119855,119855],"mapped",[118]],[[119856,119856],"mapped",[119]],[[119857,119857],"mapped",[120]],[[119858,119858],"mapped",[121]],[[119859,119859],"mapped",[122]],[[119860,119860],"mapped",[97]],[[119861,119861],"mapped",[98]],[[119862,119862],"mapped",[99]],[[119863,119863],"mapped",[100]],[[119864,119864],"mapped",[101]],[[119865,119865],"mapped",[102]],[[119866,119866],"mapped",[103]],[[119867,119867],"mapped",[104]],[[119868,119868],"mapped",[105]],[[119869,119869],"mapped",[106]],[[119870,119870],"mapped",[107]],[[119871,119871],"mapped",[108]],[[119872,119872],"mapped",[109]],[[119873,119873],"mapped",[110]],[[119874,119874],"mapped",[111]],[[119875,119875],"mapped",[112]],[[119876,119876],"mapped",[113]],[[119877,119877],"mapped",[114]],[[119878,119878],"mapped",[115]],[[119879,119879],"mapped",[116]],[[119880,119880],"mapped",[117]],[[119881,119881],"mapped",[118]],[[119882,119882],"mapped",[119]],[[119883,119883],"mapped",[120]],[[119884,119884],"mapped",[121]],[[119885,119885],"mapped",[122]],[[119886,119886],"mapped",[97]],[[119887,119887],"mapped",[98]],[[119888,119888],"mapped",[99]],[[119889,119889],"mapped",[100]],[[119890,119890],"mapped",[101]],[[119891,119891],"mapped",[102]],[[119892,119892],"mapped",[103]],[[119893,119893],"disallowed"],[[119894,119894],"mapped",[105]],[[119895,119895],"mapped",[106]],[[119896,119896],"mapped",[107]],[[119897,119897],"mapped",[108]],[[119898,119898],"mapped",[109]],[[119899,119899],"mapped",[110]],[[119900,119900],"mapped",[111]],[[119901,119901],"mapped",[112]],[[119902,119902],"mapped",[113]],[[119903,119903],"mapped",[114]],[[119904,119904],"mapped",[115]],[[119905,119905],"mapped",[116]],[[119906,119906],"mapped",[117]],[[119907,119907],"mapped",[118]],[[119908,119908],"mapped",[119]],[[119909,119909],"mapped",[120]],[[119910,119910],"mapped",[121]],[[119911,119911],"mapped",[122]],[[119912,119912],"mapped",[97]],[[119913,119913],"mapped",[98]],[[119914,119914],"mapped",[99]],[[119915,119915],"mapped",[100]],[[119916,119916],"mapped",[101]],[[119917,119917],"mapped",[102]],[[119918,119918],"mapped",[103]],[[119919,119919],"mapped",[104]],[[119920,119920],"mapped",[105]],[[119921,119921],"mapped",[106]],[[119922,119922],"mapped",[107]],[[119923,119923],"mapped",[108]],[[119924,119924],"mapped",[109]],[[119925,119925],"mapped",[110]],[[119926,119926],"mapped",[111]],[[119927,119927],"mapped",[112]],[[119928,119928],"mapped",[113]],[[119929,119929],"mapped",[114]],[[119930,119930],"mapped",[115]],[[119931,119931],"mapped",[116]],[[119932,119932],"mapped",[117]],[[119933,119933],"mapped",[118]],[[119934,119934],"mapped",[119]],[[119935,119935],"mapped",[120]],[[119936,119936],"mapped",[121]],[[119937,119937],"mapped",[122]],[[119938,119938],"mapped",[97]],[[119939,119939],"mapped",[98]],[[119940,119940],"mapped",[99]],[[119941,119941],"mapped",[100]],[[119942,119942],"mapped",[101]],[[119943,119943],"mapped",[102]],[[119944,119944],"mapped",[103]],[[119945,119945],"mapped",[104]],[[119946,119946],"mapped",[105]],[[119947,119947],"mapped",[106]],[[119948,119948],"mapped",[107]],[[119949,119949],"mapped",[108]],[[119950,119950],"mapped",[109]],[[119951,119951],"mapped",[110]],[[119952,119952],"mapped",[111]],[[119953,119953],"mapped",[112]],[[119954,119954],"mapped",[113]],[[119955,119955],"mapped",[114]],[[119956,119956],"mapped",[115]],[[119957,119957],"mapped",[116]],[[119958,119958],"mapped",[117]],[[119959,119959],"mapped",[118]],[[119960,119960],"mapped",[119]],[[119961,119961],"mapped",[120]],[[119962
,119962],"mapped",[121]],[[119963,119963],"mapped",[122]],[[119964,119964],"mapped",[97]],[[119965,119965],"disallowed"],[[119966,119966],"mapped",[99]],[[119967,119967],"mapped",[100]],[[119968,119969],"disallowed"],[[119970,119970],"mapped",[103]],[[119971,119972],"disallowed"],[[119973,119973],"mapped",[106]],[[119974,119974],"mapped",[107]],[[119975,119976],"disallowed"],[[119977,119977],"mapped",[110]],[[119978,119978],"mapped",[111]],[[119979,119979],"mapped",[112]],[[119980,119980],"mapped",[113]],[[119981,119981],"disallowed"],[[119982,119982],"mapped",[115]],[[119983,119983],"mapped",[116]],[[119984,119984],"mapped",[117]],[[119985,119985],"mapped",[118]],[[119986,119986],"mapped",[119]],[[119987,119987],"mapped",[120]],[[119988,119988],"mapped",[121]],[[119989,119989],"mapped",[122]],[[119990,119990],"mapped",[97]],[[119991,119991],"mapped",[98]],[[119992,119992],"mapped",[99]],[[119993,119993],"mapped",[100]],[[119994,119994],"disallowed"],[[119995,119995],"mapped",[102]],[[119996,119996],"disallowed"],[[119997,119997],"mapped",[104]],[[119998,119998],"mapped",[105]],[[119999,119999],"mapped",[106]],[[120000,120000],"mapped",[107]],[[120001,120001],"mapped",[108]],[[120002,120002],"mapped",[109]],[[120003,120003],"mapped",[110]],[[120004,120004],"disallowed"],[[120005,120005],"mapped",[112]],[[120006,120006],"mapped",[113]],[[120007,120007],"mapped",[114]],[[120008,120008],"mapped",[115]],[[120009,120009],"mapped",[116]],[[120010,120010],"mapped",[117]],[[120011,120011],"mapped",[118]],[[120012,120012],"mapped",[119]],[[120013,120013],"mapped",[120]],[[120014,120014],"mapped",[121]],[[120015,120015],"mapped",[122]],[[120016,120016],"mapped",[97]],[[120017,120017],"mapped",[98]],[[120018,120018],"mapped",[99]],[[120019,120019],"mapped",[100]],[[120020,120020],"mapped",[101]],[[120021,120021],"mapped",[102]],[[120022,120022],"mapped",[103]],[[120023,120023],"mapped",[104]],[[120024,120024],"mapped",[105]],[[120025,120025],"mapped",[106]],[[120026,120026],"mapped",[107]],[[120027,120027],"mapped",[108]],[[120028,120028],"mapped",[109]],[[120029,120029],"mapped",[110]],[[120030,120030],"mapped",[111]],[[120031,120031],"mapped",[112]],[[120032,120032],"mapped",[113]],[[120033,120033],"mapped",[114]],[[120034,120034],"mapped",[115]],[[120035,120035],"mapped",[116]],[[120036,120036],"mapped",[117]],[[120037,120037],"mapped",[118]],[[120038,120038],"mapped",[119]],[[120039,120039],"mapped",[120]],[[120040,120040],"mapped",[121]],[[120041,120041],"mapped",[122]],[[120042,120042],"mapped",[97]],[[120043,120043],"mapped",[98]],[[120044,120044],"mapped",[99]],[[120045,120045],"mapped",[100]],[[120046,120046],"mapped",[101]],[[120047,120047],"mapped",[102]],[[120048,120048],"mapped",[103]],[[120049,120049],"mapped",[104]],[[120050,120050],"mapped",[105]],[[120051,120051],"mapped",[106]],[[120052,120052],"mapped",[107]],[[120053,120053],"mapped",[108]],[[120054,120054],"mapped",[109]],[[120055,120055],"mapped",[110]],[[120056,120056],"mapped",[111]],[[120057,120057],"mapped",[112]],[[120058,120058],"mapped",[113]],[[120059,120059],"mapped",[114]],[[120060,120060],"mapped",[115]],[[120061,120061],"mapped",[116]],[[120062,120062],"mapped",[117]],[[120063,120063],"mapped",[118]],[[120064,120064],"mapped",[119]],[[120065,120065],"mapped",[120]],[[120066,120066],"mapped",[121]],[[120067,120067],"mapped",[122]],[[120068,120068],"mapped",[97]],[[120069,120069],"mapped",[98]],[[120070,120070],"disallowed"],[[120071,120071],"mapped",[100]],[[120072,120072],"mapped",[101]],[[120073,120073],"mapped",[102
]],[[120074,120074],"mapped",[103]],[[120075,120076],"disallowed"],[[120077,120077],"mapped",[106]],[[120078,120078],"mapped",[107]],[[120079,120079],"mapped",[108]],[[120080,120080],"mapped",[109]],[[120081,120081],"mapped",[110]],[[120082,120082],"mapped",[111]],[[120083,120083],"mapped",[112]],[[120084,120084],"mapped",[113]],[[120085,120085],"disallowed"],[[120086,120086],"mapped",[115]],[[120087,120087],"mapped",[116]],[[120088,120088],"mapped",[117]],[[120089,120089],"mapped",[118]],[[120090,120090],"mapped",[119]],[[120091,120091],"mapped",[120]],[[120092,120092],"mapped",[121]],[[120093,120093],"disallowed"],[[120094,120094],"mapped",[97]],[[120095,120095],"mapped",[98]],[[120096,120096],"mapped",[99]],[[120097,120097],"mapped",[100]],[[120098,120098],"mapped",[101]],[[120099,120099],"mapped",[102]],[[120100,120100],"mapped",[103]],[[120101,120101],"mapped",[104]],[[120102,120102],"mapped",[105]],[[120103,120103],"mapped",[106]],[[120104,120104],"mapped",[107]],[[120105,120105],"mapped",[108]],[[120106,120106],"mapped",[109]],[[120107,120107],"mapped",[110]],[[120108,120108],"mapped",[111]],[[120109,120109],"mapped",[112]],[[120110,120110],"mapped",[113]],[[120111,120111],"mapped",[114]],[[120112,120112],"mapped",[115]],[[120113,120113],"mapped",[116]],[[120114,120114],"mapped",[117]],[[120115,120115],"mapped",[118]],[[120116,120116],"mapped",[119]],[[120117,120117],"mapped",[120]],[[120118,120118],"mapped",[121]],[[120119,120119],"mapped",[122]],[[120120,120120],"mapped",[97]],[[120121,120121],"mapped",[98]],[[120122,120122],"disallowed"],[[120123,120123],"mapped",[100]],[[120124,120124],"mapped",[101]],[[120125,120125],"mapped",[102]],[[120126,120126],"mapped",[103]],[[120127,120127],"disallowed"],[[120128,120128],"mapped",[105]],[[120129,120129],"mapped",[106]],[[120130,120130],"mapped",[107]],[[120131,120131],"mapped",[108]],[[120132,120132],"mapped",[109]],[[120133,120133],"disallowed"],[[120134,120134],"mapped",[111]],[[120135,120137],"disallowed"],[[120138,120138],"mapped",[115]],[[120139,120139],"mapped",[116]],[[120140,120140],"mapped",[117]],[[120141,120141],"mapped",[118]],[[120142,120142],"mapped",[119]],[[120143,120143],"mapped",[120]],[[120144,120144],"mapped",[121]],[[120145,120145],"disallowed"],[[120146,120146],"mapped",[97]],[[120147,120147],"mapped",[98]],[[120148,120148],"mapped",[99]],[[120149,120149],"mapped",[100]],[[120150,120150],"mapped",[101]],[[120151,120151],"mapped",[102]],[[120152,120152],"mapped",[103]],[[120153,120153],"mapped",[104]],[[120154,120154],"mapped",[105]],[[120155,120155],"mapped",[106]],[[120156,120156],"mapped",[107]],[[120157,120157],"mapped",[108]],[[120158,120158],"mapped",[109]],[[120159,120159],"mapped",[110]],[[120160,120160],"mapped",[111]],[[120161,120161],"mapped",[112]],[[120162,120162],"mapped",[113]],[[120163,120163],"mapped",[114]],[[120164,120164],"mapped",[115]],[[120165,120165],"mapped",[116]],[[120166,120166],"mapped",[117]],[[120167,120167],"mapped",[118]],[[120168,120168],"mapped",[119]],[[120169,120169],"mapped",[120]],[[120170,120170],"mapped",[121]],[[120171,120171],"mapped",[122]],[[120172,120172],"mapped",[97]],[[120173,120173],"mapped",[98]],[[120174,120174],"mapped",[99]],[[120175,120175],"mapped",[100]],[[120176,120176],"mapped",[101]],[[120177,120177],"mapped",[102]],[[120178,120178],"mapped",[103]],[[120179,120179],"mapped",[104]],[[120180,120180],"mapped",[105]],[[120181,120181],"mapped",[106]],[[120182,120182],"mapped",[107]],[[120183,120183],"mapped",[108]],[[120184,120184],"mapped",[109]],[[120185,120185
],"mapped",[110]],[[120186,120186],"mapped",[111]],[[120187,120187],"mapped",[112]],[[120188,120188],"mapped",[113]],[[120189,120189],"mapped",[114]],[[120190,120190],"mapped",[115]],[[120191,120191],"mapped",[116]],[[120192,120192],"mapped",[117]],[[120193,120193],"mapped",[118]],[[120194,120194],"mapped",[119]],[[120195,120195],"mapped",[120]],[[120196,120196],"mapped",[121]],[[120197,120197],"mapped",[122]],[[120198,120198],"mapped",[97]],[[120199,120199],"mapped",[98]],[[120200,120200],"mapped",[99]],[[120201,120201],"mapped",[100]],[[120202,120202],"mapped",[101]],[[120203,120203],"mapped",[102]],[[120204,120204],"mapped",[103]],[[120205,120205],"mapped",[104]],[[120206,120206],"mapped",[105]],[[120207,120207],"mapped",[106]],[[120208,120208],"mapped",[107]],[[120209,120209],"mapped",[108]],[[120210,120210],"mapped",[109]],[[120211,120211],"mapped",[110]],[[120212,120212],"mapped",[111]],[[120213,120213],"mapped",[112]],[[120214,120214],"mapped",[113]],[[120215,120215],"mapped",[114]],[[120216,120216],"mapped",[115]],[[120217,120217],"mapped",[116]],[[120218,120218],"mapped",[117]],[[120219,120219],"mapped",[118]],[[120220,120220],"mapped",[119]],[[120221,120221],"mapped",[120]],[[120222,120222],"mapped",[121]],[[120223,120223],"mapped",[122]],[[120224,120224],"mapped",[97]],[[120225,120225],"mapped",[98]],[[120226,120226],"mapped",[99]],[[120227,120227],"mapped",[100]],[[120228,120228],"mapped",[101]],[[120229,120229],"mapped",[102]],[[120230,120230],"mapped",[103]],[[120231,120231],"mapped",[104]],[[120232,120232],"mapped",[105]],[[120233,120233],"mapped",[106]],[[120234,120234],"mapped",[107]],[[120235,120235],"mapped",[108]],[[120236,120236],"mapped",[109]],[[120237,120237],"mapped",[110]],[[120238,120238],"mapped",[111]],[[120239,120239],"mapped",[112]],[[120240,120240],"mapped",[113]],[[120241,120241],"mapped",[114]],[[120242,120242],"mapped",[115]],[[120243,120243],"mapped",[116]],[[120244,120244],"mapped",[117]],[[120245,120245],"mapped",[118]],[[120246,120246],"mapped",[119]],[[120247,120247],"mapped",[120]],[[120248,120248],"mapped",[121]],[[120249,120249],"mapped",[122]],[[120250,120250],"mapped",[97]],[[120251,120251],"mapped",[98]],[[120252,120252],"mapped",[99]],[[120253,120253],"mapped",[100]],[[120254,120254],"mapped",[101]],[[120255,120255],"mapped",[102]],[[120256,120256],"mapped",[103]],[[120257,120257],"mapped",[104]],[[120258,120258],"mapped",[105]],[[120259,120259],"mapped",[106]],[[120260,120260],"mapped",[107]],[[120261,120261],"mapped",[108]],[[120262,120262],"mapped",[109]],[[120263,120263],"mapped",[110]],[[120264,120264],"mapped",[111]],[[120265,120265],"mapped",[112]],[[120266,120266],"mapped",[113]],[[120267,120267],"mapped",[114]],[[120268,120268],"mapped",[115]],[[120269,120269],"mapped",[116]],[[120270,120270],"mapped",[117]],[[120271,120271],"mapped",[118]],[[120272,120272],"mapped",[119]],[[120273,120273],"mapped",[120]],[[120274,120274],"mapped",[121]],[[120275,120275],"mapped",[122]],[[120276,120276],"mapped",[97]],[[120277,120277],"mapped",[98]],[[120278,120278],"mapped",[99]],[[120279,120279],"mapped",[100]],[[120280,120280],"mapped",[101]],[[120281,120281],"mapped",[102]],[[120282,120282],"mapped",[103]],[[120283,120283],"mapped",[104]],[[120284,120284],"mapped",[105]],[[120285,120285],"mapped",[106]],[[120286,120286],"mapped",[107]],[[120287,120287],"mapped",[108]],[[120288,120288],"mapped",[109]],[[120289,120289],"mapped",[110]],[[120290,120290],"mapped",[111]],[[120291,120291],"mapped",[112]],[[120292,120292],"mapped",[113]],[[120293,120293],"
mapped",[114]],[[120294,120294],"mapped",[115]],[[120295,120295],"mapped",[116]],[[120296,120296],"mapped",[117]],[[120297,120297],"mapped",[118]],[[120298,120298],"mapped",[119]],[[120299,120299],"mapped",[120]],[[120300,120300],"mapped",[121]],[[120301,120301],"mapped",[122]],[[120302,120302],"mapped",[97]],[[120303,120303],"mapped",[98]],[[120304,120304],"mapped",[99]],[[120305,120305],"mapped",[100]],[[120306,120306],"mapped",[101]],[[120307,120307],"mapped",[102]],[[120308,120308],"mapped",[103]],[[120309,120309],"mapped",[104]],[[120310,120310],"mapped",[105]],[[120311,120311],"mapped",[106]],[[120312,120312],"mapped",[107]],[[120313,120313],"mapped",[108]],[[120314,120314],"mapped",[109]],[[120315,120315],"mapped",[110]],[[120316,120316],"mapped",[111]],[[120317,120317],"mapped",[112]],[[120318,120318],"mapped",[113]],[[120319,120319],"mapped",[114]],[[120320,120320],"mapped",[115]],[[120321,120321],"mapped",[116]],[[120322,120322],"mapped",[117]],[[120323,120323],"mapped",[118]],[[120324,120324],"mapped",[119]],[[120325,120325],"mapped",[120]],[[120326,120326],"mapped",[121]],[[120327,120327],"mapped",[122]],[[120328,120328],"mapped",[97]],[[120329,120329],"mapped",[98]],[[120330,120330],"mapped",[99]],[[120331,120331],"mapped",[100]],[[120332,120332],"mapped",[101]],[[120333,120333],"mapped",[102]],[[120334,120334],"mapped",[103]],[[120335,120335],"mapped",[104]],[[120336,120336],"mapped",[105]],[[120337,120337],"mapped",[106]],[[120338,120338],"mapped",[107]],[[120339,120339],"mapped",[108]],[[120340,120340],"mapped",[109]],[[120341,120341],"mapped",[110]],[[120342,120342],"mapped",[111]],[[120343,120343],"mapped",[112]],[[120344,120344],"mapped",[113]],[[120345,120345],"mapped",[114]],[[120346,120346],"mapped",[115]],[[120347,120347],"mapped",[116]],[[120348,120348],"mapped",[117]],[[120349,120349],"mapped",[118]],[[120350,120350],"mapped",[119]],[[120351,120351],"mapped",[120]],[[120352,120352],"mapped",[121]],[[120353,120353],"mapped",[122]],[[120354,120354],"mapped",[97]],[[120355,120355],"mapped",[98]],[[120356,120356],"mapped",[99]],[[120357,120357],"mapped",[100]],[[120358,120358],"mapped",[101]],[[120359,120359],"mapped",[102]],[[120360,120360],"mapped",[103]],[[120361,120361],"mapped",[104]],[[120362,120362],"mapped",[105]],[[120363,120363],"mapped",[106]],[[120364,120364],"mapped",[107]],[[120365,120365],"mapped",[108]],[[120366,120366],"mapped",[109]],[[120367,120367],"mapped",[110]],[[120368,120368],"mapped",[111]],[[120369,120369],"mapped",[112]],[[120370,120370],"mapped",[113]],[[120371,120371],"mapped",[114]],[[120372,120372],"mapped",[115]],[[120373,120373],"mapped",[116]],[[120374,120374],"mapped",[117]],[[120375,120375],"mapped",[118]],[[120376,120376],"mapped",[119]],[[120377,120377],"mapped",[120]],[[120378,120378],"mapped",[121]],[[120379,120379],"mapped",[122]],[[120380,120380],"mapped",[97]],[[120381,120381],"mapped",[98]],[[120382,120382],"mapped",[99]],[[120383,120383],"mapped",[100]],[[120384,120384],"mapped",[101]],[[120385,120385],"mapped",[102]],[[120386,120386],"mapped",[103]],[[120387,120387],"mapped",[104]],[[120388,120388],"mapped",[105]],[[120389,120389],"mapped",[106]],[[120390,120390],"mapped",[107]],[[120391,120391],"mapped",[108]],[[120392,120392],"mapped",[109]],[[120393,120393],"mapped",[110]],[[120394,120394],"mapped",[111]],[[120395,120395],"mapped",[112]],[[120396,120396],"mapped",[113]],[[120397,120397],"mapped",[114]],[[120398,120398],"mapped",[115]],[[120399,120399],"mapped",[116]],[[120400,120400],"mapped",[117]],[[120401,120401],"map
ped",[118]],[[120402,120402],"mapped",[119]],[[120403,120403],"mapped",[120]],[[120404,120404],"mapped",[121]],[[120405,120405],"mapped",[122]],[[120406,120406],"mapped",[97]],[[120407,120407],"mapped",[98]],[[120408,120408],"mapped",[99]],[[120409,120409],"mapped",[100]],[[120410,120410],"mapped",[101]],[[120411,120411],"mapped",[102]],[[120412,120412],"mapped",[103]],[[120413,120413],"mapped",[104]],[[120414,120414],"mapped",[105]],[[120415,120415],"mapped",[106]],[[120416,120416],"mapped",[107]],[[120417,120417],"mapped",[108]],[[120418,120418],"mapped",[109]],[[120419,120419],"mapped",[110]],[[120420,120420],"mapped",[111]],[[120421,120421],"mapped",[112]],[[120422,120422],"mapped",[113]],[[120423,120423],"mapped",[114]],[[120424,120424],"mapped",[115]],[[120425,120425],"mapped",[116]],[[120426,120426],"mapped",[117]],[[120427,120427],"mapped",[118]],[[120428,120428],"mapped",[119]],[[120429,120429],"mapped",[120]],[[120430,120430],"mapped",[121]],[[120431,120431],"mapped",[122]],[[120432,120432],"mapped",[97]],[[120433,120433],"mapped",[98]],[[120434,120434],"mapped",[99]],[[120435,120435],"mapped",[100]],[[120436,120436],"mapped",[101]],[[120437,120437],"mapped",[102]],[[120438,120438],"mapped",[103]],[[120439,120439],"mapped",[104]],[[120440,120440],"mapped",[105]],[[120441,120441],"mapped",[106]],[[120442,120442],"mapped",[107]],[[120443,120443],"mapped",[108]],[[120444,120444],"mapped",[109]],[[120445,120445],"mapped",[110]],[[120446,120446],"mapped",[111]],[[120447,120447],"mapped",[112]],[[120448,120448],"mapped",[113]],[[120449,120449],"mapped",[114]],[[120450,120450],"mapped",[115]],[[120451,120451],"mapped",[116]],[[120452,120452],"mapped",[117]],[[120453,120453],"mapped",[118]],[[120454,120454],"mapped",[119]],[[120455,120455],"mapped",[120]],[[120456,120456],"mapped",[121]],[[120457,120457],"mapped",[122]],[[120458,120458],"mapped",[97]],[[120459,120459],"mapped",[98]],[[120460,120460],"mapped",[99]],[[120461,120461],"mapped",[100]],[[120462,120462],"mapped",[101]],[[120463,120463],"mapped",[102]],[[120464,120464],"mapped",[103]],[[120465,120465],"mapped",[104]],[[120466,120466],"mapped",[105]],[[120467,120467],"mapped",[106]],[[120468,120468],"mapped",[107]],[[120469,120469],"mapped",[108]],[[120470,120470],"mapped",[109]],[[120471,120471],"mapped",[110]],[[120472,120472],"mapped",[111]],[[120473,120473],"mapped",[112]],[[120474,120474],"mapped",[113]],[[120475,120475],"mapped",[114]],[[120476,120476],"mapped",[115]],[[120477,120477],"mapped",[116]],[[120478,120478],"mapped",[117]],[[120479,120479],"mapped",[118]],[[120480,120480],"mapped",[119]],[[120481,120481],"mapped",[120]],[[120482,120482],"mapped",[121]],[[120483,120483],"mapped",[122]],[[120484,120484],"mapped",[305]],[[120485,120485],"mapped",[567]],[[120486,120487],"disallowed"],[[120488,120488],"mapped",[945]],[[120489,120489],"mapped",[946]],[[120490,120490],"mapped",[947]],[[120491,120491],"mapped",[948]],[[120492,120492],"mapped",[949]],[[120493,120493],"mapped",[950]],[[120494,120494],"mapped",[951]],[[120495,120495],"mapped",[952]],[[120496,120496],"mapped",[953]],[[120497,120497],"mapped",[954]],[[120498,120498],"mapped",[955]],[[120499,120499],"mapped",[956]],[[120500,120500],"mapped",[957]],[[120501,120501],"mapped",[958]],[[120502,120502],"mapped",[959]],[[120503,120503],"mapped",[960]],[[120504,120504],"mapped",[961]],[[120505,120505],"mapped",[952]],[[120506,120506],"mapped",[963]],[[120507,120507],"mapped",[964]],[[120508,120508],"mapped",[965]],[[120509,120509],"mapped",[966]],[[120510,120510],"mappe
d",[967]],[[120511,120511],"mapped",[968]],[[120512,120512],"mapped",[969]],[[120513,120513],"mapped",[8711]],[[120514,120514],"mapped",[945]],[[120515,120515],"mapped",[946]],[[120516,120516],"mapped",[947]],[[120517,120517],"mapped",[948]],[[120518,120518],"mapped",[949]],[[120519,120519],"mapped",[950]],[[120520,120520],"mapped",[951]],[[120521,120521],"mapped",[952]],[[120522,120522],"mapped",[953]],[[120523,120523],"mapped",[954]],[[120524,120524],"mapped",[955]],[[120525,120525],"mapped",[956]],[[120526,120526],"mapped",[957]],[[120527,120527],"mapped",[958]],[[120528,120528],"mapped",[959]],[[120529,120529],"mapped",[960]],[[120530,120530],"mapped",[961]],[[120531,120532],"mapped",[963]],[[120533,120533],"mapped",[964]],[[120534,120534],"mapped",[965]],[[120535,120535],"mapped",[966]],[[120536,120536],"mapped",[967]],[[120537,120537],"mapped",[968]],[[120538,120538],"mapped",[969]],[[120539,120539],"mapped",[8706]],[[120540,120540],"mapped",[949]],[[120541,120541],"mapped",[952]],[[120542,120542],"mapped",[954]],[[120543,120543],"mapped",[966]],[[120544,120544],"mapped",[961]],[[120545,120545],"mapped",[960]],[[120546,120546],"mapped",[945]],[[120547,120547],"mapped",[946]],[[120548,120548],"mapped",[947]],[[120549,120549],"mapped",[948]],[[120550,120550],"mapped",[949]],[[120551,120551],"mapped",[950]],[[120552,120552],"mapped",[951]],[[120553,120553],"mapped",[952]],[[120554,120554],"mapped",[953]],[[120555,120555],"mapped",[954]],[[120556,120556],"mapped",[955]],[[120557,120557],"mapped",[956]],[[120558,120558],"mapped",[957]],[[120559,120559],"mapped",[958]],[[120560,120560],"mapped",[959]],[[120561,120561],"mapped",[960]],[[120562,120562],"mapped",[961]],[[120563,120563],"mapped",[952]],[[120564,120564],"mapped",[963]],[[120565,120565],"mapped",[964]],[[120566,120566],"mapped",[965]],[[120567,120567],"mapped",[966]],[[120568,120568],"mapped",[967]],[[120569,120569],"mapped",[968]],[[120570,120570],"mapped",[969]],[[120571,120571],"mapped",[8711]],[[120572,120572],"mapped",[945]],[[120573,120573],"mapped",[946]],[[120574,120574],"mapped",[947]],[[120575,120575],"mapped",[948]],[[120576,120576],"mapped",[949]],[[120577,120577],"mapped",[950]],[[120578,120578],"mapped",[951]],[[120579,120579],"mapped",[952]],[[120580,120580],"mapped",[953]],[[120581,120581],"mapped",[954]],[[120582,120582],"mapped",[955]],[[120583,120583],"mapped",[956]],[[120584,120584],"mapped",[957]],[[120585,120585],"mapped",[958]],[[120586,120586],"mapped",[959]],[[120587,120587],"mapped",[960]],[[120588,120588],"mapped",[961]],[[120589,120590],"mapped",[963]],[[120591,120591],"mapped",[964]],[[120592,120592],"mapped",[965]],[[120593,120593],"mapped",[966]],[[120594,120594],"mapped",[967]],[[120595,120595],"mapped",[968]],[[120596,120596],"mapped",[969]],[[120597,120597],"mapped",[8706]],[[120598,120598],"mapped",[949]],[[120599,120599],"mapped",[952]],[[120600,120600],"mapped",[954]],[[120601,120601],"mapped",[966]],[[120602,120602],"mapped",[961]],[[120603,120603],"mapped",[960]],[[120604,120604],"mapped",[945]],[[120605,120605],"mapped",[946]],[[120606,120606],"mapped",[947]],[[120607,120607],"mapped",[948]],[[120608,120608],"mapped",[949]],[[120609,120609],"mapped",[950]],[[120610,120610],"mapped",[951]],[[120611,120611],"mapped",[952]],[[120612,120612],"mapped",[953]],[[120613,120613],"mapped",[954]],[[120614,120614],"mapped",[955]],[[120615,120615],"mapped",[956]],[[120616,120616],"mapped",[957]],[[120617,120617],"mapped",[958]],[[120618,120618],"mapped",[959]],[[120619,120619],"mapped",[960]],[[120620,1
20620],"mapped",[961]],[[120621,120621],"mapped",[952]],[[120622,120622],"mapped",[963]],[[120623,120623],"mapped",[964]],[[120624,120624],"mapped",[965]],[[120625,120625],"mapped",[966]],[[120626,120626],"mapped",[967]],[[120627,120627],"mapped",[968]],[[120628,120628],"mapped",[969]],[[120629,120629],"mapped",[8711]],[[120630,120630],"mapped",[945]],[[120631,120631],"mapped",[946]],[[120632,120632],"mapped",[947]],[[120633,120633],"mapped",[948]],[[120634,120634],"mapped",[949]],[[120635,120635],"mapped",[950]],[[120636,120636],"mapped",[951]],[[120637,120637],"mapped",[952]],[[120638,120638],"mapped",[953]],[[120639,120639],"mapped",[954]],[[120640,120640],"mapped",[955]],[[120641,120641],"mapped",[956]],[[120642,120642],"mapped",[957]],[[120643,120643],"mapped",[958]],[[120644,120644],"mapped",[959]],[[120645,120645],"mapped",[960]],[[120646,120646],"mapped",[961]],[[120647,120648],"mapped",[963]],[[120649,120649],"mapped",[964]],[[120650,120650],"mapped",[965]],[[120651,120651],"mapped",[966]],[[120652,120652],"mapped",[967]],[[120653,120653],"mapped",[968]],[[120654,120654],"mapped",[969]],[[120655,120655],"mapped",[8706]],[[120656,120656],"mapped",[949]],[[120657,120657],"mapped",[952]],[[120658,120658],"mapped",[954]],[[120659,120659],"mapped",[966]],[[120660,120660],"mapped",[961]],[[120661,120661],"mapped",[960]],[[120662,120662],"mapped",[945]],[[120663,120663],"mapped",[946]],[[120664,120664],"mapped",[947]],[[120665,120665],"mapped",[948]],[[120666,120666],"mapped",[949]],[[120667,120667],"mapped",[950]],[[120668,120668],"mapped",[951]],[[120669,120669],"mapped",[952]],[[120670,120670],"mapped",[953]],[[120671,120671],"mapped",[954]],[[120672,120672],"mapped",[955]],[[120673,120673],"mapped",[956]],[[120674,120674],"mapped",[957]],[[120675,120675],"mapped",[958]],[[120676,120676],"mapped",[959]],[[120677,120677],"mapped",[960]],[[120678,120678],"mapped",[961]],[[120679,120679],"mapped",[952]],[[120680,120680],"mapped",[963]],[[120681,120681],"mapped",[964]],[[120682,120682],"mapped",[965]],[[120683,120683],"mapped",[966]],[[120684,120684],"mapped",[967]],[[120685,120685],"mapped",[968]],[[120686,120686],"mapped",[969]],[[120687,120687],"mapped",[8711]],[[120688,120688],"mapped",[945]],[[120689,120689],"mapped",[946]],[[120690,120690],"mapped",[947]],[[120691,120691],"mapped",[948]],[[120692,120692],"mapped",[949]],[[120693,120693],"mapped",[950]],[[120694,120694],"mapped",[951]],[[120695,120695],"mapped",[952]],[[120696,120696],"mapped",[953]],[[120697,120697],"mapped",[954]],[[120698,120698],"mapped",[955]],[[120699,120699],"mapped",[956]],[[120700,120700],"mapped",[957]],[[120701,120701],"mapped",[958]],[[120702,120702],"mapped",[959]],[[120703,120703],"mapped",[960]],[[120704,120704],"mapped",[961]],[[120705,120706],"mapped",[963]],[[120707,120707],"mapped",[964]],[[120708,120708],"mapped",[965]],[[120709,120709],"mapped",[966]],[[120710,120710],"mapped",[967]],[[120711,120711],"mapped",[968]],[[120712,120712],"mapped",[969]],[[120713,120713],"mapped",[8706]],[[120714,120714],"mapped",[949]],[[120715,120715],"mapped",[952]],[[120716,120716],"mapped",[954]],[[120717,120717],"mapped",[966]],[[120718,120718],"mapped",[961]],[[120719,120719],"mapped",[960]],[[120720,120720],"mapped",[945]],[[120721,120721],"mapped",[946]],[[120722,120722],"mapped",[947]],[[120723,120723],"mapped",[948]],[[120724,120724],"mapped",[949]],[[120725,120725],"mapped",[950]],[[120726,120726],"mapped",[951]],[[120727,120727],"mapped",[952]],[[120728,120728],"mapped",[953]],[[120729,120729],"mapped",[954
]],[[120730,120730],"mapped",[955]],[[120731,120731],"mapped",[956]],[[120732,120732],"mapped",[957]],[[120733,120733],"mapped",[958]],[[120734,120734],"mapped",[959]],[[120735,120735],"mapped",[960]],[[120736,120736],"mapped",[961]],[[120737,120737],"mapped",[952]],[[120738,120738],"mapped",[963]],[[120739,120739],"mapped",[964]],[[120740,120740],"mapped",[965]],[[120741,120741],"mapped",[966]],[[120742,120742],"mapped",[967]],[[120743,120743],"mapped",[968]],[[120744,120744],"mapped",[969]],[[120745,120745],"mapped",[8711]],[[120746,120746],"mapped",[945]],[[120747,120747],"mapped",[946]],[[120748,120748],"mapped",[947]],[[120749,120749],"mapped",[948]],[[120750,120750],"mapped",[949]],[[120751,120751],"mapped",[950]],[[120752,120752],"mapped",[951]],[[120753,120753],"mapped",[952]],[[120754,120754],"mapped",[953]],[[120755,120755],"mapped",[954]],[[120756,120756],"mapped",[955]],[[120757,120757],"mapped",[956]],[[120758,120758],"mapped",[957]],[[120759,120759],"mapped",[958]],[[120760,120760],"mapped",[959]],[[120761,120761],"mapped",[960]],[[120762,120762],"mapped",[961]],[[120763,120764],"mapped",[963]],[[120765,120765],"mapped",[964]],[[120766,120766],"mapped",[965]],[[120767,120767],"mapped",[966]],[[120768,120768],"mapped",[967]],[[120769,120769],"mapped",[968]],[[120770,120770],"mapped",[969]],[[120771,120771],"mapped",[8706]],[[120772,120772],"mapped",[949]],[[120773,120773],"mapped",[952]],[[120774,120774],"mapped",[954]],[[120775,120775],"mapped",[966]],[[120776,120776],"mapped",[961]],[[120777,120777],"mapped",[960]],[[120778,120779],"mapped",[989]],[[120780,120781],"disallowed"],[[120782,120782],"mapped",[48]],[[120783,120783],"mapped",[49]],[[120784,120784],"mapped",[50]],[[120785,120785],"mapped",[51]],[[120786,120786],"mapped",[52]],[[120787,120787],"mapped",[53]],[[120788,120788],"mapped",[54]],[[120789,120789],"mapped",[55]],[[120790,120790],"mapped",[56]],[[120791,120791],"mapped",[57]],[[120792,120792],"mapped",[48]],[[120793,120793],"mapped",[49]],[[120794,120794],"mapped",[50]],[[120795,120795],"mapped",[51]],[[120796,120796],"mapped",[52]],[[120797,120797],"mapped",[53]],[[120798,120798],"mapped",[54]],[[120799,120799],"mapped",[55]],[[120800,120800],"mapped",[56]],[[120801,120801],"mapped",[57]],[[120802,120802],"mapped",[48]],[[120803,120803],"mapped",[49]],[[120804,120804],"mapped",[50]],[[120805,120805],"mapped",[51]],[[120806,120806],"mapped",[52]],[[120807,120807],"mapped",[53]],[[120808,120808],"mapped",[54]],[[120809,120809],"mapped",[55]],[[120810,120810],"mapped",[56]],[[120811,120811],"mapped",[57]],[[120812,120812],"mapped",[48]],[[120813,120813],"mapped",[49]],[[120814,120814],"mapped",[50]],[[120815,120815],"mapped",[51]],[[120816,120816],"mapped",[52]],[[120817,120817],"mapped",[53]],[[120818,120818],"mapped",[54]],[[120819,120819],"mapped",[55]],[[120820,120820],"mapped",[56]],[[120821,120821],"mapped",[57]],[[120822,120822],"mapped",[48]],[[120823,120823],"mapped",[49]],[[120824,120824],"mapped",[50]],[[120825,120825],"mapped",[51]],[[120826,120826],"mapped",[52]],[[120827,120827],"mapped",[53]],[[120828,120828],"mapped",[54]],[[120829,120829],"mapped",[55]],[[120830,120830],"mapped",[56]],[[120831,120831],"mapped",[57]],[[120832,121343],"valid",[],"NV8"],[[121344,121398],"valid"],[[121399,121402],"valid",[],"NV8"],[[121403,121452],"valid"],[[121453,121460],"valid",[],"NV8"],[[121461,121461],"valid"],[[121462,121475],"valid",[],"NV8"],[[121476,121476],"valid"],[[121477,121483],"valid",[],"NV8"],[[121484,121498],"disallowed"],[[121499,121503],"valid"]
,[[121504,121504],"disallowed"],[[121505,121519],"valid"],[[121520,124927],"disallowed"],[[124928,125124],"valid"],[[125125,125126],"disallowed"],[[125127,125135],"valid",[],"NV8"],[[125136,125142],"valid"],[[125143,126463],"disallowed"],[[126464,126464],"mapped",[1575]],[[126465,126465],"mapped",[1576]],[[126466,126466],"mapped",[1580]],[[126467,126467],"mapped",[1583]],[[126468,126468],"disallowed"],[[126469,126469],"mapped",[1608]],[[126470,126470],"mapped",[1586]],[[126471,126471],"mapped",[1581]],[[126472,126472],"mapped",[1591]],[[126473,126473],"mapped",[1610]],[[126474,126474],"mapped",[1603]],[[126475,126475],"mapped",[1604]],[[126476,126476],"mapped",[1605]],[[126477,126477],"mapped",[1606]],[[126478,126478],"mapped",[1587]],[[126479,126479],"mapped",[1593]],[[126480,126480],"mapped",[1601]],[[126481,126481],"mapped",[1589]],[[126482,126482],"mapped",[1602]],[[126483,126483],"mapped",[1585]],[[126484,126484],"mapped",[1588]],[[126485,126485],"mapped",[1578]],[[126486,126486],"mapped",[1579]],[[126487,126487],"mapped",[1582]],[[126488,126488],"mapped",[1584]],[[126489,126489],"mapped",[1590]],[[126490,126490],"mapped",[1592]],[[126491,126491],"mapped",[1594]],[[126492,126492],"mapped",[1646]],[[126493,126493],"mapped",[1722]],[[126494,126494],"mapped",[1697]],[[126495,126495],"mapped",[1647]],[[126496,126496],"disallowed"],[[126497,126497],"mapped",[1576]],[[126498,126498],"mapped",[1580]],[[126499,126499],"disallowed"],[[126500,126500],"mapped",[1607]],[[126501,126502],"disallowed"],[[126503,126503],"mapped",[1581]],[[126504,126504],"disallowed"],[[126505,126505],"mapped",[1610]],[[126506,126506],"mapped",[1603]],[[126507,126507],"mapped",[1604]],[[126508,126508],"mapped",[1605]],[[126509,126509],"mapped",[1606]],[[126510,126510],"mapped",[1587]],[[126511,126511],"mapped",[1593]],[[126512,126512],"mapped",[1601]],[[126513,126513],"mapped",[1589]],[[126514,126514],"mapped",[1602]],[[126515,126515],"disallowed"],[[126516,126516],"mapped",[1588]],[[126517,126517],"mapped",[1578]],[[126518,126518],"mapped",[1579]],[[126519,126519],"mapped",[1582]],[[126520,126520],"disallowed"],[[126521,126521],"mapped",[1590]],[[126522,126522],"disallowed"],[[126523,126523],"mapped",[1594]],[[126524,126529],"disallowed"],[[126530,126530],"mapped",[1580]],[[126531,126534],"disallowed"],[[126535,126535],"mapped",[1581]],[[126536,126536],"disallowed"],[[126537,126537],"mapped",[1610]],[[126538,126538],"disallowed"],[[126539,126539],"mapped",[1604]],[[126540,126540],"disallowed"],[[126541,126541],"mapped",[1606]],[[126542,126542],"mapped",[1587]],[[126543,126543],"mapped",[1593]],[[126544,126544],"disallowed"],[[126545,126545],"mapped",[1589]],[[126546,126546],"mapped",[1602]],[[126547,126547],"disallowed"],[[126548,126548],"mapped",[1588]],[[126549,126550],"disallowed"],[[126551,126551],"mapped",[1582]],[[126552,126552],"disallowed"],[[126553,126553],"mapped",[1590]],[[126554,126554],"disallowed"],[[126555,126555],"mapped",[1594]],[[126556,126556],"disallowed"],[[126557,126557],"mapped",[1722]],[[126558,126558],"disallowed"],[[126559,126559],"mapped",[1647]],[[126560,126560],"disallowed"],[[126561,126561],"mapped",[1576]],[[126562,126562],"mapped",[1580]],[[126563,126563],"disallowed"],[[126564,126564],"mapped",[1607]],[[126565,126566],"disallowed"],[[126567,126567],"mapped",[1581]],[[126568,126568],"mapped",[1591]],[[126569,126569],"mapped",[1610]],[[126570,126570],"mapped",[1603]],[[126571,126571],"disallowed"],[[126572,126572],"mapped",[1605]],[[126573,126573],"mapped",[1606]],[[126574,126574],"mappe
d",[1587]],[[126575,126575],"mapped",[1593]],[[126576,126576],"mapped",[1601]],[[126577,126577],"mapped",[1589]],[[126578,126578],"mapped",[1602]],[[126579,126579],"disallowed"],[[126580,126580],"mapped",[1588]],[[126581,126581],"mapped",[1578]],[[126582,126582],"mapped",[1579]],[[126583,126583],"mapped",[1582]],[[126584,126584],"disallowed"],[[126585,126585],"mapped",[1590]],[[126586,126586],"mapped",[1592]],[[126587,126587],"mapped",[1594]],[[126588,126588],"mapped",[1646]],[[126589,126589],"disallowed"],[[126590,126590],"mapped",[1697]],[[126591,126591],"disallowed"],[[126592,126592],"mapped",[1575]],[[126593,126593],"mapped",[1576]],[[126594,126594],"mapped",[1580]],[[126595,126595],"mapped",[1583]],[[126596,126596],"mapped",[1607]],[[126597,126597],"mapped",[1608]],[[126598,126598],"mapped",[1586]],[[126599,126599],"mapped",[1581]],[[126600,126600],"mapped",[1591]],[[126601,126601],"mapped",[1610]],[[126602,126602],"disallowed"],[[126603,126603],"mapped",[1604]],[[126604,126604],"mapped",[1605]],[[126605,126605],"mapped",[1606]],[[126606,126606],"mapped",[1587]],[[126607,126607],"mapped",[1593]],[[126608,126608],"mapped",[1601]],[[126609,126609],"mapped",[1589]],[[126610,126610],"mapped",[1602]],[[126611,126611],"mapped",[1585]],[[126612,126612],"mapped",[1588]],[[126613,126613],"mapped",[1578]],[[126614,126614],"mapped",[1579]],[[126615,126615],"mapped",[1582]],[[126616,126616],"mapped",[1584]],[[126617,126617],"mapped",[1590]],[[126618,126618],"mapped",[1592]],[[126619,126619],"mapped",[1594]],[[126620,126624],"disallowed"],[[126625,126625],"mapped",[1576]],[[126626,126626],"mapped",[1580]],[[126627,126627],"mapped",[1583]],[[126628,126628],"disallowed"],[[126629,126629],"mapped",[1608]],[[126630,126630],"mapped",[1586]],[[126631,126631],"mapped",[1581]],[[126632,126632],"mapped",[1591]],[[126633,126633],"mapped",[1610]],[[126634,126634],"disallowed"],[[126635,126635],"mapped",[1604]],[[126636,126636],"mapped",[1605]],[[126637,126637],"mapped",[1606]],[[126638,126638],"mapped",[1587]],[[126639,126639],"mapped",[1593]],[[126640,126640],"mapped",[1601]],[[126641,126641],"mapped",[1589]],[[126642,126642],"mapped",[1602]],[[126643,126643],"mapped",[1585]],[[126644,126644],"mapped",[1588]],[[126645,126645],"mapped",[1578]],[[126646,126646],"mapped",[1579]],[[126647,126647],"mapped",[1582]],[[126648,126648],"mapped",[1584]],[[126649,126649],"mapped",[1590]],[[126650,126650],"mapped",[1592]],[[126651,126651],"mapped",[1594]],[[126652,126703],"disallowed"],[[126704,126705],"valid",[],"NV8"],[[126706,126975],"disallowed"],[[126976,127019],"valid",[],"NV8"],[[127020,127023],"disallowed"],[[127024,127123],"valid",[],"NV8"],[[127124,127135],"disallowed"],[[127136,127150],"valid",[],"NV8"],[[127151,127152],"disallowed"],[[127153,127166],"valid",[],"NV8"],[[127167,127167],"valid",[],"NV8"],[[127168,127168],"disallowed"],[[127169,127183],"valid",[],"NV8"],[[127184,127184],"disallowed"],[[127185,127199],"valid",[],"NV8"],[[127200,127221],"valid",[],"NV8"],[[127222,127231],"disallowed"],[[127232,127232],"disallowed"],[[127233,127233],"disallowed_STD3_mapped",[48,44]],[[127234,127234],"disallowed_STD3_mapped",[49,44]],[[127235,127235],"disallowed_STD3_mapped",[50,44]],[[127236,127236],"disallowed_STD3_mapped",[51,44]],[[127237,127237],"disallowed_STD3_mapped",[52,44]],[[127238,127238],"disallowed_STD3_mapped",[53,44]],[[127239,127239],"disallowed_STD3_mapped",[54,44]],[[127240,127240],"disallowed_STD3_mapped",[55,44]],[[127241,127241],"disallowed_STD3_mapped",[56,44]],[[127242,127242],"disallowed_STD3
_mapped",[57,44]],[[127243,127244],"valid",[],"NV8"],[[127245,127247],"disallowed"],[[127248,127248],"disallowed_STD3_mapped",[40,97,41]],[[127249,127249],"disallowed_STD3_mapped",[40,98,41]],[[127250,127250],"disallowed_STD3_mapped",[40,99,41]],[[127251,127251],"disallowed_STD3_mapped",[40,100,41]],[[127252,127252],"disallowed_STD3_mapped",[40,101,41]],[[127253,127253],"disallowed_STD3_mapped",[40,102,41]],[[127254,127254],"disallowed_STD3_mapped",[40,103,41]],[[127255,127255],"disallowed_STD3_mapped",[40,104,41]],[[127256,127256],"disallowed_STD3_mapped",[40,105,41]],[[127257,127257],"disallowed_STD3_mapped",[40,106,41]],[[127258,127258],"disallowed_STD3_mapped",[40,107,41]],[[127259,127259],"disallowed_STD3_mapped",[40,108,41]],[[127260,127260],"disallowed_STD3_mapped",[40,109,41]],[[127261,127261],"disallowed_STD3_mapped",[40,110,41]],[[127262,127262],"disallowed_STD3_mapped",[40,111,41]],[[127263,127263],"disallowed_STD3_mapped",[40,112,41]],[[127264,127264],"disallowed_STD3_mapped",[40,113,41]],[[127265,127265],"disallowed_STD3_mapped",[40,114,41]],[[127266,127266],"disallowed_STD3_mapped",[40,115,41]],[[127267,127267],"disallowed_STD3_mapped",[40,116,41]],[[127268,127268],"disallowed_STD3_mapped",[40,117,41]],[[127269,127269],"disallowed_STD3_mapped",[40,118,41]],[[127270,127270],"disallowed_STD3_mapped",[40,119,41]],[[127271,127271],"disallowed_STD3_mapped",[40,120,41]],[[127272,127272],"disallowed_STD3_mapped",[40,121,41]],[[127273,127273],"disallowed_STD3_mapped",[40,122,41]],[[127274,127274],"mapped",[12308,115,12309]],[[127275,127275],"mapped",[99]],[[127276,127276],"mapped",[114]],[[127277,127277],"mapped",[99,100]],[[127278,127278],"mapped",[119,122]],[[127279,127279],"disallowed"],[[127280,127280],"mapped",[97]],[[127281,127281],"mapped",[98]],[[127282,127282],"mapped",[99]],[[127283,127283],"mapped",[100]],[[127284,127284],"mapped",[101]],[[127285,127285],"mapped",[102]],[[127286,127286],"mapped",[103]],[[127287,127287],"mapped",[104]],[[127288,127288],"mapped",[105]],[[127289,127289],"mapped",[106]],[[127290,127290],"mapped",[107]],[[127291,127291],"mapped",[108]],[[127292,127292],"mapped",[109]],[[127293,127293],"mapped",[110]],[[127294,127294],"mapped",[111]],[[127295,127295],"mapped",[112]],[[127296,127296],"mapped",[113]],[[127297,127297],"mapped",[114]],[[127298,127298],"mapped",[115]],[[127299,127299],"mapped",[116]],[[127300,127300],"mapped",[117]],[[127301,127301],"mapped",[118]],[[127302,127302],"mapped",[119]],[[127303,127303],"mapped",[120]],[[127304,127304],"mapped",[121]],[[127305,127305],"mapped",[122]],[[127306,127306],"mapped",[104,118]],[[127307,127307],"mapped",[109,118]],[[127308,127308],"mapped",[115,100]],[[127309,127309],"mapped",[115,115]],[[127310,127310],"mapped",[112,112,118]],[[127311,127311],"mapped",[119,99]],[[127312,127318],"valid",[],"NV8"],[[127319,127319],"valid",[],"NV8"],[[127320,127326],"valid",[],"NV8"],[[127327,127327],"valid",[],"NV8"],[[127328,127337],"valid",[],"NV8"],[[127338,127338],"mapped",[109,99]],[[127339,127339],"mapped",[109,100]],[[127340,127343],"disallowed"],[[127344,127352],"valid",[],"NV8"],[[127353,127353],"valid",[],"NV8"],[[127354,127354],"valid",[],"NV8"],[[127355,127356],"valid",[],"NV8"],[[127357,127358],"valid",[],"NV8"],[[127359,127359],"valid",[],"NV8"],[[127360,127369],"valid",[],"NV8"],[[127370,127373],"valid",[],"NV8"],[[127374,127375],"valid",[],"NV8"],[[127376,127376],"mapped",[100,106]],[[127377,127386],"valid",[],"NV8"],[[127387,127461],"disallowed"],[[127462,127487],"valid",[],"NV8"],[[127488,127488],"
mapped",[12411,12363]],[[127489,127489],"mapped",[12467,12467]],[[127490,127490],"mapped",[12469]],[[127491,127503],"disallowed"],[[127504,127504],"mapped",[25163]],[[127505,127505],"mapped",[23383]],[[127506,127506],"mapped",[21452]],[[127507,127507],"mapped",[12487]],[[127508,127508],"mapped",[20108]],[[127509,127509],"mapped",[22810]],[[127510,127510],"mapped",[35299]],[[127511,127511],"mapped",[22825]],[[127512,127512],"mapped",[20132]],[[127513,127513],"mapped",[26144]],[[127514,127514],"mapped",[28961]],[[127515,127515],"mapped",[26009]],[[127516,127516],"mapped",[21069]],[[127517,127517],"mapped",[24460]],[[127518,127518],"mapped",[20877]],[[127519,127519],"mapped",[26032]],[[127520,127520],"mapped",[21021]],[[127521,127521],"mapped",[32066]],[[127522,127522],"mapped",[29983]],[[127523,127523],"mapped",[36009]],[[127524,127524],"mapped",[22768]],[[127525,127525],"mapped",[21561]],[[127526,127526],"mapped",[28436]],[[127527,127527],"mapped",[25237]],[[127528,127528],"mapped",[25429]],[[127529,127529],"mapped",[19968]],[[127530,127530],"mapped",[19977]],[[127531,127531],"mapped",[36938]],[[127532,127532],"mapped",[24038]],[[127533,127533],"mapped",[20013]],[[127534,127534],"mapped",[21491]],[[127535,127535],"mapped",[25351]],[[127536,127536],"mapped",[36208]],[[127537,127537],"mapped",[25171]],[[127538,127538],"mapped",[31105]],[[127539,127539],"mapped",[31354]],[[127540,127540],"mapped",[21512]],[[127541,127541],"mapped",[28288]],[[127542,127542],"mapped",[26377]],[[127543,127543],"mapped",[26376]],[[127544,127544],"mapped",[30003]],[[127545,127545],"mapped",[21106]],[[127546,127546],"mapped",[21942]],[[127547,127551],"disallowed"],[[127552,127552],"mapped",[12308,26412,12309]],[[127553,127553],"mapped",[12308,19977,12309]],[[127554,127554],"mapped",[12308,20108,12309]],[[127555,127555],"mapped",[12308,23433,12309]],[[127556,127556],"mapped",[12308,28857,12309]],[[127557,127557],"mapped",[12308,25171,12309]],[[127558,127558],"mapped",[12308,30423,12309]],[[127559,127559],"mapped",[12308,21213,12309]],[[127560,127560],"mapped",[12308,25943,12309]],[[127561,127567],"disallowed"],[[127568,127568],"mapped",[24471]],[[127569,127569],"mapped",[21487]],[[127570,127743],"disallowed"],[[127744,127776],"valid",[],"NV8"],[[127777,127788],"valid",[],"NV8"],[[127789,127791],"valid",[],"NV8"],[[127792,127797],"valid",[],"NV8"],[[127798,127798],"valid",[],"NV8"],[[127799,127868],"valid",[],"NV8"],[[127869,127869],"valid",[],"NV8"],[[127870,127871],"valid",[],"NV8"],[[127872,127891],"valid",[],"NV8"],[[127892,127903],"valid",[],"NV8"],[[127904,127940],"valid",[],"NV8"],[[127941,127941],"valid",[],"NV8"],[[127942,127946],"valid",[],"NV8"],[[127947,127950],"valid",[],"NV8"],[[127951,127955],"valid",[],"NV8"],[[127956,127967],"valid",[],"NV8"],[[127968,127984],"valid",[],"NV8"],[[127985,127991],"valid",[],"NV8"],[[127992,127999],"valid",[],"NV8"],[[128000,128062],"valid",[],"NV8"],[[128063,128063],"valid",[],"NV8"],[[128064,128064],"valid",[],"NV8"],[[128065,128065],"valid",[],"NV8"],[[128066,128247],"valid",[],"NV8"],[[128248,128248],"valid",[],"NV8"],[[128249,128252],"valid",[],"NV8"],[[128253,128254],"valid",[],"NV8"],[[128255,128255],"valid",[],"NV8"],[[128256,128317],"valid",[],"NV8"],[[128318,128319],"valid",[],"NV8"],[[128320,128323],"valid",[],"NV8"],[[128324,128330],"valid",[],"NV8"],[[128331,128335],"valid",[],"NV8"],[[128336,128359],"valid",[],"NV8"],[[128360,128377],"valid",[],"NV8"],[[128378,128378],"disallowed"],[[128379,128419],"valid",[],"NV8"],[[128420,128420],"disallowed"],[[128421,128
506],"valid",[],"NV8"],[[128507,128511],"valid",[],"NV8"],[[128512,128512],"valid",[],"NV8"],[[128513,128528],"valid",[],"NV8"],[[128529,128529],"valid",[],"NV8"],[[128530,128532],"valid",[],"NV8"],[[128533,128533],"valid",[],"NV8"],[[128534,128534],"valid",[],"NV8"],[[128535,128535],"valid",[],"NV8"],[[128536,128536],"valid",[],"NV8"],[[128537,128537],"valid",[],"NV8"],[[128538,128538],"valid",[],"NV8"],[[128539,128539],"valid",[],"NV8"],[[128540,128542],"valid",[],"NV8"],[[128543,128543],"valid",[],"NV8"],[[128544,128549],"valid",[],"NV8"],[[128550,128551],"valid",[],"NV8"],[[128552,128555],"valid",[],"NV8"],[[128556,128556],"valid",[],"NV8"],[[128557,128557],"valid",[],"NV8"],[[128558,128559],"valid",[],"NV8"],[[128560,128563],"valid",[],"NV8"],[[128564,128564],"valid",[],"NV8"],[[128565,128576],"valid",[],"NV8"],[[128577,128578],"valid",[],"NV8"],[[128579,128580],"valid",[],"NV8"],[[128581,128591],"valid",[],"NV8"],[[128592,128639],"valid",[],"NV8"],[[128640,128709],"valid",[],"NV8"],[[128710,128719],"valid",[],"NV8"],[[128720,128720],"valid",[],"NV8"],[[128721,128735],"disallowed"],[[128736,128748],"valid",[],"NV8"],[[128749,128751],"disallowed"],[[128752,128755],"valid",[],"NV8"],[[128756,128767],"disallowed"],[[128768,128883],"valid",[],"NV8"],[[128884,128895],"disallowed"],[[128896,128980],"valid",[],"NV8"],[[128981,129023],"disallowed"],[[129024,129035],"valid",[],"NV8"],[[129036,129039],"disallowed"],[[129040,129095],"valid",[],"NV8"],[[129096,129103],"disallowed"],[[129104,129113],"valid",[],"NV8"],[[129114,129119],"disallowed"],[[129120,129159],"valid",[],"NV8"],[[129160,129167],"disallowed"],[[129168,129197],"valid",[],"NV8"],[[129198,129295],"disallowed"],[[129296,129304],"valid",[],"NV8"],[[129305,129407],"disallowed"],[[129408,129412],"valid",[],"NV8"],[[129413,129471],"disallowed"],[[129472,129472],"valid",[],"NV8"],[[129473,131069],"disallowed"],[[131070,131071],"disallowed"],[[131072,173782],"valid"],[[173783,173823],"disallowed"],[[173824,177972],"valid"],[[177973,177983],"disallowed"],[[177984,178205],"valid"],[[178206,178207],"disallowed"],[[178208,183969],"valid"],[[183970,194559],"disallowed"],[[194560,194560],"mapped",[20029]],[[194561,194561],"mapped",[20024]],[[194562,194562],"mapped",[20033]],[[194563,194563],"mapped",[131362]],[[194564,194564],"mapped",[20320]],[[194565,194565],"mapped",[20398]],[[194566,194566],"mapped",[20411]],[[194567,194567],"mapped",[20482]],[[194568,194568],"mapped",[20602]],[[194569,194569],"mapped",[20633]],[[194570,194570],"mapped",[20711]],[[194571,194571],"mapped",[20687]],[[194572,194572],"mapped",[13470]],[[194573,194573],"mapped",[132666]],[[194574,194574],"mapped",[20813]],[[194575,194575],"mapped",[20820]],[[194576,194576],"mapped",[20836]],[[194577,194577],"mapped",[20855]],[[194578,194578],"mapped",[132380]],[[194579,194579],"mapped",[13497]],[[194580,194580],"mapped",[20839]],[[194581,194581],"mapped",[20877]],[[194582,194582],"mapped",[132427]],[[194583,194583],"mapped",[20887]],[[194584,194584],"mapped",[20900]],[[194585,194585],"mapped",[20172]],[[194586,194586],"mapped",[20908]],[[194587,194587],"mapped",[20917]],[[194588,194588],"mapped",[168415]],[[194589,194589],"mapped",[20981]],[[194590,194590],"mapped",[20995]],[[194591,194591],"mapped",[13535]],[[194592,194592],"mapped",[21051]],[[194593,194593],"mapped",[21062]],[[194594,194594],"mapped",[21106]],[[194595,194595],"mapped",[21111]],[[194596,194596],"mapped",[13589]],[[194597,194597],"mapped",[21191]],[[194598,194598],"mapped",[21193]],[[194599,194599],"mapped",[212
20]],[[194600,194600],"mapped",[21242]],[[194601,194601],"mapped",[21253]],[[194602,194602],"mapped",[21254]],[[194603,194603],"mapped",[21271]],[[194604,194604],"mapped",[21321]],[[194605,194605],"mapped",[21329]],[[194606,194606],"mapped",[21338]],[[194607,194607],"mapped",[21363]],[[194608,194608],"mapped",[21373]],[[194609,194611],"mapped",[21375]],[[194612,194612],"mapped",[133676]],[[194613,194613],"mapped",[28784]],[[194614,194614],"mapped",[21450]],[[194615,194615],"mapped",[21471]],[[194616,194616],"mapped",[133987]],[[194617,194617],"mapped",[21483]],[[194618,194618],"mapped",[21489]],[[194619,194619],"mapped",[21510]],[[194620,194620],"mapped",[21662]],[[194621,194621],"mapped",[21560]],[[194622,194622],"mapped",[21576]],[[194623,194623],"mapped",[21608]],[[194624,194624],"mapped",[21666]],[[194625,194625],"mapped",[21750]],[[194626,194626],"mapped",[21776]],[[194627,194627],"mapped",[21843]],[[194628,194628],"mapped",[21859]],[[194629,194630],"mapped",[21892]],[[194631,194631],"mapped",[21913]],[[194632,194632],"mapped",[21931]],[[194633,194633],"mapped",[21939]],[[194634,194634],"mapped",[21954]],[[194635,194635],"mapped",[22294]],[[194636,194636],"mapped",[22022]],[[194637,194637],"mapped",[22295]],[[194638,194638],"mapped",[22097]],[[194639,194639],"mapped",[22132]],[[194640,194640],"mapped",[20999]],[[194641,194641],"mapped",[22766]],[[194642,194642],"mapped",[22478]],[[194643,194643],"mapped",[22516]],[[194644,194644],"mapped",[22541]],[[194645,194645],"mapped",[22411]],[[194646,194646],"mapped",[22578]],[[194647,194647],"mapped",[22577]],[[194648,194648],"mapped",[22700]],[[194649,194649],"mapped",[136420]],[[194650,194650],"mapped",[22770]],[[194651,194651],"mapped",[22775]],[[194652,194652],"mapped",[22790]],[[194653,194653],"mapped",[22810]],[[194654,194654],"mapped",[22818]],[[194655,194655],"mapped",[22882]],[[194656,194656],"mapped",[136872]],[[194657,194657],"mapped",[136938]],[[194658,194658],"mapped",[23020]],[[194659,194659],"mapped",[23067]],[[194660,194660],"mapped",[23079]],[[194661,194661],"mapped",[23000]],[[194662,194662],"mapped",[23142]],[[194663,194663],"mapped",[14062]],[[194664,194664],"disallowed"],[[194665,194665],"mapped",[23304]],[[194666,194667],"mapped",[23358]],[[194668,194668],"mapped",[137672]],[[194669,194669],"mapped",[23491]],[[194670,194670],"mapped",[23512]],[[194671,194671],"mapped",[23527]],[[194672,194672],"mapped",[23539]],[[194673,194673],"mapped",[138008]],[[194674,194674],"mapped",[23551]],[[194675,194675],"mapped",[23558]],[[194676,194676],"disallowed"],[[194677,194677],"mapped",[23586]],[[194678,194678],"mapped",[14209]],[[194679,194679],"mapped",[23648]],[[194680,194680],"mapped",[23662]],[[194681,194681],"mapped",[23744]],[[194682,194682],"mapped",[23693]],[[194683,194683],"mapped",[138724]],[[194684,194684],"mapped",[23875]],[[194685,194685],"mapped",[138726]],[[194686,194686],"mapped",[23918]],[[194687,194687],"mapped",[23915]],[[194688,194688],"mapped",[23932]],[[194689,194689],"mapped",[24033]],[[194690,194690],"mapped",[24034]],[[194691,194691],"mapped",[14383]],[[194692,194692],"mapped",[24061]],[[194693,194693],"mapped",[24104]],[[194694,194694],"mapped",[24125]],[[194695,194695],"mapped",[24169]],[[194696,194696],"mapped",[14434]],[[194697,194697],"mapped",[139651]],[[194698,194698],"mapped",[14460]],[[194699,194699],"mapped",[24240]],[[194700,194700],"mapped",[24243]],[[194701,194701],"mapped",[24246]],[[194702,194702],"mapped",[24266]],[[194703,194703],"mapped",[172946]],[[194704,194704],"mapped",[24318]],[[194705,194
706],"mapped",[140081]],[[194707,194707],"mapped",[33281]],[[194708,194709],"mapped",[24354]],[[194710,194710],"mapped",[14535]],[[194711,194711],"mapped",[144056]],[[194712,194712],"mapped",[156122]],[[194713,194713],"mapped",[24418]],[[194714,194714],"mapped",[24427]],[[194715,194715],"mapped",[14563]],[[194716,194716],"mapped",[24474]],[[194717,194717],"mapped",[24525]],[[194718,194718],"mapped",[24535]],[[194719,194719],"mapped",[24569]],[[194720,194720],"mapped",[24705]],[[194721,194721],"mapped",[14650]],[[194722,194722],"mapped",[14620]],[[194723,194723],"mapped",[24724]],[[194724,194724],"mapped",[141012]],[[194725,194725],"mapped",[24775]],[[194726,194726],"mapped",[24904]],[[194727,194727],"mapped",[24908]],[[194728,194728],"mapped",[24910]],[[194729,194729],"mapped",[24908]],[[194730,194730],"mapped",[24954]],[[194731,194731],"mapped",[24974]],[[194732,194732],"mapped",[25010]],[[194733,194733],"mapped",[24996]],[[194734,194734],"mapped",[25007]],[[194735,194735],"mapped",[25054]],[[194736,194736],"mapped",[25074]],[[194737,194737],"mapped",[25078]],[[194738,194738],"mapped",[25104]],[[194739,194739],"mapped",[25115]],[[194740,194740],"mapped",[25181]],[[194741,194741],"mapped",[25265]],[[194742,194742],"mapped",[25300]],[[194743,194743],"mapped",[25424]],[[194744,194744],"mapped",[142092]],[[194745,194745],"mapped",[25405]],[[194746,194746],"mapped",[25340]],[[194747,194747],"mapped",[25448]],[[194748,194748],"mapped",[25475]],[[194749,194749],"mapped",[25572]],[[194750,194750],"mapped",[142321]],[[194751,194751],"mapped",[25634]],[[194752,194752],"mapped",[25541]],[[194753,194753],"mapped",[25513]],[[194754,194754],"mapped",[14894]],[[194755,194755],"mapped",[25705]],[[194756,194756],"mapped",[25726]],[[194757,194757],"mapped",[25757]],[[194758,194758],"mapped",[25719]],[[194759,194759],"mapped",[14956]],[[194760,194760],"mapped",[25935]],[[194761,194761],"mapped",[25964]],[[194762,194762],"mapped",[143370]],[[194763,194763],"mapped",[26083]],[[194764,194764],"mapped",[26360]],[[194765,194765],"mapped",[26185]],[[194766,194766],"mapped",[15129]],[[194767,194767],"mapped",[26257]],[[194768,194768],"mapped",[15112]],[[194769,194769],"mapped",[15076]],[[194770,194770],"mapped",[20882]],[[194771,194771],"mapped",[20885]],[[194772,194772],"mapped",[26368]],[[194773,194773],"mapped",[26268]],[[194774,194774],"mapped",[32941]],[[194775,194775],"mapped",[17369]],[[194776,194776],"mapped",[26391]],[[194777,194777],"mapped",[26395]],[[194778,194778],"mapped",[26401]],[[194779,194779],"mapped",[26462]],[[194780,194780],"mapped",[26451]],[[194781,194781],"mapped",[144323]],[[194782,194782],"mapped",[15177]],[[194783,194783],"mapped",[26618]],[[194784,194784],"mapped",[26501]],[[194785,194785],"mapped",[26706]],[[194786,194786],"mapped",[26757]],[[194787,194787],"mapped",[144493]],[[194788,194788],"mapped",[26766]],[[194789,194789],"mapped",[26655]],[[194790,194790],"mapped",[26900]],[[194791,194791],"mapped",[15261]],[[194792,194792],"mapped",[26946]],[[194793,194793],"mapped",[27043]],[[194794,194794],"mapped",[27114]],[[194795,194795],"mapped",[27304]],[[194796,194796],"mapped",[145059]],[[194797,194797],"mapped",[27355]],[[194798,194798],"mapped",[15384]],[[194799,194799],"mapped",[27425]],[[194800,194800],"mapped",[145575]],[[194801,194801],"mapped",[27476]],[[194802,194802],"mapped",[15438]],[[194803,194803],"mapped",[27506]],[[194804,194804],"mapped",[27551]],[[194805,194805],"mapped",[27578]],[[194806,194806],"mapped",[27579]],[[194807,194807],"mapped",[146061]],[[194808,194808],"ma
pped",[138507]],[[194809,194809],"mapped",[146170]],[[194810,194810],"mapped",[27726]],[[194811,194811],"mapped",[146620]],[[194812,194812],"mapped",[27839]],[[194813,194813],"mapped",[27853]],[[194814,194814],"mapped",[27751]],[[194815,194815],"mapped",[27926]],[[194816,194816],"mapped",[27966]],[[194817,194817],"mapped",[28023]],[[194818,194818],"mapped",[27969]],[[194819,194819],"mapped",[28009]],[[194820,194820],"mapped",[28024]],[[194821,194821],"mapped",[28037]],[[194822,194822],"mapped",[146718]],[[194823,194823],"mapped",[27956]],[[194824,194824],"mapped",[28207]],[[194825,194825],"mapped",[28270]],[[194826,194826],"mapped",[15667]],[[194827,194827],"mapped",[28363]],[[194828,194828],"mapped",[28359]],[[194829,194829],"mapped",[147153]],[[194830,194830],"mapped",[28153]],[[194831,194831],"mapped",[28526]],[[194832,194832],"mapped",[147294]],[[194833,194833],"mapped",[147342]],[[194834,194834],"mapped",[28614]],[[194835,194835],"mapped",[28729]],[[194836,194836],"mapped",[28702]],[[194837,194837],"mapped",[28699]],[[194838,194838],"mapped",[15766]],[[194839,194839],"mapped",[28746]],[[194840,194840],"mapped",[28797]],[[194841,194841],"mapped",[28791]],[[194842,194842],"mapped",[28845]],[[194843,194843],"mapped",[132389]],[[194844,194844],"mapped",[28997]],[[194845,194845],"mapped",[148067]],[[194846,194846],"mapped",[29084]],[[194847,194847],"disallowed"],[[194848,194848],"mapped",[29224]],[[194849,194849],"mapped",[29237]],[[194850,194850],"mapped",[29264]],[[194851,194851],"mapped",[149000]],[[194852,194852],"mapped",[29312]],[[194853,194853],"mapped",[29333]],[[194854,194854],"mapped",[149301]],[[194855,194855],"mapped",[149524]],[[194856,194856],"mapped",[29562]],[[194857,194857],"mapped",[29579]],[[194858,194858],"mapped",[16044]],[[194859,194859],"mapped",[29605]],[[194860,194861],"mapped",[16056]],[[194862,194862],"mapped",[29767]],[[194863,194863],"mapped",[29788]],[[194864,194864],"mapped",[29809]],[[194865,194865],"mapped",[29829]],[[194866,194866],"mapped",[29898]],[[194867,194867],"mapped",[16155]],[[194868,194868],"mapped",[29988]],[[194869,194869],"mapped",[150582]],[[194870,194870],"mapped",[30014]],[[194871,194871],"mapped",[150674]],[[194872,194872],"mapped",[30064]],[[194873,194873],"mapped",[139679]],[[194874,194874],"mapped",[30224]],[[194875,194875],"mapped",[151457]],[[194876,194876],"mapped",[151480]],[[194877,194877],"mapped",[151620]],[[194878,194878],"mapped",[16380]],[[194879,194879],"mapped",[16392]],[[194880,194880],"mapped",[30452]],[[194881,194881],"mapped",[151795]],[[194882,194882],"mapped",[151794]],[[194883,194883],"mapped",[151833]],[[194884,194884],"mapped",[151859]],[[194885,194885],"mapped",[30494]],[[194886,194887],"mapped",[30495]],[[194888,194888],"mapped",[30538]],[[194889,194889],"mapped",[16441]],[[194890,194890],"mapped",[30603]],[[194891,194891],"mapped",[16454]],[[194892,194892],"mapped",[16534]],[[194893,194893],"mapped",[152605]],[[194894,194894],"mapped",[30798]],[[194895,194895],"mapped",[30860]],[[194896,194896],"mapped",[30924]],[[194897,194897],"mapped",[16611]],[[194898,194898],"mapped",[153126]],[[194899,194899],"mapped",[31062]],[[194900,194900],"mapped",[153242]],[[194901,194901],"mapped",[153285]],[[194902,194902],"mapped",[31119]],[[194903,194903],"mapped",[31211]],[[194904,194904],"mapped",[16687]],[[194905,194905],"mapped",[31296]],[[194906,194906],"mapped",[31306]],[[194907,194907],"mapped",[31311]],[[194908,194908],"mapped",[153980]],[[194909,194910],"mapped",[154279]],[[194911,194911],"disallowed"],[[194912,194912],"ma
pped",[16898]],[[194913,194913],"mapped",[154539]],[[194914,194914],"mapped",[31686]],[[194915,194915],"mapped",[31689]],[[194916,194916],"mapped",[16935]],[[194917,194917],"mapped",[154752]],[[194918,194918],"mapped",[31954]],[[194919,194919],"mapped",[17056]],[[194920,194920],"mapped",[31976]],[[194921,194921],"mapped",[31971]],[[194922,194922],"mapped",[32000]],[[194923,194923],"mapped",[155526]],[[194924,194924],"mapped",[32099]],[[194925,194925],"mapped",[17153]],[[194926,194926],"mapped",[32199]],[[194927,194927],"mapped",[32258]],[[194928,194928],"mapped",[32325]],[[194929,194929],"mapped",[17204]],[[194930,194930],"mapped",[156200]],[[194931,194931],"mapped",[156231]],[[194932,194932],"mapped",[17241]],[[194933,194933],"mapped",[156377]],[[194934,194934],"mapped",[32634]],[[194935,194935],"mapped",[156478]],[[194936,194936],"mapped",[32661]],[[194937,194937],"mapped",[32762]],[[194938,194938],"mapped",[32773]],[[194939,194939],"mapped",[156890]],[[194940,194940],"mapped",[156963]],[[194941,194941],"mapped",[32864]],[[194942,194942],"mapped",[157096]],[[194943,194943],"mapped",[32880]],[[194944,194944],"mapped",[144223]],[[194945,194945],"mapped",[17365]],[[194946,194946],"mapped",[32946]],[[194947,194947],"mapped",[33027]],[[194948,194948],"mapped",[17419]],[[194949,194949],"mapped",[33086]],[[194950,194950],"mapped",[23221]],[[194951,194951],"mapped",[157607]],[[194952,194952],"mapped",[157621]],[[194953,194953],"mapped",[144275]],[[194954,194954],"mapped",[144284]],[[194955,194955],"mapped",[33281]],[[194956,194956],"mapped",[33284]],[[194957,194957],"mapped",[36766]],[[194958,194958],"mapped",[17515]],[[194959,194959],"mapped",[33425]],[[194960,194960],"mapped",[33419]],[[194961,194961],"mapped",[33437]],[[194962,194962],"mapped",[21171]],[[194963,194963],"mapped",[33457]],[[194964,194964],"mapped",[33459]],[[194965,194965],"mapped",[33469]],[[194966,194966],"mapped",[33510]],[[194967,194967],"mapped",[158524]],[[194968,194968],"mapped",[33509]],[[194969,194969],"mapped",[33565]],[[194970,194970],"mapped",[33635]],[[194971,194971],"mapped",[33709]],[[194972,194972],"mapped",[33571]],[[194973,194973],"mapped",[33725]],[[194974,194974],"mapped",[33767]],[[194975,194975],"mapped",[33879]],[[194976,194976],"mapped",[33619]],[[194977,194977],"mapped",[33738]],[[194978,194978],"mapped",[33740]],[[194979,194979],"mapped",[33756]],[[194980,194980],"mapped",[158774]],[[194981,194981],"mapped",[159083]],[[194982,194982],"mapped",[158933]],[[194983,194983],"mapped",[17707]],[[194984,194984],"mapped",[34033]],[[194985,194985],"mapped",[34035]],[[194986,194986],"mapped",[34070]],[[194987,194987],"mapped",[160714]],[[194988,194988],"mapped",[34148]],[[194989,194989],"mapped",[159532]],[[194990,194990],"mapped",[17757]],[[194991,194991],"mapped",[17761]],[[194992,194992],"mapped",[159665]],[[194993,194993],"mapped",[159954]],[[194994,194994],"mapped",[17771]],[[194995,194995],"mapped",[34384]],[[194996,194996],"mapped",[34396]],[[194997,194997],"mapped",[34407]],[[194998,194998],"mapped",[34409]],[[194999,194999],"mapped",[34473]],[[195000,195000],"mapped",[34440]],[[195001,195001],"mapped",[34574]],[[195002,195002],"mapped",[34530]],[[195003,195003],"mapped",[34681]],[[195004,195004],"mapped",[34600]],[[195005,195005],"mapped",[34667]],[[195006,195006],"mapped",[34694]],[[195007,195007],"disallowed"],[[195008,195008],"mapped",[34785]],[[195009,195009],"mapped",[34817]],[[195010,195010],"mapped",[17913]],[[195011,195011],"mapped",[34912]],[[195012,195012],"mapped",[34915]],[[195013,195013],"map
ped",[161383]],[[195014,195014],"mapped",[35031]],[[195015,195015],"mapped",[35038]],[[195016,195016],"mapped",[17973]],[[195017,195017],"mapped",[35066]],[[195018,195018],"mapped",[13499]],[[195019,195019],"mapped",[161966]],[[195020,195020],"mapped",[162150]],[[195021,195021],"mapped",[18110]],[[195022,195022],"mapped",[18119]],[[195023,195023],"mapped",[35488]],[[195024,195024],"mapped",[35565]],[[195025,195025],"mapped",[35722]],[[195026,195026],"mapped",[35925]],[[195027,195027],"mapped",[162984]],[[195028,195028],"mapped",[36011]],[[195029,195029],"mapped",[36033]],[[195030,195030],"mapped",[36123]],[[195031,195031],"mapped",[36215]],[[195032,195032],"mapped",[163631]],[[195033,195033],"mapped",[133124]],[[195034,195034],"mapped",[36299]],[[195035,195035],"mapped",[36284]],[[195036,195036],"mapped",[36336]],[[195037,195037],"mapped",[133342]],[[195038,195038],"mapped",[36564]],[[195039,195039],"mapped",[36664]],[[195040,195040],"mapped",[165330]],[[195041,195041],"mapped",[165357]],[[195042,195042],"mapped",[37012]],[[195043,195043],"mapped",[37105]],[[195044,195044],"mapped",[37137]],[[195045,195045],"mapped",[165678]],[[195046,195046],"mapped",[37147]],[[195047,195047],"mapped",[37432]],[[195048,195048],"mapped",[37591]],[[195049,195049],"mapped",[37592]],[[195050,195050],"mapped",[37500]],[[195051,195051],"mapped",[37881]],[[195052,195052],"mapped",[37909]],[[195053,195053],"mapped",[166906]],[[195054,195054],"mapped",[38283]],[[195055,195055],"mapped",[18837]],[[195056,195056],"mapped",[38327]],[[195057,195057],"mapped",[167287]],[[195058,195058],"mapped",[18918]],[[195059,195059],"mapped",[38595]],[[195060,195060],"mapped",[23986]],[[195061,195061],"mapped",[38691]],[[195062,195062],"mapped",[168261]],[[195063,195063],"mapped",[168474]],[[195064,195064],"mapped",[19054]],[[195065,195065],"mapped",[19062]],[[195066,195066],"mapped",[38880]],[[195067,195067],"mapped",[168970]],[[195068,195068],"mapped",[19122]],[[195069,195069],"mapped",[169110]],[[195070,195071],"mapped",[38923]],[[195072,195072],"mapped",[38953]],[[195073,195073],"mapped",[169398]],[[195074,195074],"mapped",[39138]],[[195075,195075],"mapped",[19251]],[[195076,195076],"mapped",[39209]],[[195077,195077],"mapped",[39335]],[[195078,195078],"mapped",[39362]],[[195079,195079],"mapped",[39422]],[[195080,195080],"mapped",[19406]],[[195081,195081],"mapped",[170800]],[[195082,195082],"mapped",[39698]],[[195083,195083],"mapped",[40000]],[[195084,195084],"mapped",[40189]],[[195085,195085],"mapped",[19662]],[[195086,195086],"mapped",[19693]],[[195087,195087],"mapped",[40295]],[[195088,195088],"mapped",[172238]],[[195089,195089],"mapped",[19704]],[[195090,195090],"mapped",[172293]],[[195091,195091],"mapped",[172558]],[[195092,195092],"mapped",[172689]],[[195093,195093],"mapped",[40635]],[[195094,195094],"mapped",[19798]],[[195095,195095],"mapped",[40697]],[[195096,195096],"mapped",[40702]],[[195097,195097],"mapped",[40709]],[[195098,195098],"mapped",[40719]],[[195099,195099],"mapped",[40726]],[[195100,195100],"mapped",[40763]],[[195101,195101],"mapped",[173568]],[[195102,196605],"disallowed"],[[196606,196607],"disallowed"],[[196608,262141],"disallowed"],[[262142,262143],"disallowed"],[[262144,327677],"disallowed"],[[327678,327679],"disallowed"],[[327680,393213],"disallowed"],[[393214,393215],"disallowed"],[[393216,458749],"disallowed"],[[458750,458751],"disallowed"],[[458752,524285],"disallowed"],[[524286,524287],"disallowed"],[[524288,589821],"disallowed"],[[589822,589823],"disallowed"],[[589824,655357],"disallowed"],[[655358
,655359],"disallowed"],[[655360,720893],"disallowed"],[[720894,720895],"disallowed"],[[720896,786429],"disallowed"],[[786430,786431],"disallowed"],[[786432,851965],"disallowed"],[[851966,851967],"disallowed"],[[851968,917501],"disallowed"],[[917502,917503],"disallowed"],[[917504,917504],"disallowed"],[[917505,917505],"disallowed"],[[917506,917535],"disallowed"],[[917536,917631],"disallowed"],[[917632,917759],"disallowed"],[[917760,917999],"ignored"],[[918000,983037],"disallowed"],[[983038,983039],"disallowed"],[[983040,1048573],"disallowed"],[[1048574,1048575],"disallowed"],[[1048576,1114109],"disallowed"],[[1114110,1114111],"disallowed"]]'); - -/***/ }), - -/***/ 2357: -/***/ ((module) => { - -"use strict"; -module.exports = require("assert"); - -/***/ }), - -/***/ 7619: -/***/ ((module) => { - -"use strict"; -module.exports = require("constants"); - -/***/ }), - -/***/ 8614: -/***/ ((module) => { - -"use strict"; -module.exports = require("events"); - -/***/ }), - -/***/ 5747: -/***/ ((module) => { - -"use strict"; -module.exports = require("fs"); - -/***/ }), - -/***/ 8605: -/***/ ((module) => { - -"use strict"; -module.exports = require("http"); - -/***/ }), - -/***/ 7211: -/***/ ((module) => { - -"use strict"; -module.exports = require("https"); - -/***/ }), - -/***/ 1631: -/***/ ((module) => { - -"use strict"; -module.exports = require("net"); - -/***/ }), - -/***/ 2087: -/***/ ((module) => { - -"use strict"; -module.exports = require("os"); - -/***/ }), - -/***/ 5622: -/***/ ((module) => { - -"use strict"; -module.exports = require("path"); - -/***/ }), - -/***/ 4213: -/***/ ((module) => { - -"use strict"; -module.exports = require("punycode"); - -/***/ }), - -/***/ 2413: -/***/ ((module) => { - -"use strict"; -module.exports = require("stream"); - -/***/ }), - -/***/ 4016: -/***/ ((module) => { - -"use strict"; -module.exports = require("tls"); - -/***/ }), - -/***/ 8835: -/***/ ((module) => { - -"use strict"; -module.exports = require("url"); - -/***/ }), - -/***/ 1669: -/***/ ((module) => { - -"use strict"; -module.exports = require("util"); - -/***/ }), - -/***/ 8761: -/***/ ((module) => { - -"use strict"; -module.exports = require("zlib"); - -/***/ }) - -/******/ }); -/************************************************************************/ -/******/ // The module cache -/******/ var __webpack_module_cache__ = {}; -/******/ -/******/ // The require function -/******/ function __nccwpck_require__(moduleId) { -/******/ // Check if module is in cache -/******/ var cachedModule = __webpack_module_cache__[moduleId]; -/******/ if (cachedModule !== undefined) { -/******/ return cachedModule.exports; -/******/ } -/******/ // Create a new module (and put it into the cache) -/******/ var module = __webpack_module_cache__[moduleId] = { -/******/ // no module.id needed -/******/ // no module.loaded needed -/******/ exports: {} -/******/ }; -/******/ -/******/ // Execute the module function -/******/ var threw = true; -/******/ try { -/******/ __webpack_modules__[moduleId].call(module.exports, module, module.exports, __nccwpck_require__); -/******/ threw = false; -/******/ } finally { -/******/ if(threw) delete __webpack_module_cache__[moduleId]; -/******/ } -/******/ -/******/ // Return the exports of the module -/******/ return module.exports; -/******/ } -/******/ -/************************************************************************/ -/******/ /* webpack/runtime/compat */ -/******/ -/******/ if (typeof __nccwpck_require__ !== 'undefined') __nccwpck_require__.ab = __dirname + "/"; 
-/******/ -/************************************************************************/ -/******/ -/******/ // startup -/******/ // Load entry module and return exports -/******/ // This entry module is referenced by other modules so it can't be inlined -/******/ var __webpack_exports__ = __nccwpck_require__(3109); -/******/ module.exports = __webpack_exports__; -/******/ -/******/ })() -; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/.github/actions/verify-chart-changelog/dist/index.js.map b/.github/actions/verify-chart-changelog/dist/index.js.map deleted file mode 100644 index f5dfa936..00000000 --- a/.github/actions/verify-chart-changelog/dist/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","sources":["../webpack://verify-chart-version-bump/./lib/main.js","../webpack://verify-chart-version-bump/./lib/schemas.js","../webpack://verify-chart-version-bump/./node_modules/@actions/core/lib/command.js","../webpack://verify-chart-version-bump/./node_modules/@actions/core/lib/core.js","../webpack://verify-chart-version-bump/./node_modules/@actions/core/lib/file-command.js","../webpack://verify-chart-version-bump/./node_modules/@actions/core/lib/oidc-utils.js","../webpack://verify-chart-version-bump/./node_modules/@actions/core/lib/path-utils.js","../webpack://verify-chart-version-bump/./node_modules/@actions/core/lib/summary.js","../webpack://verify-chart-version-bump/./node_modules/@actions/core/lib/utils.js","../webpack://verify-chart-version-bump/./node_modules/@actions/github/lib/context.js","../webpack://verify-chart-version-bump/./node_modules/@actions/github/lib/github.js","../webpack://verify-chart-version-bump/./node_modules/@actions/github/lib/internal/utils.js","../webpack://verify-chart-version-bump/./node_modules/@actions/github/lib/utils.js","../webpack://verify-chart-version-bump/./node_modules/@actions/http-client/lib/auth.js","../webpack://verify-chart-version-bump/./node_modules/@actions/http-client/lib/index.js","../webpack://verify-chart-version-bump/./node_modules/@actions/http-client/lib/proxy.js","../webpack://verify-chart-version-bump/./node_modules/@octokit/auth-token/dist-node/index.js","../webpack://verify-chart-version-bump/./node_modules/@octokit/core/dist-node/index.js","../webpack://verify-chart-version-bump/./node_modules/@octokit/endpoint/dist-node/index.js","../webpack://verify-chart-version-bump/./node_modules/@octokit/graphql/dist-node/index.js","../webpack://verify-chart-version-bump/./node_modules/@octokit/plugin-paginate-rest/dist-node/index.js","../webpack://verify-chart-version-bump/./node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js","../webpack://verify-chart-version-bump/./node_modules/@octokit/request-error/dist-node/index.js","../webpack://verify-chart-version-bump/./node_modules/@octokit/request/dist-node/index.js","../webpack://verify-chart-version-bump/./node_modules/ajv-formats/dist/formats.js","../webpack://verify-chart-version-bump/./node_modules/ajv-formats/dist/index.js","../webpack://verify-chart-version-bump/./node_modules/ajv-formats/dist/limit.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/ajv.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/compile/codegen/code.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/compile/codegen/index.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/compile/codegen/scope.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/compile/errors.js","../
webpack://verify-chart-version-bump/./node_modules/ajv/dist/compile/index.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/compile/names.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/compile/ref_error.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/compile/resolve.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/compile/rules.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/compile/util.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/compile/validate/applicability.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/compile/validate/boolSchema.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/compile/validate/dataType.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/compile/validate/defaults.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/compile/validate/index.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/compile/validate/keyword.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/compile/validate/subschema.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/core.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/runtime/equal.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/runtime/ucs2length.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/runtime/uri.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/runtime/validation_error.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/vocabularies/applicator/additionalItems.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/vocabularies/applicator/additionalProperties.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/vocabularies/applicator/allOf.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/vocabularies/applicator/anyOf.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/vocabularies/applicator/contains.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/vocabularies/applicator/dependencies.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/vocabularies/applicator/if.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/vocabularies/applicator/index.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/vocabularies/applicator/items.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/vocabularies/applicator/items2020.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/vocabularies/applicator/not.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/vocabularies/applicator/oneOf.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/vocabularies/applicator/patternProperties.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/vocabularies/applicator/prefixItems.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/vocabularies/applicator/properties.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/vocabularies/applicator/propertyNames.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/vocabularies/applicator/thenElse.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/vocabularies/code.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/vocabularies/core/id.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/vocabularies
/core/index.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/vocabularies/core/ref.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/vocabularies/discriminator/index.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/vocabularies/discriminator/types.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/vocabularies/draft7.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/vocabularies/format/format.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/vocabularies/format/index.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/vocabularies/metadata.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/vocabularies/validation/const.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/vocabularies/validation/enum.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/vocabularies/validation/index.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/vocabularies/validation/limitItems.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/vocabularies/validation/limitLength.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/vocabularies/validation/limitNumber.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/vocabularies/validation/limitProperties.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/vocabularies/validation/multipleOf.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/vocabularies/validation/pattern.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/vocabularies/validation/required.js","../webpack://verify-chart-version-bump/./node_modules/ajv/dist/vocabularies/validation/uniqueItems.js","../webpack://verify-chart-version-bump/./node_modules/before-after-hook/index.js","../webpack://verify-chart-version-bump/./node_modules/before-after-hook/lib/add.js","../webpack://verify-chart-version-bump/./node_modules/before-after-hook/lib/register.js","../webpack://verify-chart-version-bump/./node_modules/before-after-hook/lib/remove.js","../webpack://verify-chart-version-bump/./node_modules/deprecation/dist-node/index.js","../webpack://verify-chart-version-bump/./node_modules/fast-deep-equal/index.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/copy/copy-sync.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/copy/copy.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/copy/index.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/empty/index.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/ensure/file.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/ensure/index.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/ensure/link.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/ensure/symlink-paths.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/ensure/symlink-type.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/ensure/symlink.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/fs/index.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/index.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/json/index.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/json/jsonfile.js","../webpack://verify-chart-version-bump/./node_modules/fs
-extra/lib/json/output-json-sync.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/json/output-json.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/mkdirs/index.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/mkdirs/make-dir.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/mkdirs/utils.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/move/index.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/move/move-sync.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/move/move.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/output-file/index.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/path-exists/index.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/remove/index.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/remove/rimraf.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/util/stat.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/util/utimes.js","../webpack://verify-chart-version-bump/./node_modules/graceful-fs/clone.js","../webpack://verify-chart-version-bump/./node_modules/graceful-fs/graceful-fs.js","../webpack://verify-chart-version-bump/./node_modules/graceful-fs/legacy-streams.js","../webpack://verify-chart-version-bump/./node_modules/graceful-fs/polyfills.js","../webpack://verify-chart-version-bump/./node_modules/is-plain-object/dist/is-plain-object.js","../webpack://verify-chart-version-bump/./node_modules/json-schema-traverse/index.js","../webpack://verify-chart-version-bump/./node_modules/jsonfile/index.js","../webpack://verify-chart-version-bump/./node_modules/jsonfile/utils.js","../webpack://verify-chart-version-bump/./node_modules/node-fetch/lib/index.js","../webpack://verify-chart-version-bump/./node_modules/once/once.js","../webpack://verify-chart-version-bump/./node_modules/tr46/index.js","../webpack://verify-chart-version-bump/./node_modules/tunnel/index.js","../webpack://verify-chart-version-bump/./node_modules/tunnel/lib/tunnel.js","../webpack://verify-chart-version-bump/./node_modules/universal-user-agent/dist-node/index.js","../webpack://verify-chart-version-bump/./node_modules/universalify/index.js","../webpack://verify-chart-version-bump/./node_modules/uri-js/dist/es5/uri.all.js","../webpack://verify-chart-version-bump/./node_modules/webidl-conversions/lib/index.js","../webpack://verify-chart-version-bump/./node_modules/whatwg-url/lib/URL-impl.js","../webpack://verify-chart-version-bump/./node_modules/whatwg-url/lib/URL.js","../webpack://verify-chart-version-bump/./node_modules/whatwg-url/lib/public-api.js","../webpack://verify-chart-version-bump/./node_modules/whatwg-url/lib/url-state-machine.js","../webpack://verify-chart-version-bump/./node_modules/whatwg-url/lib/utils.js","../webpack://verify-chart-version-bump/./node_modules/wrappy/wrappy.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/compose-collection.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/compose-doc.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/compose-node.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/compose-scalar.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/composer.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/resolve-block-m
ap.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/resolve-block-scalar.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/resolve-block-seq.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/resolve-end.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/resolve-flow-collection.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/resolve-flow-scalar.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/resolve-props.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/util-contains-newline.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/util-empty-scalar-position.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/util-flow-indent-check.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/util-map-includes.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/doc/Document.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/doc/anchors.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/doc/applyReviver.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/doc/createNode.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/doc/directives.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/errors.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/index.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/log.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/nodes/Alias.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/nodes/Collection.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/nodes/Node.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/nodes/Pair.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/nodes/Scalar.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/nodes/YAMLMap.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/nodes/YAMLSeq.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/nodes/addPairToJSMap.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/nodes/toJS.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/parse/cst-scalar.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/parse/cst-stringify.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/parse/cst-visit.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/parse/cst.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/parse/lexer.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/parse/line-counter.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/parse/parser.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/public-api.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/Schema.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/common/map.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/common/null.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/common/seq.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/common/string.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/core/bool.js",".
./webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/core/float.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/core/int.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/core/schema.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/json/schema.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/tags.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/yaml-1.1/binary.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/yaml-1.1/bool.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/yaml-1.1/float.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/yaml-1.1/int.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/yaml-1.1/omap.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/yaml-1.1/pairs.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/yaml-1.1/schema.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/yaml-1.1/set.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/yaml-1.1/timestamp.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/stringify/foldFlowLines.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/stringify/stringify.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/stringify/stringifyCollection.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/stringify/stringifyComment.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/stringify/stringifyDocument.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/stringify/stringifyNumber.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/stringify/stringifyPair.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/stringify/stringifyString.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/visit.js","../webpack://verify-chart-version-bump/./node_modules/@vercel/ncc/dist/ncc/@@notfound.js","../webpack://verify-chart-version-bump/external \"assert\"","../webpack://verify-chart-version-bump/external \"constants\"","../webpack://verify-chart-version-bump/external \"events\"","../webpack://verify-chart-version-bump/external \"fs\"","../webpack://verify-chart-version-bump/external \"http\"","../webpack://verify-chart-version-bump/external \"https\"","../webpack://verify-chart-version-bump/external \"net\"","../webpack://verify-chart-version-bump/external \"os\"","../webpack://verify-chart-version-bump/external \"path\"","../webpack://verify-chart-version-bump/external \"punycode\"","../webpack://verify-chart-version-bump/external \"stream\"","../webpack://verify-chart-version-bump/external \"tls\"","../webpack://verify-chart-version-bump/external \"url\"","../webpack://verify-chart-version-bump/external \"util\"","../webpack://verify-chart-version-bump/external \"zlib\"","../webpack://verify-chart-version-bump/webpack/bootstrap","../webpack://verify-chart-version-bump/webpack/runtime/compat","../webpack://verify-chart-version-bump/webpack/startup"],"sourcesContent":["\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst core = __importStar(require(\"@actions/core\"));\nconst github = __importStar(require(\"@actions/github\"));\nconst YAML = __importStar(require(\"yaml\"));\nconst schemas_1 = require(\"./schemas\");\nconst fs = __importStar(require(\"fs-extra\"));\nfunction getErrorMessage(error) {\n if (error instanceof Error)\n return error.message;\n return String(error);\n}\nfunction getChangelogFromYaml(chartYaml) {\n const changelogAnnotation = \"artifacthub.io/changes\";\n if (chartYaml.annotations) {\n if (chartYaml.annotations[changelogAnnotation]) {\n return chartYaml.annotations[changelogAnnotation];\n }\n }\n return undefined;\n}\nfunction getChartYamlFromRepo(path, ref, token) {\n return __awaiter(this, void 0, void 0, function* () {\n let result = {};\n const octokit = github.getOctokit(token);\n const chartYamlFile = yield octokit.rest.repos.getContent({\n owner: github.context.repo.owner,\n repo: github.context.repo.repo,\n path: `${path}`,\n ref: ref,\n });\n if (chartYamlFile && \"content\" in chartYamlFile.data) {\n const originalChartYamlContent = Buffer.from(chartYamlFile.data.content, \"base64\").toString(\"utf-8\");\n result = yield YAML.parse(originalChartYamlContent);\n }\n return result;\n });\n}\nfunction getChartYamlFromFile(path) {\n return __awaiter(this, void 0, void 0, function* () {\n const chartYamlFile = yield fs.readFile(path, \"utf8\");\n return YAML.parse(chartYamlFile);\n });\n}\nfunction checkRefExists(ref, token) {\n return __awaiter(this, void 0, void 0, function* () {\n const octokit = github.getOctokit(token);\n try {\n yield octokit.rest.git.getRef({\n owner: github.context.repo.owner,\n repo: github.context.repo.repo,\n ref: ref,\n });\n }\n catch (error) {\n core.setFailed(`Ref ${ref} was not found for this repository!`);\n return false;\n }\n return true;\n });\n}\nfunction run() {\n var _a;\n return __awaiter(this, void 0, void 0, function* () {\n try {\n if (github.context.eventName !== \"pull_request\") {\n core.setFailed(\"This action can only run on pull 
requests!\");\n return;\n }\n // Gather inputs\n const githubToken = core.getInput(\"token\");\n const chart = core.getInput(\"chart\", { required: true });\n const base = core.getInput(\"base\", { required: false });\n // Verify the base ref exists\n if (base && !(yield checkRefExists(base, githubToken))) {\n core.setFailed(`Ref ${base} was not found for this repository!`);\n return;\n }\n // Determine the default branch\n const defaultBranch = (_a = github.context.payload.repository) === null || _a === void 0 ? void 0 : _a.default_branch;\n // Validate the chart\n const chartYamlPath = `${chart}/Chart.yaml`;\n if (!(yield fs.pathExists(chartYamlPath))) {\n core.setFailed(`${chart} is not a valid Helm chart folder!`);\n return;\n }\n var originalChartYaml;\n var originalChartChangelog;\n try {\n originalChartYaml = yield getChartYamlFromRepo(chartYamlPath, base || `heads/${defaultBranch}`, githubToken);\n originalChartChangelog = getChangelogFromYaml(originalChartYaml);\n }\n catch (error) {\n core.warning(`Could not find original Chart.yaml for ${chart}, assuming this is a new chart.`);\n }\n const updatedChartYaml = yield getChartYamlFromFile(chartYamlPath);\n const updatedChartChangelog = getChangelogFromYaml(updatedChartYaml);\n if (!updatedChartChangelog) {\n core.setFailed(`${chartYamlPath} does not contain a changelog!`);\n return;\n }\n // Check if the changelog was updated\n if (originalChartChangelog) {\n if (updatedChartChangelog == originalChartChangelog) {\n core.setFailed(`Chart changelog has not been updated!`);\n return;\n }\n }\n // Validate the changelog entries\n const changelogEntries = YAML.parse(updatedChartChangelog);\n changelogEntries.forEach((entry) => {\n var _a;\n const validator = (0, schemas_1.validateAgainstJsonSchema)(entry, schemas_1.changelogEntrySchema);\n if (!validator.valid) {\n (_a = validator.errors) === null || _a === void 0 ? void 0 : _a.forEach((error) => {\n core.setFailed(`${chart} changelog validation failed: ${JSON.stringify(error)}`);\n });\n }\n });\n }\n catch (error) {\n core.setFailed(getErrorMessage(error));\n }\n });\n}\nrun();\n","\"use strict\";\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? 
mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.validateAgainstJsonSchema = exports.changelogEntrySchema = void 0;\nconst ajv_1 = __importDefault(require(\"ajv\"));\nconst ajv_formats_1 = __importDefault(require(\"ajv-formats\"));\nexports.changelogEntrySchema = {\n type: \"object\",\n properties: {\n kind: {\n type: \"string\",\n enum: [\"added\", \"changed\", \"deprecated\", \"removed\", \"fixed\", \"security\"],\n },\n description: { type: \"string\" },\n links: {\n type: \"array\",\n items: {\n type: \"object\",\n properties: {\n name: { type: \"string\" },\n url: { $ref: \"#/definitions/saneUrl\" },\n },\n required: [\"name\", \"url\"],\n additionalProperties: false,\n },\n },\n },\n required: [\"kind\", \"description\"],\n additionalProperties: false,\n definitions: {\n saneUrl: {\n type: \"string\",\n format: \"uri\",\n pattern: \"^https?://\",\n },\n },\n};\nfunction validateAgainstJsonSchema(object, schema) {\n const ajv = new ajv_1.default();\n (0, ajv_formats_1.default)(ajv);\n const validator = ajv.compile(schema);\n return {\n valid: validator(object),\n errors: validator.errors,\n };\n}\nexports.validateAgainstJsonSchema = validateAgainstJsonSchema;\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.issue = exports.issueCommand = void 0;\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\n/**\n * Commands\n *\n * Command Format:\n * ::name key=value,key=value::message\n *\n * Examples:\n * ::warning::This is the message\n * ::set-env name=MY_VAR::some value\n */\nfunction issueCommand(command, properties, message) {\n const cmd = new Command(command, properties, message);\n process.stdout.write(cmd.toString() + os.EOL);\n}\nexports.issueCommand = issueCommand;\nfunction issue(name, message = '') {\n issueCommand(name, {}, message);\n}\nexports.issue = issue;\nconst CMD_STRING = '::';\nclass Command {\n constructor(command, properties, message) {\n if (!command) {\n command = 'missing.command';\n }\n this.command = command;\n this.properties = properties;\n this.message = message;\n }\n toString() {\n let cmdStr = CMD_STRING + this.command;\n if (this.properties && Object.keys(this.properties).length > 0) {\n cmdStr += ' ';\n let first = true;\n for (const key in this.properties) {\n if (this.properties.hasOwnProperty(key)) {\n const val = this.properties[key];\n if (val) {\n if (first) {\n first = false;\n }\n else {\n cmdStr += ',';\n }\n cmdStr += `${key}=${escapeProperty(val)}`;\n }\n }\n }\n }\n cmdStr += `${CMD_STRING}${escapeData(this.message)}`;\n return cmdStr;\n }\n}\nfunction escapeData(s) {\n return 
utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A');\n}\nfunction escapeProperty(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A')\n .replace(/:/g, '%3A')\n .replace(/,/g, '%2C');\n}\n//# sourceMappingURL=command.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;\nconst command_1 = require(\"./command\");\nconst file_command_1 = require(\"./file-command\");\nconst utils_1 = require(\"./utils\");\nconst os = __importStar(require(\"os\"));\nconst path = __importStar(require(\"path\"));\nconst oidc_utils_1 = require(\"./oidc-utils\");\n/**\n * The code to exit an action\n */\nvar ExitCode;\n(function (ExitCode) {\n /**\n * A code indicating that the action was successful\n */\n ExitCode[ExitCode[\"Success\"] = 0] = \"Success\";\n /**\n * A code indicating that the action was a failure\n */\n ExitCode[ExitCode[\"Failure\"] = 1] = \"Failure\";\n})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));\n//-----------------------------------------------------------------------\n// Variables\n//-----------------------------------------------------------------------\n/**\n * Sets env variable for this action and future actions in the job\n * @param name the name of the variable to set\n * @param val the value of the variable. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction exportVariable(name, val) {\n const convertedVal = utils_1.toCommandValue(val);\n process.env[name] = convertedVal;\n const filePath = process.env['GITHUB_ENV'] || '';\n if (filePath) {\n const delimiter = '_GitHubActionsFileCommandDelimeter_';\n const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`;\n file_command_1.issueCommand('ENV', commandValue);\n }\n else {\n command_1.issueCommand('set-env', { name }, convertedVal);\n }\n}\nexports.exportVariable = exportVariable;\n/**\n * Registers a secret which will get masked from logs\n * @param secret value of the secret\n */\nfunction setSecret(secret) {\n command_1.issueCommand('add-mask', {}, secret);\n}\nexports.setSecret = setSecret;\n/**\n * Prepends inputPath to the PATH (for this action and future actions)\n * @param inputPath\n */\nfunction addPath(inputPath) {\n const filePath = process.env['GITHUB_PATH'] || '';\n if (filePath) {\n file_command_1.issueCommand('PATH', inputPath);\n }\n else {\n command_1.issueCommand('add-path', {}, inputPath);\n }\n process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;\n}\nexports.addPath = addPath;\n/**\n * Gets the value of an input.\n * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed.\n * Returns an empty string if the value is not defined.\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns string\n */\nfunction getInput(name, options) {\n const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';\n if (options && options.required && !val) {\n throw new Error(`Input required and not supplied: ${name}`);\n }\n if (options && options.trimWhitespace === false) {\n return val;\n }\n return val.trim();\n}\nexports.getInput = getInput;\n/**\n * Gets the values of an multiline input. Each value is also trimmed.\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns string[]\n *\n */\nfunction getMultilineInput(name, options) {\n const inputs = getInput(name, options)\n .split('\\n')\n .filter(x => x !== '');\n return inputs;\n}\nexports.getMultilineInput = getMultilineInput;\n/**\n * Gets the input value of the boolean type in the YAML 1.2 \"core schema\" specification.\n * Support boolean input list: `true | True | TRUE | false | False | FALSE` .\n * The return value is also in boolean type.\n * ref: https://yaml.org/spec/1.2/spec.html#id2804923\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns boolean\n */\nfunction getBooleanInput(name, options) {\n const trueValue = ['true', 'True', 'TRUE'];\n const falseValue = ['false', 'False', 'FALSE'];\n const val = getInput(name, options);\n if (trueValue.includes(val))\n return true;\n if (falseValue.includes(val))\n return false;\n throw new TypeError(`Input does not meet YAML 1.2 \"Core Schema\" specification: ${name}\\n` +\n `Support boolean input list: \\`true | True | TRUE | false | False | FALSE\\``);\n}\nexports.getBooleanInput = getBooleanInput;\n/**\n * Sets the value of an output.\n *\n * @param name name of the output to set\n * @param value value to store. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction setOutput(name, value) {\n process.stdout.write(os.EOL);\n command_1.issueCommand('set-output', { name }, value);\n}\nexports.setOutput = setOutput;\n/**\n * Enables or disables the echoing of commands into stdout for the rest of the step.\n * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.\n *\n */\nfunction setCommandEcho(enabled) {\n command_1.issue('echo', enabled ? 'on' : 'off');\n}\nexports.setCommandEcho = setCommandEcho;\n//-----------------------------------------------------------------------\n// Results\n//-----------------------------------------------------------------------\n/**\n * Sets the action status to failed.\n * When the action exits it will be with an exit code of 1\n * @param message add error issue message\n */\nfunction setFailed(message) {\n process.exitCode = ExitCode.Failure;\n error(message);\n}\nexports.setFailed = setFailed;\n//-----------------------------------------------------------------------\n// Logging Commands\n//-----------------------------------------------------------------------\n/**\n * Gets whether Actions Step Debug is on or not\n */\nfunction isDebug() {\n return process.env['RUNNER_DEBUG'] === '1';\n}\nexports.isDebug = isDebug;\n/**\n * Writes debug message to user log\n * @param message debug message\n */\nfunction debug(message) {\n command_1.issueCommand('debug', {}, message);\n}\nexports.debug = debug;\n/**\n * Adds an error issue\n * @param message error issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction error(message, properties = {}) {\n command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);\n}\nexports.error = error;\n/**\n * Adds a warning issue\n * @param message warning issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction warning(message, properties = {}) {\n command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);\n}\nexports.warning = warning;\n/**\n * Adds a notice issue\n * @param message notice issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction notice(message, properties = {}) {\n command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? 
message.toString() : message);\n}\nexports.notice = notice;\n/**\n * Writes info to log with console.log.\n * @param message info message\n */\nfunction info(message) {\n process.stdout.write(message + os.EOL);\n}\nexports.info = info;\n/**\n * Begin an output group.\n *\n * Output until the next `groupEnd` will be foldable in this group\n *\n * @param name The name of the output group\n */\nfunction startGroup(name) {\n command_1.issue('group', name);\n}\nexports.startGroup = startGroup;\n/**\n * End an output group.\n */\nfunction endGroup() {\n command_1.issue('endgroup');\n}\nexports.endGroup = endGroup;\n/**\n * Wrap an asynchronous function call in a group.\n *\n * Returns the same type as the function itself.\n *\n * @param name The name of the group\n * @param fn The function to wrap in the group\n */\nfunction group(name, fn) {\n return __awaiter(this, void 0, void 0, function* () {\n startGroup(name);\n let result;\n try {\n result = yield fn();\n }\n finally {\n endGroup();\n }\n return result;\n });\n}\nexports.group = group;\n//-----------------------------------------------------------------------\n// Wrapper action state\n//-----------------------------------------------------------------------\n/**\n * Saves state for current action, the state can only be retrieved by this action's post job execution.\n *\n * @param name name of the state to store\n * @param value value to store. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction saveState(name, value) {\n command_1.issueCommand('save-state', { name }, value);\n}\nexports.saveState = saveState;\n/**\n * Gets the value of an state set by this action's main execution.\n *\n * @param name name of the state to get\n * @returns string\n */\nfunction getState(name) {\n return process.env[`STATE_${name}`] || '';\n}\nexports.getState = getState;\nfunction getIDToken(aud) {\n return __awaiter(this, void 0, void 0, function* () {\n return yield oidc_utils_1.OidcClient.getIDToken(aud);\n });\n}\nexports.getIDToken = getIDToken;\n/**\n * Summary exports\n */\nvar summary_1 = require(\"./summary\");\nObject.defineProperty(exports, \"summary\", { enumerable: true, get: function () { return summary_1.summary; } });\n/**\n * @deprecated use core.summary\n */\nvar summary_2 = require(\"./summary\");\nObject.defineProperty(exports, \"markdownSummary\", { enumerable: true, get: function () { return summary_2.markdownSummary; } });\n/**\n * Path exports\n */\nvar path_utils_1 = require(\"./path-utils\");\nObject.defineProperty(exports, \"toPosixPath\", { enumerable: true, get: function () { return path_utils_1.toPosixPath; } });\nObject.defineProperty(exports, \"toWin32Path\", { enumerable: true, get: function () { return path_utils_1.toWin32Path; } });\nObject.defineProperty(exports, \"toPlatformPath\", { enumerable: true, get: function () { return path_utils_1.toPlatformPath; } });\n//# sourceMappingURL=core.js.map","\"use strict\";\n// For internal use, subject to change.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.issueCommand = void 0;\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nconst fs = __importStar(require(\"fs\"));\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\nfunction issueCommand(command, message) {\n const filePath = process.env[`GITHUB_${command}`];\n if (!filePath) {\n throw new Error(`Unable to find environment variable for file command ${command}`);\n }\n if (!fs.existsSync(filePath)) {\n throw new Error(`Missing file at path: ${filePath}`);\n }\n fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {\n encoding: 'utf8'\n });\n}\nexports.issueCommand = issueCommand;\n//# sourceMappingURL=file-command.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.OidcClient = void 0;\nconst http_client_1 = require(\"@actions/http-client\");\nconst auth_1 = require(\"@actions/http-client/lib/auth\");\nconst core_1 = require(\"./core\");\nclass OidcClient {\n static createHttpClient(allowRetry = true, maxRetry = 10) {\n const requestOptions = {\n allowRetries: allowRetry,\n maxRetries: maxRetry\n };\n return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions);\n }\n static getRequestToken() {\n const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN'];\n if (!token) {\n throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable');\n }\n return token;\n }\n static getIDTokenUrl() {\n const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL'];\n if (!runtimeUrl) {\n throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable');\n }\n return runtimeUrl;\n }\n static getCall(id_token_url) {\n var _a;\n return __awaiter(this, void 0, void 0, function* () {\n const httpclient = OidcClient.createHttpClient();\n const res = yield httpclient\n .getJson(id_token_url)\n .catch(error => {\n throw new Error(`Failed to get ID Token. \\n \n Error Code : ${error.statusCode}\\n \n Error Message: ${error.result.message}`);\n });\n const id_token = (_a = res.result) === null || _a === void 0 ? 
void 0 : _a.value;\n if (!id_token) {\n throw new Error('Response json body do not have ID Token field');\n }\n return id_token;\n });\n }\n static getIDToken(audience) {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n // New ID Token is requested from action service\n let id_token_url = OidcClient.getIDTokenUrl();\n if (audience) {\n const encodedAudience = encodeURIComponent(audience);\n id_token_url = `${id_token_url}&audience=${encodedAudience}`;\n }\n core_1.debug(`ID token url is ${id_token_url}`);\n const id_token = yield OidcClient.getCall(id_token_url);\n core_1.setSecret(id_token);\n return id_token;\n }\n catch (error) {\n throw new Error(`Error message: ${error.message}`);\n }\n });\n }\n}\nexports.OidcClient = OidcClient;\n//# sourceMappingURL=oidc-utils.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0;\nconst path = __importStar(require(\"path\"));\n/**\n * toPosixPath converts the given path to the posix form. On Windows, \\\\ will be\n * replaced with /.\n *\n * @param pth. Path to transform.\n * @return string Posix path.\n */\nfunction toPosixPath(pth) {\n return pth.replace(/[\\\\]/g, '/');\n}\nexports.toPosixPath = toPosixPath;\n/**\n * toWin32Path converts the given path to the win32 form. On Linux, / will be\n * replaced with \\\\.\n *\n * @param pth. Path to transform.\n * @return string Win32 path.\n */\nfunction toWin32Path(pth) {\n return pth.replace(/[/]/g, '\\\\');\n}\nexports.toWin32Path = toWin32Path;\n/**\n * toPlatformPath converts the given path to a platform-specific path. It does\n * this by replacing instances of / and \\ with the platform-specific path\n * separator.\n *\n * @param pth The path to platformize.\n * @return string The platform-specific path.\n */\nfunction toPlatformPath(pth) {\n return pth.replace(/[/\\\\]/g, path.sep);\n}\nexports.toPlatformPath = toPlatformPath;\n//# sourceMappingURL=path-utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0;\nconst os_1 = require(\"os\");\nconst fs_1 = require(\"fs\");\nconst { access, appendFile, writeFile } = fs_1.promises;\nexports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY';\nexports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary';\nclass Summary {\n constructor() {\n this._buffer = '';\n }\n /**\n * Finds the summary file path from the environment, rejects if env var is not found or file does not exist\n * Also checks r/w permissions.\n *\n * @returns step summary file path\n */\n filePath() {\n return __awaiter(this, void 0, void 0, function* () {\n if (this._filePath) {\n return this._filePath;\n }\n const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR];\n if (!pathFromEnv) {\n throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);\n }\n try {\n yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);\n }\n catch (_a) {\n throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);\n }\n this._filePath = pathFromEnv;\n return this._filePath;\n });\n }\n /**\n * Wraps content in an HTML tag, adding any HTML attributes\n *\n * @param {string} tag HTML tag to wrap\n * @param {string | null} content content within the tag\n * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add\n *\n * @returns {string} content wrapped in HTML element\n */\n wrap(tag, content, attrs = {}) {\n const htmlAttrs = Object.entries(attrs)\n .map(([key, value]) => ` ${key}=\"${value}\"`)\n .join('');\n if (!content) {\n return `<${tag}${htmlAttrs}>`;\n }\n return `<${tag}${htmlAttrs}>${content}`;\n }\n /**\n * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default.\n *\n * @param {SummaryWriteOptions} [options] (optional) options for write operation\n *\n * @returns {Promise} summary instance\n */\n write(options) {\n return __awaiter(this, void 0, void 0, function* () {\n const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite);\n const filePath = yield this.filePath();\n const writeFunc = overwrite ? 
writeFile : appendFile;\n yield writeFunc(filePath, this._buffer, { encoding: 'utf8' });\n return this.emptyBuffer();\n });\n }\n /**\n * Clears the summary buffer and wipes the summary file\n *\n * @returns {Summary} summary instance\n */\n clear() {\n return __awaiter(this, void 0, void 0, function* () {\n return this.emptyBuffer().write({ overwrite: true });\n });\n }\n /**\n * Returns the current summary buffer as a string\n *\n * @returns {string} string of summary buffer\n */\n stringify() {\n return this._buffer;\n }\n /**\n * If the summary buffer is empty\n *\n * @returns {boolen} true if the buffer is empty\n */\n isEmptyBuffer() {\n return this._buffer.length === 0;\n }\n /**\n * Resets the summary buffer without writing to summary file\n *\n * @returns {Summary} summary instance\n */\n emptyBuffer() {\n this._buffer = '';\n return this;\n }\n /**\n * Adds raw text to the summary buffer\n *\n * @param {string} text content to add\n * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false)\n *\n * @returns {Summary} summary instance\n */\n addRaw(text, addEOL = false) {\n this._buffer += text;\n return addEOL ? this.addEOL() : this;\n }\n /**\n * Adds the operating system-specific end-of-line marker to the buffer\n *\n * @returns {Summary} summary instance\n */\n addEOL() {\n return this.addRaw(os_1.EOL);\n }\n /**\n * Adds an HTML codeblock to the summary buffer\n *\n * @param {string} code content to render within fenced code block\n * @param {string} lang (optional) language to syntax highlight code\n *\n * @returns {Summary} summary instance\n */\n addCodeBlock(code, lang) {\n const attrs = Object.assign({}, (lang && { lang }));\n const element = this.wrap('pre', this.wrap('code', code), attrs);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML list to the summary buffer\n *\n * @param {string[]} items list of items to render\n * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false)\n *\n * @returns {Summary} summary instance\n */\n addList(items, ordered = false) {\n const tag = ordered ? 'ol' : 'ul';\n const listItems = items.map(item => this.wrap('li', item)).join('');\n const element = this.wrap(tag, listItems);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML table to the summary buffer\n *\n * @param {SummaryTableCell[]} rows table rows\n *\n * @returns {Summary} summary instance\n */\n addTable(rows) {\n const tableBody = rows\n .map(row => {\n const cells = row\n .map(cell => {\n if (typeof cell === 'string') {\n return this.wrap('td', cell);\n }\n const { header, data, colspan, rowspan } = cell;\n const tag = header ? 
'th' : 'td';\n const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan }));\n return this.wrap(tag, data, attrs);\n })\n .join('');\n return this.wrap('tr', cells);\n })\n .join('');\n const element = this.wrap('table', tableBody);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds a collapsable HTML details element to the summary buffer\n *\n * @param {string} label text for the closed state\n * @param {string} content collapsable content\n *\n * @returns {Summary} summary instance\n */\n addDetails(label, content) {\n const element = this.wrap('details', this.wrap('summary', label) + content);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML image tag to the summary buffer\n *\n * @param {string} src path to the image you to embed\n * @param {string} alt text description of the image\n * @param {SummaryImageOptions} options (optional) addition image attributes\n *\n * @returns {Summary} summary instance\n */\n addImage(src, alt, options) {\n const { width, height } = options || {};\n const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height }));\n const element = this.wrap('img', null, Object.assign({ src, alt }, attrs));\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML section heading element\n *\n * @param {string} text heading text\n * @param {number | string} [level=1] (optional) the heading level, default: 1\n *\n * @returns {Summary} summary instance\n */\n addHeading(text, level) {\n const tag = `h${level}`;\n const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag)\n ? tag\n : 'h1';\n const element = this.wrap(allowedTag, text);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML thematic break (
<hr>) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addSeparator() {\n const element = this.wrap('hr', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML line break (<br>
) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addBreak() {\n const element = this.wrap('br', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML blockquote to the summary buffer\n *\n * @param {string} text quote text\n * @param {string} cite (optional) citation url\n *\n * @returns {Summary} summary instance\n */\n addQuote(text, cite) {\n const attrs = Object.assign({}, (cite && { cite }));\n const element = this.wrap('blockquote', text, attrs);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML anchor tag to the summary buffer\n *\n * @param {string} text link text/content\n * @param {string} href hyperlink\n *\n * @returns {Summary} summary instance\n */\n addLink(text, href) {\n const element = this.wrap('a', text, { href });\n return this.addRaw(element).addEOL();\n }\n}\nconst _summary = new Summary();\n/**\n * @deprecated use `core.summary`\n */\nexports.markdownSummary = _summary;\nexports.summary = _summary;\n//# sourceMappingURL=summary.js.map","\"use strict\";\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toCommandProperties = exports.toCommandValue = void 0;\n/**\n * Sanitizes an input into a string so it can be passed into issueCommand safely\n * @param input input to sanitize into a string\n */\nfunction toCommandValue(input) {\n if (input === null || input === undefined) {\n return '';\n }\n else if (typeof input === 'string' || input instanceof String) {\n return input;\n }\n return JSON.stringify(input);\n}\nexports.toCommandValue = toCommandValue;\n/**\n *\n * @param annotationProperties\n * @returns The command properties to send with the actual annotation command\n * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646\n */\nfunction toCommandProperties(annotationProperties) {\n if (!Object.keys(annotationProperties).length) {\n return {};\n }\n return {\n title: annotationProperties.title,\n file: annotationProperties.file,\n line: annotationProperties.startLine,\n endLine: annotationProperties.endLine,\n col: annotationProperties.startColumn,\n endColumn: annotationProperties.endColumn\n };\n}\nexports.toCommandProperties = toCommandProperties;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Context = void 0;\nconst fs_1 = require(\"fs\");\nconst os_1 = require(\"os\");\nclass Context {\n /**\n * Hydrate the context from the environment\n */\n constructor() {\n var _a, _b, _c;\n this.payload = {};\n if (process.env.GITHUB_EVENT_PATH) {\n if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) {\n this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));\n }\n else {\n const path = process.env.GITHUB_EVENT_PATH;\n process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`);\n }\n }\n this.eventName = process.env.GITHUB_EVENT_NAME;\n this.sha = process.env.GITHUB_SHA;\n this.ref = process.env.GITHUB_REF;\n this.workflow = process.env.GITHUB_WORKFLOW;\n this.action = process.env.GITHUB_ACTION;\n this.actor = process.env.GITHUB_ACTOR;\n this.job = process.env.GITHUB_JOB;\n this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10);\n this.runId = parseInt(process.env.GITHUB_RUN_ID, 10);\n this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? 
_a : `https://api.github.com`;\n this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`;\n this.graphqlUrl = (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`;\n }\n get issue() {\n const payload = this.payload;\n return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number });\n }\n get repo() {\n if (process.env.GITHUB_REPOSITORY) {\n const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');\n return { owner, repo };\n }\n if (this.payload.repository) {\n return {\n owner: this.payload.repository.owner.login,\n repo: this.payload.repository.name\n };\n }\n throw new Error(\"context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'\");\n }\n}\nexports.Context = Context;\n//# sourceMappingURL=context.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getOctokit = exports.context = void 0;\nconst Context = __importStar(require(\"./context\"));\nconst utils_1 = require(\"./utils\");\nexports.context = new Context.Context();\n/**\n * Returns a hydrated octokit ready to use for GitHub Actions\n *\n * @param token the repo PAT or GITHUB_TOKEN\n * @param options other options to set\n */\nfunction getOctokit(token, options) {\n return new utils_1.GitHub(utils_1.getOctokitOptions(token, options));\n}\nexports.getOctokit = getOctokit;\n//# sourceMappingURL=github.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0;\nconst httpClient = __importStar(require(\"@actions/http-client\"));\nfunction getAuthString(token, options) {\n if (!token && !options.auth) {\n throw new Error('Parameter token or opts.auth is required');\n }\n else if (token && options.auth) {\n throw new Error('Parameters token and opts.auth may not both be specified');\n }\n return typeof options.auth === 'string' ? options.auth : `token ${token}`;\n}\nexports.getAuthString = getAuthString;\nfunction getProxyAgent(destinationUrl) {\n const hc = new httpClient.HttpClient();\n return hc.getAgent(destinationUrl);\n}\nexports.getProxyAgent = getProxyAgent;\nfunction getApiBaseUrl() {\n return process.env['GITHUB_API_URL'] || 'https://api.github.com';\n}\nexports.getApiBaseUrl = getApiBaseUrl;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getOctokitOptions = exports.GitHub = exports.context = void 0;\nconst Context = __importStar(require(\"./context\"));\nconst Utils = __importStar(require(\"./internal/utils\"));\n// octokit + plugins\nconst core_1 = require(\"@octokit/core\");\nconst plugin_rest_endpoint_methods_1 = require(\"@octokit/plugin-rest-endpoint-methods\");\nconst plugin_paginate_rest_1 = require(\"@octokit/plugin-paginate-rest\");\nexports.context = new Context.Context();\nconst baseUrl = Utils.getApiBaseUrl();\nconst defaults = {\n baseUrl,\n request: {\n agent: Utils.getProxyAgent(baseUrl)\n }\n};\nexports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(defaults);\n/**\n * Convience function to correctly format Octokit Options to pass into the constructor.\n *\n * @param token the repo PAT or GITHUB_TOKEN\n * @param options other options to set\n */\nfunction getOctokitOptions(token, options) {\n const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller\n // Auth\n const auth = Utils.getAuthString(token, opts);\n if (auth) {\n opts.auth = auth;\n }\n return opts;\n}\nexports.getOctokitOptions = getOctokitOptions;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0;\nclass BasicCredentialHandler {\n constructor(username, password) {\n this.username = username;\n this.password = password;\n }\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.BasicCredentialHandler = BasicCredentialHandler;\nclass BearerCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Bearer ${this.token}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.BearerCredentialHandler = BearerCredentialHandler;\nclass PersonalAccessTokenCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;\n//# sourceMappingURL=auth.js.map","\"use strict\";\n/* eslint-disable @typescript-eslint/no-explicit-any */\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0;\nconst http = __importStar(require(\"http\"));\nconst https = __importStar(require(\"https\"));\nconst pm = __importStar(require(\"./proxy\"));\nconst tunnel = __importStar(require(\"tunnel\"));\nvar HttpCodes;\n(function (HttpCodes) {\n HttpCodes[HttpCodes[\"OK\"] = 200] = \"OK\";\n HttpCodes[HttpCodes[\"MultipleChoices\"] = 300] = \"MultipleChoices\";\n HttpCodes[HttpCodes[\"MovedPermanently\"] = 301] = \"MovedPermanently\";\n HttpCodes[HttpCodes[\"ResourceMoved\"] = 302] = \"ResourceMoved\";\n HttpCodes[HttpCodes[\"SeeOther\"] = 303] = \"SeeOther\";\n HttpCodes[HttpCodes[\"NotModified\"] = 304] = \"NotModified\";\n HttpCodes[HttpCodes[\"UseProxy\"] = 305] = \"UseProxy\";\n HttpCodes[HttpCodes[\"SwitchProxy\"] = 306] = \"SwitchProxy\";\n HttpCodes[HttpCodes[\"TemporaryRedirect\"] = 307] = \"TemporaryRedirect\";\n HttpCodes[HttpCodes[\"PermanentRedirect\"] = 308] = \"PermanentRedirect\";\n HttpCodes[HttpCodes[\"BadRequest\"] = 400] = \"BadRequest\";\n HttpCodes[HttpCodes[\"Unauthorized\"] = 401] = \"Unauthorized\";\n HttpCodes[HttpCodes[\"PaymentRequired\"] = 402] = \"PaymentRequired\";\n HttpCodes[HttpCodes[\"Forbidden\"] = 403] = \"Forbidden\";\n HttpCodes[HttpCodes[\"NotFound\"] = 404] = \"NotFound\";\n HttpCodes[HttpCodes[\"MethodNotAllowed\"] = 405] = \"MethodNotAllowed\";\n HttpCodes[HttpCodes[\"NotAcceptable\"] = 406] = \"NotAcceptable\";\n HttpCodes[HttpCodes[\"ProxyAuthenticationRequired\"] = 407] = \"ProxyAuthenticationRequired\";\n HttpCodes[HttpCodes[\"RequestTimeout\"] = 408] = \"RequestTimeout\";\n HttpCodes[HttpCodes[\"Conflict\"] = 409] = \"Conflict\";\n HttpCodes[HttpCodes[\"Gone\"] = 410] = \"Gone\";\n HttpCodes[HttpCodes[\"TooManyRequests\"] = 429] = \"TooManyRequests\";\n HttpCodes[HttpCodes[\"InternalServerError\"] = 500] = \"InternalServerError\";\n HttpCodes[HttpCodes[\"NotImplemented\"] = 501] = \"NotImplemented\";\n HttpCodes[HttpCodes[\"BadGateway\"] = 502] = \"BadGateway\";\n HttpCodes[HttpCodes[\"ServiceUnavailable\"] = 503] = \"ServiceUnavailable\";\n HttpCodes[HttpCodes[\"GatewayTimeout\"] = 504] = \"GatewayTimeout\";\n})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));\nvar Headers;\n(function (Headers) {\n Headers[\"Accept\"] = \"accept\";\n Headers[\"ContentType\"] = \"content-type\";\n})(Headers = exports.Headers || (exports.Headers = {}));\nvar MediaTypes;\n(function (MediaTypes) {\n MediaTypes[\"ApplicationJson\"] = \"application/json\";\n})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));\n/**\n * Returns the proxy URL, depending upon the supplied url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. 
For example, https://api.github.com\n */\nfunction getProxyUrl(serverUrl) {\n const proxyUrl = pm.getProxyUrl(new URL(serverUrl));\n return proxyUrl ? proxyUrl.href : '';\n}\nexports.getProxyUrl = getProxyUrl;\nconst HttpRedirectCodes = [\n HttpCodes.MovedPermanently,\n HttpCodes.ResourceMoved,\n HttpCodes.SeeOther,\n HttpCodes.TemporaryRedirect,\n HttpCodes.PermanentRedirect\n];\nconst HttpResponseRetryCodes = [\n HttpCodes.BadGateway,\n HttpCodes.ServiceUnavailable,\n HttpCodes.GatewayTimeout\n];\nconst RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];\nconst ExponentialBackoffCeiling = 10;\nconst ExponentialBackoffTimeSlice = 5;\nclass HttpClientError extends Error {\n constructor(message, statusCode) {\n super(message);\n this.name = 'HttpClientError';\n this.statusCode = statusCode;\n Object.setPrototypeOf(this, HttpClientError.prototype);\n }\n}\nexports.HttpClientError = HttpClientError;\nclass HttpClientResponse {\n constructor(message) {\n this.message = message;\n }\n readBody() {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {\n let output = Buffer.alloc(0);\n this.message.on('data', (chunk) => {\n output = Buffer.concat([output, chunk]);\n });\n this.message.on('end', () => {\n resolve(output.toString());\n });\n }));\n });\n }\n}\nexports.HttpClientResponse = HttpClientResponse;\nfunction isHttps(requestUrl) {\n const parsedUrl = new URL(requestUrl);\n return parsedUrl.protocol === 'https:';\n}\nexports.isHttps = isHttps;\nclass HttpClient {\n constructor(userAgent, handlers, requestOptions) {\n this._ignoreSslError = false;\n this._allowRedirects = true;\n this._allowRedirectDowngrade = false;\n this._maxRedirects = 50;\n this._allowRetries = false;\n this._maxRetries = 1;\n this._keepAlive = false;\n this._disposed = false;\n this.userAgent = userAgent;\n this.handlers = handlers || [];\n this.requestOptions = requestOptions;\n if (requestOptions) {\n if (requestOptions.ignoreSslError != null) {\n this._ignoreSslError = requestOptions.ignoreSslError;\n }\n this._socketTimeout = requestOptions.socketTimeout;\n if (requestOptions.allowRedirects != null) {\n this._allowRedirects = requestOptions.allowRedirects;\n }\n if (requestOptions.allowRedirectDowngrade != null) {\n this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;\n }\n if (requestOptions.maxRedirects != null) {\n this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);\n }\n if (requestOptions.keepAlive != null) {\n this._keepAlive = requestOptions.keepAlive;\n }\n if (requestOptions.allowRetries != null) {\n this._allowRetries = requestOptions.allowRetries;\n }\n if (requestOptions.maxRetries != null) {\n this._maxRetries = requestOptions.maxRetries;\n }\n }\n }\n options(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});\n });\n }\n get(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('GET', requestUrl, null, additionalHeaders || {});\n });\n }\n del(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('DELETE', requestUrl, null, additionalHeaders || {});\n });\n }\n post(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('POST', requestUrl, data, additionalHeaders || {});\n });\n }\n patch(requestUrl, 
data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('PATCH', requestUrl, data, additionalHeaders || {});\n });\n }\n put(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('PUT', requestUrl, data, additionalHeaders || {});\n });\n }\n head(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('HEAD', requestUrl, null, additionalHeaders || {});\n });\n }\n sendStream(verb, requestUrl, stream, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request(verb, requestUrl, stream, additionalHeaders);\n });\n }\n /**\n * Gets a typed object from an endpoint\n * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise\n */\n getJson(requestUrl, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n const res = yield this.get(requestUrl, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n postJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.post(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n putJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.put(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n patchJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.patch(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n /**\n * Makes a raw http request.\n * All other methods such as get, post, patch, and request ultimately call this.\n * Prefer get, del, post and patch\n */\n request(verb, requestUrl, data, headers) {\n return __awaiter(this, void 0, void 0, function* () {\n if (this._disposed) {\n throw new Error('Client has already been disposed.');\n }\n const parsedUrl = new URL(requestUrl);\n let info = this._prepareRequest(verb, parsedUrl, headers);\n // Only perform retries on reads since writes may not be idempotent.\n const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb)\n ? 
this._maxRetries + 1\n : 1;\n let numTries = 0;\n let response;\n do {\n response = yield this.requestRaw(info, data);\n // Check if it's an authentication challenge\n if (response &&\n response.message &&\n response.message.statusCode === HttpCodes.Unauthorized) {\n let authenticationHandler;\n for (const handler of this.handlers) {\n if (handler.canHandleAuthentication(response)) {\n authenticationHandler = handler;\n break;\n }\n }\n if (authenticationHandler) {\n return authenticationHandler.handleAuthentication(this, info, data);\n }\n else {\n // We have received an unauthorized response but have no handlers to handle it.\n // Let the response return to the caller.\n return response;\n }\n }\n let redirectsRemaining = this._maxRedirects;\n while (response.message.statusCode &&\n HttpRedirectCodes.includes(response.message.statusCode) &&\n this._allowRedirects &&\n redirectsRemaining > 0) {\n const redirectUrl = response.message.headers['location'];\n if (!redirectUrl) {\n // if there's no location to redirect to, we won't\n break;\n }\n const parsedRedirectUrl = new URL(redirectUrl);\n if (parsedUrl.protocol === 'https:' &&\n parsedUrl.protocol !== parsedRedirectUrl.protocol &&\n !this._allowRedirectDowngrade) {\n throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');\n }\n // we need to finish reading the response before reassigning response\n // which will leak the open socket.\n yield response.readBody();\n // strip authorization header if redirected to a different hostname\n if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {\n for (const header in headers) {\n // header names are case insensitive\n if (header.toLowerCase() === 'authorization') {\n delete headers[header];\n }\n }\n }\n // let's make the request with the new redirectUrl\n info = this._prepareRequest(verb, parsedRedirectUrl, headers);\n response = yield this.requestRaw(info, data);\n redirectsRemaining--;\n }\n if (!response.message.statusCode ||\n !HttpResponseRetryCodes.includes(response.message.statusCode)) {\n // If not a retry code, return immediately instead of retrying\n return response;\n }\n numTries += 1;\n if (numTries < maxTries) {\n yield response.readBody();\n yield this._performExponentialBackoff(numTries);\n }\n } while (numTries < maxTries);\n return response;\n });\n }\n /**\n * Needs to be called if keepAlive is set to true in request options.\n */\n dispose() {\n if (this._agent) {\n this._agent.destroy();\n }\n this._disposed = true;\n }\n /**\n * Raw request.\n * @param info\n * @param data\n */\n requestRaw(info, data) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve, reject) => {\n function callbackForResult(err, res) {\n if (err) {\n reject(err);\n }\n else if (!res) {\n // If `err` is not passed, then `res` must be passed.\n reject(new Error('Unknown error'));\n }\n else {\n resolve(res);\n }\n }\n this.requestRawWithCallback(info, data, callbackForResult);\n });\n });\n }\n /**\n * Raw request with callback.\n * @param info\n * @param data\n * @param onResult\n */\n requestRawWithCallback(info, data, onResult) {\n if (typeof data === 'string') {\n if (!info.options.headers) {\n info.options.headers = {};\n }\n info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');\n }\n let callbackCalled = false;\n function handleResult(err, res) {\n if (!callbackCalled) {\n callbackCalled = true;\n 
onResult(err, res);\n }\n }\n const req = info.httpModule.request(info.options, (msg) => {\n const res = new HttpClientResponse(msg);\n handleResult(undefined, res);\n });\n let socket;\n req.on('socket', sock => {\n socket = sock;\n });\n // If we ever get disconnected, we want the socket to timeout eventually\n req.setTimeout(this._socketTimeout || 3 * 60000, () => {\n if (socket) {\n socket.end();\n }\n handleResult(new Error(`Request timeout: ${info.options.path}`));\n });\n req.on('error', function (err) {\n // err has statusCode property\n // res should have headers\n handleResult(err);\n });\n if (data && typeof data === 'string') {\n req.write(data, 'utf8');\n }\n if (data && typeof data !== 'string') {\n data.on('close', function () {\n req.end();\n });\n data.pipe(req);\n }\n else {\n req.end();\n }\n }\n /**\n * Gets an http agent. This function is useful when you need an http agent that handles\n * routing through a proxy server - depending upon the url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com\n */\n getAgent(serverUrl) {\n const parsedUrl = new URL(serverUrl);\n return this._getAgent(parsedUrl);\n }\n _prepareRequest(method, requestUrl, headers) {\n const info = {};\n info.parsedUrl = requestUrl;\n const usingSsl = info.parsedUrl.protocol === 'https:';\n info.httpModule = usingSsl ? https : http;\n const defaultPort = usingSsl ? 443 : 80;\n info.options = {};\n info.options.host = info.parsedUrl.hostname;\n info.options.port = info.parsedUrl.port\n ? parseInt(info.parsedUrl.port)\n : defaultPort;\n info.options.path =\n (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');\n info.options.method = method;\n info.options.headers = this._mergeHeaders(headers);\n if (this.userAgent != null) {\n info.options.headers['user-agent'] = this.userAgent;\n }\n info.options.agent = this._getAgent(info.parsedUrl);\n // gives handlers an opportunity to participate\n if (this.handlers) {\n for (const handler of this.handlers) {\n handler.prepareRequest(info.options);\n }\n }\n return info;\n }\n _mergeHeaders(headers) {\n if (this.requestOptions && this.requestOptions.headers) {\n return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {}));\n }\n return lowercaseKeys(headers || {});\n }\n _getExistingOrDefaultHeader(additionalHeaders, header, _default) {\n let clientHeader;\n if (this.requestOptions && this.requestOptions.headers) {\n clientHeader = lowercaseKeys(this.requestOptions.headers)[header];\n }\n return additionalHeaders[header] || clientHeader || _default;\n }\n _getAgent(parsedUrl) {\n let agent;\n const proxyUrl = pm.getProxyUrl(parsedUrl);\n const useProxy = proxyUrl && proxyUrl.hostname;\n if (this._keepAlive && useProxy) {\n agent = this._proxyAgent;\n }\n if (this._keepAlive && !useProxy) {\n agent = this._agent;\n }\n // if agent is already assigned use that agent.\n if (agent) {\n return agent;\n }\n const usingSsl = parsedUrl.protocol === 'https:';\n let maxSockets = 100;\n if (this.requestOptions) {\n maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;\n }\n // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis.\n if (proxyUrl && proxyUrl.hostname) {\n const agentOptions = {\n maxSockets,\n keepAlive: this._keepAlive,\n proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && {\n proxyAuth: 
`${proxyUrl.username}:${proxyUrl.password}`\n })), { host: proxyUrl.hostname, port: proxyUrl.port })\n };\n let tunnelAgent;\n const overHttps = proxyUrl.protocol === 'https:';\n if (usingSsl) {\n tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;\n }\n else {\n tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;\n }\n agent = tunnelAgent(agentOptions);\n this._proxyAgent = agent;\n }\n // if reusing agent across request and tunneling agent isn't assigned create a new agent\n if (this._keepAlive && !agent) {\n const options = { keepAlive: this._keepAlive, maxSockets };\n agent = usingSsl ? new https.Agent(options) : new http.Agent(options);\n this._agent = agent;\n }\n // if not using private agent and tunnel agent isn't setup then use global agent\n if (!agent) {\n agent = usingSsl ? https.globalAgent : http.globalAgent;\n }\n if (usingSsl && this._ignoreSslError) {\n // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process\n // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options\n // we have to cast it to any and change it directly\n agent.options = Object.assign(agent.options || {}, {\n rejectUnauthorized: false\n });\n }\n return agent;\n }\n _performExponentialBackoff(retryNumber) {\n return __awaiter(this, void 0, void 0, function* () {\n retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);\n const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);\n return new Promise(resolve => setTimeout(() => resolve(), ms));\n });\n }\n _processResponse(res, options) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {\n const statusCode = res.message.statusCode || 0;\n const response = {\n statusCode,\n result: null,\n headers: {}\n };\n // not found leads to null obj returned\n if (statusCode === HttpCodes.NotFound) {\n resolve(response);\n }\n // get the result from the body\n function dateTimeDeserializer(key, value) {\n if (typeof value === 'string') {\n const a = new Date(value);\n if (!isNaN(a.valueOf())) {\n return a;\n }\n }\n return value;\n }\n let obj;\n let contents;\n try {\n contents = yield res.readBody();\n if (contents && contents.length > 0) {\n if (options && options.deserializeDates) {\n obj = JSON.parse(contents, dateTimeDeserializer);\n }\n else {\n obj = JSON.parse(contents);\n }\n response.result = obj;\n }\n response.headers = res.message.headers;\n }\n catch (err) {\n // Invalid resource (contents not json); leaving result obj null\n }\n // note that 3xx redirects are handled by the http layer.\n if (statusCode > 299) {\n let msg;\n // if exception/error in body, attempt to get better error\n if (obj && obj.message) {\n msg = obj.message;\n }\n else if (contents && contents.length > 0) {\n // it may be the case that the exception is in the body message as string\n msg = contents;\n }\n else {\n msg = `Failed request: (${statusCode})`;\n }\n const err = new HttpClientError(msg, statusCode);\n err.result = response.result;\n reject(err);\n }\n else {\n resolve(response);\n }\n }));\n });\n }\n}\nexports.HttpClient = HttpClient;\nconst lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.checkBypass = exports.getProxyUrl = void 0;\nfunction getProxyUrl(reqUrl) {\n const 
usingSsl = reqUrl.protocol === 'https:';\n if (checkBypass(reqUrl)) {\n return undefined;\n }\n const proxyVar = (() => {\n if (usingSsl) {\n return process.env['https_proxy'] || process.env['HTTPS_PROXY'];\n }\n else {\n return process.env['http_proxy'] || process.env['HTTP_PROXY'];\n }\n })();\n if (proxyVar) {\n return new URL(proxyVar);\n }\n else {\n return undefined;\n }\n}\nexports.getProxyUrl = getProxyUrl;\nfunction checkBypass(reqUrl) {\n if (!reqUrl.hostname) {\n return false;\n }\n const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';\n if (!noProxy) {\n return false;\n }\n // Determine the request port\n let reqPort;\n if (reqUrl.port) {\n reqPort = Number(reqUrl.port);\n }\n else if (reqUrl.protocol === 'http:') {\n reqPort = 80;\n }\n else if (reqUrl.protocol === 'https:') {\n reqPort = 443;\n }\n // Format the request hostname and hostname with port\n const upperReqHosts = [reqUrl.hostname.toUpperCase()];\n if (typeof reqPort === 'number') {\n upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);\n }\n // Compare request host against noproxy\n for (const upperNoProxyItem of noProxy\n .split(',')\n .map(x => x.trim().toUpperCase())\n .filter(x => x)) {\n if (upperReqHosts.some(x => x === upperNoProxyItem)) {\n return true;\n }\n }\n return false;\n}\nexports.checkBypass = checkBypass;\n//# sourceMappingURL=proxy.js.map","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nconst REGEX_IS_INSTALLATION_LEGACY = /^v1\\./;\nconst REGEX_IS_INSTALLATION = /^ghs_/;\nconst REGEX_IS_USER_TO_SERVER = /^ghu_/;\nasync function auth(token) {\n const isApp = token.split(/\\./).length === 3;\n const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token);\n const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token);\n const tokenType = isApp ? \"app\" : isInstallation ? \"installation\" : isUserToServer ? 
\"user-to-server\" : \"oauth\";\n return {\n type: \"token\",\n token: token,\n tokenType\n };\n}\n\n/**\n * Prefix token for usage in the Authorization header\n *\n * @param token OAuth token or JSON Web Token\n */\nfunction withAuthorizationPrefix(token) {\n if (token.split(/\\./).length === 3) {\n return `bearer ${token}`;\n }\n\n return `token ${token}`;\n}\n\nasync function hook(token, request, route, parameters) {\n const endpoint = request.endpoint.merge(route, parameters);\n endpoint.headers.authorization = withAuthorizationPrefix(token);\n return request(endpoint);\n}\n\nconst createTokenAuth = function createTokenAuth(token) {\n if (!token) {\n throw new Error(\"[@octokit/auth-token] No token passed to createTokenAuth\");\n }\n\n if (typeof token !== \"string\") {\n throw new Error(\"[@octokit/auth-token] Token passed to createTokenAuth is not a string\");\n }\n\n token = token.replace(/^(token|bearer) +/i, \"\");\n return Object.assign(auth.bind(null, token), {\n hook: hook.bind(null, token)\n });\n};\n\nexports.createTokenAuth = createTokenAuth;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar universalUserAgent = require('universal-user-agent');\nvar beforeAfterHook = require('before-after-hook');\nvar request = require('@octokit/request');\nvar graphql = require('@octokit/graphql');\nvar authToken = require('@octokit/auth-token');\n\nfunction _objectWithoutPropertiesLoose(source, excluded) {\n if (source == null) return {};\n var target = {};\n var sourceKeys = Object.keys(source);\n var key, i;\n\n for (i = 0; i < sourceKeys.length; i++) {\n key = sourceKeys[i];\n if (excluded.indexOf(key) >= 0) continue;\n target[key] = source[key];\n }\n\n return target;\n}\n\nfunction _objectWithoutProperties(source, excluded) {\n if (source == null) return {};\n\n var target = _objectWithoutPropertiesLoose(source, excluded);\n\n var key, i;\n\n if (Object.getOwnPropertySymbols) {\n var sourceSymbolKeys = Object.getOwnPropertySymbols(source);\n\n for (i = 0; i < sourceSymbolKeys.length; i++) {\n key = sourceSymbolKeys[i];\n if (excluded.indexOf(key) >= 0) continue;\n if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue;\n target[key] = source[key];\n }\n }\n\n return target;\n}\n\nconst VERSION = \"3.6.0\";\n\nconst _excluded = [\"authStrategy\"];\nclass Octokit {\n constructor(options = {}) {\n const hook = new beforeAfterHook.Collection();\n const requestDefaults = {\n baseUrl: request.request.endpoint.DEFAULTS.baseUrl,\n headers: {},\n request: Object.assign({}, options.request, {\n // @ts-ignore internal usage only, no need to type\n hook: hook.bind(null, \"request\")\n }),\n mediaType: {\n previews: [],\n format: \"\"\n }\n }; // prepend default user agent with `options.userAgent` if set\n\n requestDefaults.headers[\"user-agent\"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(\" \");\n\n if (options.baseUrl) {\n requestDefaults.baseUrl = options.baseUrl;\n }\n\n if (options.previews) {\n requestDefaults.mediaType.previews = options.previews;\n }\n\n if (options.timeZone) {\n requestDefaults.headers[\"time-zone\"] = options.timeZone;\n }\n\n this.request = request.request.defaults(requestDefaults);\n this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults);\n this.log = Object.assign({\n debug: () => {},\n info: () => {},\n warn: console.warn.bind(console),\n error: console.error.bind(console)\n }, 
options.log);\n this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance\n // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered.\n // (2) If only `options.auth` is set, use the default token authentication strategy.\n // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance.\n // TODO: type `options.auth` based on `options.authStrategy`.\n\n if (!options.authStrategy) {\n if (!options.auth) {\n // (1)\n this.auth = async () => ({\n type: \"unauthenticated\"\n });\n } else {\n // (2)\n const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\\_(ツ)_/¯\n\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n }\n } else {\n const {\n authStrategy\n } = options,\n otherOptions = _objectWithoutProperties(options, _excluded);\n\n const auth = authStrategy(Object.assign({\n request: this.request,\n log: this.log,\n // we pass the current octokit instance as well as its constructor options\n // to allow for authentication strategies that return a new octokit instance\n // that shares the same internal state as the current one. The original\n // requirement for this was the \"event-octokit\" authentication strategy\n // of https://github.com/probot/octokit-auth-probot.\n octokit: this,\n octokitOptions: otherOptions\n }, options.auth)); // @ts-ignore ¯\\_(ツ)_/¯\n\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n } // apply plugins\n // https://stackoverflow.com/a/16345172\n\n\n const classConstructor = this.constructor;\n classConstructor.plugins.forEach(plugin => {\n Object.assign(this, plugin(this, options));\n });\n }\n\n static defaults(defaults) {\n const OctokitWithDefaults = class extends this {\n constructor(...args) {\n const options = args[0] || {};\n\n if (typeof defaults === \"function\") {\n super(defaults(options));\n return;\n }\n\n super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? 
{\n userAgent: `${options.userAgent} ${defaults.userAgent}`\n } : null));\n }\n\n };\n return OctokitWithDefaults;\n }\n /**\n * Attach a plugin (or many) to your Octokit instance.\n *\n * @example\n * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)\n */\n\n\n static plugin(...newPlugins) {\n var _a;\n\n const currentPlugins = this.plugins;\n const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a);\n return NewOctokit;\n }\n\n}\nOctokit.VERSION = VERSION;\nOctokit.plugins = [];\n\nexports.Octokit = Octokit;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar isPlainObject = require('is-plain-object');\nvar universalUserAgent = require('universal-user-agent');\n\nfunction lowercaseKeys(object) {\n if (!object) {\n return {};\n }\n\n return Object.keys(object).reduce((newObj, key) => {\n newObj[key.toLowerCase()] = object[key];\n return newObj;\n }, {});\n}\n\nfunction mergeDeep(defaults, options) {\n const result = Object.assign({}, defaults);\n Object.keys(options).forEach(key => {\n if (isPlainObject.isPlainObject(options[key])) {\n if (!(key in defaults)) Object.assign(result, {\n [key]: options[key]\n });else result[key] = mergeDeep(defaults[key], options[key]);\n } else {\n Object.assign(result, {\n [key]: options[key]\n });\n }\n });\n return result;\n}\n\nfunction removeUndefinedProperties(obj) {\n for (const key in obj) {\n if (obj[key] === undefined) {\n delete obj[key];\n }\n }\n\n return obj;\n}\n\nfunction merge(defaults, route, options) {\n if (typeof route === \"string\") {\n let [method, url] = route.split(\" \");\n options = Object.assign(url ? {\n method,\n url\n } : {\n url: method\n }, options);\n } else {\n options = Object.assign({}, route);\n } // lowercase header names before merging with defaults to avoid duplicates\n\n\n options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging\n\n removeUndefinedProperties(options);\n removeUndefinedProperties(options.headers);\n const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten\n\n if (defaults && defaults.mediaType.previews.length) {\n mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews);\n }\n\n mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, \"\"));\n return mergedOptions;\n}\n\nfunction addQueryParameters(url, parameters) {\n const separator = /\\?/.test(url) ? 
\"&\" : \"?\";\n const names = Object.keys(parameters);\n\n if (names.length === 0) {\n return url;\n }\n\n return url + separator + names.map(name => {\n if (name === \"q\") {\n return \"q=\" + parameters.q.split(\"+\").map(encodeURIComponent).join(\"+\");\n }\n\n return `${name}=${encodeURIComponent(parameters[name])}`;\n }).join(\"&\");\n}\n\nconst urlVariableRegex = /\\{[^}]+\\}/g;\n\nfunction removeNonChars(variableName) {\n return variableName.replace(/^\\W+|\\W+$/g, \"\").split(/,/);\n}\n\nfunction extractUrlVariableNames(url) {\n const matches = url.match(urlVariableRegex);\n\n if (!matches) {\n return [];\n }\n\n return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []);\n}\n\nfunction omit(object, keysToOmit) {\n return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => {\n obj[key] = object[key];\n return obj;\n }, {});\n}\n\n// Based on https://github.com/bramstein/url-template, licensed under BSD\n// TODO: create separate package.\n//\n// Copyright (c) 2012-2014, Bram Stein\n// All rights reserved.\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions\n// are met:\n// 1. Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n// 2. Redistributions in binary form must reproduce the above copyright\n// notice, this list of conditions and the following disclaimer in the\n// documentation and/or other materials provided with the distribution.\n// 3. The name of the author may not be used to endorse or promote products\n// derived from this software without specific prior written permission.\n// THIS SOFTWARE IS PROVIDED BY THE AUTHOR \"AS IS\" AND ANY EXPRESS OR IMPLIED\n// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF\n// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO\n// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,\n// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,\n// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY\n// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\n// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,\n// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n/* istanbul ignore file */\nfunction encodeReserved(str) {\n return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) {\n if (!/%[0-9A-Fa-f]/.test(part)) {\n part = encodeURI(part).replace(/%5B/g, \"[\").replace(/%5D/g, \"]\");\n }\n\n return part;\n }).join(\"\");\n}\n\nfunction encodeUnreserved(str) {\n return encodeURIComponent(str).replace(/[!'()*]/g, function (c) {\n return \"%\" + c.charCodeAt(0).toString(16).toUpperCase();\n });\n}\n\nfunction encodeValue(operator, value, key) {\n value = operator === \"+\" || operator === \"#\" ? 
encodeReserved(value) : encodeUnreserved(value);\n\n if (key) {\n return encodeUnreserved(key) + \"=\" + value;\n } else {\n return value;\n }\n}\n\nfunction isDefined(value) {\n return value !== undefined && value !== null;\n}\n\nfunction isKeyOperator(operator) {\n return operator === \";\" || operator === \"&\" || operator === \"?\";\n}\n\nfunction getValues(context, operator, key, modifier) {\n var value = context[key],\n result = [];\n\n if (isDefined(value) && value !== \"\") {\n if (typeof value === \"string\" || typeof value === \"number\" || typeof value === \"boolean\") {\n value = value.toString();\n\n if (modifier && modifier !== \"*\") {\n value = value.substring(0, parseInt(modifier, 10));\n }\n\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : \"\"));\n } else {\n if (modifier === \"*\") {\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : \"\"));\n });\n } else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n result.push(encodeValue(operator, value[k], k));\n }\n });\n }\n } else {\n const tmp = [];\n\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n tmp.push(encodeValue(operator, value));\n });\n } else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n tmp.push(encodeUnreserved(k));\n tmp.push(encodeValue(operator, value[k].toString()));\n }\n });\n }\n\n if (isKeyOperator(operator)) {\n result.push(encodeUnreserved(key) + \"=\" + tmp.join(\",\"));\n } else if (tmp.length !== 0) {\n result.push(tmp.join(\",\"));\n }\n }\n }\n } else {\n if (operator === \";\") {\n if (isDefined(value)) {\n result.push(encodeUnreserved(key));\n }\n } else if (value === \"\" && (operator === \"&\" || operator === \"?\")) {\n result.push(encodeUnreserved(key) + \"=\");\n } else if (value === \"\") {\n result.push(\"\");\n }\n }\n\n return result;\n}\n\nfunction parseUrl(template) {\n return {\n expand: expand.bind(null, template)\n };\n}\n\nfunction expand(template, context) {\n var operators = [\"+\", \"#\", \".\", \"/\", \";\", \"?\", \"&\"];\n return template.replace(/\\{([^\\{\\}]+)\\}|([^\\{\\}]+)/g, function (_, expression, literal) {\n if (expression) {\n let operator = \"\";\n const values = [];\n\n if (operators.indexOf(expression.charAt(0)) !== -1) {\n operator = expression.charAt(0);\n expression = expression.substr(1);\n }\n\n expression.split(/,/g).forEach(function (variable) {\n var tmp = /([^:\\*]*)(?::(\\d+)|(\\*))?/.exec(variable);\n values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));\n });\n\n if (operator && operator !== \"+\") {\n var separator = \",\";\n\n if (operator === \"?\") {\n separator = \"&\";\n } else if (operator !== \"#\") {\n separator = operator;\n }\n\n return (values.length !== 0 ? 
operator : \"\") + values.join(separator);\n } else {\n return values.join(\",\");\n }\n } else {\n return encodeReserved(literal);\n }\n });\n}\n\nfunction parse(options) {\n // https://fetch.spec.whatwg.org/#methods\n let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible\n\n let url = (options.url || \"/\").replace(/:([a-z]\\w+)/g, \"{$1}\");\n let headers = Object.assign({}, options.headers);\n let body;\n let parameters = omit(options, [\"method\", \"baseUrl\", \"url\", \"headers\", \"request\", \"mediaType\"]); // extract variable names from URL to calculate remaining variables later\n\n const urlVariableNames = extractUrlVariableNames(url);\n url = parseUrl(url).expand(parameters);\n\n if (!/^http/.test(url)) {\n url = options.baseUrl + url;\n }\n\n const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat(\"baseUrl\");\n const remainingParameters = omit(parameters, omittedParameters);\n const isBinaryRequest = /application\\/octet-stream/i.test(headers.accept);\n\n if (!isBinaryRequest) {\n if (options.mediaType.format) {\n // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw\n headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\\/vnd(\\.\\w+)(\\.v3)?(\\.\\w+)?(\\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(\",\");\n }\n\n if (options.mediaType.previews.length) {\n const previewsFromAcceptHeader = headers.accept.match(/[\\w-]+(?=-preview)/g) || [];\n headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => {\n const format = options.mediaType.format ? `.${options.mediaType.format}` : \"+json\";\n return `application/vnd.github.${preview}-preview${format}`;\n }).join(\",\");\n }\n } // for GET/HEAD requests, set URL query parameters from remaining parameters\n // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters\n\n\n if ([\"GET\", \"HEAD\"].includes(method)) {\n url = addQueryParameters(url, remainingParameters);\n } else {\n if (\"data\" in remainingParameters) {\n body = remainingParameters.data;\n } else {\n if (Object.keys(remainingParameters).length) {\n body = remainingParameters;\n } else {\n headers[\"content-length\"] = 0;\n }\n }\n } // default content-type for JSON if body is set\n\n\n if (!headers[\"content-type\"] && typeof body !== \"undefined\") {\n headers[\"content-type\"] = \"application/json; charset=utf-8\";\n } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.\n // fetch does not allow to set `content-length` header, but we can set body to an empty string\n\n\n if ([\"PATCH\", \"PUT\"].includes(method) && typeof body === \"undefined\") {\n body = \"\";\n } // Only return body/request keys if present\n\n\n return Object.assign({\n method,\n url,\n headers\n }, typeof body !== \"undefined\" ? {\n body\n } : null, options.request ? 
{\n request: options.request\n } : null);\n}\n\nfunction endpointWithDefaults(defaults, route, options) {\n return parse(merge(defaults, route, options));\n}\n\nfunction withDefaults(oldDefaults, newDefaults) {\n const DEFAULTS = merge(oldDefaults, newDefaults);\n const endpoint = endpointWithDefaults.bind(null, DEFAULTS);\n return Object.assign(endpoint, {\n DEFAULTS,\n defaults: withDefaults.bind(null, DEFAULTS),\n merge: merge.bind(null, DEFAULTS),\n parse\n });\n}\n\nconst VERSION = \"6.0.12\";\n\nconst userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url.\n// So we use RequestParameters and add method as additional required property.\n\nconst DEFAULTS = {\n method: \"GET\",\n baseUrl: \"https://api.github.com\",\n headers: {\n accept: \"application/vnd.github.v3+json\",\n \"user-agent\": userAgent\n },\n mediaType: {\n format: \"\",\n previews: []\n }\n};\n\nconst endpoint = withDefaults(null, DEFAULTS);\n\nexports.endpoint = endpoint;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar request = require('@octokit/request');\nvar universalUserAgent = require('universal-user-agent');\n\nconst VERSION = \"4.8.0\";\n\nfunction _buildMessageForResponseErrors(data) {\n return `Request failed due to following response errors:\\n` + data.errors.map(e => ` - ${e.message}`).join(\"\\n\");\n}\n\nclass GraphqlResponseError extends Error {\n constructor(request, headers, response) {\n super(_buildMessageForResponseErrors(response));\n this.request = request;\n this.headers = headers;\n this.response = response;\n this.name = \"GraphqlResponseError\"; // Expose the errors and response data in their shorthand properties.\n\n this.errors = response.errors;\n this.data = response.data; // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n }\n\n}\n\nconst NON_VARIABLE_OPTIONS = [\"method\", \"baseUrl\", \"url\", \"headers\", \"request\", \"query\", \"mediaType\"];\nconst FORBIDDEN_VARIABLE_OPTIONS = [\"query\", \"method\", \"url\"];\nconst GHES_V3_SUFFIX_REGEX = /\\/api\\/v3\\/?$/;\nfunction graphql(request, query, options) {\n if (options) {\n if (typeof query === \"string\" && \"query\" in options) {\n return Promise.reject(new Error(`[@octokit/graphql] \"query\" cannot be used as variable name`));\n }\n\n for (const key in options) {\n if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) continue;\n return Promise.reject(new Error(`[@octokit/graphql] \"${key}\" cannot be used as variable name`));\n }\n }\n\n const parsedOptions = typeof query === \"string\" ? 
Object.assign({\n query\n }, options) : query;\n const requestOptions = Object.keys(parsedOptions).reduce((result, key) => {\n if (NON_VARIABLE_OPTIONS.includes(key)) {\n result[key] = parsedOptions[key];\n return result;\n }\n\n if (!result.variables) {\n result.variables = {};\n }\n\n result.variables[key] = parsedOptions[key];\n return result;\n }, {}); // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix\n // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451\n\n const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl;\n\n if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {\n requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, \"/api/graphql\");\n }\n\n return request(requestOptions).then(response => {\n if (response.data.errors) {\n const headers = {};\n\n for (const key of Object.keys(response.headers)) {\n headers[key] = response.headers[key];\n }\n\n throw new GraphqlResponseError(requestOptions, headers, response.data);\n }\n\n return response.data.data;\n });\n}\n\nfunction withDefaults(request$1, newDefaults) {\n const newRequest = request$1.defaults(newDefaults);\n\n const newApi = (query, options) => {\n return graphql(newRequest, query, options);\n };\n\n return Object.assign(newApi, {\n defaults: withDefaults.bind(null, newRequest),\n endpoint: request.request.endpoint\n });\n}\n\nconst graphql$1 = withDefaults(request.request, {\n headers: {\n \"user-agent\": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}`\n },\n method: \"POST\",\n url: \"/graphql\"\n});\nfunction withCustomRequest(customRequest) {\n return withDefaults(customRequest, {\n method: \"POST\",\n url: \"/graphql\"\n });\n}\n\nexports.GraphqlResponseError = GraphqlResponseError;\nexports.graphql = graphql$1;\nexports.withCustomRequest = withCustomRequest;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nconst VERSION = \"2.21.3\";\n\nfunction ownKeys(object, enumerableOnly) {\n var keys = Object.keys(object);\n\n if (Object.getOwnPropertySymbols) {\n var symbols = Object.getOwnPropertySymbols(object);\n enumerableOnly && (symbols = symbols.filter(function (sym) {\n return Object.getOwnPropertyDescriptor(object, sym).enumerable;\n })), keys.push.apply(keys, symbols);\n }\n\n return keys;\n}\n\nfunction _objectSpread2(target) {\n for (var i = 1; i < arguments.length; i++) {\n var source = null != arguments[i] ? arguments[i] : {};\n i % 2 ? ownKeys(Object(source), !0).forEach(function (key) {\n _defineProperty(target, key, source[key]);\n }) : Object.getOwnPropertyDescriptors ? 
Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) {\n Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));\n });\n }\n\n return target;\n}\n\nfunction _defineProperty(obj, key, value) {\n if (key in obj) {\n Object.defineProperty(obj, key, {\n value: value,\n enumerable: true,\n configurable: true,\n writable: true\n });\n } else {\n obj[key] = value;\n }\n\n return obj;\n}\n\n/**\n * Some “list” response that can be paginated have a different response structure\n *\n * They have a `total_count` key in the response (search also has `incomplete_results`,\n * /installation/repositories also has `repository_selection`), as well as a key with\n * the list of the items which name varies from endpoint to endpoint.\n *\n * Octokit normalizes these responses so that paginated results are always returned following\n * the same structure. One challenge is that if the list response has only one page, no Link\n * header is provided, so this header alone is not sufficient to check wether a response is\n * paginated or not.\n *\n * We check if a \"total_count\" key is present in the response data, but also make sure that\n * a \"url\" property is not, as the \"Get the combined status for a specific ref\" endpoint would\n * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref\n */\nfunction normalizePaginatedListResponse(response) {\n // endpoints can respond with 204 if repository is empty\n if (!response.data) {\n return _objectSpread2(_objectSpread2({}, response), {}, {\n data: []\n });\n }\n\n const responseNeedsNormalization = \"total_count\" in response.data && !(\"url\" in response.data);\n if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way\n // to retrieve the same information.\n\n const incompleteResults = response.data.incomplete_results;\n const repositorySelection = response.data.repository_selection;\n const totalCount = response.data.total_count;\n delete response.data.incomplete_results;\n delete response.data.repository_selection;\n delete response.data.total_count;\n const namespaceKey = Object.keys(response.data)[0];\n const data = response.data[namespaceKey];\n response.data = data;\n\n if (typeof incompleteResults !== \"undefined\") {\n response.data.incomplete_results = incompleteResults;\n }\n\n if (typeof repositorySelection !== \"undefined\") {\n response.data.repository_selection = repositorySelection;\n }\n\n response.data.total_count = totalCount;\n return response;\n}\n\nfunction iterator(octokit, route, parameters) {\n const options = typeof route === \"function\" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);\n const requestMethod = typeof route === \"function\" ? 
route : octokit.request;\n const method = options.method;\n const headers = options.headers;\n let url = options.url;\n return {\n [Symbol.asyncIterator]: () => ({\n async next() {\n if (!url) return {\n done: true\n };\n\n try {\n const response = await requestMethod({\n method,\n url,\n headers\n });\n const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format:\n // '; rel=\"next\", ; rel=\"last\"'\n // sets `url` to undefined if \"next\" URL is not present or `link` header is not set\n\n url = ((normalizedResponse.headers.link || \"\").match(/<([^>]+)>;\\s*rel=\"next\"/) || [])[1];\n return {\n value: normalizedResponse\n };\n } catch (error) {\n if (error.status !== 409) throw error;\n url = \"\";\n return {\n value: {\n status: 200,\n headers: {},\n data: []\n }\n };\n }\n }\n\n })\n };\n}\n\nfunction paginate(octokit, route, parameters, mapFn) {\n if (typeof parameters === \"function\") {\n mapFn = parameters;\n parameters = undefined;\n }\n\n return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn);\n}\n\nfunction gather(octokit, results, iterator, mapFn) {\n return iterator.next().then(result => {\n if (result.done) {\n return results;\n }\n\n let earlyExit = false;\n\n function done() {\n earlyExit = true;\n }\n\n results = results.concat(mapFn ? mapFn(result.value, done) : result.value.data);\n\n if (earlyExit) {\n return results;\n }\n\n return gather(octokit, results, iterator, mapFn);\n });\n}\n\nconst composePaginateRest = Object.assign(paginate, {\n iterator\n});\n\nconst paginatingEndpoints = [\"GET /app/hook/deliveries\", \"GET /app/installations\", \"GET /applications/grants\", \"GET /authorizations\", \"GET /enterprises/{enterprise}/actions/permissions/organizations\", \"GET /enterprises/{enterprise}/actions/runner-groups\", \"GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations\", \"GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners\", \"GET /enterprises/{enterprise}/actions/runners\", \"GET /enterprises/{enterprise}/audit-log\", \"GET /enterprises/{enterprise}/secret-scanning/alerts\", \"GET /enterprises/{enterprise}/settings/billing/advanced-security\", \"GET /events\", \"GET /gists\", \"GET /gists/public\", \"GET /gists/starred\", \"GET /gists/{gist_id}/comments\", \"GET /gists/{gist_id}/commits\", \"GET /gists/{gist_id}/forks\", \"GET /installation/repositories\", \"GET /issues\", \"GET /licenses\", \"GET /marketplace_listing/plans\", \"GET /marketplace_listing/plans/{plan_id}/accounts\", \"GET /marketplace_listing/stubbed/plans\", \"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\", \"GET /networks/{owner}/{repo}/events\", \"GET /notifications\", \"GET /organizations\", \"GET /orgs/{org}/actions/cache/usage-by-repository\", \"GET /orgs/{org}/actions/permissions/repositories\", \"GET /orgs/{org}/actions/runner-groups\", \"GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories\", \"GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners\", \"GET /orgs/{org}/actions/runners\", \"GET /orgs/{org}/actions/secrets\", \"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\", \"GET /orgs/{org}/audit-log\", \"GET /orgs/{org}/blocks\", \"GET /orgs/{org}/code-scanning/alerts\", \"GET /orgs/{org}/codespaces\", \"GET /orgs/{org}/credential-authorizations\", \"GET /orgs/{org}/dependabot/secrets\", \"GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories\", \"GET /orgs/{org}/events\", \"GET 
/orgs/{org}/external-groups\", \"GET /orgs/{org}/failed_invitations\", \"GET /orgs/{org}/hooks\", \"GET /orgs/{org}/hooks/{hook_id}/deliveries\", \"GET /orgs/{org}/installations\", \"GET /orgs/{org}/invitations\", \"GET /orgs/{org}/invitations/{invitation_id}/teams\", \"GET /orgs/{org}/issues\", \"GET /orgs/{org}/members\", \"GET /orgs/{org}/migrations\", \"GET /orgs/{org}/migrations/{migration_id}/repositories\", \"GET /orgs/{org}/outside_collaborators\", \"GET /orgs/{org}/packages\", \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\", \"GET /orgs/{org}/projects\", \"GET /orgs/{org}/public_members\", \"GET /orgs/{org}/repos\", \"GET /orgs/{org}/secret-scanning/alerts\", \"GET /orgs/{org}/settings/billing/advanced-security\", \"GET /orgs/{org}/team-sync/groups\", \"GET /orgs/{org}/teams\", \"GET /orgs/{org}/teams/{team_slug}/discussions\", \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\", \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\", \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\", \"GET /orgs/{org}/teams/{team_slug}/invitations\", \"GET /orgs/{org}/teams/{team_slug}/members\", \"GET /orgs/{org}/teams/{team_slug}/projects\", \"GET /orgs/{org}/teams/{team_slug}/repos\", \"GET /orgs/{org}/teams/{team_slug}/teams\", \"GET /projects/columns/{column_id}/cards\", \"GET /projects/{project_id}/collaborators\", \"GET /projects/{project_id}/columns\", \"GET /repos/{owner}/{repo}/actions/artifacts\", \"GET /repos/{owner}/{repo}/actions/caches\", \"GET /repos/{owner}/{repo}/actions/runners\", \"GET /repos/{owner}/{repo}/actions/runs\", \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\", \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs\", \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\", \"GET /repos/{owner}/{repo}/actions/secrets\", \"GET /repos/{owner}/{repo}/actions/workflows\", \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\", \"GET /repos/{owner}/{repo}/assignees\", \"GET /repos/{owner}/{repo}/branches\", \"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\", \"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\", \"GET /repos/{owner}/{repo}/code-scanning/alerts\", \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\", \"GET /repos/{owner}/{repo}/code-scanning/analyses\", \"GET /repos/{owner}/{repo}/codespaces\", \"GET /repos/{owner}/{repo}/codespaces/devcontainers\", \"GET /repos/{owner}/{repo}/codespaces/secrets\", \"GET /repos/{owner}/{repo}/collaborators\", \"GET /repos/{owner}/{repo}/comments\", \"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\", \"GET /repos/{owner}/{repo}/commits\", \"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\", \"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\", \"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\", \"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\", \"GET /repos/{owner}/{repo}/commits/{ref}/status\", \"GET /repos/{owner}/{repo}/commits/{ref}/statuses\", \"GET /repos/{owner}/{repo}/contributors\", \"GET /repos/{owner}/{repo}/dependabot/secrets\", \"GET /repos/{owner}/{repo}/deployments\", \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\", \"GET /repos/{owner}/{repo}/environments\", \"GET /repos/{owner}/{repo}/events\", \"GET /repos/{owner}/{repo}/forks\", \"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\", \"GET /repos/{owner}/{repo}/hooks\", 
\"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries\", \"GET /repos/{owner}/{repo}/invitations\", \"GET /repos/{owner}/{repo}/issues\", \"GET /repos/{owner}/{repo}/issues/comments\", \"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\", \"GET /repos/{owner}/{repo}/issues/events\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/events\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\", \"GET /repos/{owner}/{repo}/keys\", \"GET /repos/{owner}/{repo}/labels\", \"GET /repos/{owner}/{repo}/milestones\", \"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\", \"GET /repos/{owner}/{repo}/notifications\", \"GET /repos/{owner}/{repo}/pages/builds\", \"GET /repos/{owner}/{repo}/projects\", \"GET /repos/{owner}/{repo}/pulls\", \"GET /repos/{owner}/{repo}/pulls/comments\", \"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\", \"GET /repos/{owner}/{repo}/releases\", \"GET /repos/{owner}/{repo}/releases/{release_id}/assets\", \"GET /repos/{owner}/{repo}/releases/{release_id}/reactions\", \"GET /repos/{owner}/{repo}/secret-scanning/alerts\", \"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations\", \"GET /repos/{owner}/{repo}/stargazers\", \"GET /repos/{owner}/{repo}/subscribers\", \"GET /repos/{owner}/{repo}/tags\", \"GET /repos/{owner}/{repo}/teams\", \"GET /repos/{owner}/{repo}/topics\", \"GET /repositories\", \"GET /repositories/{repository_id}/environments/{environment_name}/secrets\", \"GET /search/code\", \"GET /search/commits\", \"GET /search/issues\", \"GET /search/labels\", \"GET /search/repositories\", \"GET /search/topics\", \"GET /search/users\", \"GET /teams/{team_id}/discussions\", \"GET /teams/{team_id}/discussions/{discussion_number}/comments\", \"GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions\", \"GET /teams/{team_id}/discussions/{discussion_number}/reactions\", \"GET /teams/{team_id}/invitations\", \"GET /teams/{team_id}/members\", \"GET /teams/{team_id}/projects\", \"GET /teams/{team_id}/repos\", \"GET /teams/{team_id}/teams\", \"GET /user/blocks\", \"GET /user/codespaces\", \"GET /user/codespaces/secrets\", \"GET /user/emails\", \"GET /user/followers\", \"GET /user/following\", \"GET /user/gpg_keys\", \"GET /user/installations\", \"GET /user/installations/{installation_id}/repositories\", \"GET /user/issues\", \"GET /user/keys\", \"GET /user/marketplace_purchases\", \"GET /user/marketplace_purchases/stubbed\", \"GET /user/memberships/orgs\", \"GET /user/migrations\", \"GET /user/migrations/{migration_id}/repositories\", \"GET /user/orgs\", \"GET /user/packages\", \"GET /user/packages/{package_type}/{package_name}/versions\", \"GET /user/public_emails\", \"GET /user/repos\", \"GET /user/repository_invitations\", \"GET /user/starred\", \"GET /user/subscriptions\", \"GET /user/teams\", \"GET /users\", \"GET /users/{username}/events\", \"GET /users/{username}/events/orgs/{org}\", \"GET 
/users/{username}/events/public\", \"GET /users/{username}/followers\", \"GET /users/{username}/following\", \"GET /users/{username}/gists\", \"GET /users/{username}/gpg_keys\", \"GET /users/{username}/keys\", \"GET /users/{username}/orgs\", \"GET /users/{username}/packages\", \"GET /users/{username}/projects\", \"GET /users/{username}/received_events\", \"GET /users/{username}/received_events/public\", \"GET /users/{username}/repos\", \"GET /users/{username}/starred\", \"GET /users/{username}/subscriptions\"];\n\nfunction isPaginatingEndpoint(arg) {\n if (typeof arg === \"string\") {\n return paginatingEndpoints.includes(arg);\n } else {\n return false;\n }\n}\n\n/**\n * @param octokit Octokit instance\n * @param options Options passed to Octokit constructor\n */\n\nfunction paginateRest(octokit) {\n return {\n paginate: Object.assign(paginate.bind(null, octokit), {\n iterator: iterator.bind(null, octokit)\n })\n };\n}\npaginateRest.VERSION = VERSION;\n\nexports.composePaginateRest = composePaginateRest;\nexports.isPaginatingEndpoint = isPaginatingEndpoint;\nexports.paginateRest = paginateRest;\nexports.paginatingEndpoints = paginatingEndpoints;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction ownKeys(object, enumerableOnly) {\n var keys = Object.keys(object);\n\n if (Object.getOwnPropertySymbols) {\n var symbols = Object.getOwnPropertySymbols(object);\n\n if (enumerableOnly) {\n symbols = symbols.filter(function (sym) {\n return Object.getOwnPropertyDescriptor(object, sym).enumerable;\n });\n }\n\n keys.push.apply(keys, symbols);\n }\n\n return keys;\n}\n\nfunction _objectSpread2(target) {\n for (var i = 1; i < arguments.length; i++) {\n var source = arguments[i] != null ? 
arguments[i] : {};\n\n if (i % 2) {\n ownKeys(Object(source), true).forEach(function (key) {\n _defineProperty(target, key, source[key]);\n });\n } else if (Object.getOwnPropertyDescriptors) {\n Object.defineProperties(target, Object.getOwnPropertyDescriptors(source));\n } else {\n ownKeys(Object(source)).forEach(function (key) {\n Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));\n });\n }\n }\n\n return target;\n}\n\nfunction _defineProperty(obj, key, value) {\n if (key in obj) {\n Object.defineProperty(obj, key, {\n value: value,\n enumerable: true,\n configurable: true,\n writable: true\n });\n } else {\n obj[key] = value;\n }\n\n return obj;\n}\n\nconst Endpoints = {\n actions: {\n addCustomLabelsToSelfHostedRunnerForOrg: [\"POST /orgs/{org}/actions/runners/{runner_id}/labels\"],\n addCustomLabelsToSelfHostedRunnerForRepo: [\"POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"],\n addSelectedRepoToOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"],\n approveWorkflowRun: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve\"],\n cancelWorkflowRun: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel\"],\n createOrUpdateEnvironmentSecret: [\"PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"],\n createOrUpdateOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}\"],\n createOrUpdateRepoSecret: [\"PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n createRegistrationTokenForOrg: [\"POST /orgs/{org}/actions/runners/registration-token\"],\n createRegistrationTokenForRepo: [\"POST /repos/{owner}/{repo}/actions/runners/registration-token\"],\n createRemoveTokenForOrg: [\"POST /orgs/{org}/actions/runners/remove-token\"],\n createRemoveTokenForRepo: [\"POST /repos/{owner}/{repo}/actions/runners/remove-token\"],\n createWorkflowDispatch: [\"POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches\"],\n deleteActionsCacheById: [\"DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}\"],\n deleteActionsCacheByKey: [\"DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}\"],\n deleteArtifact: [\"DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n deleteEnvironmentSecret: [\"DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"],\n deleteOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}\"],\n deleteRepoSecret: [\"DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n deleteSelfHostedRunnerFromOrg: [\"DELETE /orgs/{org}/actions/runners/{runner_id}\"],\n deleteSelfHostedRunnerFromRepo: [\"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}\"],\n deleteWorkflowRun: [\"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n deleteWorkflowRunLogs: [\"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs\"],\n disableSelectedRepositoryGithubActionsOrganization: [\"DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}\"],\n disableWorkflow: [\"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable\"],\n downloadArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}\"],\n downloadJobLogsForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs\"],\n downloadWorkflowRunAttemptLogs: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs\"],\n downloadWorkflowRunLogs: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs\"],\n 
enableSelectedRepositoryGithubActionsOrganization: [\"PUT /orgs/{org}/actions/permissions/repositories/{repository_id}\"],\n enableWorkflow: [\"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable\"],\n getActionsCacheList: [\"GET /repos/{owner}/{repo}/actions/caches\"],\n getActionsCacheUsage: [\"GET /repos/{owner}/{repo}/actions/cache/usage\"],\n getActionsCacheUsageByRepoForOrg: [\"GET /orgs/{org}/actions/cache/usage-by-repository\"],\n getActionsCacheUsageForEnterprise: [\"GET /enterprises/{enterprise}/actions/cache/usage\"],\n getActionsCacheUsageForOrg: [\"GET /orgs/{org}/actions/cache/usage\"],\n getAllowedActionsOrganization: [\"GET /orgs/{org}/actions/permissions/selected-actions\"],\n getAllowedActionsRepository: [\"GET /repos/{owner}/{repo}/actions/permissions/selected-actions\"],\n getArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n getEnvironmentPublicKey: [\"GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key\"],\n getEnvironmentSecret: [\"GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"],\n getGithubActionsDefaultWorkflowPermissionsEnterprise: [\"GET /enterprises/{enterprise}/actions/permissions/workflow\"],\n getGithubActionsDefaultWorkflowPermissionsOrganization: [\"GET /orgs/{org}/actions/permissions/workflow\"],\n getGithubActionsDefaultWorkflowPermissionsRepository: [\"GET /repos/{owner}/{repo}/actions/permissions/workflow\"],\n getGithubActionsPermissionsOrganization: [\"GET /orgs/{org}/actions/permissions\"],\n getGithubActionsPermissionsRepository: [\"GET /repos/{owner}/{repo}/actions/permissions\"],\n getJobForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/actions/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}\"],\n getPendingDeploymentsForRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\"],\n getRepoPermissions: [\"GET /repos/{owner}/{repo}/actions/permissions\", {}, {\n renamed: [\"actions\", \"getGithubActionsPermissionsRepository\"]\n }],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/actions/secrets/public-key\"],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n getReviewsForRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals\"],\n getSelfHostedRunnerForOrg: [\"GET /orgs/{org}/actions/runners/{runner_id}\"],\n getSelfHostedRunnerForRepo: [\"GET /repos/{owner}/{repo}/actions/runners/{runner_id}\"],\n getWorkflow: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}\"],\n getWorkflowAccessToRepository: [\"GET /repos/{owner}/{repo}/actions/permissions/access\"],\n getWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n getWorkflowRunAttempt: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}\"],\n getWorkflowRunUsage: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing\"],\n getWorkflowUsage: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing\"],\n listArtifactsForRepo: [\"GET /repos/{owner}/{repo}/actions/artifacts\"],\n listEnvironmentSecrets: [\"GET /repositories/{repository_id}/environments/{environment_name}/secrets\"],\n listJobsForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\"],\n listJobsForWorkflowRunAttempt: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs\"],\n listLabelsForSelfHostedRunnerForOrg: [\"GET 
/orgs/{org}/actions/runners/{runner_id}/labels\"],\n listLabelsForSelfHostedRunnerForRepo: [\"GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"],\n listOrgSecrets: [\"GET /orgs/{org}/actions/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/actions/secrets\"],\n listRepoWorkflows: [\"GET /repos/{owner}/{repo}/actions/workflows\"],\n listRunnerApplicationsForOrg: [\"GET /orgs/{org}/actions/runners/downloads\"],\n listRunnerApplicationsForRepo: [\"GET /repos/{owner}/{repo}/actions/runners/downloads\"],\n listSelectedReposForOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\"],\n listSelectedRepositoriesEnabledGithubActionsOrganization: [\"GET /orgs/{org}/actions/permissions/repositories\"],\n listSelfHostedRunnersForOrg: [\"GET /orgs/{org}/actions/runners\"],\n listSelfHostedRunnersForRepo: [\"GET /repos/{owner}/{repo}/actions/runners\"],\n listWorkflowRunArtifacts: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\"],\n listWorkflowRuns: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\"],\n listWorkflowRunsForRepo: [\"GET /repos/{owner}/{repo}/actions/runs\"],\n reRunJobForWorkflowRun: [\"POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun\"],\n reRunWorkflow: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun\"],\n reRunWorkflowFailedJobs: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs\"],\n removeAllCustomLabelsFromSelfHostedRunnerForOrg: [\"DELETE /orgs/{org}/actions/runners/{runner_id}/labels\"],\n removeAllCustomLabelsFromSelfHostedRunnerForRepo: [\"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"],\n removeCustomLabelFromSelfHostedRunnerForOrg: [\"DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}\"],\n removeCustomLabelFromSelfHostedRunnerForRepo: [\"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}\"],\n removeSelectedRepoFromOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"],\n reviewPendingDeploymentsForRun: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\"],\n setAllowedActionsOrganization: [\"PUT /orgs/{org}/actions/permissions/selected-actions\"],\n setAllowedActionsRepository: [\"PUT /repos/{owner}/{repo}/actions/permissions/selected-actions\"],\n setCustomLabelsForSelfHostedRunnerForOrg: [\"PUT /orgs/{org}/actions/runners/{runner_id}/labels\"],\n setCustomLabelsForSelfHostedRunnerForRepo: [\"PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"],\n setGithubActionsDefaultWorkflowPermissionsEnterprise: [\"PUT /enterprises/{enterprise}/actions/permissions/workflow\"],\n setGithubActionsDefaultWorkflowPermissionsOrganization: [\"PUT /orgs/{org}/actions/permissions/workflow\"],\n setGithubActionsDefaultWorkflowPermissionsRepository: [\"PUT /repos/{owner}/{repo}/actions/permissions/workflow\"],\n setGithubActionsPermissionsOrganization: [\"PUT /orgs/{org}/actions/permissions\"],\n setGithubActionsPermissionsRepository: [\"PUT /repos/{owner}/{repo}/actions/permissions\"],\n setSelectedReposForOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories\"],\n setSelectedRepositoriesEnabledGithubActionsOrganization: [\"PUT /orgs/{org}/actions/permissions/repositories\"],\n setWorkflowAccessToRepository: [\"PUT /repos/{owner}/{repo}/actions/permissions/access\"]\n },\n activity: {\n checkRepoIsStarredByAuthenticatedUser: [\"GET /user/starred/{owner}/{repo}\"],\n deleteRepoSubscription: [\"DELETE /repos/{owner}/{repo}/subscription\"],\n 
deleteThreadSubscription: [\"DELETE /notifications/threads/{thread_id}/subscription\"],\n getFeeds: [\"GET /feeds\"],\n getRepoSubscription: [\"GET /repos/{owner}/{repo}/subscription\"],\n getThread: [\"GET /notifications/threads/{thread_id}\"],\n getThreadSubscriptionForAuthenticatedUser: [\"GET /notifications/threads/{thread_id}/subscription\"],\n listEventsForAuthenticatedUser: [\"GET /users/{username}/events\"],\n listNotificationsForAuthenticatedUser: [\"GET /notifications\"],\n listOrgEventsForAuthenticatedUser: [\"GET /users/{username}/events/orgs/{org}\"],\n listPublicEvents: [\"GET /events\"],\n listPublicEventsForRepoNetwork: [\"GET /networks/{owner}/{repo}/events\"],\n listPublicEventsForUser: [\"GET /users/{username}/events/public\"],\n listPublicOrgEvents: [\"GET /orgs/{org}/events\"],\n listReceivedEventsForUser: [\"GET /users/{username}/received_events\"],\n listReceivedPublicEventsForUser: [\"GET /users/{username}/received_events/public\"],\n listRepoEvents: [\"GET /repos/{owner}/{repo}/events\"],\n listRepoNotificationsForAuthenticatedUser: [\"GET /repos/{owner}/{repo}/notifications\"],\n listReposStarredByAuthenticatedUser: [\"GET /user/starred\"],\n listReposStarredByUser: [\"GET /users/{username}/starred\"],\n listReposWatchedByUser: [\"GET /users/{username}/subscriptions\"],\n listStargazersForRepo: [\"GET /repos/{owner}/{repo}/stargazers\"],\n listWatchedReposForAuthenticatedUser: [\"GET /user/subscriptions\"],\n listWatchersForRepo: [\"GET /repos/{owner}/{repo}/subscribers\"],\n markNotificationsAsRead: [\"PUT /notifications\"],\n markRepoNotificationsAsRead: [\"PUT /repos/{owner}/{repo}/notifications\"],\n markThreadAsRead: [\"PATCH /notifications/threads/{thread_id}\"],\n setRepoSubscription: [\"PUT /repos/{owner}/{repo}/subscription\"],\n setThreadSubscription: [\"PUT /notifications/threads/{thread_id}/subscription\"],\n starRepoForAuthenticatedUser: [\"PUT /user/starred/{owner}/{repo}\"],\n unstarRepoForAuthenticatedUser: [\"DELETE /user/starred/{owner}/{repo}\"]\n },\n apps: {\n addRepoToInstallation: [\"PUT /user/installations/{installation_id}/repositories/{repository_id}\", {}, {\n renamed: [\"apps\", \"addRepoToInstallationForAuthenticatedUser\"]\n }],\n addRepoToInstallationForAuthenticatedUser: [\"PUT /user/installations/{installation_id}/repositories/{repository_id}\"],\n checkToken: [\"POST /applications/{client_id}/token\"],\n createFromManifest: [\"POST /app-manifests/{code}/conversions\"],\n createInstallationAccessToken: [\"POST /app/installations/{installation_id}/access_tokens\"],\n deleteAuthorization: [\"DELETE /applications/{client_id}/grant\"],\n deleteInstallation: [\"DELETE /app/installations/{installation_id}\"],\n deleteToken: [\"DELETE /applications/{client_id}/token\"],\n getAuthenticated: [\"GET /app\"],\n getBySlug: [\"GET /apps/{app_slug}\"],\n getInstallation: [\"GET /app/installations/{installation_id}\"],\n getOrgInstallation: [\"GET /orgs/{org}/installation\"],\n getRepoInstallation: [\"GET /repos/{owner}/{repo}/installation\"],\n getSubscriptionPlanForAccount: [\"GET /marketplace_listing/accounts/{account_id}\"],\n getSubscriptionPlanForAccountStubbed: [\"GET /marketplace_listing/stubbed/accounts/{account_id}\"],\n getUserInstallation: [\"GET /users/{username}/installation\"],\n getWebhookConfigForApp: [\"GET /app/hook/config\"],\n getWebhookDelivery: [\"GET /app/hook/deliveries/{delivery_id}\"],\n listAccountsForPlan: [\"GET /marketplace_listing/plans/{plan_id}/accounts\"],\n listAccountsForPlanStubbed: [\"GET 
/marketplace_listing/stubbed/plans/{plan_id}/accounts\"],\n listInstallationReposForAuthenticatedUser: [\"GET /user/installations/{installation_id}/repositories\"],\n listInstallations: [\"GET /app/installations\"],\n listInstallationsForAuthenticatedUser: [\"GET /user/installations\"],\n listPlans: [\"GET /marketplace_listing/plans\"],\n listPlansStubbed: [\"GET /marketplace_listing/stubbed/plans\"],\n listReposAccessibleToInstallation: [\"GET /installation/repositories\"],\n listSubscriptionsForAuthenticatedUser: [\"GET /user/marketplace_purchases\"],\n listSubscriptionsForAuthenticatedUserStubbed: [\"GET /user/marketplace_purchases/stubbed\"],\n listWebhookDeliveries: [\"GET /app/hook/deliveries\"],\n redeliverWebhookDelivery: [\"POST /app/hook/deliveries/{delivery_id}/attempts\"],\n removeRepoFromInstallation: [\"DELETE /user/installations/{installation_id}/repositories/{repository_id}\", {}, {\n renamed: [\"apps\", \"removeRepoFromInstallationForAuthenticatedUser\"]\n }],\n removeRepoFromInstallationForAuthenticatedUser: [\"DELETE /user/installations/{installation_id}/repositories/{repository_id}\"],\n resetToken: [\"PATCH /applications/{client_id}/token\"],\n revokeInstallationAccessToken: [\"DELETE /installation/token\"],\n scopeToken: [\"POST /applications/{client_id}/token/scoped\"],\n suspendInstallation: [\"PUT /app/installations/{installation_id}/suspended\"],\n unsuspendInstallation: [\"DELETE /app/installations/{installation_id}/suspended\"],\n updateWebhookConfigForApp: [\"PATCH /app/hook/config\"]\n },\n billing: {\n getGithubActionsBillingOrg: [\"GET /orgs/{org}/settings/billing/actions\"],\n getGithubActionsBillingUser: [\"GET /users/{username}/settings/billing/actions\"],\n getGithubAdvancedSecurityBillingGhe: [\"GET /enterprises/{enterprise}/settings/billing/advanced-security\"],\n getGithubAdvancedSecurityBillingOrg: [\"GET /orgs/{org}/settings/billing/advanced-security\"],\n getGithubPackagesBillingOrg: [\"GET /orgs/{org}/settings/billing/packages\"],\n getGithubPackagesBillingUser: [\"GET /users/{username}/settings/billing/packages\"],\n getSharedStorageBillingOrg: [\"GET /orgs/{org}/settings/billing/shared-storage\"],\n getSharedStorageBillingUser: [\"GET /users/{username}/settings/billing/shared-storage\"]\n },\n checks: {\n create: [\"POST /repos/{owner}/{repo}/check-runs\"],\n createSuite: [\"POST /repos/{owner}/{repo}/check-suites\"],\n get: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}\"],\n getSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}\"],\n listAnnotations: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\"],\n listForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\"],\n listForSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\"],\n listSuitesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\"],\n rerequestRun: [\"POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest\"],\n rerequestSuite: [\"POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest\"],\n setSuitesPreferences: [\"PATCH /repos/{owner}/{repo}/check-suites/preferences\"],\n update: [\"PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}\"]\n },\n codeScanning: {\n deleteAnalysis: [\"DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}\"],\n getAlert: [\"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\", {}, {\n renamedParameters: {\n alert_id: \"alert_number\"\n }\n }],\n getAnalysis: [\"GET 
/repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}\"],\n getSarif: [\"GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}\"],\n listAlertInstances: [\"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\"],\n listAlertsForOrg: [\"GET /orgs/{org}/code-scanning/alerts\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/code-scanning/alerts\"],\n listAlertsInstances: [\"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\", {}, {\n renamed: [\"codeScanning\", \"listAlertInstances\"]\n }],\n listRecentAnalyses: [\"GET /repos/{owner}/{repo}/code-scanning/analyses\"],\n updateAlert: [\"PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\"],\n uploadSarif: [\"POST /repos/{owner}/{repo}/code-scanning/sarifs\"]\n },\n codesOfConduct: {\n getAllCodesOfConduct: [\"GET /codes_of_conduct\"],\n getConductCode: [\"GET /codes_of_conduct/{key}\"]\n },\n codespaces: {\n addRepositoryForSecretForAuthenticatedUser: [\"PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}\"],\n codespaceMachinesForAuthenticatedUser: [\"GET /user/codespaces/{codespace_name}/machines\"],\n createForAuthenticatedUser: [\"POST /user/codespaces\"],\n createOrUpdateRepoSecret: [\"PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}\"],\n createOrUpdateSecretForAuthenticatedUser: [\"PUT /user/codespaces/secrets/{secret_name}\"],\n createWithPrForAuthenticatedUser: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces\"],\n createWithRepoForAuthenticatedUser: [\"POST /repos/{owner}/{repo}/codespaces\"],\n deleteForAuthenticatedUser: [\"DELETE /user/codespaces/{codespace_name}\"],\n deleteFromOrganization: [\"DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}\"],\n deleteRepoSecret: [\"DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}\"],\n deleteSecretForAuthenticatedUser: [\"DELETE /user/codespaces/secrets/{secret_name}\"],\n exportForAuthenticatedUser: [\"POST /user/codespaces/{codespace_name}/exports\"],\n getExportDetailsForAuthenticatedUser: [\"GET /user/codespaces/{codespace_name}/exports/{export_id}\"],\n getForAuthenticatedUser: [\"GET /user/codespaces/{codespace_name}\"],\n getPublicKeyForAuthenticatedUser: [\"GET /user/codespaces/secrets/public-key\"],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/codespaces/secrets/public-key\"],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}\"],\n getSecretForAuthenticatedUser: [\"GET /user/codespaces/secrets/{secret_name}\"],\n listDevcontainersInRepositoryForAuthenticatedUser: [\"GET /repos/{owner}/{repo}/codespaces/devcontainers\"],\n listForAuthenticatedUser: [\"GET /user/codespaces\"],\n listInOrganization: [\"GET /orgs/{org}/codespaces\", {}, {\n renamedParameters: {\n org_id: \"org\"\n }\n }],\n listInRepositoryForAuthenticatedUser: [\"GET /repos/{owner}/{repo}/codespaces\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/codespaces/secrets\"],\n listRepositoriesForSecretForAuthenticatedUser: [\"GET /user/codespaces/secrets/{secret_name}/repositories\"],\n listSecretsForAuthenticatedUser: [\"GET /user/codespaces/secrets\"],\n removeRepositoryForSecretForAuthenticatedUser: [\"DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}\"],\n repoMachinesForAuthenticatedUser: [\"GET /repos/{owner}/{repo}/codespaces/machines\"],\n setRepositoriesForSecretForAuthenticatedUser: [\"PUT /user/codespaces/secrets/{secret_name}/repositories\"],\n startForAuthenticatedUser: [\"POST 
/user/codespaces/{codespace_name}/start\"],\n stopForAuthenticatedUser: [\"POST /user/codespaces/{codespace_name}/stop\"],\n stopInOrganization: [\"POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop\"],\n updateForAuthenticatedUser: [\"PATCH /user/codespaces/{codespace_name}\"]\n },\n dependabot: {\n addSelectedRepoToOrgSecret: [\"PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}\"],\n createOrUpdateOrgSecret: [\"PUT /orgs/{org}/dependabot/secrets/{secret_name}\"],\n createOrUpdateRepoSecret: [\"PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\"],\n deleteOrgSecret: [\"DELETE /orgs/{org}/dependabot/secrets/{secret_name}\"],\n deleteRepoSecret: [\"DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/dependabot/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/dependabot/secrets/{secret_name}\"],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/dependabot/secrets/public-key\"],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\"],\n listOrgSecrets: [\"GET /orgs/{org}/dependabot/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/dependabot/secrets\"],\n listSelectedReposForOrgSecret: [\"GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories\"],\n removeSelectedRepoFromOrgSecret: [\"DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}\"],\n setSelectedReposForOrgSecret: [\"PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories\"]\n },\n dependencyGraph: {\n createRepositorySnapshot: [\"POST /repos/{owner}/{repo}/dependency-graph/snapshots\"],\n diffRange: [\"GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}\"]\n },\n emojis: {\n get: [\"GET /emojis\"]\n },\n enterpriseAdmin: {\n addCustomLabelsToSelfHostedRunnerForEnterprise: [\"POST /enterprises/{enterprise}/actions/runners/{runner_id}/labels\"],\n disableSelectedOrganizationGithubActionsEnterprise: [\"DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}\"],\n enableSelectedOrganizationGithubActionsEnterprise: [\"PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}\"],\n getAllowedActionsEnterprise: [\"GET /enterprises/{enterprise}/actions/permissions/selected-actions\"],\n getGithubActionsPermissionsEnterprise: [\"GET /enterprises/{enterprise}/actions/permissions\"],\n getServerStatistics: [\"GET /enterprise-installation/{enterprise_or_org}/server-statistics\"],\n listLabelsForSelfHostedRunnerForEnterprise: [\"GET /enterprises/{enterprise}/actions/runners/{runner_id}/labels\"],\n listSelectedOrganizationsEnabledGithubActionsEnterprise: [\"GET /enterprises/{enterprise}/actions/permissions/organizations\"],\n removeAllCustomLabelsFromSelfHostedRunnerForEnterprise: [\"DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels\"],\n removeCustomLabelFromSelfHostedRunnerForEnterprise: [\"DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels/{name}\"],\n setAllowedActionsEnterprise: [\"PUT /enterprises/{enterprise}/actions/permissions/selected-actions\"],\n setCustomLabelsForSelfHostedRunnerForEnterprise: [\"PUT /enterprises/{enterprise}/actions/runners/{runner_id}/labels\"],\n setGithubActionsPermissionsEnterprise: [\"PUT /enterprises/{enterprise}/actions/permissions\"],\n setSelectedOrganizationsEnabledGithubActionsEnterprise: [\"PUT /enterprises/{enterprise}/actions/permissions/organizations\"]\n },\n gists: {\n checkIsStarred: [\"GET /gists/{gist_id}/star\"],\n create: 
[\"POST /gists\"],\n createComment: [\"POST /gists/{gist_id}/comments\"],\n delete: [\"DELETE /gists/{gist_id}\"],\n deleteComment: [\"DELETE /gists/{gist_id}/comments/{comment_id}\"],\n fork: [\"POST /gists/{gist_id}/forks\"],\n get: [\"GET /gists/{gist_id}\"],\n getComment: [\"GET /gists/{gist_id}/comments/{comment_id}\"],\n getRevision: [\"GET /gists/{gist_id}/{sha}\"],\n list: [\"GET /gists\"],\n listComments: [\"GET /gists/{gist_id}/comments\"],\n listCommits: [\"GET /gists/{gist_id}/commits\"],\n listForUser: [\"GET /users/{username}/gists\"],\n listForks: [\"GET /gists/{gist_id}/forks\"],\n listPublic: [\"GET /gists/public\"],\n listStarred: [\"GET /gists/starred\"],\n star: [\"PUT /gists/{gist_id}/star\"],\n unstar: [\"DELETE /gists/{gist_id}/star\"],\n update: [\"PATCH /gists/{gist_id}\"],\n updateComment: [\"PATCH /gists/{gist_id}/comments/{comment_id}\"]\n },\n git: {\n createBlob: [\"POST /repos/{owner}/{repo}/git/blobs\"],\n createCommit: [\"POST /repos/{owner}/{repo}/git/commits\"],\n createRef: [\"POST /repos/{owner}/{repo}/git/refs\"],\n createTag: [\"POST /repos/{owner}/{repo}/git/tags\"],\n createTree: [\"POST /repos/{owner}/{repo}/git/trees\"],\n deleteRef: [\"DELETE /repos/{owner}/{repo}/git/refs/{ref}\"],\n getBlob: [\"GET /repos/{owner}/{repo}/git/blobs/{file_sha}\"],\n getCommit: [\"GET /repos/{owner}/{repo}/git/commits/{commit_sha}\"],\n getRef: [\"GET /repos/{owner}/{repo}/git/ref/{ref}\"],\n getTag: [\"GET /repos/{owner}/{repo}/git/tags/{tag_sha}\"],\n getTree: [\"GET /repos/{owner}/{repo}/git/trees/{tree_sha}\"],\n listMatchingRefs: [\"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\"],\n updateRef: [\"PATCH /repos/{owner}/{repo}/git/refs/{ref}\"]\n },\n gitignore: {\n getAllTemplates: [\"GET /gitignore/templates\"],\n getTemplate: [\"GET /gitignore/templates/{name}\"]\n },\n interactions: {\n getRestrictionsForAuthenticatedUser: [\"GET /user/interaction-limits\"],\n getRestrictionsForOrg: [\"GET /orgs/{org}/interaction-limits\"],\n getRestrictionsForRepo: [\"GET /repos/{owner}/{repo}/interaction-limits\"],\n getRestrictionsForYourPublicRepos: [\"GET /user/interaction-limits\", {}, {\n renamed: [\"interactions\", \"getRestrictionsForAuthenticatedUser\"]\n }],\n removeRestrictionsForAuthenticatedUser: [\"DELETE /user/interaction-limits\"],\n removeRestrictionsForOrg: [\"DELETE /orgs/{org}/interaction-limits\"],\n removeRestrictionsForRepo: [\"DELETE /repos/{owner}/{repo}/interaction-limits\"],\n removeRestrictionsForYourPublicRepos: [\"DELETE /user/interaction-limits\", {}, {\n renamed: [\"interactions\", \"removeRestrictionsForAuthenticatedUser\"]\n }],\n setRestrictionsForAuthenticatedUser: [\"PUT /user/interaction-limits\"],\n setRestrictionsForOrg: [\"PUT /orgs/{org}/interaction-limits\"],\n setRestrictionsForRepo: [\"PUT /repos/{owner}/{repo}/interaction-limits\"],\n setRestrictionsForYourPublicRepos: [\"PUT /user/interaction-limits\", {}, {\n renamed: [\"interactions\", \"setRestrictionsForAuthenticatedUser\"]\n }]\n },\n issues: {\n addAssignees: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/assignees\"],\n addLabels: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n checkUserCanBeAssigned: [\"GET /repos/{owner}/{repo}/assignees/{assignee}\"],\n create: [\"POST /repos/{owner}/{repo}/issues\"],\n createComment: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n createLabel: [\"POST /repos/{owner}/{repo}/labels\"],\n createMilestone: [\"POST /repos/{owner}/{repo}/milestones\"],\n deleteComment: [\"DELETE 
/repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n deleteLabel: [\"DELETE /repos/{owner}/{repo}/labels/{name}\"],\n deleteMilestone: [\"DELETE /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n get: [\"GET /repos/{owner}/{repo}/issues/{issue_number}\"],\n getComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n getEvent: [\"GET /repos/{owner}/{repo}/issues/events/{event_id}\"],\n getLabel: [\"GET /repos/{owner}/{repo}/labels/{name}\"],\n getMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n list: [\"GET /issues\"],\n listAssignees: [\"GET /repos/{owner}/{repo}/assignees\"],\n listComments: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n listCommentsForRepo: [\"GET /repos/{owner}/{repo}/issues/comments\"],\n listEvents: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/events\"],\n listEventsForRepo: [\"GET /repos/{owner}/{repo}/issues/events\"],\n listEventsForTimeline: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\"],\n listForAuthenticatedUser: [\"GET /user/issues\"],\n listForOrg: [\"GET /orgs/{org}/issues\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/issues\"],\n listLabelsForMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\"],\n listLabelsForRepo: [\"GET /repos/{owner}/{repo}/labels\"],\n listLabelsOnIssue: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n listMilestones: [\"GET /repos/{owner}/{repo}/milestones\"],\n lock: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n removeAllLabels: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n removeAssignees: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees\"],\n removeLabel: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}\"],\n setLabels: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n unlock: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n update: [\"PATCH /repos/{owner}/{repo}/issues/{issue_number}\"],\n updateComment: [\"PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n updateLabel: [\"PATCH /repos/{owner}/{repo}/labels/{name}\"],\n updateMilestone: [\"PATCH /repos/{owner}/{repo}/milestones/{milestone_number}\"]\n },\n licenses: {\n get: [\"GET /licenses/{license}\"],\n getAllCommonlyUsed: [\"GET /licenses\"],\n getForRepo: [\"GET /repos/{owner}/{repo}/license\"]\n },\n markdown: {\n render: [\"POST /markdown\"],\n renderRaw: [\"POST /markdown/raw\", {\n headers: {\n \"content-type\": \"text/plain; charset=utf-8\"\n }\n }]\n },\n meta: {\n get: [\"GET /meta\"],\n getOctocat: [\"GET /octocat\"],\n getZen: [\"GET /zen\"],\n root: [\"GET /\"]\n },\n migrations: {\n cancelImport: [\"DELETE /repos/{owner}/{repo}/import\"],\n deleteArchiveForAuthenticatedUser: [\"DELETE /user/migrations/{migration_id}/archive\"],\n deleteArchiveForOrg: [\"DELETE /orgs/{org}/migrations/{migration_id}/archive\"],\n downloadArchiveForOrg: [\"GET /orgs/{org}/migrations/{migration_id}/archive\"],\n getArchiveForAuthenticatedUser: [\"GET /user/migrations/{migration_id}/archive\"],\n getCommitAuthors: [\"GET /repos/{owner}/{repo}/import/authors\"],\n getImportStatus: [\"GET /repos/{owner}/{repo}/import\"],\n getLargeFiles: [\"GET /repos/{owner}/{repo}/import/large_files\"],\n getStatusForAuthenticatedUser: [\"GET /user/migrations/{migration_id}\"],\n getStatusForOrg: [\"GET /orgs/{org}/migrations/{migration_id}\"],\n listForAuthenticatedUser: [\"GET /user/migrations\"],\n listForOrg: [\"GET /orgs/{org}/migrations\"],\n 
listReposForAuthenticatedUser: [\"GET /user/migrations/{migration_id}/repositories\"],\n listReposForOrg: [\"GET /orgs/{org}/migrations/{migration_id}/repositories\"],\n listReposForUser: [\"GET /user/migrations/{migration_id}/repositories\", {}, {\n renamed: [\"migrations\", \"listReposForAuthenticatedUser\"]\n }],\n mapCommitAuthor: [\"PATCH /repos/{owner}/{repo}/import/authors/{author_id}\"],\n setLfsPreference: [\"PATCH /repos/{owner}/{repo}/import/lfs\"],\n startForAuthenticatedUser: [\"POST /user/migrations\"],\n startForOrg: [\"POST /orgs/{org}/migrations\"],\n startImport: [\"PUT /repos/{owner}/{repo}/import\"],\n unlockRepoForAuthenticatedUser: [\"DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock\"],\n unlockRepoForOrg: [\"DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock\"],\n updateImport: [\"PATCH /repos/{owner}/{repo}/import\"]\n },\n orgs: {\n blockUser: [\"PUT /orgs/{org}/blocks/{username}\"],\n cancelInvitation: [\"DELETE /orgs/{org}/invitations/{invitation_id}\"],\n checkBlockedUser: [\"GET /orgs/{org}/blocks/{username}\"],\n checkMembershipForUser: [\"GET /orgs/{org}/members/{username}\"],\n checkPublicMembershipForUser: [\"GET /orgs/{org}/public_members/{username}\"],\n convertMemberToOutsideCollaborator: [\"PUT /orgs/{org}/outside_collaborators/{username}\"],\n createInvitation: [\"POST /orgs/{org}/invitations\"],\n createWebhook: [\"POST /orgs/{org}/hooks\"],\n deleteWebhook: [\"DELETE /orgs/{org}/hooks/{hook_id}\"],\n get: [\"GET /orgs/{org}\"],\n getMembershipForAuthenticatedUser: [\"GET /user/memberships/orgs/{org}\"],\n getMembershipForUser: [\"GET /orgs/{org}/memberships/{username}\"],\n getWebhook: [\"GET /orgs/{org}/hooks/{hook_id}\"],\n getWebhookConfigForOrg: [\"GET /orgs/{org}/hooks/{hook_id}/config\"],\n getWebhookDelivery: [\"GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}\"],\n list: [\"GET /organizations\"],\n listAppInstallations: [\"GET /orgs/{org}/installations\"],\n listBlockedUsers: [\"GET /orgs/{org}/blocks\"],\n listCustomRoles: [\"GET /organizations/{organization_id}/custom_roles\"],\n listFailedInvitations: [\"GET /orgs/{org}/failed_invitations\"],\n listForAuthenticatedUser: [\"GET /user/orgs\"],\n listForUser: [\"GET /users/{username}/orgs\"],\n listInvitationTeams: [\"GET /orgs/{org}/invitations/{invitation_id}/teams\"],\n listMembers: [\"GET /orgs/{org}/members\"],\n listMembershipsForAuthenticatedUser: [\"GET /user/memberships/orgs\"],\n listOutsideCollaborators: [\"GET /orgs/{org}/outside_collaborators\"],\n listPendingInvitations: [\"GET /orgs/{org}/invitations\"],\n listPublicMembers: [\"GET /orgs/{org}/public_members\"],\n listWebhookDeliveries: [\"GET /orgs/{org}/hooks/{hook_id}/deliveries\"],\n listWebhooks: [\"GET /orgs/{org}/hooks\"],\n pingWebhook: [\"POST /orgs/{org}/hooks/{hook_id}/pings\"],\n redeliverWebhookDelivery: [\"POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts\"],\n removeMember: [\"DELETE /orgs/{org}/members/{username}\"],\n removeMembershipForUser: [\"DELETE /orgs/{org}/memberships/{username}\"],\n removeOutsideCollaborator: [\"DELETE /orgs/{org}/outside_collaborators/{username}\"],\n removePublicMembershipForAuthenticatedUser: [\"DELETE /orgs/{org}/public_members/{username}\"],\n setMembershipForUser: [\"PUT /orgs/{org}/memberships/{username}\"],\n setPublicMembershipForAuthenticatedUser: [\"PUT /orgs/{org}/public_members/{username}\"],\n unblockUser: [\"DELETE /orgs/{org}/blocks/{username}\"],\n update: [\"PATCH /orgs/{org}\"],\n 
updateMembershipForAuthenticatedUser: [\"PATCH /user/memberships/orgs/{org}\"],\n updateWebhook: [\"PATCH /orgs/{org}/hooks/{hook_id}\"],\n updateWebhookConfigForOrg: [\"PATCH /orgs/{org}/hooks/{hook_id}/config\"]\n },\n packages: {\n deletePackageForAuthenticatedUser: [\"DELETE /user/packages/{package_type}/{package_name}\"],\n deletePackageForOrg: [\"DELETE /orgs/{org}/packages/{package_type}/{package_name}\"],\n deletePackageForUser: [\"DELETE /users/{username}/packages/{package_type}/{package_name}\"],\n deletePackageVersionForAuthenticatedUser: [\"DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n deletePackageVersionForOrg: [\"DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n deletePackageVersionForUser: [\"DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n getAllPackageVersionsForAPackageOwnedByAnOrg: [\"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\", {}, {\n renamed: [\"packages\", \"getAllPackageVersionsForPackageOwnedByOrg\"]\n }],\n getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [\"GET /user/packages/{package_type}/{package_name}/versions\", {}, {\n renamed: [\"packages\", \"getAllPackageVersionsForPackageOwnedByAuthenticatedUser\"]\n }],\n getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [\"GET /user/packages/{package_type}/{package_name}/versions\"],\n getAllPackageVersionsForPackageOwnedByOrg: [\"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\"],\n getAllPackageVersionsForPackageOwnedByUser: [\"GET /users/{username}/packages/{package_type}/{package_name}/versions\"],\n getPackageForAuthenticatedUser: [\"GET /user/packages/{package_type}/{package_name}\"],\n getPackageForOrganization: [\"GET /orgs/{org}/packages/{package_type}/{package_name}\"],\n getPackageForUser: [\"GET /users/{username}/packages/{package_type}/{package_name}\"],\n getPackageVersionForAuthenticatedUser: [\"GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n getPackageVersionForOrganization: [\"GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n getPackageVersionForUser: [\"GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n listPackagesForAuthenticatedUser: [\"GET /user/packages\"],\n listPackagesForOrganization: [\"GET /orgs/{org}/packages\"],\n listPackagesForUser: [\"GET /users/{username}/packages\"],\n restorePackageForAuthenticatedUser: [\"POST /user/packages/{package_type}/{package_name}/restore{?token}\"],\n restorePackageForOrg: [\"POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}\"],\n restorePackageForUser: [\"POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}\"],\n restorePackageVersionForAuthenticatedUser: [\"POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"],\n restorePackageVersionForOrg: [\"POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"],\n restorePackageVersionForUser: [\"POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"]\n },\n projects: {\n addCollaborator: [\"PUT /projects/{project_id}/collaborators/{username}\"],\n createCard: [\"POST /projects/columns/{column_id}/cards\"],\n createColumn: [\"POST /projects/{project_id}/columns\"],\n createForAuthenticatedUser: [\"POST /user/projects\"],\n 
createForOrg: [\"POST /orgs/{org}/projects\"],\n createForRepo: [\"POST /repos/{owner}/{repo}/projects\"],\n delete: [\"DELETE /projects/{project_id}\"],\n deleteCard: [\"DELETE /projects/columns/cards/{card_id}\"],\n deleteColumn: [\"DELETE /projects/columns/{column_id}\"],\n get: [\"GET /projects/{project_id}\"],\n getCard: [\"GET /projects/columns/cards/{card_id}\"],\n getColumn: [\"GET /projects/columns/{column_id}\"],\n getPermissionForUser: [\"GET /projects/{project_id}/collaborators/{username}/permission\"],\n listCards: [\"GET /projects/columns/{column_id}/cards\"],\n listCollaborators: [\"GET /projects/{project_id}/collaborators\"],\n listColumns: [\"GET /projects/{project_id}/columns\"],\n listForOrg: [\"GET /orgs/{org}/projects\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/projects\"],\n listForUser: [\"GET /users/{username}/projects\"],\n moveCard: [\"POST /projects/columns/cards/{card_id}/moves\"],\n moveColumn: [\"POST /projects/columns/{column_id}/moves\"],\n removeCollaborator: [\"DELETE /projects/{project_id}/collaborators/{username}\"],\n update: [\"PATCH /projects/{project_id}\"],\n updateCard: [\"PATCH /projects/columns/cards/{card_id}\"],\n updateColumn: [\"PATCH /projects/columns/{column_id}\"]\n },\n pulls: {\n checkIfMerged: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n create: [\"POST /repos/{owner}/{repo}/pulls\"],\n createReplyForReviewComment: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies\"],\n createReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n createReviewComment: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments\"],\n deletePendingReview: [\"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n deleteReviewComment: [\"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n dismissReview: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals\"],\n get: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}\"],\n getReview: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n getReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n list: [\"GET /repos/{owner}/{repo}/pulls\"],\n listCommentsForReview: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\"],\n listCommits: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\"],\n listFiles: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\"],\n listRequestedReviewers: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n listReviewComments: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\"],\n listReviewCommentsForRepo: [\"GET /repos/{owner}/{repo}/pulls/comments\"],\n listReviews: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n merge: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n removeRequestedReviewers: [\"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n requestReviewers: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n submitReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events\"],\n update: [\"PATCH /repos/{owner}/{repo}/pulls/{pull_number}\"],\n updateBranch: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch\"],\n updateReview: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n updateReviewComment: [\"PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}\"]\n },\n rateLimit: {\n get: [\"GET /rate_limit\"]\n },\n 
reactions: {\n createForCommitComment: [\"POST /repos/{owner}/{repo}/comments/{comment_id}/reactions\"],\n createForIssue: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/reactions\"],\n createForIssueComment: [\"POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\"],\n createForPullRequestReviewComment: [\"POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\"],\n createForRelease: [\"POST /repos/{owner}/{repo}/releases/{release_id}/reactions\"],\n createForTeamDiscussionCommentInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\"],\n createForTeamDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\"],\n deleteForCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}\"],\n deleteForIssue: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}\"],\n deleteForIssueComment: [\"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}\"],\n deleteForPullRequestComment: [\"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}\"],\n deleteForRelease: [\"DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}\"],\n deleteForTeamDiscussion: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}\"],\n deleteForTeamDiscussionComment: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}\"],\n listForCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\"],\n listForIssue: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\"],\n listForIssueComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\"],\n listForPullRequestReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\"],\n listForRelease: [\"GET /repos/{owner}/{repo}/releases/{release_id}/reactions\"],\n listForTeamDiscussionCommentInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\"],\n listForTeamDiscussionInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\"]\n },\n repos: {\n acceptInvitation: [\"PATCH /user/repository_invitations/{invitation_id}\", {}, {\n renamed: [\"repos\", \"acceptInvitationForAuthenticatedUser\"]\n }],\n acceptInvitationForAuthenticatedUser: [\"PATCH /user/repository_invitations/{invitation_id}\"],\n addAppAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n addCollaborator: [\"PUT /repos/{owner}/{repo}/collaborators/{username}\"],\n addStatusCheckContexts: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n addTeamAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n addUserAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n checkCollaborator: [\"GET /repos/{owner}/{repo}/collaborators/{username}\"],\n checkVulnerabilityAlerts: [\"GET /repos/{owner}/{repo}/vulnerability-alerts\"],\n codeownersErrors: [\"GET /repos/{owner}/{repo}/codeowners/errors\"],\n compareCommits: [\"GET /repos/{owner}/{repo}/compare/{base}...{head}\"],\n 
compareCommitsWithBasehead: [\"GET /repos/{owner}/{repo}/compare/{basehead}\"],\n createAutolink: [\"POST /repos/{owner}/{repo}/autolinks\"],\n createCommitComment: [\"POST /repos/{owner}/{repo}/commits/{commit_sha}/comments\"],\n createCommitSignatureProtection: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"],\n createCommitStatus: [\"POST /repos/{owner}/{repo}/statuses/{sha}\"],\n createDeployKey: [\"POST /repos/{owner}/{repo}/keys\"],\n createDeployment: [\"POST /repos/{owner}/{repo}/deployments\"],\n createDeploymentStatus: [\"POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"],\n createDispatchEvent: [\"POST /repos/{owner}/{repo}/dispatches\"],\n createForAuthenticatedUser: [\"POST /user/repos\"],\n createFork: [\"POST /repos/{owner}/{repo}/forks\"],\n createInOrg: [\"POST /orgs/{org}/repos\"],\n createOrUpdateEnvironment: [\"PUT /repos/{owner}/{repo}/environments/{environment_name}\"],\n createOrUpdateFileContents: [\"PUT /repos/{owner}/{repo}/contents/{path}\"],\n createPagesSite: [\"POST /repos/{owner}/{repo}/pages\"],\n createRelease: [\"POST /repos/{owner}/{repo}/releases\"],\n createTagProtection: [\"POST /repos/{owner}/{repo}/tags/protection\"],\n createUsingTemplate: [\"POST /repos/{template_owner}/{template_repo}/generate\"],\n createWebhook: [\"POST /repos/{owner}/{repo}/hooks\"],\n declineInvitation: [\"DELETE /user/repository_invitations/{invitation_id}\", {}, {\n renamed: [\"repos\", \"declineInvitationForAuthenticatedUser\"]\n }],\n declineInvitationForAuthenticatedUser: [\"DELETE /user/repository_invitations/{invitation_id}\"],\n delete: [\"DELETE /repos/{owner}/{repo}\"],\n deleteAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"],\n deleteAdminBranchProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n deleteAnEnvironment: [\"DELETE /repos/{owner}/{repo}/environments/{environment_name}\"],\n deleteAutolink: [\"DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}\"],\n deleteBranchProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection\"],\n deleteCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}\"],\n deleteCommitSignatureProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"],\n deleteDeployKey: [\"DELETE /repos/{owner}/{repo}/keys/{key_id}\"],\n deleteDeployment: [\"DELETE /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n deleteFile: [\"DELETE /repos/{owner}/{repo}/contents/{path}\"],\n deleteInvitation: [\"DELETE /repos/{owner}/{repo}/invitations/{invitation_id}\"],\n deletePagesSite: [\"DELETE /repos/{owner}/{repo}/pages\"],\n deletePullRequestReviewProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n deleteRelease: [\"DELETE /repos/{owner}/{repo}/releases/{release_id}\"],\n deleteReleaseAsset: [\"DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n deleteTagProtection: [\"DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}\"],\n deleteWebhook: [\"DELETE /repos/{owner}/{repo}/hooks/{hook_id}\"],\n disableAutomatedSecurityFixes: [\"DELETE /repos/{owner}/{repo}/automated-security-fixes\"],\n disableLfsForRepo: [\"DELETE /repos/{owner}/{repo}/lfs\"],\n disableVulnerabilityAlerts: [\"DELETE /repos/{owner}/{repo}/vulnerability-alerts\"],\n downloadArchive: [\"GET /repos/{owner}/{repo}/zipball/{ref}\", {}, {\n renamed: [\"repos\", \"downloadZipballArchive\"]\n }],\n 
downloadTarballArchive: [\"GET /repos/{owner}/{repo}/tarball/{ref}\"],\n downloadZipballArchive: [\"GET /repos/{owner}/{repo}/zipball/{ref}\"],\n enableAutomatedSecurityFixes: [\"PUT /repos/{owner}/{repo}/automated-security-fixes\"],\n enableLfsForRepo: [\"PUT /repos/{owner}/{repo}/lfs\"],\n enableVulnerabilityAlerts: [\"PUT /repos/{owner}/{repo}/vulnerability-alerts\"],\n generateReleaseNotes: [\"POST /repos/{owner}/{repo}/releases/generate-notes\"],\n get: [\"GET /repos/{owner}/{repo}\"],\n getAccessRestrictions: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"],\n getAdminBranchProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n getAllEnvironments: [\"GET /repos/{owner}/{repo}/environments\"],\n getAllStatusCheckContexts: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\"],\n getAllTopics: [\"GET /repos/{owner}/{repo}/topics\"],\n getAppsWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\"],\n getAutolink: [\"GET /repos/{owner}/{repo}/autolinks/{autolink_id}\"],\n getBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}\"],\n getBranchProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection\"],\n getClones: [\"GET /repos/{owner}/{repo}/traffic/clones\"],\n getCodeFrequencyStats: [\"GET /repos/{owner}/{repo}/stats/code_frequency\"],\n getCollaboratorPermissionLevel: [\"GET /repos/{owner}/{repo}/collaborators/{username}/permission\"],\n getCombinedStatusForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/status\"],\n getCommit: [\"GET /repos/{owner}/{repo}/commits/{ref}\"],\n getCommitActivityStats: [\"GET /repos/{owner}/{repo}/stats/commit_activity\"],\n getCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}\"],\n getCommitSignatureProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"],\n getCommunityProfileMetrics: [\"GET /repos/{owner}/{repo}/community/profile\"],\n getContent: [\"GET /repos/{owner}/{repo}/contents/{path}\"],\n getContributorsStats: [\"GET /repos/{owner}/{repo}/stats/contributors\"],\n getDeployKey: [\"GET /repos/{owner}/{repo}/keys/{key_id}\"],\n getDeployment: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n getDeploymentStatus: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}\"],\n getEnvironment: [\"GET /repos/{owner}/{repo}/environments/{environment_name}\"],\n getLatestPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/latest\"],\n getLatestRelease: [\"GET /repos/{owner}/{repo}/releases/latest\"],\n getPages: [\"GET /repos/{owner}/{repo}/pages\"],\n getPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/{build_id}\"],\n getPagesHealthCheck: [\"GET /repos/{owner}/{repo}/pages/health\"],\n getParticipationStats: [\"GET /repos/{owner}/{repo}/stats/participation\"],\n getPullRequestReviewProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n getPunchCardStats: [\"GET /repos/{owner}/{repo}/stats/punch_card\"],\n getReadme: [\"GET /repos/{owner}/{repo}/readme\"],\n getReadmeInDirectory: [\"GET /repos/{owner}/{repo}/readme/{dir}\"],\n getRelease: [\"GET /repos/{owner}/{repo}/releases/{release_id}\"],\n getReleaseAsset: [\"GET /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n getReleaseByTag: [\"GET /repos/{owner}/{repo}/releases/tags/{tag}\"],\n getStatusChecksProtection: [\"GET 
/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n getTeamsWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\"],\n getTopPaths: [\"GET /repos/{owner}/{repo}/traffic/popular/paths\"],\n getTopReferrers: [\"GET /repos/{owner}/{repo}/traffic/popular/referrers\"],\n getUsersWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\"],\n getViews: [\"GET /repos/{owner}/{repo}/traffic/views\"],\n getWebhook: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}\"],\n getWebhookConfigForRepo: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}/config\"],\n getWebhookDelivery: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}\"],\n listAutolinks: [\"GET /repos/{owner}/{repo}/autolinks\"],\n listBranches: [\"GET /repos/{owner}/{repo}/branches\"],\n listBranchesForHeadCommit: [\"GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head\"],\n listCollaborators: [\"GET /repos/{owner}/{repo}/collaborators\"],\n listCommentsForCommit: [\"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\"],\n listCommitCommentsForRepo: [\"GET /repos/{owner}/{repo}/comments\"],\n listCommitStatusesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/statuses\"],\n listCommits: [\"GET /repos/{owner}/{repo}/commits\"],\n listContributors: [\"GET /repos/{owner}/{repo}/contributors\"],\n listDeployKeys: [\"GET /repos/{owner}/{repo}/keys\"],\n listDeploymentStatuses: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"],\n listDeployments: [\"GET /repos/{owner}/{repo}/deployments\"],\n listForAuthenticatedUser: [\"GET /user/repos\"],\n listForOrg: [\"GET /orgs/{org}/repos\"],\n listForUser: [\"GET /users/{username}/repos\"],\n listForks: [\"GET /repos/{owner}/{repo}/forks\"],\n listInvitations: [\"GET /repos/{owner}/{repo}/invitations\"],\n listInvitationsForAuthenticatedUser: [\"GET /user/repository_invitations\"],\n listLanguages: [\"GET /repos/{owner}/{repo}/languages\"],\n listPagesBuilds: [\"GET /repos/{owner}/{repo}/pages/builds\"],\n listPublic: [\"GET /repositories\"],\n listPullRequestsAssociatedWithCommit: [\"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\"],\n listReleaseAssets: [\"GET /repos/{owner}/{repo}/releases/{release_id}/assets\"],\n listReleases: [\"GET /repos/{owner}/{repo}/releases\"],\n listTagProtection: [\"GET /repos/{owner}/{repo}/tags/protection\"],\n listTags: [\"GET /repos/{owner}/{repo}/tags\"],\n listTeams: [\"GET /repos/{owner}/{repo}/teams\"],\n listWebhookDeliveries: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries\"],\n listWebhooks: [\"GET /repos/{owner}/{repo}/hooks\"],\n merge: [\"POST /repos/{owner}/{repo}/merges\"],\n mergeUpstream: [\"POST /repos/{owner}/{repo}/merge-upstream\"],\n pingWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/pings\"],\n redeliverWebhookDelivery: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts\"],\n removeAppAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n removeCollaborator: [\"DELETE /repos/{owner}/{repo}/collaborators/{username}\"],\n removeStatusCheckContexts: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n removeStatusCheckProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n removeTeamAccessRestrictions: [\"DELETE 
/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n removeUserAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n renameBranch: [\"POST /repos/{owner}/{repo}/branches/{branch}/rename\"],\n replaceAllTopics: [\"PUT /repos/{owner}/{repo}/topics\"],\n requestPagesBuild: [\"POST /repos/{owner}/{repo}/pages/builds\"],\n setAdminBranchProtection: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n setAppAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n setStatusCheckContexts: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n setTeamAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n setUserAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n testPushWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/tests\"],\n transfer: [\"POST /repos/{owner}/{repo}/transfer\"],\n update: [\"PATCH /repos/{owner}/{repo}\"],\n updateBranchProtection: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection\"],\n updateCommitComment: [\"PATCH /repos/{owner}/{repo}/comments/{comment_id}\"],\n updateInformationAboutPagesSite: [\"PUT /repos/{owner}/{repo}/pages\"],\n updateInvitation: [\"PATCH /repos/{owner}/{repo}/invitations/{invitation_id}\"],\n updatePullRequestReviewProtection: [\"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n updateRelease: [\"PATCH /repos/{owner}/{repo}/releases/{release_id}\"],\n updateReleaseAsset: [\"PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n updateStatusCheckPotection: [\"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\", {}, {\n renamed: [\"repos\", \"updateStatusCheckProtection\"]\n }],\n updateStatusCheckProtection: [\"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n updateWebhook: [\"PATCH /repos/{owner}/{repo}/hooks/{hook_id}\"],\n updateWebhookConfigForRepo: [\"PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config\"],\n uploadReleaseAsset: [\"POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}\", {\n baseUrl: \"https://uploads.github.com\"\n }]\n },\n search: {\n code: [\"GET /search/code\"],\n commits: [\"GET /search/commits\"],\n issuesAndPullRequests: [\"GET /search/issues\"],\n labels: [\"GET /search/labels\"],\n repos: [\"GET /search/repositories\"],\n topics: [\"GET /search/topics\"],\n users: [\"GET /search/users\"]\n },\n secretScanning: {\n getAlert: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\"],\n listAlertsForEnterprise: [\"GET /enterprises/{enterprise}/secret-scanning/alerts\"],\n listAlertsForOrg: [\"GET /orgs/{org}/secret-scanning/alerts\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts\"],\n listLocationsForAlert: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations\"],\n updateAlert: [\"PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\"]\n },\n teams: {\n addOrUpdateMembershipForUserInOrg: [\"PUT /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n addOrUpdateProjectPermissionsInOrg: [\"PUT 
/orgs/{org}/teams/{team_slug}/projects/{project_id}\"],\n addOrUpdateRepoPermissionsInOrg: [\"PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n checkPermissionsForProjectInOrg: [\"GET /orgs/{org}/teams/{team_slug}/projects/{project_id}\"],\n checkPermissionsForRepoInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n create: [\"POST /orgs/{org}/teams\"],\n createDiscussionCommentInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"],\n createDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions\"],\n deleteDiscussionCommentInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n deleteDiscussionInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n deleteInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}\"],\n getByName: [\"GET /orgs/{org}/teams/{team_slug}\"],\n getDiscussionCommentInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n getDiscussionInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n getMembershipForUserInOrg: [\"GET /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n list: [\"GET /orgs/{org}/teams\"],\n listChildInOrg: [\"GET /orgs/{org}/teams/{team_slug}/teams\"],\n listDiscussionCommentsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"],\n listDiscussionsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions\"],\n listForAuthenticatedUser: [\"GET /user/teams\"],\n listMembersInOrg: [\"GET /orgs/{org}/teams/{team_slug}/members\"],\n listPendingInvitationsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/invitations\"],\n listProjectsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/projects\"],\n listReposInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos\"],\n removeMembershipForUserInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n removeProjectInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}\"],\n removeRepoInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n updateDiscussionCommentInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n updateDiscussionInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n updateInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}\"]\n },\n users: {\n addEmailForAuthenticated: [\"POST /user/emails\", {}, {\n renamed: [\"users\", \"addEmailForAuthenticatedUser\"]\n }],\n addEmailForAuthenticatedUser: [\"POST /user/emails\"],\n block: [\"PUT /user/blocks/{username}\"],\n checkBlocked: [\"GET /user/blocks/{username}\"],\n checkFollowingForUser: [\"GET /users/{username}/following/{target_user}\"],\n checkPersonIsFollowedByAuthenticated: [\"GET /user/following/{username}\"],\n createGpgKeyForAuthenticated: [\"POST /user/gpg_keys\", {}, {\n renamed: [\"users\", \"createGpgKeyForAuthenticatedUser\"]\n }],\n createGpgKeyForAuthenticatedUser: [\"POST /user/gpg_keys\"],\n createPublicSshKeyForAuthenticated: [\"POST /user/keys\", {}, {\n renamed: [\"users\", \"createPublicSshKeyForAuthenticatedUser\"]\n }],\n createPublicSshKeyForAuthenticatedUser: [\"POST /user/keys\"],\n deleteEmailForAuthenticated: [\"DELETE /user/emails\", {}, {\n renamed: [\"users\", \"deleteEmailForAuthenticatedUser\"]\n }],\n deleteEmailForAuthenticatedUser: [\"DELETE /user/emails\"],\n deleteGpgKeyForAuthenticated: [\"DELETE /user/gpg_keys/{gpg_key_id}\", {}, {\n 
renamed: [\"users\", \"deleteGpgKeyForAuthenticatedUser\"]\n }],\n deleteGpgKeyForAuthenticatedUser: [\"DELETE /user/gpg_keys/{gpg_key_id}\"],\n deletePublicSshKeyForAuthenticated: [\"DELETE /user/keys/{key_id}\", {}, {\n renamed: [\"users\", \"deletePublicSshKeyForAuthenticatedUser\"]\n }],\n deletePublicSshKeyForAuthenticatedUser: [\"DELETE /user/keys/{key_id}\"],\n follow: [\"PUT /user/following/{username}\"],\n getAuthenticated: [\"GET /user\"],\n getByUsername: [\"GET /users/{username}\"],\n getContextForUser: [\"GET /users/{username}/hovercard\"],\n getGpgKeyForAuthenticated: [\"GET /user/gpg_keys/{gpg_key_id}\", {}, {\n renamed: [\"users\", \"getGpgKeyForAuthenticatedUser\"]\n }],\n getGpgKeyForAuthenticatedUser: [\"GET /user/gpg_keys/{gpg_key_id}\"],\n getPublicSshKeyForAuthenticated: [\"GET /user/keys/{key_id}\", {}, {\n renamed: [\"users\", \"getPublicSshKeyForAuthenticatedUser\"]\n }],\n getPublicSshKeyForAuthenticatedUser: [\"GET /user/keys/{key_id}\"],\n list: [\"GET /users\"],\n listBlockedByAuthenticated: [\"GET /user/blocks\", {}, {\n renamed: [\"users\", \"listBlockedByAuthenticatedUser\"]\n }],\n listBlockedByAuthenticatedUser: [\"GET /user/blocks\"],\n listEmailsForAuthenticated: [\"GET /user/emails\", {}, {\n renamed: [\"users\", \"listEmailsForAuthenticatedUser\"]\n }],\n listEmailsForAuthenticatedUser: [\"GET /user/emails\"],\n listFollowedByAuthenticated: [\"GET /user/following\", {}, {\n renamed: [\"users\", \"listFollowedByAuthenticatedUser\"]\n }],\n listFollowedByAuthenticatedUser: [\"GET /user/following\"],\n listFollowersForAuthenticatedUser: [\"GET /user/followers\"],\n listFollowersForUser: [\"GET /users/{username}/followers\"],\n listFollowingForUser: [\"GET /users/{username}/following\"],\n listGpgKeysForAuthenticated: [\"GET /user/gpg_keys\", {}, {\n renamed: [\"users\", \"listGpgKeysForAuthenticatedUser\"]\n }],\n listGpgKeysForAuthenticatedUser: [\"GET /user/gpg_keys\"],\n listGpgKeysForUser: [\"GET /users/{username}/gpg_keys\"],\n listPublicEmailsForAuthenticated: [\"GET /user/public_emails\", {}, {\n renamed: [\"users\", \"listPublicEmailsForAuthenticatedUser\"]\n }],\n listPublicEmailsForAuthenticatedUser: [\"GET /user/public_emails\"],\n listPublicKeysForUser: [\"GET /users/{username}/keys\"],\n listPublicSshKeysForAuthenticated: [\"GET /user/keys\", {}, {\n renamed: [\"users\", \"listPublicSshKeysForAuthenticatedUser\"]\n }],\n listPublicSshKeysForAuthenticatedUser: [\"GET /user/keys\"],\n setPrimaryEmailVisibilityForAuthenticated: [\"PATCH /user/email/visibility\", {}, {\n renamed: [\"users\", \"setPrimaryEmailVisibilityForAuthenticatedUser\"]\n }],\n setPrimaryEmailVisibilityForAuthenticatedUser: [\"PATCH /user/email/visibility\"],\n unblock: [\"DELETE /user/blocks/{username}\"],\n unfollow: [\"DELETE /user/following/{username}\"],\n updateAuthenticated: [\"PATCH /user\"]\n }\n};\n\nconst VERSION = \"5.16.2\";\n\nfunction endpointsToMethods(octokit, endpointsMap) {\n const newMethods = {};\n\n for (const [scope, endpoints] of Object.entries(endpointsMap)) {\n for (const [methodName, endpoint] of Object.entries(endpoints)) {\n const [route, defaults, decorations] = endpoint;\n const [method, url] = route.split(/ /);\n const endpointDefaults = Object.assign({\n method,\n url\n }, defaults);\n\n if (!newMethods[scope]) {\n newMethods[scope] = {};\n }\n\n const scopeMethods = newMethods[scope];\n\n if (decorations) {\n scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations);\n continue;\n }\n\n 
scopeMethods[methodName] = octokit.request.defaults(endpointDefaults);\n }\n }\n\n return newMethods;\n}\n\nfunction decorate(octokit, scope, methodName, defaults, decorations) {\n const requestWithDefaults = octokit.request.defaults(defaults);\n /* istanbul ignore next */\n\n function withDecorations(...args) {\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData`\n\n if (decorations.mapToData) {\n options = Object.assign({}, options, {\n data: options[decorations.mapToData],\n [decorations.mapToData]: undefined\n });\n return requestWithDefaults(options);\n }\n\n if (decorations.renamed) {\n const [newScope, newMethodName] = decorations.renamed;\n octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`);\n }\n\n if (decorations.deprecated) {\n octokit.log.warn(decorations.deprecated);\n }\n\n if (decorations.renamedParameters) {\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n const options = requestWithDefaults.endpoint.merge(...args);\n\n for (const [name, alias] of Object.entries(decorations.renamedParameters)) {\n if (name in options) {\n octokit.log.warn(`\"${name}\" parameter is deprecated for \"octokit.${scope}.${methodName}()\". Use \"${alias}\" instead`);\n\n if (!(alias in options)) {\n options[alias] = options[name];\n }\n\n delete options[name];\n }\n }\n\n return requestWithDefaults(options);\n } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n\n\n return requestWithDefaults(...args);\n }\n\n return Object.assign(withDecorations, requestWithDefaults);\n}\n\nfunction restEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit, Endpoints);\n return {\n rest: api\n };\n}\nrestEndpointMethods.VERSION = VERSION;\nfunction legacyRestEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit, Endpoints);\n return _objectSpread2(_objectSpread2({}, api), {}, {\n rest: api\n });\n}\nlegacyRestEndpointMethods.VERSION = VERSION;\n\nexports.legacyRestEndpointMethods = legacyRestEndpointMethods;\nexports.restEndpointMethods = restEndpointMethods;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; }\n\nvar deprecation = require('deprecation');\nvar once = _interopDefault(require('once'));\n\nconst logOnceCode = once(deprecation => console.warn(deprecation));\nconst logOnceHeaders = once(deprecation => console.warn(deprecation));\n/**\n * Error with extra properties to help with debugging\n */\n\nclass RequestError extends Error {\n constructor(message, statusCode, options) {\n super(message); // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n\n this.name = \"HttpError\";\n this.status = statusCode;\n let headers;\n\n if (\"headers\" in options && typeof options.headers !== \"undefined\") {\n headers = options.headers;\n }\n\n if (\"response\" in options) {\n this.response = options.response;\n headers = options.response.headers;\n } // redact request credentials without mutating original request options\n\n\n const requestCopy = Object.assign({}, options.request);\n\n if (options.request.headers.authorization) {\n requestCopy.headers = Object.assign({}, options.request.headers, {\n authorization: options.request.headers.authorization.replace(/ .*$/, \" [REDACTED]\")\n });\n }\n\n requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit\n // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications\n .replace(/\\bclient_secret=\\w+/g, \"client_secret=[REDACTED]\") // OAuth tokens can be passed as URL query parameters, although it is not recommended\n // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header\n .replace(/\\baccess_token=\\w+/g, \"access_token=[REDACTED]\");\n this.request = requestCopy; // deprecations\n\n Object.defineProperty(this, \"code\", {\n get() {\n logOnceCode(new deprecation.Deprecation(\"[@octokit/request-error] `error.code` is deprecated, use `error.status`.\"));\n return statusCode;\n }\n\n });\n Object.defineProperty(this, \"headers\", {\n get() {\n logOnceHeaders(new deprecation.Deprecation(\"[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.\"));\n return headers || {};\n }\n\n });\n }\n\n}\n\nexports.RequestError = RequestError;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }\n\nvar endpoint = require('@octokit/endpoint');\nvar universalUserAgent = require('universal-user-agent');\nvar isPlainObject = require('is-plain-object');\nvar nodeFetch = _interopDefault(require('node-fetch'));\nvar requestError = require('@octokit/request-error');\n\nconst VERSION = \"5.6.3\";\n\nfunction getBufferResponse(response) {\n return response.arrayBuffer();\n}\n\nfunction fetchWrapper(requestOptions) {\n const log = requestOptions.request && requestOptions.request.log ? 
requestOptions.request.log : console;\n\n if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {\n requestOptions.body = JSON.stringify(requestOptions.body);\n }\n\n let headers = {};\n let status;\n let url;\n const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch;\n return fetch(requestOptions.url, Object.assign({\n method: requestOptions.method,\n body: requestOptions.body,\n headers: requestOptions.headers,\n redirect: requestOptions.redirect\n }, // `requestOptions.request.agent` type is incompatible\n // see https://github.com/octokit/types.ts/pull/264\n requestOptions.request)).then(async response => {\n url = response.url;\n status = response.status;\n\n for (const keyAndValue of response.headers) {\n headers[keyAndValue[0]] = keyAndValue[1];\n }\n\n if (\"deprecation\" in headers) {\n const matches = headers.link && headers.link.match(/<([^>]+)>; rel=\"deprecation\"/);\n const deprecationLink = matches && matches.pop();\n log.warn(`[@octokit/request] \"${requestOptions.method} ${requestOptions.url}\" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : \"\"}`);\n }\n\n if (status === 204 || status === 205) {\n return;\n } // GitHub API returns 200 for HEAD requests\n\n\n if (requestOptions.method === \"HEAD\") {\n if (status < 400) {\n return;\n }\n\n throw new requestError.RequestError(response.statusText, status, {\n response: {\n url,\n status,\n headers,\n data: undefined\n },\n request: requestOptions\n });\n }\n\n if (status === 304) {\n throw new requestError.RequestError(\"Not modified\", status, {\n response: {\n url,\n status,\n headers,\n data: await getResponseData(response)\n },\n request: requestOptions\n });\n }\n\n if (status >= 400) {\n const data = await getResponseData(response);\n const error = new requestError.RequestError(toErrorMessage(data), status, {\n response: {\n url,\n status,\n headers,\n data\n },\n request: requestOptions\n });\n throw error;\n }\n\n return getResponseData(response);\n }).then(data => {\n return {\n status,\n url,\n headers,\n data\n };\n }).catch(error => {\n if (error instanceof requestError.RequestError) throw error;\n throw new requestError.RequestError(error.message, 500, {\n request: requestOptions\n });\n });\n}\n\nasync function getResponseData(response) {\n const contentType = response.headers.get(\"content-type\");\n\n if (/application\\/json/.test(contentType)) {\n return response.json();\n }\n\n if (!contentType || /^text\\/|charset=utf-8$/.test(contentType)) {\n return response.text();\n }\n\n return getBufferResponse(response);\n}\n\nfunction toErrorMessage(data) {\n if (typeof data === \"string\") return data; // istanbul ignore else - just in case\n\n if (\"message\" in data) {\n if (Array.isArray(data.errors)) {\n return `${data.message}: ${data.errors.map(JSON.stringify).join(\", \")}`;\n }\n\n return data.message;\n } // istanbul ignore next - just in case\n\n\n return `Unknown error: ${JSON.stringify(data)}`;\n}\n\nfunction withDefaults(oldEndpoint, newDefaults) {\n const endpoint = oldEndpoint.defaults(newDefaults);\n\n const newApi = function (route, parameters) {\n const endpointOptions = endpoint.merge(route, parameters);\n\n if (!endpointOptions.request || !endpointOptions.request.hook) {\n return fetchWrapper(endpoint.parse(endpointOptions));\n }\n\n const request = (route, parameters) => {\n return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters)));\n };\n\n 
Object.assign(request, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint)\n });\n return endpointOptions.request.hook(request, endpointOptions);\n };\n\n return Object.assign(newApi, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint)\n });\n}\n\nconst request = withDefaults(endpoint.endpoint, {\n headers: {\n \"user-agent\": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}`\n }\n});\n\nexports.request = request;\n//# sourceMappingURL=index.js.map\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.formatNames = exports.fastFormats = exports.fullFormats = void 0;\nfunction fmtDef(validate, compare) {\n return { validate, compare };\n}\nexports.fullFormats = {\n // date: http://tools.ietf.org/html/rfc3339#section-5.6\n date: fmtDef(date, compareDate),\n // date-time: http://tools.ietf.org/html/rfc3339#section-5.6\n time: fmtDef(time, compareTime),\n \"date-time\": fmtDef(date_time, compareDateTime),\n // duration: https://tools.ietf.org/html/rfc3339#appendix-A\n duration: /^P(?!$)((\\d+Y)?(\\d+M)?(\\d+D)?(T(?=\\d)(\\d+H)?(\\d+M)?(\\d+S)?)?|(\\d+W)?)$/,\n uri,\n \"uri-reference\": /^(?:[a-z][a-z0-9+\\-.]*:)?(?:\\/?\\/(?:(?:[a-z0-9\\-._~!$&'()*+,;=:]|%[0-9a-f]{2})*@)?(?:\\[(?:(?:(?:(?:[0-9a-f]{1,4}:){6}|::(?:[0-9a-f]{1,4}:){5}|(?:[0-9a-f]{1,4})?::(?:[0-9a-f]{1,4}:){4}|(?:(?:[0-9a-f]{1,4}:){0,1}[0-9a-f]{1,4})?::(?:[0-9a-f]{1,4}:){3}|(?:(?:[0-9a-f]{1,4}:){0,2}[0-9a-f]{1,4})?::(?:[0-9a-f]{1,4}:){2}|(?:(?:[0-9a-f]{1,4}:){0,3}[0-9a-f]{1,4})?::[0-9a-f]{1,4}:|(?:(?:[0-9a-f]{1,4}:){0,4}[0-9a-f]{1,4})?::)(?:[0-9a-f]{1,4}:[0-9a-f]{1,4}|(?:(?:25[0-5]|2[0-4]\\d|[01]?\\d\\d?)\\.){3}(?:25[0-5]|2[0-4]\\d|[01]?\\d\\d?))|(?:(?:[0-9a-f]{1,4}:){0,5}[0-9a-f]{1,4})?::[0-9a-f]{1,4}|(?:(?:[0-9a-f]{1,4}:){0,6}[0-9a-f]{1,4})?::)|[Vv][0-9a-f]+\\.[a-z0-9\\-._~!$&'()*+,;=:]+)\\]|(?:(?:25[0-5]|2[0-4]\\d|[01]?\\d\\d?)\\.){3}(?:25[0-5]|2[0-4]\\d|[01]?\\d\\d?)|(?:[a-z0-9\\-._~!$&'\"()*+,;=]|%[0-9a-f]{2})*)(?::\\d*)?(?:\\/(?:[a-z0-9\\-._~!$&'\"()*+,;=:@]|%[0-9a-f]{2})*)*|\\/(?:(?:[a-z0-9\\-._~!$&'\"()*+,;=:@]|%[0-9a-f]{2})+(?:\\/(?:[a-z0-9\\-._~!$&'\"()*+,;=:@]|%[0-9a-f]{2})*)*)?|(?:[a-z0-9\\-._~!$&'\"()*+,;=:@]|%[0-9a-f]{2})+(?:\\/(?:[a-z0-9\\-._~!$&'\"()*+,;=:@]|%[0-9a-f]{2})*)*)?(?:\\?(?:[a-z0-9\\-._~!$&'\"()*+,;=:@/?]|%[0-9a-f]{2})*)?(?:#(?:[a-z0-9\\-._~!$&'\"()*+,;=:@/?]|%[0-9a-f]{2})*)?$/i,\n // uri-template: https://tools.ietf.org/html/rfc6570\n \"uri-template\": /^(?:(?:[^\\x00-\\x20\"'<>%\\\\^`{|}]|%[0-9a-f]{2})|\\{[+#./;?&=,!@|]?(?:[a-z0-9_]|%[0-9a-f]{2})+(?::[1-9][0-9]{0,3}|\\*)?(?:,(?:[a-z0-9_]|%[0-9a-f]{2})+(?::[1-9][0-9]{0,3}|\\*)?)*\\})*$/i,\n // For the source: https://gist.github.com/dperini/729294\n // For test cases: https://mathiasbynens.be/demo/url-regex\n url: /^(?:https?|ftp):\\/\\/(?:\\S+(?::\\S*)?@)?(?:(?!(?:10|127)(?:\\.\\d{1,3}){3})(?!(?:169\\.254|192\\.168)(?:\\.\\d{1,3}){2})(?!172\\.(?:1[6-9]|2\\d|3[0-1])(?:\\.\\d{1,3}){2})(?:[1-9]\\d?|1\\d\\d|2[01]\\d|22[0-3])(?:\\.(?:1?\\d{1,2}|2[0-4]\\d|25[0-5])){2}(?:\\.(?:[1-9]\\d?|1\\d\\d|2[0-4]\\d|25[0-4]))|(?:(?:[a-z0-9\\u{00a1}-\\u{ffff}]+-)*[a-z0-9\\u{00a1}-\\u{ffff}]+)(?:\\.(?:[a-z0-9\\u{00a1}-\\u{ffff}]+-)*[a-z0-9\\u{00a1}-\\u{ffff}]+)*(?:\\.(?:[a-z\\u{00a1}-\\u{ffff}]{2,})))(?::\\d{2,5})?(?:\\/[^\\s]*)?$/iu,\n email: /^[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*@(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?$/i,\n hostname: 
/^(?=.{1,253}\\.?$)[a-z0-9](?:[a-z0-9-]{0,61}[a-z0-9])?(?:\\.[a-z0-9](?:[-0-9a-z]{0,61}[0-9a-z])?)*\\.?$/i,\n // optimized https://www.safaribooksonline.com/library/view/regular-expressions-cookbook/9780596802837/ch07s16.html\n ipv4: /^(?:(?:25[0-5]|2[0-4]\\d|[01]?\\d\\d?)\\.){3}(?:25[0-5]|2[0-4]\\d|[01]?\\d\\d?)$/,\n ipv6: /^((([0-9a-f]{1,4}:){7}([0-9a-f]{1,4}|:))|(([0-9a-f]{1,4}:){6}(:[0-9a-f]{1,4}|((25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)(\\.(25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)){3})|:))|(([0-9a-f]{1,4}:){5}(((:[0-9a-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)(\\.(25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)){3})|:))|(([0-9a-f]{1,4}:){4}(((:[0-9a-f]{1,4}){1,3})|((:[0-9a-f]{1,4})?:((25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)(\\.(25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)){3}))|:))|(([0-9a-f]{1,4}:){3}(((:[0-9a-f]{1,4}){1,4})|((:[0-9a-f]{1,4}){0,2}:((25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)(\\.(25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)){3}))|:))|(([0-9a-f]{1,4}:){2}(((:[0-9a-f]{1,4}){1,5})|((:[0-9a-f]{1,4}){0,3}:((25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)(\\.(25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)){3}))|:))|(([0-9a-f]{1,4}:){1}(((:[0-9a-f]{1,4}){1,6})|((:[0-9a-f]{1,4}){0,4}:((25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)(\\.(25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)){3}))|:))|(:(((:[0-9a-f]{1,4}){1,7})|((:[0-9a-f]{1,4}){0,5}:((25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)(\\.(25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)){3}))|:)))$/i,\n regex,\n // uuid: http://tools.ietf.org/html/rfc4122\n uuid: /^(?:urn:uuid:)?[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12}$/i,\n // JSON-pointer: https://tools.ietf.org/html/rfc6901\n // uri fragment: https://tools.ietf.org/html/rfc3986#appendix-A\n \"json-pointer\": /^(?:\\/(?:[^~/]|~0|~1)*)*$/,\n \"json-pointer-uri-fragment\": /^#(?:\\/(?:[a-z0-9_\\-.!$&'()*+,;:=@]|%[0-9a-f]{2}|~0|~1)*)*$/i,\n // relative JSON-pointer: http://tools.ietf.org/html/draft-luff-relative-json-pointer-00\n \"relative-json-pointer\": /^(?:0|[1-9][0-9]*)(?:#|(?:\\/(?:[^~/]|~0|~1)*)*)$/,\n // the following formats are used by the openapi specification: https://spec.openapis.org/oas/v3.0.0#data-types\n // byte: https://github.com/miguelmota/is-base64\n byte,\n // signed 32 bit integer\n int32: { type: \"number\", validate: validateInt32 },\n // signed 64 bit integer\n int64: { type: \"number\", validate: validateInt64 },\n // C-type float\n float: { type: \"number\", validate: validateNumber },\n // C-type double\n double: { type: \"number\", validate: validateNumber },\n // hint to the UI to hide input strings\n password: true,\n // unchecked string payload\n binary: true,\n};\nexports.fastFormats = {\n ...exports.fullFormats,\n date: fmtDef(/^\\d\\d\\d\\d-[0-1]\\d-[0-3]\\d$/, compareDate),\n time: fmtDef(/^(?:[0-2]\\d:[0-5]\\d:[0-5]\\d|23:59:60)(?:\\.\\d+)?(?:z|[+-]\\d\\d(?::?\\d\\d)?)?$/i, compareTime),\n \"date-time\": fmtDef(/^\\d\\d\\d\\d-[0-1]\\d-[0-3]\\d[t\\s](?:[0-2]\\d:[0-5]\\d:[0-5]\\d|23:59:60)(?:\\.\\d+)?(?:z|[+-]\\d\\d(?::?\\d\\d)?)$/i, compareDateTime),\n // uri: https://github.com/mafintosh/is-my-json-valid/blob/master/formats.js\n uri: /^(?:[a-z][a-z0-9+\\-.]*:)(?:\\/?\\/)?[^\\s]*$/i,\n \"uri-reference\": /^(?:(?:[a-z][a-z0-9+\\-.]*:)?\\/?\\/)?(?:[^\\\\\\s#][^\\s#]*)?(?:#[^\\\\\\s]*)?$/i,\n // email (sources from jsen validator):\n // http://stackoverflow.com/questions/201323/using-a-regular-expression-to-validate-an-email-address#answer-8829363\n // http://www.w3.org/TR/html5/forms.html#valid-e-mail-address (search for 'wilful violation')\n email: 
/^[a-z0-9.!#$%&'*+/=?^_`{|}~-]+@[a-z0-9](?:[a-z0-9-]{0,61}[a-z0-9])?(?:\\.[a-z0-9](?:[a-z0-9-]{0,61}[a-z0-9])?)*$/i,\n};\nexports.formatNames = Object.keys(exports.fullFormats);\nfunction isLeapYear(year) {\n // https://tools.ietf.org/html/rfc3339#appendix-C\n return year % 4 === 0 && (year % 100 !== 0 || year % 400 === 0);\n}\nconst DATE = /^(\\d\\d\\d\\d)-(\\d\\d)-(\\d\\d)$/;\nconst DAYS = [0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31];\nfunction date(str) {\n // full-date from http://tools.ietf.org/html/rfc3339#section-5.6\n const matches = DATE.exec(str);\n if (!matches)\n return false;\n const year = +matches[1];\n const month = +matches[2];\n const day = +matches[3];\n return (month >= 1 &&\n month <= 12 &&\n day >= 1 &&\n day <= (month === 2 && isLeapYear(year) ? 29 : DAYS[month]));\n}\nfunction compareDate(d1, d2) {\n if (!(d1 && d2))\n return undefined;\n if (d1 > d2)\n return 1;\n if (d1 < d2)\n return -1;\n return 0;\n}\nconst TIME = /^(\\d\\d):(\\d\\d):(\\d\\d)(\\.\\d+)?(z|[+-]\\d\\d(?::?\\d\\d)?)?$/i;\nfunction time(str, withTimeZone) {\n const matches = TIME.exec(str);\n if (!matches)\n return false;\n const hour = +matches[1];\n const minute = +matches[2];\n const second = +matches[3];\n const timeZone = matches[5];\n return (((hour <= 23 && minute <= 59 && second <= 59) ||\n (hour === 23 && minute === 59 && second === 60)) &&\n (!withTimeZone || timeZone !== \"\"));\n}\nfunction compareTime(t1, t2) {\n if (!(t1 && t2))\n return undefined;\n const a1 = TIME.exec(t1);\n const a2 = TIME.exec(t2);\n if (!(a1 && a2))\n return undefined;\n t1 = a1[1] + a1[2] + a1[3] + (a1[4] || \"\");\n t2 = a2[1] + a2[2] + a2[3] + (a2[4] || \"\");\n if (t1 > t2)\n return 1;\n if (t1 < t2)\n return -1;\n return 0;\n}\nconst DATE_TIME_SEPARATOR = /t|\\s/i;\nfunction date_time(str) {\n // http://tools.ietf.org/html/rfc3339#section-5.6\n const dateTime = str.split(DATE_TIME_SEPARATOR);\n return dateTime.length === 2 && date(dateTime[0]) && time(dateTime[1], true);\n}\nfunction compareDateTime(dt1, dt2) {\n if (!(dt1 && dt2))\n return undefined;\n const [d1, t1] = dt1.split(DATE_TIME_SEPARATOR);\n const [d2, t2] = dt2.split(DATE_TIME_SEPARATOR);\n const res = compareDate(d1, d2);\n if (res === undefined)\n return undefined;\n return res || compareTime(t1, t2);\n}\nconst NOT_URI_FRAGMENT = /\\/|:/;\nconst URI = /^(?:[a-z][a-z0-9+\\-.]*:)(?:\\/?\\/(?:(?:[a-z0-9\\-._~!$&'()*+,;=:]|%[0-9a-f]{2})*@)?(?:\\[(?:(?:(?:(?:[0-9a-f]{1,4}:){6}|::(?:[0-9a-f]{1,4}:){5}|(?:[0-9a-f]{1,4})?::(?:[0-9a-f]{1,4}:){4}|(?:(?:[0-9a-f]{1,4}:){0,1}[0-9a-f]{1,4})?::(?:[0-9a-f]{1,4}:){3}|(?:(?:[0-9a-f]{1,4}:){0,2}[0-9a-f]{1,4})?::(?:[0-9a-f]{1,4}:){2}|(?:(?:[0-9a-f]{1,4}:){0,3}[0-9a-f]{1,4})?::[0-9a-f]{1,4}:|(?:(?:[0-9a-f]{1,4}:){0,4}[0-9a-f]{1,4})?::)(?:[0-9a-f]{1,4}:[0-9a-f]{1,4}|(?:(?:25[0-5]|2[0-4]\\d|[01]?\\d\\d?)\\.){3}(?:25[0-5]|2[0-4]\\d|[01]?\\d\\d?))|(?:(?:[0-9a-f]{1,4}:){0,5}[0-9a-f]{1,4})?::[0-9a-f]{1,4}|(?:(?:[0-9a-f]{1,4}:){0,6}[0-9a-f]{1,4})?::)|[Vv][0-9a-f]+\\.[a-z0-9\\-._~!$&'()*+,;=:]+)\\]|(?:(?:25[0-5]|2[0-4]\\d|[01]?\\d\\d?)\\.){3}(?:25[0-5]|2[0-4]\\d|[01]?\\d\\d?)|(?:[a-z0-9\\-._~!$&'()*+,;=]|%[0-9a-f]{2})*)(?::\\d*)?(?:\\/(?:[a-z0-9\\-._~!$&'()*+,;=:@]|%[0-9a-f]{2})*)*|\\/(?:(?:[a-z0-9\\-._~!$&'()*+,;=:@]|%[0-9a-f]{2})+(?:\\/(?:[a-z0-9\\-._~!$&'()*+,;=:@]|%[0-9a-f]{2})*)*)?|(?:[a-z0-9\\-._~!$&'()*+,;=:@]|%[0-9a-f]{2})+(?:\\/(?:[a-z0-9\\-._~!$&'()*+,;=:@]|%[0-9a-f]{2})*)*)(?:\\?(?:[a-z0-9\\-._~!$&'()*+,;=:@/?]|%[0-9a-f]{2})*)?(?:#(?:[a-z0-9\\-._~!$&'()*+,;=:@/?]|%[0-9a-f]{2})*)?$/i;\nfunction uri(str) 
{\n // http://jmrware.com/articles/2009/uri_regexp/URI_regex.html + optional protocol + required \".\"\n return NOT_URI_FRAGMENT.test(str) && URI.test(str);\n}\nconst BYTE = /^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$/gm;\nfunction byte(str) {\n BYTE.lastIndex = 0;\n return BYTE.test(str);\n}\nconst MIN_INT32 = -(2 ** 31);\nconst MAX_INT32 = 2 ** 31 - 1;\nfunction validateInt32(value) {\n return Number.isInteger(value) && value <= MAX_INT32 && value >= MIN_INT32;\n}\nfunction validateInt64(value) {\n // JSON and javascript max Int is 2**53, so any int that passes isInteger is valid for Int64\n return Number.isInteger(value);\n}\nfunction validateNumber() {\n return true;\n}\nconst Z_ANCHOR = /[^\\\\]\\\\Z/;\nfunction regex(str) {\n if (Z_ANCHOR.test(str))\n return false;\n try {\n new RegExp(str);\n return true;\n }\n catch (e) {\n return false;\n }\n}\n//# sourceMappingURL=formats.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst formats_1 = require(\"./formats\");\nconst limit_1 = require(\"./limit\");\nconst codegen_1 = require(\"ajv/dist/compile/codegen\");\nconst fullName = new codegen_1.Name(\"fullFormats\");\nconst fastName = new codegen_1.Name(\"fastFormats\");\nconst formatsPlugin = (ajv, opts = { keywords: true }) => {\n if (Array.isArray(opts)) {\n addFormats(ajv, opts, formats_1.fullFormats, fullName);\n return ajv;\n }\n const [formats, exportName] = opts.mode === \"fast\" ? [formats_1.fastFormats, fastName] : [formats_1.fullFormats, fullName];\n const list = opts.formats || formats_1.formatNames;\n addFormats(ajv, list, formats, exportName);\n if (opts.keywords)\n limit_1.default(ajv);\n return ajv;\n};\nformatsPlugin.get = (name, mode = \"full\") => {\n const formats = mode === \"fast\" ? formats_1.fastFormats : formats_1.fullFormats;\n const f = formats[name];\n if (!f)\n throw new Error(`Unknown format \"${name}\"`);\n return f;\n};\nfunction addFormats(ajv, list, fs, exportName) {\n var _a;\n var _b;\n (_a = (_b = ajv.opts.code).formats) !== null && _a !== void 0 ? 
_a : (_b.formats = codegen_1._ `require(\"ajv-formats/dist/formats\").${exportName}`);\n for (const f of list)\n ajv.addFormat(f, fs[f]);\n}\nmodule.exports = exports = formatsPlugin;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.default = formatsPlugin;\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.formatLimitDefinition = void 0;\nconst ajv_1 = require(\"ajv\");\nconst codegen_1 = require(\"ajv/dist/compile/codegen\");\nconst ops = codegen_1.operators;\nconst KWDs = {\n formatMaximum: { okStr: \"<=\", ok: ops.LTE, fail: ops.GT },\n formatMinimum: { okStr: \">=\", ok: ops.GTE, fail: ops.LT },\n formatExclusiveMaximum: { okStr: \"<\", ok: ops.LT, fail: ops.GTE },\n formatExclusiveMinimum: { okStr: \">\", ok: ops.GT, fail: ops.LTE },\n};\nconst error = {\n message: ({ keyword, schemaCode }) => codegen_1.str `should be ${KWDs[keyword].okStr} ${schemaCode}`,\n params: ({ keyword, schemaCode }) => codegen_1._ `{comparison: ${KWDs[keyword].okStr}, limit: ${schemaCode}}`,\n};\nexports.formatLimitDefinition = {\n keyword: Object.keys(KWDs),\n type: \"string\",\n schemaType: \"string\",\n $data: true,\n error,\n code(cxt) {\n const { gen, data, schemaCode, keyword, it } = cxt;\n const { opts, self } = it;\n if (!opts.validateFormats)\n return;\n const fCxt = new ajv_1.KeywordCxt(it, self.RULES.all.format.definition, \"format\");\n if (fCxt.$data)\n validate$DataFormat();\n else\n validateFormat();\n function validate$DataFormat() {\n const fmts = gen.scopeValue(\"formats\", {\n ref: self.formats,\n code: opts.code.formats,\n });\n const fmt = gen.const(\"fmt\", codegen_1._ `${fmts}[${fCxt.schemaCode}]`);\n cxt.fail$data(codegen_1.or(codegen_1._ `typeof ${fmt} != \"object\"`, codegen_1._ `${fmt} instanceof RegExp`, codegen_1._ `typeof ${fmt}.compare != \"function\"`, compareCode(fmt)));\n }\n function validateFormat() {\n const format = fCxt.schema;\n const fmtDef = self.formats[format];\n if (!fmtDef || fmtDef === true)\n return;\n if (typeof fmtDef != \"object\" ||\n fmtDef instanceof RegExp ||\n typeof fmtDef.compare != \"function\") {\n throw new Error(`\"${keyword}\": format \"${format}\" does not define \"compare\" function`);\n }\n const fmt = gen.scopeValue(\"formats\", {\n key: format,\n ref: fmtDef,\n code: opts.code.formats ? 
codegen_1._ `${opts.code.formats}${codegen_1.getProperty(format)}` : undefined,\n });\n cxt.fail$data(compareCode(fmt));\n }\n function compareCode(fmt) {\n return codegen_1._ `${fmt}.compare(${data}, ${schemaCode}) ${KWDs[keyword].fail} 0`;\n }\n },\n dependencies: [\"format\"],\n};\nconst formatLimitPlugin = (ajv) => {\n ajv.addKeyword(exports.formatLimitDefinition);\n return ajv;\n};\nexports.default = formatLimitPlugin;\n//# sourceMappingURL=limit.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.CodeGen = exports.Name = exports.nil = exports.stringify = exports.str = exports._ = exports.KeywordCxt = void 0;\nconst core_1 = require(\"./core\");\nconst draft7_1 = require(\"./vocabularies/draft7\");\nconst discriminator_1 = require(\"./vocabularies/discriminator\");\nconst draft7MetaSchema = require(\"./refs/json-schema-draft-07.json\");\nconst META_SUPPORT_DATA = [\"/properties\"];\nconst META_SCHEMA_ID = \"http://json-schema.org/draft-07/schema\";\nclass Ajv extends core_1.default {\n _addVocabularies() {\n super._addVocabularies();\n draft7_1.default.forEach((v) => this.addVocabulary(v));\n if (this.opts.discriminator)\n this.addKeyword(discriminator_1.default);\n }\n _addDefaultMetaSchema() {\n super._addDefaultMetaSchema();\n if (!this.opts.meta)\n return;\n const metaSchema = this.opts.$data\n ? this.$dataMetaSchema(draft7MetaSchema, META_SUPPORT_DATA)\n : draft7MetaSchema;\n this.addMetaSchema(metaSchema, META_SCHEMA_ID, false);\n this.refs[\"http://json-schema.org/schema\"] = META_SCHEMA_ID;\n }\n defaultMeta() {\n return (this.opts.defaultMeta =\n super.defaultMeta() || (this.getSchema(META_SCHEMA_ID) ? META_SCHEMA_ID : undefined));\n }\n}\nmodule.exports = exports = Ajv;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.default = Ajv;\nvar validate_1 = require(\"./compile/validate\");\nObject.defineProperty(exports, \"KeywordCxt\", { enumerable: true, get: function () { return validate_1.KeywordCxt; } });\nvar codegen_1 = require(\"./compile/codegen\");\nObject.defineProperty(exports, \"_\", { enumerable: true, get: function () { return codegen_1._; } });\nObject.defineProperty(exports, \"str\", { enumerable: true, get: function () { return codegen_1.str; } });\nObject.defineProperty(exports, \"stringify\", { enumerable: true, get: function () { return codegen_1.stringify; } });\nObject.defineProperty(exports, \"nil\", { enumerable: true, get: function () { return codegen_1.nil; } });\nObject.defineProperty(exports, \"Name\", { enumerable: true, get: function () { return codegen_1.Name; } });\nObject.defineProperty(exports, \"CodeGen\", { enumerable: true, get: function () { return codegen_1.CodeGen; } });\n//# sourceMappingURL=ajv.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.regexpCode = exports.getEsmExportName = exports.getProperty = exports.safeStringify = exports.stringify = exports.strConcat = exports.addCodeArg = exports.str = exports._ = exports.nil = exports._Code = exports.Name = exports.IDENTIFIER = exports._CodeOrName = void 0;\nclass _CodeOrName {\n}\nexports._CodeOrName = _CodeOrName;\nexports.IDENTIFIER = /^[a-z$_][a-z$_0-9]*$/i;\nclass Name extends _CodeOrName {\n constructor(s) {\n super();\n if (!exports.IDENTIFIER.test(s))\n throw new Error(\"CodeGen: name must be a valid identifier\");\n this.str = s;\n }\n toString() {\n return this.str;\n }\n emptyStr() {\n return false;\n }\n get names() {\n return { [this.str]: 1 };\n 
}\n}\nexports.Name = Name;\nclass _Code extends _CodeOrName {\n constructor(code) {\n super();\n this._items = typeof code === \"string\" ? [code] : code;\n }\n toString() {\n return this.str;\n }\n emptyStr() {\n if (this._items.length > 1)\n return false;\n const item = this._items[0];\n return item === \"\" || item === '\"\"';\n }\n get str() {\n var _a;\n return ((_a = this._str) !== null && _a !== void 0 ? _a : (this._str = this._items.reduce((s, c) => `${s}${c}`, \"\")));\n }\n get names() {\n var _a;\n return ((_a = this._names) !== null && _a !== void 0 ? _a : (this._names = this._items.reduce((names, c) => {\n if (c instanceof Name)\n names[c.str] = (names[c.str] || 0) + 1;\n return names;\n }, {})));\n }\n}\nexports._Code = _Code;\nexports.nil = new _Code(\"\");\nfunction _(strs, ...args) {\n const code = [strs[0]];\n let i = 0;\n while (i < args.length) {\n addCodeArg(code, args[i]);\n code.push(strs[++i]);\n }\n return new _Code(code);\n}\nexports._ = _;\nconst plus = new _Code(\"+\");\nfunction str(strs, ...args) {\n const expr = [safeStringify(strs[0])];\n let i = 0;\n while (i < args.length) {\n expr.push(plus);\n addCodeArg(expr, args[i]);\n expr.push(plus, safeStringify(strs[++i]));\n }\n optimize(expr);\n return new _Code(expr);\n}\nexports.str = str;\nfunction addCodeArg(code, arg) {\n if (arg instanceof _Code)\n code.push(...arg._items);\n else if (arg instanceof Name)\n code.push(arg);\n else\n code.push(interpolate(arg));\n}\nexports.addCodeArg = addCodeArg;\nfunction optimize(expr) {\n let i = 1;\n while (i < expr.length - 1) {\n if (expr[i] === plus) {\n const res = mergeExprItems(expr[i - 1], expr[i + 1]);\n if (res !== undefined) {\n expr.splice(i - 1, 3, res);\n continue;\n }\n expr[i++] = \"+\";\n }\n i++;\n }\n}\nfunction mergeExprItems(a, b) {\n if (b === '\"\"')\n return a;\n if (a === '\"\"')\n return b;\n if (typeof a == \"string\") {\n if (b instanceof Name || a[a.length - 1] !== '\"')\n return;\n if (typeof b != \"string\")\n return `${a.slice(0, -1)}${b}\"`;\n if (b[0] === '\"')\n return a.slice(0, -1) + b.slice(1);\n return;\n }\n if (typeof b == \"string\" && b[0] === '\"' && !(a instanceof Name))\n return `\"${a}${b.slice(1)}`;\n return;\n}\nfunction strConcat(c1, c2) {\n return c2.emptyStr() ? c1 : c1.emptyStr() ? c2 : str `${c1}${c2}`;\n}\nexports.strConcat = strConcat;\n// TODO do not allow arrays here\nfunction interpolate(x) {\n return typeof x == \"number\" || typeof x == \"boolean\" || x === null\n ? x\n : safeStringify(Array.isArray(x) ? x.join(\",\") : x);\n}\nfunction stringify(x) {\n return new _Code(safeStringify(x));\n}\nexports.stringify = stringify;\nfunction safeStringify(x) {\n return JSON.stringify(x)\n .replace(/\\u2028/g, \"\\\\u2028\")\n .replace(/\\u2029/g, \"\\\\u2029\");\n}\nexports.safeStringify = safeStringify;\nfunction getProperty(key) {\n return typeof key == \"string\" && exports.IDENTIFIER.test(key) ? 
new _Code(`.${key}`) : _ `[${key}]`;\n}\nexports.getProperty = getProperty;\n//Does best effort to format the name properly\nfunction getEsmExportName(key) {\n if (typeof key == \"string\" && exports.IDENTIFIER.test(key)) {\n return new _Code(`${key}`);\n }\n throw new Error(`CodeGen: invalid export name: ${key}, use explicit $id name mapping`);\n}\nexports.getEsmExportName = getEsmExportName;\nfunction regexpCode(rx) {\n return new _Code(rx.toString());\n}\nexports.regexpCode = regexpCode;\n//# sourceMappingURL=code.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.or = exports.and = exports.not = exports.CodeGen = exports.operators = exports.varKinds = exports.ValueScopeName = exports.ValueScope = exports.Scope = exports.Name = exports.regexpCode = exports.stringify = exports.getProperty = exports.nil = exports.strConcat = exports.str = exports._ = void 0;\nconst code_1 = require(\"./code\");\nconst scope_1 = require(\"./scope\");\nvar code_2 = require(\"./code\");\nObject.defineProperty(exports, \"_\", { enumerable: true, get: function () { return code_2._; } });\nObject.defineProperty(exports, \"str\", { enumerable: true, get: function () { return code_2.str; } });\nObject.defineProperty(exports, \"strConcat\", { enumerable: true, get: function () { return code_2.strConcat; } });\nObject.defineProperty(exports, \"nil\", { enumerable: true, get: function () { return code_2.nil; } });\nObject.defineProperty(exports, \"getProperty\", { enumerable: true, get: function () { return code_2.getProperty; } });\nObject.defineProperty(exports, \"stringify\", { enumerable: true, get: function () { return code_2.stringify; } });\nObject.defineProperty(exports, \"regexpCode\", { enumerable: true, get: function () { return code_2.regexpCode; } });\nObject.defineProperty(exports, \"Name\", { enumerable: true, get: function () { return code_2.Name; } });\nvar scope_2 = require(\"./scope\");\nObject.defineProperty(exports, \"Scope\", { enumerable: true, get: function () { return scope_2.Scope; } });\nObject.defineProperty(exports, \"ValueScope\", { enumerable: true, get: function () { return scope_2.ValueScope; } });\nObject.defineProperty(exports, \"ValueScopeName\", { enumerable: true, get: function () { return scope_2.ValueScopeName; } });\nObject.defineProperty(exports, \"varKinds\", { enumerable: true, get: function () { return scope_2.varKinds; } });\nexports.operators = {\n GT: new code_1._Code(\">\"),\n GTE: new code_1._Code(\">=\"),\n LT: new code_1._Code(\"<\"),\n LTE: new code_1._Code(\"<=\"),\n EQ: new code_1._Code(\"===\"),\n NEQ: new code_1._Code(\"!==\"),\n NOT: new code_1._Code(\"!\"),\n OR: new code_1._Code(\"||\"),\n AND: new code_1._Code(\"&&\"),\n ADD: new code_1._Code(\"+\"),\n};\nclass Node {\n optimizeNodes() {\n return this;\n }\n optimizeNames(_names, _constants) {\n return this;\n }\n}\nclass Def extends Node {\n constructor(varKind, name, rhs) {\n super();\n this.varKind = varKind;\n this.name = name;\n this.rhs = rhs;\n }\n render({ es5, _n }) {\n const varKind = es5 ? scope_1.varKinds.var : this.varKind;\n const rhs = this.rhs === undefined ? \"\" : ` = ${this.rhs}`;\n return `${varKind} ${this.name}${rhs};` + _n;\n }\n optimizeNames(names, constants) {\n if (!names[this.name.str])\n return;\n if (this.rhs)\n this.rhs = optimizeExpr(this.rhs, names, constants);\n return this;\n }\n get names() {\n return this.rhs instanceof code_1._CodeOrName ? 
this.rhs.names : {};\n }\n}\nclass Assign extends Node {\n constructor(lhs, rhs, sideEffects) {\n super();\n this.lhs = lhs;\n this.rhs = rhs;\n this.sideEffects = sideEffects;\n }\n render({ _n }) {\n return `${this.lhs} = ${this.rhs};` + _n;\n }\n optimizeNames(names, constants) {\n if (this.lhs instanceof code_1.Name && !names[this.lhs.str] && !this.sideEffects)\n return;\n this.rhs = optimizeExpr(this.rhs, names, constants);\n return this;\n }\n get names() {\n const names = this.lhs instanceof code_1.Name ? {} : { ...this.lhs.names };\n return addExprNames(names, this.rhs);\n }\n}\nclass AssignOp extends Assign {\n constructor(lhs, op, rhs, sideEffects) {\n super(lhs, rhs, sideEffects);\n this.op = op;\n }\n render({ _n }) {\n return `${this.lhs} ${this.op}= ${this.rhs};` + _n;\n }\n}\nclass Label extends Node {\n constructor(label) {\n super();\n this.label = label;\n this.names = {};\n }\n render({ _n }) {\n return `${this.label}:` + _n;\n }\n}\nclass Break extends Node {\n constructor(label) {\n super();\n this.label = label;\n this.names = {};\n }\n render({ _n }) {\n const label = this.label ? ` ${this.label}` : \"\";\n return `break${label};` + _n;\n }\n}\nclass Throw extends Node {\n constructor(error) {\n super();\n this.error = error;\n }\n render({ _n }) {\n return `throw ${this.error};` + _n;\n }\n get names() {\n return this.error.names;\n }\n}\nclass AnyCode extends Node {\n constructor(code) {\n super();\n this.code = code;\n }\n render({ _n }) {\n return `${this.code};` + _n;\n }\n optimizeNodes() {\n return `${this.code}` ? this : undefined;\n }\n optimizeNames(names, constants) {\n this.code = optimizeExpr(this.code, names, constants);\n return this;\n }\n get names() {\n return this.code instanceof code_1._CodeOrName ? this.code.names : {};\n }\n}\nclass ParentNode extends Node {\n constructor(nodes = []) {\n super();\n this.nodes = nodes;\n }\n render(opts) {\n return this.nodes.reduce((code, n) => code + n.render(opts), \"\");\n }\n optimizeNodes() {\n const { nodes } = this;\n let i = nodes.length;\n while (i--) {\n const n = nodes[i].optimizeNodes();\n if (Array.isArray(n))\n nodes.splice(i, 1, ...n);\n else if (n)\n nodes[i] = n;\n else\n nodes.splice(i, 1);\n }\n return nodes.length > 0 ? this : undefined;\n }\n optimizeNames(names, constants) {\n const { nodes } = this;\n let i = nodes.length;\n while (i--) {\n // iterating backwards improves 1-pass optimization\n const n = nodes[i];\n if (n.optimizeNames(names, constants))\n continue;\n subtractNames(names, n.names);\n nodes.splice(i, 1);\n }\n return nodes.length > 0 ? this : undefined;\n }\n get names() {\n return this.nodes.reduce((names, n) => addNames(names, n.names), {});\n }\n}\nclass BlockNode extends ParentNode {\n render(opts) {\n return \"{\" + opts._n + super.render(opts) + \"}\" + opts._n;\n }\n}\nclass Root extends ParentNode {\n}\nclass Else extends BlockNode {\n}\nElse.kind = \"else\";\nclass If extends BlockNode {\n constructor(condition, nodes) {\n super(nodes);\n this.condition = condition;\n }\n render(opts) {\n let code = `if(${this.condition})` + super.render(opts);\n if (this.else)\n code += \"else \" + this.else.render(opts);\n return code;\n }\n optimizeNodes() {\n super.optimizeNodes();\n const cond = this.condition;\n if (cond === true)\n return this.nodes; // else is ignored here\n let e = this.else;\n if (e) {\n const ns = e.optimizeNodes();\n e = this.else = Array.isArray(ns) ? new Else(ns) : ns;\n }\n if (e) {\n if (cond === false)\n return e instanceof If ? 
e : e.nodes;\n if (this.nodes.length)\n return this;\n return new If(not(cond), e instanceof If ? [e] : e.nodes);\n }\n if (cond === false || !this.nodes.length)\n return undefined;\n return this;\n }\n optimizeNames(names, constants) {\n var _a;\n this.else = (_a = this.else) === null || _a === void 0 ? void 0 : _a.optimizeNames(names, constants);\n if (!(super.optimizeNames(names, constants) || this.else))\n return;\n this.condition = optimizeExpr(this.condition, names, constants);\n return this;\n }\n get names() {\n const names = super.names;\n addExprNames(names, this.condition);\n if (this.else)\n addNames(names, this.else.names);\n return names;\n }\n}\nIf.kind = \"if\";\nclass For extends BlockNode {\n}\nFor.kind = \"for\";\nclass ForLoop extends For {\n constructor(iteration) {\n super();\n this.iteration = iteration;\n }\n render(opts) {\n return `for(${this.iteration})` + super.render(opts);\n }\n optimizeNames(names, constants) {\n if (!super.optimizeNames(names, constants))\n return;\n this.iteration = optimizeExpr(this.iteration, names, constants);\n return this;\n }\n get names() {\n return addNames(super.names, this.iteration.names);\n }\n}\nclass ForRange extends For {\n constructor(varKind, name, from, to) {\n super();\n this.varKind = varKind;\n this.name = name;\n this.from = from;\n this.to = to;\n }\n render(opts) {\n const varKind = opts.es5 ? scope_1.varKinds.var : this.varKind;\n const { name, from, to } = this;\n return `for(${varKind} ${name}=${from}; ${name}<${to}; ${name}++)` + super.render(opts);\n }\n get names() {\n const names = addExprNames(super.names, this.from);\n return addExprNames(names, this.to);\n }\n}\nclass ForIter extends For {\n constructor(loop, varKind, name, iterable) {\n super();\n this.loop = loop;\n this.varKind = varKind;\n this.name = name;\n this.iterable = iterable;\n }\n render(opts) {\n return `for(${this.varKind} ${this.name} ${this.loop} ${this.iterable})` + super.render(opts);\n }\n optimizeNames(names, constants) {\n if (!super.optimizeNames(names, constants))\n return;\n this.iterable = optimizeExpr(this.iterable, names, constants);\n return this;\n }\n get names() {\n return addNames(super.names, this.iterable.names);\n }\n}\nclass Func extends BlockNode {\n constructor(name, args, async) {\n super();\n this.name = name;\n this.args = args;\n this.async = async;\n }\n render(opts) {\n const _async = this.async ? \"async \" : \"\";\n return `${_async}function ${this.name}(${this.args})` + super.render(opts);\n }\n}\nFunc.kind = \"func\";\nclass Return extends ParentNode {\n render(opts) {\n return \"return \" + super.render(opts);\n }\n}\nReturn.kind = \"return\";\nclass Try extends BlockNode {\n render(opts) {\n let code = \"try\" + super.render(opts);\n if (this.catch)\n code += this.catch.render(opts);\n if (this.finally)\n code += this.finally.render(opts);\n return code;\n }\n optimizeNodes() {\n var _a, _b;\n super.optimizeNodes();\n (_a = this.catch) === null || _a === void 0 ? void 0 : _a.optimizeNodes();\n (_b = this.finally) === null || _b === void 0 ? void 0 : _b.optimizeNodes();\n return this;\n }\n optimizeNames(names, constants) {\n var _a, _b;\n super.optimizeNames(names, constants);\n (_a = this.catch) === null || _a === void 0 ? void 0 : _a.optimizeNames(names, constants);\n (_b = this.finally) === null || _b === void 0 ? 
void 0 : _b.optimizeNames(names, constants);\n return this;\n }\n get names() {\n const names = super.names;\n if (this.catch)\n addNames(names, this.catch.names);\n if (this.finally)\n addNames(names, this.finally.names);\n return names;\n }\n}\nclass Catch extends BlockNode {\n constructor(error) {\n super();\n this.error = error;\n }\n render(opts) {\n return `catch(${this.error})` + super.render(opts);\n }\n}\nCatch.kind = \"catch\";\nclass Finally extends BlockNode {\n render(opts) {\n return \"finally\" + super.render(opts);\n }\n}\nFinally.kind = \"finally\";\nclass CodeGen {\n constructor(extScope, opts = {}) {\n this._values = {};\n this._blockStarts = [];\n this._constants = {};\n this.opts = { ...opts, _n: opts.lines ? \"\\n\" : \"\" };\n this._extScope = extScope;\n this._scope = new scope_1.Scope({ parent: extScope });\n this._nodes = [new Root()];\n }\n toString() {\n return this._root.render(this.opts);\n }\n // returns unique name in the internal scope\n name(prefix) {\n return this._scope.name(prefix);\n }\n // reserves unique name in the external scope\n scopeName(prefix) {\n return this._extScope.name(prefix);\n }\n // reserves unique name in the external scope and assigns value to it\n scopeValue(prefixOrName, value) {\n const name = this._extScope.value(prefixOrName, value);\n const vs = this._values[name.prefix] || (this._values[name.prefix] = new Set());\n vs.add(name);\n return name;\n }\n getScopeValue(prefix, keyOrRef) {\n return this._extScope.getValue(prefix, keyOrRef);\n }\n // return code that assigns values in the external scope to the names that are used internally\n // (same names that were returned by gen.scopeName or gen.scopeValue)\n scopeRefs(scopeName) {\n return this._extScope.scopeRefs(scopeName, this._values);\n }\n scopeCode() {\n return this._extScope.scopeCode(this._values);\n }\n _def(varKind, nameOrPrefix, rhs, constant) {\n const name = this._scope.toName(nameOrPrefix);\n if (rhs !== undefined && constant)\n this._constants[name.str] = rhs;\n this._leafNode(new Def(varKind, name, rhs));\n return name;\n }\n // `const` declaration (`var` in es5 mode)\n const(nameOrPrefix, rhs, _constant) {\n return this._def(scope_1.varKinds.const, nameOrPrefix, rhs, _constant);\n }\n // `let` declaration with optional assignment (`var` in es5 mode)\n let(nameOrPrefix, rhs, _constant) {\n return this._def(scope_1.varKinds.let, nameOrPrefix, rhs, _constant);\n }\n // `var` declaration with optional assignment\n var(nameOrPrefix, rhs, _constant) {\n return this._def(scope_1.varKinds.var, nameOrPrefix, rhs, _constant);\n }\n // assignment code\n assign(lhs, rhs, sideEffects) {\n return this._leafNode(new Assign(lhs, rhs, sideEffects));\n }\n // `+=` code\n add(lhs, rhs) {\n return this._leafNode(new AssignOp(lhs, exports.operators.ADD, rhs));\n }\n // appends passed SafeExpr to code or executes Block\n code(c) {\n if (typeof c == \"function\")\n c();\n else if (c !== code_1.nil)\n this._leafNode(new AnyCode(c));\n return this;\n }\n // returns code for object literal for the passed argument list of key-value pairs\n object(...keyValues) {\n const code = [\"{\"];\n for (const [key, value] of keyValues) {\n if (code.length > 1)\n code.push(\",\");\n code.push(key);\n if (key !== value || this.opts.es5) {\n code.push(\":\");\n (0, code_1.addCodeArg)(code, value);\n }\n }\n code.push(\"}\");\n return new code_1._Code(code);\n }\n // `if` clause (or statement if `thenBody` and, optionally, `elseBody` are passed)\n if(condition, thenBody, elseBody) {\n 
this._blockNode(new If(condition));\n if (thenBody && elseBody) {\n this.code(thenBody).else().code(elseBody).endIf();\n }\n else if (thenBody) {\n this.code(thenBody).endIf();\n }\n else if (elseBody) {\n throw new Error('CodeGen: \"else\" body without \"then\" body');\n }\n return this;\n }\n // `else if` clause - invalid without `if` or after `else` clauses\n elseIf(condition) {\n return this._elseNode(new If(condition));\n }\n // `else` clause - only valid after `if` or `else if` clauses\n else() {\n return this._elseNode(new Else());\n }\n // end `if` statement (needed if gen.if was used only with condition)\n endIf() {\n return this._endBlockNode(If, Else);\n }\n _for(node, forBody) {\n this._blockNode(node);\n if (forBody)\n this.code(forBody).endFor();\n return this;\n }\n // a generic `for` clause (or statement if `forBody` is passed)\n for(iteration, forBody) {\n return this._for(new ForLoop(iteration), forBody);\n }\n // `for` statement for a range of values\n forRange(nameOrPrefix, from, to, forBody, varKind = this.opts.es5 ? scope_1.varKinds.var : scope_1.varKinds.let) {\n const name = this._scope.toName(nameOrPrefix);\n return this._for(new ForRange(varKind, name, from, to), () => forBody(name));\n }\n // `for-of` statement (in es5 mode replace with a normal for loop)\n forOf(nameOrPrefix, iterable, forBody, varKind = scope_1.varKinds.const) {\n const name = this._scope.toName(nameOrPrefix);\n if (this.opts.es5) {\n const arr = iterable instanceof code_1.Name ? iterable : this.var(\"_arr\", iterable);\n return this.forRange(\"_i\", 0, (0, code_1._) `${arr}.length`, (i) => {\n this.var(name, (0, code_1._) `${arr}[${i}]`);\n forBody(name);\n });\n }\n return this._for(new ForIter(\"of\", varKind, name, iterable), () => forBody(name));\n }\n // `for-in` statement.\n // With option `ownProperties` replaced with a `for-of` loop for object keys\n forIn(nameOrPrefix, obj, forBody, varKind = this.opts.es5 ? 
scope_1.varKinds.var : scope_1.varKinds.const) {\n if (this.opts.ownProperties) {\n return this.forOf(nameOrPrefix, (0, code_1._) `Object.keys(${obj})`, forBody);\n }\n const name = this._scope.toName(nameOrPrefix);\n return this._for(new ForIter(\"in\", varKind, name, obj), () => forBody(name));\n }\n // end `for` loop\n endFor() {\n return this._endBlockNode(For);\n }\n // `label` statement\n label(label) {\n return this._leafNode(new Label(label));\n }\n // `break` statement\n break(label) {\n return this._leafNode(new Break(label));\n }\n // `return` statement\n return(value) {\n const node = new Return();\n this._blockNode(node);\n this.code(value);\n if (node.nodes.length !== 1)\n throw new Error('CodeGen: \"return\" should have one node');\n return this._endBlockNode(Return);\n }\n // `try` statement\n try(tryBody, catchCode, finallyCode) {\n if (!catchCode && !finallyCode)\n throw new Error('CodeGen: \"try\" without \"catch\" and \"finally\"');\n const node = new Try();\n this._blockNode(node);\n this.code(tryBody);\n if (catchCode) {\n const error = this.name(\"e\");\n this._currNode = node.catch = new Catch(error);\n catchCode(error);\n }\n if (finallyCode) {\n this._currNode = node.finally = new Finally();\n this.code(finallyCode);\n }\n return this._endBlockNode(Catch, Finally);\n }\n // `throw` statement\n throw(error) {\n return this._leafNode(new Throw(error));\n }\n // start self-balancing block\n block(body, nodeCount) {\n this._blockStarts.push(this._nodes.length);\n if (body)\n this.code(body).endBlock(nodeCount);\n return this;\n }\n // end the current self-balancing block\n endBlock(nodeCount) {\n const len = this._blockStarts.pop();\n if (len === undefined)\n throw new Error(\"CodeGen: not in self-balancing block\");\n const toClose = this._nodes.length - len;\n if (toClose < 0 || (nodeCount !== undefined && toClose !== nodeCount)) {\n throw new Error(`CodeGen: wrong number of nodes: ${toClose} vs ${nodeCount} expected`);\n }\n this._nodes.length = len;\n return this;\n }\n // `function` heading (or definition if funcBody is passed)\n func(name, args = code_1.nil, async, funcBody) {\n this._blockNode(new Func(name, args, async));\n if (funcBody)\n this.code(funcBody).endFunc();\n return this;\n }\n // end function definition\n endFunc() {\n return this._endBlockNode(Func);\n }\n optimize(n = 1) {\n while (n-- > 0) {\n this._root.optimizeNodes();\n this._root.optimizeNames(this._root.names, this._constants);\n }\n }\n _leafNode(node) {\n this._currNode.nodes.push(node);\n return this;\n }\n _blockNode(node) {\n this._currNode.nodes.push(node);\n this._nodes.push(node);\n }\n _endBlockNode(N1, N2) {\n const n = this._currNode;\n if (n instanceof N1 || (N2 && n instanceof N2)) {\n this._nodes.pop();\n return this;\n }\n throw new Error(`CodeGen: not in block \"${N2 ? `${N1.kind}/${N2.kind}` : N1.kind}\"`);\n }\n _elseNode(node) {\n const n = this._currNode;\n if (!(n instanceof If)) {\n throw new Error('CodeGen: \"else\" without \"if\"');\n }\n this._currNode = n.else = node;\n return this;\n }\n get _root() {\n return this._nodes[0];\n }\n get _currNode() {\n const ns = this._nodes;\n return ns[ns.length - 1];\n }\n set _currNode(node) {\n const ns = this._nodes;\n ns[ns.length - 1] = node;\n }\n}\nexports.CodeGen = CodeGen;\nfunction addNames(names, from) {\n for (const n in from)\n names[n] = (names[n] || 0) + (from[n] || 0);\n return names;\n}\nfunction addExprNames(names, from) {\n return from instanceof code_1._CodeOrName ? 
addNames(names, from.names) : names;\n}\nfunction optimizeExpr(expr, names, constants) {\n if (expr instanceof code_1.Name)\n return replaceName(expr);\n if (!canOptimize(expr))\n return expr;\n return new code_1._Code(expr._items.reduce((items, c) => {\n if (c instanceof code_1.Name)\n c = replaceName(c);\n if (c instanceof code_1._Code)\n items.push(...c._items);\n else\n items.push(c);\n return items;\n }, []));\n function replaceName(n) {\n const c = constants[n.str];\n if (c === undefined || names[n.str] !== 1)\n return n;\n delete names[n.str];\n return c;\n }\n function canOptimize(e) {\n return (e instanceof code_1._Code &&\n e._items.some((c) => c instanceof code_1.Name && names[c.str] === 1 && constants[c.str] !== undefined));\n }\n}\nfunction subtractNames(names, from) {\n for (const n in from)\n names[n] = (names[n] || 0) - (from[n] || 0);\n}\nfunction not(x) {\n return typeof x == \"boolean\" || typeof x == \"number\" || x === null ? !x : (0, code_1._) `!${par(x)}`;\n}\nexports.not = not;\nconst andCode = mappend(exports.operators.AND);\n// boolean AND (&&) expression with the passed arguments\nfunction and(...args) {\n return args.reduce(andCode);\n}\nexports.and = and;\nconst orCode = mappend(exports.operators.OR);\n// boolean OR (||) expression with the passed arguments\nfunction or(...args) {\n return args.reduce(orCode);\n}\nexports.or = or;\nfunction mappend(op) {\n return (x, y) => (x === code_1.nil ? y : y === code_1.nil ? x : (0, code_1._) `${par(x)} ${op} ${par(y)}`);\n}\nfunction par(x) {\n return x instanceof code_1.Name ? x : (0, code_1._) `(${x})`;\n}\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ValueScope = exports.ValueScopeName = exports.Scope = exports.varKinds = exports.UsedValueState = void 0;\nconst code_1 = require(\"./code\");\nclass ValueError extends Error {\n constructor(name) {\n super(`CodeGen: \"code\" for ${name} not defined`);\n this.value = name.value;\n }\n}\nvar UsedValueState;\n(function (UsedValueState) {\n UsedValueState[UsedValueState[\"Started\"] = 0] = \"Started\";\n UsedValueState[UsedValueState[\"Completed\"] = 1] = \"Completed\";\n})(UsedValueState = exports.UsedValueState || (exports.UsedValueState = {}));\nexports.varKinds = {\n const: new code_1.Name(\"const\"),\n let: new code_1.Name(\"let\"),\n var: new code_1.Name(\"var\"),\n};\nclass Scope {\n constructor({ prefixes, parent } = {}) {\n this._names = {};\n this._prefixes = prefixes;\n this._parent = parent;\n }\n toName(nameOrPrefix) {\n return nameOrPrefix instanceof code_1.Name ? nameOrPrefix : this.name(nameOrPrefix);\n }\n name(prefix) {\n return new code_1.Name(this._newName(prefix));\n }\n _newName(prefix) {\n const ng = this._names[prefix] || this._nameGroup(prefix);\n return `${prefix}${ng.index++}`;\n }\n _nameGroup(prefix) {\n var _a, _b;\n if (((_b = (_a = this._parent) === null || _a === void 0 ? void 0 : _a._prefixes) === null || _b === void 0 ? 
void 0 : _b.has(prefix)) || (this._prefixes && !this._prefixes.has(prefix))) {\n throw new Error(`CodeGen: prefix \"${prefix}\" is not allowed in this scope`);\n }\n return (this._names[prefix] = { prefix, index: 0 });\n }\n}\nexports.Scope = Scope;\nclass ValueScopeName extends code_1.Name {\n constructor(prefix, nameStr) {\n super(nameStr);\n this.prefix = prefix;\n }\n setValue(value, { property, itemIndex }) {\n this.value = value;\n this.scopePath = (0, code_1._) `.${new code_1.Name(property)}[${itemIndex}]`;\n }\n}\nexports.ValueScopeName = ValueScopeName;\nconst line = (0, code_1._) `\\n`;\nclass ValueScope extends Scope {\n constructor(opts) {\n super(opts);\n this._values = {};\n this._scope = opts.scope;\n this.opts = { ...opts, _n: opts.lines ? line : code_1.nil };\n }\n get() {\n return this._scope;\n }\n name(prefix) {\n return new ValueScopeName(prefix, this._newName(prefix));\n }\n value(nameOrPrefix, value) {\n var _a;\n if (value.ref === undefined)\n throw new Error(\"CodeGen: ref must be passed in value\");\n const name = this.toName(nameOrPrefix);\n const { prefix } = name;\n const valueKey = (_a = value.key) !== null && _a !== void 0 ? _a : value.ref;\n let vs = this._values[prefix];\n if (vs) {\n const _name = vs.get(valueKey);\n if (_name)\n return _name;\n }\n else {\n vs = this._values[prefix] = new Map();\n }\n vs.set(valueKey, name);\n const s = this._scope[prefix] || (this._scope[prefix] = []);\n const itemIndex = s.length;\n s[itemIndex] = value.ref;\n name.setValue(value, { property: prefix, itemIndex });\n return name;\n }\n getValue(prefix, keyOrRef) {\n const vs = this._values[prefix];\n if (!vs)\n return;\n return vs.get(keyOrRef);\n }\n scopeRefs(scopeName, values = this._values) {\n return this._reduceValues(values, (name) => {\n if (name.scopePath === undefined)\n throw new Error(`CodeGen: name \"${name}\" has no value`);\n return (0, code_1._) `${scopeName}${name.scopePath}`;\n });\n }\n scopeCode(values = this._values, usedValues, getCode) {\n return this._reduceValues(values, (name) => {\n if (name.value === undefined)\n throw new Error(`CodeGen: name \"${name}\" has no value`);\n return name.value.code;\n }, usedValues, getCode);\n }\n _reduceValues(values, valueCode, usedValues = {}, getCode) {\n let code = code_1.nil;\n for (const prefix in values) {\n const vs = values[prefix];\n if (!vs)\n continue;\n const nameSet = (usedValues[prefix] = usedValues[prefix] || new Map());\n vs.forEach((name) => {\n if (nameSet.has(name))\n return;\n nameSet.set(name, UsedValueState.Started);\n let c = valueCode(name);\n if (c) {\n const def = this.opts.es5 ? exports.varKinds.var : exports.varKinds.const;\n code = (0, code_1._) `${code}${def} ${name} = ${c};${this.opts._n}`;\n }\n else if ((c = getCode === null || getCode === void 0 ? 
void 0 : getCode(name))) {\n code = (0, code_1._) `${code}${c}${this.opts._n}`;\n }\n else {\n throw new ValueError(name);\n }\n nameSet.set(name, UsedValueState.Completed);\n });\n }\n return code;\n }\n}\nexports.ValueScope = ValueScope;\n//# sourceMappingURL=scope.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.extendErrors = exports.resetErrorsCount = exports.reportExtraError = exports.reportError = exports.keyword$DataError = exports.keywordError = void 0;\nconst codegen_1 = require(\"./codegen\");\nconst util_1 = require(\"./util\");\nconst names_1 = require(\"./names\");\nexports.keywordError = {\n message: ({ keyword }) => (0, codegen_1.str) `must pass \"${keyword}\" keyword validation`,\n};\nexports.keyword$DataError = {\n message: ({ keyword, schemaType }) => schemaType\n ? (0, codegen_1.str) `\"${keyword}\" keyword must be ${schemaType} ($data)`\n : (0, codegen_1.str) `\"${keyword}\" keyword is invalid ($data)`,\n};\nfunction reportError(cxt, error = exports.keywordError, errorPaths, overrideAllErrors) {\n const { it } = cxt;\n const { gen, compositeRule, allErrors } = it;\n const errObj = errorObjectCode(cxt, error, errorPaths);\n if (overrideAllErrors !== null && overrideAllErrors !== void 0 ? overrideAllErrors : (compositeRule || allErrors)) {\n addError(gen, errObj);\n }\n else {\n returnErrors(it, (0, codegen_1._) `[${errObj}]`);\n }\n}\nexports.reportError = reportError;\nfunction reportExtraError(cxt, error = exports.keywordError, errorPaths) {\n const { it } = cxt;\n const { gen, compositeRule, allErrors } = it;\n const errObj = errorObjectCode(cxt, error, errorPaths);\n addError(gen, errObj);\n if (!(compositeRule || allErrors)) {\n returnErrors(it, names_1.default.vErrors);\n }\n}\nexports.reportExtraError = reportExtraError;\nfunction resetErrorsCount(gen, errsCount) {\n gen.assign(names_1.default.errors, errsCount);\n gen.if((0, codegen_1._) `${names_1.default.vErrors} !== null`, () => gen.if(errsCount, () => gen.assign((0, codegen_1._) `${names_1.default.vErrors}.length`, errsCount), () => gen.assign(names_1.default.vErrors, null)));\n}\nexports.resetErrorsCount = resetErrorsCount;\nfunction extendErrors({ gen, keyword, schemaValue, data, errsCount, it, }) {\n /* istanbul ignore if */\n if (errsCount === undefined)\n throw new Error(\"ajv implementation error\");\n const err = gen.name(\"err\");\n gen.forRange(\"i\", errsCount, names_1.default.errors, (i) => {\n gen.const(err, (0, codegen_1._) `${names_1.default.vErrors}[${i}]`);\n gen.if((0, codegen_1._) `${err}.instancePath === undefined`, () => gen.assign((0, codegen_1._) `${err}.instancePath`, (0, codegen_1.strConcat)(names_1.default.instancePath, it.errorPath)));\n gen.assign((0, codegen_1._) `${err}.schemaPath`, (0, codegen_1.str) `${it.errSchemaPath}/${keyword}`);\n if (it.opts.verbose) {\n gen.assign((0, codegen_1._) `${err}.schema`, schemaValue);\n gen.assign((0, codegen_1._) `${err}.data`, data);\n }\n });\n}\nexports.extendErrors = extendErrors;\nfunction addError(gen, errObj) {\n const err = gen.const(\"err\", errObj);\n gen.if((0, codegen_1._) `${names_1.default.vErrors} === null`, () => gen.assign(names_1.default.vErrors, (0, codegen_1._) `[${err}]`), (0, codegen_1._) `${names_1.default.vErrors}.push(${err})`);\n gen.code((0, codegen_1._) `${names_1.default.errors}++`);\n}\nfunction returnErrors(it, errs) {\n const { gen, validateName, schemaEnv } = it;\n if (schemaEnv.$async) {\n gen.throw((0, codegen_1._) `new ${it.ValidationError}(${errs})`);\n }\n 
else {\n gen.assign((0, codegen_1._) `${validateName}.errors`, errs);\n gen.return(false);\n }\n}\nconst E = {\n keyword: new codegen_1.Name(\"keyword\"),\n schemaPath: new codegen_1.Name(\"schemaPath\"),\n params: new codegen_1.Name(\"params\"),\n propertyName: new codegen_1.Name(\"propertyName\"),\n message: new codegen_1.Name(\"message\"),\n schema: new codegen_1.Name(\"schema\"),\n parentSchema: new codegen_1.Name(\"parentSchema\"),\n};\nfunction errorObjectCode(cxt, error, errorPaths) {\n const { createErrors } = cxt.it;\n if (createErrors === false)\n return (0, codegen_1._) `{}`;\n return errorObject(cxt, error, errorPaths);\n}\nfunction errorObject(cxt, error, errorPaths = {}) {\n const { gen, it } = cxt;\n const keyValues = [\n errorInstancePath(it, errorPaths),\n errorSchemaPath(cxt, errorPaths),\n ];\n extraErrorProps(cxt, error, keyValues);\n return gen.object(...keyValues);\n}\nfunction errorInstancePath({ errorPath }, { instancePath }) {\n const instPath = instancePath\n ? (0, codegen_1.str) `${errorPath}${(0, util_1.getErrorPath)(instancePath, util_1.Type.Str)}`\n : errorPath;\n return [names_1.default.instancePath, (0, codegen_1.strConcat)(names_1.default.instancePath, instPath)];\n}\nfunction errorSchemaPath({ keyword, it: { errSchemaPath } }, { schemaPath, parentSchema }) {\n let schPath = parentSchema ? errSchemaPath : (0, codegen_1.str) `${errSchemaPath}/${keyword}`;\n if (schemaPath) {\n schPath = (0, codegen_1.str) `${schPath}${(0, util_1.getErrorPath)(schemaPath, util_1.Type.Str)}`;\n }\n return [E.schemaPath, schPath];\n}\nfunction extraErrorProps(cxt, { params, message }, keyValues) {\n const { keyword, data, schemaValue, it } = cxt;\n const { opts, propertyName, topSchemaRef, schemaPath } = it;\n keyValues.push([E.keyword, keyword], [E.params, typeof params == \"function\" ? params(cxt) : params || (0, codegen_1._) `{}`]);\n if (opts.messages) {\n keyValues.push([E.message, typeof message == \"function\" ? message(cxt) : message]);\n }\n if (opts.verbose) {\n keyValues.push([E.schema, schemaValue], [E.parentSchema, (0, codegen_1._) `${topSchemaRef}${schemaPath}`], [names_1.default.data, data]);\n }\n if (propertyName)\n keyValues.push([E.propertyName, propertyName]);\n}\n//# sourceMappingURL=errors.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.resolveSchema = exports.getCompilingSchema = exports.resolveRef = exports.compileSchema = exports.SchemaEnv = void 0;\nconst codegen_1 = require(\"./codegen\");\nconst validation_error_1 = require(\"../runtime/validation_error\");\nconst names_1 = require(\"./names\");\nconst resolve_1 = require(\"./resolve\");\nconst util_1 = require(\"./util\");\nconst validate_1 = require(\"./validate\");\nclass SchemaEnv {\n constructor(env) {\n var _a;\n this.refs = {};\n this.dynamicAnchors = {};\n let schema;\n if (typeof env.schema == \"object\")\n schema = env.schema;\n this.schema = env.schema;\n this.schemaId = env.schemaId;\n this.root = env.root || this;\n this.baseId = (_a = env.baseId) !== null && _a !== void 0 ? _a : (0, resolve_1.normalizeId)(schema === null || schema === void 0 ? void 0 : schema[env.schemaId || \"$id\"]);\n this.schemaPath = env.schemaPath;\n this.localRefs = env.localRefs;\n this.meta = env.meta;\n this.$async = schema === null || schema === void 0 ? 
void 0 : schema.$async;\n this.refs = {};\n }\n}\nexports.SchemaEnv = SchemaEnv;\n// let codeSize = 0\n// let nodeCount = 0\n// Compiles schema in SchemaEnv\nfunction compileSchema(sch) {\n // TODO refactor - remove compilations\n const _sch = getCompilingSchema.call(this, sch);\n if (_sch)\n return _sch;\n const rootId = (0, resolve_1.getFullPath)(this.opts.uriResolver, sch.root.baseId); // TODO if getFullPath removed 1 tests fails\n const { es5, lines } = this.opts.code;\n const { ownProperties } = this.opts;\n const gen = new codegen_1.CodeGen(this.scope, { es5, lines, ownProperties });\n let _ValidationError;\n if (sch.$async) {\n _ValidationError = gen.scopeValue(\"Error\", {\n ref: validation_error_1.default,\n code: (0, codegen_1._) `require(\"ajv/dist/runtime/validation_error\").default`,\n });\n }\n const validateName = gen.scopeName(\"validate\");\n sch.validateName = validateName;\n const schemaCxt = {\n gen,\n allErrors: this.opts.allErrors,\n data: names_1.default.data,\n parentData: names_1.default.parentData,\n parentDataProperty: names_1.default.parentDataProperty,\n dataNames: [names_1.default.data],\n dataPathArr: [codegen_1.nil],\n dataLevel: 0,\n dataTypes: [],\n definedProperties: new Set(),\n topSchemaRef: gen.scopeValue(\"schema\", this.opts.code.source === true\n ? { ref: sch.schema, code: (0, codegen_1.stringify)(sch.schema) }\n : { ref: sch.schema }),\n validateName,\n ValidationError: _ValidationError,\n schema: sch.schema,\n schemaEnv: sch,\n rootId,\n baseId: sch.baseId || rootId,\n schemaPath: codegen_1.nil,\n errSchemaPath: sch.schemaPath || (this.opts.jtd ? \"\" : \"#\"),\n errorPath: (0, codegen_1._) `\"\"`,\n opts: this.opts,\n self: this,\n };\n let sourceCode;\n try {\n this._compilations.add(sch);\n (0, validate_1.validateFunctionCode)(schemaCxt);\n gen.optimize(this.opts.code.optimize);\n // gen.optimize(1)\n const validateCode = gen.toString();\n sourceCode = `${gen.scopeRefs(names_1.default.scope)}return ${validateCode}`;\n // console.log((codeSize += sourceCode.length), (nodeCount += gen.nodeCount))\n if (this.opts.code.process)\n sourceCode = this.opts.code.process(sourceCode, sch);\n // console.log(\"\\n\\n\\n *** \\n\", sourceCode)\n const makeValidate = new Function(`${names_1.default.self}`, `${names_1.default.scope}`, sourceCode);\n const validate = makeValidate(this, this.scope.get());\n this.scope.value(validateName, { ref: validate });\n validate.errors = null;\n validate.schema = sch.schema;\n validate.schemaEnv = sch;\n if (sch.$async)\n validate.$async = true;\n if (this.opts.code.source === true) {\n validate.source = { validateName, validateCode, scopeValues: gen._values };\n }\n if (this.opts.unevaluated) {\n const { props, items } = schemaCxt;\n validate.evaluated = {\n props: props instanceof codegen_1.Name ? undefined : props,\n items: items instanceof codegen_1.Name ? 
undefined : items,\n dynamicProps: props instanceof codegen_1.Name,\n dynamicItems: items instanceof codegen_1.Name,\n };\n if (validate.source)\n validate.source.evaluated = (0, codegen_1.stringify)(validate.evaluated);\n }\n sch.validate = validate;\n return sch;\n }\n catch (e) {\n delete sch.validate;\n delete sch.validateName;\n if (sourceCode)\n this.logger.error(\"Error compiling schema, function code:\", sourceCode);\n // console.log(\"\\n\\n\\n *** \\n\", sourceCode, this.opts)\n throw e;\n }\n finally {\n this._compilations.delete(sch);\n }\n}\nexports.compileSchema = compileSchema;\nfunction resolveRef(root, baseId, ref) {\n var _a;\n ref = (0, resolve_1.resolveUrl)(this.opts.uriResolver, baseId, ref);\n const schOrFunc = root.refs[ref];\n if (schOrFunc)\n return schOrFunc;\n let _sch = resolve.call(this, root, ref);\n if (_sch === undefined) {\n const schema = (_a = root.localRefs) === null || _a === void 0 ? void 0 : _a[ref]; // TODO maybe localRefs should hold SchemaEnv\n const { schemaId } = this.opts;\n if (schema)\n _sch = new SchemaEnv({ schema, schemaId, root, baseId });\n }\n if (_sch === undefined)\n return;\n return (root.refs[ref] = inlineOrCompile.call(this, _sch));\n}\nexports.resolveRef = resolveRef;\nfunction inlineOrCompile(sch) {\n if ((0, resolve_1.inlineRef)(sch.schema, this.opts.inlineRefs))\n return sch.schema;\n return sch.validate ? sch : compileSchema.call(this, sch);\n}\n// Index of schema compilation in the currently compiled list\nfunction getCompilingSchema(schEnv) {\n for (const sch of this._compilations) {\n if (sameSchemaEnv(sch, schEnv))\n return sch;\n }\n}\nexports.getCompilingSchema = getCompilingSchema;\nfunction sameSchemaEnv(s1, s2) {\n return s1.schema === s2.schema && s1.root === s2.root && s1.baseId === s2.baseId;\n}\n// resolve and compile the references ($ref)\n// TODO returns AnySchemaObject (if the schema can be inlined) or validation function\nfunction resolve(root, // information about the root schema for the current schema\nref // reference to resolve\n) {\n let sch;\n while (typeof (sch = this.refs[ref]) == \"string\")\n ref = sch;\n return sch || this.schemas[ref] || resolveSchema.call(this, root, ref);\n}\n// Resolve schema, its root and baseId\nfunction resolveSchema(root, // root object with properties schema, refs TODO below SchemaEnv is assigned to it\nref // reference to resolve\n) {\n const p = this.opts.uriResolver.parse(ref);\n const refPath = (0, resolve_1._getFullPath)(this.opts.uriResolver, p);\n let baseId = (0, resolve_1.getFullPath)(this.opts.uriResolver, root.baseId, undefined);\n // TODO `Object.keys(root.schema).length > 0` should not be needed - but removing breaks 2 tests\n if (Object.keys(root.schema).length > 0 && refPath === baseId) {\n return getJsonPointer.call(this, p, root);\n }\n const id = (0, resolve_1.normalizeId)(refPath);\n const schOrRef = this.refs[id] || this.schemas[id];\n if (typeof schOrRef == \"string\") {\n const sch = resolveSchema.call(this, root, schOrRef);\n if (typeof (sch === null || sch === void 0 ? void 0 : sch.schema) !== \"object\")\n return;\n return getJsonPointer.call(this, p, sch);\n }\n if (typeof (schOrRef === null || schOrRef === void 0 ? 
void 0 : schOrRef.schema) !== \"object\")\n return;\n if (!schOrRef.validate)\n compileSchema.call(this, schOrRef);\n if (id === (0, resolve_1.normalizeId)(ref)) {\n const { schema } = schOrRef;\n const { schemaId } = this.opts;\n const schId = schema[schemaId];\n if (schId)\n baseId = (0, resolve_1.resolveUrl)(this.opts.uriResolver, baseId, schId);\n return new SchemaEnv({ schema, schemaId, root, baseId });\n }\n return getJsonPointer.call(this, p, schOrRef);\n}\nexports.resolveSchema = resolveSchema;\nconst PREVENT_SCOPE_CHANGE = new Set([\n \"properties\",\n \"patternProperties\",\n \"enum\",\n \"dependencies\",\n \"definitions\",\n]);\nfunction getJsonPointer(parsedRef, { baseId, schema, root }) {\n var _a;\n if (((_a = parsedRef.fragment) === null || _a === void 0 ? void 0 : _a[0]) !== \"/\")\n return;\n for (const part of parsedRef.fragment.slice(1).split(\"/\")) {\n if (typeof schema === \"boolean\")\n return;\n const partSchema = schema[(0, util_1.unescapeFragment)(part)];\n if (partSchema === undefined)\n return;\n schema = partSchema;\n // TODO PREVENT_SCOPE_CHANGE could be defined in keyword def?\n const schId = typeof schema === \"object\" && schema[this.opts.schemaId];\n if (!PREVENT_SCOPE_CHANGE.has(part) && schId) {\n baseId = (0, resolve_1.resolveUrl)(this.opts.uriResolver, baseId, schId);\n }\n }\n let env;\n if (typeof schema != \"boolean\" && schema.$ref && !(0, util_1.schemaHasRulesButRef)(schema, this.RULES)) {\n const $ref = (0, resolve_1.resolveUrl)(this.opts.uriResolver, baseId, schema.$ref);\n env = resolveSchema.call(this, root, $ref);\n }\n // even though resolution failed we need to return SchemaEnv to throw exception\n // so that compileAsync loads missing schema.\n const { schemaId } = this.opts;\n env = env || new SchemaEnv({ schema, schemaId, root, baseId });\n if (env.schema !== env.root.schema)\n return env;\n return undefined;\n}\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"./codegen\");\nconst names = {\n // validation function arguments\n data: new codegen_1.Name(\"data\"),\n // args passed from referencing schema\n valCxt: new codegen_1.Name(\"valCxt\"),\n instancePath: new codegen_1.Name(\"instancePath\"),\n parentData: new codegen_1.Name(\"parentData\"),\n parentDataProperty: new codegen_1.Name(\"parentDataProperty\"),\n rootData: new codegen_1.Name(\"rootData\"),\n dynamicAnchors: new codegen_1.Name(\"dynamicAnchors\"),\n // function scoped variables\n vErrors: new codegen_1.Name(\"vErrors\"),\n errors: new codegen_1.Name(\"errors\"),\n this: new codegen_1.Name(\"this\"),\n // \"globals\"\n self: new codegen_1.Name(\"self\"),\n scope: new codegen_1.Name(\"scope\"),\n // JTD serialize/parse name for JSON string and position\n json: new codegen_1.Name(\"json\"),\n jsonPos: new codegen_1.Name(\"jsonPos\"),\n jsonLen: new codegen_1.Name(\"jsonLen\"),\n jsonPart: new codegen_1.Name(\"jsonPart\"),\n};\nexports.default = names;\n//# sourceMappingURL=names.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst resolve_1 = require(\"./resolve\");\nclass MissingRefError extends Error {\n constructor(resolver, baseId, ref, msg) {\n super(msg || `can't resolve reference ${ref} from id ${baseId}`);\n this.missingRef = (0, resolve_1.resolveUrl)(resolver, baseId, ref);\n this.missingSchema = (0, resolve_1.normalizeId)((0, resolve_1.getFullPath)(resolver, this.missingRef));\n }\n}\nexports.default = MissingRefError;\n//# 
sourceMappingURL=ref_error.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getSchemaRefs = exports.resolveUrl = exports.normalizeId = exports._getFullPath = exports.getFullPath = exports.inlineRef = void 0;\nconst util_1 = require(\"./util\");\nconst equal = require(\"fast-deep-equal\");\nconst traverse = require(\"json-schema-traverse\");\n// TODO refactor to use keyword definitions\nconst SIMPLE_INLINED = new Set([\n \"type\",\n \"format\",\n \"pattern\",\n \"maxLength\",\n \"minLength\",\n \"maxProperties\",\n \"minProperties\",\n \"maxItems\",\n \"minItems\",\n \"maximum\",\n \"minimum\",\n \"uniqueItems\",\n \"multipleOf\",\n \"required\",\n \"enum\",\n \"const\",\n]);\nfunction inlineRef(schema, limit = true) {\n if (typeof schema == \"boolean\")\n return true;\n if (limit === true)\n return !hasRef(schema);\n if (!limit)\n return false;\n return countKeys(schema) <= limit;\n}\nexports.inlineRef = inlineRef;\nconst REF_KEYWORDS = new Set([\n \"$ref\",\n \"$recursiveRef\",\n \"$recursiveAnchor\",\n \"$dynamicRef\",\n \"$dynamicAnchor\",\n]);\nfunction hasRef(schema) {\n for (const key in schema) {\n if (REF_KEYWORDS.has(key))\n return true;\n const sch = schema[key];\n if (Array.isArray(sch) && sch.some(hasRef))\n return true;\n if (typeof sch == \"object\" && hasRef(sch))\n return true;\n }\n return false;\n}\nfunction countKeys(schema) {\n let count = 0;\n for (const key in schema) {\n if (key === \"$ref\")\n return Infinity;\n count++;\n if (SIMPLE_INLINED.has(key))\n continue;\n if (typeof schema[key] == \"object\") {\n (0, util_1.eachItem)(schema[key], (sch) => (count += countKeys(sch)));\n }\n if (count === Infinity)\n return Infinity;\n }\n return count;\n}\nfunction getFullPath(resolver, id = \"\", normalize) {\n if (normalize !== false)\n id = normalizeId(id);\n const p = resolver.parse(id);\n return _getFullPath(resolver, p);\n}\nexports.getFullPath = getFullPath;\nfunction _getFullPath(resolver, p) {\n const serialized = resolver.serialize(p);\n return serialized.split(\"#\")[0] + \"#\";\n}\nexports._getFullPath = _getFullPath;\nconst TRAILING_SLASH_HASH = /#\\/?$/;\nfunction normalizeId(id) {\n return id ? id.replace(TRAILING_SLASH_HASH, \"\") : \"\";\n}\nexports.normalizeId = normalizeId;\nfunction resolveUrl(resolver, baseId, id) {\n id = normalizeId(id);\n return resolver.resolve(baseId, id);\n}\nexports.resolveUrl = resolveUrl;\nconst ANCHOR = /^[a-z_][-a-z0-9._]*$/i;\nfunction getSchemaRefs(schema, baseId) {\n if (typeof schema == \"boolean\")\n return {};\n const { schemaId, uriResolver } = this.opts;\n const schId = normalizeId(schema[schemaId] || baseId);\n const baseIds = { \"\": schId };\n const pathPrefix = getFullPath(uriResolver, schId, false);\n const localRefs = {};\n const schemaRefs = new Set();\n traverse(schema, { allKeys: true }, (sch, jsonPtr, _, parentJsonPtr) => {\n if (parentJsonPtr === undefined)\n return;\n const fullPath = pathPrefix + jsonPtr;\n let baseId = baseIds[parentJsonPtr];\n if (typeof sch[schemaId] == \"string\")\n baseId = addRef.call(this, sch[schemaId]);\n addAnchor.call(this, sch.$anchor);\n addAnchor.call(this, sch.$dynamicAnchor);\n baseIds[jsonPtr] = baseId;\n function addRef(ref) {\n // eslint-disable-next-line @typescript-eslint/unbound-method\n const _resolve = this.opts.uriResolver.resolve;\n ref = normalizeId(baseId ? 
_resolve(baseId, ref) : ref);\n if (schemaRefs.has(ref))\n throw ambiguos(ref);\n schemaRefs.add(ref);\n let schOrRef = this.refs[ref];\n if (typeof schOrRef == \"string\")\n schOrRef = this.refs[schOrRef];\n if (typeof schOrRef == \"object\") {\n checkAmbiguosRef(sch, schOrRef.schema, ref);\n }\n else if (ref !== normalizeId(fullPath)) {\n if (ref[0] === \"#\") {\n checkAmbiguosRef(sch, localRefs[ref], ref);\n localRefs[ref] = sch;\n }\n else {\n this.refs[ref] = fullPath;\n }\n }\n return ref;\n }\n function addAnchor(anchor) {\n if (typeof anchor == \"string\") {\n if (!ANCHOR.test(anchor))\n throw new Error(`invalid anchor \"${anchor}\"`);\n addRef.call(this, `#${anchor}`);\n }\n }\n });\n return localRefs;\n function checkAmbiguosRef(sch1, sch2, ref) {\n if (sch2 !== undefined && !equal(sch1, sch2))\n throw ambiguos(ref);\n }\n function ambiguos(ref) {\n return new Error(`reference \"${ref}\" resolves to more than one schema`);\n }\n}\nexports.getSchemaRefs = getSchemaRefs;\n//# sourceMappingURL=resolve.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getRules = exports.isJSONType = void 0;\nconst _jsonTypes = [\"string\", \"number\", \"integer\", \"boolean\", \"null\", \"object\", \"array\"];\nconst jsonTypes = new Set(_jsonTypes);\nfunction isJSONType(x) {\n return typeof x == \"string\" && jsonTypes.has(x);\n}\nexports.isJSONType = isJSONType;\nfunction getRules() {\n const groups = {\n number: { type: \"number\", rules: [] },\n string: { type: \"string\", rules: [] },\n array: { type: \"array\", rules: [] },\n object: { type: \"object\", rules: [] },\n };\n return {\n types: { ...groups, integer: true, boolean: true, null: true },\n rules: [{ rules: [] }, groups.number, groups.string, groups.array, groups.object],\n post: { rules: [] },\n all: {},\n keywords: {},\n };\n}\nexports.getRules = getRules;\n//# sourceMappingURL=rules.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.checkStrictMode = exports.getErrorPath = exports.Type = exports.useFunc = exports.setEvaluated = exports.evaluatedPropsToName = exports.mergeEvaluated = exports.eachItem = exports.unescapeJsonPointer = exports.escapeJsonPointer = exports.escapeFragment = exports.unescapeFragment = exports.schemaRefOrVal = exports.schemaHasRulesButRef = exports.schemaHasRules = exports.checkUnknownRules = exports.alwaysValidSchema = exports.toHash = void 0;\nconst codegen_1 = require(\"./codegen\");\nconst code_1 = require(\"./codegen/code\");\n// TODO refactor to use Set\nfunction toHash(arr) {\n const hash = {};\n for (const item of arr)\n hash[item] = true;\n return hash;\n}\nexports.toHash = toHash;\nfunction alwaysValidSchema(it, schema) {\n if (typeof schema == \"boolean\")\n return schema;\n if (Object.keys(schema).length === 0)\n return true;\n checkUnknownRules(it, schema);\n return !schemaHasRules(schema, it.self.RULES.all);\n}\nexports.alwaysValidSchema = alwaysValidSchema;\nfunction checkUnknownRules(it, schema = it.schema) {\n const { opts, self } = it;\n if (!opts.strictSchema)\n return;\n if (typeof schema === \"boolean\")\n return;\n const rules = self.RULES.keywords;\n for (const key in schema) {\n if (!rules[key])\n checkStrictMode(it, `unknown keyword: \"${key}\"`);\n }\n}\nexports.checkUnknownRules = checkUnknownRules;\nfunction schemaHasRules(schema, rules) {\n if (typeof schema == \"boolean\")\n return !schema;\n for (const key in schema)\n if (rules[key])\n return true;\n return 
false;\n}\nexports.schemaHasRules = schemaHasRules;\nfunction schemaHasRulesButRef(schema, RULES) {\n if (typeof schema == \"boolean\")\n return !schema;\n for (const key in schema)\n if (key !== \"$ref\" && RULES.all[key])\n return true;\n return false;\n}\nexports.schemaHasRulesButRef = schemaHasRulesButRef;\nfunction schemaRefOrVal({ topSchemaRef, schemaPath }, schema, keyword, $data) {\n if (!$data) {\n if (typeof schema == \"number\" || typeof schema == \"boolean\")\n return schema;\n if (typeof schema == \"string\")\n return (0, codegen_1._) `${schema}`;\n }\n return (0, codegen_1._) `${topSchemaRef}${schemaPath}${(0, codegen_1.getProperty)(keyword)}`;\n}\nexports.schemaRefOrVal = schemaRefOrVal;\nfunction unescapeFragment(str) {\n return unescapeJsonPointer(decodeURIComponent(str));\n}\nexports.unescapeFragment = unescapeFragment;\nfunction escapeFragment(str) {\n return encodeURIComponent(escapeJsonPointer(str));\n}\nexports.escapeFragment = escapeFragment;\nfunction escapeJsonPointer(str) {\n if (typeof str == \"number\")\n return `${str}`;\n return str.replace(/~/g, \"~0\").replace(/\\//g, \"~1\");\n}\nexports.escapeJsonPointer = escapeJsonPointer;\nfunction unescapeJsonPointer(str) {\n return str.replace(/~1/g, \"/\").replace(/~0/g, \"~\");\n}\nexports.unescapeJsonPointer = unescapeJsonPointer;\nfunction eachItem(xs, f) {\n if (Array.isArray(xs)) {\n for (const x of xs)\n f(x);\n }\n else {\n f(xs);\n }\n}\nexports.eachItem = eachItem;\nfunction makeMergeEvaluated({ mergeNames, mergeToName, mergeValues, resultToName, }) {\n return (gen, from, to, toName) => {\n const res = to === undefined\n ? from\n : to instanceof codegen_1.Name\n ? (from instanceof codegen_1.Name ? mergeNames(gen, from, to) : mergeToName(gen, from, to), to)\n : from instanceof codegen_1.Name\n ? (mergeToName(gen, to, from), from)\n : mergeValues(from, to);\n return toName === codegen_1.Name && !(res instanceof codegen_1.Name) ? resultToName(gen, res) : res;\n };\n}\nexports.mergeEvaluated = {\n props: makeMergeEvaluated({\n mergeNames: (gen, from, to) => gen.if((0, codegen_1._) `${to} !== true && ${from} !== undefined`, () => {\n gen.if((0, codegen_1._) `${from} === true`, () => gen.assign(to, true), () => gen.assign(to, (0, codegen_1._) `${to} || {}`).code((0, codegen_1._) `Object.assign(${to}, ${from})`));\n }),\n mergeToName: (gen, from, to) => gen.if((0, codegen_1._) `${to} !== true`, () => {\n if (from === true) {\n gen.assign(to, true);\n }\n else {\n gen.assign(to, (0, codegen_1._) `${to} || {}`);\n setEvaluated(gen, to, from);\n }\n }),\n mergeValues: (from, to) => (from === true ? true : { ...from, ...to }),\n resultToName: evaluatedPropsToName,\n }),\n items: makeMergeEvaluated({\n mergeNames: (gen, from, to) => gen.if((0, codegen_1._) `${to} !== true && ${from} !== undefined`, () => gen.assign(to, (0, codegen_1._) `${from} === true ? true : ${to} > ${from} ? ${to} : ${from}`)),\n mergeToName: (gen, from, to) => gen.if((0, codegen_1._) `${to} !== true`, () => gen.assign(to, from === true ? true : (0, codegen_1._) `${to} > ${from} ? ${to} : ${from}`)),\n mergeValues: (from, to) => (from === true ? 
true : Math.max(from, to)),\n resultToName: (gen, items) => gen.var(\"items\", items),\n }),\n};\nfunction evaluatedPropsToName(gen, ps) {\n if (ps === true)\n return gen.var(\"props\", true);\n const props = gen.var(\"props\", (0, codegen_1._) `{}`);\n if (ps !== undefined)\n setEvaluated(gen, props, ps);\n return props;\n}\nexports.evaluatedPropsToName = evaluatedPropsToName;\nfunction setEvaluated(gen, props, ps) {\n Object.keys(ps).forEach((p) => gen.assign((0, codegen_1._) `${props}${(0, codegen_1.getProperty)(p)}`, true));\n}\nexports.setEvaluated = setEvaluated;\nconst snippets = {};\nfunction useFunc(gen, f) {\n return gen.scopeValue(\"func\", {\n ref: f,\n code: snippets[f.code] || (snippets[f.code] = new code_1._Code(f.code)),\n });\n}\nexports.useFunc = useFunc;\nvar Type;\n(function (Type) {\n Type[Type[\"Num\"] = 0] = \"Num\";\n Type[Type[\"Str\"] = 1] = \"Str\";\n})(Type = exports.Type || (exports.Type = {}));\nfunction getErrorPath(dataProp, dataPropType, jsPropertySyntax) {\n // let path\n if (dataProp instanceof codegen_1.Name) {\n const isNumber = dataPropType === Type.Num;\n return jsPropertySyntax\n ? isNumber\n ? (0, codegen_1._) `\"[\" + ${dataProp} + \"]\"`\n : (0, codegen_1._) `\"['\" + ${dataProp} + \"']\"`\n : isNumber\n ? (0, codegen_1._) `\"/\" + ${dataProp}`\n : (0, codegen_1._) `\"/\" + ${dataProp}.replace(/~/g, \"~0\").replace(/\\\\//g, \"~1\")`; // TODO maybe use global escapePointer\n }\n return jsPropertySyntax ? (0, codegen_1.getProperty)(dataProp).toString() : \"/\" + escapeJsonPointer(dataProp);\n}\nexports.getErrorPath = getErrorPath;\nfunction checkStrictMode(it, msg, mode = it.opts.strictSchema) {\n if (!mode)\n return;\n msg = `strict mode: ${msg}`;\n if (mode === true)\n throw new Error(msg);\n it.self.logger.warn(msg);\n}\nexports.checkStrictMode = checkStrictMode;\n//# sourceMappingURL=util.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.shouldUseRule = exports.shouldUseGroup = exports.schemaHasRulesForType = void 0;\nfunction schemaHasRulesForType({ schema, self }, type) {\n const group = self.RULES.types[type];\n return group && group !== true && shouldUseGroup(schema, group);\n}\nexports.schemaHasRulesForType = schemaHasRulesForType;\nfunction shouldUseGroup(schema, group) {\n return group.rules.some((rule) => shouldUseRule(schema, rule));\n}\nexports.shouldUseGroup = shouldUseGroup;\nfunction shouldUseRule(schema, rule) {\n var _a;\n return (schema[rule.keyword] !== undefined ||\n ((_a = rule.definition.implements) === null || _a === void 0 ? 
void 0 : _a.some((kwd) => schema[kwd] !== undefined)));\n}\nexports.shouldUseRule = shouldUseRule;\n//# sourceMappingURL=applicability.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.boolOrEmptySchema = exports.topBoolOrEmptySchema = void 0;\nconst errors_1 = require(\"../errors\");\nconst codegen_1 = require(\"../codegen\");\nconst names_1 = require(\"../names\");\nconst boolError = {\n message: \"boolean schema is false\",\n};\nfunction topBoolOrEmptySchema(it) {\n const { gen, schema, validateName } = it;\n if (schema === false) {\n falseSchemaError(it, false);\n }\n else if (typeof schema == \"object\" && schema.$async === true) {\n gen.return(names_1.default.data);\n }\n else {\n gen.assign((0, codegen_1._) `${validateName}.errors`, null);\n gen.return(true);\n }\n}\nexports.topBoolOrEmptySchema = topBoolOrEmptySchema;\nfunction boolOrEmptySchema(it, valid) {\n const { gen, schema } = it;\n if (schema === false) {\n gen.var(valid, false); // TODO var\n falseSchemaError(it);\n }\n else {\n gen.var(valid, true); // TODO var\n }\n}\nexports.boolOrEmptySchema = boolOrEmptySchema;\nfunction falseSchemaError(it, overrideAllErrors) {\n const { gen, data } = it;\n // TODO maybe some other interface should be used for non-keyword validation errors...\n const cxt = {\n gen,\n keyword: \"false schema\",\n data,\n schema: false,\n schemaCode: false,\n schemaValue: false,\n params: {},\n it,\n };\n (0, errors_1.reportError)(cxt, boolError, undefined, overrideAllErrors);\n}\n//# sourceMappingURL=boolSchema.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.reportTypeError = exports.checkDataTypes = exports.checkDataType = exports.coerceAndCheckDataType = exports.getJSONTypes = exports.getSchemaTypes = exports.DataType = void 0;\nconst rules_1 = require(\"../rules\");\nconst applicability_1 = require(\"./applicability\");\nconst errors_1 = require(\"../errors\");\nconst codegen_1 = require(\"../codegen\");\nconst util_1 = require(\"../util\");\nvar DataType;\n(function (DataType) {\n DataType[DataType[\"Correct\"] = 0] = \"Correct\";\n DataType[DataType[\"Wrong\"] = 1] = \"Wrong\";\n})(DataType = exports.DataType || (exports.DataType = {}));\nfunction getSchemaTypes(schema) {\n const types = getJSONTypes(schema.type);\n const hasNull = types.includes(\"null\");\n if (hasNull) {\n if (schema.nullable === false)\n throw new Error(\"type: null contradicts nullable: false\");\n }\n else {\n if (!types.length && schema.nullable !== undefined) {\n throw new Error('\"nullable\" cannot be used without \"type\"');\n }\n if (schema.nullable === true)\n types.push(\"null\");\n }\n return types;\n}\nexports.getSchemaTypes = getSchemaTypes;\nfunction getJSONTypes(ts) {\n const types = Array.isArray(ts) ? ts : ts ? 
[ts] : [];\n if (types.every(rules_1.isJSONType))\n return types;\n throw new Error(\"type must be JSONType or JSONType[]: \" + types.join(\",\"));\n}\nexports.getJSONTypes = getJSONTypes;\nfunction coerceAndCheckDataType(it, types) {\n const { gen, data, opts } = it;\n const coerceTo = coerceToTypes(types, opts.coerceTypes);\n const checkTypes = types.length > 0 &&\n !(coerceTo.length === 0 && types.length === 1 && (0, applicability_1.schemaHasRulesForType)(it, types[0]));\n if (checkTypes) {\n const wrongType = checkDataTypes(types, data, opts.strictNumbers, DataType.Wrong);\n gen.if(wrongType, () => {\n if (coerceTo.length)\n coerceData(it, types, coerceTo);\n else\n reportTypeError(it);\n });\n }\n return checkTypes;\n}\nexports.coerceAndCheckDataType = coerceAndCheckDataType;\nconst COERCIBLE = new Set([\"string\", \"number\", \"integer\", \"boolean\", \"null\"]);\nfunction coerceToTypes(types, coerceTypes) {\n return coerceTypes\n ? types.filter((t) => COERCIBLE.has(t) || (coerceTypes === \"array\" && t === \"array\"))\n : [];\n}\nfunction coerceData(it, types, coerceTo) {\n const { gen, data, opts } = it;\n const dataType = gen.let(\"dataType\", (0, codegen_1._) `typeof ${data}`);\n const coerced = gen.let(\"coerced\", (0, codegen_1._) `undefined`);\n if (opts.coerceTypes === \"array\") {\n gen.if((0, codegen_1._) `${dataType} == 'object' && Array.isArray(${data}) && ${data}.length == 1`, () => gen\n .assign(data, (0, codegen_1._) `${data}[0]`)\n .assign(dataType, (0, codegen_1._) `typeof ${data}`)\n .if(checkDataTypes(types, data, opts.strictNumbers), () => gen.assign(coerced, data)));\n }\n gen.if((0, codegen_1._) `${coerced} !== undefined`);\n for (const t of coerceTo) {\n if (COERCIBLE.has(t) || (t === \"array\" && opts.coerceTypes === \"array\")) {\n coerceSpecificType(t);\n }\n }\n gen.else();\n reportTypeError(it);\n gen.endIf();\n gen.if((0, codegen_1._) `${coerced} !== undefined`, () => {\n gen.assign(data, coerced);\n assignParentData(it, coerced);\n });\n function coerceSpecificType(t) {\n switch (t) {\n case \"string\":\n gen\n .elseIf((0, codegen_1._) `${dataType} == \"number\" || ${dataType} == \"boolean\"`)\n .assign(coerced, (0, codegen_1._) `\"\" + ${data}`)\n .elseIf((0, codegen_1._) `${data} === null`)\n .assign(coerced, (0, codegen_1._) `\"\"`);\n return;\n case \"number\":\n gen\n .elseIf((0, codegen_1._) `${dataType} == \"boolean\" || ${data} === null\n || (${dataType} == \"string\" && ${data} && ${data} == +${data})`)\n .assign(coerced, (0, codegen_1._) `+${data}`);\n return;\n case \"integer\":\n gen\n .elseIf((0, codegen_1._) `${dataType} === \"boolean\" || ${data} === null\n || (${dataType} === \"string\" && ${data} && ${data} == +${data} && !(${data} % 1))`)\n .assign(coerced, (0, codegen_1._) `+${data}`);\n return;\n case \"boolean\":\n gen\n .elseIf((0, codegen_1._) `${data} === \"false\" || ${data} === 0 || ${data} === null`)\n .assign(coerced, false)\n .elseIf((0, codegen_1._) `${data} === \"true\" || ${data} === 1`)\n .assign(coerced, true);\n return;\n case \"null\":\n gen.elseIf((0, codegen_1._) `${data} === \"\" || ${data} === 0 || ${data} === false`);\n gen.assign(coerced, null);\n return;\n case \"array\":\n gen\n .elseIf((0, codegen_1._) `${dataType} === \"string\" || ${dataType} === \"number\"\n || ${dataType} === \"boolean\" || ${data} === null`)\n .assign(coerced, (0, codegen_1._) `[${data}]`);\n }\n }\n}\nfunction assignParentData({ gen, parentData, parentDataProperty }, expr) {\n // TODO use gen.property\n gen.if((0, codegen_1._) 
`${parentData} !== undefined`, () => gen.assign((0, codegen_1._) `${parentData}[${parentDataProperty}]`, expr));\n}\nfunction checkDataType(dataType, data, strictNums, correct = DataType.Correct) {\n const EQ = correct === DataType.Correct ? codegen_1.operators.EQ : codegen_1.operators.NEQ;\n let cond;\n switch (dataType) {\n case \"null\":\n return (0, codegen_1._) `${data} ${EQ} null`;\n case \"array\":\n cond = (0, codegen_1._) `Array.isArray(${data})`;\n break;\n case \"object\":\n cond = (0, codegen_1._) `${data} && typeof ${data} == \"object\" && !Array.isArray(${data})`;\n break;\n case \"integer\":\n cond = numCond((0, codegen_1._) `!(${data} % 1) && !isNaN(${data})`);\n break;\n case \"number\":\n cond = numCond();\n break;\n default:\n return (0, codegen_1._) `typeof ${data} ${EQ} ${dataType}`;\n }\n return correct === DataType.Correct ? cond : (0, codegen_1.not)(cond);\n function numCond(_cond = codegen_1.nil) {\n return (0, codegen_1.and)((0, codegen_1._) `typeof ${data} == \"number\"`, _cond, strictNums ? (0, codegen_1._) `isFinite(${data})` : codegen_1.nil);\n }\n}\nexports.checkDataType = checkDataType;\nfunction checkDataTypes(dataTypes, data, strictNums, correct) {\n if (dataTypes.length === 1) {\n return checkDataType(dataTypes[0], data, strictNums, correct);\n }\n let cond;\n const types = (0, util_1.toHash)(dataTypes);\n if (types.array && types.object) {\n const notObj = (0, codegen_1._) `typeof ${data} != \"object\"`;\n cond = types.null ? notObj : (0, codegen_1._) `!${data} || ${notObj}`;\n delete types.null;\n delete types.array;\n delete types.object;\n }\n else {\n cond = codegen_1.nil;\n }\n if (types.number)\n delete types.integer;\n for (const t in types)\n cond = (0, codegen_1.and)(cond, checkDataType(t, data, strictNums, correct));\n return cond;\n}\nexports.checkDataTypes = checkDataTypes;\nconst typeError = {\n message: ({ schema }) => `must be ${schema}`,\n params: ({ schema, schemaValue }) => typeof schema == \"string\" ? 
(0, codegen_1._) `{type: ${schema}}` : (0, codegen_1._) `{type: ${schemaValue}}`,\n};\nfunction reportTypeError(it) {\n const cxt = getTypeErrorContext(it);\n (0, errors_1.reportError)(cxt, typeError);\n}\nexports.reportTypeError = reportTypeError;\nfunction getTypeErrorContext(it) {\n const { gen, data, schema } = it;\n const schemaCode = (0, util_1.schemaRefOrVal)(it, schema, \"type\");\n return {\n gen,\n keyword: \"type\",\n data,\n schema: schema.type,\n schemaCode,\n schemaValue: schemaCode,\n parentSchema: schema,\n params: {},\n it,\n };\n}\n//# sourceMappingURL=dataType.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.assignDefaults = void 0;\nconst codegen_1 = require(\"../codegen\");\nconst util_1 = require(\"../util\");\nfunction assignDefaults(it, ty) {\n const { properties, items } = it.schema;\n if (ty === \"object\" && properties) {\n for (const key in properties) {\n assignDefault(it, key, properties[key].default);\n }\n }\n else if (ty === \"array\" && Array.isArray(items)) {\n items.forEach((sch, i) => assignDefault(it, i, sch.default));\n }\n}\nexports.assignDefaults = assignDefaults;\nfunction assignDefault(it, prop, defaultValue) {\n const { gen, compositeRule, data, opts } = it;\n if (defaultValue === undefined)\n return;\n const childData = (0, codegen_1._) `${data}${(0, codegen_1.getProperty)(prop)}`;\n if (compositeRule) {\n (0, util_1.checkStrictMode)(it, `default is ignored for: ${childData}`);\n return;\n }\n let condition = (0, codegen_1._) `${childData} === undefined`;\n if (opts.useDefaults === \"empty\") {\n condition = (0, codegen_1._) `${condition} || ${childData} === null || ${childData} === \"\"`;\n }\n // `${childData} === undefined` +\n // (opts.useDefaults === \"empty\" ? 
` || ${childData} === null || ${childData} === \"\"` : \"\")\n gen.if(condition, (0, codegen_1._) `${childData} = ${(0, codegen_1.stringify)(defaultValue)}`);\n}\n//# sourceMappingURL=defaults.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getData = exports.KeywordCxt = exports.validateFunctionCode = void 0;\nconst boolSchema_1 = require(\"./boolSchema\");\nconst dataType_1 = require(\"./dataType\");\nconst applicability_1 = require(\"./applicability\");\nconst dataType_2 = require(\"./dataType\");\nconst defaults_1 = require(\"./defaults\");\nconst keyword_1 = require(\"./keyword\");\nconst subschema_1 = require(\"./subschema\");\nconst codegen_1 = require(\"../codegen\");\nconst names_1 = require(\"../names\");\nconst resolve_1 = require(\"../resolve\");\nconst util_1 = require(\"../util\");\nconst errors_1 = require(\"../errors\");\n// schema compilation - generates validation function, subschemaCode (below) is used for subschemas\nfunction validateFunctionCode(it) {\n if (isSchemaObj(it)) {\n checkKeywords(it);\n if (schemaCxtHasRules(it)) {\n topSchemaObjCode(it);\n return;\n }\n }\n validateFunction(it, () => (0, boolSchema_1.topBoolOrEmptySchema)(it));\n}\nexports.validateFunctionCode = validateFunctionCode;\nfunction validateFunction({ gen, validateName, schema, schemaEnv, opts }, body) {\n if (opts.code.es5) {\n gen.func(validateName, (0, codegen_1._) `${names_1.default.data}, ${names_1.default.valCxt}`, schemaEnv.$async, () => {\n gen.code((0, codegen_1._) `\"use strict\"; ${funcSourceUrl(schema, opts)}`);\n destructureValCxtES5(gen, opts);\n gen.code(body);\n });\n }\n else {\n gen.func(validateName, (0, codegen_1._) `${names_1.default.data}, ${destructureValCxt(opts)}`, schemaEnv.$async, () => gen.code(funcSourceUrl(schema, opts)).code(body));\n }\n}\nfunction destructureValCxt(opts) {\n return (0, codegen_1._) `{${names_1.default.instancePath}=\"\", ${names_1.default.parentData}, ${names_1.default.parentDataProperty}, ${names_1.default.rootData}=${names_1.default.data}${opts.dynamicRef ? 
(0, codegen_1._) `, ${names_1.default.dynamicAnchors}={}` : codegen_1.nil}}={}`;\n}\nfunction destructureValCxtES5(gen, opts) {\n gen.if(names_1.default.valCxt, () => {\n gen.var(names_1.default.instancePath, (0, codegen_1._) `${names_1.default.valCxt}.${names_1.default.instancePath}`);\n gen.var(names_1.default.parentData, (0, codegen_1._) `${names_1.default.valCxt}.${names_1.default.parentData}`);\n gen.var(names_1.default.parentDataProperty, (0, codegen_1._) `${names_1.default.valCxt}.${names_1.default.parentDataProperty}`);\n gen.var(names_1.default.rootData, (0, codegen_1._) `${names_1.default.valCxt}.${names_1.default.rootData}`);\n if (opts.dynamicRef)\n gen.var(names_1.default.dynamicAnchors, (0, codegen_1._) `${names_1.default.valCxt}.${names_1.default.dynamicAnchors}`);\n }, () => {\n gen.var(names_1.default.instancePath, (0, codegen_1._) `\"\"`);\n gen.var(names_1.default.parentData, (0, codegen_1._) `undefined`);\n gen.var(names_1.default.parentDataProperty, (0, codegen_1._) `undefined`);\n gen.var(names_1.default.rootData, names_1.default.data);\n if (opts.dynamicRef)\n gen.var(names_1.default.dynamicAnchors, (0, codegen_1._) `{}`);\n });\n}\nfunction topSchemaObjCode(it) {\n const { schema, opts, gen } = it;\n validateFunction(it, () => {\n if (opts.$comment && schema.$comment)\n commentKeyword(it);\n checkNoDefault(it);\n gen.let(names_1.default.vErrors, null);\n gen.let(names_1.default.errors, 0);\n if (opts.unevaluated)\n resetEvaluated(it);\n typeAndKeywords(it);\n returnResults(it);\n });\n return;\n}\nfunction resetEvaluated(it) {\n // TODO maybe some hook to execute it in the end to check whether props/items are Name, as in assignEvaluated\n const { gen, validateName } = it;\n it.evaluated = gen.const(\"evaluated\", (0, codegen_1._) `${validateName}.evaluated`);\n gen.if((0, codegen_1._) `${it.evaluated}.dynamicProps`, () => gen.assign((0, codegen_1._) `${it.evaluated}.props`, (0, codegen_1._) `undefined`));\n gen.if((0, codegen_1._) `${it.evaluated}.dynamicItems`, () => gen.assign((0, codegen_1._) `${it.evaluated}.items`, (0, codegen_1._) `undefined`));\n}\nfunction funcSourceUrl(schema, opts) {\n const schId = typeof schema == \"object\" && schema[opts.schemaId];\n return schId && (opts.code.source || opts.code.process) ? 
(0, codegen_1._) `/*# sourceURL=${schId} */` : codegen_1.nil;\n}\n// schema compilation - this function is used recursively to generate code for sub-schemas\nfunction subschemaCode(it, valid) {\n if (isSchemaObj(it)) {\n checkKeywords(it);\n if (schemaCxtHasRules(it)) {\n subSchemaObjCode(it, valid);\n return;\n }\n }\n (0, boolSchema_1.boolOrEmptySchema)(it, valid);\n}\nfunction schemaCxtHasRules({ schema, self }) {\n if (typeof schema == \"boolean\")\n return !schema;\n for (const key in schema)\n if (self.RULES.all[key])\n return true;\n return false;\n}\nfunction isSchemaObj(it) {\n return typeof it.schema != \"boolean\";\n}\nfunction subSchemaObjCode(it, valid) {\n const { schema, gen, opts } = it;\n if (opts.$comment && schema.$comment)\n commentKeyword(it);\n updateContext(it);\n checkAsyncSchema(it);\n const errsCount = gen.const(\"_errs\", names_1.default.errors);\n typeAndKeywords(it, errsCount);\n // TODO var\n gen.var(valid, (0, codegen_1._) `${errsCount} === ${names_1.default.errors}`);\n}\nfunction checkKeywords(it) {\n (0, util_1.checkUnknownRules)(it);\n checkRefsAndKeywords(it);\n}\nfunction typeAndKeywords(it, errsCount) {\n if (it.opts.jtd)\n return schemaKeywords(it, [], false, errsCount);\n const types = (0, dataType_1.getSchemaTypes)(it.schema);\n const checkedTypes = (0, dataType_1.coerceAndCheckDataType)(it, types);\n schemaKeywords(it, types, !checkedTypes, errsCount);\n}\nfunction checkRefsAndKeywords(it) {\n const { schema, errSchemaPath, opts, self } = it;\n if (schema.$ref && opts.ignoreKeywordsWithRef && (0, util_1.schemaHasRulesButRef)(schema, self.RULES)) {\n self.logger.warn(`$ref: keywords ignored in schema at path \"${errSchemaPath}\"`);\n }\n}\nfunction checkNoDefault(it) {\n const { schema, opts } = it;\n if (schema.default !== undefined && opts.useDefaults && opts.strictSchema) {\n (0, util_1.checkStrictMode)(it, \"default is ignored in the schema root\");\n }\n}\nfunction updateContext(it) {\n const schId = it.schema[it.opts.schemaId];\n if (schId)\n it.baseId = (0, resolve_1.resolveUrl)(it.opts.uriResolver, it.baseId, schId);\n}\nfunction checkAsyncSchema(it) {\n if (it.schema.$async && !it.schemaEnv.$async)\n throw new Error(\"async schema in sync schema\");\n}\nfunction commentKeyword({ gen, schemaEnv, schema, errSchemaPath, opts }) {\n const msg = schema.$comment;\n if (opts.$comment === true) {\n gen.code((0, codegen_1._) `${names_1.default.self}.logger.log(${msg})`);\n }\n else if (typeof opts.$comment == \"function\") {\n const schemaPath = (0, codegen_1.str) `${errSchemaPath}/$comment`;\n const rootName = gen.scopeValue(\"root\", { ref: schemaEnv.root });\n gen.code((0, codegen_1._) `${names_1.default.self}.opts.$comment(${msg}, ${schemaPath}, ${rootName}.schema)`);\n }\n}\nfunction returnResults(it) {\n const { gen, schemaEnv, validateName, ValidationError, opts } = it;\n if (schemaEnv.$async) {\n // TODO assign unevaluated\n gen.if((0, codegen_1._) `${names_1.default.errors} === 0`, () => gen.return(names_1.default.data), () => gen.throw((0, codegen_1._) `new ${ValidationError}(${names_1.default.vErrors})`));\n }\n else {\n gen.assign((0, codegen_1._) `${validateName}.errors`, names_1.default.vErrors);\n if (opts.unevaluated)\n assignEvaluated(it);\n gen.return((0, codegen_1._) `${names_1.default.errors} === 0`);\n }\n}\nfunction assignEvaluated({ gen, evaluated, props, items }) {\n if (props instanceof codegen_1.Name)\n gen.assign((0, codegen_1._) `${evaluated}.props`, props);\n if (items instanceof codegen_1.Name)\n gen.assign((0, 
codegen_1._) `${evaluated}.items`, items);\n}\nfunction schemaKeywords(it, types, typeErrors, errsCount) {\n const { gen, schema, data, allErrors, opts, self } = it;\n const { RULES } = self;\n if (schema.$ref && (opts.ignoreKeywordsWithRef || !(0, util_1.schemaHasRulesButRef)(schema, RULES))) {\n gen.block(() => keywordCode(it, \"$ref\", RULES.all.$ref.definition)); // TODO typecast\n return;\n }\n if (!opts.jtd)\n checkStrictTypes(it, types);\n gen.block(() => {\n for (const group of RULES.rules)\n groupKeywords(group);\n groupKeywords(RULES.post);\n });\n function groupKeywords(group) {\n if (!(0, applicability_1.shouldUseGroup)(schema, group))\n return;\n if (group.type) {\n gen.if((0, dataType_2.checkDataType)(group.type, data, opts.strictNumbers));\n iterateKeywords(it, group);\n if (types.length === 1 && types[0] === group.type && typeErrors) {\n gen.else();\n (0, dataType_2.reportTypeError)(it);\n }\n gen.endIf();\n }\n else {\n iterateKeywords(it, group);\n }\n // TODO make it \"ok\" call?\n if (!allErrors)\n gen.if((0, codegen_1._) `${names_1.default.errors} === ${errsCount || 0}`);\n }\n}\nfunction iterateKeywords(it, group) {\n const { gen, schema, opts: { useDefaults }, } = it;\n if (useDefaults)\n (0, defaults_1.assignDefaults)(it, group.type);\n gen.block(() => {\n for (const rule of group.rules) {\n if ((0, applicability_1.shouldUseRule)(schema, rule)) {\n keywordCode(it, rule.keyword, rule.definition, group.type);\n }\n }\n });\n}\nfunction checkStrictTypes(it, types) {\n if (it.schemaEnv.meta || !it.opts.strictTypes)\n return;\n checkContextTypes(it, types);\n if (!it.opts.allowUnionTypes)\n checkMultipleTypes(it, types);\n checkKeywordTypes(it, it.dataTypes);\n}\nfunction checkContextTypes(it, types) {\n if (!types.length)\n return;\n if (!it.dataTypes.length) {\n it.dataTypes = types;\n return;\n }\n types.forEach((t) => {\n if (!includesType(it.dataTypes, t)) {\n strictTypesError(it, `type \"${t}\" not allowed by context \"${it.dataTypes.join(\",\")}\"`);\n }\n });\n it.dataTypes = it.dataTypes.filter((t) => includesType(types, t));\n}\nfunction checkMultipleTypes(it, ts) {\n if (ts.length > 1 && !(ts.length === 2 && ts.includes(\"null\"))) {\n strictTypesError(it, \"use allowUnionTypes to allow union type keyword\");\n }\n}\nfunction checkKeywordTypes(it, ts) {\n const rules = it.self.RULES.all;\n for (const keyword in rules) {\n const rule = rules[keyword];\n if (typeof rule == \"object\" && (0, applicability_1.shouldUseRule)(it.schema, rule)) {\n const { type } = rule.definition;\n if (type.length && !type.some((t) => hasApplicableType(ts, t))) {\n strictTypesError(it, `missing type \"${type.join(\",\")}\" for keyword \"${keyword}\"`);\n }\n }\n }\n}\nfunction hasApplicableType(schTs, kwdT) {\n return schTs.includes(kwdT) || (kwdT === \"number\" && schTs.includes(\"integer\"));\n}\nfunction includesType(ts, t) {\n return ts.includes(t) || (t === \"integer\" && ts.includes(\"number\"));\n}\nfunction strictTypesError(it, msg) {\n const schemaPath = it.schemaEnv.baseId + it.errSchemaPath;\n msg += ` at \"${schemaPath}\" (strictTypes)`;\n (0, util_1.checkStrictMode)(it, msg, it.opts.strictTypes);\n}\nclass KeywordCxt {\n constructor(it, def, keyword) {\n (0, keyword_1.validateKeywordUsage)(it, def, keyword);\n this.gen = it.gen;\n this.allErrors = it.allErrors;\n this.keyword = keyword;\n this.data = it.data;\n this.schema = it.schema[keyword];\n this.$data = def.$data && it.opts.$data && this.schema && this.schema.$data;\n this.schemaValue = (0, 
util_1.schemaRefOrVal)(it, this.schema, keyword, this.$data);\n this.schemaType = def.schemaType;\n this.parentSchema = it.schema;\n this.params = {};\n this.it = it;\n this.def = def;\n if (this.$data) {\n this.schemaCode = it.gen.const(\"vSchema\", getData(this.$data, it));\n }\n else {\n this.schemaCode = this.schemaValue;\n if (!(0, keyword_1.validSchemaType)(this.schema, def.schemaType, def.allowUndefined)) {\n throw new Error(`${keyword} value must be ${JSON.stringify(def.schemaType)}`);\n }\n }\n if (\"code\" in def ? def.trackErrors : def.errors !== false) {\n this.errsCount = it.gen.const(\"_errs\", names_1.default.errors);\n }\n }\n result(condition, successAction, failAction) {\n this.failResult((0, codegen_1.not)(condition), successAction, failAction);\n }\n failResult(condition, successAction, failAction) {\n this.gen.if(condition);\n if (failAction)\n failAction();\n else\n this.error();\n if (successAction) {\n this.gen.else();\n successAction();\n if (this.allErrors)\n this.gen.endIf();\n }\n else {\n if (this.allErrors)\n this.gen.endIf();\n else\n this.gen.else();\n }\n }\n pass(condition, failAction) {\n this.failResult((0, codegen_1.not)(condition), undefined, failAction);\n }\n fail(condition) {\n if (condition === undefined) {\n this.error();\n if (!this.allErrors)\n this.gen.if(false); // this branch will be removed by gen.optimize\n return;\n }\n this.gen.if(condition);\n this.error();\n if (this.allErrors)\n this.gen.endIf();\n else\n this.gen.else();\n }\n fail$data(condition) {\n if (!this.$data)\n return this.fail(condition);\n const { schemaCode } = this;\n this.fail((0, codegen_1._) `${schemaCode} !== undefined && (${(0, codegen_1.or)(this.invalid$data(), condition)})`);\n }\n error(append, errorParams, errorPaths) {\n if (errorParams) {\n this.setParams(errorParams);\n this._error(append, errorPaths);\n this.setParams({});\n return;\n }\n this._error(append, errorPaths);\n }\n _error(append, errorPaths) {\n ;\n (append ? errors_1.reportExtraError : errors_1.reportError)(this, this.def.error, errorPaths);\n }\n $dataError() {\n (0, errors_1.reportError)(this, this.def.$dataError || errors_1.keyword$DataError);\n }\n reset() {\n if (this.errsCount === undefined)\n throw new Error('add \"trackErrors\" to keyword definition');\n (0, errors_1.resetErrorsCount)(this.gen, this.errsCount);\n }\n ok(cond) {\n if (!this.allErrors)\n this.gen.if(cond);\n }\n setParams(obj, assign) {\n if (assign)\n Object.assign(this.params, obj);\n else\n this.params = obj;\n }\n block$data(valid, codeBlock, $dataValid = codegen_1.nil) {\n this.gen.block(() => {\n this.check$data(valid, $dataValid);\n codeBlock();\n });\n }\n check$data(valid = codegen_1.nil, $dataValid = codegen_1.nil) {\n if (!this.$data)\n return;\n const { gen, schemaCode, schemaType, def } = this;\n gen.if((0, codegen_1.or)((0, codegen_1._) `${schemaCode} === undefined`, $dataValid));\n if (valid !== codegen_1.nil)\n gen.assign(valid, true);\n if (schemaType.length || def.validateSchema) {\n gen.elseIf(this.invalid$data());\n this.$dataError();\n if (valid !== codegen_1.nil)\n gen.assign(valid, false);\n }\n gen.else();\n }\n invalid$data() {\n const { gen, schemaCode, schemaType, def, it } = this;\n return (0, codegen_1.or)(wrong$DataType(), invalid$DataSchema());\n function wrong$DataType() {\n if (schemaType.length) {\n /* istanbul ignore if */\n if (!(schemaCode instanceof codegen_1.Name))\n throw new Error(\"ajv implementation error\");\n const st = Array.isArray(schemaType) ? 
schemaType : [schemaType];\n return (0, codegen_1._) `${(0, dataType_2.checkDataTypes)(st, schemaCode, it.opts.strictNumbers, dataType_2.DataType.Wrong)}`;\n }\n return codegen_1.nil;\n }\n function invalid$DataSchema() {\n if (def.validateSchema) {\n const validateSchemaRef = gen.scopeValue(\"validate$data\", { ref: def.validateSchema }); // TODO value.code for standalone\n return (0, codegen_1._) `!${validateSchemaRef}(${schemaCode})`;\n }\n return codegen_1.nil;\n }\n }\n subschema(appl, valid) {\n const subschema = (0, subschema_1.getSubschema)(this.it, appl);\n (0, subschema_1.extendSubschemaData)(subschema, this.it, appl);\n (0, subschema_1.extendSubschemaMode)(subschema, appl);\n const nextContext = { ...this.it, ...subschema, items: undefined, props: undefined };\n subschemaCode(nextContext, valid);\n return nextContext;\n }\n mergeEvaluated(schemaCxt, toName) {\n const { it, gen } = this;\n if (!it.opts.unevaluated)\n return;\n if (it.props !== true && schemaCxt.props !== undefined) {\n it.props = util_1.mergeEvaluated.props(gen, schemaCxt.props, it.props, toName);\n }\n if (it.items !== true && schemaCxt.items !== undefined) {\n it.items = util_1.mergeEvaluated.items(gen, schemaCxt.items, it.items, toName);\n }\n }\n mergeValidEvaluated(schemaCxt, valid) {\n const { it, gen } = this;\n if (it.opts.unevaluated && (it.props !== true || it.items !== true)) {\n gen.if(valid, () => this.mergeEvaluated(schemaCxt, codegen_1.Name));\n return true;\n }\n }\n}\nexports.KeywordCxt = KeywordCxt;\nfunction keywordCode(it, keyword, def, ruleType) {\n const cxt = new KeywordCxt(it, def, keyword);\n if (\"code\" in def) {\n def.code(cxt, ruleType);\n }\n else if (cxt.$data && def.validate) {\n (0, keyword_1.funcKeywordCode)(cxt, def);\n }\n else if (\"macro\" in def) {\n (0, keyword_1.macroKeywordCode)(cxt, def);\n }\n else if (def.compile || def.validate) {\n (0, keyword_1.funcKeywordCode)(cxt, def);\n }\n}\nconst JSON_POINTER = /^\\/(?:[^~]|~0|~1)*$/;\nconst RELATIVE_JSON_POINTER = /^([0-9]+)(#|\\/(?:[^~]|~0|~1)*)?$/;\nfunction getData($data, { dataLevel, dataNames, dataPathArr }) {\n let jsonPointer;\n let data;\n if ($data === \"\")\n return names_1.default.rootData;\n if ($data[0] === \"/\") {\n if (!JSON_POINTER.test($data))\n throw new Error(`Invalid JSON-pointer: ${$data}`);\n jsonPointer = $data;\n data = names_1.default.rootData;\n }\n else {\n const matches = RELATIVE_JSON_POINTER.exec($data);\n if (!matches)\n throw new Error(`Invalid JSON-pointer: ${$data}`);\n const up = +matches[1];\n jsonPointer = matches[2];\n if (jsonPointer === \"#\") {\n if (up >= dataLevel)\n throw new Error(errorMsg(\"property/index\", up));\n return dataPathArr[dataLevel - up];\n }\n if (up > dataLevel)\n throw new Error(errorMsg(\"data\", up));\n data = dataNames[dataLevel - up];\n if (!jsonPointer)\n return data;\n }\n let expr = data;\n const segments = jsonPointer.split(\"/\");\n for (const segment of segments) {\n if (segment) {\n data = (0, codegen_1._) `${data}${(0, codegen_1.getProperty)((0, util_1.unescapeJsonPointer)(segment))}`;\n expr = (0, codegen_1._) `${expr} && ${data}`;\n }\n }\n return expr;\n function errorMsg(pointerType, up) {\n return `Cannot access ${pointerType} ${up} levels up, current level is ${dataLevel}`;\n }\n}\nexports.getData = getData;\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.validateKeywordUsage = exports.validSchemaType = exports.funcKeywordCode = exports.macroKeywordCode = void 
0;\nconst codegen_1 = require(\"../codegen\");\nconst names_1 = require(\"../names\");\nconst code_1 = require(\"../../vocabularies/code\");\nconst errors_1 = require(\"../errors\");\nfunction macroKeywordCode(cxt, def) {\n const { gen, keyword, schema, parentSchema, it } = cxt;\n const macroSchema = def.macro.call(it.self, schema, parentSchema, it);\n const schemaRef = useKeyword(gen, keyword, macroSchema);\n if (it.opts.validateSchema !== false)\n it.self.validateSchema(macroSchema, true);\n const valid = gen.name(\"valid\");\n cxt.subschema({\n schema: macroSchema,\n schemaPath: codegen_1.nil,\n errSchemaPath: `${it.errSchemaPath}/${keyword}`,\n topSchemaRef: schemaRef,\n compositeRule: true,\n }, valid);\n cxt.pass(valid, () => cxt.error(true));\n}\nexports.macroKeywordCode = macroKeywordCode;\nfunction funcKeywordCode(cxt, def) {\n var _a;\n const { gen, keyword, schema, parentSchema, $data, it } = cxt;\n checkAsyncKeyword(it, def);\n const validate = !$data && def.compile ? def.compile.call(it.self, schema, parentSchema, it) : def.validate;\n const validateRef = useKeyword(gen, keyword, validate);\n const valid = gen.let(\"valid\");\n cxt.block$data(valid, validateKeyword);\n cxt.ok((_a = def.valid) !== null && _a !== void 0 ? _a : valid);\n function validateKeyword() {\n if (def.errors === false) {\n assignValid();\n if (def.modifying)\n modifyData(cxt);\n reportErrs(() => cxt.error());\n }\n else {\n const ruleErrs = def.async ? validateAsync() : validateSync();\n if (def.modifying)\n modifyData(cxt);\n reportErrs(() => addErrs(cxt, ruleErrs));\n }\n }\n function validateAsync() {\n const ruleErrs = gen.let(\"ruleErrs\", null);\n gen.try(() => assignValid((0, codegen_1._) `await `), (e) => gen.assign(valid, false).if((0, codegen_1._) `${e} instanceof ${it.ValidationError}`, () => gen.assign(ruleErrs, (0, codegen_1._) `${e}.errors`), () => gen.throw(e)));\n return ruleErrs;\n }\n function validateSync() {\n const validateErrs = (0, codegen_1._) `${validateRef}.errors`;\n gen.assign(validateErrs, null);\n assignValid(codegen_1.nil);\n return validateErrs;\n }\n function assignValid(_await = def.async ? (0, codegen_1._) `await ` : codegen_1.nil) {\n const passCxt = it.opts.passContext ? names_1.default.this : names_1.default.self;\n const passSchema = !((\"compile\" in def && !$data) || def.schema === false);\n gen.assign(valid, (0, codegen_1._) `${_await}${(0, code_1.callValidateCode)(cxt, validateRef, passCxt, passSchema)}`, def.modifying);\n }\n function reportErrs(errors) {\n var _a;\n gen.if((0, codegen_1.not)((_a = def.valid) !== null && _a !== void 0 ? _a : valid), errors);\n }\n}\nexports.funcKeywordCode = funcKeywordCode;\nfunction modifyData(cxt) {\n const { gen, data, it } = cxt;\n gen.if(it.parentData, () => gen.assign(data, (0, codegen_1._) `${it.parentData}[${it.parentDataProperty}]`));\n}\nfunction addErrs(cxt, errs) {\n const { gen } = cxt;\n gen.if((0, codegen_1._) `Array.isArray(${errs})`, () => {\n gen\n .assign(names_1.default.vErrors, (0, codegen_1._) `${names_1.default.vErrors} === null ? 
${errs} : ${names_1.default.vErrors}.concat(${errs})`)\n .assign(names_1.default.errors, (0, codegen_1._) `${names_1.default.vErrors}.length`);\n (0, errors_1.extendErrors)(cxt);\n }, () => cxt.error());\n}\nfunction checkAsyncKeyword({ schemaEnv }, def) {\n if (def.async && !schemaEnv.$async)\n throw new Error(\"async keyword in sync schema\");\n}\nfunction useKeyword(gen, keyword, result) {\n if (result === undefined)\n throw new Error(`keyword \"${keyword}\" failed to compile`);\n return gen.scopeValue(\"keyword\", typeof result == \"function\" ? { ref: result } : { ref: result, code: (0, codegen_1.stringify)(result) });\n}\nfunction validSchemaType(schema, schemaType, allowUndefined = false) {\n // TODO add tests\n return (!schemaType.length ||\n schemaType.some((st) => st === \"array\"\n ? Array.isArray(schema)\n : st === \"object\"\n ? schema && typeof schema == \"object\" && !Array.isArray(schema)\n : typeof schema == st || (allowUndefined && typeof schema == \"undefined\")));\n}\nexports.validSchemaType = validSchemaType;\nfunction validateKeywordUsage({ schema, opts, self, errSchemaPath }, def, keyword) {\n /* istanbul ignore if */\n if (Array.isArray(def.keyword) ? !def.keyword.includes(keyword) : def.keyword !== keyword) {\n throw new Error(\"ajv implementation error\");\n }\n const deps = def.dependencies;\n if (deps === null || deps === void 0 ? void 0 : deps.some((kwd) => !Object.prototype.hasOwnProperty.call(schema, kwd))) {\n throw new Error(`parent schema must have dependencies of ${keyword}: ${deps.join(\",\")}`);\n }\n if (def.validateSchema) {\n const valid = def.validateSchema(schema[keyword]);\n if (!valid) {\n const msg = `keyword \"${keyword}\" value is invalid at path \"${errSchemaPath}\": ` +\n self.errorsText(def.validateSchema.errors);\n if (opts.validateSchema === \"log\")\n self.logger.error(msg);\n else\n throw new Error(msg);\n }\n }\n}\nexports.validateKeywordUsage = validateKeywordUsage;\n//# sourceMappingURL=keyword.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.extendSubschemaMode = exports.extendSubschemaData = exports.getSubschema = void 0;\nconst codegen_1 = require(\"../codegen\");\nconst util_1 = require(\"../util\");\nfunction getSubschema(it, { keyword, schemaProp, schema, schemaPath, errSchemaPath, topSchemaRef }) {\n if (keyword !== undefined && schema !== undefined) {\n throw new Error('both \"keyword\" and \"schema\" passed, only one allowed');\n }\n if (keyword !== undefined) {\n const sch = it.schema[keyword];\n return schemaProp === undefined\n ? 
{\n schema: sch,\n schemaPath: (0, codegen_1._) `${it.schemaPath}${(0, codegen_1.getProperty)(keyword)}`,\n errSchemaPath: `${it.errSchemaPath}/${keyword}`,\n }\n : {\n schema: sch[schemaProp],\n schemaPath: (0, codegen_1._) `${it.schemaPath}${(0, codegen_1.getProperty)(keyword)}${(0, codegen_1.getProperty)(schemaProp)}`,\n errSchemaPath: `${it.errSchemaPath}/${keyword}/${(0, util_1.escapeFragment)(schemaProp)}`,\n };\n }\n if (schema !== undefined) {\n if (schemaPath === undefined || errSchemaPath === undefined || topSchemaRef === undefined) {\n throw new Error('\"schemaPath\", \"errSchemaPath\" and \"topSchemaRef\" are required with \"schema\"');\n }\n return {\n schema,\n schemaPath,\n topSchemaRef,\n errSchemaPath,\n };\n }\n throw new Error('either \"keyword\" or \"schema\" must be passed');\n}\nexports.getSubschema = getSubschema;\nfunction extendSubschemaData(subschema, it, { dataProp, dataPropType: dpType, data, dataTypes, propertyName }) {\n if (data !== undefined && dataProp !== undefined) {\n throw new Error('both \"data\" and \"dataProp\" passed, only one allowed');\n }\n const { gen } = it;\n if (dataProp !== undefined) {\n const { errorPath, dataPathArr, opts } = it;\n const nextData = gen.let(\"data\", (0, codegen_1._) `${it.data}${(0, codegen_1.getProperty)(dataProp)}`, true);\n dataContextProps(nextData);\n subschema.errorPath = (0, codegen_1.str) `${errorPath}${(0, util_1.getErrorPath)(dataProp, dpType, opts.jsPropertySyntax)}`;\n subschema.parentDataProperty = (0, codegen_1._) `${dataProp}`;\n subschema.dataPathArr = [...dataPathArr, subschema.parentDataProperty];\n }\n if (data !== undefined) {\n const nextData = data instanceof codegen_1.Name ? data : gen.let(\"data\", data, true); // replaceable if used once?\n dataContextProps(nextData);\n if (propertyName !== undefined)\n subschema.propertyName = propertyName;\n // TODO something is possibly wrong here with not changing parentDataProperty and not appending dataPathArr\n }\n if (dataTypes)\n subschema.dataTypes = dataTypes;\n function dataContextProps(_nextData) {\n subschema.data = _nextData;\n subschema.dataLevel = it.dataLevel + 1;\n subschema.dataTypes = [];\n it.definedProperties = new Set();\n subschema.parentData = it.data;\n subschema.dataNames = [...it.dataNames, _nextData];\n }\n}\nexports.extendSubschemaData = extendSubschemaData;\nfunction extendSubschemaMode(subschema, { jtdDiscriminator, jtdMetadata, compositeRule, createErrors, allErrors }) {\n if (compositeRule !== undefined)\n subschema.compositeRule = compositeRule;\n if (createErrors !== undefined)\n subschema.createErrors = createErrors;\n if (allErrors !== undefined)\n subschema.allErrors = allErrors;\n subschema.jtdDiscriminator = jtdDiscriminator; // not inherited\n subschema.jtdMetadata = jtdMetadata; // not inherited\n}\nexports.extendSubschemaMode = extendSubschemaMode;\n//# sourceMappingURL=subschema.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.CodeGen = exports.Name = exports.nil = exports.stringify = exports.str = exports._ = exports.KeywordCxt = void 0;\nvar validate_1 = require(\"./compile/validate\");\nObject.defineProperty(exports, \"KeywordCxt\", { enumerable: true, get: function () { return validate_1.KeywordCxt; } });\nvar codegen_1 = require(\"./compile/codegen\");\nObject.defineProperty(exports, \"_\", { enumerable: true, get: function () { return codegen_1._; } });\nObject.defineProperty(exports, \"str\", { enumerable: true, get: function () { return codegen_1.str; } 
});\nObject.defineProperty(exports, \"stringify\", { enumerable: true, get: function () { return codegen_1.stringify; } });\nObject.defineProperty(exports, \"nil\", { enumerable: true, get: function () { return codegen_1.nil; } });\nObject.defineProperty(exports, \"Name\", { enumerable: true, get: function () { return codegen_1.Name; } });\nObject.defineProperty(exports, \"CodeGen\", { enumerable: true, get: function () { return codegen_1.CodeGen; } });\nconst validation_error_1 = require(\"./runtime/validation_error\");\nconst ref_error_1 = require(\"./compile/ref_error\");\nconst rules_1 = require(\"./compile/rules\");\nconst compile_1 = require(\"./compile\");\nconst codegen_2 = require(\"./compile/codegen\");\nconst resolve_1 = require(\"./compile/resolve\");\nconst dataType_1 = require(\"./compile/validate/dataType\");\nconst util_1 = require(\"./compile/util\");\nconst $dataRefSchema = require(\"./refs/data.json\");\nconst uri_1 = require(\"./runtime/uri\");\nconst defaultRegExp = (str, flags) => new RegExp(str, flags);\ndefaultRegExp.code = \"new RegExp\";\nconst META_IGNORE_OPTIONS = [\"removeAdditional\", \"useDefaults\", \"coerceTypes\"];\nconst EXT_SCOPE_NAMES = new Set([\n \"validate\",\n \"serialize\",\n \"parse\",\n \"wrapper\",\n \"root\",\n \"schema\",\n \"keyword\",\n \"pattern\",\n \"formats\",\n \"validate$data\",\n \"func\",\n \"obj\",\n \"Error\",\n]);\nconst removedOptions = {\n errorDataPath: \"\",\n format: \"`validateFormats: false` can be used instead.\",\n nullable: '\"nullable\" keyword is supported by default.',\n jsonPointers: \"Deprecated jsPropertySyntax can be used instead.\",\n extendRefs: \"Deprecated ignoreKeywordsWithRef can be used instead.\",\n missingRefs: \"Pass empty schema with $id that should be ignored to ajv.addSchema.\",\n processCode: \"Use option `code: {process: (code, schemaEnv: object) => string}`\",\n sourceCode: \"Use option `code: {source: true}`\",\n strictDefaults: \"It is default now, see option `strict`.\",\n strictKeywords: \"It is default now, see option `strict`.\",\n uniqueItems: '\"uniqueItems\" keyword is always validated.',\n unknownFormats: \"Disable strict mode or pass `true` to `ajv.addFormat` (or `formats` option).\",\n cache: \"Map is used as cache, schema object as key.\",\n serialize: \"Map is used as cache, schema object as key.\",\n ajvErrors: \"It is default now.\",\n};\nconst deprecatedOptions = {\n ignoreKeywordsWithRef: \"\",\n jsPropertySyntax: \"\",\n unicode: '\"minLength\"/\"maxLength\" account for unicode characters by default.',\n};\nconst MAX_EXPRESSION = 200;\n// eslint-disable-next-line complexity\nfunction requiredOptions(o) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _0;\n const s = o.strict;\n const _optz = (_a = o.code) === null || _a === void 0 ? void 0 : _a.optimize;\n const optimize = _optz === true || _optz === undefined ? 1 : _optz || 0;\n const regExp = (_c = (_b = o.code) === null || _b === void 0 ? void 0 : _b.regExp) !== null && _c !== void 0 ? _c : defaultRegExp;\n const uriResolver = (_d = o.uriResolver) !== null && _d !== void 0 ? _d : uri_1.default;\n return {\n strictSchema: (_f = (_e = o.strictSchema) !== null && _e !== void 0 ? _e : s) !== null && _f !== void 0 ? _f : true,\n strictNumbers: (_h = (_g = o.strictNumbers) !== null && _g !== void 0 ? _g : s) !== null && _h !== void 0 ? _h : true,\n strictTypes: (_k = (_j = o.strictTypes) !== null && _j !== void 0 ? _j : s) !== null && _k !== void 0 ? 
_k : \"log\",\n strictTuples: (_m = (_l = o.strictTuples) !== null && _l !== void 0 ? _l : s) !== null && _m !== void 0 ? _m : \"log\",\n strictRequired: (_p = (_o = o.strictRequired) !== null && _o !== void 0 ? _o : s) !== null && _p !== void 0 ? _p : false,\n code: o.code ? { ...o.code, optimize, regExp } : { optimize, regExp },\n loopRequired: (_q = o.loopRequired) !== null && _q !== void 0 ? _q : MAX_EXPRESSION,\n loopEnum: (_r = o.loopEnum) !== null && _r !== void 0 ? _r : MAX_EXPRESSION,\n meta: (_s = o.meta) !== null && _s !== void 0 ? _s : true,\n messages: (_t = o.messages) !== null && _t !== void 0 ? _t : true,\n inlineRefs: (_u = o.inlineRefs) !== null && _u !== void 0 ? _u : true,\n schemaId: (_v = o.schemaId) !== null && _v !== void 0 ? _v : \"$id\",\n addUsedSchema: (_w = o.addUsedSchema) !== null && _w !== void 0 ? _w : true,\n validateSchema: (_x = o.validateSchema) !== null && _x !== void 0 ? _x : true,\n validateFormats: (_y = o.validateFormats) !== null && _y !== void 0 ? _y : true,\n unicodeRegExp: (_z = o.unicodeRegExp) !== null && _z !== void 0 ? _z : true,\n int32range: (_0 = o.int32range) !== null && _0 !== void 0 ? _0 : true,\n uriResolver: uriResolver,\n };\n}\nclass Ajv {\n constructor(opts = {}) {\n this.schemas = {};\n this.refs = {};\n this.formats = {};\n this._compilations = new Set();\n this._loading = {};\n this._cache = new Map();\n opts = this.opts = { ...opts, ...requiredOptions(opts) };\n const { es5, lines } = this.opts.code;\n this.scope = new codegen_2.ValueScope({ scope: {}, prefixes: EXT_SCOPE_NAMES, es5, lines });\n this.logger = getLogger(opts.logger);\n const formatOpt = opts.validateFormats;\n opts.validateFormats = false;\n this.RULES = (0, rules_1.getRules)();\n checkOptions.call(this, removedOptions, opts, \"NOT SUPPORTED\");\n checkOptions.call(this, deprecatedOptions, opts, \"DEPRECATED\", \"warn\");\n this._metaOpts = getMetaSchemaOptions.call(this);\n if (opts.formats)\n addInitialFormats.call(this);\n this._addVocabularies();\n this._addDefaultMetaSchema();\n if (opts.keywords)\n addInitialKeywords.call(this, opts.keywords);\n if (typeof opts.meta == \"object\")\n this.addMetaSchema(opts.meta);\n addInitialSchemas.call(this);\n opts.validateFormats = formatOpt;\n }\n _addVocabularies() {\n this.addKeyword(\"$async\");\n }\n _addDefaultMetaSchema() {\n const { $data, meta, schemaId } = this.opts;\n let _dataRefSchema = $dataRefSchema;\n if (schemaId === \"id\") {\n _dataRefSchema = { ...$dataRefSchema };\n _dataRefSchema.id = _dataRefSchema.$id;\n delete _dataRefSchema.$id;\n }\n if (meta && $data)\n this.addMetaSchema(_dataRefSchema, _dataRefSchema[schemaId], false);\n }\n defaultMeta() {\n const { meta, schemaId } = this.opts;\n return (this.opts.defaultMeta = typeof meta == \"object\" ? 
meta[schemaId] || meta : undefined);\n }\n validate(schemaKeyRef, // key, ref or schema object\n data // to be validated\n ) {\n let v;\n if (typeof schemaKeyRef == \"string\") {\n v = this.getSchema(schemaKeyRef);\n if (!v)\n throw new Error(`no schema with key or ref \"${schemaKeyRef}\"`);\n }\n else {\n v = this.compile(schemaKeyRef);\n }\n const valid = v(data);\n if (!(\"$async\" in v))\n this.errors = v.errors;\n return valid;\n }\n compile(schema, _meta) {\n const sch = this._addSchema(schema, _meta);\n return (sch.validate || this._compileSchemaEnv(sch));\n }\n compileAsync(schema, meta) {\n if (typeof this.opts.loadSchema != \"function\") {\n throw new Error(\"options.loadSchema should be a function\");\n }\n const { loadSchema } = this.opts;\n return runCompileAsync.call(this, schema, meta);\n async function runCompileAsync(_schema, _meta) {\n await loadMetaSchema.call(this, _schema.$schema);\n const sch = this._addSchema(_schema, _meta);\n return sch.validate || _compileAsync.call(this, sch);\n }\n async function loadMetaSchema($ref) {\n if ($ref && !this.getSchema($ref)) {\n await runCompileAsync.call(this, { $ref }, true);\n }\n }\n async function _compileAsync(sch) {\n try {\n return this._compileSchemaEnv(sch);\n }\n catch (e) {\n if (!(e instanceof ref_error_1.default))\n throw e;\n checkLoaded.call(this, e);\n await loadMissingSchema.call(this, e.missingSchema);\n return _compileAsync.call(this, sch);\n }\n }\n function checkLoaded({ missingSchema: ref, missingRef }) {\n if (this.refs[ref]) {\n throw new Error(`AnySchema ${ref} is loaded but ${missingRef} cannot be resolved`);\n }\n }\n async function loadMissingSchema(ref) {\n const _schema = await _loadSchema.call(this, ref);\n if (!this.refs[ref])\n await loadMetaSchema.call(this, _schema.$schema);\n if (!this.refs[ref])\n this.addSchema(_schema, ref, meta);\n }\n async function _loadSchema(ref) {\n const p = this._loading[ref];\n if (p)\n return p;\n try {\n return await (this._loading[ref] = loadSchema(ref));\n }\n finally {\n delete this._loading[ref];\n }\n }\n }\n // Adds schema to the instance\n addSchema(schema, // If array is passed, `key` will be ignored\n key, // Optional schema key. Can be passed to `validate` method instead of schema object or id/ref. One schema per instance can have empty `id` and `key`.\n _meta, // true if schema is a meta-schema. Used internally, addMetaSchema should be used instead.\n _validateSchema = this.opts.validateSchema // false to skip schema validation. 
Used internally, option validateSchema should be used instead.\n ) {\n if (Array.isArray(schema)) {\n for (const sch of schema)\n this.addSchema(sch, undefined, _meta, _validateSchema);\n return this;\n }\n let id;\n if (typeof schema === \"object\") {\n const { schemaId } = this.opts;\n id = schema[schemaId];\n if (id !== undefined && typeof id != \"string\") {\n throw new Error(`schema ${schemaId} must be string`);\n }\n }\n key = (0, resolve_1.normalizeId)(key || id);\n this._checkUnique(key);\n this.schemas[key] = this._addSchema(schema, _meta, key, _validateSchema, true);\n return this;\n }\n // Add schema that will be used to validate other schemas\n // options in META_IGNORE_OPTIONS are alway set to false\n addMetaSchema(schema, key, // schema key\n _validateSchema = this.opts.validateSchema // false to skip schema validation, can be used to override validateSchema option for meta-schema\n ) {\n this.addSchema(schema, key, true, _validateSchema);\n return this;\n }\n // Validate schema against its meta-schema\n validateSchema(schema, throwOrLogError) {\n if (typeof schema == \"boolean\")\n return true;\n let $schema;\n $schema = schema.$schema;\n if ($schema !== undefined && typeof $schema != \"string\") {\n throw new Error(\"$schema must be a string\");\n }\n $schema = $schema || this.opts.defaultMeta || this.defaultMeta();\n if (!$schema) {\n this.logger.warn(\"meta-schema not available\");\n this.errors = null;\n return true;\n }\n const valid = this.validate($schema, schema);\n if (!valid && throwOrLogError) {\n const message = \"schema is invalid: \" + this.errorsText();\n if (this.opts.validateSchema === \"log\")\n this.logger.error(message);\n else\n throw new Error(message);\n }\n return valid;\n }\n // Get compiled schema by `key` or `ref`.\n // (`key` that was passed to `addSchema` or full schema reference - `schema.$id` or resolved id)\n getSchema(keyRef) {\n let sch;\n while (typeof (sch = getSchEnv.call(this, keyRef)) == \"string\")\n keyRef = sch;\n if (sch === undefined) {\n const { schemaId } = this.opts;\n const root = new compile_1.SchemaEnv({ schema: {}, schemaId });\n sch = compile_1.resolveSchema.call(this, root, keyRef);\n if (!sch)\n return;\n this.refs[keyRef] = sch;\n }\n return (sch.validate || this._compileSchemaEnv(sch));\n }\n // Remove cached schema(s).\n // If no parameter is passed all schemas but meta-schemas are removed.\n // If RegExp is passed all schemas with key/id matching pattern but meta-schemas are removed.\n // Even if schema is referenced by other schemas it still can be removed as other schemas have local references.\n removeSchema(schemaKeyRef) {\n if (schemaKeyRef instanceof RegExp) {\n this._removeAllSchemas(this.schemas, schemaKeyRef);\n this._removeAllSchemas(this.refs, schemaKeyRef);\n return this;\n }\n switch (typeof schemaKeyRef) {\n case \"undefined\":\n this._removeAllSchemas(this.schemas);\n this._removeAllSchemas(this.refs);\n this._cache.clear();\n return this;\n case \"string\": {\n const sch = getSchEnv.call(this, schemaKeyRef);\n if (typeof sch == \"object\")\n this._cache.delete(sch.schema);\n delete this.schemas[schemaKeyRef];\n delete this.refs[schemaKeyRef];\n return this;\n }\n case \"object\": {\n const cacheKey = schemaKeyRef;\n this._cache.delete(cacheKey);\n let id = schemaKeyRef[this.opts.schemaId];\n if (id) {\n id = (0, resolve_1.normalizeId)(id);\n delete this.schemas[id];\n delete this.refs[id];\n }\n return this;\n }\n default:\n throw new Error(\"ajv.removeSchema: invalid parameter\");\n }\n }\n // add 
\"vocabulary\" - a collection of keywords\n addVocabulary(definitions) {\n for (const def of definitions)\n this.addKeyword(def);\n return this;\n }\n addKeyword(kwdOrDef, def // deprecated\n ) {\n let keyword;\n if (typeof kwdOrDef == \"string\") {\n keyword = kwdOrDef;\n if (typeof def == \"object\") {\n this.logger.warn(\"these parameters are deprecated, see docs for addKeyword\");\n def.keyword = keyword;\n }\n }\n else if (typeof kwdOrDef == \"object\" && def === undefined) {\n def = kwdOrDef;\n keyword = def.keyword;\n if (Array.isArray(keyword) && !keyword.length) {\n throw new Error(\"addKeywords: keyword must be string or non-empty array\");\n }\n }\n else {\n throw new Error(\"invalid addKeywords parameters\");\n }\n checkKeyword.call(this, keyword, def);\n if (!def) {\n (0, util_1.eachItem)(keyword, (kwd) => addRule.call(this, kwd));\n return this;\n }\n keywordMetaschema.call(this, def);\n const definition = {\n ...def,\n type: (0, dataType_1.getJSONTypes)(def.type),\n schemaType: (0, dataType_1.getJSONTypes)(def.schemaType),\n };\n (0, util_1.eachItem)(keyword, definition.type.length === 0\n ? (k) => addRule.call(this, k, definition)\n : (k) => definition.type.forEach((t) => addRule.call(this, k, definition, t)));\n return this;\n }\n getKeyword(keyword) {\n const rule = this.RULES.all[keyword];\n return typeof rule == \"object\" ? rule.definition : !!rule;\n }\n // Remove keyword\n removeKeyword(keyword) {\n // TODO return type should be Ajv\n const { RULES } = this;\n delete RULES.keywords[keyword];\n delete RULES.all[keyword];\n for (const group of RULES.rules) {\n const i = group.rules.findIndex((rule) => rule.keyword === keyword);\n if (i >= 0)\n group.rules.splice(i, 1);\n }\n return this;\n }\n // Add format\n addFormat(name, format) {\n if (typeof format == \"string\")\n format = new RegExp(format);\n this.formats[name] = format;\n return this;\n }\n errorsText(errors = this.errors, // optional array of validation errors\n { separator = \", \", dataVar = \"data\" } = {} // optional options with properties `separator` and `dataVar`\n ) {\n if (!errors || errors.length === 0)\n return \"No errors\";\n return errors\n .map((e) => `${dataVar}${e.instancePath} ${e.message}`)\n .reduce((text, msg) => text + separator + msg);\n }\n $dataMetaSchema(metaSchema, keywordsJsonPointers) {\n const rules = this.RULES.all;\n metaSchema = JSON.parse(JSON.stringify(metaSchema));\n for (const jsonPointer of keywordsJsonPointers) {\n const segments = jsonPointer.split(\"/\").slice(1); // first segment is an empty string\n let keywords = metaSchema;\n for (const seg of segments)\n keywords = keywords[seg];\n for (const key in rules) {\n const rule = rules[key];\n if (typeof rule != \"object\")\n continue;\n const { $data } = rule.definition;\n const schema = keywords[key];\n if ($data && schema)\n keywords[key] = schemaOrData(schema);\n }\n }\n return metaSchema;\n }\n _removeAllSchemas(schemas, regex) {\n for (const keyRef in schemas) {\n const sch = schemas[keyRef];\n if (!regex || regex.test(keyRef)) {\n if (typeof sch == \"string\") {\n delete schemas[keyRef];\n }\n else if (sch && !sch.meta) {\n this._cache.delete(sch.schema);\n delete schemas[keyRef];\n }\n }\n }\n }\n _addSchema(schema, meta, baseId, validateSchema = this.opts.validateSchema, addSchema = this.opts.addUsedSchema) {\n let id;\n const { schemaId } = this.opts;\n if (typeof schema == \"object\") {\n id = schema[schemaId];\n }\n else {\n if (this.opts.jtd)\n throw new Error(\"schema must be object\");\n else if (typeof 
schema != \"boolean\")\n throw new Error(\"schema must be object or boolean\");\n }\n let sch = this._cache.get(schema);\n if (sch !== undefined)\n return sch;\n baseId = (0, resolve_1.normalizeId)(id || baseId);\n const localRefs = resolve_1.getSchemaRefs.call(this, schema, baseId);\n sch = new compile_1.SchemaEnv({ schema, schemaId, meta, baseId, localRefs });\n this._cache.set(sch.schema, sch);\n if (addSchema && !baseId.startsWith(\"#\")) {\n // TODO atm it is allowed to overwrite schemas without id (instead of not adding them)\n if (baseId)\n this._checkUnique(baseId);\n this.refs[baseId] = sch;\n }\n if (validateSchema)\n this.validateSchema(schema, true);\n return sch;\n }\n _checkUnique(id) {\n if (this.schemas[id] || this.refs[id]) {\n throw new Error(`schema with key or id \"${id}\" already exists`);\n }\n }\n _compileSchemaEnv(sch) {\n if (sch.meta)\n this._compileMetaSchema(sch);\n else\n compile_1.compileSchema.call(this, sch);\n /* istanbul ignore if */\n if (!sch.validate)\n throw new Error(\"ajv implementation error\");\n return sch.validate;\n }\n _compileMetaSchema(sch) {\n const currentOpts = this.opts;\n this.opts = this._metaOpts;\n try {\n compile_1.compileSchema.call(this, sch);\n }\n finally {\n this.opts = currentOpts;\n }\n }\n}\nexports.default = Ajv;\nAjv.ValidationError = validation_error_1.default;\nAjv.MissingRefError = ref_error_1.default;\nfunction checkOptions(checkOpts, options, msg, log = \"error\") {\n for (const key in checkOpts) {\n const opt = key;\n if (opt in options)\n this.logger[log](`${msg}: option ${key}. ${checkOpts[opt]}`);\n }\n}\nfunction getSchEnv(keyRef) {\n keyRef = (0, resolve_1.normalizeId)(keyRef); // TODO tests fail without this line\n return this.schemas[keyRef] || this.refs[keyRef];\n}\nfunction addInitialSchemas() {\n const optsSchemas = this.opts.schemas;\n if (!optsSchemas)\n return;\n if (Array.isArray(optsSchemas))\n this.addSchema(optsSchemas);\n else\n for (const key in optsSchemas)\n this.addSchema(optsSchemas[key], key);\n}\nfunction addInitialFormats() {\n for (const name in this.opts.formats) {\n const format = this.opts.formats[name];\n if (format)\n this.addFormat(name, format);\n }\n}\nfunction addInitialKeywords(defs) {\n if (Array.isArray(defs)) {\n this.addVocabulary(defs);\n return;\n }\n this.logger.warn(\"keywords option as map is deprecated, pass array\");\n for (const keyword in defs) {\n const def = defs[keyword];\n if (!def.keyword)\n def.keyword = keyword;\n this.addKeyword(def);\n }\n}\nfunction getMetaSchemaOptions() {\n const metaOpts = { ...this.opts };\n for (const opt of META_IGNORE_OPTIONS)\n delete metaOpts[opt];\n return metaOpts;\n}\nconst noLogs = { log() { }, warn() { }, error() { } };\nfunction getLogger(logger) {\n if (logger === false)\n return noLogs;\n if (logger === undefined)\n return console;\n if (logger.log && logger.warn && logger.error)\n return logger;\n throw new Error(\"logger must implement log, warn and error methods\");\n}\nconst KEYWORD_NAME = /^[a-z_$][a-z0-9_$:-]*$/i;\nfunction checkKeyword(keyword, def) {\n const { RULES } = this;\n (0, util_1.eachItem)(keyword, (kwd) => {\n if (RULES.keywords[kwd])\n throw new Error(`Keyword ${kwd} is already defined`);\n if (!KEYWORD_NAME.test(kwd))\n throw new Error(`Keyword ${kwd} has invalid name`);\n });\n if (!def)\n return;\n if (def.$data && !(\"code\" in def || \"validate\" in def)) {\n throw new Error('$data keyword must have \"code\" or \"validate\" function');\n }\n}\nfunction addRule(keyword, definition, dataType) {\n var 
_a;\n const post = definition === null || definition === void 0 ? void 0 : definition.post;\n if (dataType && post)\n throw new Error('keyword with \"post\" flag cannot have \"type\"');\n const { RULES } = this;\n let ruleGroup = post ? RULES.post : RULES.rules.find(({ type: t }) => t === dataType);\n if (!ruleGroup) {\n ruleGroup = { type: dataType, rules: [] };\n RULES.rules.push(ruleGroup);\n }\n RULES.keywords[keyword] = true;\n if (!definition)\n return;\n const rule = {\n keyword,\n definition: {\n ...definition,\n type: (0, dataType_1.getJSONTypes)(definition.type),\n schemaType: (0, dataType_1.getJSONTypes)(definition.schemaType),\n },\n };\n if (definition.before)\n addBeforeRule.call(this, ruleGroup, rule, definition.before);\n else\n ruleGroup.rules.push(rule);\n RULES.all[keyword] = rule;\n (_a = definition.implements) === null || _a === void 0 ? void 0 : _a.forEach((kwd) => this.addKeyword(kwd));\n}\nfunction addBeforeRule(ruleGroup, rule, before) {\n const i = ruleGroup.rules.findIndex((_rule) => _rule.keyword === before);\n if (i >= 0) {\n ruleGroup.rules.splice(i, 0, rule);\n }\n else {\n ruleGroup.rules.push(rule);\n this.logger.warn(`rule ${before} is not defined`);\n }\n}\nfunction keywordMetaschema(def) {\n let { metaSchema } = def;\n if (metaSchema === undefined)\n return;\n if (def.$data && this.opts.$data)\n metaSchema = schemaOrData(metaSchema);\n def.validateSchema = this.compile(metaSchema, true);\n}\nconst $dataRef = {\n $ref: \"https://raw.githubusercontent.com/ajv-validator/ajv/master/lib/refs/data.json#\",\n};\nfunction schemaOrData(schema) {\n return { anyOf: [schema, $dataRef] };\n}\n//# sourceMappingURL=core.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\n// https://github.com/ajv-validator/ajv/issues/889\nconst equal = require(\"fast-deep-equal\");\nequal.code = 'require(\"ajv/dist/runtime/equal\").default';\nexports.default = equal;\n//# sourceMappingURL=equal.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\n// https://mathiasbynens.be/notes/javascript-encoding\n// https://github.com/bestiejs/punycode.js - punycode.ucs2.decode\nfunction ucs2length(str) {\n const len = str.length;\n let length = 0;\n let pos = 0;\n let value;\n while (pos < len) {\n length++;\n value = str.charCodeAt(pos++);\n if (value >= 0xd800 && value <= 0xdbff && pos < len) {\n // high surrogate, and there is a next character\n value = str.charCodeAt(pos);\n if ((value & 0xfc00) === 0xdc00)\n pos++; // low surrogate\n }\n }\n return length;\n}\nexports.default = ucs2length;\nucs2length.code = 'require(\"ajv/dist/runtime/ucs2length\").default';\n//# sourceMappingURL=ucs2length.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst uri = require(\"uri-js\");\nuri.code = 'require(\"ajv/dist/runtime/uri\").default';\nexports.default = uri;\n//# sourceMappingURL=uri.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nclass ValidationError extends Error {\n constructor(errors) {\n super(\"validation failed\");\n this.errors = errors;\n this.ajv = this.validation = true;\n }\n}\nexports.default = ValidationError;\n//# sourceMappingURL=validation_error.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.validateAdditionalItems = void 0;\nconst codegen_1 = require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst error = {\n message: ({ params: { 
len } }) => (0, codegen_1.str) `must NOT have more than ${len} items`,\n params: ({ params: { len } }) => (0, codegen_1._) `{limit: ${len}}`,\n};\nconst def = {\n keyword: \"additionalItems\",\n type: \"array\",\n schemaType: [\"boolean\", \"object\"],\n before: \"uniqueItems\",\n error,\n code(cxt) {\n const { parentSchema, it } = cxt;\n const { items } = parentSchema;\n if (!Array.isArray(items)) {\n (0, util_1.checkStrictMode)(it, '\"additionalItems\" is ignored when \"items\" is not an array of schemas');\n return;\n }\n validateAdditionalItems(cxt, items);\n },\n};\nfunction validateAdditionalItems(cxt, items) {\n const { gen, schema, data, keyword, it } = cxt;\n it.items = true;\n const len = gen.const(\"len\", (0, codegen_1._) `${data}.length`);\n if (schema === false) {\n cxt.setParams({ len: items.length });\n cxt.pass((0, codegen_1._) `${len} <= ${items.length}`);\n }\n else if (typeof schema == \"object\" && !(0, util_1.alwaysValidSchema)(it, schema)) {\n const valid = gen.var(\"valid\", (0, codegen_1._) `${len} <= ${items.length}`); // TODO var\n gen.if((0, codegen_1.not)(valid), () => validateItems(valid));\n cxt.ok(valid);\n }\n function validateItems(valid) {\n gen.forRange(\"i\", items.length, len, (i) => {\n cxt.subschema({ keyword, dataProp: i, dataPropType: util_1.Type.Num }, valid);\n if (!it.allErrors)\n gen.if((0, codegen_1.not)(valid), () => gen.break());\n });\n }\n}\nexports.validateAdditionalItems = validateAdditionalItems;\nexports.default = def;\n//# sourceMappingURL=additionalItems.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst code_1 = require(\"../code\");\nconst codegen_1 = require(\"../../compile/codegen\");\nconst names_1 = require(\"../../compile/names\");\nconst util_1 = require(\"../../compile/util\");\nconst error = {\n message: \"must NOT have additional properties\",\n params: ({ params }) => (0, codegen_1._) `{additionalProperty: ${params.additionalProperty}}`,\n};\nconst def = {\n keyword: \"additionalProperties\",\n type: [\"object\"],\n schemaType: [\"boolean\", \"object\"],\n allowUndefined: true,\n trackErrors: true,\n error,\n code(cxt) {\n const { gen, schema, parentSchema, data, errsCount, it } = cxt;\n /* istanbul ignore if */\n if (!errsCount)\n throw new Error(\"ajv implementation error\");\n const { allErrors, opts } = it;\n it.props = true;\n if (opts.removeAdditional !== \"all\" && (0, util_1.alwaysValidSchema)(it, schema))\n return;\n const props = (0, code_1.allSchemaProperties)(parentSchema.properties);\n const patProps = (0, code_1.allSchemaProperties)(parentSchema.patternProperties);\n checkAdditionalProperties();\n cxt.ok((0, codegen_1._) `${errsCount} === ${names_1.default.errors}`);\n function checkAdditionalProperties() {\n gen.forIn(\"key\", data, (key) => {\n if (!props.length && !patProps.length)\n additionalPropertyCode(key);\n else\n gen.if(isAdditional(key), () => additionalPropertyCode(key));\n });\n }\n function isAdditional(key) {\n let definedProp;\n if (props.length > 8) {\n // TODO maybe an option instead of hard-coded 8?\n const propsSchema = (0, util_1.schemaRefOrVal)(it, parentSchema.properties, \"properties\");\n definedProp = (0, code_1.isOwnProperty)(gen, propsSchema, key);\n }\n else if (props.length) {\n definedProp = (0, codegen_1.or)(...props.map((p) => (0, codegen_1._) `${key} === ${p}`));\n }\n else {\n definedProp = codegen_1.nil;\n }\n if (patProps.length) {\n definedProp = (0, codegen_1.or)(definedProp, ...patProps.map((p) => (0, codegen_1._) `${(0, 
code_1.usePattern)(cxt, p)}.test(${key})`));\n }\n return (0, codegen_1.not)(definedProp);\n }\n function deleteAdditional(key) {\n gen.code((0, codegen_1._) `delete ${data}[${key}]`);\n }\n function additionalPropertyCode(key) {\n if (opts.removeAdditional === \"all\" || (opts.removeAdditional && schema === false)) {\n deleteAdditional(key);\n return;\n }\n if (schema === false) {\n cxt.setParams({ additionalProperty: key });\n cxt.error();\n if (!allErrors)\n gen.break();\n return;\n }\n if (typeof schema == \"object\" && !(0, util_1.alwaysValidSchema)(it, schema)) {\n const valid = gen.name(\"valid\");\n if (opts.removeAdditional === \"failing\") {\n applyAdditionalSchema(key, valid, false);\n gen.if((0, codegen_1.not)(valid), () => {\n cxt.reset();\n deleteAdditional(key);\n });\n }\n else {\n applyAdditionalSchema(key, valid);\n if (!allErrors)\n gen.if((0, codegen_1.not)(valid), () => gen.break());\n }\n }\n }\n function applyAdditionalSchema(key, valid, errors) {\n const subschema = {\n keyword: \"additionalProperties\",\n dataProp: key,\n dataPropType: util_1.Type.Str,\n };\n if (errors === false) {\n Object.assign(subschema, {\n compositeRule: true,\n createErrors: false,\n allErrors: false,\n });\n }\n cxt.subschema(subschema, valid);\n }\n },\n};\nexports.default = def;\n//# sourceMappingURL=additionalProperties.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst util_1 = require(\"../../compile/util\");\nconst def = {\n keyword: \"allOf\",\n schemaType: \"array\",\n code(cxt) {\n const { gen, schema, it } = cxt;\n /* istanbul ignore if */\n if (!Array.isArray(schema))\n throw new Error(\"ajv implementation error\");\n const valid = gen.name(\"valid\");\n schema.forEach((sch, i) => {\n if ((0, util_1.alwaysValidSchema)(it, sch))\n return;\n const schCxt = cxt.subschema({ keyword: \"allOf\", schemaProp: i }, valid);\n cxt.ok(valid);\n cxt.mergeEvaluated(schCxt);\n });\n },\n};\nexports.default = def;\n//# sourceMappingURL=allOf.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst code_1 = require(\"../code\");\nconst def = {\n keyword: \"anyOf\",\n schemaType: \"array\",\n trackErrors: true,\n code: code_1.validateUnion,\n error: { message: \"must match a schema in anyOf\" },\n};\nexports.default = def;\n//# sourceMappingURL=anyOf.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst error = {\n message: ({ params: { min, max } }) => max === undefined\n ? (0, codegen_1.str) `must contain at least ${min} valid item(s)`\n : (0, codegen_1.str) `must contain at least ${min} and no more than ${max} valid item(s)`,\n params: ({ params: { min, max } }) => max === undefined ? (0, codegen_1._) `{minContains: ${min}}` : (0, codegen_1._) `{minContains: ${min}, maxContains: ${max}}`,\n};\nconst def = {\n keyword: \"contains\",\n type: \"array\",\n schemaType: [\"object\", \"boolean\"],\n before: \"uniqueItems\",\n trackErrors: true,\n error,\n code(cxt) {\n const { gen, schema, parentSchema, data, it } = cxt;\n let min;\n let max;\n const { minContains, maxContains } = parentSchema;\n if (it.opts.next) {\n min = minContains === undefined ? 
1 : minContains;\n max = maxContains;\n }\n else {\n min = 1;\n }\n const len = gen.const(\"len\", (0, codegen_1._) `${data}.length`);\n cxt.setParams({ min, max });\n if (max === undefined && min === 0) {\n (0, util_1.checkStrictMode)(it, `\"minContains\" == 0 without \"maxContains\": \"contains\" keyword ignored`);\n return;\n }\n if (max !== undefined && min > max) {\n (0, util_1.checkStrictMode)(it, `\"minContains\" > \"maxContains\" is always invalid`);\n cxt.fail();\n return;\n }\n if ((0, util_1.alwaysValidSchema)(it, schema)) {\n let cond = (0, codegen_1._) `${len} >= ${min}`;\n if (max !== undefined)\n cond = (0, codegen_1._) `${cond} && ${len} <= ${max}`;\n cxt.pass(cond);\n return;\n }\n it.items = true;\n const valid = gen.name(\"valid\");\n if (max === undefined && min === 1) {\n validateItems(valid, () => gen.if(valid, () => gen.break()));\n }\n else if (min === 0) {\n gen.let(valid, true);\n if (max !== undefined)\n gen.if((0, codegen_1._) `${data}.length > 0`, validateItemsWithCount);\n }\n else {\n gen.let(valid, false);\n validateItemsWithCount();\n }\n cxt.result(valid, () => cxt.reset());\n function validateItemsWithCount() {\n const schValid = gen.name(\"_valid\");\n const count = gen.let(\"count\", 0);\n validateItems(schValid, () => gen.if(schValid, () => checkLimits(count)));\n }\n function validateItems(_valid, block) {\n gen.forRange(\"i\", 0, len, (i) => {\n cxt.subschema({\n keyword: \"contains\",\n dataProp: i,\n dataPropType: util_1.Type.Num,\n compositeRule: true,\n }, _valid);\n block();\n });\n }\n function checkLimits(count) {\n gen.code((0, codegen_1._) `${count}++`);\n if (max === undefined) {\n gen.if((0, codegen_1._) `${count} >= ${min}`, () => gen.assign(valid, true).break());\n }\n else {\n gen.if((0, codegen_1._) `${count} > ${max}`, () => gen.assign(valid, false).break());\n if (min === 1)\n gen.assign(valid, true);\n else\n gen.if((0, codegen_1._) `${count} >= ${min}`, () => gen.assign(valid, true));\n }\n }\n },\n};\nexports.default = def;\n//# sourceMappingURL=contains.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.validateSchemaDeps = exports.validatePropertyDeps = exports.error = void 0;\nconst codegen_1 = require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst code_1 = require(\"../code\");\nexports.error = {\n message: ({ params: { property, depsCount, deps } }) => {\n const property_ies = depsCount === 1 ? \"property\" : \"properties\";\n return (0, codegen_1.str) `must have ${property_ies} ${deps} when property ${property} is present`;\n },\n params: ({ params: { property, depsCount, deps, missingProperty } }) => (0, codegen_1._) `{property: ${property},\n missingProperty: ${missingProperty},\n depsCount: ${depsCount},\n deps: ${deps}}`, // TODO change to reference\n};\nconst def = {\n keyword: \"dependencies\",\n type: \"object\",\n schemaType: \"object\",\n error: exports.error,\n code(cxt) {\n const [propDeps, schDeps] = splitDependencies(cxt);\n validatePropertyDeps(cxt, propDeps);\n validateSchemaDeps(cxt, schDeps);\n },\n};\nfunction splitDependencies({ schema }) {\n const propertyDeps = {};\n const schemaDeps = {};\n for (const key in schema) {\n if (key === \"__proto__\")\n continue;\n const deps = Array.isArray(schema[key]) ? 
propertyDeps : schemaDeps;\n deps[key] = schema[key];\n }\n return [propertyDeps, schemaDeps];\n}\nfunction validatePropertyDeps(cxt, propertyDeps = cxt.schema) {\n const { gen, data, it } = cxt;\n if (Object.keys(propertyDeps).length === 0)\n return;\n const missing = gen.let(\"missing\");\n for (const prop in propertyDeps) {\n const deps = propertyDeps[prop];\n if (deps.length === 0)\n continue;\n const hasProperty = (0, code_1.propertyInData)(gen, data, prop, it.opts.ownProperties);\n cxt.setParams({\n property: prop,\n depsCount: deps.length,\n deps: deps.join(\", \"),\n });\n if (it.allErrors) {\n gen.if(hasProperty, () => {\n for (const depProp of deps) {\n (0, code_1.checkReportMissingProp)(cxt, depProp);\n }\n });\n }\n else {\n gen.if((0, codegen_1._) `${hasProperty} && (${(0, code_1.checkMissingProp)(cxt, deps, missing)})`);\n (0, code_1.reportMissingProp)(cxt, missing);\n gen.else();\n }\n }\n}\nexports.validatePropertyDeps = validatePropertyDeps;\nfunction validateSchemaDeps(cxt, schemaDeps = cxt.schema) {\n const { gen, data, keyword, it } = cxt;\n const valid = gen.name(\"valid\");\n for (const prop in schemaDeps) {\n if ((0, util_1.alwaysValidSchema)(it, schemaDeps[prop]))\n continue;\n gen.if((0, code_1.propertyInData)(gen, data, prop, it.opts.ownProperties), () => {\n const schCxt = cxt.subschema({ keyword, schemaProp: prop }, valid);\n cxt.mergeValidEvaluated(schCxt, valid);\n }, () => gen.var(valid, true) // TODO var\n );\n cxt.ok(valid);\n }\n}\nexports.validateSchemaDeps = validateSchemaDeps;\nexports.default = def;\n//# sourceMappingURL=dependencies.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst error = {\n message: ({ params }) => (0, codegen_1.str) `must match \"${params.ifClause}\" schema`,\n params: ({ params }) => (0, codegen_1._) `{failingKeyword: ${params.ifClause}}`,\n};\nconst def = {\n keyword: \"if\",\n schemaType: [\"object\", \"boolean\"],\n trackErrors: true,\n error,\n code(cxt) {\n const { gen, parentSchema, it } = cxt;\n if (parentSchema.then === undefined && parentSchema.else === undefined) {\n (0, util_1.checkStrictMode)(it, '\"if\" without \"then\" and \"else\" is ignored');\n }\n const hasThen = hasSchema(it, \"then\");\n const hasElse = hasSchema(it, \"else\");\n if (!hasThen && !hasElse)\n return;\n const valid = gen.let(\"valid\", true);\n const schValid = gen.name(\"_valid\");\n validateIf();\n cxt.reset();\n if (hasThen && hasElse) {\n const ifClause = gen.let(\"ifClause\");\n cxt.setParams({ ifClause });\n gen.if(schValid, validateClause(\"then\", ifClause), validateClause(\"else\", ifClause));\n }\n else if (hasThen) {\n gen.if(schValid, validateClause(\"then\"));\n }\n else {\n gen.if((0, codegen_1.not)(schValid), validateClause(\"else\"));\n }\n cxt.pass(valid, () => cxt.error(true));\n function validateIf() {\n const schCxt = cxt.subschema({\n keyword: \"if\",\n compositeRule: true,\n createErrors: false,\n allErrors: false,\n }, schValid);\n cxt.mergeEvaluated(schCxt);\n }\n function validateClause(keyword, ifClause) {\n return () => {\n const schCxt = cxt.subschema({ keyword }, schValid);\n gen.assign(valid, schValid);\n cxt.mergeValidEvaluated(schCxt, valid);\n if (ifClause)\n gen.assign(ifClause, (0, codegen_1._) `${keyword}`);\n else\n cxt.setParams({ ifClause: keyword });\n };\n }\n },\n};\nfunction hasSchema(it, keyword) {\n const schema = it.schema[keyword];\n return schema !== 
undefined && !(0, util_1.alwaysValidSchema)(it, schema);\n}\nexports.default = def;\n//# sourceMappingURL=if.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst additionalItems_1 = require(\"./additionalItems\");\nconst prefixItems_1 = require(\"./prefixItems\");\nconst items_1 = require(\"./items\");\nconst items2020_1 = require(\"./items2020\");\nconst contains_1 = require(\"./contains\");\nconst dependencies_1 = require(\"./dependencies\");\nconst propertyNames_1 = require(\"./propertyNames\");\nconst additionalProperties_1 = require(\"./additionalProperties\");\nconst properties_1 = require(\"./properties\");\nconst patternProperties_1 = require(\"./patternProperties\");\nconst not_1 = require(\"./not\");\nconst anyOf_1 = require(\"./anyOf\");\nconst oneOf_1 = require(\"./oneOf\");\nconst allOf_1 = require(\"./allOf\");\nconst if_1 = require(\"./if\");\nconst thenElse_1 = require(\"./thenElse\");\nfunction getApplicator(draft2020 = false) {\n const applicator = [\n // any\n not_1.default,\n anyOf_1.default,\n oneOf_1.default,\n allOf_1.default,\n if_1.default,\n thenElse_1.default,\n // object\n propertyNames_1.default,\n additionalProperties_1.default,\n dependencies_1.default,\n properties_1.default,\n patternProperties_1.default,\n ];\n // array\n if (draft2020)\n applicator.push(prefixItems_1.default, items2020_1.default);\n else\n applicator.push(additionalItems_1.default, items_1.default);\n applicator.push(contains_1.default);\n return applicator;\n}\nexports.default = getApplicator;\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.validateTuple = void 0;\nconst codegen_1 = require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst code_1 = require(\"../code\");\nconst def = {\n keyword: \"items\",\n type: \"array\",\n schemaType: [\"object\", \"array\", \"boolean\"],\n before: \"uniqueItems\",\n code(cxt) {\n const { schema, it } = cxt;\n if (Array.isArray(schema))\n return validateTuple(cxt, \"additionalItems\", schema);\n it.items = true;\n if ((0, util_1.alwaysValidSchema)(it, schema))\n return;\n cxt.ok((0, code_1.validateArray)(cxt));\n },\n};\nfunction validateTuple(cxt, extraItems, schArr = cxt.schema) {\n const { gen, parentSchema, data, keyword, it } = cxt;\n checkStrictTuple(parentSchema);\n if (it.opts.unevaluated && schArr.length && it.items !== true) {\n it.items = util_1.mergeEvaluated.items(gen, schArr.length, it.items);\n }\n const valid = gen.name(\"valid\");\n const len = gen.const(\"len\", (0, codegen_1._) `${data}.length`);\n schArr.forEach((sch, i) => {\n if ((0, util_1.alwaysValidSchema)(it, sch))\n return;\n gen.if((0, codegen_1._) `${len} > ${i}`, () => cxt.subschema({\n keyword,\n schemaProp: i,\n dataProp: i,\n }, valid));\n cxt.ok(valid);\n });\n function checkStrictTuple(sch) {\n const { opts, errSchemaPath } = it;\n const l = schArr.length;\n const fullTuple = l === sch.minItems && (l === sch.maxItems || sch[extraItems] === false);\n if (opts.strictTuples && !fullTuple) {\n const msg = `\"${keyword}\" is ${l}-tuple, but minItems or maxItems/${extraItems} are not specified or different at path \"${errSchemaPath}\"`;\n (0, util_1.checkStrictMode)(it, msg, opts.strictTuples);\n }\n }\n}\nexports.validateTuple = validateTuple;\nexports.default = def;\n//# sourceMappingURL=items.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = 
require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst code_1 = require(\"../code\");\nconst additionalItems_1 = require(\"./additionalItems\");\nconst error = {\n message: ({ params: { len } }) => (0, codegen_1.str) `must NOT have more than ${len} items`,\n params: ({ params: { len } }) => (0, codegen_1._) `{limit: ${len}}`,\n};\nconst def = {\n keyword: \"items\",\n type: \"array\",\n schemaType: [\"object\", \"boolean\"],\n before: \"uniqueItems\",\n error,\n code(cxt) {\n const { schema, parentSchema, it } = cxt;\n const { prefixItems } = parentSchema;\n it.items = true;\n if ((0, util_1.alwaysValidSchema)(it, schema))\n return;\n if (prefixItems)\n (0, additionalItems_1.validateAdditionalItems)(cxt, prefixItems);\n else\n cxt.ok((0, code_1.validateArray)(cxt));\n },\n};\nexports.default = def;\n//# sourceMappingURL=items2020.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst util_1 = require(\"../../compile/util\");\nconst def = {\n keyword: \"not\",\n schemaType: [\"object\", \"boolean\"],\n trackErrors: true,\n code(cxt) {\n const { gen, schema, it } = cxt;\n if ((0, util_1.alwaysValidSchema)(it, schema)) {\n cxt.fail();\n return;\n }\n const valid = gen.name(\"valid\");\n cxt.subschema({\n keyword: \"not\",\n compositeRule: true,\n createErrors: false,\n allErrors: false,\n }, valid);\n cxt.failResult(valid, () => cxt.reset(), () => cxt.error());\n },\n error: { message: \"must NOT be valid\" },\n};\nexports.default = def;\n//# sourceMappingURL=not.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst error = {\n message: \"must match exactly one schema in oneOf\",\n params: ({ params }) => (0, codegen_1._) `{passingSchemas: ${params.passing}}`,\n};\nconst def = {\n keyword: \"oneOf\",\n schemaType: \"array\",\n trackErrors: true,\n error,\n code(cxt) {\n const { gen, schema, parentSchema, it } = cxt;\n /* istanbul ignore if */\n if (!Array.isArray(schema))\n throw new Error(\"ajv implementation error\");\n if (it.opts.discriminator && parentSchema.discriminator)\n return;\n const schArr = schema;\n const valid = gen.let(\"valid\", false);\n const passing = gen.let(\"passing\", null);\n const schValid = gen.name(\"_valid\");\n cxt.setParams({ passing });\n // TODO possibly fail straight away (with warning or exception) if there are two empty always valid schemas\n gen.block(validateOneOf);\n cxt.result(valid, () => cxt.reset(), () => cxt.error(true));\n function validateOneOf() {\n schArr.forEach((sch, i) => {\n let schCxt;\n if ((0, util_1.alwaysValidSchema)(it, sch)) {\n gen.var(schValid, true);\n }\n else {\n schCxt = cxt.subschema({\n keyword: \"oneOf\",\n schemaProp: i,\n compositeRule: true,\n }, schValid);\n }\n if (i > 0) {\n gen\n .if((0, codegen_1._) `${schValid} && ${valid}`)\n .assign(valid, false)\n .assign(passing, (0, codegen_1._) `[${passing}, ${i}]`)\n .else();\n }\n gen.if(schValid, () => {\n gen.assign(valid, true);\n gen.assign(passing, i);\n if (schCxt)\n cxt.mergeEvaluated(schCxt, codegen_1.Name);\n });\n });\n }\n },\n};\nexports.default = def;\n//# sourceMappingURL=oneOf.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst code_1 = require(\"../code\");\nconst codegen_1 = require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst util_2 = 
require(\"../../compile/util\");\nconst def = {\n keyword: \"patternProperties\",\n type: \"object\",\n schemaType: \"object\",\n code(cxt) {\n const { gen, schema, data, parentSchema, it } = cxt;\n const { opts } = it;\n const patterns = (0, code_1.allSchemaProperties)(schema);\n const alwaysValidPatterns = patterns.filter((p) => (0, util_1.alwaysValidSchema)(it, schema[p]));\n if (patterns.length === 0 ||\n (alwaysValidPatterns.length === patterns.length &&\n (!it.opts.unevaluated || it.props === true))) {\n return;\n }\n const checkProperties = opts.strictSchema && !opts.allowMatchingProperties && parentSchema.properties;\n const valid = gen.name(\"valid\");\n if (it.props !== true && !(it.props instanceof codegen_1.Name)) {\n it.props = (0, util_2.evaluatedPropsToName)(gen, it.props);\n }\n const { props } = it;\n validatePatternProperties();\n function validatePatternProperties() {\n for (const pat of patterns) {\n if (checkProperties)\n checkMatchingProperties(pat);\n if (it.allErrors) {\n validateProperties(pat);\n }\n else {\n gen.var(valid, true); // TODO var\n validateProperties(pat);\n gen.if(valid);\n }\n }\n }\n function checkMatchingProperties(pat) {\n for (const prop in checkProperties) {\n if (new RegExp(pat).test(prop)) {\n (0, util_1.checkStrictMode)(it, `property ${prop} matches pattern ${pat} (use allowMatchingProperties)`);\n }\n }\n }\n function validateProperties(pat) {\n gen.forIn(\"key\", data, (key) => {\n gen.if((0, codegen_1._) `${(0, code_1.usePattern)(cxt, pat)}.test(${key})`, () => {\n const alwaysValid = alwaysValidPatterns.includes(pat);\n if (!alwaysValid) {\n cxt.subschema({\n keyword: \"patternProperties\",\n schemaProp: pat,\n dataProp: key,\n dataPropType: util_2.Type.Str,\n }, valid);\n }\n if (it.opts.unevaluated && props !== true) {\n gen.assign((0, codegen_1._) `${props}[${key}]`, true);\n }\n else if (!alwaysValid && !it.allErrors) {\n // can short-circuit if `unevaluatedProperties` is not supported (opts.next === false)\n // or if all properties were evaluated (props === true)\n gen.if((0, codegen_1.not)(valid), () => gen.break());\n }\n });\n });\n }\n },\n};\nexports.default = def;\n//# sourceMappingURL=patternProperties.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst items_1 = require(\"./items\");\nconst def = {\n keyword: \"prefixItems\",\n type: \"array\",\n schemaType: [\"array\"],\n before: \"uniqueItems\",\n code: (cxt) => (0, items_1.validateTuple)(cxt, \"items\"),\n};\nexports.default = def;\n//# sourceMappingURL=prefixItems.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst validate_1 = require(\"../../compile/validate\");\nconst code_1 = require(\"../code\");\nconst util_1 = require(\"../../compile/util\");\nconst additionalProperties_1 = require(\"./additionalProperties\");\nconst def = {\n keyword: \"properties\",\n type: \"object\",\n schemaType: \"object\",\n code(cxt) {\n const { gen, schema, parentSchema, data, it } = cxt;\n if (it.opts.removeAdditional === \"all\" && parentSchema.additionalProperties === undefined) {\n additionalProperties_1.default.code(new validate_1.KeywordCxt(it, additionalProperties_1.default, \"additionalProperties\"));\n }\n const allProps = (0, code_1.allSchemaProperties)(schema);\n for (const prop of allProps) {\n it.definedProperties.add(prop);\n }\n if (it.opts.unevaluated && allProps.length && it.props !== true) {\n it.props = util_1.mergeEvaluated.props(gen, (0, util_1.toHash)(allProps), it.props);\n 
}\n const properties = allProps.filter((p) => !(0, util_1.alwaysValidSchema)(it, schema[p]));\n if (properties.length === 0)\n return;\n const valid = gen.name(\"valid\");\n for (const prop of properties) {\n if (hasDefault(prop)) {\n applyPropertySchema(prop);\n }\n else {\n gen.if((0, code_1.propertyInData)(gen, data, prop, it.opts.ownProperties));\n applyPropertySchema(prop);\n if (!it.allErrors)\n gen.else().var(valid, true);\n gen.endIf();\n }\n cxt.it.definedProperties.add(prop);\n cxt.ok(valid);\n }\n function hasDefault(prop) {\n return it.opts.useDefaults && !it.compositeRule && schema[prop].default !== undefined;\n }\n function applyPropertySchema(prop) {\n cxt.subschema({\n keyword: \"properties\",\n schemaProp: prop,\n dataProp: prop,\n }, valid);\n }\n },\n};\nexports.default = def;\n//# sourceMappingURL=properties.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst error = {\n message: \"property name must be valid\",\n params: ({ params }) => (0, codegen_1._) `{propertyName: ${params.propertyName}}`,\n};\nconst def = {\n keyword: \"propertyNames\",\n type: \"object\",\n schemaType: [\"object\", \"boolean\"],\n error,\n code(cxt) {\n const { gen, schema, data, it } = cxt;\n if ((0, util_1.alwaysValidSchema)(it, schema))\n return;\n const valid = gen.name(\"valid\");\n gen.forIn(\"key\", data, (key) => {\n cxt.setParams({ propertyName: key });\n cxt.subschema({\n keyword: \"propertyNames\",\n data: key,\n dataTypes: [\"string\"],\n propertyName: key,\n compositeRule: true,\n }, valid);\n gen.if((0, codegen_1.not)(valid), () => {\n cxt.error(true);\n if (!it.allErrors)\n gen.break();\n });\n });\n cxt.ok(valid);\n },\n};\nexports.default = def;\n//# sourceMappingURL=propertyNames.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst util_1 = require(\"../../compile/util\");\nconst def = {\n keyword: [\"then\", \"else\"],\n schemaType: [\"object\", \"boolean\"],\n code({ keyword, parentSchema, it }) {\n if (parentSchema.if === undefined)\n (0, util_1.checkStrictMode)(it, `\"${keyword}\" without \"if\" is ignored`);\n },\n};\nexports.default = def;\n//# sourceMappingURL=thenElse.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.validateUnion = exports.validateArray = exports.usePattern = exports.callValidateCode = exports.schemaProperties = exports.allSchemaProperties = exports.noPropertyInData = exports.propertyInData = exports.isOwnProperty = exports.hasPropFunc = exports.reportMissingProp = exports.checkMissingProp = exports.checkReportMissingProp = void 0;\nconst codegen_1 = require(\"../compile/codegen\");\nconst util_1 = require(\"../compile/util\");\nconst names_1 = require(\"../compile/names\");\nconst util_2 = require(\"../compile/util\");\nfunction checkReportMissingProp(cxt, prop) {\n const { gen, data, it } = cxt;\n gen.if(noPropertyInData(gen, data, prop, it.opts.ownProperties), () => {\n cxt.setParams({ missingProperty: (0, codegen_1._) `${prop}` }, true);\n cxt.error();\n });\n}\nexports.checkReportMissingProp = checkReportMissingProp;\nfunction checkMissingProp({ gen, data, it: { opts } }, properties, missing) {\n return (0, codegen_1.or)(...properties.map((prop) => (0, codegen_1.and)(noPropertyInData(gen, data, prop, opts.ownProperties), (0, codegen_1._) `${missing} = ${prop}`)));\n}\nexports.checkMissingProp = 
checkMissingProp;\nfunction reportMissingProp(cxt, missing) {\n cxt.setParams({ missingProperty: missing }, true);\n cxt.error();\n}\nexports.reportMissingProp = reportMissingProp;\nfunction hasPropFunc(gen) {\n return gen.scopeValue(\"func\", {\n // eslint-disable-next-line @typescript-eslint/unbound-method\n ref: Object.prototype.hasOwnProperty,\n code: (0, codegen_1._) `Object.prototype.hasOwnProperty`,\n });\n}\nexports.hasPropFunc = hasPropFunc;\nfunction isOwnProperty(gen, data, property) {\n return (0, codegen_1._) `${hasPropFunc(gen)}.call(${data}, ${property})`;\n}\nexports.isOwnProperty = isOwnProperty;\nfunction propertyInData(gen, data, property, ownProperties) {\n const cond = (0, codegen_1._) `${data}${(0, codegen_1.getProperty)(property)} !== undefined`;\n return ownProperties ? (0, codegen_1._) `${cond} && ${isOwnProperty(gen, data, property)}` : cond;\n}\nexports.propertyInData = propertyInData;\nfunction noPropertyInData(gen, data, property, ownProperties) {\n const cond = (0, codegen_1._) `${data}${(0, codegen_1.getProperty)(property)} === undefined`;\n return ownProperties ? (0, codegen_1.or)(cond, (0, codegen_1.not)(isOwnProperty(gen, data, property))) : cond;\n}\nexports.noPropertyInData = noPropertyInData;\nfunction allSchemaProperties(schemaMap) {\n return schemaMap ? Object.keys(schemaMap).filter((p) => p !== \"__proto__\") : [];\n}\nexports.allSchemaProperties = allSchemaProperties;\nfunction schemaProperties(it, schemaMap) {\n return allSchemaProperties(schemaMap).filter((p) => !(0, util_1.alwaysValidSchema)(it, schemaMap[p]));\n}\nexports.schemaProperties = schemaProperties;\nfunction callValidateCode({ schemaCode, data, it: { gen, topSchemaRef, schemaPath, errorPath }, it }, func, context, passSchema) {\n const dataAndSchema = passSchema ? (0, codegen_1._) `${schemaCode}, ${data}, ${topSchemaRef}${schemaPath}` : data;\n const valCxt = [\n [names_1.default.instancePath, (0, codegen_1.strConcat)(names_1.default.instancePath, errorPath)],\n [names_1.default.parentData, it.parentData],\n [names_1.default.parentDataProperty, it.parentDataProperty],\n [names_1.default.rootData, names_1.default.rootData],\n ];\n if (it.opts.dynamicRef)\n valCxt.push([names_1.default.dynamicAnchors, names_1.default.dynamicAnchors]);\n const args = (0, codegen_1._) `${dataAndSchema}, ${gen.object(...valCxt)}`;\n return context !== codegen_1.nil ? (0, codegen_1._) `${func}.call(${context}, ${args})` : (0, codegen_1._) `${func}(${args})`;\n}\nexports.callValidateCode = callValidateCode;\nconst newRegExp = (0, codegen_1._) `new RegExp`;\nfunction usePattern({ gen, it: { opts } }, pattern) {\n const u = opts.unicodeRegExp ? \"u\" : \"\";\n const { regExp } = opts.code;\n const rx = regExp(pattern, u);\n return gen.scopeValue(\"pattern\", {\n key: rx.toString(),\n ref: rx,\n code: (0, codegen_1._) `${regExp.code === \"new RegExp\" ? 
newRegExp : (0, util_2.useFunc)(gen, regExp)}(${pattern}, ${u})`,\n });\n}\nexports.usePattern = usePattern;\nfunction validateArray(cxt) {\n const { gen, data, keyword, it } = cxt;\n const valid = gen.name(\"valid\");\n if (it.allErrors) {\n const validArr = gen.let(\"valid\", true);\n validateItems(() => gen.assign(validArr, false));\n return validArr;\n }\n gen.var(valid, true);\n validateItems(() => gen.break());\n return valid;\n function validateItems(notValid) {\n const len = gen.const(\"len\", (0, codegen_1._) `${data}.length`);\n gen.forRange(\"i\", 0, len, (i) => {\n cxt.subschema({\n keyword,\n dataProp: i,\n dataPropType: util_1.Type.Num,\n }, valid);\n gen.if((0, codegen_1.not)(valid), notValid);\n });\n }\n}\nexports.validateArray = validateArray;\nfunction validateUnion(cxt) {\n const { gen, schema, keyword, it } = cxt;\n /* istanbul ignore if */\n if (!Array.isArray(schema))\n throw new Error(\"ajv implementation error\");\n const alwaysValid = schema.some((sch) => (0, util_1.alwaysValidSchema)(it, sch));\n if (alwaysValid && !it.opts.unevaluated)\n return;\n const valid = gen.let(\"valid\", false);\n const schValid = gen.name(\"_valid\");\n gen.block(() => schema.forEach((_sch, i) => {\n const schCxt = cxt.subschema({\n keyword,\n schemaProp: i,\n compositeRule: true,\n }, schValid);\n gen.assign(valid, (0, codegen_1._) `${valid} || ${schValid}`);\n const merged = cxt.mergeValidEvaluated(schCxt, schValid);\n // can short-circuit if `unevaluatedProperties/Items` not supported (opts.unevaluated !== true)\n // or if all properties and items were evaluated (it.props === true && it.items === true)\n if (!merged)\n gen.if((0, codegen_1.not)(valid));\n }));\n cxt.result(valid, () => cxt.reset(), () => cxt.error(true));\n}\nexports.validateUnion = validateUnion;\n//# sourceMappingURL=code.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst def = {\n keyword: \"id\",\n code() {\n throw new Error('NOT SUPPORTED: keyword \"id\", use \"$id\" for schema ID');\n },\n};\nexports.default = def;\n//# sourceMappingURL=id.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst id_1 = require(\"./id\");\nconst ref_1 = require(\"./ref\");\nconst core = [\n \"$schema\",\n \"$id\",\n \"$defs\",\n \"$vocabulary\",\n { keyword: \"$comment\" },\n \"definitions\",\n id_1.default,\n ref_1.default,\n];\nexports.default = core;\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.callRef = exports.getValidate = void 0;\nconst ref_error_1 = require(\"../../compile/ref_error\");\nconst code_1 = require(\"../code\");\nconst codegen_1 = require(\"../../compile/codegen\");\nconst names_1 = require(\"../../compile/names\");\nconst compile_1 = require(\"../../compile\");\nconst util_1 = require(\"../../compile/util\");\nconst def = {\n keyword: \"$ref\",\n schemaType: \"string\",\n code(cxt) {\n const { gen, schema: $ref, it } = cxt;\n const { baseId, schemaEnv: env, validateName, opts, self } = it;\n const { root } = env;\n if (($ref === \"#\" || $ref === \"#/\") && baseId === root.baseId)\n return callRootRef();\n const schOrEnv = compile_1.resolveRef.call(self, root, baseId, $ref);\n if (schOrEnv === undefined)\n throw new ref_error_1.default(it.opts.uriResolver, baseId, $ref);\n if (schOrEnv instanceof compile_1.SchemaEnv)\n return callValidate(schOrEnv);\n return inlineRefSchema(schOrEnv);\n function callRootRef() {\n if (env === root)\n return 
callRef(cxt, validateName, env, env.$async);\n const rootName = gen.scopeValue(\"root\", { ref: root });\n return callRef(cxt, (0, codegen_1._) `${rootName}.validate`, root, root.$async);\n }\n function callValidate(sch) {\n const v = getValidate(cxt, sch);\n callRef(cxt, v, sch, sch.$async);\n }\n function inlineRefSchema(sch) {\n const schName = gen.scopeValue(\"schema\", opts.code.source === true ? { ref: sch, code: (0, codegen_1.stringify)(sch) } : { ref: sch });\n const valid = gen.name(\"valid\");\n const schCxt = cxt.subschema({\n schema: sch,\n dataTypes: [],\n schemaPath: codegen_1.nil,\n topSchemaRef: schName,\n errSchemaPath: $ref,\n }, valid);\n cxt.mergeEvaluated(schCxt);\n cxt.ok(valid);\n }\n },\n};\nfunction getValidate(cxt, sch) {\n const { gen } = cxt;\n return sch.validate\n ? gen.scopeValue(\"validate\", { ref: sch.validate })\n : (0, codegen_1._) `${gen.scopeValue(\"wrapper\", { ref: sch })}.validate`;\n}\nexports.getValidate = getValidate;\nfunction callRef(cxt, v, sch, $async) {\n const { gen, it } = cxt;\n const { allErrors, schemaEnv: env, opts } = it;\n const passCxt = opts.passContext ? names_1.default.this : codegen_1.nil;\n if ($async)\n callAsyncRef();\n else\n callSyncRef();\n function callAsyncRef() {\n if (!env.$async)\n throw new Error(\"async schema referenced by sync schema\");\n const valid = gen.let(\"valid\");\n gen.try(() => {\n gen.code((0, codegen_1._) `await ${(0, code_1.callValidateCode)(cxt, v, passCxt)}`);\n addEvaluatedFrom(v); // TODO will not work with async, it has to be returned with the result\n if (!allErrors)\n gen.assign(valid, true);\n }, (e) => {\n gen.if((0, codegen_1._) `!(${e} instanceof ${it.ValidationError})`, () => gen.throw(e));\n addErrorsFrom(e);\n if (!allErrors)\n gen.assign(valid, false);\n });\n cxt.ok(valid);\n }\n function callSyncRef() {\n cxt.result((0, code_1.callValidateCode)(cxt, v, passCxt), () => addEvaluatedFrom(v), () => addErrorsFrom(v));\n }\n function addErrorsFrom(source) {\n const errs = (0, codegen_1._) `${source}.errors`;\n gen.assign(names_1.default.vErrors, (0, codegen_1._) `${names_1.default.vErrors} === null ? ${errs} : ${names_1.default.vErrors}.concat(${errs})`); // TODO tagged\n gen.assign(names_1.default.errors, (0, codegen_1._) `${names_1.default.vErrors}.length`);\n }\n function addEvaluatedFrom(source) {\n var _a;\n if (!it.opts.unevaluated)\n return;\n const schEvaluated = (_a = sch === null || sch === void 0 ? void 0 : sch.validate) === null || _a === void 0 ? 
void 0 : _a.evaluated;\n // TODO refactor\n if (it.props !== true) {\n if (schEvaluated && !schEvaluated.dynamicProps) {\n if (schEvaluated.props !== undefined) {\n it.props = util_1.mergeEvaluated.props(gen, schEvaluated.props, it.props);\n }\n }\n else {\n const props = gen.var(\"props\", (0, codegen_1._) `${source}.evaluated.props`);\n it.props = util_1.mergeEvaluated.props(gen, props, it.props, codegen_1.Name);\n }\n }\n if (it.items !== true) {\n if (schEvaluated && !schEvaluated.dynamicItems) {\n if (schEvaluated.items !== undefined) {\n it.items = util_1.mergeEvaluated.items(gen, schEvaluated.items, it.items);\n }\n }\n else {\n const items = gen.var(\"items\", (0, codegen_1._) `${source}.evaluated.items`);\n it.items = util_1.mergeEvaluated.items(gen, items, it.items, codegen_1.Name);\n }\n }\n }\n}\nexports.callRef = callRef;\nexports.default = def;\n//# sourceMappingURL=ref.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"../../compile/codegen\");\nconst types_1 = require(\"../discriminator/types\");\nconst compile_1 = require(\"../../compile\");\nconst util_1 = require(\"../../compile/util\");\nconst error = {\n message: ({ params: { discrError, tagName } }) => discrError === types_1.DiscrError.Tag\n ? `tag \"${tagName}\" must be string`\n : `value of tag \"${tagName}\" must be in oneOf`,\n params: ({ params: { discrError, tag, tagName } }) => (0, codegen_1._) `{error: ${discrError}, tag: ${tagName}, tagValue: ${tag}}`,\n};\nconst def = {\n keyword: \"discriminator\",\n type: \"object\",\n schemaType: \"object\",\n error,\n code(cxt) {\n const { gen, data, schema, parentSchema, it } = cxt;\n const { oneOf } = parentSchema;\n if (!it.opts.discriminator) {\n throw new Error(\"discriminator: requires discriminator option\");\n }\n const tagName = schema.propertyName;\n if (typeof tagName != \"string\")\n throw new Error(\"discriminator: requires propertyName\");\n if (schema.mapping)\n throw new Error(\"discriminator: mapping is not supported\");\n if (!oneOf)\n throw new Error(\"discriminator: requires oneOf keyword\");\n const valid = gen.let(\"valid\", false);\n const tag = gen.const(\"tag\", (0, codegen_1._) `${data}${(0, codegen_1.getProperty)(tagName)}`);\n gen.if((0, codegen_1._) `typeof ${tag} == \"string\"`, () => validateMapping(), () => cxt.error(false, { discrError: types_1.DiscrError.Tag, tag, tagName }));\n cxt.ok(valid);\n function validateMapping() {\n const mapping = getMapping();\n gen.if(false);\n for (const tagValue in mapping) {\n gen.elseIf((0, codegen_1._) `${tag} === ${tagValue}`);\n gen.assign(valid, applyTagSchema(mapping[tagValue]));\n }\n gen.else();\n cxt.error(false, { discrError: types_1.DiscrError.Mapping, tag, tagName });\n gen.endIf();\n }\n function applyTagSchema(schemaProp) {\n const _valid = gen.name(\"valid\");\n const schCxt = cxt.subschema({ keyword: \"oneOf\", schemaProp }, _valid);\n cxt.mergeEvaluated(schCxt, codegen_1.Name);\n return _valid;\n }\n function getMapping() {\n var _a;\n const oneOfMapping = {};\n const topRequired = hasRequired(parentSchema);\n let tagRequired = true;\n for (let i = 0; i < oneOf.length; i++) {\n let sch = oneOf[i];\n if ((sch === null || sch === void 0 ? void 0 : sch.$ref) && !(0, util_1.schemaHasRulesButRef)(sch, it.self.RULES)) {\n sch = compile_1.resolveRef.call(it.self, it.schemaEnv.root, it.baseId, sch === null || sch === void 0 ? 
void 0 : sch.$ref);\n if (sch instanceof compile_1.SchemaEnv)\n sch = sch.schema;\n }\n const propSch = (_a = sch === null || sch === void 0 ? void 0 : sch.properties) === null || _a === void 0 ? void 0 : _a[tagName];\n if (typeof propSch != \"object\") {\n throw new Error(`discriminator: oneOf subschemas (or referenced schemas) must have \"properties/${tagName}\"`);\n }\n tagRequired = tagRequired && (topRequired || hasRequired(sch));\n addMappings(propSch, i);\n }\n if (!tagRequired)\n throw new Error(`discriminator: \"${tagName}\" must be required`);\n return oneOfMapping;\n function hasRequired({ required }) {\n return Array.isArray(required) && required.includes(tagName);\n }\n function addMappings(sch, i) {\n if (sch.const) {\n addMapping(sch.const, i);\n }\n else if (sch.enum) {\n for (const tagValue of sch.enum) {\n addMapping(tagValue, i);\n }\n }\n else {\n throw new Error(`discriminator: \"properties/${tagName}\" must have \"const\" or \"enum\"`);\n }\n }\n function addMapping(tagValue, i) {\n if (typeof tagValue != \"string\" || tagValue in oneOfMapping) {\n throw new Error(`discriminator: \"${tagName}\" values must be unique strings`);\n }\n oneOfMapping[tagValue] = i;\n }\n }\n },\n};\nexports.default = def;\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.DiscrError = void 0;\nvar DiscrError;\n(function (DiscrError) {\n DiscrError[\"Tag\"] = \"tag\";\n DiscrError[\"Mapping\"] = \"mapping\";\n})(DiscrError = exports.DiscrError || (exports.DiscrError = {}));\n//# sourceMappingURL=types.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst core_1 = require(\"./core\");\nconst validation_1 = require(\"./validation\");\nconst applicator_1 = require(\"./applicator\");\nconst format_1 = require(\"./format\");\nconst metadata_1 = require(\"./metadata\");\nconst draft7Vocabularies = [\n core_1.default,\n validation_1.default,\n (0, applicator_1.default)(),\n format_1.default,\n metadata_1.metadataVocabulary,\n metadata_1.contentVocabulary,\n];\nexports.default = draft7Vocabularies;\n//# sourceMappingURL=draft7.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"../../compile/codegen\");\nconst error = {\n message: ({ schemaCode }) => (0, codegen_1.str) `must match format \"${schemaCode}\"`,\n params: ({ schemaCode }) => (0, codegen_1._) `{format: ${schemaCode}}`,\n};\nconst def = {\n keyword: \"format\",\n type: [\"number\", \"string\"],\n schemaType: \"string\",\n $data: true,\n error,\n code(cxt, ruleType) {\n const { gen, data, $data, schema, schemaCode, it } = cxt;\n const { opts, errSchemaPath, schemaEnv, self } = it;\n if (!opts.validateFormats)\n return;\n if ($data)\n validate$DataFormat();\n else\n validateFormat();\n function validate$DataFormat() {\n const fmts = gen.scopeValue(\"formats\", {\n ref: self.formats,\n code: opts.code.formats,\n });\n const fDef = gen.const(\"fDef\", (0, codegen_1._) `${fmts}[${schemaCode}]`);\n const fType = gen.let(\"fType\");\n const format = gen.let(\"format\");\n // TODO simplify\n gen.if((0, codegen_1._) `typeof ${fDef} == \"object\" && !(${fDef} instanceof RegExp)`, () => gen.assign(fType, (0, codegen_1._) `${fDef}.type || \"string\"`).assign(format, (0, codegen_1._) `${fDef}.validate`), () => gen.assign(fType, (0, codegen_1._) `\"string\"`).assign(format, fDef));\n cxt.fail$data((0, codegen_1.or)(unknownFmt(), invalidFmt()));\n function 
unknownFmt() {\n if (opts.strictSchema === false)\n return codegen_1.nil;\n return (0, codegen_1._) `${schemaCode} && !${format}`;\n }\n function invalidFmt() {\n const callFormat = schemaEnv.$async\n ? (0, codegen_1._) `(${fDef}.async ? await ${format}(${data}) : ${format}(${data}))`\n : (0, codegen_1._) `${format}(${data})`;\n const validData = (0, codegen_1._) `(typeof ${format} == \"function\" ? ${callFormat} : ${format}.test(${data}))`;\n return (0, codegen_1._) `${format} && ${format} !== true && ${fType} === ${ruleType} && !${validData}`;\n }\n }\n function validateFormat() {\n const formatDef = self.formats[schema];\n if (!formatDef) {\n unknownFormat();\n return;\n }\n if (formatDef === true)\n return;\n const [fmtType, format, fmtRef] = getFormat(formatDef);\n if (fmtType === ruleType)\n cxt.pass(validCondition());\n function unknownFormat() {\n if (opts.strictSchema === false) {\n self.logger.warn(unknownMsg());\n return;\n }\n throw new Error(unknownMsg());\n function unknownMsg() {\n return `unknown format \"${schema}\" ignored in schema at path \"${errSchemaPath}\"`;\n }\n }\n function getFormat(fmtDef) {\n const code = fmtDef instanceof RegExp\n ? (0, codegen_1.regexpCode)(fmtDef)\n : opts.code.formats\n ? (0, codegen_1._) `${opts.code.formats}${(0, codegen_1.getProperty)(schema)}`\n : undefined;\n const fmt = gen.scopeValue(\"formats\", { key: schema, ref: fmtDef, code });\n if (typeof fmtDef == \"object\" && !(fmtDef instanceof RegExp)) {\n return [fmtDef.type || \"string\", fmtDef.validate, (0, codegen_1._) `${fmt}.validate`];\n }\n return [\"string\", fmtDef, fmt];\n }\n function validCondition() {\n if (typeof formatDef == \"object\" && !(formatDef instanceof RegExp) && formatDef.async) {\n if (!schemaEnv.$async)\n throw new Error(\"async format in sync schema\");\n return (0, codegen_1._) `await ${fmtRef}(${data})`;\n }\n return typeof format == \"function\" ? 
(0, codegen_1._) `${fmtRef}(${data})` : (0, codegen_1._) `${fmtRef}.test(${data})`;\n }\n }\n },\n};\nexports.default = def;\n//# sourceMappingURL=format.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst format_1 = require(\"./format\");\nconst format = [format_1.default];\nexports.default = format;\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.contentVocabulary = exports.metadataVocabulary = void 0;\nexports.metadataVocabulary = [\n \"title\",\n \"description\",\n \"default\",\n \"deprecated\",\n \"readOnly\",\n \"writeOnly\",\n \"examples\",\n];\nexports.contentVocabulary = [\n \"contentMediaType\",\n \"contentEncoding\",\n \"contentSchema\",\n];\n//# sourceMappingURL=metadata.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst equal_1 = require(\"../../runtime/equal\");\nconst error = {\n message: \"must be equal to constant\",\n params: ({ schemaCode }) => (0, codegen_1._) `{allowedValue: ${schemaCode}}`,\n};\nconst def = {\n keyword: \"const\",\n $data: true,\n error,\n code(cxt) {\n const { gen, data, $data, schemaCode, schema } = cxt;\n if ($data || (schema && typeof schema == \"object\")) {\n cxt.fail$data((0, codegen_1._) `!${(0, util_1.useFunc)(gen, equal_1.default)}(${data}, ${schemaCode})`);\n }\n else {\n cxt.fail((0, codegen_1._) `${schema} !== ${data}`);\n }\n },\n};\nexports.default = def;\n//# sourceMappingURL=const.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst equal_1 = require(\"../../runtime/equal\");\nconst error = {\n message: \"must be equal to one of the allowed values\",\n params: ({ schemaCode }) => (0, codegen_1._) `{allowedValues: ${schemaCode}}`,\n};\nconst def = {\n keyword: \"enum\",\n schemaType: \"array\",\n $data: true,\n error,\n code(cxt) {\n const { gen, data, $data, schema, schemaCode, it } = cxt;\n if (!$data && schema.length === 0)\n throw new Error(\"enum must have non-empty array\");\n const useLoop = schema.length >= it.opts.loopEnum;\n let eql;\n const getEql = () => (eql !== null && eql !== void 0 ? eql : (eql = (0, util_1.useFunc)(gen, equal_1.default)));\n let valid;\n if (useLoop || $data) {\n valid = gen.let(\"valid\");\n cxt.block$data(valid, loopEnum);\n }\n else {\n /* istanbul ignore if */\n if (!Array.isArray(schema))\n throw new Error(\"ajv implementation error\");\n const vSchema = gen.const(\"vSchema\", schemaCode);\n valid = (0, codegen_1.or)(...schema.map((_x, i) => equalCode(vSchema, i)));\n }\n cxt.pass(valid);\n function loopEnum() {\n gen.assign(valid, false);\n gen.forOf(\"v\", schemaCode, (v) => gen.if((0, codegen_1._) `${getEql()}(${data}, ${v})`, () => gen.assign(valid, true).break()));\n }\n function equalCode(vSchema, i) {\n const sch = schema[i];\n return typeof sch === \"object\" && sch !== null\n ? 
(0, codegen_1._) `${getEql()}(${data}, ${vSchema}[${i}])`\n : (0, codegen_1._) `${data} === ${sch}`;\n }\n },\n};\nexports.default = def;\n//# sourceMappingURL=enum.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst limitNumber_1 = require(\"./limitNumber\");\nconst multipleOf_1 = require(\"./multipleOf\");\nconst limitLength_1 = require(\"./limitLength\");\nconst pattern_1 = require(\"./pattern\");\nconst limitProperties_1 = require(\"./limitProperties\");\nconst required_1 = require(\"./required\");\nconst limitItems_1 = require(\"./limitItems\");\nconst uniqueItems_1 = require(\"./uniqueItems\");\nconst const_1 = require(\"./const\");\nconst enum_1 = require(\"./enum\");\nconst validation = [\n // number\n limitNumber_1.default,\n multipleOf_1.default,\n // string\n limitLength_1.default,\n pattern_1.default,\n // object\n limitProperties_1.default,\n required_1.default,\n // array\n limitItems_1.default,\n uniqueItems_1.default,\n // any\n { keyword: \"type\", schemaType: [\"string\", \"array\"] },\n { keyword: \"nullable\", schemaType: \"boolean\" },\n const_1.default,\n enum_1.default,\n];\nexports.default = validation;\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"../../compile/codegen\");\nconst error = {\n message({ keyword, schemaCode }) {\n const comp = keyword === \"maxItems\" ? \"more\" : \"fewer\";\n return (0, codegen_1.str) `must NOT have ${comp} than ${schemaCode} items`;\n },\n params: ({ schemaCode }) => (0, codegen_1._) `{limit: ${schemaCode}}`,\n};\nconst def = {\n keyword: [\"maxItems\", \"minItems\"],\n type: \"array\",\n schemaType: \"number\",\n $data: true,\n error,\n code(cxt) {\n const { keyword, data, schemaCode } = cxt;\n const op = keyword === \"maxItems\" ? codegen_1.operators.GT : codegen_1.operators.LT;\n cxt.fail$data((0, codegen_1._) `${data}.length ${op} ${schemaCode}`);\n },\n};\nexports.default = def;\n//# sourceMappingURL=limitItems.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst ucs2length_1 = require(\"../../runtime/ucs2length\");\nconst error = {\n message({ keyword, schemaCode }) {\n const comp = keyword === \"maxLength\" ? \"more\" : \"fewer\";\n return (0, codegen_1.str) `must NOT have ${comp} than ${schemaCode} characters`;\n },\n params: ({ schemaCode }) => (0, codegen_1._) `{limit: ${schemaCode}}`,\n};\nconst def = {\n keyword: [\"maxLength\", \"minLength\"],\n type: \"string\",\n schemaType: \"number\",\n $data: true,\n error,\n code(cxt) {\n const { keyword, data, schemaCode, it } = cxt;\n const op = keyword === \"maxLength\" ? codegen_1.operators.GT : codegen_1.operators.LT;\n const len = it.opts.unicode === false ? 
(0, codegen_1._) `${data}.length` : (0, codegen_1._) `${(0, util_1.useFunc)(cxt.gen, ucs2length_1.default)}(${data})`;\n cxt.fail$data((0, codegen_1._) `${len} ${op} ${schemaCode}`);\n },\n};\nexports.default = def;\n//# sourceMappingURL=limitLength.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"../../compile/codegen\");\nconst ops = codegen_1.operators;\nconst KWDs = {\n maximum: { okStr: \"<=\", ok: ops.LTE, fail: ops.GT },\n minimum: { okStr: \">=\", ok: ops.GTE, fail: ops.LT },\n exclusiveMaximum: { okStr: \"<\", ok: ops.LT, fail: ops.GTE },\n exclusiveMinimum: { okStr: \">\", ok: ops.GT, fail: ops.LTE },\n};\nconst error = {\n message: ({ keyword, schemaCode }) => (0, codegen_1.str) `must be ${KWDs[keyword].okStr} ${schemaCode}`,\n params: ({ keyword, schemaCode }) => (0, codegen_1._) `{comparison: ${KWDs[keyword].okStr}, limit: ${schemaCode}}`,\n};\nconst def = {\n keyword: Object.keys(KWDs),\n type: \"number\",\n schemaType: \"number\",\n $data: true,\n error,\n code(cxt) {\n const { keyword, data, schemaCode } = cxt;\n cxt.fail$data((0, codegen_1._) `${data} ${KWDs[keyword].fail} ${schemaCode} || isNaN(${data})`);\n },\n};\nexports.default = def;\n//# sourceMappingURL=limitNumber.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"../../compile/codegen\");\nconst error = {\n message({ keyword, schemaCode }) {\n const comp = keyword === \"maxProperties\" ? \"more\" : \"fewer\";\n return (0, codegen_1.str) `must NOT have ${comp} than ${schemaCode} properties`;\n },\n params: ({ schemaCode }) => (0, codegen_1._) `{limit: ${schemaCode}}`,\n};\nconst def = {\n keyword: [\"maxProperties\", \"minProperties\"],\n type: \"object\",\n schemaType: \"number\",\n $data: true,\n error,\n code(cxt) {\n const { keyword, data, schemaCode } = cxt;\n const op = keyword === \"maxProperties\" ? codegen_1.operators.GT : codegen_1.operators.LT;\n cxt.fail$data((0, codegen_1._) `Object.keys(${data}).length ${op} ${schemaCode}`);\n },\n};\nexports.default = def;\n//# sourceMappingURL=limitProperties.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"../../compile/codegen\");\nconst error = {\n message: ({ schemaCode }) => (0, codegen_1.str) `must be multiple of ${schemaCode}`,\n params: ({ schemaCode }) => (0, codegen_1._) `{multipleOf: ${schemaCode}}`,\n};\nconst def = {\n keyword: \"multipleOf\",\n type: \"number\",\n schemaType: \"number\",\n $data: true,\n error,\n code(cxt) {\n const { gen, data, schemaCode, it } = cxt;\n // const bdt = bad$DataType(schemaCode, def.schemaType, $data)\n const prec = it.opts.multipleOfPrecision;\n const res = gen.let(\"res\");\n const invalid = prec\n ? 
(0, codegen_1._) `Math.abs(Math.round(${res}) - ${res}) > 1e-${prec}`\n : (0, codegen_1._) `${res} !== parseInt(${res})`;\n cxt.fail$data((0, codegen_1._) `(${schemaCode} === 0 || (${res} = ${data}/${schemaCode}, ${invalid}))`);\n },\n};\nexports.default = def;\n//# sourceMappingURL=multipleOf.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst code_1 = require(\"../code\");\nconst codegen_1 = require(\"../../compile/codegen\");\nconst error = {\n message: ({ schemaCode }) => (0, codegen_1.str) `must match pattern \"${schemaCode}\"`,\n params: ({ schemaCode }) => (0, codegen_1._) `{pattern: ${schemaCode}}`,\n};\nconst def = {\n keyword: \"pattern\",\n type: \"string\",\n schemaType: \"string\",\n $data: true,\n error,\n code(cxt) {\n const { data, $data, schema, schemaCode, it } = cxt;\n // TODO regexp should be wrapped in try/catchs\n const u = it.opts.unicodeRegExp ? \"u\" : \"\";\n const regExp = $data ? (0, codegen_1._) `(new RegExp(${schemaCode}, ${u}))` : (0, code_1.usePattern)(cxt, schema);\n cxt.fail$data((0, codegen_1._) `!${regExp}.test(${data})`);\n },\n};\nexports.default = def;\n//# sourceMappingURL=pattern.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst code_1 = require(\"../code\");\nconst codegen_1 = require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst error = {\n message: ({ params: { missingProperty } }) => (0, codegen_1.str) `must have required property '${missingProperty}'`,\n params: ({ params: { missingProperty } }) => (0, codegen_1._) `{missingProperty: ${missingProperty}}`,\n};\nconst def = {\n keyword: \"required\",\n type: \"object\",\n schemaType: \"array\",\n $data: true,\n error,\n code(cxt) {\n const { gen, schema, schemaCode, data, $data, it } = cxt;\n const { opts } = it;\n if (!$data && schema.length === 0)\n return;\n const useLoop = schema.length >= opts.loopRequired;\n if (it.allErrors)\n allErrorsMode();\n else\n exitOnErrorMode();\n if (opts.strictRequired) {\n const props = cxt.parentSchema.properties;\n const { definedProperties } = cxt.it;\n for (const requiredKey of schema) {\n if ((props === null || props === void 0 ? 
void 0 : props[requiredKey]) === undefined && !definedProperties.has(requiredKey)) {\n const schemaPath = it.schemaEnv.baseId + it.errSchemaPath;\n const msg = `required property \"${requiredKey}\" is not defined at \"${schemaPath}\" (strictRequired)`;\n (0, util_1.checkStrictMode)(it, msg, it.opts.strictRequired);\n }\n }\n }\n function allErrorsMode() {\n if (useLoop || $data) {\n cxt.block$data(codegen_1.nil, loopAllRequired);\n }\n else {\n for (const prop of schema) {\n (0, code_1.checkReportMissingProp)(cxt, prop);\n }\n }\n }\n function exitOnErrorMode() {\n const missing = gen.let(\"missing\");\n if (useLoop || $data) {\n const valid = gen.let(\"valid\", true);\n cxt.block$data(valid, () => loopUntilMissing(missing, valid));\n cxt.ok(valid);\n }\n else {\n gen.if((0, code_1.checkMissingProp)(cxt, schema, missing));\n (0, code_1.reportMissingProp)(cxt, missing);\n gen.else();\n }\n }\n function loopAllRequired() {\n gen.forOf(\"prop\", schemaCode, (prop) => {\n cxt.setParams({ missingProperty: prop });\n gen.if((0, code_1.noPropertyInData)(gen, data, prop, opts.ownProperties), () => cxt.error());\n });\n }\n function loopUntilMissing(missing, valid) {\n cxt.setParams({ missingProperty: missing });\n gen.forOf(missing, schemaCode, () => {\n gen.assign(valid, (0, code_1.propertyInData)(gen, data, missing, opts.ownProperties));\n gen.if((0, codegen_1.not)(valid), () => {\n cxt.error();\n gen.break();\n });\n }, codegen_1.nil);\n }\n },\n};\nexports.default = def;\n//# sourceMappingURL=required.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst dataType_1 = require(\"../../compile/validate/dataType\");\nconst codegen_1 = require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst equal_1 = require(\"../../runtime/equal\");\nconst error = {\n message: ({ params: { i, j } }) => (0, codegen_1.str) `must NOT have duplicate items (items ## ${j} and ${i} are identical)`,\n params: ({ params: { i, j } }) => (0, codegen_1._) `{i: ${i}, j: ${j}}`,\n};\nconst def = {\n keyword: \"uniqueItems\",\n type: \"array\",\n schemaType: \"boolean\",\n $data: true,\n error,\n code(cxt) {\n const { gen, data, $data, schema, parentSchema, schemaCode, it } = cxt;\n if (!$data && !schema)\n return;\n const valid = gen.let(\"valid\");\n const itemTypes = parentSchema.items ? (0, dataType_1.getSchemaTypes)(parentSchema.items) : [];\n cxt.block$data(valid, validateUniqueItems, (0, codegen_1._) `${schemaCode} === false`);\n cxt.ok(valid);\n function validateUniqueItems() {\n const i = gen.let(\"i\", (0, codegen_1._) `${data}.length`);\n const j = gen.let(\"j\");\n cxt.setParams({ i, j });\n gen.assign(valid, true);\n gen.if((0, codegen_1._) `${i} > 1`, () => (canOptimize() ? 
loopN : loopN2)(i, j));\n }\n function canOptimize() {\n return itemTypes.length > 0 && !itemTypes.some((t) => t === \"object\" || t === \"array\");\n }\n function loopN(i, j) {\n const item = gen.name(\"item\");\n const wrongType = (0, dataType_1.checkDataTypes)(itemTypes, item, it.opts.strictNumbers, dataType_1.DataType.Wrong);\n const indices = gen.const(\"indices\", (0, codegen_1._) `{}`);\n gen.for((0, codegen_1._) `;${i}--;`, () => {\n gen.let(item, (0, codegen_1._) `${data}[${i}]`);\n gen.if(wrongType, (0, codegen_1._) `continue`);\n if (itemTypes.length > 1)\n gen.if((0, codegen_1._) `typeof ${item} == \"string\"`, (0, codegen_1._) `${item} += \"_\"`);\n gen\n .if((0, codegen_1._) `typeof ${indices}[${item}] == \"number\"`, () => {\n gen.assign(j, (0, codegen_1._) `${indices}[${item}]`);\n cxt.error();\n gen.assign(valid, false).break();\n })\n .code((0, codegen_1._) `${indices}[${item}] = ${i}`);\n });\n }\n function loopN2(i, j) {\n const eql = (0, util_1.useFunc)(gen, equal_1.default);\n const outer = gen.name(\"outer\");\n gen.label(outer).for((0, codegen_1._) `;${i}--;`, () => gen.for((0, codegen_1._) `${j} = ${i}; ${j}--;`, () => gen.if((0, codegen_1._) `${eql}(${data}[${i}], ${data}[${j}])`, () => {\n cxt.error();\n gen.assign(valid, false).break(outer);\n })));\n }\n },\n};\nexports.default = def;\n//# sourceMappingURL=uniqueItems.js.map","var register = require('./lib/register')\nvar addHook = require('./lib/add')\nvar removeHook = require('./lib/remove')\n\n// bind with array of arguments: https://stackoverflow.com/a/21792913\nvar bind = Function.bind\nvar bindable = bind.bind(bind)\n\nfunction bindApi (hook, state, name) {\n var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state])\n hook.api = { remove: removeHookRef }\n hook.remove = removeHookRef\n\n ;['before', 'error', 'after', 'wrap'].forEach(function (kind) {\n var args = name ? [state, kind, name] : [state, kind]\n hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args)\n })\n}\n\nfunction HookSingular () {\n var singularHookName = 'h'\n var singularHookState = {\n registry: {}\n }\n var singularHook = register.bind(null, singularHookState, singularHookName)\n bindApi(singularHook, singularHookState, singularHookName)\n return singularHook\n}\n\nfunction HookCollection () {\n var state = {\n registry: {}\n }\n\n var hook = register.bind(null, state)\n bindApi(hook, state)\n\n return hook\n}\n\nvar collectionHookDeprecationMessageDisplayed = false\nfunction Hook () {\n if (!collectionHookDeprecationMessageDisplayed) {\n console.warn('[before-after-hook]: \"Hook()\" repurposing warning, use \"Hook.Collection()\". 
Read more: https://git.io/upgrade-before-after-hook-to-1.4')\n collectionHookDeprecationMessageDisplayed = true\n }\n return HookCollection()\n}\n\nHook.Singular = HookSingular.bind()\nHook.Collection = HookCollection.bind()\n\nmodule.exports = Hook\n// expose constructors as a named property for TypeScript\nmodule.exports.Hook = Hook\nmodule.exports.Singular = Hook.Singular\nmodule.exports.Collection = Hook.Collection\n","module.exports = addHook;\n\nfunction addHook(state, kind, name, hook) {\n var orig = hook;\n if (!state.registry[name]) {\n state.registry[name] = [];\n }\n\n if (kind === \"before\") {\n hook = function (method, options) {\n return Promise.resolve()\n .then(orig.bind(null, options))\n .then(method.bind(null, options));\n };\n }\n\n if (kind === \"after\") {\n hook = function (method, options) {\n var result;\n return Promise.resolve()\n .then(method.bind(null, options))\n .then(function (result_) {\n result = result_;\n return orig(result, options);\n })\n .then(function () {\n return result;\n });\n };\n }\n\n if (kind === \"error\") {\n hook = function (method, options) {\n return Promise.resolve()\n .then(method.bind(null, options))\n .catch(function (error) {\n return orig(error, options);\n });\n };\n }\n\n state.registry[name].push({\n hook: hook,\n orig: orig,\n });\n}\n","module.exports = register;\n\nfunction register(state, name, method, options) {\n if (typeof method !== \"function\") {\n throw new Error(\"method for before hook must be a function\");\n }\n\n if (!options) {\n options = {};\n }\n\n if (Array.isArray(name)) {\n return name.reverse().reduce(function (callback, name) {\n return register.bind(null, state, name, callback, options);\n }, method)();\n }\n\n return Promise.resolve().then(function () {\n if (!state.registry[name]) {\n return method(options);\n }\n\n return state.registry[name].reduce(function (method, registered) {\n return registered.hook.bind(null, method, options);\n }, method)();\n });\n}\n","module.exports = removeHook;\n\nfunction removeHook(state, name, method) {\n if (!state.registry[name]) {\n return;\n }\n\n var index = state.registry[name]\n .map(function (registered) {\n return registered.orig;\n })\n .indexOf(method);\n\n if (index === -1) {\n return;\n }\n\n state.registry[name].splice(index, 1);\n}\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nclass Deprecation extends Error {\n constructor(message) {\n super(message); // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n\n this.name = 'Deprecation';\n }\n\n}\n\nexports.Deprecation = Deprecation;\n","'use strict';\n\n// do not edit .js files directly - edit src/index.jst\n\n\n\nmodule.exports = function equal(a, b) {\n if (a === b) return true;\n\n if (a && b && typeof a == 'object' && typeof b == 'object') {\n if (a.constructor !== b.constructor) return false;\n\n var length, i, keys;\n if (Array.isArray(a)) {\n length = a.length;\n if (length != b.length) return false;\n for (i = length; i-- !== 0;)\n if (!equal(a[i], b[i])) return false;\n return true;\n }\n\n\n\n if (a.constructor === RegExp) return a.source === b.source && a.flags === b.flags;\n if (a.valueOf !== Object.prototype.valueOf) return a.valueOf() === b.valueOf();\n if (a.toString !== Object.prototype.toString) return a.toString() === b.toString();\n\n keys = Object.keys(a);\n length = keys.length;\n if (length !== Object.keys(b).length) 
return false;\n\n for (i = length; i-- !== 0;)\n if (!Object.prototype.hasOwnProperty.call(b, keys[i])) return false;\n\n for (i = length; i-- !== 0;) {\n var key = keys[i];\n\n if (!equal(a[key], b[key])) return false;\n }\n\n return true;\n }\n\n // true if both NaN, false otherwise\n return a!==a && b!==b;\n};\n","'use strict'\n\nconst fs = require('graceful-fs')\nconst path = require('path')\nconst mkdirsSync = require('../mkdirs').mkdirsSync\nconst utimesMillisSync = require('../util/utimes').utimesMillisSync\nconst stat = require('../util/stat')\n\nfunction copySync (src, dest, opts) {\n if (typeof opts === 'function') {\n opts = { filter: opts }\n }\n\n opts = opts || {}\n opts.clobber = 'clobber' in opts ? !!opts.clobber : true // default to true for now\n opts.overwrite = 'overwrite' in opts ? !!opts.overwrite : opts.clobber // overwrite falls back to clobber\n\n // Warn about using preserveTimestamps on 32-bit node\n if (opts.preserveTimestamps && process.arch === 'ia32') {\n process.emitWarning(\n 'Using the preserveTimestamps option in 32-bit node is not recommended;\\n\\n' +\n '\\tsee https://github.com/jprichardson/node-fs-extra/issues/269',\n 'Warning', 'fs-extra-WARN0002'\n )\n }\n\n const { srcStat, destStat } = stat.checkPathsSync(src, dest, 'copy', opts)\n stat.checkParentPathsSync(src, srcStat, dest, 'copy')\n return handleFilterAndCopy(destStat, src, dest, opts)\n}\n\nfunction handleFilterAndCopy (destStat, src, dest, opts) {\n if (opts.filter && !opts.filter(src, dest)) return\n const destParent = path.dirname(dest)\n if (!fs.existsSync(destParent)) mkdirsSync(destParent)\n return getStats(destStat, src, dest, opts)\n}\n\nfunction startCopy (destStat, src, dest, opts) {\n if (opts.filter && !opts.filter(src, dest)) return\n return getStats(destStat, src, dest, opts)\n}\n\nfunction getStats (destStat, src, dest, opts) {\n const statSync = opts.dereference ? 
fs.statSync : fs.lstatSync\n const srcStat = statSync(src)\n\n if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts)\n else if (srcStat.isFile() ||\n srcStat.isCharacterDevice() ||\n srcStat.isBlockDevice()) return onFile(srcStat, destStat, src, dest, opts)\n else if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts)\n else if (srcStat.isSocket()) throw new Error(`Cannot copy a socket file: ${src}`)\n else if (srcStat.isFIFO()) throw new Error(`Cannot copy a FIFO pipe: ${src}`)\n throw new Error(`Unknown file: ${src}`)\n}\n\nfunction onFile (srcStat, destStat, src, dest, opts) {\n if (!destStat) return copyFile(srcStat, src, dest, opts)\n return mayCopyFile(srcStat, src, dest, opts)\n}\n\nfunction mayCopyFile (srcStat, src, dest, opts) {\n if (opts.overwrite) {\n fs.unlinkSync(dest)\n return copyFile(srcStat, src, dest, opts)\n } else if (opts.errorOnExist) {\n throw new Error(`'${dest}' already exists`)\n }\n}\n\nfunction copyFile (srcStat, src, dest, opts) {\n fs.copyFileSync(src, dest)\n if (opts.preserveTimestamps) handleTimestamps(srcStat.mode, src, dest)\n return setDestMode(dest, srcStat.mode)\n}\n\nfunction handleTimestamps (srcMode, src, dest) {\n // Make sure the file is writable before setting the timestamp\n // otherwise open fails with EPERM when invoked with 'r+'\n // (through utimes call)\n if (fileIsNotWritable(srcMode)) makeFileWritable(dest, srcMode)\n return setDestTimestamps(src, dest)\n}\n\nfunction fileIsNotWritable (srcMode) {\n return (srcMode & 0o200) === 0\n}\n\nfunction makeFileWritable (dest, srcMode) {\n return setDestMode(dest, srcMode | 0o200)\n}\n\nfunction setDestMode (dest, srcMode) {\n return fs.chmodSync(dest, srcMode)\n}\n\nfunction setDestTimestamps (src, dest) {\n // The initial srcStat.atime cannot be trusted\n // because it is modified by the read(2) system call\n // (See https://nodejs.org/api/fs.html#fs_stat_time_values)\n const updatedSrcStat = fs.statSync(src)\n return utimesMillisSync(dest, updatedSrcStat.atime, updatedSrcStat.mtime)\n}\n\nfunction onDir (srcStat, destStat, src, dest, opts) {\n if (!destStat) return mkDirAndCopy(srcStat.mode, src, dest, opts)\n return copyDir(src, dest, opts)\n}\n\nfunction mkDirAndCopy (srcMode, src, dest, opts) {\n fs.mkdirSync(dest)\n copyDir(src, dest, opts)\n return setDestMode(dest, srcMode)\n}\n\nfunction copyDir (src, dest, opts) {\n fs.readdirSync(src).forEach(item => copyDirItem(item, src, dest, opts))\n}\n\nfunction copyDirItem (item, src, dest, opts) {\n const srcItem = path.join(src, item)\n const destItem = path.join(dest, item)\n const { destStat } = stat.checkPathsSync(srcItem, destItem, 'copy', opts)\n return startCopy(destStat, srcItem, destItem, opts)\n}\n\nfunction onLink (destStat, src, dest, opts) {\n let resolvedSrc = fs.readlinkSync(src)\n if (opts.dereference) {\n resolvedSrc = path.resolve(process.cwd(), resolvedSrc)\n }\n\n if (!destStat) {\n return fs.symlinkSync(resolvedSrc, dest)\n } else {\n let resolvedDest\n try {\n resolvedDest = fs.readlinkSync(dest)\n } catch (err) {\n // dest exists and is a regular file or directory,\n // Windows may throw UNKNOWN error. 
If dest already exists,\n // fs throws error anyway, so no need to guard against it here.\n if (err.code === 'EINVAL' || err.code === 'UNKNOWN') return fs.symlinkSync(resolvedSrc, dest)\n throw err\n }\n if (opts.dereference) {\n resolvedDest = path.resolve(process.cwd(), resolvedDest)\n }\n if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) {\n throw new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`)\n }\n\n // prevent copy if src is a subdir of dest since unlinking\n // dest in this case would result in removing src contents\n // and therefore a broken symlink would be created.\n if (fs.statSync(dest).isDirectory() && stat.isSrcSubdir(resolvedDest, resolvedSrc)) {\n throw new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`)\n }\n return copyLink(resolvedSrc, dest)\n }\n}\n\nfunction copyLink (resolvedSrc, dest) {\n fs.unlinkSync(dest)\n return fs.symlinkSync(resolvedSrc, dest)\n}\n\nmodule.exports = copySync\n","'use strict'\n\nconst fs = require('graceful-fs')\nconst path = require('path')\nconst mkdirs = require('../mkdirs').mkdirs\nconst pathExists = require('../path-exists').pathExists\nconst utimesMillis = require('../util/utimes').utimesMillis\nconst stat = require('../util/stat')\n\nfunction copy (src, dest, opts, cb) {\n if (typeof opts === 'function' && !cb) {\n cb = opts\n opts = {}\n } else if (typeof opts === 'function') {\n opts = { filter: opts }\n }\n\n cb = cb || function () {}\n opts = opts || {}\n\n opts.clobber = 'clobber' in opts ? !!opts.clobber : true // default to true for now\n opts.overwrite = 'overwrite' in opts ? !!opts.overwrite : opts.clobber // overwrite falls back to clobber\n\n // Warn about using preserveTimestamps on 32-bit node\n if (opts.preserveTimestamps && process.arch === 'ia32') {\n process.emitWarning(\n 'Using the preserveTimestamps option in 32-bit node is not recommended;\\n\\n' +\n '\\tsee https://github.com/jprichardson/node-fs-extra/issues/269',\n 'Warning', 'fs-extra-WARN0001'\n )\n }\n\n stat.checkPaths(src, dest, 'copy', opts, (err, stats) => {\n if (err) return cb(err)\n const { srcStat, destStat } = stats\n stat.checkParentPaths(src, srcStat, dest, 'copy', err => {\n if (err) return cb(err)\n if (opts.filter) return handleFilter(checkParentDir, destStat, src, dest, opts, cb)\n return checkParentDir(destStat, src, dest, opts, cb)\n })\n })\n}\n\nfunction checkParentDir (destStat, src, dest, opts, cb) {\n const destParent = path.dirname(dest)\n pathExists(destParent, (err, dirExists) => {\n if (err) return cb(err)\n if (dirExists) return getStats(destStat, src, dest, opts, cb)\n mkdirs(destParent, err => {\n if (err) return cb(err)\n return getStats(destStat, src, dest, opts, cb)\n })\n })\n}\n\nfunction handleFilter (onInclude, destStat, src, dest, opts, cb) {\n Promise.resolve(opts.filter(src, dest)).then(include => {\n if (include) return onInclude(destStat, src, dest, opts, cb)\n return cb()\n }, error => cb(error))\n}\n\nfunction startCopy (destStat, src, dest, opts, cb) {\n if (opts.filter) return handleFilter(getStats, destStat, src, dest, opts, cb)\n return getStats(destStat, src, dest, opts, cb)\n}\n\nfunction getStats (destStat, src, dest, opts, cb) {\n const stat = opts.dereference ? 
fs.stat : fs.lstat\n stat(src, (err, srcStat) => {\n if (err) return cb(err)\n\n if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts, cb)\n else if (srcStat.isFile() ||\n srcStat.isCharacterDevice() ||\n srcStat.isBlockDevice()) return onFile(srcStat, destStat, src, dest, opts, cb)\n else if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts, cb)\n else if (srcStat.isSocket()) return cb(new Error(`Cannot copy a socket file: ${src}`))\n else if (srcStat.isFIFO()) return cb(new Error(`Cannot copy a FIFO pipe: ${src}`))\n return cb(new Error(`Unknown file: ${src}`))\n })\n}\n\nfunction onFile (srcStat, destStat, src, dest, opts, cb) {\n if (!destStat) return copyFile(srcStat, src, dest, opts, cb)\n return mayCopyFile(srcStat, src, dest, opts, cb)\n}\n\nfunction mayCopyFile (srcStat, src, dest, opts, cb) {\n if (opts.overwrite) {\n fs.unlink(dest, err => {\n if (err) return cb(err)\n return copyFile(srcStat, src, dest, opts, cb)\n })\n } else if (opts.errorOnExist) {\n return cb(new Error(`'${dest}' already exists`))\n } else return cb()\n}\n\nfunction copyFile (srcStat, src, dest, opts, cb) {\n fs.copyFile(src, dest, err => {\n if (err) return cb(err)\n if (opts.preserveTimestamps) return handleTimestampsAndMode(srcStat.mode, src, dest, cb)\n return setDestMode(dest, srcStat.mode, cb)\n })\n}\n\nfunction handleTimestampsAndMode (srcMode, src, dest, cb) {\n // Make sure the file is writable before setting the timestamp\n // otherwise open fails with EPERM when invoked with 'r+'\n // (through utimes call)\n if (fileIsNotWritable(srcMode)) {\n return makeFileWritable(dest, srcMode, err => {\n if (err) return cb(err)\n return setDestTimestampsAndMode(srcMode, src, dest, cb)\n })\n }\n return setDestTimestampsAndMode(srcMode, src, dest, cb)\n}\n\nfunction fileIsNotWritable (srcMode) {\n return (srcMode & 0o200) === 0\n}\n\nfunction makeFileWritable (dest, srcMode, cb) {\n return setDestMode(dest, srcMode | 0o200, cb)\n}\n\nfunction setDestTimestampsAndMode (srcMode, src, dest, cb) {\n setDestTimestamps(src, dest, err => {\n if (err) return cb(err)\n return setDestMode(dest, srcMode, cb)\n })\n}\n\nfunction setDestMode (dest, srcMode, cb) {\n return fs.chmod(dest, srcMode, cb)\n}\n\nfunction setDestTimestamps (src, dest, cb) {\n // The initial srcStat.atime cannot be trusted\n // because it is modified by the read(2) system call\n // (See https://nodejs.org/api/fs.html#fs_stat_time_values)\n fs.stat(src, (err, updatedSrcStat) => {\n if (err) return cb(err)\n return utimesMillis(dest, updatedSrcStat.atime, updatedSrcStat.mtime, cb)\n })\n}\n\nfunction onDir (srcStat, destStat, src, dest, opts, cb) {\n if (!destStat) return mkDirAndCopy(srcStat.mode, src, dest, opts, cb)\n return copyDir(src, dest, opts, cb)\n}\n\nfunction mkDirAndCopy (srcMode, src, dest, opts, cb) {\n fs.mkdir(dest, err => {\n if (err) return cb(err)\n copyDir(src, dest, opts, err => {\n if (err) return cb(err)\n return setDestMode(dest, srcMode, cb)\n })\n })\n}\n\nfunction copyDir (src, dest, opts, cb) {\n fs.readdir(src, (err, items) => {\n if (err) return cb(err)\n return copyDirItems(items, src, dest, opts, cb)\n })\n}\n\nfunction copyDirItems (items, src, dest, opts, cb) {\n const item = items.pop()\n if (!item) return cb()\n return copyDirItem(items, item, src, dest, opts, cb)\n}\n\nfunction copyDirItem (items, item, src, dest, opts, cb) {\n const srcItem = path.join(src, item)\n const destItem = path.join(dest, item)\n stat.checkPaths(srcItem, destItem, 'copy', opts, (err, stats) => 
{\n if (err) return cb(err)\n const { destStat } = stats\n startCopy(destStat, srcItem, destItem, opts, err => {\n if (err) return cb(err)\n return copyDirItems(items, src, dest, opts, cb)\n })\n })\n}\n\nfunction onLink (destStat, src, dest, opts, cb) {\n fs.readlink(src, (err, resolvedSrc) => {\n if (err) return cb(err)\n if (opts.dereference) {\n resolvedSrc = path.resolve(process.cwd(), resolvedSrc)\n }\n\n if (!destStat) {\n return fs.symlink(resolvedSrc, dest, cb)\n } else {\n fs.readlink(dest, (err, resolvedDest) => {\n if (err) {\n // dest exists and is a regular file or directory,\n // Windows may throw UNKNOWN error. If dest already exists,\n // fs throws error anyway, so no need to guard against it here.\n if (err.code === 'EINVAL' || err.code === 'UNKNOWN') return fs.symlink(resolvedSrc, dest, cb)\n return cb(err)\n }\n if (opts.dereference) {\n resolvedDest = path.resolve(process.cwd(), resolvedDest)\n }\n if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) {\n return cb(new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`))\n }\n\n // do not copy if src is a subdir of dest since unlinking\n // dest in this case would result in removing src contents\n // and therefore a broken symlink would be created.\n if (destStat.isDirectory() && stat.isSrcSubdir(resolvedDest, resolvedSrc)) {\n return cb(new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`))\n }\n return copyLink(resolvedSrc, dest, cb)\n })\n }\n })\n}\n\nfunction copyLink (resolvedSrc, dest, cb) {\n fs.unlink(dest, err => {\n if (err) return cb(err)\n return fs.symlink(resolvedSrc, dest, cb)\n })\n}\n\nmodule.exports = copy\n","'use strict'\n\nconst u = require('universalify').fromCallback\nmodule.exports = {\n copy: u(require('./copy')),\n copySync: require('./copy-sync')\n}\n","'use strict'\n\nconst u = require('universalify').fromPromise\nconst fs = require('../fs')\nconst path = require('path')\nconst mkdir = require('../mkdirs')\nconst remove = require('../remove')\n\nconst emptyDir = u(async function emptyDir (dir) {\n let items\n try {\n items = await fs.readdir(dir)\n } catch {\n return mkdir.mkdirs(dir)\n }\n\n return Promise.all(items.map(item => remove.remove(path.join(dir, item))))\n})\n\nfunction emptyDirSync (dir) {\n let items\n try {\n items = fs.readdirSync(dir)\n } catch {\n return mkdir.mkdirsSync(dir)\n }\n\n items.forEach(item => {\n item = path.join(dir, item)\n remove.removeSync(item)\n })\n}\n\nmodule.exports = {\n emptyDirSync,\n emptydirSync: emptyDirSync,\n emptyDir,\n emptydir: emptyDir\n}\n","'use strict'\n\nconst u = require('universalify').fromCallback\nconst path = require('path')\nconst fs = require('graceful-fs')\nconst mkdir = require('../mkdirs')\n\nfunction createFile (file, callback) {\n function makeFile () {\n fs.writeFile(file, '', err => {\n if (err) return callback(err)\n callback()\n })\n }\n\n fs.stat(file, (err, stats) => { // eslint-disable-line handle-callback-err\n if (!err && stats.isFile()) return callback()\n const dir = path.dirname(file)\n fs.stat(dir, (err, stats) => {\n if (err) {\n // if the directory doesn't exist, make it\n if (err.code === 'ENOENT') {\n return mkdir.mkdirs(dir, err => {\n if (err) return callback(err)\n makeFile()\n })\n }\n return callback(err)\n }\n\n if (stats.isDirectory()) makeFile()\n else {\n // parent is not a directory\n // This is just to cause an internal ENOTDIR error to be thrown\n fs.readdir(dir, err => {\n if (err) return callback(err)\n })\n }\n })\n })\n}\n\nfunction 
createFileSync (file) {\n let stats\n try {\n stats = fs.statSync(file)\n } catch {}\n if (stats && stats.isFile()) return\n\n const dir = path.dirname(file)\n try {\n if (!fs.statSync(dir).isDirectory()) {\n // parent is not a directory\n // This is just to cause an internal ENOTDIR error to be thrown\n fs.readdirSync(dir)\n }\n } catch (err) {\n // If the stat call above failed because the directory doesn't exist, create it\n if (err && err.code === 'ENOENT') mkdir.mkdirsSync(dir)\n else throw err\n }\n\n fs.writeFileSync(file, '')\n}\n\nmodule.exports = {\n createFile: u(createFile),\n createFileSync\n}\n","'use strict'\n\nconst { createFile, createFileSync } = require('./file')\nconst { createLink, createLinkSync } = require('./link')\nconst { createSymlink, createSymlinkSync } = require('./symlink')\n\nmodule.exports = {\n // file\n createFile,\n createFileSync,\n ensureFile: createFile,\n ensureFileSync: createFileSync,\n // link\n createLink,\n createLinkSync,\n ensureLink: createLink,\n ensureLinkSync: createLinkSync,\n // symlink\n createSymlink,\n createSymlinkSync,\n ensureSymlink: createSymlink,\n ensureSymlinkSync: createSymlinkSync\n}\n","'use strict'\n\nconst u = require('universalify').fromCallback\nconst path = require('path')\nconst fs = require('graceful-fs')\nconst mkdir = require('../mkdirs')\nconst pathExists = require('../path-exists').pathExists\nconst { areIdentical } = require('../util/stat')\n\nfunction createLink (srcpath, dstpath, callback) {\n function makeLink (srcpath, dstpath) {\n fs.link(srcpath, dstpath, err => {\n if (err) return callback(err)\n callback(null)\n })\n }\n\n fs.lstat(dstpath, (_, dstStat) => {\n fs.lstat(srcpath, (err, srcStat) => {\n if (err) {\n err.message = err.message.replace('lstat', 'ensureLink')\n return callback(err)\n }\n if (dstStat && areIdentical(srcStat, dstStat)) return callback(null)\n\n const dir = path.dirname(dstpath)\n pathExists(dir, (err, dirExists) => {\n if (err) return callback(err)\n if (dirExists) return makeLink(srcpath, dstpath)\n mkdir.mkdirs(dir, err => {\n if (err) return callback(err)\n makeLink(srcpath, dstpath)\n })\n })\n })\n })\n}\n\nfunction createLinkSync (srcpath, dstpath) {\n let dstStat\n try {\n dstStat = fs.lstatSync(dstpath)\n } catch {}\n\n try {\n const srcStat = fs.lstatSync(srcpath)\n if (dstStat && areIdentical(srcStat, dstStat)) return\n } catch (err) {\n err.message = err.message.replace('lstat', 'ensureLink')\n throw err\n }\n\n const dir = path.dirname(dstpath)\n const dirExists = fs.existsSync(dir)\n if (dirExists) return fs.linkSync(srcpath, dstpath)\n mkdir.mkdirsSync(dir)\n\n return fs.linkSync(srcpath, dstpath)\n}\n\nmodule.exports = {\n createLink: u(createLink),\n createLinkSync\n}\n","'use strict'\n\nconst path = require('path')\nconst fs = require('graceful-fs')\nconst pathExists = require('../path-exists').pathExists\n\n/**\n * Function that returns two types of paths, one relative to symlink, and one\n * relative to the current working directory. Checks if path is absolute or\n * relative. If the path is relative, this function checks if the path is\n * relative to symlink or relative to current working directory. This is an\n * initiative to find a smarter `srcpath` to supply when building symlinks.\n * This allows you to determine which path to use out of one of three possible\n * types of source paths. The first is an absolute path. This is detected by\n * `path.isAbsolute()`. When an absolute path is provided, it is checked to\n * see if it exists. 
If it does it's used, if not an error is returned\n * (callback)/ thrown (sync). The other two options for `srcpath` are a\n * relative url. By default Node's `fs.symlink` works by creating a symlink\n * using `dstpath` and expects the `srcpath` to be relative to the newly\n * created symlink. If you provide a `srcpath` that does not exist on the file\n * system it results in a broken symlink. To minimize this, the function\n * checks to see if the 'relative to symlink' source file exists, and if it\n * does it will use it. If it does not, it checks if there's a file that\n * exists that is relative to the current working directory, if does its used.\n * This preserves the expectations of the original fs.symlink spec and adds\n * the ability to pass in `relative to current working direcotry` paths.\n */\n\nfunction symlinkPaths (srcpath, dstpath, callback) {\n if (path.isAbsolute(srcpath)) {\n return fs.lstat(srcpath, (err) => {\n if (err) {\n err.message = err.message.replace('lstat', 'ensureSymlink')\n return callback(err)\n }\n return callback(null, {\n toCwd: srcpath,\n toDst: srcpath\n })\n })\n } else {\n const dstdir = path.dirname(dstpath)\n const relativeToDst = path.join(dstdir, srcpath)\n return pathExists(relativeToDst, (err, exists) => {\n if (err) return callback(err)\n if (exists) {\n return callback(null, {\n toCwd: relativeToDst,\n toDst: srcpath\n })\n } else {\n return fs.lstat(srcpath, (err) => {\n if (err) {\n err.message = err.message.replace('lstat', 'ensureSymlink')\n return callback(err)\n }\n return callback(null, {\n toCwd: srcpath,\n toDst: path.relative(dstdir, srcpath)\n })\n })\n }\n })\n }\n}\n\nfunction symlinkPathsSync (srcpath, dstpath) {\n let exists\n if (path.isAbsolute(srcpath)) {\n exists = fs.existsSync(srcpath)\n if (!exists) throw new Error('absolute srcpath does not exist')\n return {\n toCwd: srcpath,\n toDst: srcpath\n }\n } else {\n const dstdir = path.dirname(dstpath)\n const relativeToDst = path.join(dstdir, srcpath)\n exists = fs.existsSync(relativeToDst)\n if (exists) {\n return {\n toCwd: relativeToDst,\n toDst: srcpath\n }\n } else {\n exists = fs.existsSync(srcpath)\n if (!exists) throw new Error('relative srcpath does not exist')\n return {\n toCwd: srcpath,\n toDst: path.relative(dstdir, srcpath)\n }\n }\n }\n}\n\nmodule.exports = {\n symlinkPaths,\n symlinkPathsSync\n}\n","'use strict'\n\nconst fs = require('graceful-fs')\n\nfunction symlinkType (srcpath, type, callback) {\n callback = (typeof type === 'function') ? type : callback\n type = (typeof type === 'function') ? false : type\n if (type) return callback(null, type)\n fs.lstat(srcpath, (err, stats) => {\n if (err) return callback(null, 'file')\n type = (stats && stats.isDirectory()) ? 'dir' : 'file'\n callback(null, type)\n })\n}\n\nfunction symlinkTypeSync (srcpath, type) {\n let stats\n\n if (type) return type\n try {\n stats = fs.lstatSync(srcpath)\n } catch {\n return 'file'\n }\n return (stats && stats.isDirectory()) ? 
'dir' : 'file'\n}\n\nmodule.exports = {\n symlinkType,\n symlinkTypeSync\n}\n","'use strict'\n\nconst u = require('universalify').fromCallback\nconst path = require('path')\nconst fs = require('../fs')\nconst _mkdirs = require('../mkdirs')\nconst mkdirs = _mkdirs.mkdirs\nconst mkdirsSync = _mkdirs.mkdirsSync\n\nconst _symlinkPaths = require('./symlink-paths')\nconst symlinkPaths = _symlinkPaths.symlinkPaths\nconst symlinkPathsSync = _symlinkPaths.symlinkPathsSync\n\nconst _symlinkType = require('./symlink-type')\nconst symlinkType = _symlinkType.symlinkType\nconst symlinkTypeSync = _symlinkType.symlinkTypeSync\n\nconst pathExists = require('../path-exists').pathExists\n\nconst { areIdentical } = require('../util/stat')\n\nfunction createSymlink (srcpath, dstpath, type, callback) {\n callback = (typeof type === 'function') ? type : callback\n type = (typeof type === 'function') ? false : type\n\n fs.lstat(dstpath, (err, stats) => {\n if (!err && stats.isSymbolicLink()) {\n Promise.all([\n fs.stat(srcpath),\n fs.stat(dstpath)\n ]).then(([srcStat, dstStat]) => {\n if (areIdentical(srcStat, dstStat)) return callback(null)\n _createSymlink(srcpath, dstpath, type, callback)\n })\n } else _createSymlink(srcpath, dstpath, type, callback)\n })\n}\n\nfunction _createSymlink (srcpath, dstpath, type, callback) {\n symlinkPaths(srcpath, dstpath, (err, relative) => {\n if (err) return callback(err)\n srcpath = relative.toDst\n symlinkType(relative.toCwd, type, (err, type) => {\n if (err) return callback(err)\n const dir = path.dirname(dstpath)\n pathExists(dir, (err, dirExists) => {\n if (err) return callback(err)\n if (dirExists) return fs.symlink(srcpath, dstpath, type, callback)\n mkdirs(dir, err => {\n if (err) return callback(err)\n fs.symlink(srcpath, dstpath, type, callback)\n })\n })\n })\n })\n}\n\nfunction createSymlinkSync (srcpath, dstpath, type) {\n let stats\n try {\n stats = fs.lstatSync(dstpath)\n } catch {}\n if (stats && stats.isSymbolicLink()) {\n const srcStat = fs.statSync(srcpath)\n const dstStat = fs.statSync(dstpath)\n if (areIdentical(srcStat, dstStat)) return\n }\n\n const relative = symlinkPathsSync(srcpath, dstpath)\n srcpath = relative.toDst\n type = symlinkTypeSync(relative.toCwd, type)\n const dir = path.dirname(dstpath)\n const exists = fs.existsSync(dir)\n if (exists) return fs.symlinkSync(srcpath, dstpath, type)\n mkdirsSync(dir)\n return fs.symlinkSync(srcpath, dstpath, type)\n}\n\nmodule.exports = {\n createSymlink: u(createSymlink),\n createSymlinkSync\n}\n","'use strict'\n// This is adapted from https://github.com/normalize/mz\n// Copyright (c) 2014-2016 Jonathan Ong me@jongleberry.com and Contributors\nconst u = require('universalify').fromCallback\nconst fs = require('graceful-fs')\n\nconst api = [\n 'access',\n 'appendFile',\n 'chmod',\n 'chown',\n 'close',\n 'copyFile',\n 'fchmod',\n 'fchown',\n 'fdatasync',\n 'fstat',\n 'fsync',\n 'ftruncate',\n 'futimes',\n 'lchmod',\n 'lchown',\n 'link',\n 'lstat',\n 'mkdir',\n 'mkdtemp',\n 'open',\n 'opendir',\n 'readdir',\n 'readFile',\n 'readlink',\n 'realpath',\n 'rename',\n 'rm',\n 'rmdir',\n 'stat',\n 'symlink',\n 'truncate',\n 'unlink',\n 'utimes',\n 'writeFile'\n].filter(key => {\n // Some commands are not available on some systems. 
Ex:\n // fs.opendir was added in Node.js v12.12.0\n // fs.rm was added in Node.js v14.14.0\n // fs.lchown is not available on at least some Linux\n return typeof fs[key] === 'function'\n})\n\n// Export cloned fs:\nObject.assign(exports, fs)\n\n// Universalify async methods:\napi.forEach(method => {\n exports[method] = u(fs[method])\n})\n\n// We differ from mz/fs in that we still ship the old, broken, fs.exists()\n// since we are a drop-in replacement for the native module\nexports.exists = function (filename, callback) {\n if (typeof callback === 'function') {\n return fs.exists(filename, callback)\n }\n return new Promise(resolve => {\n return fs.exists(filename, resolve)\n })\n}\n\n// fs.read(), fs.write(), & fs.writev() need special treatment due to multiple callback args\n\nexports.read = function (fd, buffer, offset, length, position, callback) {\n if (typeof callback === 'function') {\n return fs.read(fd, buffer, offset, length, position, callback)\n }\n return new Promise((resolve, reject) => {\n fs.read(fd, buffer, offset, length, position, (err, bytesRead, buffer) => {\n if (err) return reject(err)\n resolve({ bytesRead, buffer })\n })\n })\n}\n\n// Function signature can be\n// fs.write(fd, buffer[, offset[, length[, position]]], callback)\n// OR\n// fs.write(fd, string[, position[, encoding]], callback)\n// We need to handle both cases, so we use ...args\nexports.write = function (fd, buffer, ...args) {\n if (typeof args[args.length - 1] === 'function') {\n return fs.write(fd, buffer, ...args)\n }\n\n return new Promise((resolve, reject) => {\n fs.write(fd, buffer, ...args, (err, bytesWritten, buffer) => {\n if (err) return reject(err)\n resolve({ bytesWritten, buffer })\n })\n })\n}\n\n// fs.writev only available in Node v12.9.0+\nif (typeof fs.writev === 'function') {\n // Function signature is\n // s.writev(fd, buffers[, position], callback)\n // We need to handle the optional arg, so we use ...args\n exports.writev = function (fd, buffers, ...args) {\n if (typeof args[args.length - 1] === 'function') {\n return fs.writev(fd, buffers, ...args)\n }\n\n return new Promise((resolve, reject) => {\n fs.writev(fd, buffers, ...args, (err, bytesWritten, buffers) => {\n if (err) return reject(err)\n resolve({ bytesWritten, buffers })\n })\n })\n }\n}\n\n// fs.realpath.native sometimes not available if fs is monkey-patched\nif (typeof fs.realpath.native === 'function') {\n exports.realpath.native = u(fs.realpath.native)\n} else {\n process.emitWarning(\n 'fs.realpath.native is not a function. 
Is fs being monkey-patched?',\n 'Warning', 'fs-extra-WARN0003'\n )\n}\n","'use strict'\n\nmodule.exports = {\n // Export promiseified graceful-fs:\n ...require('./fs'),\n // Export extra methods:\n ...require('./copy'),\n ...require('./empty'),\n ...require('./ensure'),\n ...require('./json'),\n ...require('./mkdirs'),\n ...require('./move'),\n ...require('./output-file'),\n ...require('./path-exists'),\n ...require('./remove')\n}\n","'use strict'\n\nconst u = require('universalify').fromPromise\nconst jsonFile = require('./jsonfile')\n\njsonFile.outputJson = u(require('./output-json'))\njsonFile.outputJsonSync = require('./output-json-sync')\n// aliases\njsonFile.outputJSON = jsonFile.outputJson\njsonFile.outputJSONSync = jsonFile.outputJsonSync\njsonFile.writeJSON = jsonFile.writeJson\njsonFile.writeJSONSync = jsonFile.writeJsonSync\njsonFile.readJSON = jsonFile.readJson\njsonFile.readJSONSync = jsonFile.readJsonSync\n\nmodule.exports = jsonFile\n","'use strict'\n\nconst jsonFile = require('jsonfile')\n\nmodule.exports = {\n // jsonfile exports\n readJson: jsonFile.readFile,\n readJsonSync: jsonFile.readFileSync,\n writeJson: jsonFile.writeFile,\n writeJsonSync: jsonFile.writeFileSync\n}\n","'use strict'\n\nconst { stringify } = require('jsonfile/utils')\nconst { outputFileSync } = require('../output-file')\n\nfunction outputJsonSync (file, data, options) {\n const str = stringify(data, options)\n\n outputFileSync(file, str, options)\n}\n\nmodule.exports = outputJsonSync\n","'use strict'\n\nconst { stringify } = require('jsonfile/utils')\nconst { outputFile } = require('../output-file')\n\nasync function outputJson (file, data, options = {}) {\n const str = stringify(data, options)\n\n await outputFile(file, str, options)\n}\n\nmodule.exports = outputJson\n","'use strict'\nconst u = require('universalify').fromPromise\nconst { makeDir: _makeDir, makeDirSync } = require('./make-dir')\nconst makeDir = u(_makeDir)\n\nmodule.exports = {\n mkdirs: makeDir,\n mkdirsSync: makeDirSync,\n // alias\n mkdirp: makeDir,\n mkdirpSync: makeDirSync,\n ensureDir: makeDir,\n ensureDirSync: makeDirSync\n}\n","'use strict'\nconst fs = require('../fs')\nconst { checkPath } = require('./utils')\n\nconst getMode = options => {\n const defaults = { mode: 0o777 }\n if (typeof options === 'number') return options\n return ({ ...defaults, ...options }).mode\n}\n\nmodule.exports.makeDir = async (dir, options) => {\n checkPath(dir)\n\n return fs.mkdir(dir, {\n mode: getMode(options),\n recursive: true\n })\n}\n\nmodule.exports.makeDirSync = (dir, options) => {\n checkPath(dir)\n\n return fs.mkdirSync(dir, {\n mode: getMode(options),\n recursive: true\n })\n}\n","// Adapted from https://github.com/sindresorhus/make-dir\n// Copyright (c) Sindre Sorhus (sindresorhus.com)\n// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the \"Software\"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:\n// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n'use strict'\nconst path = require('path')\n\n// https://github.com/nodejs/node/issues/8987\n// https://github.com/libuv/libuv/pull/1088\nmodule.exports.checkPath = function checkPath (pth) {\n if (process.platform === 'win32') {\n const pathHasInvalidWinCharacters = /[<>:\"|?*]/.test(pth.replace(path.parse(pth).root, ''))\n\n if (pathHasInvalidWinCharacters) {\n const error = new Error(`Path contains invalid characters: ${pth}`)\n error.code = 'EINVAL'\n throw error\n }\n }\n}\n","'use strict'\n\nconst u = require('universalify').fromCallback\nmodule.exports = {\n move: u(require('./move')),\n moveSync: require('./move-sync')\n}\n","'use strict'\n\nconst fs = require('graceful-fs')\nconst path = require('path')\nconst copySync = require('../copy').copySync\nconst removeSync = require('../remove').removeSync\nconst mkdirpSync = require('../mkdirs').mkdirpSync\nconst stat = require('../util/stat')\n\nfunction moveSync (src, dest, opts) {\n opts = opts || {}\n const overwrite = opts.overwrite || opts.clobber || false\n\n const { srcStat, isChangingCase = false } = stat.checkPathsSync(src, dest, 'move', opts)\n stat.checkParentPathsSync(src, srcStat, dest, 'move')\n if (!isParentRoot(dest)) mkdirpSync(path.dirname(dest))\n return doRename(src, dest, overwrite, isChangingCase)\n}\n\nfunction isParentRoot (dest) {\n const parent = path.dirname(dest)\n const parsedPath = path.parse(parent)\n return parsedPath.root === parent\n}\n\nfunction doRename (src, dest, overwrite, isChangingCase) {\n if (isChangingCase) return rename(src, dest, overwrite)\n if (overwrite) {\n removeSync(dest)\n return rename(src, dest, overwrite)\n }\n if (fs.existsSync(dest)) throw new Error('dest already exists.')\n return rename(src, dest, overwrite)\n}\n\nfunction rename (src, dest, overwrite) {\n try {\n fs.renameSync(src, dest)\n } catch (err) {\n if (err.code !== 'EXDEV') throw err\n return moveAcrossDevice(src, dest, overwrite)\n }\n}\n\nfunction moveAcrossDevice (src, dest, overwrite) {\n const opts = {\n overwrite,\n errorOnExist: true\n }\n copySync(src, dest, opts)\n return removeSync(src)\n}\n\nmodule.exports = moveSync\n","'use strict'\n\nconst fs = require('graceful-fs')\nconst path = require('path')\nconst copy = require('../copy').copy\nconst remove = require('../remove').remove\nconst mkdirp = require('../mkdirs').mkdirp\nconst pathExists = require('../path-exists').pathExists\nconst stat = require('../util/stat')\n\nfunction move (src, dest, opts, cb) {\n if (typeof opts === 'function') {\n cb = opts\n opts = {}\n }\n\n opts = opts || {}\n\n const overwrite = opts.overwrite || opts.clobber || false\n\n stat.checkPaths(src, dest, 'move', opts, (err, stats) => {\n if (err) return cb(err)\n const { srcStat, isChangingCase = false } = stats\n stat.checkParentPaths(src, srcStat, dest, 'move', err => {\n if (err) return cb(err)\n if (isParentRoot(dest)) return doRename(src, dest, overwrite, isChangingCase, cb)\n mkdirp(path.dirname(dest), err => {\n if (err) return cb(err)\n return doRename(src, dest, overwrite, isChangingCase, cb)\n })\n })\n })\n}\n\nfunction isParentRoot (dest) {\n const parent = path.dirname(dest)\n const parsedPath = path.parse(parent)\n return parsedPath.root === parent\n}\n\nfunction doRename (src, dest, 
overwrite, isChangingCase, cb) {\n if (isChangingCase) return rename(src, dest, overwrite, cb)\n if (overwrite) {\n return remove(dest, err => {\n if (err) return cb(err)\n return rename(src, dest, overwrite, cb)\n })\n }\n pathExists(dest, (err, destExists) => {\n if (err) return cb(err)\n if (destExists) return cb(new Error('dest already exists.'))\n return rename(src, dest, overwrite, cb)\n })\n}\n\nfunction rename (src, dest, overwrite, cb) {\n fs.rename(src, dest, err => {\n if (!err) return cb()\n if (err.code !== 'EXDEV') return cb(err)\n return moveAcrossDevice(src, dest, overwrite, cb)\n })\n}\n\nfunction moveAcrossDevice (src, dest, overwrite, cb) {\n const opts = {\n overwrite,\n errorOnExist: true\n }\n copy(src, dest, opts, err => {\n if (err) return cb(err)\n return remove(src, cb)\n })\n}\n\nmodule.exports = move\n","'use strict'\n\nconst u = require('universalify').fromCallback\nconst fs = require('graceful-fs')\nconst path = require('path')\nconst mkdir = require('../mkdirs')\nconst pathExists = require('../path-exists').pathExists\n\nfunction outputFile (file, data, encoding, callback) {\n if (typeof encoding === 'function') {\n callback = encoding\n encoding = 'utf8'\n }\n\n const dir = path.dirname(file)\n pathExists(dir, (err, itDoes) => {\n if (err) return callback(err)\n if (itDoes) return fs.writeFile(file, data, encoding, callback)\n\n mkdir.mkdirs(dir, err => {\n if (err) return callback(err)\n\n fs.writeFile(file, data, encoding, callback)\n })\n })\n}\n\nfunction outputFileSync (file, ...args) {\n const dir = path.dirname(file)\n if (fs.existsSync(dir)) {\n return fs.writeFileSync(file, ...args)\n }\n mkdir.mkdirsSync(dir)\n fs.writeFileSync(file, ...args)\n}\n\nmodule.exports = {\n outputFile: u(outputFile),\n outputFileSync\n}\n","'use strict'\nconst u = require('universalify').fromPromise\nconst fs = require('../fs')\n\nfunction pathExists (path) {\n return fs.access(path).then(() => true).catch(() => false)\n}\n\nmodule.exports = {\n pathExists: u(pathExists),\n pathExistsSync: fs.existsSync\n}\n","'use strict'\n\nconst fs = require('graceful-fs')\nconst u = require('universalify').fromCallback\nconst rimraf = require('./rimraf')\n\nfunction remove (path, callback) {\n // Node 14.14.0+\n if (fs.rm) return fs.rm(path, { recursive: true, force: true }, callback)\n rimraf(path, callback)\n}\n\nfunction removeSync (path) {\n // Node 14.14.0+\n if (fs.rmSync) return fs.rmSync(path, { recursive: true, force: true })\n rimraf.sync(path)\n}\n\nmodule.exports = {\n remove: u(remove),\n removeSync\n}\n","'use strict'\n\nconst fs = require('graceful-fs')\nconst path = require('path')\nconst assert = require('assert')\n\nconst isWindows = (process.platform === 'win32')\n\nfunction defaults (options) {\n const methods = [\n 'unlink',\n 'chmod',\n 'stat',\n 'lstat',\n 'rmdir',\n 'readdir'\n ]\n methods.forEach(m => {\n options[m] = options[m] || fs[m]\n m = m + 'Sync'\n options[m] = options[m] || fs[m]\n })\n\n options.maxBusyTries = options.maxBusyTries || 3\n}\n\nfunction rimraf (p, options, cb) {\n let busyTries = 0\n\n if (typeof options === 'function') {\n cb = options\n options = {}\n }\n\n assert(p, 'rimraf: missing path')\n assert.strictEqual(typeof p, 'string', 'rimraf: path should be a string')\n assert.strictEqual(typeof cb, 'function', 'rimraf: callback function required')\n assert(options, 'rimraf: invalid options argument provided')\n assert.strictEqual(typeof options, 'object', 'rimraf: options should be object')\n\n defaults(options)\n\n rimraf_(p, 
options, function CB (er) {\n if (er) {\n if ((er.code === 'EBUSY' || er.code === 'ENOTEMPTY' || er.code === 'EPERM') &&\n busyTries < options.maxBusyTries) {\n busyTries++\n const time = busyTries * 100\n // try again, with the same exact callback as this one.\n return setTimeout(() => rimraf_(p, options, CB), time)\n }\n\n // already gone\n if (er.code === 'ENOENT') er = null\n }\n\n cb(er)\n })\n}\n\n// Two possible strategies.\n// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR\n// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR\n//\n// Both result in an extra syscall when you guess wrong. However, there\n// are likely far more normal files in the world than directories. This\n// is based on the assumption that a the average number of files per\n// directory is >= 1.\n//\n// If anyone ever complains about this, then I guess the strategy could\n// be made configurable somehow. But until then, YAGNI.\nfunction rimraf_ (p, options, cb) {\n assert(p)\n assert(options)\n assert(typeof cb === 'function')\n\n // sunos lets the root user unlink directories, which is... weird.\n // so we have to lstat here and make sure it's not a dir.\n options.lstat(p, (er, st) => {\n if (er && er.code === 'ENOENT') {\n return cb(null)\n }\n\n // Windows can EPERM on stat. Life is suffering.\n if (er && er.code === 'EPERM' && isWindows) {\n return fixWinEPERM(p, options, er, cb)\n }\n\n if (st && st.isDirectory()) {\n return rmdir(p, options, er, cb)\n }\n\n options.unlink(p, er => {\n if (er) {\n if (er.code === 'ENOENT') {\n return cb(null)\n }\n if (er.code === 'EPERM') {\n return (isWindows)\n ? fixWinEPERM(p, options, er, cb)\n : rmdir(p, options, er, cb)\n }\n if (er.code === 'EISDIR') {\n return rmdir(p, options, er, cb)\n }\n }\n return cb(er)\n })\n })\n}\n\nfunction fixWinEPERM (p, options, er, cb) {\n assert(p)\n assert(options)\n assert(typeof cb === 'function')\n\n options.chmod(p, 0o666, er2 => {\n if (er2) {\n cb(er2.code === 'ENOENT' ? null : er)\n } else {\n options.stat(p, (er3, stats) => {\n if (er3) {\n cb(er3.code === 'ENOENT' ? 
null : er)\n } else if (stats.isDirectory()) {\n rmdir(p, options, er, cb)\n } else {\n options.unlink(p, cb)\n }\n })\n }\n })\n}\n\nfunction fixWinEPERMSync (p, options, er) {\n let stats\n\n assert(p)\n assert(options)\n\n try {\n options.chmodSync(p, 0o666)\n } catch (er2) {\n if (er2.code === 'ENOENT') {\n return\n } else {\n throw er\n }\n }\n\n try {\n stats = options.statSync(p)\n } catch (er3) {\n if (er3.code === 'ENOENT') {\n return\n } else {\n throw er\n }\n }\n\n if (stats.isDirectory()) {\n rmdirSync(p, options, er)\n } else {\n options.unlinkSync(p)\n }\n}\n\nfunction rmdir (p, options, originalEr, cb) {\n assert(p)\n assert(options)\n assert(typeof cb === 'function')\n\n // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS)\n // if we guessed wrong, and it's not a directory, then\n // raise the original error.\n options.rmdir(p, er => {\n if (er && (er.code === 'ENOTEMPTY' || er.code === 'EEXIST' || er.code === 'EPERM')) {\n rmkids(p, options, cb)\n } else if (er && er.code === 'ENOTDIR') {\n cb(originalEr)\n } else {\n cb(er)\n }\n })\n}\n\nfunction rmkids (p, options, cb) {\n assert(p)\n assert(options)\n assert(typeof cb === 'function')\n\n options.readdir(p, (er, files) => {\n if (er) return cb(er)\n\n let n = files.length\n let errState\n\n if (n === 0) return options.rmdir(p, cb)\n\n files.forEach(f => {\n rimraf(path.join(p, f), options, er => {\n if (errState) {\n return\n }\n if (er) return cb(errState = er)\n if (--n === 0) {\n options.rmdir(p, cb)\n }\n })\n })\n })\n}\n\n// this looks simpler, and is strictly *faster*, but will\n// tie up the JavaScript thread and fail on excessively\n// deep directory trees.\nfunction rimrafSync (p, options) {\n let st\n\n options = options || {}\n defaults(options)\n\n assert(p, 'rimraf: missing path')\n assert.strictEqual(typeof p, 'string', 'rimraf: path should be a string')\n assert(options, 'rimraf: missing options')\n assert.strictEqual(typeof options, 'object', 'rimraf: options should be object')\n\n try {\n st = options.lstatSync(p)\n } catch (er) {\n if (er.code === 'ENOENT') {\n return\n }\n\n // Windows can EPERM on stat. Life is suffering.\n if (er.code === 'EPERM' && isWindows) {\n fixWinEPERMSync(p, options, er)\n }\n }\n\n try {\n // sunos lets the root user unlink directories, which is... weird.\n if (st && st.isDirectory()) {\n rmdirSync(p, options, null)\n } else {\n options.unlinkSync(p)\n }\n } catch (er) {\n if (er.code === 'ENOENT') {\n return\n } else if (er.code === 'EPERM') {\n return isWindows ? 
fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er)\n } else if (er.code !== 'EISDIR') {\n throw er\n }\n rmdirSync(p, options, er)\n }\n}\n\nfunction rmdirSync (p, options, originalEr) {\n assert(p)\n assert(options)\n\n try {\n options.rmdirSync(p)\n } catch (er) {\n if (er.code === 'ENOTDIR') {\n throw originalEr\n } else if (er.code === 'ENOTEMPTY' || er.code === 'EEXIST' || er.code === 'EPERM') {\n rmkidsSync(p, options)\n } else if (er.code !== 'ENOENT') {\n throw er\n }\n }\n}\n\nfunction rmkidsSync (p, options) {\n assert(p)\n assert(options)\n options.readdirSync(p).forEach(f => rimrafSync(path.join(p, f), options))\n\n if (isWindows) {\n // We only end up here once we got ENOTEMPTY at least once, and\n // at this point, we are guaranteed to have removed all the kids.\n // So, we know that it won't be ENOENT or ENOTDIR or anything else.\n // try really hard to delete stuff on windows, because it has a\n // PROFOUNDLY annoying habit of not closing handles promptly when\n // files are deleted, resulting in spurious ENOTEMPTY errors.\n const startTime = Date.now()\n do {\n try {\n const ret = options.rmdirSync(p, options)\n return ret\n } catch {}\n } while (Date.now() - startTime < 500) // give up after 500ms\n } else {\n const ret = options.rmdirSync(p, options)\n return ret\n }\n}\n\nmodule.exports = rimraf\nrimraf.sync = rimrafSync\n","'use strict'\n\nconst fs = require('../fs')\nconst path = require('path')\nconst util = require('util')\n\nfunction getStats (src, dest, opts) {\n const statFunc = opts.dereference\n ? (file) => fs.stat(file, { bigint: true })\n : (file) => fs.lstat(file, { bigint: true })\n return Promise.all([\n statFunc(src),\n statFunc(dest).catch(err => {\n if (err.code === 'ENOENT') return null\n throw err\n })\n ]).then(([srcStat, destStat]) => ({ srcStat, destStat }))\n}\n\nfunction getStatsSync (src, dest, opts) {\n let destStat\n const statFunc = opts.dereference\n ? 
(file) => fs.statSync(file, { bigint: true })\n : (file) => fs.lstatSync(file, { bigint: true })\n const srcStat = statFunc(src)\n try {\n destStat = statFunc(dest)\n } catch (err) {\n if (err.code === 'ENOENT') return { srcStat, destStat: null }\n throw err\n }\n return { srcStat, destStat }\n}\n\nfunction checkPaths (src, dest, funcName, opts, cb) {\n util.callbackify(getStats)(src, dest, opts, (err, stats) => {\n if (err) return cb(err)\n const { srcStat, destStat } = stats\n\n if (destStat) {\n if (areIdentical(srcStat, destStat)) {\n const srcBaseName = path.basename(src)\n const destBaseName = path.basename(dest)\n if (funcName === 'move' &&\n srcBaseName !== destBaseName &&\n srcBaseName.toLowerCase() === destBaseName.toLowerCase()) {\n return cb(null, { srcStat, destStat, isChangingCase: true })\n }\n return cb(new Error('Source and destination must not be the same.'))\n }\n if (srcStat.isDirectory() && !destStat.isDirectory()) {\n return cb(new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`))\n }\n if (!srcStat.isDirectory() && destStat.isDirectory()) {\n return cb(new Error(`Cannot overwrite directory '${dest}' with non-directory '${src}'.`))\n }\n }\n\n if (srcStat.isDirectory() && isSrcSubdir(src, dest)) {\n return cb(new Error(errMsg(src, dest, funcName)))\n }\n return cb(null, { srcStat, destStat })\n })\n}\n\nfunction checkPathsSync (src, dest, funcName, opts) {\n const { srcStat, destStat } = getStatsSync(src, dest, opts)\n\n if (destStat) {\n if (areIdentical(srcStat, destStat)) {\n const srcBaseName = path.basename(src)\n const destBaseName = path.basename(dest)\n if (funcName === 'move' &&\n srcBaseName !== destBaseName &&\n srcBaseName.toLowerCase() === destBaseName.toLowerCase()) {\n return { srcStat, destStat, isChangingCase: true }\n }\n throw new Error('Source and destination must not be the same.')\n }\n if (srcStat.isDirectory() && !destStat.isDirectory()) {\n throw new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`)\n }\n if (!srcStat.isDirectory() && destStat.isDirectory()) {\n throw new Error(`Cannot overwrite directory '${dest}' with non-directory '${src}'.`)\n }\n }\n\n if (srcStat.isDirectory() && isSrcSubdir(src, dest)) {\n throw new Error(errMsg(src, dest, funcName))\n }\n return { srcStat, destStat }\n}\n\n// recursively check if dest parent is a subdirectory of src.\n// It works for all file types including symlinks since it\n// checks the src and dest inodes. 
It starts from the deepest\n// parent and stops once it reaches the src parent or the root path.\nfunction checkParentPaths (src, srcStat, dest, funcName, cb) {\n const srcParent = path.resolve(path.dirname(src))\n const destParent = path.resolve(path.dirname(dest))\n if (destParent === srcParent || destParent === path.parse(destParent).root) return cb()\n fs.stat(destParent, { bigint: true }, (err, destStat) => {\n if (err) {\n if (err.code === 'ENOENT') return cb()\n return cb(err)\n }\n if (areIdentical(srcStat, destStat)) {\n return cb(new Error(errMsg(src, dest, funcName)))\n }\n return checkParentPaths(src, srcStat, destParent, funcName, cb)\n })\n}\n\nfunction checkParentPathsSync (src, srcStat, dest, funcName) {\n const srcParent = path.resolve(path.dirname(src))\n const destParent = path.resolve(path.dirname(dest))\n if (destParent === srcParent || destParent === path.parse(destParent).root) return\n let destStat\n try {\n destStat = fs.statSync(destParent, { bigint: true })\n } catch (err) {\n if (err.code === 'ENOENT') return\n throw err\n }\n if (areIdentical(srcStat, destStat)) {\n throw new Error(errMsg(src, dest, funcName))\n }\n return checkParentPathsSync(src, srcStat, destParent, funcName)\n}\n\nfunction areIdentical (srcStat, destStat) {\n return destStat.ino && destStat.dev && destStat.ino === srcStat.ino && destStat.dev === srcStat.dev\n}\n\n// return true if dest is a subdir of src, otherwise false.\n// It only checks the path strings.\nfunction isSrcSubdir (src, dest) {\n const srcArr = path.resolve(src).split(path.sep).filter(i => i)\n const destArr = path.resolve(dest).split(path.sep).filter(i => i)\n return srcArr.reduce((acc, cur, i) => acc && destArr[i] === cur, true)\n}\n\nfunction errMsg (src, dest, funcName) {\n return `Cannot ${funcName} '${src}' to a subdirectory of itself, '${dest}'.`\n}\n\nmodule.exports = {\n checkPaths,\n checkPathsSync,\n checkParentPaths,\n checkParentPathsSync,\n isSrcSubdir,\n areIdentical\n}\n","'use strict'\n\nconst fs = require('graceful-fs')\n\nfunction utimesMillis (path, atime, mtime, callback) {\n // if (!HAS_MILLIS_RES) return fs.utimes(path, atime, mtime, callback)\n fs.open(path, 'r+', (err, fd) => {\n if (err) return callback(err)\n fs.futimes(fd, atime, mtime, futimesErr => {\n fs.close(fd, closeErr => {\n if (callback) callback(futimesErr || closeErr)\n })\n })\n })\n}\n\nfunction utimesMillisSync (path, atime, mtime) {\n const fd = fs.openSync(path, 'r+')\n fs.futimesSync(fd, atime, mtime)\n return fs.closeSync(fd)\n}\n\nmodule.exports = {\n utimesMillis,\n utimesMillisSync\n}\n","'use strict'\n\nmodule.exports = clone\n\nvar getPrototypeOf = Object.getPrototypeOf || function (obj) {\n return obj.__proto__\n}\n\nfunction clone (obj) {\n if (obj === null || typeof obj !== 'object')\n return obj\n\n if (obj instanceof Object)\n var copy = { __proto__: getPrototypeOf(obj) }\n else\n var copy = Object.create(null)\n\n Object.getOwnPropertyNames(obj).forEach(function (key) {\n Object.defineProperty(copy, key, Object.getOwnPropertyDescriptor(obj, key))\n })\n\n return copy\n}\n","var fs = require('fs')\nvar polyfills = require('./polyfills.js')\nvar legacy = require('./legacy-streams.js')\nvar clone = require('./clone.js')\n\nvar util = require('util')\n\n/* istanbul ignore next - node 0.x polyfill */\nvar gracefulQueue\nvar previousSymbol\n\n/* istanbul ignore else - node 0.x polyfill */\nif (typeof Symbol === 'function' && typeof Symbol.for === 'function') {\n gracefulQueue = Symbol.for('graceful-fs.queue')\n // This is 
used in testing by future versions\n previousSymbol = Symbol.for('graceful-fs.previous')\n} else {\n gracefulQueue = '___graceful-fs.queue'\n previousSymbol = '___graceful-fs.previous'\n}\n\nfunction noop () {}\n\nfunction publishQueue(context, queue) {\n Object.defineProperty(context, gracefulQueue, {\n get: function() {\n return queue\n }\n })\n}\n\nvar debug = noop\nif (util.debuglog)\n debug = util.debuglog('gfs4')\nelse if (/\\bgfs4\\b/i.test(process.env.NODE_DEBUG || ''))\n debug = function() {\n var m = util.format.apply(util, arguments)\n m = 'GFS4: ' + m.split(/\\n/).join('\\nGFS4: ')\n console.error(m)\n }\n\n// Once time initialization\nif (!fs[gracefulQueue]) {\n // This queue can be shared by multiple loaded instances\n var queue = global[gracefulQueue] || []\n publishQueue(fs, queue)\n\n // Patch fs.close/closeSync to shared queue version, because we need\n // to retry() whenever a close happens *anywhere* in the program.\n // This is essential when multiple graceful-fs instances are\n // in play at the same time.\n fs.close = (function (fs$close) {\n function close (fd, cb) {\n return fs$close.call(fs, fd, function (err) {\n // This function uses the graceful-fs shared queue\n if (!err) {\n resetQueue()\n }\n\n if (typeof cb === 'function')\n cb.apply(this, arguments)\n })\n }\n\n Object.defineProperty(close, previousSymbol, {\n value: fs$close\n })\n return close\n })(fs.close)\n\n fs.closeSync = (function (fs$closeSync) {\n function closeSync (fd) {\n // This function uses the graceful-fs shared queue\n fs$closeSync.apply(fs, arguments)\n resetQueue()\n }\n\n Object.defineProperty(closeSync, previousSymbol, {\n value: fs$closeSync\n })\n return closeSync\n })(fs.closeSync)\n\n if (/\\bgfs4\\b/i.test(process.env.NODE_DEBUG || '')) {\n process.on('exit', function() {\n debug(fs[gracefulQueue])\n require('assert').equal(fs[gracefulQueue].length, 0)\n })\n }\n}\n\nif (!global[gracefulQueue]) {\n publishQueue(global, fs[gracefulQueue]);\n}\n\nmodule.exports = patch(clone(fs))\nif (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !fs.__patched) {\n module.exports = patch(fs)\n fs.__patched = true;\n}\n\nfunction patch (fs) {\n // Everything that references the open() function needs to be in here\n polyfills(fs)\n fs.gracefulify = patch\n\n fs.createReadStream = createReadStream\n fs.createWriteStream = createWriteStream\n var fs$readFile = fs.readFile\n fs.readFile = readFile\n function readFile (path, options, cb) {\n if (typeof options === 'function')\n cb = options, options = null\n\n return go$readFile(path, options, cb)\n\n function go$readFile (path, options, cb, startTime) {\n return fs$readFile(path, options, function (err) {\n if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))\n enqueue([go$readFile, [path, options, cb], err, startTime || Date.now(), Date.now()])\n else {\n if (typeof cb === 'function')\n cb.apply(this, arguments)\n }\n })\n }\n }\n\n var fs$writeFile = fs.writeFile\n fs.writeFile = writeFile\n function writeFile (path, data, options, cb) {\n if (typeof options === 'function')\n cb = options, options = null\n\n return go$writeFile(path, data, options, cb)\n\n function go$writeFile (path, data, options, cb, startTime) {\n return fs$writeFile(path, data, options, function (err) {\n if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))\n enqueue([go$writeFile, [path, data, options, cb], err, startTime || Date.now(), Date.now()])\n else {\n if (typeof cb === 'function')\n cb.apply(this, arguments)\n }\n })\n }\n }\n\n var fs$appendFile = 
fs.appendFile\n if (fs$appendFile)\n fs.appendFile = appendFile\n function appendFile (path, data, options, cb) {\n if (typeof options === 'function')\n cb = options, options = null\n\n return go$appendFile(path, data, options, cb)\n\n function go$appendFile (path, data, options, cb, startTime) {\n return fs$appendFile(path, data, options, function (err) {\n if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))\n enqueue([go$appendFile, [path, data, options, cb], err, startTime || Date.now(), Date.now()])\n else {\n if (typeof cb === 'function')\n cb.apply(this, arguments)\n }\n })\n }\n }\n\n var fs$copyFile = fs.copyFile\n if (fs$copyFile)\n fs.copyFile = copyFile\n function copyFile (src, dest, flags, cb) {\n if (typeof flags === 'function') {\n cb = flags\n flags = 0\n }\n return go$copyFile(src, dest, flags, cb)\n\n function go$copyFile (src, dest, flags, cb, startTime) {\n return fs$copyFile(src, dest, flags, function (err) {\n if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))\n enqueue([go$copyFile, [src, dest, flags, cb], err, startTime || Date.now(), Date.now()])\n else {\n if (typeof cb === 'function')\n cb.apply(this, arguments)\n }\n })\n }\n }\n\n var fs$readdir = fs.readdir\n fs.readdir = readdir\n var noReaddirOptionVersions = /^v[0-5]\\./\n function readdir (path, options, cb) {\n if (typeof options === 'function')\n cb = options, options = null\n\n var go$readdir = noReaddirOptionVersions.test(process.version)\n ? function go$readdir (path, options, cb, startTime) {\n return fs$readdir(path, fs$readdirCallback(\n path, options, cb, startTime\n ))\n }\n : function go$readdir (path, options, cb, startTime) {\n return fs$readdir(path, options, fs$readdirCallback(\n path, options, cb, startTime\n ))\n }\n\n return go$readdir(path, options, cb)\n\n function fs$readdirCallback (path, options, cb, startTime) {\n return function (err, files) {\n if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))\n enqueue([\n go$readdir,\n [path, options, cb],\n err,\n startTime || Date.now(),\n Date.now()\n ])\n else {\n if (files && files.sort)\n files.sort()\n\n if (typeof cb === 'function')\n cb.call(this, err, files)\n }\n }\n }\n }\n\n if (process.version.substr(0, 4) === 'v0.8') {\n var legStreams = legacy(fs)\n ReadStream = legStreams.ReadStream\n WriteStream = legStreams.WriteStream\n }\n\n var fs$ReadStream = fs.ReadStream\n if (fs$ReadStream) {\n ReadStream.prototype = Object.create(fs$ReadStream.prototype)\n ReadStream.prototype.open = ReadStream$open\n }\n\n var fs$WriteStream = fs.WriteStream\n if (fs$WriteStream) {\n WriteStream.prototype = Object.create(fs$WriteStream.prototype)\n WriteStream.prototype.open = WriteStream$open\n }\n\n Object.defineProperty(fs, 'ReadStream', {\n get: function () {\n return ReadStream\n },\n set: function (val) {\n ReadStream = val\n },\n enumerable: true,\n configurable: true\n })\n Object.defineProperty(fs, 'WriteStream', {\n get: function () {\n return WriteStream\n },\n set: function (val) {\n WriteStream = val\n },\n enumerable: true,\n configurable: true\n })\n\n // legacy names\n var FileReadStream = ReadStream\n Object.defineProperty(fs, 'FileReadStream', {\n get: function () {\n return FileReadStream\n },\n set: function (val) {\n FileReadStream = val\n },\n enumerable: true,\n configurable: true\n })\n var FileWriteStream = WriteStream\n Object.defineProperty(fs, 'FileWriteStream', {\n get: function () {\n return FileWriteStream\n },\n set: function (val) {\n FileWriteStream = val\n },\n enumerable: true,\n 
configurable: true\n })\n\n function ReadStream (path, options) {\n if (this instanceof ReadStream)\n return fs$ReadStream.apply(this, arguments), this\n else\n return ReadStream.apply(Object.create(ReadStream.prototype), arguments)\n }\n\n function ReadStream$open () {\n var that = this\n open(that.path, that.flags, that.mode, function (err, fd) {\n if (err) {\n if (that.autoClose)\n that.destroy()\n\n that.emit('error', err)\n } else {\n that.fd = fd\n that.emit('open', fd)\n that.read()\n }\n })\n }\n\n function WriteStream (path, options) {\n if (this instanceof WriteStream)\n return fs$WriteStream.apply(this, arguments), this\n else\n return WriteStream.apply(Object.create(WriteStream.prototype), arguments)\n }\n\n function WriteStream$open () {\n var that = this\n open(that.path, that.flags, that.mode, function (err, fd) {\n if (err) {\n that.destroy()\n that.emit('error', err)\n } else {\n that.fd = fd\n that.emit('open', fd)\n }\n })\n }\n\n function createReadStream (path, options) {\n return new fs.ReadStream(path, options)\n }\n\n function createWriteStream (path, options) {\n return new fs.WriteStream(path, options)\n }\n\n var fs$open = fs.open\n fs.open = open\n function open (path, flags, mode, cb) {\n if (typeof mode === 'function')\n cb = mode, mode = null\n\n return go$open(path, flags, mode, cb)\n\n function go$open (path, flags, mode, cb, startTime) {\n return fs$open(path, flags, mode, function (err, fd) {\n if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))\n enqueue([go$open, [path, flags, mode, cb], err, startTime || Date.now(), Date.now()])\n else {\n if (typeof cb === 'function')\n cb.apply(this, arguments)\n }\n })\n }\n }\n\n return fs\n}\n\nfunction enqueue (elem) {\n debug('ENQUEUE', elem[0].name, elem[1])\n fs[gracefulQueue].push(elem)\n retry()\n}\n\n// keep track of the timeout between retry() calls\nvar retryTimer\n\n// reset the startTime and lastTime to now\n// this resets the start of the 60 second overall timeout as well as the\n// delay between attempts so that we'll retry these jobs sooner\nfunction resetQueue () {\n var now = Date.now()\n for (var i = 0; i < fs[gracefulQueue].length; ++i) {\n // entries that are only a length of 2 are from an older version, don't\n // bother modifying those since they'll be retried anyway.\n if (fs[gracefulQueue][i].length > 2) {\n fs[gracefulQueue][i][3] = now // startTime\n fs[gracefulQueue][i][4] = now // lastTime\n }\n }\n // call retry to make sure we're actively processing the queue\n retry()\n}\n\nfunction retry () {\n // clear the timer and remove it to help prevent unintended concurrency\n clearTimeout(retryTimer)\n retryTimer = undefined\n\n if (fs[gracefulQueue].length === 0)\n return\n\n var elem = fs[gracefulQueue].shift()\n var fn = elem[0]\n var args = elem[1]\n // these items may be unset if they were added by an older graceful-fs\n var err = elem[2]\n var startTime = elem[3]\n var lastTime = elem[4]\n\n // if we don't have a startTime we have no way of knowing if we've waited\n // long enough, so go ahead and retry this item now\n if (startTime === undefined) {\n debug('RETRY', fn.name, args)\n fn.apply(null, args)\n } else if (Date.now() - startTime >= 60000) {\n // it's been more than 60 seconds total, bail now\n debug('TIMEOUT', fn.name, args)\n var cb = args.pop()\n if (typeof cb === 'function')\n cb.call(null, err)\n } else {\n // the amount of time between the last attempt and right now\n var sinceAttempt = Date.now() - lastTime\n // the amount of time between when we first tried, 
and when we last tried\n // rounded up to at least 1\n var sinceStart = Math.max(lastTime - startTime, 1)\n // backoff. wait longer than the total time we've been retrying, but only\n // up to a maximum of 100ms\n var desiredDelay = Math.min(sinceStart * 1.2, 100)\n // it's been long enough since the last retry, do it again\n if (sinceAttempt >= desiredDelay) {\n debug('RETRY', fn.name, args)\n fn.apply(null, args.concat([startTime]))\n } else {\n // if we can't do this job yet, push it to the end of the queue\n // and let the next iteration check again\n fs[gracefulQueue].push(elem)\n }\n }\n\n // schedule our next run if one isn't already scheduled\n if (retryTimer === undefined) {\n retryTimer = setTimeout(retry, 0)\n }\n}\n","var Stream = require('stream').Stream\n\nmodule.exports = legacy\n\nfunction legacy (fs) {\n return {\n ReadStream: ReadStream,\n WriteStream: WriteStream\n }\n\n function ReadStream (path, options) {\n if (!(this instanceof ReadStream)) return new ReadStream(path, options);\n\n Stream.call(this);\n\n var self = this;\n\n this.path = path;\n this.fd = null;\n this.readable = true;\n this.paused = false;\n\n this.flags = 'r';\n this.mode = 438; /*=0666*/\n this.bufferSize = 64 * 1024;\n\n options = options || {};\n\n // Mixin options into this\n var keys = Object.keys(options);\n for (var index = 0, length = keys.length; index < length; index++) {\n var key = keys[index];\n this[key] = options[key];\n }\n\n if (this.encoding) this.setEncoding(this.encoding);\n\n if (this.start !== undefined) {\n if ('number' !== typeof this.start) {\n throw TypeError('start must be a Number');\n }\n if (this.end === undefined) {\n this.end = Infinity;\n } else if ('number' !== typeof this.end) {\n throw TypeError('end must be a Number');\n }\n\n if (this.start > this.end) {\n throw new Error('start must be <= end');\n }\n\n this.pos = this.start;\n }\n\n if (this.fd !== null) {\n process.nextTick(function() {\n self._read();\n });\n return;\n }\n\n fs.open(this.path, this.flags, this.mode, function (err, fd) {\n if (err) {\n self.emit('error', err);\n self.readable = false;\n return;\n }\n\n self.fd = fd;\n self.emit('open', fd);\n self._read();\n })\n }\n\n function WriteStream (path, options) {\n if (!(this instanceof WriteStream)) return new WriteStream(path, options);\n\n Stream.call(this);\n\n this.path = path;\n this.fd = null;\n this.writable = true;\n\n this.flags = 'w';\n this.encoding = 'binary';\n this.mode = 438; /*=0666*/\n this.bytesWritten = 0;\n\n options = options || {};\n\n // Mixin options into this\n var keys = Object.keys(options);\n for (var index = 0, length = keys.length; index < length; index++) {\n var key = keys[index];\n this[key] = options[key];\n }\n\n if (this.start !== undefined) {\n if ('number' !== typeof this.start) {\n throw TypeError('start must be a Number');\n }\n if (this.start < 0) {\n throw new Error('start must be >= zero');\n }\n\n this.pos = this.start;\n }\n\n this.busy = false;\n this._queue = [];\n\n if (this.fd === null) {\n this._open = fs.open;\n this._queue.push([this._open, this.path, this.flags, this.mode, undefined]);\n this.flush();\n }\n }\n}\n","var constants = require('constants')\n\nvar origCwd = process.cwd\nvar cwd = null\n\nvar platform = process.env.GRACEFUL_FS_PLATFORM || process.platform\n\nprocess.cwd = function() {\n if (!cwd)\n cwd = origCwd.call(process)\n return cwd\n}\ntry {\n process.cwd()\n} catch (er) {}\n\n// This check is needed until node.js 12 is required\nif (typeof process.chdir === 'function') {\n var 
chdir = process.chdir\n process.chdir = function (d) {\n cwd = null\n chdir.call(process, d)\n }\n if (Object.setPrototypeOf) Object.setPrototypeOf(process.chdir, chdir)\n}\n\nmodule.exports = patch\n\nfunction patch (fs) {\n // (re-)implement some things that are known busted or missing.\n\n // lchmod, broken prior to 0.6.2\n // back-port the fix here.\n if (constants.hasOwnProperty('O_SYMLINK') &&\n process.version.match(/^v0\\.6\\.[0-2]|^v0\\.5\\./)) {\n patchLchmod(fs)\n }\n\n // lutimes implementation, or no-op\n if (!fs.lutimes) {\n patchLutimes(fs)\n }\n\n // https://github.com/isaacs/node-graceful-fs/issues/4\n // Chown should not fail on einval or eperm if non-root.\n // It should not fail on enosys ever, as this just indicates\n // that a fs doesn't support the intended operation.\n\n fs.chown = chownFix(fs.chown)\n fs.fchown = chownFix(fs.fchown)\n fs.lchown = chownFix(fs.lchown)\n\n fs.chmod = chmodFix(fs.chmod)\n fs.fchmod = chmodFix(fs.fchmod)\n fs.lchmod = chmodFix(fs.lchmod)\n\n fs.chownSync = chownFixSync(fs.chownSync)\n fs.fchownSync = chownFixSync(fs.fchownSync)\n fs.lchownSync = chownFixSync(fs.lchownSync)\n\n fs.chmodSync = chmodFixSync(fs.chmodSync)\n fs.fchmodSync = chmodFixSync(fs.fchmodSync)\n fs.lchmodSync = chmodFixSync(fs.lchmodSync)\n\n fs.stat = statFix(fs.stat)\n fs.fstat = statFix(fs.fstat)\n fs.lstat = statFix(fs.lstat)\n\n fs.statSync = statFixSync(fs.statSync)\n fs.fstatSync = statFixSync(fs.fstatSync)\n fs.lstatSync = statFixSync(fs.lstatSync)\n\n // if lchmod/lchown do not exist, then make them no-ops\n if (fs.chmod && !fs.lchmod) {\n fs.lchmod = function (path, mode, cb) {\n if (cb) process.nextTick(cb)\n }\n fs.lchmodSync = function () {}\n }\n if (fs.chown && !fs.lchown) {\n fs.lchown = function (path, uid, gid, cb) {\n if (cb) process.nextTick(cb)\n }\n fs.lchownSync = function () {}\n }\n\n // on Windows, A/V software can lock the directory, causing this\n // to fail with an EACCES or EPERM if the directory contains newly\n // created files. Try again on failure, for up to 60 seconds.\n\n // Set the timeout this long because some Windows Anti-Virus, such as Parity\n // bit9, may lock files for up to a minute, causing npm package install\n // failures. Also, take care to yield the scheduler. Windows scheduling gives\n // CPU to a busy looping process, which can cause the program causing the lock\n // contention to be starved of CPU by node, so the contention doesn't resolve.\n if (platform === \"win32\") {\n fs.rename = typeof fs.rename !== 'function' ? fs.rename\n : (function (fs$rename) {\n function rename (from, to, cb) {\n var start = Date.now()\n var backoff = 0;\n fs$rename(from, to, function CB (er) {\n if (er\n && (er.code === \"EACCES\" || er.code === \"EPERM\")\n && Date.now() - start < 60000) {\n setTimeout(function() {\n fs.stat(to, function (stater, st) {\n if (stater && stater.code === \"ENOENT\")\n fs$rename(from, to, CB);\n else\n cb(er)\n })\n }, backoff)\n if (backoff < 100)\n backoff += 10;\n return;\n }\n if (cb) cb(er)\n })\n }\n if (Object.setPrototypeOf) Object.setPrototypeOf(rename, fs$rename)\n return rename\n })(fs.rename)\n }\n\n // if read() returns EAGAIN, then just try it again.\n fs.read = typeof fs.read !== 'function' ? 
fs.read\n : (function (fs$read) {\n function read (fd, buffer, offset, length, position, callback_) {\n var callback\n if (callback_ && typeof callback_ === 'function') {\n var eagCounter = 0\n callback = function (er, _, __) {\n if (er && er.code === 'EAGAIN' && eagCounter < 10) {\n eagCounter ++\n return fs$read.call(fs, fd, buffer, offset, length, position, callback)\n }\n callback_.apply(this, arguments)\n }\n }\n return fs$read.call(fs, fd, buffer, offset, length, position, callback)\n }\n\n // This ensures `util.promisify` works as it does for native `fs.read`.\n if (Object.setPrototypeOf) Object.setPrototypeOf(read, fs$read)\n return read\n })(fs.read)\n\n fs.readSync = typeof fs.readSync !== 'function' ? fs.readSync\n : (function (fs$readSync) { return function (fd, buffer, offset, length, position) {\n var eagCounter = 0\n while (true) {\n try {\n return fs$readSync.call(fs, fd, buffer, offset, length, position)\n } catch (er) {\n if (er.code === 'EAGAIN' && eagCounter < 10) {\n eagCounter ++\n continue\n }\n throw er\n }\n }\n }})(fs.readSync)\n\n function patchLchmod (fs) {\n fs.lchmod = function (path, mode, callback) {\n fs.open( path\n , constants.O_WRONLY | constants.O_SYMLINK\n , mode\n , function (err, fd) {\n if (err) {\n if (callback) callback(err)\n return\n }\n // prefer to return the chmod error, if one occurs,\n // but still try to close, and report closing errors if they occur.\n fs.fchmod(fd, mode, function (err) {\n fs.close(fd, function(err2) {\n if (callback) callback(err || err2)\n })\n })\n })\n }\n\n fs.lchmodSync = function (path, mode) {\n var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode)\n\n // prefer to return the chmod error, if one occurs,\n // but still try to close, and report closing errors if they occur.\n var threw = true\n var ret\n try {\n ret = fs.fchmodSync(fd, mode)\n threw = false\n } finally {\n if (threw) {\n try {\n fs.closeSync(fd)\n } catch (er) {}\n } else {\n fs.closeSync(fd)\n }\n }\n return ret\n }\n }\n\n function patchLutimes (fs) {\n if (constants.hasOwnProperty(\"O_SYMLINK\") && fs.futimes) {\n fs.lutimes = function (path, at, mt, cb) {\n fs.open(path, constants.O_SYMLINK, function (er, fd) {\n if (er) {\n if (cb) cb(er)\n return\n }\n fs.futimes(fd, at, mt, function (er) {\n fs.close(fd, function (er2) {\n if (cb) cb(er || er2)\n })\n })\n })\n }\n\n fs.lutimesSync = function (path, at, mt) {\n var fd = fs.openSync(path, constants.O_SYMLINK)\n var ret\n var threw = true\n try {\n ret = fs.futimesSync(fd, at, mt)\n threw = false\n } finally {\n if (threw) {\n try {\n fs.closeSync(fd)\n } catch (er) {}\n } else {\n fs.closeSync(fd)\n }\n }\n return ret\n }\n\n } else if (fs.futimes) {\n fs.lutimes = function (_a, _b, _c, cb) { if (cb) process.nextTick(cb) }\n fs.lutimesSync = function () {}\n }\n }\n\n function chmodFix (orig) {\n if (!orig) return orig\n return function (target, mode, cb) {\n return orig.call(fs, target, mode, function (er) {\n if (chownErOk(er)) er = null\n if (cb) cb.apply(this, arguments)\n })\n }\n }\n\n function chmodFixSync (orig) {\n if (!orig) return orig\n return function (target, mode) {\n try {\n return orig.call(fs, target, mode)\n } catch (er) {\n if (!chownErOk(er)) throw er\n }\n }\n }\n\n\n function chownFix (orig) {\n if (!orig) return orig\n return function (target, uid, gid, cb) {\n return orig.call(fs, target, uid, gid, function (er) {\n if (chownErOk(er)) er = null\n if (cb) cb.apply(this, arguments)\n })\n }\n }\n\n function chownFixSync (orig) {\n if (!orig) return 
orig\n return function (target, uid, gid) {\n try {\n return orig.call(fs, target, uid, gid)\n } catch (er) {\n if (!chownErOk(er)) throw er\n }\n }\n }\n\n function statFix (orig) {\n if (!orig) return orig\n // Older versions of Node erroneously returned signed integers for\n // uid + gid.\n return function (target, options, cb) {\n if (typeof options === 'function') {\n cb = options\n options = null\n }\n function callback (er, stats) {\n if (stats) {\n if (stats.uid < 0) stats.uid += 0x100000000\n if (stats.gid < 0) stats.gid += 0x100000000\n }\n if (cb) cb.apply(this, arguments)\n }\n return options ? orig.call(fs, target, options, callback)\n : orig.call(fs, target, callback)\n }\n }\n\n function statFixSync (orig) {\n if (!orig) return orig\n // Older versions of Node erroneously returned signed integers for\n // uid + gid.\n return function (target, options) {\n var stats = options ? orig.call(fs, target, options)\n : orig.call(fs, target)\n if (stats) {\n if (stats.uid < 0) stats.uid += 0x100000000\n if (stats.gid < 0) stats.gid += 0x100000000\n }\n return stats;\n }\n }\n\n // ENOSYS means that the fs doesn't support the op. Just ignore\n // that, because it doesn't matter.\n //\n // if there's no getuid, or if getuid() is something other\n // than 0, and the error is EINVAL or EPERM, then just ignore\n // it.\n //\n // This specific case is a silent failure in cp, install, tar,\n // and most other unix tools that manage permissions.\n //\n // When running as root, or if other types of errors are\n // encountered, then it's strict.\n function chownErOk (er) {\n if (!er)\n return true\n\n if (er.code === \"ENOSYS\")\n return true\n\n var nonroot = !process.getuid || process.getuid() !== 0\n if (nonroot) {\n if (er.code === \"EINVAL\" || er.code === \"EPERM\")\n return true\n }\n\n return false\n }\n}\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\n/*!\n * is-plain-object \n *\n * Copyright (c) 2014-2017, Jon Schlinkert.\n * Released under the MIT License.\n */\n\nfunction isObject(o) {\n return Object.prototype.toString.call(o) === '[object Object]';\n}\n\nfunction isPlainObject(o) {\n var ctor,prot;\n\n if (isObject(o) === false) return false;\n\n // If has modified constructor\n ctor = o.constructor;\n if (ctor === undefined) return true;\n\n // If has modified prototype\n prot = ctor.prototype;\n if (isObject(prot) === false) return false;\n\n // If constructor does not have an Object-specific method\n if (prot.hasOwnProperty('isPrototypeOf') === false) {\n return false;\n }\n\n // Most likely a plain Object\n return true;\n}\n\nexports.isPlainObject = isPlainObject;\n","'use strict';\n\nvar traverse = module.exports = function (schema, opts, cb) {\n // Legacy support for v0.3.1 and earlier.\n if (typeof opts == 'function') {\n cb = opts;\n opts = {};\n }\n\n cb = opts.cb || cb;\n var pre = (typeof cb == 'function') ? 
cb : cb.pre || function() {};\n var post = cb.post || function() {};\n\n _traverse(opts, pre, post, schema, '', schema);\n};\n\n\ntraverse.keywords = {\n additionalItems: true,\n items: true,\n contains: true,\n additionalProperties: true,\n propertyNames: true,\n not: true,\n if: true,\n then: true,\n else: true\n};\n\ntraverse.arrayKeywords = {\n items: true,\n allOf: true,\n anyOf: true,\n oneOf: true\n};\n\ntraverse.propsKeywords = {\n $defs: true,\n definitions: true,\n properties: true,\n patternProperties: true,\n dependencies: true\n};\n\ntraverse.skipKeywords = {\n default: true,\n enum: true,\n const: true,\n required: true,\n maximum: true,\n minimum: true,\n exclusiveMaximum: true,\n exclusiveMinimum: true,\n multipleOf: true,\n maxLength: true,\n minLength: true,\n pattern: true,\n format: true,\n maxItems: true,\n minItems: true,\n uniqueItems: true,\n maxProperties: true,\n minProperties: true\n};\n\n\nfunction _traverse(opts, pre, post, schema, jsonPtr, rootSchema, parentJsonPtr, parentKeyword, parentSchema, keyIndex) {\n if (schema && typeof schema == 'object' && !Array.isArray(schema)) {\n pre(schema, jsonPtr, rootSchema, parentJsonPtr, parentKeyword, parentSchema, keyIndex);\n for (var key in schema) {\n var sch = schema[key];\n if (Array.isArray(sch)) {\n if (key in traverse.arrayKeywords) {\n for (var i=0; i 1 && arguments[1] !== undefined ? arguments[1] : {},\n\t _ref$size = _ref.size;\n\n\tlet size = _ref$size === undefined ? 0 : _ref$size;\n\tvar _ref$timeout = _ref.timeout;\n\tlet timeout = _ref$timeout === undefined ? 0 : _ref$timeout;\n\n\tif (body == null) {\n\t\t// body is undefined or null\n\t\tbody = null;\n\t} else if (isURLSearchParams(body)) {\n\t\t// body is a URLSearchParams\n\t\tbody = Buffer.from(body.toString());\n\t} else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {\n\t\t// body is ArrayBuffer\n\t\tbody = Buffer.from(body);\n\t} else if (ArrayBuffer.isView(body)) {\n\t\t// body is ArrayBufferView\n\t\tbody = Buffer.from(body.buffer, body.byteOffset, body.byteLength);\n\t} else if (body instanceof Stream) ; else {\n\t\t// none of the above\n\t\t// coerce to string then buffer\n\t\tbody = Buffer.from(String(body));\n\t}\n\tthis[INTERNALS] = {\n\t\tbody,\n\t\tdisturbed: false,\n\t\terror: null\n\t};\n\tthis.size = size;\n\tthis.timeout = timeout;\n\n\tif (body instanceof Stream) {\n\t\tbody.on('error', function (err) {\n\t\t\tconst error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);\n\t\t\t_this[INTERNALS].error = error;\n\t\t});\n\t}\n}\n\nBody.prototype = {\n\tget body() {\n\t\treturn this[INTERNALS].body;\n\t},\n\n\tget bodyUsed() {\n\t\treturn this[INTERNALS].disturbed;\n\t},\n\n\t/**\n * Decode response as ArrayBuffer\n *\n * @return Promise\n */\n\tarrayBuffer() {\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\t});\n\t},\n\n\t/**\n * Return raw response as Blob\n *\n * @return Promise\n */\n\tblob() {\n\t\tlet ct = this.headers && this.headers.get('content-type') || '';\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn Object.assign(\n\t\t\t// Prevent copying\n\t\t\tnew Blob([], {\n\t\t\t\ttype: ct.toLowerCase()\n\t\t\t}), {\n\t\t\t\t[BUFFER]: buf\n\t\t\t});\n\t\t});\n\t},\n\n\t/**\n * Decode response as json\n *\n * @return Promise\n */\n\tjson() {\n\t\tvar _this2 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\ttry {\n\t\t\t\treturn JSON.parse(buffer.toString());\n\t\t\t} catch (err) {\n\t\t\t\treturn Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));\n\t\t\t}\n\t\t});\n\t},\n\n\t/**\n * Decode response as text\n *\n * @return Promise\n */\n\ttext() {\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn buffer.toString();\n\t\t});\n\t},\n\n\t/**\n * Decode response as buffer (non-spec api)\n *\n * @return Promise\n */\n\tbuffer() {\n\t\treturn consumeBody.call(this);\n\t},\n\n\t/**\n * Decode response as text, while automatically detecting the encoding and\n * trying to decode to UTF-8 (non-spec api)\n *\n * @return Promise\n */\n\ttextConverted() {\n\t\tvar _this3 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn convertBody(buffer, _this3.headers);\n\t\t});\n\t}\n};\n\n// In browsers, all properties are enumerable.\nObject.defineProperties(Body.prototype, {\n\tbody: { enumerable: true },\n\tbodyUsed: { enumerable: true },\n\tarrayBuffer: { enumerable: true },\n\tblob: { enumerable: true },\n\tjson: { enumerable: true },\n\ttext: { enumerable: true }\n});\n\nBody.mixIn = function (proto) {\n\tfor (const name of Object.getOwnPropertyNames(Body.prototype)) {\n\t\t// istanbul ignore else: future proof\n\t\tif (!(name in proto)) {\n\t\t\tconst desc = Object.getOwnPropertyDescriptor(Body.prototype, name);\n\t\t\tObject.defineProperty(proto, name, desc);\n\t\t}\n\t}\n};\n\n/**\n * Consume and convert an entire Body to a Buffer.\n *\n * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body\n *\n * @return Promise\n */\nfunction consumeBody() {\n\tvar _this4 = this;\n\n\tif (this[INTERNALS].disturbed) {\n\t\treturn Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));\n\t}\n\n\tthis[INTERNALS].disturbed = true;\n\n\tif (this[INTERNALS].error) {\n\t\treturn Body.Promise.reject(this[INTERNALS].error);\n\t}\n\n\tlet body = this.body;\n\n\t// body is null\n\tif (body === null) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is blob\n\tif (isBlob(body)) {\n\t\tbody = body.stream();\n\t}\n\n\t// body is buffer\n\tif (Buffer.isBuffer(body)) {\n\t\treturn Body.Promise.resolve(body);\n\t}\n\n\t// istanbul ignore if: should never happen\n\tif (!(body instanceof Stream)) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is 
stream\n\t// get ready to actually consume the body\n\tlet accum = [];\n\tlet accumBytes = 0;\n\tlet abort = false;\n\n\treturn new Body.Promise(function (resolve, reject) {\n\t\tlet resTimeout;\n\n\t\t// allow timeout on slow response body\n\t\tif (_this4.timeout) {\n\t\t\tresTimeout = setTimeout(function () {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));\n\t\t\t}, _this4.timeout);\n\t\t}\n\n\t\t// handle stream errors\n\t\tbody.on('error', function (err) {\n\t\t\tif (err.name === 'AbortError') {\n\t\t\t\t// if the request was aborted, reject with this Error\n\t\t\t\tabort = true;\n\t\t\t\treject(err);\n\t\t\t} else {\n\t\t\t\t// other errors, such as incorrect content-encoding\n\t\t\t\treject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\n\t\tbody.on('data', function (chunk) {\n\t\t\tif (abort || chunk === null) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tif (_this4.size && accumBytes + chunk.length > _this4.size) {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\taccumBytes += chunk.length;\n\t\t\taccum.push(chunk);\n\t\t});\n\n\t\tbody.on('end', function () {\n\t\t\tif (abort) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tclearTimeout(resTimeout);\n\n\t\t\ttry {\n\t\t\t\tresolve(Buffer.concat(accum, accumBytes));\n\t\t\t} catch (err) {\n\t\t\t\t// handle streams that have accumulated too much data (issue #414)\n\t\t\t\treject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\t});\n}\n\n/**\n * Detect buffer encoding and convert to target encoding\n * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding\n *\n * @param Buffer buffer Incoming buffer\n * @param String encoding Target encoding\n * @return String\n */\nfunction convertBody(buffer, headers) {\n\tif (typeof convert !== 'function') {\n\t\tthrow new Error('The package `encoding` must be installed to use the textConverted() function');\n\t}\n\n\tconst ct = headers.get('content-type');\n\tlet charset = 'utf-8';\n\tlet res, str;\n\n\t// header\n\tif (ct) {\n\t\tres = /charset=([^;]*)/i.exec(ct);\n\t}\n\n\t// no charset in content type, peek at response body for at most 1024 bytes\n\tstr = buffer.slice(0, 1024).toString();\n\n\t// html5\n\tif (!res && str) {\n\t\tres = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined;\n\n\t\tthis[MAP] = Object.create(null);\n\n\t\tif (init instanceof Headers) {\n\t\t\tconst rawHeaders = init.raw();\n\t\t\tconst headerNames = Object.keys(rawHeaders);\n\n\t\t\tfor (const headerName of headerNames) {\n\t\t\t\tfor (const value of rawHeaders[headerName]) {\n\t\t\t\t\tthis.append(headerName, value);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\treturn;\n\t\t}\n\n\t\t// We don't worry about converting prop to ByteString here as append()\n\t\t// will handle it.\n\t\tif (init == null) ; else if (typeof init === 'object') {\n\t\t\tconst method = init[Symbol.iterator];\n\t\t\tif (method != null) {\n\t\t\t\tif (typeof method !== 'function') {\n\t\t\t\t\tthrow new TypeError('Header pairs must be iterable');\n\t\t\t\t}\n\n\t\t\t\t// sequence>\n\t\t\t\t// Note: per spec we have to first exhaust the lists then process them\n\t\t\t\tconst pairs = [];\n\t\t\t\tfor (const pair of init) {\n\t\t\t\t\tif (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be iterable');\n\t\t\t\t\t}\n\t\t\t\t\tpairs.push(Array.from(pair));\n\t\t\t\t}\n\n\t\t\t\tfor (const pair of pairs) {\n\t\t\t\t\tif (pair.length !== 2) {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be a name/value tuple');\n\t\t\t\t\t}\n\t\t\t\t\tthis.append(pair[0], pair[1]);\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\t// record\n\t\t\t\tfor (const key of Object.keys(init)) {\n\t\t\t\t\tconst value = init[key];\n\t\t\t\t\tthis.append(key, value);\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tthrow new TypeError('Provided initializer must be an object');\n\t\t}\n\t}\n\n\t/**\n * Return combined header value given name\n *\n * @param String name Header name\n * @return Mixed\n */\n\tget(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key === undefined) {\n\t\t\treturn null;\n\t\t}\n\n\t\treturn this[MAP][key].join(', ');\n\t}\n\n\t/**\n * Iterate over all headers\n *\n * @param Function callback Executed for each item with parameters (value, name, thisArg)\n * @param Boolean thisArg `this` context for callback function\n * @return Void\n */\n\tforEach(callback) {\n\t\tlet thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;\n\n\t\tlet pairs = getHeaders(this);\n\t\tlet i = 0;\n\t\twhile (i < pairs.length) {\n\t\t\tvar _pairs$i = pairs[i];\n\t\t\tconst name = _pairs$i[0],\n\t\t\t value = _pairs$i[1];\n\n\t\t\tcallback.call(thisArg, value, name, this);\n\t\t\tpairs = getHeaders(this);\n\t\t\ti++;\n\t\t}\n\t}\n\n\t/**\n * Overwrite header values given name\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tset(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tthis[MAP][key !== undefined ? 
key : name] = [value];\n\t}\n\n\t/**\n * Append a value onto existing header\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tappend(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tthis[MAP][key].push(value);\n\t\t} else {\n\t\t\tthis[MAP][name] = [value];\n\t\t}\n\t}\n\n\t/**\n * Check for header name existence\n *\n * @param String name Header name\n * @return Boolean\n */\n\thas(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\treturn find(this[MAP], name) !== undefined;\n\t}\n\n\t/**\n * Delete all header values given name\n *\n * @param String name Header name\n * @return Void\n */\n\tdelete(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tdelete this[MAP][key];\n\t\t}\n\t}\n\n\t/**\n * Return raw headers (non-spec api)\n *\n * @return Object\n */\n\traw() {\n\t\treturn this[MAP];\n\t}\n\n\t/**\n * Get an iterator on keys.\n *\n * @return Iterator\n */\n\tkeys() {\n\t\treturn createHeadersIterator(this, 'key');\n\t}\n\n\t/**\n * Get an iterator on values.\n *\n * @return Iterator\n */\n\tvalues() {\n\t\treturn createHeadersIterator(this, 'value');\n\t}\n\n\t/**\n * Get an iterator on entries.\n *\n * This is the default iterator of the Headers object.\n *\n * @return Iterator\n */\n\t[Symbol.iterator]() {\n\t\treturn createHeadersIterator(this, 'key+value');\n\t}\n}\nHeaders.prototype.entries = Headers.prototype[Symbol.iterator];\n\nObject.defineProperty(Headers.prototype, Symbol.toStringTag, {\n\tvalue: 'Headers',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Headers.prototype, {\n\tget: { enumerable: true },\n\tforEach: { enumerable: true },\n\tset: { enumerable: true },\n\tappend: { enumerable: true },\n\thas: { enumerable: true },\n\tdelete: { enumerable: true },\n\tkeys: { enumerable: true },\n\tvalues: { enumerable: true },\n\tentries: { enumerable: true }\n});\n\nfunction getHeaders(headers) {\n\tlet kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';\n\n\tconst keys = Object.keys(headers[MAP]).sort();\n\treturn keys.map(kind === 'key' ? function (k) {\n\t\treturn k.toLowerCase();\n\t} : kind === 'value' ? 
function (k) {\n\t\treturn headers[MAP][k].join(', ');\n\t} : function (k) {\n\t\treturn [k.toLowerCase(), headers[MAP][k].join(', ')];\n\t});\n}\n\nconst INTERNAL = Symbol('internal');\n\nfunction createHeadersIterator(target, kind) {\n\tconst iterator = Object.create(HeadersIteratorPrototype);\n\titerator[INTERNAL] = {\n\t\ttarget,\n\t\tkind,\n\t\tindex: 0\n\t};\n\treturn iterator;\n}\n\nconst HeadersIteratorPrototype = Object.setPrototypeOf({\n\tnext() {\n\t\t// istanbul ignore if\n\t\tif (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {\n\t\t\tthrow new TypeError('Value of `this` is not a HeadersIterator');\n\t\t}\n\n\t\tvar _INTERNAL = this[INTERNAL];\n\t\tconst target = _INTERNAL.target,\n\t\t kind = _INTERNAL.kind,\n\t\t index = _INTERNAL.index;\n\n\t\tconst values = getHeaders(target, kind);\n\t\tconst len = values.length;\n\t\tif (index >= len) {\n\t\t\treturn {\n\t\t\t\tvalue: undefined,\n\t\t\t\tdone: true\n\t\t\t};\n\t\t}\n\n\t\tthis[INTERNAL].index = index + 1;\n\n\t\treturn {\n\t\t\tvalue: values[index],\n\t\t\tdone: false\n\t\t};\n\t}\n}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));\n\nObject.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {\n\tvalue: 'HeadersIterator',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * Export the Headers object in a form that Node.js can consume.\n *\n * @param Headers headers\n * @return Object\n */\nfunction exportNodeCompatibleHeaders(headers) {\n\tconst obj = Object.assign({ __proto__: null }, headers[MAP]);\n\n\t// http.request() only supports string as Host header. This hack makes\n\t// specifying custom Host header possible.\n\tconst hostHeaderKey = find(headers[MAP], 'Host');\n\tif (hostHeaderKey !== undefined) {\n\t\tobj[hostHeaderKey] = obj[hostHeaderKey][0];\n\t}\n\n\treturn obj;\n}\n\n/**\n * Create a Headers object from an object of headers, ignoring those that do\n * not conform to HTTP grammar productions.\n *\n * @param Object obj Object of headers\n * @return Headers\n */\nfunction createHeadersLenient(obj) {\n\tconst headers = new Headers();\n\tfor (const name of Object.keys(obj)) {\n\t\tif (invalidTokenRegex.test(name)) {\n\t\t\tcontinue;\n\t\t}\n\t\tif (Array.isArray(obj[name])) {\n\t\t\tfor (const val of obj[name]) {\n\t\t\t\tif (invalidHeaderCharRegex.test(val)) {\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\tif (headers[MAP][name] === undefined) {\n\t\t\t\t\theaders[MAP][name] = [val];\n\t\t\t\t} else {\n\t\t\t\t\theaders[MAP][name].push(val);\n\t\t\t\t}\n\t\t\t}\n\t\t} else if (!invalidHeaderCharRegex.test(obj[name])) {\n\t\t\theaders[MAP][name] = [obj[name]];\n\t\t}\n\t}\n\treturn headers;\n}\n\nconst INTERNALS$1 = Symbol('Response internals');\n\n// fix an issue where \"STATUS_CODES\" aren't a named export for node <10\nconst STATUS_CODES = http.STATUS_CODES;\n\n/**\n * Response class\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nclass Response {\n\tconstructor() {\n\t\tlet body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;\n\t\tlet opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tBody.call(this, body, opts);\n\n\t\tconst status = opts.status || 200;\n\t\tconst headers = new Headers(opts.headers);\n\n\t\tif (body != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(body);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tthis[INTERNALS$1] = {\n\t\t\turl: opts.url,\n\t\t\tstatus,\n\t\t\tstatusText: opts.statusText || STATUS_CODES[status],\n\t\t\theaders,\n\t\t\tcounter: opts.counter\n\t\t};\n\t}\n\n\tget url() {\n\t\treturn this[INTERNALS$1].url || '';\n\t}\n\n\tget status() {\n\t\treturn this[INTERNALS$1].status;\n\t}\n\n\t/**\n * Convenience property representing if the request ended normally\n */\n\tget ok() {\n\t\treturn this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;\n\t}\n\n\tget redirected() {\n\t\treturn this[INTERNALS$1].counter > 0;\n\t}\n\n\tget statusText() {\n\t\treturn this[INTERNALS$1].statusText;\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$1].headers;\n\t}\n\n\t/**\n * Clone this response\n *\n * @return Response\n */\n\tclone() {\n\t\treturn new Response(clone(this), {\n\t\t\turl: this.url,\n\t\t\tstatus: this.status,\n\t\t\tstatusText: this.statusText,\n\t\t\theaders: this.headers,\n\t\t\tok: this.ok,\n\t\t\tredirected: this.redirected\n\t\t});\n\t}\n}\n\nBody.mixIn(Response.prototype);\n\nObject.defineProperties(Response.prototype, {\n\turl: { enumerable: true },\n\tstatus: { enumerable: true },\n\tok: { enumerable: true },\n\tredirected: { enumerable: true },\n\tstatusText: { enumerable: true },\n\theaders: { enumerable: true },\n\tclone: { enumerable: true }\n});\n\nObject.defineProperty(Response.prototype, Symbol.toStringTag, {\n\tvalue: 'Response',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nconst INTERNALS$2 = Symbol('Request internals');\nconst URL = Url.URL || whatwgUrl.URL;\n\n// fix an issue where \"format\", \"parse\" aren't a named export for node <10\nconst parse_url = Url.parse;\nconst format_url = Url.format;\n\n/**\n * Wrapper around `new URL` to handle arbitrary URLs\n *\n * @param {string} urlStr\n * @return {void}\n */\nfunction parseURL(urlStr) {\n\t/*\n \tCheck whether the URL is absolute or not\n \t\tScheme: https://tools.ietf.org/html/rfc3986#section-3.1\n \tAbsolute URL: https://tools.ietf.org/html/rfc3986#section-4.3\n */\n\tif (/^[a-zA-Z][a-zA-Z\\d+\\-.]*:/.exec(urlStr)) {\n\t\turlStr = new URL(urlStr).toString();\n\t}\n\n\t// Fallback to old implementation for arbitrary URLs\n\treturn parse_url(urlStr);\n}\n\nconst streamDestructionSupported = 'destroy' in Stream.Readable.prototype;\n\n/**\n * Check if a value is an instance of Request.\n *\n * @param Mixed input\n * @return Boolean\n */\nfunction isRequest(input) {\n\treturn typeof input === 'object' && typeof input[INTERNALS$2] === 'object';\n}\n\nfunction isAbortSignal(signal) {\n\tconst proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal);\n\treturn !!(proto && proto.constructor.name === 'AbortSignal');\n}\n\n/**\n * Request class\n *\n * @param Mixed input Url or Request instance\n * @param Object init Custom options\n * @return Void\n */\nclass Request {\n\tconstructor(input) {\n\t\tlet init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tlet parsedURL;\n\n\t\t// normalize input\n\t\tif (!isRequest(input)) {\n\t\t\tif (input && input.href) {\n\t\t\t\t// in order to support Node.js' Url objects; though WHATWG's URL objects\n\t\t\t\t// will fall into this branch also (since their `toString()` will return\n\t\t\t\t// `href` property anyway)\n\t\t\t\tparsedURL = parseURL(input.href);\n\t\t\t} else {\n\t\t\t\t// coerce input to a string before attempting to parse\n\t\t\t\tparsedURL = parseURL(`${input}`);\n\t\t\t}\n\t\t\tinput = {};\n\t\t} else {\n\t\t\tparsedURL = parseURL(input.url);\n\t\t}\n\n\t\tlet method = init.method || input.method || 'GET';\n\t\tmethod = method.toUpperCase();\n\n\t\tif ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {\n\t\t\tthrow new TypeError('Request with GET/HEAD method cannot have body');\n\t\t}\n\n\t\tlet inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;\n\n\t\tBody.call(this, inputBody, {\n\t\t\ttimeout: init.timeout || input.timeout || 0,\n\t\t\tsize: init.size || input.size || 0\n\t\t});\n\n\t\tconst headers = new Headers(init.headers || input.headers || {});\n\n\t\tif (inputBody != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(inputBody);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tlet signal = isRequest(input) ? input.signal : null;\n\t\tif ('signal' in init) signal = init.signal;\n\n\t\tif (signal != null && !isAbortSignal(signal)) {\n\t\t\tthrow new TypeError('Expected signal to be an instanceof AbortSignal');\n\t\t}\n\n\t\tthis[INTERNALS$2] = {\n\t\t\tmethod,\n\t\t\tredirect: init.redirect || input.redirect || 'follow',\n\t\t\theaders,\n\t\t\tparsedURL,\n\t\t\tsignal\n\t\t};\n\n\t\t// node-fetch-only options\n\t\tthis.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;\n\t\tthis.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? 
input.compress : true;\n\t\tthis.counter = init.counter || input.counter || 0;\n\t\tthis.agent = init.agent || input.agent;\n\t}\n\n\tget method() {\n\t\treturn this[INTERNALS$2].method;\n\t}\n\n\tget url() {\n\t\treturn format_url(this[INTERNALS$2].parsedURL);\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$2].headers;\n\t}\n\n\tget redirect() {\n\t\treturn this[INTERNALS$2].redirect;\n\t}\n\n\tget signal() {\n\t\treturn this[INTERNALS$2].signal;\n\t}\n\n\t/**\n * Clone this request\n *\n * @return Request\n */\n\tclone() {\n\t\treturn new Request(this);\n\t}\n}\n\nBody.mixIn(Request.prototype);\n\nObject.defineProperty(Request.prototype, Symbol.toStringTag, {\n\tvalue: 'Request',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Request.prototype, {\n\tmethod: { enumerable: true },\n\turl: { enumerable: true },\n\theaders: { enumerable: true },\n\tredirect: { enumerable: true },\n\tclone: { enumerable: true },\n\tsignal: { enumerable: true }\n});\n\n/**\n * Convert a Request to Node.js http request options.\n *\n * @param Request A Request instance\n * @return Object The options object to be passed to http.request\n */\nfunction getNodeRequestOptions(request) {\n\tconst parsedURL = request[INTERNALS$2].parsedURL;\n\tconst headers = new Headers(request[INTERNALS$2].headers);\n\n\t// fetch step 1.3\n\tif (!headers.has('Accept')) {\n\t\theaders.set('Accept', '*/*');\n\t}\n\n\t// Basic fetch\n\tif (!parsedURL.protocol || !parsedURL.hostname) {\n\t\tthrow new TypeError('Only absolute URLs are supported');\n\t}\n\n\tif (!/^https?:$/.test(parsedURL.protocol)) {\n\t\tthrow new TypeError('Only HTTP(S) protocols are supported');\n\t}\n\n\tif (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {\n\t\tthrow new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');\n\t}\n\n\t// HTTP-network-or-cache fetch steps 2.4-2.7\n\tlet contentLengthValue = null;\n\tif (request.body == null && /^(POST|PUT)$/i.test(request.method)) {\n\t\tcontentLengthValue = '0';\n\t}\n\tif (request.body != null) {\n\t\tconst totalBytes = getTotalBytes(request);\n\t\tif (typeof totalBytes === 'number') {\n\t\t\tcontentLengthValue = String(totalBytes);\n\t\t}\n\t}\n\tif (contentLengthValue) {\n\t\theaders.set('Content-Length', contentLengthValue);\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.11\n\tif (!headers.has('User-Agent')) {\n\t\theaders.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.15\n\tif (request.compress && !headers.has('Accept-Encoding')) {\n\t\theaders.set('Accept-Encoding', 'gzip,deflate');\n\t}\n\n\tlet agent = request.agent;\n\tif (typeof agent === 'function') {\n\t\tagent = agent(parsedURL);\n\t}\n\n\tif (!headers.has('Connection') && !agent) {\n\t\theaders.set('Connection', 'close');\n\t}\n\n\t// HTTP-network fetch step 4.2\n\t// chunked encoding is handled by Node.js\n\n\treturn Object.assign({}, parsedURL, {\n\t\tmethod: request.method,\n\t\theaders: exportNodeCompatibleHeaders(headers),\n\t\tagent\n\t});\n}\n\n/**\n * abort-error.js\n *\n * AbortError interface for cancelled requests\n */\n\n/**\n * Create AbortError instance\n *\n * @param String message Error message for human\n * @return AbortError\n */\nfunction AbortError(message) {\n Error.call(this, message);\n\n this.type = 'aborted';\n this.message = message;\n\n // hide custom error implementation details from end-users\n 
Error.captureStackTrace(this, this.constructor);\n}\n\nAbortError.prototype = Object.create(Error.prototype);\nAbortError.prototype.constructor = AbortError;\nAbortError.prototype.name = 'AbortError';\n\nconst URL$1 = Url.URL || whatwgUrl.URL;\n\n// fix an issue where \"PassThrough\", \"resolve\" aren't a named export for node <10\nconst PassThrough$1 = Stream.PassThrough;\n\nconst isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) {\n\tconst orig = new URL$1(original).hostname;\n\tconst dest = new URL$1(destination).hostname;\n\n\treturn orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest);\n};\n\n/**\n * Fetch function\n *\n * @param Mixed url Absolute url or Request instance\n * @param Object opts Fetch options\n * @return Promise\n */\nfunction fetch(url, opts) {\n\n\t// allow custom promise\n\tif (!fetch.Promise) {\n\t\tthrow new Error('native promise missing, set fetch.Promise to your favorite alternative');\n\t}\n\n\tBody.Promise = fetch.Promise;\n\n\t// wrap http.request into fetch\n\treturn new fetch.Promise(function (resolve, reject) {\n\t\t// build request object\n\t\tconst request = new Request(url, opts);\n\t\tconst options = getNodeRequestOptions(request);\n\n\t\tconst send = (options.protocol === 'https:' ? https : http).request;\n\t\tconst signal = request.signal;\n\n\t\tlet response = null;\n\n\t\tconst abort = function abort() {\n\t\t\tlet error = new AbortError('The user aborted a request.');\n\t\t\treject(error);\n\t\t\tif (request.body && request.body instanceof Stream.Readable) {\n\t\t\t\trequest.body.destroy(error);\n\t\t\t}\n\t\t\tif (!response || !response.body) return;\n\t\t\tresponse.body.emit('error', error);\n\t\t};\n\n\t\tif (signal && signal.aborted) {\n\t\t\tabort();\n\t\t\treturn;\n\t\t}\n\n\t\tconst abortAndFinalize = function abortAndFinalize() {\n\t\t\tabort();\n\t\t\tfinalize();\n\t\t};\n\n\t\t// send request\n\t\tconst req = send(options);\n\t\tlet reqTimeout;\n\n\t\tif (signal) {\n\t\t\tsignal.addEventListener('abort', abortAndFinalize);\n\t\t}\n\n\t\tfunction finalize() {\n\t\t\treq.abort();\n\t\t\tif (signal) signal.removeEventListener('abort', abortAndFinalize);\n\t\t\tclearTimeout(reqTimeout);\n\t\t}\n\n\t\tif (request.timeout) {\n\t\t\treq.once('socket', function (socket) {\n\t\t\t\treqTimeout = setTimeout(function () {\n\t\t\t\t\treject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));\n\t\t\t\t\tfinalize();\n\t\t\t\t}, request.timeout);\n\t\t\t});\n\t\t}\n\n\t\treq.on('error', function (err) {\n\t\t\treject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));\n\t\t\tfinalize();\n\t\t});\n\n\t\treq.on('response', function (res) {\n\t\t\tclearTimeout(reqTimeout);\n\n\t\t\tconst headers = createHeadersLenient(res.headers);\n\n\t\t\t// HTTP fetch step 5\n\t\t\tif (fetch.isRedirect(res.statusCode)) {\n\t\t\t\t// HTTP fetch step 5.2\n\t\t\t\tconst location = headers.get('Location');\n\n\t\t\t\t// HTTP fetch step 5.3\n\t\t\t\tlet locationURL = null;\n\t\t\t\ttry {\n\t\t\t\t\tlocationURL = location === null ? 
null : new URL$1(location, request.url).toString();\n\t\t\t\t} catch (err) {\n\t\t\t\t\t// error here can only be invalid URL in Location: header\n\t\t\t\t\t// do not throw when options.redirect == manual\n\t\t\t\t\t// let the user extract the errorneous redirect URL\n\t\t\t\t\tif (request.redirect !== 'manual') {\n\t\t\t\t\t\treject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect'));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// HTTP fetch step 5.5\n\t\t\t\tswitch (request.redirect) {\n\t\t\t\t\tcase 'error':\n\t\t\t\t\t\treject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\tcase 'manual':\n\t\t\t\t\t\t// node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.\n\t\t\t\t\t\tif (locationURL !== null) {\n\t\t\t\t\t\t\t// handle corrupted header\n\t\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\t\theaders.set('Location', locationURL);\n\t\t\t\t\t\t\t} catch (err) {\n\t\t\t\t\t\t\t\t// istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request\n\t\t\t\t\t\t\t\treject(err);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase 'follow':\n\t\t\t\t\t\t// HTTP-redirect fetch step 2\n\t\t\t\t\t\tif (locationURL === null) {\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 5\n\t\t\t\t\t\tif (request.counter >= request.follow) {\n\t\t\t\t\t\t\treject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 6 (counter increment)\n\t\t\t\t\t\t// Create a new Request object.\n\t\t\t\t\t\tconst requestOpts = {\n\t\t\t\t\t\t\theaders: new Headers(request.headers),\n\t\t\t\t\t\t\tfollow: request.follow,\n\t\t\t\t\t\t\tcounter: request.counter + 1,\n\t\t\t\t\t\t\tagent: request.agent,\n\t\t\t\t\t\t\tcompress: request.compress,\n\t\t\t\t\t\t\tmethod: request.method,\n\t\t\t\t\t\t\tbody: request.body,\n\t\t\t\t\t\t\tsignal: request.signal,\n\t\t\t\t\t\t\ttimeout: request.timeout,\n\t\t\t\t\t\t\tsize: request.size\n\t\t\t\t\t\t};\n\n\t\t\t\t\t\tif (!isDomainOrSubdomain(request.url, locationURL)) {\n\t\t\t\t\t\t\tfor (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) {\n\t\t\t\t\t\t\t\trequestOpts.headers.delete(name);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 9\n\t\t\t\t\t\tif (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {\n\t\t\t\t\t\t\treject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 11\n\t\t\t\t\t\tif (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {\n\t\t\t\t\t\t\trequestOpts.method = 'GET';\n\t\t\t\t\t\t\trequestOpts.body = undefined;\n\t\t\t\t\t\t\trequestOpts.headers.delete('content-length');\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 15\n\t\t\t\t\t\tresolve(fetch(new Request(locationURL, requestOpts)));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// prepare response\n\t\t\tres.once('end', function () {\n\t\t\t\tif (signal) 
signal.removeEventListener('abort', abortAndFinalize);\n\t\t\t});\n\t\t\tlet body = res.pipe(new PassThrough$1());\n\n\t\t\tconst response_options = {\n\t\t\t\turl: request.url,\n\t\t\t\tstatus: res.statusCode,\n\t\t\t\tstatusText: res.statusMessage,\n\t\t\t\theaders: headers,\n\t\t\t\tsize: request.size,\n\t\t\t\ttimeout: request.timeout,\n\t\t\t\tcounter: request.counter\n\t\t\t};\n\n\t\t\t// HTTP-network fetch step 12.1.1.3\n\t\t\tconst codings = headers.get('Content-Encoding');\n\n\t\t\t// HTTP-network fetch step 12.1.1.4: handle content codings\n\n\t\t\t// in following scenarios we ignore compression support\n\t\t\t// 1. compression support is disabled\n\t\t\t// 2. HEAD request\n\t\t\t// 3. no Content-Encoding header\n\t\t\t// 4. no content response (204)\n\t\t\t// 5. content not modified response (304)\n\t\t\tif (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// For Node v6+\n\t\t\t// Be less strict when decoding compressed responses, since sometimes\n\t\t\t// servers send slightly invalid responses that are still accepted\n\t\t\t// by common browsers.\n\t\t\t// Always using Z_SYNC_FLUSH is what cURL does.\n\t\t\tconst zlibOptions = {\n\t\t\t\tflush: zlib.Z_SYNC_FLUSH,\n\t\t\t\tfinishFlush: zlib.Z_SYNC_FLUSH\n\t\t\t};\n\n\t\t\t// for gzip\n\t\t\tif (codings == 'gzip' || codings == 'x-gzip') {\n\t\t\t\tbody = body.pipe(zlib.createGunzip(zlibOptions));\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for deflate\n\t\t\tif (codings == 'deflate' || codings == 'x-deflate') {\n\t\t\t\t// handle the infamous raw deflate response from old servers\n\t\t\t\t// a hack for old IIS and Apache servers\n\t\t\t\tconst raw = res.pipe(new PassThrough$1());\n\t\t\t\traw.once('data', function (chunk) {\n\t\t\t\t\t// see http://stackoverflow.com/questions/37519828\n\t\t\t\t\tif ((chunk[0] & 0x0F) === 0x08) {\n\t\t\t\t\t\tbody = body.pipe(zlib.createInflate());\n\t\t\t\t\t} else {\n\t\t\t\t\t\tbody = body.pipe(zlib.createInflateRaw());\n\t\t\t\t\t}\n\t\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\t\tresolve(response);\n\t\t\t\t});\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for br\n\t\t\tif (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {\n\t\t\t\tbody = body.pipe(zlib.createBrotliDecompress());\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// otherwise, use response as-is\n\t\t\tresponse = new Response(body, response_options);\n\t\t\tresolve(response);\n\t\t});\n\n\t\twriteToStream(req, request);\n\t});\n}\n/**\n * Redirect code matching\n *\n * @param Number code Status code\n * @return Boolean\n */\nfetch.isRedirect = function (code) {\n\treturn code === 301 || code === 302 || code === 303 || code === 307 || code === 308;\n};\n\n// expose Promise\nfetch.Promise = global.Promise;\n\nmodule.exports = exports = fetch;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.default = exports;\nexports.Headers = Headers;\nexports.Request = Request;\nexports.Response = Response;\nexports.FetchError = FetchError;\n","var wrappy = require('wrappy')\nmodule.exports = wrappy(once)\nmodule.exports.strict = wrappy(onceStrict)\n\nonce.proto = once(function () {\n Object.defineProperty(Function.prototype, 'once', {\n 
value: function () {\n return once(this)\n },\n configurable: true\n })\n\n Object.defineProperty(Function.prototype, 'onceStrict', {\n value: function () {\n return onceStrict(this)\n },\n configurable: true\n })\n})\n\nfunction once (fn) {\n var f = function () {\n if (f.called) return f.value\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n f.called = false\n return f\n}\n\nfunction onceStrict (fn) {\n var f = function () {\n if (f.called)\n throw new Error(f.onceError)\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n var name = fn.name || 'Function wrapped with `once`'\n f.onceError = name + \" shouldn't be called more than once\"\n f.called = false\n return f\n}\n","\"use strict\";\n\nvar punycode = require(\"punycode\");\nvar mappingTable = require(\"./lib/mappingTable.json\");\n\nvar PROCESSING_OPTIONS = {\n TRANSITIONAL: 0,\n NONTRANSITIONAL: 1\n};\n\nfunction normalize(str) { // fix bug in v8\n return str.split('\\u0000').map(function (s) { return s.normalize('NFC'); }).join('\\u0000');\n}\n\nfunction findStatus(val) {\n var start = 0;\n var end = mappingTable.length - 1;\n\n while (start <= end) {\n var mid = Math.floor((start + end) / 2);\n\n var target = mappingTable[mid];\n if (target[0][0] <= val && target[0][1] >= val) {\n return target;\n } else if (target[0][0] > val) {\n end = mid - 1;\n } else {\n start = mid + 1;\n }\n }\n\n return null;\n}\n\nvar regexAstralSymbols = /[\\uD800-\\uDBFF][\\uDC00-\\uDFFF]/g;\n\nfunction countSymbols(string) {\n return string\n // replace every surrogate pair with a BMP symbol\n .replace(regexAstralSymbols, '_')\n // then get the length\n .length;\n}\n\nfunction mapChars(domain_name, useSTD3, processing_option) {\n var hasError = false;\n var processed = \"\";\n\n var len = countSymbols(domain_name);\n for (var i = 0; i < len; ++i) {\n var codePoint = domain_name.codePointAt(i);\n var status = findStatus(codePoint);\n\n switch (status[1]) {\n case \"disallowed\":\n hasError = true;\n processed += String.fromCodePoint(codePoint);\n break;\n case \"ignored\":\n break;\n case \"mapped\":\n processed += String.fromCodePoint.apply(String, status[2]);\n break;\n case \"deviation\":\n if (processing_option === PROCESSING_OPTIONS.TRANSITIONAL) {\n processed += String.fromCodePoint.apply(String, status[2]);\n } else {\n processed += String.fromCodePoint(codePoint);\n }\n break;\n case \"valid\":\n processed += String.fromCodePoint(codePoint);\n break;\n case \"disallowed_STD3_mapped\":\n if (useSTD3) {\n hasError = true;\n processed += String.fromCodePoint(codePoint);\n } else {\n processed += String.fromCodePoint.apply(String, status[2]);\n }\n break;\n case \"disallowed_STD3_valid\":\n if (useSTD3) {\n hasError = true;\n }\n\n processed += String.fromCodePoint(codePoint);\n break;\n }\n }\n\n return {\n string: processed,\n error: hasError\n };\n}\n\nvar combiningMarksRegex = 
/[\\u0300-\\u036F\\u0483-\\u0489\\u0591-\\u05BD\\u05BF\\u05C1\\u05C2\\u05C4\\u05C5\\u05C7\\u0610-\\u061A\\u064B-\\u065F\\u0670\\u06D6-\\u06DC\\u06DF-\\u06E4\\u06E7\\u06E8\\u06EA-\\u06ED\\u0711\\u0730-\\u074A\\u07A6-\\u07B0\\u07EB-\\u07F3\\u0816-\\u0819\\u081B-\\u0823\\u0825-\\u0827\\u0829-\\u082D\\u0859-\\u085B\\u08E4-\\u0903\\u093A-\\u093C\\u093E-\\u094F\\u0951-\\u0957\\u0962\\u0963\\u0981-\\u0983\\u09BC\\u09BE-\\u09C4\\u09C7\\u09C8\\u09CB-\\u09CD\\u09D7\\u09E2\\u09E3\\u0A01-\\u0A03\\u0A3C\\u0A3E-\\u0A42\\u0A47\\u0A48\\u0A4B-\\u0A4D\\u0A51\\u0A70\\u0A71\\u0A75\\u0A81-\\u0A83\\u0ABC\\u0ABE-\\u0AC5\\u0AC7-\\u0AC9\\u0ACB-\\u0ACD\\u0AE2\\u0AE3\\u0B01-\\u0B03\\u0B3C\\u0B3E-\\u0B44\\u0B47\\u0B48\\u0B4B-\\u0B4D\\u0B56\\u0B57\\u0B62\\u0B63\\u0B82\\u0BBE-\\u0BC2\\u0BC6-\\u0BC8\\u0BCA-\\u0BCD\\u0BD7\\u0C00-\\u0C03\\u0C3E-\\u0C44\\u0C46-\\u0C48\\u0C4A-\\u0C4D\\u0C55\\u0C56\\u0C62\\u0C63\\u0C81-\\u0C83\\u0CBC\\u0CBE-\\u0CC4\\u0CC6-\\u0CC8\\u0CCA-\\u0CCD\\u0CD5\\u0CD6\\u0CE2\\u0CE3\\u0D01-\\u0D03\\u0D3E-\\u0D44\\u0D46-\\u0D48\\u0D4A-\\u0D4D\\u0D57\\u0D62\\u0D63\\u0D82\\u0D83\\u0DCA\\u0DCF-\\u0DD4\\u0DD6\\u0DD8-\\u0DDF\\u0DF2\\u0DF3\\u0E31\\u0E34-\\u0E3A\\u0E47-\\u0E4E\\u0EB1\\u0EB4-\\u0EB9\\u0EBB\\u0EBC\\u0EC8-\\u0ECD\\u0F18\\u0F19\\u0F35\\u0F37\\u0F39\\u0F3E\\u0F3F\\u0F71-\\u0F84\\u0F86\\u0F87\\u0F8D-\\u0F97\\u0F99-\\u0FBC\\u0FC6\\u102B-\\u103E\\u1056-\\u1059\\u105E-\\u1060\\u1062-\\u1064\\u1067-\\u106D\\u1071-\\u1074\\u1082-\\u108D\\u108F\\u109A-\\u109D\\u135D-\\u135F\\u1712-\\u1714\\u1732-\\u1734\\u1752\\u1753\\u1772\\u1773\\u17B4-\\u17D3\\u17DD\\u180B-\\u180D\\u18A9\\u1920-\\u192B\\u1930-\\u193B\\u19B0-\\u19C0\\u19C8\\u19C9\\u1A17-\\u1A1B\\u1A55-\\u1A5E\\u1A60-\\u1A7C\\u1A7F\\u1AB0-\\u1ABE\\u1B00-\\u1B04\\u1B34-\\u1B44\\u1B6B-\\u1B73\\u1B80-\\u1B82\\u1BA1-\\u1BAD\\u1BE6-\\u1BF3\\u1C24-\\u1C37\\u1CD0-\\u1CD2\\u1CD4-\\u1CE8\\u1CED\\u1CF2-\\u1CF4\\u1CF8\\u1CF9\\u1DC0-\\u1DF5\\u1DFC-\\u1DFF\\u20D0-\\u20F0\\u2CEF-\\u2CF1\\u2D7F\\u2DE0-\\u2DFF\\u302A-\\u302F\\u3099\\u309A\\uA66F-\\uA672\\uA674-\\uA67D\\uA69F\\uA6F0\\uA6F1\\uA802\\uA806\\uA80B\\uA823-\\uA827\\uA880\\uA881\\uA8B4-\\uA8C4\\uA8E0-\\uA8F1\\uA926-\\uA92D\\uA947-\\uA953\\uA980-\\uA983\\uA9B3-\\uA9C0\\uA9E5\\uAA29-\\uAA36\\uAA43\\uAA4C\\uAA4D\\uAA7B-\\uAA7D\\uAAB0\\uAAB2-\\uAAB4\\uAAB7\\uAAB8\\uAABE\\uAABF\\uAAC1\\uAAEB-\\uAAEF\\uAAF5\\uAAF6\\uABE3-\\uABEA\\uABEC\\uABED\\uFB1E\\uFE00-\\uFE0F\\uFE20-\\uFE2D]|\\uD800[\\uDDFD\\uDEE0\\uDF76-\\uDF7A]|\\uD802[\\uDE01-\\uDE03\\uDE05\\uDE06\\uDE0C-\\uDE0F\\uDE38-\\uDE3A\\uDE3F\\uDEE5\\uDEE6]|\\uD804[\\uDC00-\\uDC02\\uDC38-\\uDC46\\uDC7F-\\uDC82\\uDCB0-\\uDCBA\\uDD00-\\uDD02\\uDD27-\\uDD34\\uDD73\\uDD80-\\uDD82\\uDDB3-\\uDDC0\\uDE2C-\\uDE37\\uDEDF-\\uDEEA\\uDF01-\\uDF03\\uDF3C\\uDF3E-\\uDF44\\uDF47\\uDF48\\uDF4B-\\uDF4D\\uDF57\\uDF62\\uDF63\\uDF66-\\uDF6C\\uDF70-\\uDF74]|\\uD805[\\uDCB0-\\uDCC3\\uDDAF-\\uDDB5\\uDDB8-\\uDDC0\\uDE30-\\uDE40\\uDEAB-\\uDEB7]|\\uD81A[\\uDEF0-\\uDEF4\\uDF30-\\uDF36]|\\uD81B[\\uDF51-\\uDF7E\\uDF8F-\\uDF92]|\\uD82F[\\uDC9D\\uDC9E]|\\uD834[\\uDD65-\\uDD69\\uDD6D-\\uDD72\\uDD7B-\\uDD82\\uDD85-\\uDD8B\\uDDAA-\\uDDAD\\uDE42-\\uDE44]|\\uD83A[\\uDCD0-\\uDCD6]|\\uDB40[\\uDD00-\\uDDEF]/;\n\nfunction validateLabel(label, processing_option) {\n if (label.substr(0, 4) === \"xn--\") {\n label = punycode.toUnicode(label);\n processing_option = PROCESSING_OPTIONS.NONTRANSITIONAL;\n }\n\n var error = false;\n\n if (normalize(label) !== label ||\n (label[3] === \"-\" && label[4] === \"-\") ||\n label[0] === \"-\" || label[label.length - 1] === \"-\" ||\n label.indexOf(\".\") !== -1 ||\n 
label.search(combiningMarksRegex) === 0) {\n error = true;\n }\n\n var len = countSymbols(label);\n for (var i = 0; i < len; ++i) {\n var status = findStatus(label.codePointAt(i));\n if ((processing === PROCESSING_OPTIONS.TRANSITIONAL && status[1] !== \"valid\") ||\n (processing === PROCESSING_OPTIONS.NONTRANSITIONAL &&\n status[1] !== \"valid\" && status[1] !== \"deviation\")) {\n error = true;\n break;\n }\n }\n\n return {\n label: label,\n error: error\n };\n}\n\nfunction processing(domain_name, useSTD3, processing_option) {\n var result = mapChars(domain_name, useSTD3, processing_option);\n result.string = normalize(result.string);\n\n var labels = result.string.split(\".\");\n for (var i = 0; i < labels.length; ++i) {\n try {\n var validation = validateLabel(labels[i]);\n labels[i] = validation.label;\n result.error = result.error || validation.error;\n } catch(e) {\n result.error = true;\n }\n }\n\n return {\n string: labels.join(\".\"),\n error: result.error\n };\n}\n\nmodule.exports.toASCII = function(domain_name, useSTD3, processing_option, verifyDnsLength) {\n var result = processing(domain_name, useSTD3, processing_option);\n var labels = result.string.split(\".\");\n labels = labels.map(function(l) {\n try {\n return punycode.toASCII(l);\n } catch(e) {\n result.error = true;\n return l;\n }\n });\n\n if (verifyDnsLength) {\n var total = labels.slice(0, labels.length - 1).join(\".\").length;\n if (total.length > 253 || total.length === 0) {\n result.error = true;\n }\n\n for (var i=0; i < labels.length; ++i) {\n if (labels.length > 63 || labels.length === 0) {\n result.error = true;\n break;\n }\n }\n }\n\n if (result.error) return null;\n return labels.join(\".\");\n};\n\nmodule.exports.toUnicode = function(domain_name, useSTD3) {\n var result = processing(domain_name, useSTD3, PROCESSING_OPTIONS.NONTRANSITIONAL);\n\n return {\n domain: result.string,\n error: result.error\n };\n};\n\nmodule.exports.PROCESSING_OPTIONS = PROCESSING_OPTIONS;\n","module.exports = require('./lib/tunnel');\n","'use strict';\n\nvar net = require('net');\nvar tls = require('tls');\nvar http = require('http');\nvar https = require('https');\nvar events = require('events');\nvar assert = require('assert');\nvar util = require('util');\n\n\nexports.httpOverHttp = httpOverHttp;\nexports.httpsOverHttp = httpsOverHttp;\nexports.httpOverHttps = httpOverHttps;\nexports.httpsOverHttps = httpsOverHttps;\n\n\nfunction httpOverHttp(options) {\n var agent = new TunnelingAgent(options);\n agent.request = http.request;\n return agent;\n}\n\nfunction httpsOverHttp(options) {\n var agent = new TunnelingAgent(options);\n agent.request = http.request;\n agent.createSocket = createSecureSocket;\n agent.defaultPort = 443;\n return agent;\n}\n\nfunction httpOverHttps(options) {\n var agent = new TunnelingAgent(options);\n agent.request = https.request;\n return agent;\n}\n\nfunction httpsOverHttps(options) {\n var agent = new TunnelingAgent(options);\n agent.request = https.request;\n agent.createSocket = createSecureSocket;\n agent.defaultPort = 443;\n return agent;\n}\n\n\nfunction TunnelingAgent(options) {\n var self = this;\n self.options = options || {};\n self.proxyOptions = self.options.proxy || {};\n self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;\n self.requests = [];\n self.sockets = [];\n\n self.on('free', function onFree(socket, host, port, localAddress) {\n var options = toOptions(host, port, localAddress);\n for (var i = 0, len = self.requests.length; i < len; ++i) {\n var pending = 
self.requests[i];\n if (pending.host === options.host && pending.port === options.port) {\n // Detect the request to connect same origin server,\n // reuse the connection.\n self.requests.splice(i, 1);\n pending.request.onSocket(socket);\n return;\n }\n }\n socket.destroy();\n self.removeSocket(socket);\n });\n}\nutil.inherits(TunnelingAgent, events.EventEmitter);\n\nTunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {\n var self = this;\n var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));\n\n if (self.sockets.length >= this.maxSockets) {\n // We are over limit so we'll add it to the queue.\n self.requests.push(options);\n return;\n }\n\n // If we are under maxSockets create a new one.\n self.createSocket(options, function(socket) {\n socket.on('free', onFree);\n socket.on('close', onCloseOrRemove);\n socket.on('agentRemove', onCloseOrRemove);\n req.onSocket(socket);\n\n function onFree() {\n self.emit('free', socket, options);\n }\n\n function onCloseOrRemove(err) {\n self.removeSocket(socket);\n socket.removeListener('free', onFree);\n socket.removeListener('close', onCloseOrRemove);\n socket.removeListener('agentRemove', onCloseOrRemove);\n }\n });\n};\n\nTunnelingAgent.prototype.createSocket = function createSocket(options, cb) {\n var self = this;\n var placeholder = {};\n self.sockets.push(placeholder);\n\n var connectOptions = mergeOptions({}, self.proxyOptions, {\n method: 'CONNECT',\n path: options.host + ':' + options.port,\n agent: false,\n headers: {\n host: options.host + ':' + options.port\n }\n });\n if (options.localAddress) {\n connectOptions.localAddress = options.localAddress;\n }\n if (connectOptions.proxyAuth) {\n connectOptions.headers = connectOptions.headers || {};\n connectOptions.headers['Proxy-Authorization'] = 'Basic ' +\n new Buffer(connectOptions.proxyAuth).toString('base64');\n }\n\n debug('making CONNECT request');\n var connectReq = self.request(connectOptions);\n connectReq.useChunkedEncodingByDefault = false; // for v0.6\n connectReq.once('response', onResponse); // for v0.6\n connectReq.once('upgrade', onUpgrade); // for v0.6\n connectReq.once('connect', onConnect); // for v0.7 or later\n connectReq.once('error', onError);\n connectReq.end();\n\n function onResponse(res) {\n // Very hacky. 
This is necessary to avoid http-parser leaks.\n res.upgrade = true;\n }\n\n function onUpgrade(res, socket, head) {\n // Hacky.\n process.nextTick(function() {\n onConnect(res, socket, head);\n });\n }\n\n function onConnect(res, socket, head) {\n connectReq.removeAllListeners();\n socket.removeAllListeners();\n\n if (res.statusCode !== 200) {\n debug('tunneling socket could not be established, statusCode=%d',\n res.statusCode);\n socket.destroy();\n var error = new Error('tunneling socket could not be established, ' +\n 'statusCode=' + res.statusCode);\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n return;\n }\n if (head.length > 0) {\n debug('got illegal response body from proxy');\n socket.destroy();\n var error = new Error('got illegal response body from proxy');\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n return;\n }\n debug('tunneling connection has established');\n self.sockets[self.sockets.indexOf(placeholder)] = socket;\n return cb(socket);\n }\n\n function onError(cause) {\n connectReq.removeAllListeners();\n\n debug('tunneling socket could not be established, cause=%s\\n',\n cause.message, cause.stack);\n var error = new Error('tunneling socket could not be established, ' +\n 'cause=' + cause.message);\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n }\n};\n\nTunnelingAgent.prototype.removeSocket = function removeSocket(socket) {\n var pos = this.sockets.indexOf(socket)\n if (pos === -1) {\n return;\n }\n this.sockets.splice(pos, 1);\n\n var pending = this.requests.shift();\n if (pending) {\n // If we have pending requests and a socket gets closed a new one\n // needs to be created to take over in the pool for the one that closed.\n this.createSocket(pending, function(socket) {\n pending.request.onSocket(socket);\n });\n }\n};\n\nfunction createSecureSocket(options, cb) {\n var self = this;\n TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {\n var hostHeader = options.request.getHeader('host');\n var tlsOptions = mergeOptions({}, self.options, {\n socket: socket,\n servername: hostHeader ? 
hostHeader.replace(/:.*$/, '') : options.host\n });\n\n // 0 is dummy port for v0.6\n var secureSocket = tls.connect(0, tlsOptions);\n self.sockets[self.sockets.indexOf(socket)] = secureSocket;\n cb(secureSocket);\n });\n}\n\n\nfunction toOptions(host, port, localAddress) {\n if (typeof host === 'string') { // since v0.10\n return {\n host: host,\n port: port,\n localAddress: localAddress\n };\n }\n return host; // for v0.11 or later\n}\n\nfunction mergeOptions(target) {\n for (var i = 1, len = arguments.length; i < len; ++i) {\n var overrides = arguments[i];\n if (typeof overrides === 'object') {\n var keys = Object.keys(overrides);\n for (var j = 0, keyLen = keys.length; j < keyLen; ++j) {\n var k = keys[j];\n if (overrides[k] !== undefined) {\n target[k] = overrides[k];\n }\n }\n }\n }\n return target;\n}\n\n\nvar debug;\nif (process.env.NODE_DEBUG && /\\btunnel\\b/.test(process.env.NODE_DEBUG)) {\n debug = function() {\n var args = Array.prototype.slice.call(arguments);\n if (typeof args[0] === 'string') {\n args[0] = 'TUNNEL: ' + args[0];\n } else {\n args.unshift('TUNNEL:');\n }\n console.error.apply(console, args);\n }\n} else {\n debug = function() {};\n}\nexports.debug = debug; // for test\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction getUserAgent() {\n if (typeof navigator === \"object\" && \"userAgent\" in navigator) {\n return navigator.userAgent;\n }\n\n if (typeof process === \"object\" && \"version\" in process) {\n return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`;\n }\n\n return \"\";\n}\n\nexports.getUserAgent = getUserAgent;\n//# sourceMappingURL=index.js.map\n","'use strict'\n\nexports.fromCallback = function (fn) {\n return Object.defineProperty(function (...args) {\n if (typeof args[args.length - 1] === 'function') fn.apply(this, args)\n else {\n return new Promise((resolve, reject) => {\n fn.call(\n this,\n ...args,\n (err, res) => (err != null) ? reject(err) : resolve(res)\n )\n })\n }\n }, 'name', { value: fn.name })\n}\n\nexports.fromPromise = function (fn) {\n return Object.defineProperty(function (...args) {\n const cb = args[args.length - 1]\n if (typeof cb !== 'function') return fn.apply(this, args)\n else fn.apply(this, args.slice(0, -1)).then(r => cb(null, r), cb)\n }, 'name', { value: fn.name })\n}\n","/** @license URI.js v4.4.1 (c) 2011 Gary Court. License: http://github.com/garycourt/uri-js */\n(function (global, factory) {\n\ttypeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :\n\ttypeof define === 'function' && define.amd ? define(['exports'], factory) :\n\t(factory((global.URI = global.URI || {})));\n}(this, (function (exports) { 'use strict';\n\nfunction merge() {\n for (var _len = arguments.length, sets = Array(_len), _key = 0; _key < _len; _key++) {\n sets[_key] = arguments[_key];\n }\n\n if (sets.length > 1) {\n sets[0] = sets[0].slice(0, -1);\n var xl = sets.length - 1;\n for (var x = 1; x < xl; ++x) {\n sets[x] = sets[x].slice(1, -1);\n }\n sets[xl] = sets[xl].slice(1);\n return sets.join('');\n } else {\n return sets[0];\n }\n}\nfunction subexp(str) {\n return \"(?:\" + str + \")\";\n}\nfunction typeOf(o) {\n return o === undefined ? \"undefined\" : o === null ? \"null\" : Object.prototype.toString.call(o).split(\" \").pop().split(\"]\").shift().toLowerCase();\n}\nfunction toUpperCase(str) {\n return str.toUpperCase();\n}\nfunction toArray(obj) {\n return obj !== undefined && obj !== null ? obj instanceof Array ? 
obj : typeof obj.length !== \"number\" || obj.split || obj.setInterval || obj.call ? [obj] : Array.prototype.slice.call(obj) : [];\n}\nfunction assign(target, source) {\n var obj = target;\n if (source) {\n for (var key in source) {\n obj[key] = source[key];\n }\n }\n return obj;\n}\n\nfunction buildExps(isIRI) {\n var ALPHA$$ = \"[A-Za-z]\",\n CR$ = \"[\\\\x0D]\",\n DIGIT$$ = \"[0-9]\",\n DQUOTE$$ = \"[\\\\x22]\",\n HEXDIG$$ = merge(DIGIT$$, \"[A-Fa-f]\"),\n //case-insensitive\n LF$$ = \"[\\\\x0A]\",\n SP$$ = \"[\\\\x20]\",\n PCT_ENCODED$ = subexp(subexp(\"%[EFef]\" + HEXDIG$$ + \"%\" + HEXDIG$$ + HEXDIG$$ + \"%\" + HEXDIG$$ + HEXDIG$$) + \"|\" + subexp(\"%[89A-Fa-f]\" + HEXDIG$$ + \"%\" + HEXDIG$$ + HEXDIG$$) + \"|\" + subexp(\"%\" + HEXDIG$$ + HEXDIG$$)),\n //expanded\n GEN_DELIMS$$ = \"[\\\\:\\\\/\\\\?\\\\#\\\\[\\\\]\\\\@]\",\n SUB_DELIMS$$ = \"[\\\\!\\\\$\\\\&\\\\'\\\\(\\\\)\\\\*\\\\+\\\\,\\\\;\\\\=]\",\n RESERVED$$ = merge(GEN_DELIMS$$, SUB_DELIMS$$),\n UCSCHAR$$ = isIRI ? \"[\\\\xA0-\\\\u200D\\\\u2010-\\\\u2029\\\\u202F-\\\\uD7FF\\\\uF900-\\\\uFDCF\\\\uFDF0-\\\\uFFEF]\" : \"[]\",\n //subset, excludes bidi control characters\n IPRIVATE$$ = isIRI ? \"[\\\\uE000-\\\\uF8FF]\" : \"[]\",\n //subset\n UNRESERVED$$ = merge(ALPHA$$, DIGIT$$, \"[\\\\-\\\\.\\\\_\\\\~]\", UCSCHAR$$),\n SCHEME$ = subexp(ALPHA$$ + merge(ALPHA$$, DIGIT$$, \"[\\\\+\\\\-\\\\.]\") + \"*\"),\n USERINFO$ = subexp(subexp(PCT_ENCODED$ + \"|\" + merge(UNRESERVED$$, SUB_DELIMS$$, \"[\\\\:]\")) + \"*\"),\n DEC_OCTET$ = subexp(subexp(\"25[0-5]\") + \"|\" + subexp(\"2[0-4]\" + DIGIT$$) + \"|\" + subexp(\"1\" + DIGIT$$ + DIGIT$$) + \"|\" + subexp(\"[1-9]\" + DIGIT$$) + \"|\" + DIGIT$$),\n DEC_OCTET_RELAXED$ = subexp(subexp(\"25[0-5]\") + \"|\" + subexp(\"2[0-4]\" + DIGIT$$) + \"|\" + subexp(\"1\" + DIGIT$$ + DIGIT$$) + \"|\" + subexp(\"0?[1-9]\" + DIGIT$$) + \"|0?0?\" + DIGIT$$),\n //relaxed parsing rules\n IPV4ADDRESS$ = subexp(DEC_OCTET_RELAXED$ + \"\\\\.\" + DEC_OCTET_RELAXED$ + \"\\\\.\" + DEC_OCTET_RELAXED$ + \"\\\\.\" + DEC_OCTET_RELAXED$),\n H16$ = subexp(HEXDIG$$ + \"{1,4}\"),\n LS32$ = subexp(subexp(H16$ + \"\\\\:\" + H16$) + \"|\" + IPV4ADDRESS$),\n IPV6ADDRESS1$ = subexp(subexp(H16$ + \"\\\\:\") + \"{6}\" + LS32$),\n // 6( h16 \":\" ) ls32\n IPV6ADDRESS2$ = subexp(\"\\\\:\\\\:\" + subexp(H16$ + \"\\\\:\") + \"{5}\" + LS32$),\n // \"::\" 5( h16 \":\" ) ls32\n IPV6ADDRESS3$ = subexp(subexp(H16$) + \"?\\\\:\\\\:\" + subexp(H16$ + \"\\\\:\") + \"{4}\" + LS32$),\n //[ h16 ] \"::\" 4( h16 \":\" ) ls32\n IPV6ADDRESS4$ = subexp(subexp(subexp(H16$ + \"\\\\:\") + \"{0,1}\" + H16$) + \"?\\\\:\\\\:\" + subexp(H16$ + \"\\\\:\") + \"{3}\" + LS32$),\n //[ *1( h16 \":\" ) h16 ] \"::\" 3( h16 \":\" ) ls32\n IPV6ADDRESS5$ = subexp(subexp(subexp(H16$ + \"\\\\:\") + \"{0,2}\" + H16$) + \"?\\\\:\\\\:\" + subexp(H16$ + \"\\\\:\") + \"{2}\" + LS32$),\n //[ *2( h16 \":\" ) h16 ] \"::\" 2( h16 \":\" ) ls32\n IPV6ADDRESS6$ = subexp(subexp(subexp(H16$ + \"\\\\:\") + \"{0,3}\" + H16$) + \"?\\\\:\\\\:\" + H16$ + \"\\\\:\" + LS32$),\n //[ *3( h16 \":\" ) h16 ] \"::\" h16 \":\" ls32\n IPV6ADDRESS7$ = subexp(subexp(subexp(H16$ + \"\\\\:\") + \"{0,4}\" + H16$) + \"?\\\\:\\\\:\" + LS32$),\n //[ *4( h16 \":\" ) h16 ] \"::\" ls32\n IPV6ADDRESS8$ = subexp(subexp(subexp(H16$ + \"\\\\:\") + \"{0,5}\" + H16$) + \"?\\\\:\\\\:\" + H16$),\n //[ *5( h16 \":\" ) h16 ] \"::\" h16\n IPV6ADDRESS9$ = subexp(subexp(subexp(H16$ + \"\\\\:\") + \"{0,6}\" + H16$) + \"?\\\\:\\\\:\"),\n //[ *6( h16 \":\" ) h16 ] \"::\"\n IPV6ADDRESS$ = subexp([IPV6ADDRESS1$, 
IPV6ADDRESS2$, IPV6ADDRESS3$, IPV6ADDRESS4$, IPV6ADDRESS5$, IPV6ADDRESS6$, IPV6ADDRESS7$, IPV6ADDRESS8$, IPV6ADDRESS9$].join(\"|\")),\n ZONEID$ = subexp(subexp(UNRESERVED$$ + \"|\" + PCT_ENCODED$) + \"+\"),\n //RFC 6874\n IPV6ADDRZ$ = subexp(IPV6ADDRESS$ + \"\\\\%25\" + ZONEID$),\n //RFC 6874\n IPV6ADDRZ_RELAXED$ = subexp(IPV6ADDRESS$ + subexp(\"\\\\%25|\\\\%(?!\" + HEXDIG$$ + \"{2})\") + ZONEID$),\n //RFC 6874, with relaxed parsing rules\n IPVFUTURE$ = subexp(\"[vV]\" + HEXDIG$$ + \"+\\\\.\" + merge(UNRESERVED$$, SUB_DELIMS$$, \"[\\\\:]\") + \"+\"),\n IP_LITERAL$ = subexp(\"\\\\[\" + subexp(IPV6ADDRZ_RELAXED$ + \"|\" + IPV6ADDRESS$ + \"|\" + IPVFUTURE$) + \"\\\\]\"),\n //RFC 6874\n REG_NAME$ = subexp(subexp(PCT_ENCODED$ + \"|\" + merge(UNRESERVED$$, SUB_DELIMS$$)) + \"*\"),\n HOST$ = subexp(IP_LITERAL$ + \"|\" + IPV4ADDRESS$ + \"(?!\" + REG_NAME$ + \")\" + \"|\" + REG_NAME$),\n PORT$ = subexp(DIGIT$$ + \"*\"),\n AUTHORITY$ = subexp(subexp(USERINFO$ + \"@\") + \"?\" + HOST$ + subexp(\"\\\\:\" + PORT$) + \"?\"),\n PCHAR$ = subexp(PCT_ENCODED$ + \"|\" + merge(UNRESERVED$$, SUB_DELIMS$$, \"[\\\\:\\\\@]\")),\n SEGMENT$ = subexp(PCHAR$ + \"*\"),\n SEGMENT_NZ$ = subexp(PCHAR$ + \"+\"),\n SEGMENT_NZ_NC$ = subexp(subexp(PCT_ENCODED$ + \"|\" + merge(UNRESERVED$$, SUB_DELIMS$$, \"[\\\\@]\")) + \"+\"),\n PATH_ABEMPTY$ = subexp(subexp(\"\\\\/\" + SEGMENT$) + \"*\"),\n PATH_ABSOLUTE$ = subexp(\"\\\\/\" + subexp(SEGMENT_NZ$ + PATH_ABEMPTY$) + \"?\"),\n //simplified\n PATH_NOSCHEME$ = subexp(SEGMENT_NZ_NC$ + PATH_ABEMPTY$),\n //simplified\n PATH_ROOTLESS$ = subexp(SEGMENT_NZ$ + PATH_ABEMPTY$),\n //simplified\n PATH_EMPTY$ = \"(?!\" + PCHAR$ + \")\",\n PATH$ = subexp(PATH_ABEMPTY$ + \"|\" + PATH_ABSOLUTE$ + \"|\" + PATH_NOSCHEME$ + \"|\" + PATH_ROOTLESS$ + \"|\" + PATH_EMPTY$),\n QUERY$ = subexp(subexp(PCHAR$ + \"|\" + merge(\"[\\\\/\\\\?]\", IPRIVATE$$)) + \"*\"),\n FRAGMENT$ = subexp(subexp(PCHAR$ + \"|[\\\\/\\\\?]\") + \"*\"),\n HIER_PART$ = subexp(subexp(\"\\\\/\\\\/\" + AUTHORITY$ + PATH_ABEMPTY$) + \"|\" + PATH_ABSOLUTE$ + \"|\" + PATH_ROOTLESS$ + \"|\" + PATH_EMPTY$),\n URI$ = subexp(SCHEME$ + \"\\\\:\" + HIER_PART$ + subexp(\"\\\\?\" + QUERY$) + \"?\" + subexp(\"\\\\#\" + FRAGMENT$) + \"?\"),\n RELATIVE_PART$ = subexp(subexp(\"\\\\/\\\\/\" + AUTHORITY$ + PATH_ABEMPTY$) + \"|\" + PATH_ABSOLUTE$ + \"|\" + PATH_NOSCHEME$ + \"|\" + PATH_EMPTY$),\n RELATIVE$ = subexp(RELATIVE_PART$ + subexp(\"\\\\?\" + QUERY$) + \"?\" + subexp(\"\\\\#\" + FRAGMENT$) + \"?\"),\n URI_REFERENCE$ = subexp(URI$ + \"|\" + RELATIVE$),\n ABSOLUTE_URI$ = subexp(SCHEME$ + \"\\\\:\" + HIER_PART$ + subexp(\"\\\\?\" + QUERY$) + \"?\"),\n GENERIC_REF$ = \"^(\" + SCHEME$ + \")\\\\:\" + subexp(subexp(\"\\\\/\\\\/(\" + subexp(\"(\" + USERINFO$ + \")@\") + \"?(\" + HOST$ + \")\" + subexp(\"\\\\:(\" + PORT$ + \")\") + \"?)\") + \"?(\" + PATH_ABEMPTY$ + \"|\" + PATH_ABSOLUTE$ + \"|\" + PATH_ROOTLESS$ + \"|\" + PATH_EMPTY$ + \")\") + subexp(\"\\\\?(\" + QUERY$ + \")\") + \"?\" + subexp(\"\\\\#(\" + FRAGMENT$ + \")\") + \"?$\",\n RELATIVE_REF$ = \"^(){0}\" + subexp(subexp(\"\\\\/\\\\/(\" + subexp(\"(\" + USERINFO$ + \")@\") + \"?(\" + HOST$ + \")\" + subexp(\"\\\\:(\" + PORT$ + \")\") + \"?)\") + \"?(\" + PATH_ABEMPTY$ + \"|\" + PATH_ABSOLUTE$ + \"|\" + PATH_NOSCHEME$ + \"|\" + PATH_EMPTY$ + \")\") + subexp(\"\\\\?(\" + QUERY$ + \")\") + \"?\" + subexp(\"\\\\#(\" + FRAGMENT$ + \")\") + \"?$\",\n ABSOLUTE_REF$ = \"^(\" + SCHEME$ + \")\\\\:\" + subexp(subexp(\"\\\\/\\\\/(\" + subexp(\"(\" + USERINFO$ + \")@\") + \"?(\" + HOST$ + \")\" + 
subexp(\"\\\\:(\" + PORT$ + \")\") + \"?)\") + \"?(\" + PATH_ABEMPTY$ + \"|\" + PATH_ABSOLUTE$ + \"|\" + PATH_ROOTLESS$ + \"|\" + PATH_EMPTY$ + \")\") + subexp(\"\\\\?(\" + QUERY$ + \")\") + \"?$\",\n SAMEDOC_REF$ = \"^\" + subexp(\"\\\\#(\" + FRAGMENT$ + \")\") + \"?$\",\n AUTHORITY_REF$ = \"^\" + subexp(\"(\" + USERINFO$ + \")@\") + \"?(\" + HOST$ + \")\" + subexp(\"\\\\:(\" + PORT$ + \")\") + \"?$\";\n return {\n NOT_SCHEME: new RegExp(merge(\"[^]\", ALPHA$$, DIGIT$$, \"[\\\\+\\\\-\\\\.]\"), \"g\"),\n NOT_USERINFO: new RegExp(merge(\"[^\\\\%\\\\:]\", UNRESERVED$$, SUB_DELIMS$$), \"g\"),\n NOT_HOST: new RegExp(merge(\"[^\\\\%\\\\[\\\\]\\\\:]\", UNRESERVED$$, SUB_DELIMS$$), \"g\"),\n NOT_PATH: new RegExp(merge(\"[^\\\\%\\\\/\\\\:\\\\@]\", UNRESERVED$$, SUB_DELIMS$$), \"g\"),\n NOT_PATH_NOSCHEME: new RegExp(merge(\"[^\\\\%\\\\/\\\\@]\", UNRESERVED$$, SUB_DELIMS$$), \"g\"),\n NOT_QUERY: new RegExp(merge(\"[^\\\\%]\", UNRESERVED$$, SUB_DELIMS$$, \"[\\\\:\\\\@\\\\/\\\\?]\", IPRIVATE$$), \"g\"),\n NOT_FRAGMENT: new RegExp(merge(\"[^\\\\%]\", UNRESERVED$$, SUB_DELIMS$$, \"[\\\\:\\\\@\\\\/\\\\?]\"), \"g\"),\n ESCAPE: new RegExp(merge(\"[^]\", UNRESERVED$$, SUB_DELIMS$$), \"g\"),\n UNRESERVED: new RegExp(UNRESERVED$$, \"g\"),\n OTHER_CHARS: new RegExp(merge(\"[^\\\\%]\", UNRESERVED$$, RESERVED$$), \"g\"),\n PCT_ENCODED: new RegExp(PCT_ENCODED$, \"g\"),\n IPV4ADDRESS: new RegExp(\"^(\" + IPV4ADDRESS$ + \")$\"),\n IPV6ADDRESS: new RegExp(\"^\\\\[?(\" + IPV6ADDRESS$ + \")\" + subexp(subexp(\"\\\\%25|\\\\%(?!\" + HEXDIG$$ + \"{2})\") + \"(\" + ZONEID$ + \")\") + \"?\\\\]?$\") //RFC 6874, with relaxed parsing rules\n };\n}\nvar URI_PROTOCOL = buildExps(false);\n\nvar IRI_PROTOCOL = buildExps(true);\n\nvar slicedToArray = function () {\n function sliceIterator(arr, i) {\n var _arr = [];\n var _n = true;\n var _d = false;\n var _e = undefined;\n\n try {\n for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) {\n _arr.push(_s.value);\n\n if (i && _arr.length === i) break;\n }\n } catch (err) {\n _d = true;\n _e = err;\n } finally {\n try {\n if (!_n && _i[\"return\"]) _i[\"return\"]();\n } finally {\n if (_d) throw _e;\n }\n }\n\n return _arr;\n }\n\n return function (arr, i) {\n if (Array.isArray(arr)) {\n return arr;\n } else if (Symbol.iterator in Object(arr)) {\n return sliceIterator(arr, i);\n } else {\n throw new TypeError(\"Invalid attempt to destructure non-iterable instance\");\n }\n };\n}();\n\n\n\n\n\n\n\n\n\n\n\n\n\nvar toConsumableArray = function (arr) {\n if (Array.isArray(arr)) {\n for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) arr2[i] = arr[i];\n\n return arr2;\n } else {\n return Array.from(arr);\n }\n};\n\n/** Highest positive signed 32-bit float value */\n\nvar maxInt = 2147483647; // aka. 
0x7FFFFFFF or 2^31-1\n\n/** Bootstring parameters */\nvar base = 36;\nvar tMin = 1;\nvar tMax = 26;\nvar skew = 38;\nvar damp = 700;\nvar initialBias = 72;\nvar initialN = 128; // 0x80\nvar delimiter = '-'; // '\\x2D'\n\n/** Regular expressions */\nvar regexPunycode = /^xn--/;\nvar regexNonASCII = /[^\\0-\\x7E]/; // non-ASCII chars\nvar regexSeparators = /[\\x2E\\u3002\\uFF0E\\uFF61]/g; // RFC 3490 separators\n\n/** Error messages */\nvar errors = {\n\t'overflow': 'Overflow: input needs wider integers to process',\n\t'not-basic': 'Illegal input >= 0x80 (not a basic code point)',\n\t'invalid-input': 'Invalid input'\n};\n\n/** Convenience shortcuts */\nvar baseMinusTMin = base - tMin;\nvar floor = Math.floor;\nvar stringFromCharCode = String.fromCharCode;\n\n/*--------------------------------------------------------------------------*/\n\n/**\n * A generic error utility function.\n * @private\n * @param {String} type The error type.\n * @returns {Error} Throws a `RangeError` with the applicable error message.\n */\nfunction error$1(type) {\n\tthrow new RangeError(errors[type]);\n}\n\n/**\n * A generic `Array#map` utility function.\n * @private\n * @param {Array} array The array to iterate over.\n * @param {Function} callback The function that gets called for every array\n * item.\n * @returns {Array} A new array of values returned by the callback function.\n */\nfunction map(array, fn) {\n\tvar result = [];\n\tvar length = array.length;\n\twhile (length--) {\n\t\tresult[length] = fn(array[length]);\n\t}\n\treturn result;\n}\n\n/**\n * A simple `Array#map`-like wrapper to work with domain name strings or email\n * addresses.\n * @private\n * @param {String} domain The domain name or email address.\n * @param {Function} callback The function that gets called for every\n * character.\n * @returns {Array} A new string of characters returned by the callback\n * function.\n */\nfunction mapDomain(string, fn) {\n\tvar parts = string.split('@');\n\tvar result = '';\n\tif (parts.length > 1) {\n\t\t// In email addresses, only the domain name should be punycoded. Leave\n\t\t// the local part (i.e. everything up to `@`) intact.\n\t\tresult = parts[0] + '@';\n\t\tstring = parts[1];\n\t}\n\t// Avoid `split(regex)` for IE8 compatibility. See #17.\n\tstring = string.replace(regexSeparators, '\\x2E');\n\tvar labels = string.split('.');\n\tvar encoded = map(labels, fn).join('.');\n\treturn result + encoded;\n}\n\n/**\n * Creates an array containing the numeric code points of each Unicode\n * character in the string. 
While JavaScript uses UCS-2 internally,\n * this function will convert a pair of surrogate halves (each of which\n * UCS-2 exposes as separate characters) into a single code point,\n * matching UTF-16.\n * @see `punycode.ucs2.encode`\n * @see \n * @memberOf punycode.ucs2\n * @name decode\n * @param {String} string The Unicode input string (UCS-2).\n * @returns {Array} The new array of code points.\n */\nfunction ucs2decode(string) {\n\tvar output = [];\n\tvar counter = 0;\n\tvar length = string.length;\n\twhile (counter < length) {\n\t\tvar value = string.charCodeAt(counter++);\n\t\tif (value >= 0xD800 && value <= 0xDBFF && counter < length) {\n\t\t\t// It's a high surrogate, and there is a next character.\n\t\t\tvar extra = string.charCodeAt(counter++);\n\t\t\tif ((extra & 0xFC00) == 0xDC00) {\n\t\t\t\t// Low surrogate.\n\t\t\t\toutput.push(((value & 0x3FF) << 10) + (extra & 0x3FF) + 0x10000);\n\t\t\t} else {\n\t\t\t\t// It's an unmatched surrogate; only append this code unit, in case the\n\t\t\t\t// next code unit is the high surrogate of a surrogate pair.\n\t\t\t\toutput.push(value);\n\t\t\t\tcounter--;\n\t\t\t}\n\t\t} else {\n\t\t\toutput.push(value);\n\t\t}\n\t}\n\treturn output;\n}\n\n/**\n * Creates a string based on an array of numeric code points.\n * @see `punycode.ucs2.decode`\n * @memberOf punycode.ucs2\n * @name encode\n * @param {Array} codePoints The array of numeric code points.\n * @returns {String} The new Unicode string (UCS-2).\n */\nvar ucs2encode = function ucs2encode(array) {\n\treturn String.fromCodePoint.apply(String, toConsumableArray(array));\n};\n\n/**\n * Converts a basic code point into a digit/integer.\n * @see `digitToBasic()`\n * @private\n * @param {Number} codePoint The basic numeric code point value.\n * @returns {Number} The numeric value of a basic code point (for use in\n * representing integers) in the range `0` to `base - 1`, or `base` if\n * the code point does not represent a value.\n */\nvar basicToDigit = function basicToDigit(codePoint) {\n\tif (codePoint - 0x30 < 0x0A) {\n\t\treturn codePoint - 0x16;\n\t}\n\tif (codePoint - 0x41 < 0x1A) {\n\t\treturn codePoint - 0x41;\n\t}\n\tif (codePoint - 0x61 < 0x1A) {\n\t\treturn codePoint - 0x61;\n\t}\n\treturn base;\n};\n\n/**\n * Converts a digit/integer into a basic code point.\n * @see `basicToDigit()`\n * @private\n * @param {Number} digit The numeric value of a basic code point.\n * @returns {Number} The basic code point whose value (when used for\n * representing integers) is `digit`, which needs to be in the range\n * `0` to `base - 1`. If `flag` is non-zero, the uppercase form is\n * used; else, the lowercase form is used. The behavior is undefined\n * if `flag` is non-zero and `digit` has no uppercase form.\n */\nvar digitToBasic = function digitToBasic(digit, flag) {\n\t// 0..25 map to ASCII a..z or A..Z\n\t// 26..35 map to ASCII 0..9\n\treturn digit + 22 + 75 * (digit < 26) - ((flag != 0) << 5);\n};\n\n/**\n * Bias adaptation function as per section 3.4 of RFC 3492.\n * https://tools.ietf.org/html/rfc3492#section-3.4\n * @private\n */\nvar adapt = function adapt(delta, numPoints, firstTime) {\n\tvar k = 0;\n\tdelta = firstTime ? 
floor(delta / damp) : delta >> 1;\n\tdelta += floor(delta / numPoints);\n\tfor (; /* no initialization */delta > baseMinusTMin * tMax >> 1; k += base) {\n\t\tdelta = floor(delta / baseMinusTMin);\n\t}\n\treturn floor(k + (baseMinusTMin + 1) * delta / (delta + skew));\n};\n\n/**\n * Converts a Punycode string of ASCII-only symbols to a string of Unicode\n * symbols.\n * @memberOf punycode\n * @param {String} input The Punycode string of ASCII-only symbols.\n * @returns {String} The resulting string of Unicode symbols.\n */\nvar decode = function decode(input) {\n\t// Don't use UCS-2.\n\tvar output = [];\n\tvar inputLength = input.length;\n\tvar i = 0;\n\tvar n = initialN;\n\tvar bias = initialBias;\n\n\t// Handle the basic code points: let `basic` be the number of input code\n\t// points before the last delimiter, or `0` if there is none, then copy\n\t// the first basic code points to the output.\n\n\tvar basic = input.lastIndexOf(delimiter);\n\tif (basic < 0) {\n\t\tbasic = 0;\n\t}\n\n\tfor (var j = 0; j < basic; ++j) {\n\t\t// if it's not a basic code point\n\t\tif (input.charCodeAt(j) >= 0x80) {\n\t\t\terror$1('not-basic');\n\t\t}\n\t\toutput.push(input.charCodeAt(j));\n\t}\n\n\t// Main decoding loop: start just after the last delimiter if any basic code\n\t// points were copied; start at the beginning otherwise.\n\n\tfor (var index = basic > 0 ? basic + 1 : 0; index < inputLength;) /* no final expression */{\n\n\t\t// `index` is the index of the next character to be consumed.\n\t\t// Decode a generalized variable-length integer into `delta`,\n\t\t// which gets added to `i`. The overflow checking is easier\n\t\t// if we increase `i` as we go, then subtract off its starting\n\t\t// value at the end to obtain `delta`.\n\t\tvar oldi = i;\n\t\tfor (var w = 1, k = base;; /* no condition */k += base) {\n\n\t\t\tif (index >= inputLength) {\n\t\t\t\terror$1('invalid-input');\n\t\t\t}\n\n\t\t\tvar digit = basicToDigit(input.charCodeAt(index++));\n\n\t\t\tif (digit >= base || digit > floor((maxInt - i) / w)) {\n\t\t\t\terror$1('overflow');\n\t\t\t}\n\n\t\t\ti += digit * w;\n\t\t\tvar t = k <= bias ? tMin : k >= bias + tMax ? tMax : k - bias;\n\n\t\t\tif (digit < t) {\n\t\t\t\tbreak;\n\t\t\t}\n\n\t\t\tvar baseMinusT = base - t;\n\t\t\tif (w > floor(maxInt / baseMinusT)) {\n\t\t\t\terror$1('overflow');\n\t\t\t}\n\n\t\t\tw *= baseMinusT;\n\t\t}\n\n\t\tvar out = output.length + 1;\n\t\tbias = adapt(i - oldi, out, oldi == 0);\n\n\t\t// `i` was supposed to wrap around from `out` to `0`,\n\t\t// incrementing `n` each time, so we'll fix that now:\n\t\tif (floor(i / out) > maxInt - n) {\n\t\t\terror$1('overflow');\n\t\t}\n\n\t\tn += floor(i / out);\n\t\ti %= out;\n\n\t\t// Insert `n` at position `i` of the output.\n\t\toutput.splice(i++, 0, n);\n\t}\n\n\treturn String.fromCodePoint.apply(String, output);\n};\n\n/**\n * Converts a string of Unicode symbols (e.g. 
a domain name label) to a\n * Punycode string of ASCII-only symbols.\n * @memberOf punycode\n * @param {String} input The string of Unicode symbols.\n * @returns {String} The resulting Punycode string of ASCII-only symbols.\n */\nvar encode = function encode(input) {\n\tvar output = [];\n\n\t// Convert the input in UCS-2 to an array of Unicode code points.\n\tinput = ucs2decode(input);\n\n\t// Cache the length.\n\tvar inputLength = input.length;\n\n\t// Initialize the state.\n\tvar n = initialN;\n\tvar delta = 0;\n\tvar bias = initialBias;\n\n\t// Handle the basic code points.\n\tvar _iteratorNormalCompletion = true;\n\tvar _didIteratorError = false;\n\tvar _iteratorError = undefined;\n\n\ttry {\n\t\tfor (var _iterator = input[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) {\n\t\t\tvar _currentValue2 = _step.value;\n\n\t\t\tif (_currentValue2 < 0x80) {\n\t\t\t\toutput.push(stringFromCharCode(_currentValue2));\n\t\t\t}\n\t\t}\n\t} catch (err) {\n\t\t_didIteratorError = true;\n\t\t_iteratorError = err;\n\t} finally {\n\t\ttry {\n\t\t\tif (!_iteratorNormalCompletion && _iterator.return) {\n\t\t\t\t_iterator.return();\n\t\t\t}\n\t\t} finally {\n\t\t\tif (_didIteratorError) {\n\t\t\t\tthrow _iteratorError;\n\t\t\t}\n\t\t}\n\t}\n\n\tvar basicLength = output.length;\n\tvar handledCPCount = basicLength;\n\n\t// `handledCPCount` is the number of code points that have been handled;\n\t// `basicLength` is the number of basic code points.\n\n\t// Finish the basic string with a delimiter unless it's empty.\n\tif (basicLength) {\n\t\toutput.push(delimiter);\n\t}\n\n\t// Main encoding loop:\n\twhile (handledCPCount < inputLength) {\n\n\t\t// All non-basic code points < n have been handled already. 
Find the next\n\t\t// larger one:\n\t\tvar m = maxInt;\n\t\tvar _iteratorNormalCompletion2 = true;\n\t\tvar _didIteratorError2 = false;\n\t\tvar _iteratorError2 = undefined;\n\n\t\ttry {\n\t\t\tfor (var _iterator2 = input[Symbol.iterator](), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) {\n\t\t\t\tvar currentValue = _step2.value;\n\n\t\t\t\tif (currentValue >= n && currentValue < m) {\n\t\t\t\t\tm = currentValue;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// Increase `delta` enough to advance the decoder's state to ,\n\t\t\t// but guard against overflow.\n\t\t} catch (err) {\n\t\t\t_didIteratorError2 = true;\n\t\t\t_iteratorError2 = err;\n\t\t} finally {\n\t\t\ttry {\n\t\t\t\tif (!_iteratorNormalCompletion2 && _iterator2.return) {\n\t\t\t\t\t_iterator2.return();\n\t\t\t\t}\n\t\t\t} finally {\n\t\t\t\tif (_didIteratorError2) {\n\t\t\t\t\tthrow _iteratorError2;\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tvar handledCPCountPlusOne = handledCPCount + 1;\n\t\tif (m - n > floor((maxInt - delta) / handledCPCountPlusOne)) {\n\t\t\terror$1('overflow');\n\t\t}\n\n\t\tdelta += (m - n) * handledCPCountPlusOne;\n\t\tn = m;\n\n\t\tvar _iteratorNormalCompletion3 = true;\n\t\tvar _didIteratorError3 = false;\n\t\tvar _iteratorError3 = undefined;\n\n\t\ttry {\n\t\t\tfor (var _iterator3 = input[Symbol.iterator](), _step3; !(_iteratorNormalCompletion3 = (_step3 = _iterator3.next()).done); _iteratorNormalCompletion3 = true) {\n\t\t\t\tvar _currentValue = _step3.value;\n\n\t\t\t\tif (_currentValue < n && ++delta > maxInt) {\n\t\t\t\t\terror$1('overflow');\n\t\t\t\t}\n\t\t\t\tif (_currentValue == n) {\n\t\t\t\t\t// Represent delta as a generalized variable-length integer.\n\t\t\t\t\tvar q = delta;\n\t\t\t\t\tfor (var k = base;; /* no condition */k += base) {\n\t\t\t\t\t\tvar t = k <= bias ? tMin : k >= bias + tMax ? tMax : k - bias;\n\t\t\t\t\t\tif (q < t) {\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\t\t\t\t\t\tvar qMinusT = q - t;\n\t\t\t\t\t\tvar baseMinusT = base - t;\n\t\t\t\t\t\toutput.push(stringFromCharCode(digitToBasic(t + qMinusT % baseMinusT, 0)));\n\t\t\t\t\t\tq = floor(qMinusT / baseMinusT);\n\t\t\t\t\t}\n\n\t\t\t\t\toutput.push(stringFromCharCode(digitToBasic(q, 0)));\n\t\t\t\t\tbias = adapt(delta, handledCPCountPlusOne, handledCPCount == basicLength);\n\t\t\t\t\tdelta = 0;\n\t\t\t\t\t++handledCPCount;\n\t\t\t\t}\n\t\t\t}\n\t\t} catch (err) {\n\t\t\t_didIteratorError3 = true;\n\t\t\t_iteratorError3 = err;\n\t\t} finally {\n\t\t\ttry {\n\t\t\t\tif (!_iteratorNormalCompletion3 && _iterator3.return) {\n\t\t\t\t\t_iterator3.return();\n\t\t\t\t}\n\t\t\t} finally {\n\t\t\t\tif (_didIteratorError3) {\n\t\t\t\t\tthrow _iteratorError3;\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\t++delta;\n\t\t++n;\n\t}\n\treturn output.join('');\n};\n\n/**\n * Converts a Punycode string representing a domain name or an email address\n * to Unicode. Only the Punycoded parts of the input will be converted, i.e.\n * it doesn't matter if you call it on a string that has already been\n * converted to Unicode.\n * @memberOf punycode\n * @param {String} input The Punycoded domain name or email address to\n * convert to Unicode.\n * @returns {String} The Unicode representation of the given Punycode\n * string.\n */\nvar toUnicode = function toUnicode(input) {\n\treturn mapDomain(input, function (string) {\n\t\treturn regexPunycode.test(string) ? decode(string.slice(4).toLowerCase()) : string;\n\t});\n};\n\n/**\n * Converts a Unicode string representing a domain name or an email address to\n * Punycode. 
Only the non-ASCII parts of the domain name will be converted,\n * i.e. it doesn't matter if you call it with a domain that's already in\n * ASCII.\n * @memberOf punycode\n * @param {String} input The domain name or email address to convert, as a\n * Unicode string.\n * @returns {String} The Punycode representation of the given domain name or\n * email address.\n */\nvar toASCII = function toASCII(input) {\n\treturn mapDomain(input, function (string) {\n\t\treturn regexNonASCII.test(string) ? 'xn--' + encode(string) : string;\n\t});\n};\n\n/*--------------------------------------------------------------------------*/\n\n/** Define the public API */\nvar punycode = {\n\t/**\n * A string representing the current Punycode.js version number.\n * @memberOf punycode\n * @type String\n */\n\t'version': '2.1.0',\n\t/**\n * An object of methods to convert from JavaScript's internal character\n * representation (UCS-2) to Unicode code points, and back.\n * @see \n * @memberOf punycode\n * @type Object\n */\n\t'ucs2': {\n\t\t'decode': ucs2decode,\n\t\t'encode': ucs2encode\n\t},\n\t'decode': decode,\n\t'encode': encode,\n\t'toASCII': toASCII,\n\t'toUnicode': toUnicode\n};\n\n/**\n * URI.js\n *\n * @fileoverview An RFC 3986 compliant, scheme extendable URI parsing/validating/resolving library for JavaScript.\n * @author Gary Court\n * @see http://github.com/garycourt/uri-js\n */\n/**\n * Copyright 2011 Gary Court. All rights reserved.\n *\n * Redistribution and use in source and binary forms, with or without modification, are\n * permitted provided that the following conditions are met:\n *\n * 1. Redistributions of source code must retain the above copyright notice, this list of\n * conditions and the following disclaimer.\n *\n * 2. Redistributions in binary form must reproduce the above copyright notice, this list\n * of conditions and the following disclaimer in the documentation and/or other materials\n * provided with the distribution.\n *\n * THIS SOFTWARE IS PROVIDED BY GARY COURT ``AS IS'' AND ANY EXPRESS OR IMPLIED\n * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND\n * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL GARY COURT OR\n * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\n * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON\n * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\n * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF\n * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n *\n * The views and conclusions contained in the software and documentation are those of the\n * authors and should not be interpreted as representing official policies, either expressed\n * or implied, of Gary Court.\n */\nvar SCHEMES = {};\nfunction pctEncChar(chr) {\n var c = chr.charCodeAt(0);\n var e = void 0;\n if (c < 16) e = \"%0\" + c.toString(16).toUpperCase();else if (c < 128) e = \"%\" + c.toString(16).toUpperCase();else if (c < 2048) e = \"%\" + (c >> 6 | 192).toString(16).toUpperCase() + \"%\" + (c & 63 | 128).toString(16).toUpperCase();else e = \"%\" + (c >> 12 | 224).toString(16).toUpperCase() + \"%\" + (c >> 6 & 63 | 128).toString(16).toUpperCase() + \"%\" + (c & 63 | 128).toString(16).toUpperCase();\n return e;\n}\nfunction pctDecChars(str) {\n var newStr = \"\";\n var i = 0;\n var il = str.length;\n while (i < il) {\n var c = parseInt(str.substr(i + 1, 2), 16);\n if (c < 128) {\n newStr += String.fromCharCode(c);\n i += 3;\n } else if (c >= 194 && c < 224) {\n if (il - i >= 6) {\n var c2 = parseInt(str.substr(i + 4, 2), 16);\n newStr += String.fromCharCode((c & 31) << 6 | c2 & 63);\n } else {\n newStr += str.substr(i, 6);\n }\n i += 6;\n } else if (c >= 224) {\n if (il - i >= 9) {\n var _c = parseInt(str.substr(i + 4, 2), 16);\n var c3 = parseInt(str.substr(i + 7, 2), 16);\n newStr += String.fromCharCode((c & 15) << 12 | (_c & 63) << 6 | c3 & 63);\n } else {\n newStr += str.substr(i, 9);\n }\n i += 9;\n } else {\n newStr += str.substr(i, 3);\n i += 3;\n }\n }\n return newStr;\n}\nfunction _normalizeComponentEncoding(components, protocol) {\n function decodeUnreserved(str) {\n var decStr = pctDecChars(str);\n return !decStr.match(protocol.UNRESERVED) ? str : decStr;\n }\n if (components.scheme) components.scheme = String(components.scheme).replace(protocol.PCT_ENCODED, decodeUnreserved).toLowerCase().replace(protocol.NOT_SCHEME, \"\");\n if (components.userinfo !== undefined) components.userinfo = String(components.userinfo).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(protocol.NOT_USERINFO, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase);\n if (components.host !== undefined) components.host = String(components.host).replace(protocol.PCT_ENCODED, decodeUnreserved).toLowerCase().replace(protocol.NOT_HOST, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase);\n if (components.path !== undefined) components.path = String(components.path).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(components.scheme ? 
protocol.NOT_PATH : protocol.NOT_PATH_NOSCHEME, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase);\n if (components.query !== undefined) components.query = String(components.query).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(protocol.NOT_QUERY, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase);\n if (components.fragment !== undefined) components.fragment = String(components.fragment).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(protocol.NOT_FRAGMENT, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase);\n return components;\n}\n\nfunction _stripLeadingZeros(str) {\n return str.replace(/^0*(.*)/, \"$1\") || \"0\";\n}\nfunction _normalizeIPv4(host, protocol) {\n var matches = host.match(protocol.IPV4ADDRESS) || [];\n\n var _matches = slicedToArray(matches, 2),\n address = _matches[1];\n\n if (address) {\n return address.split(\".\").map(_stripLeadingZeros).join(\".\");\n } else {\n return host;\n }\n}\nfunction _normalizeIPv6(host, protocol) {\n var matches = host.match(protocol.IPV6ADDRESS) || [];\n\n var _matches2 = slicedToArray(matches, 3),\n address = _matches2[1],\n zone = _matches2[2];\n\n if (address) {\n var _address$toLowerCase$ = address.toLowerCase().split('::').reverse(),\n _address$toLowerCase$2 = slicedToArray(_address$toLowerCase$, 2),\n last = _address$toLowerCase$2[0],\n first = _address$toLowerCase$2[1];\n\n var firstFields = first ? first.split(\":\").map(_stripLeadingZeros) : [];\n var lastFields = last.split(\":\").map(_stripLeadingZeros);\n var isLastFieldIPv4Address = protocol.IPV4ADDRESS.test(lastFields[lastFields.length - 1]);\n var fieldCount = isLastFieldIPv4Address ? 7 : 8;\n var lastFieldsStart = lastFields.length - fieldCount;\n var fields = Array(fieldCount);\n for (var x = 0; x < fieldCount; ++x) {\n fields[x] = firstFields[x] || lastFields[lastFieldsStart + x] || '';\n }\n if (isLastFieldIPv4Address) {\n fields[fieldCount - 1] = _normalizeIPv4(fields[fieldCount - 1], protocol);\n }\n var allZeroFields = fields.reduce(function (acc, field, index) {\n if (!field || field === \"0\") {\n var lastLongest = acc[acc.length - 1];\n if (lastLongest && lastLongest.index + lastLongest.length === index) {\n lastLongest.length++;\n } else {\n acc.push({ index: index, length: 1 });\n }\n }\n return acc;\n }, []);\n var longestZeroFields = allZeroFields.sort(function (a, b) {\n return b.length - a.length;\n })[0];\n var newHost = void 0;\n if (longestZeroFields && longestZeroFields.length > 1) {\n var newFirst = fields.slice(0, longestZeroFields.index);\n var newLast = fields.slice(longestZeroFields.index + longestZeroFields.length);\n newHost = newFirst.join(\":\") + \"::\" + newLast.join(\":\");\n } else {\n newHost = fields.join(\":\");\n }\n if (zone) {\n newHost += \"%\" + zone;\n }\n return newHost;\n } else {\n return host;\n }\n}\nvar URI_PARSE = /^(?:([^:\\/?#]+):)?(?:\\/\\/((?:([^\\/?#@]*)@)?(\\[[^\\/?#\\]]+\\]|[^\\/?#:]*)(?:\\:(\\d*))?))?([^?#]*)(?:\\?([^#]*))?(?:#((?:.|\\n|\\r)*))?/i;\nvar NO_MATCH_IS_UNDEFINED = \"\".match(/(){0}/)[1] === undefined;\nfunction parse(uriString) {\n var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};\n\n var components = {};\n var protocol = options.iri !== false ? IRI_PROTOCOL : URI_PROTOCOL;\n if (options.reference === \"suffix\") uriString = (options.scheme ? 
options.scheme + \":\" : \"\") + \"//\" + uriString;\n var matches = uriString.match(URI_PARSE);\n if (matches) {\n if (NO_MATCH_IS_UNDEFINED) {\n //store each component\n components.scheme = matches[1];\n components.userinfo = matches[3];\n components.host = matches[4];\n components.port = parseInt(matches[5], 10);\n components.path = matches[6] || \"\";\n components.query = matches[7];\n components.fragment = matches[8];\n //fix port number\n if (isNaN(components.port)) {\n components.port = matches[5];\n }\n } else {\n //IE FIX for improper RegExp matching\n //store each component\n components.scheme = matches[1] || undefined;\n components.userinfo = uriString.indexOf(\"@\") !== -1 ? matches[3] : undefined;\n components.host = uriString.indexOf(\"//\") !== -1 ? matches[4] : undefined;\n components.port = parseInt(matches[5], 10);\n components.path = matches[6] || \"\";\n components.query = uriString.indexOf(\"?\") !== -1 ? matches[7] : undefined;\n components.fragment = uriString.indexOf(\"#\") !== -1 ? matches[8] : undefined;\n //fix port number\n if (isNaN(components.port)) {\n components.port = uriString.match(/\\/\\/(?:.|\\n)*\\:(?:\\/|\\?|\\#|$)/) ? matches[4] : undefined;\n }\n }\n if (components.host) {\n //normalize IP hosts\n components.host = _normalizeIPv6(_normalizeIPv4(components.host, protocol), protocol);\n }\n //determine reference type\n if (components.scheme === undefined && components.userinfo === undefined && components.host === undefined && components.port === undefined && !components.path && components.query === undefined) {\n components.reference = \"same-document\";\n } else if (components.scheme === undefined) {\n components.reference = \"relative\";\n } else if (components.fragment === undefined) {\n components.reference = \"absolute\";\n } else {\n components.reference = \"uri\";\n }\n //check for reference errors\n if (options.reference && options.reference !== \"suffix\" && options.reference !== components.reference) {\n components.error = components.error || \"URI is not a \" + options.reference + \" reference.\";\n }\n //find scheme handler\n var schemeHandler = SCHEMES[(options.scheme || components.scheme || \"\").toLowerCase()];\n //check if scheme can't handle IRIs\n if (!options.unicodeSupport && (!schemeHandler || !schemeHandler.unicodeSupport)) {\n //if host component is a domain name\n if (components.host && (options.domainHost || schemeHandler && schemeHandler.domainHost)) {\n //convert Unicode IDN -> ASCII IDN\n try {\n components.host = punycode.toASCII(components.host.replace(protocol.PCT_ENCODED, pctDecChars).toLowerCase());\n } catch (e) {\n components.error = components.error || \"Host's domain name can not be converted to ASCII via punycode: \" + e;\n }\n }\n //convert IRI -> URI\n _normalizeComponentEncoding(components, URI_PROTOCOL);\n } else {\n //normalize encodings\n _normalizeComponentEncoding(components, protocol);\n }\n //perform scheme specific parsing\n if (schemeHandler && schemeHandler.parse) {\n schemeHandler.parse(components, options);\n }\n } else {\n components.error = components.error || \"URI can not be parsed.\";\n }\n return components;\n}\n\nfunction _recomposeAuthority(components, options) {\n var protocol = options.iri !== false ? 
IRI_PROTOCOL : URI_PROTOCOL;\n var uriTokens = [];\n if (components.userinfo !== undefined) {\n uriTokens.push(components.userinfo);\n uriTokens.push(\"@\");\n }\n if (components.host !== undefined) {\n //normalize IP hosts, add brackets and escape zone separator for IPv6\n uriTokens.push(_normalizeIPv6(_normalizeIPv4(String(components.host), protocol), protocol).replace(protocol.IPV6ADDRESS, function (_, $1, $2) {\n return \"[\" + $1 + ($2 ? \"%25\" + $2 : \"\") + \"]\";\n }));\n }\n if (typeof components.port === \"number\" || typeof components.port === \"string\") {\n uriTokens.push(\":\");\n uriTokens.push(String(components.port));\n }\n return uriTokens.length ? uriTokens.join(\"\") : undefined;\n}\n\nvar RDS1 = /^\\.\\.?\\//;\nvar RDS2 = /^\\/\\.(\\/|$)/;\nvar RDS3 = /^\\/\\.\\.(\\/|$)/;\nvar RDS5 = /^\\/?(?:.|\\n)*?(?=\\/|$)/;\nfunction removeDotSegments(input) {\n var output = [];\n while (input.length) {\n if (input.match(RDS1)) {\n input = input.replace(RDS1, \"\");\n } else if (input.match(RDS2)) {\n input = input.replace(RDS2, \"/\");\n } else if (input.match(RDS3)) {\n input = input.replace(RDS3, \"/\");\n output.pop();\n } else if (input === \".\" || input === \"..\") {\n input = \"\";\n } else {\n var im = input.match(RDS5);\n if (im) {\n var s = im[0];\n input = input.slice(s.length);\n output.push(s);\n } else {\n throw new Error(\"Unexpected dot segment condition\");\n }\n }\n }\n return output.join(\"\");\n}\n\nfunction serialize(components) {\n var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};\n\n var protocol = options.iri ? IRI_PROTOCOL : URI_PROTOCOL;\n var uriTokens = [];\n //find scheme handler\n var schemeHandler = SCHEMES[(options.scheme || components.scheme || \"\").toLowerCase()];\n //perform scheme specific serialization\n if (schemeHandler && schemeHandler.serialize) schemeHandler.serialize(components, options);\n if (components.host) {\n //if host component is an IPv6 address\n if (protocol.IPV6ADDRESS.test(components.host)) {}\n //TODO: normalize IPv6 address as per RFC 5952\n\n //if host component is a domain name\n else if (options.domainHost || schemeHandler && schemeHandler.domainHost) {\n //convert IDN via punycode\n try {\n components.host = !options.iri ? punycode.toASCII(components.host.replace(protocol.PCT_ENCODED, pctDecChars).toLowerCase()) : punycode.toUnicode(components.host);\n } catch (e) {\n components.error = components.error || \"Host's domain name can not be converted to \" + (!options.iri ? 
\"ASCII\" : \"Unicode\") + \" via punycode: \" + e;\n }\n }\n }\n //normalize encoding\n _normalizeComponentEncoding(components, protocol);\n if (options.reference !== \"suffix\" && components.scheme) {\n uriTokens.push(components.scheme);\n uriTokens.push(\":\");\n }\n var authority = _recomposeAuthority(components, options);\n if (authority !== undefined) {\n if (options.reference !== \"suffix\") {\n uriTokens.push(\"//\");\n }\n uriTokens.push(authority);\n if (components.path && components.path.charAt(0) !== \"/\") {\n uriTokens.push(\"/\");\n }\n }\n if (components.path !== undefined) {\n var s = components.path;\n if (!options.absolutePath && (!schemeHandler || !schemeHandler.absolutePath)) {\n s = removeDotSegments(s);\n }\n if (authority === undefined) {\n s = s.replace(/^\\/\\//, \"/%2F\"); //don't allow the path to start with \"//\"\n }\n uriTokens.push(s);\n }\n if (components.query !== undefined) {\n uriTokens.push(\"?\");\n uriTokens.push(components.query);\n }\n if (components.fragment !== undefined) {\n uriTokens.push(\"#\");\n uriTokens.push(components.fragment);\n }\n return uriTokens.join(\"\"); //merge tokens into a string\n}\n\nfunction resolveComponents(base, relative) {\n var options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};\n var skipNormalization = arguments[3];\n\n var target = {};\n if (!skipNormalization) {\n base = parse(serialize(base, options), options); //normalize base components\n relative = parse(serialize(relative, options), options); //normalize relative components\n }\n options = options || {};\n if (!options.tolerant && relative.scheme) {\n target.scheme = relative.scheme;\n //target.authority = relative.authority;\n target.userinfo = relative.userinfo;\n target.host = relative.host;\n target.port = relative.port;\n target.path = removeDotSegments(relative.path || \"\");\n target.query = relative.query;\n } else {\n if (relative.userinfo !== undefined || relative.host !== undefined || relative.port !== undefined) {\n //target.authority = relative.authority;\n target.userinfo = relative.userinfo;\n target.host = relative.host;\n target.port = relative.port;\n target.path = removeDotSegments(relative.path || \"\");\n target.query = relative.query;\n } else {\n if (!relative.path) {\n target.path = base.path;\n if (relative.query !== undefined) {\n target.query = relative.query;\n } else {\n target.query = base.query;\n }\n } else {\n if (relative.path.charAt(0) === \"/\") {\n target.path = removeDotSegments(relative.path);\n } else {\n if ((base.userinfo !== undefined || base.host !== undefined || base.port !== undefined) && !base.path) {\n target.path = \"/\" + relative.path;\n } else if (!base.path) {\n target.path = relative.path;\n } else {\n target.path = base.path.slice(0, base.path.lastIndexOf(\"/\") + 1) + relative.path;\n }\n target.path = removeDotSegments(target.path);\n }\n target.query = relative.query;\n }\n //target.authority = base.authority;\n target.userinfo = base.userinfo;\n target.host = base.host;\n target.port = base.port;\n }\n target.scheme = base.scheme;\n }\n target.fragment = relative.fragment;\n return target;\n}\n\nfunction resolve(baseURI, relativeURI, options) {\n var schemelessOptions = assign({ scheme: 'null' }, options);\n return serialize(resolveComponents(parse(baseURI, schemelessOptions), parse(relativeURI, schemelessOptions), schemelessOptions, true), schemelessOptions);\n}\n\nfunction normalize(uri, options) {\n if (typeof uri === \"string\") {\n uri = serialize(parse(uri, options), 
options);\n } else if (typeOf(uri) === \"object\") {\n uri = parse(serialize(uri, options), options);\n }\n return uri;\n}\n\nfunction equal(uriA, uriB, options) {\n if (typeof uriA === \"string\") {\n uriA = serialize(parse(uriA, options), options);\n } else if (typeOf(uriA) === \"object\") {\n uriA = serialize(uriA, options);\n }\n if (typeof uriB === \"string\") {\n uriB = serialize(parse(uriB, options), options);\n } else if (typeOf(uriB) === \"object\") {\n uriB = serialize(uriB, options);\n }\n return uriA === uriB;\n}\n\nfunction escapeComponent(str, options) {\n return str && str.toString().replace(!options || !options.iri ? URI_PROTOCOL.ESCAPE : IRI_PROTOCOL.ESCAPE, pctEncChar);\n}\n\nfunction unescapeComponent(str, options) {\n return str && str.toString().replace(!options || !options.iri ? URI_PROTOCOL.PCT_ENCODED : IRI_PROTOCOL.PCT_ENCODED, pctDecChars);\n}\n\nvar handler = {\n scheme: \"http\",\n domainHost: true,\n parse: function parse(components, options) {\n //report missing host\n if (!components.host) {\n components.error = components.error || \"HTTP URIs must have a host.\";\n }\n return components;\n },\n serialize: function serialize(components, options) {\n var secure = String(components.scheme).toLowerCase() === \"https\";\n //normalize the default port\n if (components.port === (secure ? 443 : 80) || components.port === \"\") {\n components.port = undefined;\n }\n //normalize the empty path\n if (!components.path) {\n components.path = \"/\";\n }\n //NOTE: We do not parse query strings for HTTP URIs\n //as WWW Form Url Encoded query strings are part of the HTML4+ spec,\n //and not the HTTP spec.\n return components;\n }\n};\n\nvar handler$1 = {\n scheme: \"https\",\n domainHost: handler.domainHost,\n parse: handler.parse,\n serialize: handler.serialize\n};\n\nfunction isSecure(wsComponents) {\n return typeof wsComponents.secure === 'boolean' ? wsComponents.secure : String(wsComponents.scheme).toLowerCase() === \"wss\";\n}\n//RFC 6455\nvar handler$2 = {\n scheme: \"ws\",\n domainHost: true,\n parse: function parse(components, options) {\n var wsComponents = components;\n //indicate if the secure flag is set\n wsComponents.secure = isSecure(wsComponents);\n //construct resouce name\n wsComponents.resourceName = (wsComponents.path || '/') + (wsComponents.query ? '?' + wsComponents.query : '');\n wsComponents.path = undefined;\n wsComponents.query = undefined;\n return wsComponents;\n },\n serialize: function serialize(wsComponents, options) {\n //normalize the default port\n if (wsComponents.port === (isSecure(wsComponents) ? 443 : 80) || wsComponents.port === \"\") {\n wsComponents.port = undefined;\n }\n //ensure scheme matches secure flag\n if (typeof wsComponents.secure === 'boolean') {\n wsComponents.scheme = wsComponents.secure ? 'wss' : 'ws';\n wsComponents.secure = undefined;\n }\n //reconstruct path from resource name\n if (wsComponents.resourceName) {\n var _wsComponents$resourc = wsComponents.resourceName.split('?'),\n _wsComponents$resourc2 = slicedToArray(_wsComponents$resourc, 2),\n path = _wsComponents$resourc2[0],\n query = _wsComponents$resourc2[1];\n\n wsComponents.path = path && path !== '/' ? 
path : undefined;\n wsComponents.query = query;\n wsComponents.resourceName = undefined;\n }\n //forbid fragment component\n wsComponents.fragment = undefined;\n return wsComponents;\n }\n};\n\nvar handler$3 = {\n scheme: \"wss\",\n domainHost: handler$2.domainHost,\n parse: handler$2.parse,\n serialize: handler$2.serialize\n};\n\nvar O = {};\nvar isIRI = true;\n//RFC 3986\nvar UNRESERVED$$ = \"[A-Za-z0-9\\\\-\\\\.\\\\_\\\\~\" + (isIRI ? \"\\\\xA0-\\\\u200D\\\\u2010-\\\\u2029\\\\u202F-\\\\uD7FF\\\\uF900-\\\\uFDCF\\\\uFDF0-\\\\uFFEF\" : \"\") + \"]\";\nvar HEXDIG$$ = \"[0-9A-Fa-f]\"; //case-insensitive\nvar PCT_ENCODED$ = subexp(subexp(\"%[EFef]\" + HEXDIG$$ + \"%\" + HEXDIG$$ + HEXDIG$$ + \"%\" + HEXDIG$$ + HEXDIG$$) + \"|\" + subexp(\"%[89A-Fa-f]\" + HEXDIG$$ + \"%\" + HEXDIG$$ + HEXDIG$$) + \"|\" + subexp(\"%\" + HEXDIG$$ + HEXDIG$$)); //expanded\n//RFC 5322, except these symbols as per RFC 6068: @ : / ? # [ ] & ; =\n//const ATEXT$$ = \"[A-Za-z0-9\\\\!\\\\#\\\\$\\\\%\\\\&\\\\'\\\\*\\\\+\\\\-\\\\/\\\\=\\\\?\\\\^\\\\_\\\\`\\\\{\\\\|\\\\}\\\\~]\";\n//const WSP$$ = \"[\\\\x20\\\\x09]\";\n//const OBS_QTEXT$$ = \"[\\\\x01-\\\\x08\\\\x0B\\\\x0C\\\\x0E-\\\\x1F\\\\x7F]\"; //(%d1-8 / %d11-12 / %d14-31 / %d127)\n//const QTEXT$$ = merge(\"[\\\\x21\\\\x23-\\\\x5B\\\\x5D-\\\\x7E]\", OBS_QTEXT$$); //%d33 / %d35-91 / %d93-126 / obs-qtext\n//const VCHAR$$ = \"[\\\\x21-\\\\x7E]\";\n//const WSP$$ = \"[\\\\x20\\\\x09]\";\n//const OBS_QP$ = subexp(\"\\\\\\\\\" + merge(\"[\\\\x00\\\\x0D\\\\x0A]\", OBS_QTEXT$$)); //%d0 / CR / LF / obs-qtext\n//const FWS$ = subexp(subexp(WSP$$ + \"*\" + \"\\\\x0D\\\\x0A\") + \"?\" + WSP$$ + \"+\");\n//const QUOTED_PAIR$ = subexp(subexp(\"\\\\\\\\\" + subexp(VCHAR$$ + \"|\" + WSP$$)) + \"|\" + OBS_QP$);\n//const QUOTED_STRING$ = subexp('\\\\\"' + subexp(FWS$ + \"?\" + QCONTENT$) + \"*\" + FWS$ + \"?\" + '\\\\\"');\nvar ATEXT$$ = \"[A-Za-z0-9\\\\!\\\\$\\\\%\\\\'\\\\*\\\\+\\\\-\\\\^\\\\_\\\\`\\\\{\\\\|\\\\}\\\\~]\";\nvar QTEXT$$ = \"[\\\\!\\\\$\\\\%\\\\'\\\\(\\\\)\\\\*\\\\+\\\\,\\\\-\\\\.0-9\\\\<\\\\>A-Z\\\\x5E-\\\\x7E]\";\nvar VCHAR$$ = merge(QTEXT$$, \"[\\\\\\\"\\\\\\\\]\");\nvar SOME_DELIMS$$ = \"[\\\\!\\\\$\\\\'\\\\(\\\\)\\\\*\\\\+\\\\,\\\\;\\\\:\\\\@]\";\nvar UNRESERVED = new RegExp(UNRESERVED$$, \"g\");\nvar PCT_ENCODED = new RegExp(PCT_ENCODED$, \"g\");\nvar NOT_LOCAL_PART = new RegExp(merge(\"[^]\", ATEXT$$, \"[\\\\.]\", '[\\\\\"]', VCHAR$$), \"g\");\nvar NOT_HFNAME = new RegExp(merge(\"[^]\", UNRESERVED$$, SOME_DELIMS$$), \"g\");\nvar NOT_HFVALUE = NOT_HFNAME;\nfunction decodeUnreserved(str) {\n var decStr = pctDecChars(str);\n return !decStr.match(UNRESERVED) ? str : decStr;\n}\nvar handler$4 = {\n scheme: \"mailto\",\n parse: function parse$$1(components, options) {\n var mailtoComponents = components;\n var to = mailtoComponents.to = mailtoComponents.path ? 
mailtoComponents.path.split(\",\") : [];\n mailtoComponents.path = undefined;\n if (mailtoComponents.query) {\n var unknownHeaders = false;\n var headers = {};\n var hfields = mailtoComponents.query.split(\"&\");\n for (var x = 0, xl = hfields.length; x < xl; ++x) {\n var hfield = hfields[x].split(\"=\");\n switch (hfield[0]) {\n case \"to\":\n var toAddrs = hfield[1].split(\",\");\n for (var _x = 0, _xl = toAddrs.length; _x < _xl; ++_x) {\n to.push(toAddrs[_x]);\n }\n break;\n case \"subject\":\n mailtoComponents.subject = unescapeComponent(hfield[1], options);\n break;\n case \"body\":\n mailtoComponents.body = unescapeComponent(hfield[1], options);\n break;\n default:\n unknownHeaders = true;\n headers[unescapeComponent(hfield[0], options)] = unescapeComponent(hfield[1], options);\n break;\n }\n }\n if (unknownHeaders) mailtoComponents.headers = headers;\n }\n mailtoComponents.query = undefined;\n for (var _x2 = 0, _xl2 = to.length; _x2 < _xl2; ++_x2) {\n var addr = to[_x2].split(\"@\");\n addr[0] = unescapeComponent(addr[0]);\n if (!options.unicodeSupport) {\n //convert Unicode IDN -> ASCII IDN\n try {\n addr[1] = punycode.toASCII(unescapeComponent(addr[1], options).toLowerCase());\n } catch (e) {\n mailtoComponents.error = mailtoComponents.error || \"Email address's domain name can not be converted to ASCII via punycode: \" + e;\n }\n } else {\n addr[1] = unescapeComponent(addr[1], options).toLowerCase();\n }\n to[_x2] = addr.join(\"@\");\n }\n return mailtoComponents;\n },\n serialize: function serialize$$1(mailtoComponents, options) {\n var components = mailtoComponents;\n var to = toArray(mailtoComponents.to);\n if (to) {\n for (var x = 0, xl = to.length; x < xl; ++x) {\n var toAddr = String(to[x]);\n var atIdx = toAddr.lastIndexOf(\"@\");\n var localPart = toAddr.slice(0, atIdx).replace(PCT_ENCODED, decodeUnreserved).replace(PCT_ENCODED, toUpperCase).replace(NOT_LOCAL_PART, pctEncChar);\n var domain = toAddr.slice(atIdx + 1);\n //convert IDN via punycode\n try {\n domain = !options.iri ? punycode.toASCII(unescapeComponent(domain, options).toLowerCase()) : punycode.toUnicode(domain);\n } catch (e) {\n components.error = components.error || \"Email address's domain name can not be converted to \" + (!options.iri ? 
\"ASCII\" : \"Unicode\") + \" via punycode: \" + e;\n }\n to[x] = localPart + \"@\" + domain;\n }\n components.path = to.join(\",\");\n }\n var headers = mailtoComponents.headers = mailtoComponents.headers || {};\n if (mailtoComponents.subject) headers[\"subject\"] = mailtoComponents.subject;\n if (mailtoComponents.body) headers[\"body\"] = mailtoComponents.body;\n var fields = [];\n for (var name in headers) {\n if (headers[name] !== O[name]) {\n fields.push(name.replace(PCT_ENCODED, decodeUnreserved).replace(PCT_ENCODED, toUpperCase).replace(NOT_HFNAME, pctEncChar) + \"=\" + headers[name].replace(PCT_ENCODED, decodeUnreserved).replace(PCT_ENCODED, toUpperCase).replace(NOT_HFVALUE, pctEncChar));\n }\n }\n if (fields.length) {\n components.query = fields.join(\"&\");\n }\n return components;\n }\n};\n\nvar URN_PARSE = /^([^\\:]+)\\:(.*)/;\n//RFC 2141\nvar handler$5 = {\n scheme: \"urn\",\n parse: function parse$$1(components, options) {\n var matches = components.path && components.path.match(URN_PARSE);\n var urnComponents = components;\n if (matches) {\n var scheme = options.scheme || urnComponents.scheme || \"urn\";\n var nid = matches[1].toLowerCase();\n var nss = matches[2];\n var urnScheme = scheme + \":\" + (options.nid || nid);\n var schemeHandler = SCHEMES[urnScheme];\n urnComponents.nid = nid;\n urnComponents.nss = nss;\n urnComponents.path = undefined;\n if (schemeHandler) {\n urnComponents = schemeHandler.parse(urnComponents, options);\n }\n } else {\n urnComponents.error = urnComponents.error || \"URN can not be parsed.\";\n }\n return urnComponents;\n },\n serialize: function serialize$$1(urnComponents, options) {\n var scheme = options.scheme || urnComponents.scheme || \"urn\";\n var nid = urnComponents.nid;\n var urnScheme = scheme + \":\" + (options.nid || nid);\n var schemeHandler = SCHEMES[urnScheme];\n if (schemeHandler) {\n urnComponents = schemeHandler.serialize(urnComponents, options);\n }\n var uriComponents = urnComponents;\n var nss = urnComponents.nss;\n uriComponents.path = (nid || options.nid) + \":\" + nss;\n return uriComponents;\n }\n};\n\nvar UUID = /^[0-9A-Fa-f]{8}(?:\\-[0-9A-Fa-f]{4}){3}\\-[0-9A-Fa-f]{12}$/;\n//RFC 4122\nvar handler$6 = {\n scheme: \"urn:uuid\",\n parse: function parse(urnComponents, options) {\n var uuidComponents = urnComponents;\n uuidComponents.uuid = uuidComponents.nss;\n uuidComponents.nss = undefined;\n if (!options.tolerant && (!uuidComponents.uuid || !uuidComponents.uuid.match(UUID))) {\n uuidComponents.error = uuidComponents.error || \"UUID is not valid.\";\n }\n return uuidComponents;\n },\n serialize: function serialize(uuidComponents, options) {\n var urnComponents = uuidComponents;\n //normalize UUID\n urnComponents.nss = (uuidComponents.uuid || \"\").toLowerCase();\n return urnComponents;\n }\n};\n\nSCHEMES[handler.scheme] = handler;\nSCHEMES[handler$1.scheme] = handler$1;\nSCHEMES[handler$2.scheme] = handler$2;\nSCHEMES[handler$3.scheme] = handler$3;\nSCHEMES[handler$4.scheme] = handler$4;\nSCHEMES[handler$5.scheme] = handler$5;\nSCHEMES[handler$6.scheme] = handler$6;\n\nexports.SCHEMES = SCHEMES;\nexports.pctEncChar = pctEncChar;\nexports.pctDecChars = pctDecChars;\nexports.parse = parse;\nexports.removeDotSegments = removeDotSegments;\nexports.serialize = serialize;\nexports.resolveComponents = resolveComponents;\nexports.resolve = resolve;\nexports.normalize = normalize;\nexports.equal = equal;\nexports.escapeComponent = escapeComponent;\nexports.unescapeComponent = unescapeComponent;\n\nObject.defineProperty(exports, 
'__esModule', { value: true });\n\n})));\n//# sourceMappingURL=uri.all.js.map\n","\"use strict\";\n\nvar conversions = {};\nmodule.exports = conversions;\n\nfunction sign(x) {\n return x < 0 ? -1 : 1;\n}\n\nfunction evenRound(x) {\n // Round x to the nearest integer, choosing the even integer if it lies halfway between two.\n if ((x % 1) === 0.5 && (x & 1) === 0) { // [even number].5; round down (i.e. floor)\n return Math.floor(x);\n } else {\n return Math.round(x);\n }\n}\n\nfunction createNumberConversion(bitLength, typeOpts) {\n if (!typeOpts.unsigned) {\n --bitLength;\n }\n const lowerBound = typeOpts.unsigned ? 0 : -Math.pow(2, bitLength);\n const upperBound = Math.pow(2, bitLength) - 1;\n\n const moduloVal = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength) : Math.pow(2, bitLength);\n const moduloBound = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength - 1) : Math.pow(2, bitLength - 1);\n\n return function(V, opts) {\n if (!opts) opts = {};\n\n let x = +V;\n\n if (opts.enforceRange) {\n if (!Number.isFinite(x)) {\n throw new TypeError(\"Argument is not a finite number\");\n }\n\n x = sign(x) * Math.floor(Math.abs(x));\n if (x < lowerBound || x > upperBound) {\n throw new TypeError(\"Argument is not in byte range\");\n }\n\n return x;\n }\n\n if (!isNaN(x) && opts.clamp) {\n x = evenRound(x);\n\n if (x < lowerBound) x = lowerBound;\n if (x > upperBound) x = upperBound;\n return x;\n }\n\n if (!Number.isFinite(x) || x === 0) {\n return 0;\n }\n\n x = sign(x) * Math.floor(Math.abs(x));\n x = x % moduloVal;\n\n if (!typeOpts.unsigned && x >= moduloBound) {\n return x - moduloVal;\n } else if (typeOpts.unsigned) {\n if (x < 0) {\n x += moduloVal;\n } else if (x === -0) { // don't return negative zero\n return 0;\n }\n }\n\n return x;\n }\n}\n\nconversions[\"void\"] = function () {\n return undefined;\n};\n\nconversions[\"boolean\"] = function (val) {\n return !!val;\n};\n\nconversions[\"byte\"] = createNumberConversion(8, { unsigned: false });\nconversions[\"octet\"] = createNumberConversion(8, { unsigned: true });\n\nconversions[\"short\"] = createNumberConversion(16, { unsigned: false });\nconversions[\"unsigned short\"] = createNumberConversion(16, { unsigned: true });\n\nconversions[\"long\"] = createNumberConversion(32, { unsigned: false });\nconversions[\"unsigned long\"] = createNumberConversion(32, { unsigned: true });\n\nconversions[\"long long\"] = createNumberConversion(32, { unsigned: false, moduloBitLength: 64 });\nconversions[\"unsigned long long\"] = createNumberConversion(32, { unsigned: true, moduloBitLength: 64 });\n\nconversions[\"double\"] = function (V) {\n const x = +V;\n\n if (!Number.isFinite(x)) {\n throw new TypeError(\"Argument is not a finite floating-point value\");\n }\n\n return x;\n};\n\nconversions[\"unrestricted double\"] = function (V) {\n const x = +V;\n\n if (isNaN(x)) {\n throw new TypeError(\"Argument is NaN\");\n }\n\n return x;\n};\n\n// not quite valid, but good enough for JS\nconversions[\"float\"] = conversions[\"double\"];\nconversions[\"unrestricted float\"] = conversions[\"unrestricted double\"];\n\nconversions[\"DOMString\"] = function (V, opts) {\n if (!opts) opts = {};\n\n if (opts.treatNullAsEmptyString && V === null) {\n return \"\";\n }\n\n return String(V);\n};\n\nconversions[\"ByteString\"] = function (V, opts) {\n const x = String(V);\n let c = undefined;\n for (let i = 0; (c = x.codePointAt(i)) !== undefined; ++i) {\n if (c > 255) {\n throw new TypeError(\"Argument is not a valid bytestring\");\n }\n 
}\n\n return x;\n};\n\nconversions[\"USVString\"] = function (V) {\n const S = String(V);\n const n = S.length;\n const U = [];\n for (let i = 0; i < n; ++i) {\n const c = S.charCodeAt(i);\n if (c < 0xD800 || c > 0xDFFF) {\n U.push(String.fromCodePoint(c));\n } else if (0xDC00 <= c && c <= 0xDFFF) {\n U.push(String.fromCodePoint(0xFFFD));\n } else {\n if (i === n - 1) {\n U.push(String.fromCodePoint(0xFFFD));\n } else {\n const d = S.charCodeAt(i + 1);\n if (0xDC00 <= d && d <= 0xDFFF) {\n const a = c & 0x3FF;\n const b = d & 0x3FF;\n U.push(String.fromCodePoint((2 << 15) + (2 << 9) * a + b));\n ++i;\n } else {\n U.push(String.fromCodePoint(0xFFFD));\n }\n }\n }\n }\n\n return U.join('');\n};\n\nconversions[\"Date\"] = function (V, opts) {\n if (!(V instanceof Date)) {\n throw new TypeError(\"Argument is not a Date object\");\n }\n if (isNaN(V)) {\n return undefined;\n }\n\n return V;\n};\n\nconversions[\"RegExp\"] = function (V, opts) {\n if (!(V instanceof RegExp)) {\n V = new RegExp(V);\n }\n\n return V;\n};\n","\"use strict\";\nconst usm = require(\"./url-state-machine\");\n\nexports.implementation = class URLImpl {\n constructor(constructorArgs) {\n const url = constructorArgs[0];\n const base = constructorArgs[1];\n\n let parsedBase = null;\n if (base !== undefined) {\n parsedBase = usm.basicURLParse(base);\n if (parsedBase === \"failure\") {\n throw new TypeError(\"Invalid base URL\");\n }\n }\n\n const parsedURL = usm.basicURLParse(url, { baseURL: parsedBase });\n if (parsedURL === \"failure\") {\n throw new TypeError(\"Invalid URL\");\n }\n\n this._url = parsedURL;\n\n // TODO: query stuff\n }\n\n get href() {\n return usm.serializeURL(this._url);\n }\n\n set href(v) {\n const parsedURL = usm.basicURLParse(v);\n if (parsedURL === \"failure\") {\n throw new TypeError(\"Invalid URL\");\n }\n\n this._url = parsedURL;\n }\n\n get origin() {\n return usm.serializeURLOrigin(this._url);\n }\n\n get protocol() {\n return this._url.scheme + \":\";\n }\n\n set protocol(v) {\n usm.basicURLParse(v + \":\", { url: this._url, stateOverride: \"scheme start\" });\n }\n\n get username() {\n return this._url.username;\n }\n\n set username(v) {\n if (usm.cannotHaveAUsernamePasswordPort(this._url)) {\n return;\n }\n\n usm.setTheUsername(this._url, v);\n }\n\n get password() {\n return this._url.password;\n }\n\n set password(v) {\n if (usm.cannotHaveAUsernamePasswordPort(this._url)) {\n return;\n }\n\n usm.setThePassword(this._url, v);\n }\n\n get host() {\n const url = this._url;\n\n if (url.host === null) {\n return \"\";\n }\n\n if (url.port === null) {\n return usm.serializeHost(url.host);\n }\n\n return usm.serializeHost(url.host) + \":\" + usm.serializeInteger(url.port);\n }\n\n set host(v) {\n if (this._url.cannotBeABaseURL) {\n return;\n }\n\n usm.basicURLParse(v, { url: this._url, stateOverride: \"host\" });\n }\n\n get hostname() {\n if (this._url.host === null) {\n return \"\";\n }\n\n return usm.serializeHost(this._url.host);\n }\n\n set hostname(v) {\n if (this._url.cannotBeABaseURL) {\n return;\n }\n\n usm.basicURLParse(v, { url: this._url, stateOverride: \"hostname\" });\n }\n\n get port() {\n if (this._url.port === null) {\n return \"\";\n }\n\n return usm.serializeInteger(this._url.port);\n }\n\n set port(v) {\n if (usm.cannotHaveAUsernamePasswordPort(this._url)) {\n return;\n }\n\n if (v === \"\") {\n this._url.port = null;\n } else {\n usm.basicURLParse(v, { url: this._url, stateOverride: \"port\" });\n }\n }\n\n get pathname() {\n if (this._url.cannotBeABaseURL) {\n return 
this._url.path[0];\n }\n\n if (this._url.path.length === 0) {\n return \"\";\n }\n\n return \"/\" + this._url.path.join(\"/\");\n }\n\n set pathname(v) {\n if (this._url.cannotBeABaseURL) {\n return;\n }\n\n this._url.path = [];\n usm.basicURLParse(v, { url: this._url, stateOverride: \"path start\" });\n }\n\n get search() {\n if (this._url.query === null || this._url.query === \"\") {\n return \"\";\n }\n\n return \"?\" + this._url.query;\n }\n\n set search(v) {\n // TODO: query stuff\n\n const url = this._url;\n\n if (v === \"\") {\n url.query = null;\n return;\n }\n\n const input = v[0] === \"?\" ? v.substring(1) : v;\n url.query = \"\";\n usm.basicURLParse(input, { url, stateOverride: \"query\" });\n }\n\n get hash() {\n if (this._url.fragment === null || this._url.fragment === \"\") {\n return \"\";\n }\n\n return \"#\" + this._url.fragment;\n }\n\n set hash(v) {\n if (v === \"\") {\n this._url.fragment = null;\n return;\n }\n\n const input = v[0] === \"#\" ? v.substring(1) : v;\n this._url.fragment = \"\";\n usm.basicURLParse(input, { url: this._url, stateOverride: \"fragment\" });\n }\n\n toJSON() {\n return this.href;\n }\n};\n","\"use strict\";\n\nconst conversions = require(\"webidl-conversions\");\nconst utils = require(\"./utils.js\");\nconst Impl = require(\".//URL-impl.js\");\n\nconst impl = utils.implSymbol;\n\nfunction URL(url) {\n if (!this || this[impl] || !(this instanceof URL)) {\n throw new TypeError(\"Failed to construct 'URL': Please use the 'new' operator, this DOM object constructor cannot be called as a function.\");\n }\n if (arguments.length < 1) {\n throw new TypeError(\"Failed to construct 'URL': 1 argument required, but only \" + arguments.length + \" present.\");\n }\n const args = [];\n for (let i = 0; i < arguments.length && i < 2; ++i) {\n args[i] = arguments[i];\n }\n args[0] = conversions[\"USVString\"](args[0]);\n if (args[1] !== undefined) {\n args[1] = conversions[\"USVString\"](args[1]);\n }\n\n module.exports.setup(this, args);\n}\n\nURL.prototype.toJSON = function toJSON() {\n if (!this || !module.exports.is(this)) {\n throw new TypeError(\"Illegal invocation\");\n }\n const args = [];\n for (let i = 0; i < arguments.length && i < 0; ++i) {\n args[i] = arguments[i];\n }\n return this[impl].toJSON.apply(this[impl], args);\n};\nObject.defineProperty(URL.prototype, \"href\", {\n get() {\n return this[impl].href;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].href = V;\n },\n enumerable: true,\n configurable: true\n});\n\nURL.prototype.toString = function () {\n if (!this || !module.exports.is(this)) {\n throw new TypeError(\"Illegal invocation\");\n }\n return this.href;\n};\n\nObject.defineProperty(URL.prototype, \"origin\", {\n get() {\n return this[impl].origin;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"protocol\", {\n get() {\n return this[impl].protocol;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].protocol = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"username\", {\n get() {\n return this[impl].username;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].username = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"password\", {\n get() {\n return this[impl].password;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].password = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"host\", 
{\n get() {\n return this[impl].host;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].host = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"hostname\", {\n get() {\n return this[impl].hostname;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].hostname = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"port\", {\n get() {\n return this[impl].port;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].port = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"pathname\", {\n get() {\n return this[impl].pathname;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].pathname = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"search\", {\n get() {\n return this[impl].search;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].search = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"hash\", {\n get() {\n return this[impl].hash;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].hash = V;\n },\n enumerable: true,\n configurable: true\n});\n\n\nmodule.exports = {\n is(obj) {\n return !!obj && obj[impl] instanceof Impl.implementation;\n },\n create(constructorArgs, privateData) {\n let obj = Object.create(URL.prototype);\n this.setup(obj, constructorArgs, privateData);\n return obj;\n },\n setup(obj, constructorArgs, privateData) {\n if (!privateData) privateData = {};\n privateData.wrapper = obj;\n\n obj[impl] = new Impl.implementation(constructorArgs, privateData);\n obj[impl][utils.wrapperSymbol] = obj;\n },\n interface: URL,\n expose: {\n Window: { URL: URL },\n Worker: { URL: URL }\n }\n};\n\n","\"use strict\";\n\nexports.URL = require(\"./URL\").interface;\nexports.serializeURL = require(\"./url-state-machine\").serializeURL;\nexports.serializeURLOrigin = require(\"./url-state-machine\").serializeURLOrigin;\nexports.basicURLParse = require(\"./url-state-machine\").basicURLParse;\nexports.setTheUsername = require(\"./url-state-machine\").setTheUsername;\nexports.setThePassword = require(\"./url-state-machine\").setThePassword;\nexports.serializeHost = require(\"./url-state-machine\").serializeHost;\nexports.serializeInteger = require(\"./url-state-machine\").serializeInteger;\nexports.parseURL = require(\"./url-state-machine\").parseURL;\n","\"use strict\";\r\nconst punycode = require(\"punycode\");\r\nconst tr46 = require(\"tr46\");\r\n\r\nconst specialSchemes = {\r\n ftp: 21,\r\n file: null,\r\n gopher: 70,\r\n http: 80,\r\n https: 443,\r\n ws: 80,\r\n wss: 443\r\n};\r\n\r\nconst failure = Symbol(\"failure\");\r\n\r\nfunction countSymbols(str) {\r\n return punycode.ucs2.decode(str).length;\r\n}\r\n\r\nfunction at(input, idx) {\r\n const c = input[idx];\r\n return isNaN(c) ? 
undefined : String.fromCodePoint(c);\r\n}\r\n\r\nfunction isASCIIDigit(c) {\r\n return c >= 0x30 && c <= 0x39;\r\n}\r\n\r\nfunction isASCIIAlpha(c) {\r\n return (c >= 0x41 && c <= 0x5A) || (c >= 0x61 && c <= 0x7A);\r\n}\r\n\r\nfunction isASCIIAlphanumeric(c) {\r\n return isASCIIAlpha(c) || isASCIIDigit(c);\r\n}\r\n\r\nfunction isASCIIHex(c) {\r\n return isASCIIDigit(c) || (c >= 0x41 && c <= 0x46) || (c >= 0x61 && c <= 0x66);\r\n}\r\n\r\nfunction isSingleDot(buffer) {\r\n return buffer === \".\" || buffer.toLowerCase() === \"%2e\";\r\n}\r\n\r\nfunction isDoubleDot(buffer) {\r\n buffer = buffer.toLowerCase();\r\n return buffer === \"..\" || buffer === \"%2e.\" || buffer === \".%2e\" || buffer === \"%2e%2e\";\r\n}\r\n\r\nfunction isWindowsDriveLetterCodePoints(cp1, cp2) {\r\n return isASCIIAlpha(cp1) && (cp2 === 58 || cp2 === 124);\r\n}\r\n\r\nfunction isWindowsDriveLetterString(string) {\r\n return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && (string[1] === \":\" || string[1] === \"|\");\r\n}\r\n\r\nfunction isNormalizedWindowsDriveLetterString(string) {\r\n return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && string[1] === \":\";\r\n}\r\n\r\nfunction containsForbiddenHostCodePoint(string) {\r\n return string.search(/\\u0000|\\u0009|\\u000A|\\u000D|\\u0020|#|%|\\/|:|\\?|@|\\[|\\\\|\\]/) !== -1;\r\n}\r\n\r\nfunction containsForbiddenHostCodePointExcludingPercent(string) {\r\n return string.search(/\\u0000|\\u0009|\\u000A|\\u000D|\\u0020|#|\\/|:|\\?|@|\\[|\\\\|\\]/) !== -1;\r\n}\r\n\r\nfunction isSpecialScheme(scheme) {\r\n return specialSchemes[scheme] !== undefined;\r\n}\r\n\r\nfunction isSpecial(url) {\r\n return isSpecialScheme(url.scheme);\r\n}\r\n\r\nfunction defaultPort(scheme) {\r\n return specialSchemes[scheme];\r\n}\r\n\r\nfunction percentEncode(c) {\r\n let hex = c.toString(16).toUpperCase();\r\n if (hex.length === 1) {\r\n hex = \"0\" + hex;\r\n }\r\n\r\n return \"%\" + hex;\r\n}\r\n\r\nfunction utf8PercentEncode(c) {\r\n const buf = new Buffer(c);\r\n\r\n let str = \"\";\r\n\r\n for (let i = 0; i < buf.length; ++i) {\r\n str += percentEncode(buf[i]);\r\n }\r\n\r\n return str;\r\n}\r\n\r\nfunction utf8PercentDecode(str) {\r\n const input = new Buffer(str);\r\n const output = [];\r\n for (let i = 0; i < input.length; ++i) {\r\n if (input[i] !== 37) {\r\n output.push(input[i]);\r\n } else if (input[i] === 37 && isASCIIHex(input[i + 1]) && isASCIIHex(input[i + 2])) {\r\n output.push(parseInt(input.slice(i + 1, i + 3).toString(), 16));\r\n i += 2;\r\n } else {\r\n output.push(input[i]);\r\n }\r\n }\r\n return new Buffer(output).toString();\r\n}\r\n\r\nfunction isC0ControlPercentEncode(c) {\r\n return c <= 0x1F || c > 0x7E;\r\n}\r\n\r\nconst extraPathPercentEncodeSet = new Set([32, 34, 35, 60, 62, 63, 96, 123, 125]);\r\nfunction isPathPercentEncode(c) {\r\n return isC0ControlPercentEncode(c) || extraPathPercentEncodeSet.has(c);\r\n}\r\n\r\nconst extraUserinfoPercentEncodeSet =\r\n new Set([47, 58, 59, 61, 64, 91, 92, 93, 94, 124]);\r\nfunction isUserinfoPercentEncode(c) {\r\n return isPathPercentEncode(c) || extraUserinfoPercentEncodeSet.has(c);\r\n}\r\n\r\nfunction percentEncodeChar(c, encodeSetPredicate) {\r\n const cStr = String.fromCodePoint(c);\r\n\r\n if (encodeSetPredicate(c)) {\r\n return utf8PercentEncode(cStr);\r\n }\r\n\r\n return cStr;\r\n}\r\n\r\nfunction parseIPv4Number(input) {\r\n let R = 10;\r\n\r\n if (input.length >= 2 && input.charAt(0) === \"0\" && input.charAt(1).toLowerCase() === \"x\") {\r\n input = input.substring(2);\r\n R = 
16;\r\n } else if (input.length >= 2 && input.charAt(0) === \"0\") {\r\n input = input.substring(1);\r\n R = 8;\r\n }\r\n\r\n if (input === \"\") {\r\n return 0;\r\n }\r\n\r\n const regex = R === 10 ? /[^0-9]/ : (R === 16 ? /[^0-9A-Fa-f]/ : /[^0-7]/);\r\n if (regex.test(input)) {\r\n return failure;\r\n }\r\n\r\n return parseInt(input, R);\r\n}\r\n\r\nfunction parseIPv4(input) {\r\n const parts = input.split(\".\");\r\n if (parts[parts.length - 1] === \"\") {\r\n if (parts.length > 1) {\r\n parts.pop();\r\n }\r\n }\r\n\r\n if (parts.length > 4) {\r\n return input;\r\n }\r\n\r\n const numbers = [];\r\n for (const part of parts) {\r\n if (part === \"\") {\r\n return input;\r\n }\r\n const n = parseIPv4Number(part);\r\n if (n === failure) {\r\n return input;\r\n }\r\n\r\n numbers.push(n);\r\n }\r\n\r\n for (let i = 0; i < numbers.length - 1; ++i) {\r\n if (numbers[i] > 255) {\r\n return failure;\r\n }\r\n }\r\n if (numbers[numbers.length - 1] >= Math.pow(256, 5 - numbers.length)) {\r\n return failure;\r\n }\r\n\r\n let ipv4 = numbers.pop();\r\n let counter = 0;\r\n\r\n for (const n of numbers) {\r\n ipv4 += n * Math.pow(256, 3 - counter);\r\n ++counter;\r\n }\r\n\r\n return ipv4;\r\n}\r\n\r\nfunction serializeIPv4(address) {\r\n let output = \"\";\r\n let n = address;\r\n\r\n for (let i = 1; i <= 4; ++i) {\r\n output = String(n % 256) + output;\r\n if (i !== 4) {\r\n output = \".\" + output;\r\n }\r\n n = Math.floor(n / 256);\r\n }\r\n\r\n return output;\r\n}\r\n\r\nfunction parseIPv6(input) {\r\n const address = [0, 0, 0, 0, 0, 0, 0, 0];\r\n let pieceIndex = 0;\r\n let compress = null;\r\n let pointer = 0;\r\n\r\n input = punycode.ucs2.decode(input);\r\n\r\n if (input[pointer] === 58) {\r\n if (input[pointer + 1] !== 58) {\r\n return failure;\r\n }\r\n\r\n pointer += 2;\r\n ++pieceIndex;\r\n compress = pieceIndex;\r\n }\r\n\r\n while (pointer < input.length) {\r\n if (pieceIndex === 8) {\r\n return failure;\r\n }\r\n\r\n if (input[pointer] === 58) {\r\n if (compress !== null) {\r\n return failure;\r\n }\r\n ++pointer;\r\n ++pieceIndex;\r\n compress = pieceIndex;\r\n continue;\r\n }\r\n\r\n let value = 0;\r\n let length = 0;\r\n\r\n while (length < 4 && isASCIIHex(input[pointer])) {\r\n value = value * 0x10 + parseInt(at(input, pointer), 16);\r\n ++pointer;\r\n ++length;\r\n }\r\n\r\n if (input[pointer] === 46) {\r\n if (length === 0) {\r\n return failure;\r\n }\r\n\r\n pointer -= length;\r\n\r\n if (pieceIndex > 6) {\r\n return failure;\r\n }\r\n\r\n let numbersSeen = 0;\r\n\r\n while (input[pointer] !== undefined) {\r\n let ipv4Piece = null;\r\n\r\n if (numbersSeen > 0) {\r\n if (input[pointer] === 46 && numbersSeen < 4) {\r\n ++pointer;\r\n } else {\r\n return failure;\r\n }\r\n }\r\n\r\n if (!isASCIIDigit(input[pointer])) {\r\n return failure;\r\n }\r\n\r\n while (isASCIIDigit(input[pointer])) {\r\n const number = parseInt(at(input, pointer));\r\n if (ipv4Piece === null) {\r\n ipv4Piece = number;\r\n } else if (ipv4Piece === 0) {\r\n return failure;\r\n } else {\r\n ipv4Piece = ipv4Piece * 10 + number;\r\n }\r\n if (ipv4Piece > 255) {\r\n return failure;\r\n }\r\n ++pointer;\r\n }\r\n\r\n address[pieceIndex] = address[pieceIndex] * 0x100 + ipv4Piece;\r\n\r\n ++numbersSeen;\r\n\r\n if (numbersSeen === 2 || numbersSeen === 4) {\r\n ++pieceIndex;\r\n }\r\n }\r\n\r\n if (numbersSeen !== 4) {\r\n return failure;\r\n }\r\n\r\n break;\r\n } else if (input[pointer] === 58) {\r\n ++pointer;\r\n if (input[pointer] === undefined) {\r\n return failure;\r\n }\r\n } else if (input[pointer] !== 
undefined) {\r\n return failure;\r\n }\r\n\r\n address[pieceIndex] = value;\r\n ++pieceIndex;\r\n }\r\n\r\n if (compress !== null) {\r\n let swaps = pieceIndex - compress;\r\n pieceIndex = 7;\r\n while (pieceIndex !== 0 && swaps > 0) {\r\n const temp = address[compress + swaps - 1];\r\n address[compress + swaps - 1] = address[pieceIndex];\r\n address[pieceIndex] = temp;\r\n --pieceIndex;\r\n --swaps;\r\n }\r\n } else if (compress === null && pieceIndex !== 8) {\r\n return failure;\r\n }\r\n\r\n return address;\r\n}\r\n\r\nfunction serializeIPv6(address) {\r\n let output = \"\";\r\n const seqResult = findLongestZeroSequence(address);\r\n const compress = seqResult.idx;\r\n let ignore0 = false;\r\n\r\n for (let pieceIndex = 0; pieceIndex <= 7; ++pieceIndex) {\r\n if (ignore0 && address[pieceIndex] === 0) {\r\n continue;\r\n } else if (ignore0) {\r\n ignore0 = false;\r\n }\r\n\r\n if (compress === pieceIndex) {\r\n const separator = pieceIndex === 0 ? \"::\" : \":\";\r\n output += separator;\r\n ignore0 = true;\r\n continue;\r\n }\r\n\r\n output += address[pieceIndex].toString(16);\r\n\r\n if (pieceIndex !== 7) {\r\n output += \":\";\r\n }\r\n }\r\n\r\n return output;\r\n}\r\n\r\nfunction parseHost(input, isSpecialArg) {\r\n if (input[0] === \"[\") {\r\n if (input[input.length - 1] !== \"]\") {\r\n return failure;\r\n }\r\n\r\n return parseIPv6(input.substring(1, input.length - 1));\r\n }\r\n\r\n if (!isSpecialArg) {\r\n return parseOpaqueHost(input);\r\n }\r\n\r\n const domain = utf8PercentDecode(input);\r\n const asciiDomain = tr46.toASCII(domain, false, tr46.PROCESSING_OPTIONS.NONTRANSITIONAL, false);\r\n if (asciiDomain === null) {\r\n return failure;\r\n }\r\n\r\n if (containsForbiddenHostCodePoint(asciiDomain)) {\r\n return failure;\r\n }\r\n\r\n const ipv4Host = parseIPv4(asciiDomain);\r\n if (typeof ipv4Host === \"number\" || ipv4Host === failure) {\r\n return ipv4Host;\r\n }\r\n\r\n return asciiDomain;\r\n}\r\n\r\nfunction parseOpaqueHost(input) {\r\n if (containsForbiddenHostCodePointExcludingPercent(input)) {\r\n return failure;\r\n }\r\n\r\n let output = \"\";\r\n const decoded = punycode.ucs2.decode(input);\r\n for (let i = 0; i < decoded.length; ++i) {\r\n output += percentEncodeChar(decoded[i], isC0ControlPercentEncode);\r\n }\r\n return output;\r\n}\r\n\r\nfunction findLongestZeroSequence(arr) {\r\n let maxIdx = null;\r\n let maxLen = 1; // only find elements > 1\r\n let currStart = null;\r\n let currLen = 0;\r\n\r\n for (let i = 0; i < arr.length; ++i) {\r\n if (arr[i] !== 0) {\r\n if (currLen > maxLen) {\r\n maxIdx = currStart;\r\n maxLen = currLen;\r\n }\r\n\r\n currStart = null;\r\n currLen = 0;\r\n } else {\r\n if (currStart === null) {\r\n currStart = i;\r\n }\r\n ++currLen;\r\n }\r\n }\r\n\r\n // if trailing zeros\r\n if (currLen > maxLen) {\r\n maxIdx = currStart;\r\n maxLen = currLen;\r\n }\r\n\r\n return {\r\n idx: maxIdx,\r\n len: maxLen\r\n };\r\n}\r\n\r\nfunction serializeHost(host) {\r\n if (typeof host === \"number\") {\r\n return serializeIPv4(host);\r\n }\r\n\r\n // IPv6 serializer\r\n if (host instanceof Array) {\r\n return \"[\" + serializeIPv6(host) + \"]\";\r\n }\r\n\r\n return host;\r\n}\r\n\r\nfunction trimControlChars(url) {\r\n return url.replace(/^[\\u0000-\\u001F\\u0020]+|[\\u0000-\\u001F\\u0020]+$/g, \"\");\r\n}\r\n\r\nfunction trimTabAndNewline(url) {\r\n return url.replace(/\\u0009|\\u000A|\\u000D/g, \"\");\r\n}\r\n\r\nfunction shortenPath(url) {\r\n const path = url.path;\r\n if (path.length === 0) {\r\n return;\r\n }\r\n if (url.scheme === 
\"file\" && path.length === 1 && isNormalizedWindowsDriveLetter(path[0])) {\r\n return;\r\n }\r\n\r\n path.pop();\r\n}\r\n\r\nfunction includesCredentials(url) {\r\n return url.username !== \"\" || url.password !== \"\";\r\n}\r\n\r\nfunction cannotHaveAUsernamePasswordPort(url) {\r\n return url.host === null || url.host === \"\" || url.cannotBeABaseURL || url.scheme === \"file\";\r\n}\r\n\r\nfunction isNormalizedWindowsDriveLetter(string) {\r\n return /^[A-Za-z]:$/.test(string);\r\n}\r\n\r\nfunction URLStateMachine(input, base, encodingOverride, url, stateOverride) {\r\n this.pointer = 0;\r\n this.input = input;\r\n this.base = base || null;\r\n this.encodingOverride = encodingOverride || \"utf-8\";\r\n this.stateOverride = stateOverride;\r\n this.url = url;\r\n this.failure = false;\r\n this.parseError = false;\r\n\r\n if (!this.url) {\r\n this.url = {\r\n scheme: \"\",\r\n username: \"\",\r\n password: \"\",\r\n host: null,\r\n port: null,\r\n path: [],\r\n query: null,\r\n fragment: null,\r\n\r\n cannotBeABaseURL: false\r\n };\r\n\r\n const res = trimControlChars(this.input);\r\n if (res !== this.input) {\r\n this.parseError = true;\r\n }\r\n this.input = res;\r\n }\r\n\r\n const res = trimTabAndNewline(this.input);\r\n if (res !== this.input) {\r\n this.parseError = true;\r\n }\r\n this.input = res;\r\n\r\n this.state = stateOverride || \"scheme start\";\r\n\r\n this.buffer = \"\";\r\n this.atFlag = false;\r\n this.arrFlag = false;\r\n this.passwordTokenSeenFlag = false;\r\n\r\n this.input = punycode.ucs2.decode(this.input);\r\n\r\n for (; this.pointer <= this.input.length; ++this.pointer) {\r\n const c = this.input[this.pointer];\r\n const cStr = isNaN(c) ? undefined : String.fromCodePoint(c);\r\n\r\n // exec state machine\r\n const ret = this[\"parse \" + this.state](c, cStr);\r\n if (!ret) {\r\n break; // terminate algorithm\r\n } else if (ret === failure) {\r\n this.failure = true;\r\n break;\r\n }\r\n }\r\n}\r\n\r\nURLStateMachine.prototype[\"parse scheme start\"] = function parseSchemeStart(c, cStr) {\r\n if (isASCIIAlpha(c)) {\r\n this.buffer += cStr.toLowerCase();\r\n this.state = \"scheme\";\r\n } else if (!this.stateOverride) {\r\n this.state = \"no scheme\";\r\n --this.pointer;\r\n } else {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse scheme\"] = function parseScheme(c, cStr) {\r\n if (isASCIIAlphanumeric(c) || c === 43 || c === 45 || c === 46) {\r\n this.buffer += cStr.toLowerCase();\r\n } else if (c === 58) {\r\n if (this.stateOverride) {\r\n if (isSpecial(this.url) && !isSpecialScheme(this.buffer)) {\r\n return false;\r\n }\r\n\r\n if (!isSpecial(this.url) && isSpecialScheme(this.buffer)) {\r\n return false;\r\n }\r\n\r\n if ((includesCredentials(this.url) || this.url.port !== null) && this.buffer === \"file\") {\r\n return false;\r\n }\r\n\r\n if (this.url.scheme === \"file\" && (this.url.host === \"\" || this.url.host === null)) {\r\n return false;\r\n }\r\n }\r\n this.url.scheme = this.buffer;\r\n this.buffer = \"\";\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n if (this.url.scheme === \"file\") {\r\n if (this.input[this.pointer + 1] !== 47 || this.input[this.pointer + 2] !== 47) {\r\n this.parseError = true;\r\n }\r\n this.state = \"file\";\r\n } else if (isSpecial(this.url) && this.base !== null && this.base.scheme === this.url.scheme) {\r\n this.state = \"special relative or authority\";\r\n } else if (isSpecial(this.url)) {\r\n this.state = \"special authority slashes\";\r\n } 
else if (this.input[this.pointer + 1] === 47) {\r\n this.state = \"path or authority\";\r\n ++this.pointer;\r\n } else {\r\n this.url.cannotBeABaseURL = true;\r\n this.url.path.push(\"\");\r\n this.state = \"cannot-be-a-base-URL path\";\r\n }\r\n } else if (!this.stateOverride) {\r\n this.buffer = \"\";\r\n this.state = \"no scheme\";\r\n this.pointer = -1;\r\n } else {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse no scheme\"] = function parseNoScheme(c) {\r\n if (this.base === null || (this.base.cannotBeABaseURL && c !== 35)) {\r\n return failure;\r\n } else if (this.base.cannotBeABaseURL && c === 35) {\r\n this.url.scheme = this.base.scheme;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n this.url.fragment = \"\";\r\n this.url.cannotBeABaseURL = true;\r\n this.state = \"fragment\";\r\n } else if (this.base.scheme === \"file\") {\r\n this.state = \"file\";\r\n --this.pointer;\r\n } else {\r\n this.state = \"relative\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse special relative or authority\"] = function parseSpecialRelativeOrAuthority(c) {\r\n if (c === 47 && this.input[this.pointer + 1] === 47) {\r\n this.state = \"special authority ignore slashes\";\r\n ++this.pointer;\r\n } else {\r\n this.parseError = true;\r\n this.state = \"relative\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse path or authority\"] = function parsePathOrAuthority(c) {\r\n if (c === 47) {\r\n this.state = \"authority\";\r\n } else {\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse relative\"] = function parseRelative(c) {\r\n this.url.scheme = this.base.scheme;\r\n if (isNaN(c)) {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n } else if (c === 47) {\r\n this.state = \"relative slash\";\r\n } else if (c === 63) {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (c === 35) {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else if (isSpecial(this.url) && c === 92) {\r\n this.parseError = true;\r\n this.state = \"relative slash\";\r\n } else {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice(0, this.base.path.length - 1);\r\n\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse relative slash\"] = function parseRelativeSlash(c) {\r\n if (isSpecial(this.url) && (c === 47 || c === 92)) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"special authority ignore slashes\";\r\n } else if (c === 47) {\r\n this.state = 
\"authority\";\r\n } else {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse special authority slashes\"] = function parseSpecialAuthoritySlashes(c) {\r\n if (c === 47 && this.input[this.pointer + 1] === 47) {\r\n this.state = \"special authority ignore slashes\";\r\n ++this.pointer;\r\n } else {\r\n this.parseError = true;\r\n this.state = \"special authority ignore slashes\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse special authority ignore slashes\"] = function parseSpecialAuthorityIgnoreSlashes(c) {\r\n if (c !== 47 && c !== 92) {\r\n this.state = \"authority\";\r\n --this.pointer;\r\n } else {\r\n this.parseError = true;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse authority\"] = function parseAuthority(c, cStr) {\r\n if (c === 64) {\r\n this.parseError = true;\r\n if (this.atFlag) {\r\n this.buffer = \"%40\" + this.buffer;\r\n }\r\n this.atFlag = true;\r\n\r\n // careful, this is based on buffer and has its own pointer (this.pointer != pointer) and inner chars\r\n const len = countSymbols(this.buffer);\r\n for (let pointer = 0; pointer < len; ++pointer) {\r\n const codePoint = this.buffer.codePointAt(pointer);\r\n\r\n if (codePoint === 58 && !this.passwordTokenSeenFlag) {\r\n this.passwordTokenSeenFlag = true;\r\n continue;\r\n }\r\n const encodedCodePoints = percentEncodeChar(codePoint, isUserinfoPercentEncode);\r\n if (this.passwordTokenSeenFlag) {\r\n this.url.password += encodedCodePoints;\r\n } else {\r\n this.url.username += encodedCodePoints;\r\n }\r\n }\r\n this.buffer = \"\";\r\n } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||\r\n (isSpecial(this.url) && c === 92)) {\r\n if (this.atFlag && this.buffer === \"\") {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n this.pointer -= countSymbols(this.buffer) + 1;\r\n this.buffer = \"\";\r\n this.state = \"host\";\r\n } else {\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse hostname\"] =\r\nURLStateMachine.prototype[\"parse host\"] = function parseHostName(c, cStr) {\r\n if (this.stateOverride && this.url.scheme === \"file\") {\r\n --this.pointer;\r\n this.state = \"file host\";\r\n } else if (c === 58 && !this.arrFlag) {\r\n if (this.buffer === \"\") {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n const host = parseHost(this.buffer, isSpecial(this.url));\r\n if (host === failure) {\r\n return failure;\r\n }\r\n\r\n this.url.host = host;\r\n this.buffer = \"\";\r\n this.state = \"port\";\r\n if (this.stateOverride === \"hostname\") {\r\n return false;\r\n }\r\n } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||\r\n (isSpecial(this.url) && c === 92)) {\r\n --this.pointer;\r\n if (isSpecial(this.url) && this.buffer === \"\") {\r\n this.parseError = true;\r\n return failure;\r\n } else if (this.stateOverride && this.buffer === \"\" &&\r\n (includesCredentials(this.url) || this.url.port !== null)) {\r\n this.parseError = true;\r\n return false;\r\n }\r\n\r\n const host = parseHost(this.buffer, isSpecial(this.url));\r\n if (host === failure) {\r\n return failure;\r\n }\r\n\r\n this.url.host = host;\r\n this.buffer = \"\";\r\n this.state = \"path start\";\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n } else {\r\n if (c === 
91) {\r\n this.arrFlag = true;\r\n } else if (c === 93) {\r\n this.arrFlag = false;\r\n }\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse port\"] = function parsePort(c, cStr) {\r\n if (isASCIIDigit(c)) {\r\n this.buffer += cStr;\r\n } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||\r\n (isSpecial(this.url) && c === 92) ||\r\n this.stateOverride) {\r\n if (this.buffer !== \"\") {\r\n const port = parseInt(this.buffer);\r\n if (port > Math.pow(2, 16) - 1) {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n this.url.port = port === defaultPort(this.url.scheme) ? null : port;\r\n this.buffer = \"\";\r\n }\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n this.state = \"path start\";\r\n --this.pointer;\r\n } else {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nconst fileOtherwiseCodePoints = new Set([47, 92, 63, 35]);\r\n\r\nURLStateMachine.prototype[\"parse file\"] = function parseFile(c) {\r\n this.url.scheme = \"file\";\r\n\r\n if (c === 47 || c === 92) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"file slash\";\r\n } else if (this.base !== null && this.base.scheme === \"file\") {\r\n if (isNaN(c)) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n } else if (c === 63) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (c === 35) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else {\r\n if (this.input.length - this.pointer - 1 === 0 || // remaining consists of 0 code points\r\n !isWindowsDriveLetterCodePoints(c, this.input[this.pointer + 1]) ||\r\n (this.input.length - this.pointer - 1 >= 2 && // remaining has at least 2 code points\r\n !fileOtherwiseCodePoints.has(this.input[this.pointer + 2]))) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n shortenPath(this.url);\r\n } else {\r\n this.parseError = true;\r\n }\r\n\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n } else {\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse file slash\"] = function parseFileSlash(c) {\r\n if (c === 47 || c === 92) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"file host\";\r\n } else {\r\n if (this.base !== null && this.base.scheme === \"file\") {\r\n if (isNormalizedWindowsDriveLetterString(this.base.path[0])) {\r\n this.url.path.push(this.base.path[0]);\r\n } else {\r\n this.url.host = this.base.host;\r\n }\r\n }\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse file host\"] = function parseFileHost(c, cStr) {\r\n if (isNaN(c) || c === 47 || c === 92 || c === 63 || c === 35) {\r\n --this.pointer;\r\n if (!this.stateOverride && isWindowsDriveLetterString(this.buffer)) {\r\n this.parseError = true;\r\n this.state = \"path\";\r\n } else if (this.buffer === \"\") {\r\n this.url.host = \"\";\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n this.state = \"path start\";\r\n } else {\r\n let host = parseHost(this.buffer, isSpecial(this.url));\r\n if (host === failure) {\r\n return failure;\r\n }\r\n if (host === \"localhost\") {\r\n host = 
\"\";\r\n }\r\n this.url.host = host;\r\n\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n\r\n this.buffer = \"\";\r\n this.state = \"path start\";\r\n }\r\n } else {\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse path start\"] = function parsePathStart(c) {\r\n if (isSpecial(this.url)) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"path\";\r\n\r\n if (c !== 47 && c !== 92) {\r\n --this.pointer;\r\n }\r\n } else if (!this.stateOverride && c === 63) {\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (!this.stateOverride && c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else if (c !== undefined) {\r\n this.state = \"path\";\r\n if (c !== 47) {\r\n --this.pointer;\r\n }\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse path\"] = function parsePath(c) {\r\n if (isNaN(c) || c === 47 || (isSpecial(this.url) && c === 92) ||\r\n (!this.stateOverride && (c === 63 || c === 35))) {\r\n if (isSpecial(this.url) && c === 92) {\r\n this.parseError = true;\r\n }\r\n\r\n if (isDoubleDot(this.buffer)) {\r\n shortenPath(this.url);\r\n if (c !== 47 && !(isSpecial(this.url) && c === 92)) {\r\n this.url.path.push(\"\");\r\n }\r\n } else if (isSingleDot(this.buffer) && c !== 47 &&\r\n !(isSpecial(this.url) && c === 92)) {\r\n this.url.path.push(\"\");\r\n } else if (!isSingleDot(this.buffer)) {\r\n if (this.url.scheme === \"file\" && this.url.path.length === 0 && isWindowsDriveLetterString(this.buffer)) {\r\n if (this.url.host !== \"\" && this.url.host !== null) {\r\n this.parseError = true;\r\n this.url.host = \"\";\r\n }\r\n this.buffer = this.buffer[0] + \":\";\r\n }\r\n this.url.path.push(this.buffer);\r\n }\r\n this.buffer = \"\";\r\n if (this.url.scheme === \"file\" && (c === undefined || c === 63 || c === 35)) {\r\n while (this.url.path.length > 1 && this.url.path[0] === \"\") {\r\n this.parseError = true;\r\n this.url.path.shift();\r\n }\r\n }\r\n if (c === 63) {\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n }\r\n if (c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n }\r\n } else {\r\n // TODO: If c is not a URL code point and not \"%\", parse error.\r\n\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n this.buffer += percentEncodeChar(c, isPathPercentEncode);\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse cannot-be-a-base-URL path\"] = function parseCannotBeABaseURLPath(c) {\r\n if (c === 63) {\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else {\r\n // TODO: Add: not a URL code point\r\n if (!isNaN(c) && c !== 37) {\r\n this.parseError = true;\r\n }\r\n\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n if (!isNaN(c)) {\r\n this.url.path[0] = this.url.path[0] + percentEncodeChar(c, isC0ControlPercentEncode);\r\n }\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse query\"] = function parseQuery(c, cStr) {\r\n if (isNaN(c) || (!this.stateOverride && c === 35)) {\r\n if (!isSpecial(this.url) || this.url.scheme === \"ws\" || this.url.scheme === \"wss\") {\r\n this.encodingOverride = \"utf-8\";\r\n }\r\n\r\n const buffer = new 
Buffer(this.buffer); // TODO: Use encoding override instead\r\n for (let i = 0; i < buffer.length; ++i) {\r\n if (buffer[i] < 0x21 || buffer[i] > 0x7E || buffer[i] === 0x22 || buffer[i] === 0x23 ||\r\n buffer[i] === 0x3C || buffer[i] === 0x3E) {\r\n this.url.query += percentEncode(buffer[i]);\r\n } else {\r\n this.url.query += String.fromCodePoint(buffer[i]);\r\n }\r\n }\r\n\r\n this.buffer = \"\";\r\n if (c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n }\r\n } else {\r\n // TODO: If c is not a URL code point and not \"%\", parse error.\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse fragment\"] = function parseFragment(c) {\r\n if (isNaN(c)) { // do nothing\r\n } else if (c === 0x0) {\r\n this.parseError = true;\r\n } else {\r\n // TODO: If c is not a URL code point and not \"%\", parse error.\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n this.url.fragment += percentEncodeChar(c, isC0ControlPercentEncode);\r\n }\r\n\r\n return true;\r\n};\r\n\r\nfunction serializeURL(url, excludeFragment) {\r\n let output = url.scheme + \":\";\r\n if (url.host !== null) {\r\n output += \"//\";\r\n\r\n if (url.username !== \"\" || url.password !== \"\") {\r\n output += url.username;\r\n if (url.password !== \"\") {\r\n output += \":\" + url.password;\r\n }\r\n output += \"@\";\r\n }\r\n\r\n output += serializeHost(url.host);\r\n\r\n if (url.port !== null) {\r\n output += \":\" + url.port;\r\n }\r\n } else if (url.host === null && url.scheme === \"file\") {\r\n output += \"//\";\r\n }\r\n\r\n if (url.cannotBeABaseURL) {\r\n output += url.path[0];\r\n } else {\r\n for (const string of url.path) {\r\n output += \"/\" + string;\r\n }\r\n }\r\n\r\n if (url.query !== null) {\r\n output += \"?\" + url.query;\r\n }\r\n\r\n if (!excludeFragment && url.fragment !== null) {\r\n output += \"#\" + url.fragment;\r\n }\r\n\r\n return output;\r\n}\r\n\r\nfunction serializeOrigin(tuple) {\r\n let result = tuple.scheme + \"://\";\r\n result += serializeHost(tuple.host);\r\n\r\n if (tuple.port !== null) {\r\n result += \":\" + tuple.port;\r\n }\r\n\r\n return result;\r\n}\r\n\r\nmodule.exports.serializeURL = serializeURL;\r\n\r\nmodule.exports.serializeURLOrigin = function (url) {\r\n // https://url.spec.whatwg.org/#concept-url-origin\r\n switch (url.scheme) {\r\n case \"blob\":\r\n try {\r\n return module.exports.serializeURLOrigin(module.exports.parseURL(url.path[0]));\r\n } catch (e) {\r\n // serializing an opaque origin returns \"null\"\r\n return \"null\";\r\n }\r\n case \"ftp\":\r\n case \"gopher\":\r\n case \"http\":\r\n case \"https\":\r\n case \"ws\":\r\n case \"wss\":\r\n return serializeOrigin({\r\n scheme: url.scheme,\r\n host: url.host,\r\n port: url.port\r\n });\r\n case \"file\":\r\n // spec says \"exercise to the reader\", chrome says \"file://\"\r\n return \"file://\";\r\n default:\r\n // serializing an opaque origin returns \"null\"\r\n return \"null\";\r\n }\r\n};\r\n\r\nmodule.exports.basicURLParse = function (input, options) {\r\n if (options === undefined) {\r\n options = {};\r\n }\r\n\r\n const usm = new URLStateMachine(input, options.baseURL, options.encodingOverride, options.url, options.stateOverride);\r\n if (usm.failure) {\r\n return \"failure\";\r\n 
}\r\n\r\n return usm.url;\r\n};\r\n\r\nmodule.exports.setTheUsername = function (url, username) {\r\n url.username = \"\";\r\n const decoded = punycode.ucs2.decode(username);\r\n for (let i = 0; i < decoded.length; ++i) {\r\n url.username += percentEncodeChar(decoded[i], isUserinfoPercentEncode);\r\n }\r\n};\r\n\r\nmodule.exports.setThePassword = function (url, password) {\r\n url.password = \"\";\r\n const decoded = punycode.ucs2.decode(password);\r\n for (let i = 0; i < decoded.length; ++i) {\r\n url.password += percentEncodeChar(decoded[i], isUserinfoPercentEncode);\r\n }\r\n};\r\n\r\nmodule.exports.serializeHost = serializeHost;\r\n\r\nmodule.exports.cannotHaveAUsernamePasswordPort = cannotHaveAUsernamePasswordPort;\r\n\r\nmodule.exports.serializeInteger = function (integer) {\r\n return String(integer);\r\n};\r\n\r\nmodule.exports.parseURL = function (input, options) {\r\n if (options === undefined) {\r\n options = {};\r\n }\r\n\r\n // We don't handle blobs, so this just delegates:\r\n return module.exports.basicURLParse(input, { baseURL: options.baseURL, encodingOverride: options.encodingOverride });\r\n};\r\n","\"use strict\";\n\nmodule.exports.mixin = function mixin(target, source) {\n const keys = Object.getOwnPropertyNames(source);\n for (let i = 0; i < keys.length; ++i) {\n Object.defineProperty(target, keys[i], Object.getOwnPropertyDescriptor(source, keys[i]));\n }\n};\n\nmodule.exports.wrapperSymbol = Symbol(\"wrapper\");\nmodule.exports.implSymbol = Symbol(\"impl\");\n\nmodule.exports.wrapperForImpl = function (impl) {\n return impl[module.exports.wrapperSymbol];\n};\n\nmodule.exports.implForWrapper = function (wrapper) {\n return wrapper[module.exports.implSymbol];\n};\n\n","// Returns a wrapper function that returns a wrapped callback\n// The wrapper function should do some stuff, and return a\n// presumably different callback function.\n// This makes sure that own properties are retained, so that\n// decorations and such are not lost along the way.\nmodule.exports = wrappy\nfunction wrappy (fn, cb) {\n if (fn && cb) return wrappy(fn)(cb)\n\n if (typeof fn !== 'function')\n throw new TypeError('need wrapper function')\n\n Object.keys(fn).forEach(function (k) {\n wrapper[k] = fn[k]\n })\n\n return wrapper\n\n function wrapper() {\n var args = new Array(arguments.length)\n for (var i = 0; i < args.length; i++) {\n args[i] = arguments[i]\n }\n var ret = fn.apply(this, args)\n var cb = args[args.length-1]\n if (typeof ret === 'function' && ret !== cb) {\n Object.keys(cb).forEach(function (k) {\n ret[k] = cb[k]\n })\n }\n return ret\n }\n}\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar Scalar = require('../nodes/Scalar.js');\nvar resolveBlockMap = require('./resolve-block-map.js');\nvar resolveBlockSeq = require('./resolve-block-seq.js');\nvar resolveFlowCollection = require('./resolve-flow-collection.js');\n\nfunction composeCollection(CN, ctx, token, tagToken, onError) {\n let coll;\n switch (token.type) {\n case 'block-map': {\n coll = resolveBlockMap.resolveBlockMap(CN, ctx, token, onError);\n break;\n }\n case 'block-seq': {\n coll = resolveBlockSeq.resolveBlockSeq(CN, ctx, token, onError);\n break;\n }\n case 'flow-collection': {\n coll = resolveFlowCollection.resolveFlowCollection(CN, ctx, token, onError);\n break;\n }\n }\n if (!tagToken)\n return coll;\n const tagName = ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg));\n if (!tagName)\n return coll;\n // Cast needed due to: 
https://github.com/Microsoft/TypeScript/issues/3841\n const Coll = coll.constructor;\n if (tagName === '!' || tagName === Coll.tagName) {\n coll.tag = Coll.tagName;\n return coll;\n }\n const expType = Node.isMap(coll) ? 'map' : 'seq';\n let tag = ctx.schema.tags.find(t => t.collection === expType && t.tag === tagName);\n if (!tag) {\n const kt = ctx.schema.knownTags[tagName];\n if (kt && kt.collection === expType) {\n ctx.schema.tags.push(Object.assign({}, kt, { default: false }));\n tag = kt;\n }\n else {\n onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, true);\n coll.tag = tagName;\n return coll;\n }\n }\n const res = tag.resolve(coll, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg), ctx.options);\n const node = Node.isNode(res)\n ? res\n : new Scalar.Scalar(res);\n node.range = coll.range;\n node.tag = tagName;\n if (tag?.format)\n node.format = tag.format;\n return node;\n}\n\nexports.composeCollection = composeCollection;\n","'use strict';\n\nvar Document = require('../doc/Document.js');\nvar composeNode = require('./compose-node.js');\nvar resolveEnd = require('./resolve-end.js');\nvar resolveProps = require('./resolve-props.js');\n\nfunction composeDoc(options, directives, { offset, start, value, end }, onError) {\n const opts = Object.assign({ _directives: directives }, options);\n const doc = new Document.Document(undefined, opts);\n const ctx = {\n atRoot: true,\n directives: doc.directives,\n options: doc.options,\n schema: doc.schema\n };\n const props = resolveProps.resolveProps(start, {\n indicator: 'doc-start',\n next: value ?? end?.[0],\n offset,\n onError,\n startOnNewline: true\n });\n if (props.found) {\n doc.directives.docStart = true;\n if (value &&\n (value.type === 'block-map' || value.type === 'block-seq') &&\n !props.hasNewline)\n onError(props.end, 'MISSING_CHAR', 'Block collection cannot start on same line with directives-end marker');\n }\n doc.contents = value\n ? composeNode.composeNode(ctx, value, props, onError)\n : composeNode.composeEmptyNode(ctx, props.end, start, null, props, onError);\n const contentEnd = doc.contents.range[2];\n const re = resolveEnd.resolveEnd(end, contentEnd, false, onError);\n if (re.comment)\n doc.comment = re.comment;\n doc.range = [offset, contentEnd, re.offset];\n return doc;\n}\n\nexports.composeDoc = composeDoc;\n","'use strict';\n\nvar Alias = require('../nodes/Alias.js');\nvar composeCollection = require('./compose-collection.js');\nvar composeScalar = require('./compose-scalar.js');\nvar resolveEnd = require('./resolve-end.js');\nvar utilEmptyScalarPosition = require('./util-empty-scalar-position.js');\n\nconst CN = { composeNode, composeEmptyNode };\nfunction composeNode(ctx, token, props, onError) {\n const { spaceBefore, comment, anchor, tag } = props;\n let node;\n let isSrcToken = true;\n switch (token.type) {\n case 'alias':\n node = composeAlias(ctx, token, onError);\n if (anchor || tag)\n onError(token, 'ALIAS_PROPS', 'An alias node must not specify any properties');\n break;\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar':\n case 'block-scalar':\n node = composeScalar.composeScalar(ctx, token, tag, onError);\n if (anchor)\n node.anchor = anchor.source.substring(1);\n break;\n case 'block-map':\n case 'block-seq':\n case 'flow-collection':\n node = composeCollection.composeCollection(CN, ctx, token, tag, onError);\n if (anchor)\n node.anchor = anchor.source.substring(1);\n break;\n default: {\n const message = token.type === 'error'\n ? 
token.message\n : `Unsupported token (type: ${token.type})`;\n onError(token, 'UNEXPECTED_TOKEN', message);\n node = composeEmptyNode(ctx, token.offset, undefined, null, props, onError);\n isSrcToken = false;\n }\n }\n if (anchor && node.anchor === '')\n onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string');\n if (spaceBefore)\n node.spaceBefore = true;\n if (comment) {\n if (token.type === 'scalar' && token.source === '')\n node.comment = comment;\n else\n node.commentBefore = comment;\n }\n // @ts-expect-error Type checking misses meaning of isSrcToken\n if (ctx.options.keepSourceTokens && isSrcToken)\n node.srcToken = token;\n return node;\n}\nfunction composeEmptyNode(ctx, offset, before, pos, { spaceBefore, comment, anchor, tag }, onError) {\n const token = {\n type: 'scalar',\n offset: utilEmptyScalarPosition.emptyScalarPosition(offset, before, pos),\n indent: -1,\n source: ''\n };\n const node = composeScalar.composeScalar(ctx, token, tag, onError);\n if (anchor) {\n node.anchor = anchor.source.substring(1);\n if (node.anchor === '')\n onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string');\n }\n if (spaceBefore)\n node.spaceBefore = true;\n if (comment)\n node.comment = comment;\n return node;\n}\nfunction composeAlias({ options }, { offset, source, end }, onError) {\n const alias = new Alias.Alias(source.substring(1));\n if (alias.source === '')\n onError(offset, 'BAD_ALIAS', 'Alias cannot be an empty string');\n if (alias.source.endsWith(':'))\n onError(offset + source.length - 1, 'BAD_ALIAS', 'Alias ending in : is ambiguous', true);\n const valueEnd = offset + source.length;\n const re = resolveEnd.resolveEnd(end, valueEnd, options.strict, onError);\n alias.range = [offset, valueEnd, re.offset];\n if (re.comment)\n alias.comment = re.comment;\n return alias;\n}\n\nexports.composeEmptyNode = composeEmptyNode;\nexports.composeNode = composeNode;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar Scalar = require('../nodes/Scalar.js');\nvar resolveBlockScalar = require('./resolve-block-scalar.js');\nvar resolveFlowScalar = require('./resolve-flow-scalar.js');\n\nfunction composeScalar(ctx, token, tagToken, onError) {\n const { value, type, comment, range } = token.type === 'block-scalar'\n ? resolveBlockScalar.resolveBlockScalar(token, ctx.options.strict, onError)\n : resolveFlowScalar.resolveFlowScalar(token, ctx.options.strict, onError);\n const tagName = tagToken\n ? ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg))\n : null;\n const tag = tagToken && tagName\n ? findScalarTagByName(ctx.schema, value, tagName, tagToken, onError)\n : token.type === 'scalar'\n ? findScalarTagByTest(ctx, value, token, onError)\n : ctx.schema[Node.SCALAR];\n let scalar;\n try {\n const res = tag.resolve(value, msg => onError(tagToken ?? token, 'TAG_RESOLVE_FAILED', msg), ctx.options);\n scalar = Node.isScalar(res) ? res : new Scalar.Scalar(res);\n }\n catch (error) {\n const msg = error instanceof Error ? error.message : String(error);\n onError(tagToken ?? 
token, 'TAG_RESOLVE_FAILED', msg);\n scalar = new Scalar.Scalar(value);\n }\n scalar.range = range;\n scalar.source = value;\n if (type)\n scalar.type = type;\n if (tagName)\n scalar.tag = tagName;\n if (tag.format)\n scalar.format = tag.format;\n if (comment)\n scalar.comment = comment;\n return scalar;\n}\nfunction findScalarTagByName(schema, value, tagName, tagToken, onError) {\n if (tagName === '!')\n return schema[Node.SCALAR]; // non-specific tag\n const matchWithTest = [];\n for (const tag of schema.tags) {\n if (!tag.collection && tag.tag === tagName) {\n if (tag.default && tag.test)\n matchWithTest.push(tag);\n else\n return tag;\n }\n }\n for (const tag of matchWithTest)\n if (tag.test?.test(value))\n return tag;\n const kt = schema.knownTags[tagName];\n if (kt && !kt.collection) {\n // Ensure that the known tag is available for stringifying,\n // but does not get used by default.\n schema.tags.push(Object.assign({}, kt, { default: false, test: undefined }));\n return kt;\n }\n onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, tagName !== 'tag:yaml.org,2002:str');\n return schema[Node.SCALAR];\n}\nfunction findScalarTagByTest({ directives, schema }, value, token, onError) {\n const tag = schema.tags.find(tag => tag.default && tag.test?.test(value)) || schema[Node.SCALAR];\n if (schema.compat) {\n const compat = schema.compat.find(tag => tag.default && tag.test?.test(value)) ??\n schema[Node.SCALAR];\n if (tag.tag !== compat.tag) {\n const ts = directives.tagString(tag.tag);\n const cs = directives.tagString(compat.tag);\n const msg = `Value may be parsed as either ${ts} or ${cs}`;\n onError(token, 'TAG_RESOLVE_FAILED', msg, true);\n }\n }\n return tag;\n}\n\nexports.composeScalar = composeScalar;\n","'use strict';\n\nvar directives = require('../doc/directives.js');\nvar Document = require('../doc/Document.js');\nvar errors = require('../errors.js');\nvar Node = require('../nodes/Node.js');\nvar composeDoc = require('./compose-doc.js');\nvar resolveEnd = require('./resolve-end.js');\n\nfunction getErrorPos(src) {\n if (typeof src === 'number')\n return [src, src + 1];\n if (Array.isArray(src))\n return src.length === 2 ? src : [src[0], src[1]];\n const { offset, source } = src;\n return [offset, offset + (typeof source === 'string' ? source.length : 1)];\n}\nfunction parsePrelude(prelude) {\n let comment = '';\n let atComment = false;\n let afterEmptyLine = false;\n for (let i = 0; i < prelude.length; ++i) {\n const source = prelude[i];\n switch (source[0]) {\n case '#':\n comment +=\n (comment === '' ? '' : afterEmptyLine ? 
'\\n\\n' : '\\n') +\n (source.substring(1) || ' ');\n atComment = true;\n afterEmptyLine = false;\n break;\n case '%':\n if (prelude[i + 1]?.[0] !== '#')\n i += 1;\n atComment = false;\n break;\n default:\n // This may be wrong after doc-end, but in that case it doesn't matter\n if (!atComment)\n afterEmptyLine = true;\n atComment = false;\n }\n }\n return { comment, afterEmptyLine };\n}\n/**\n * Compose a stream of CST nodes into a stream of YAML Documents.\n *\n * ```ts\n * import { Composer, Parser } from 'yaml'\n *\n * const src: string = ...\n * const tokens = new Parser().parse(src)\n * const docs = new Composer().compose(tokens)\n * ```\n */\nclass Composer {\n constructor(options = {}) {\n this.doc = null;\n this.atDirectives = false;\n this.prelude = [];\n this.errors = [];\n this.warnings = [];\n this.onError = (source, code, message, warning) => {\n const pos = getErrorPos(source);\n if (warning)\n this.warnings.push(new errors.YAMLWarning(pos, code, message));\n else\n this.errors.push(new errors.YAMLParseError(pos, code, message));\n };\n // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing\n this.directives = new directives.Directives({ version: options.version || '1.2' });\n this.options = options;\n }\n decorate(doc, afterDoc) {\n const { comment, afterEmptyLine } = parsePrelude(this.prelude);\n //console.log({ dc: doc.comment, prelude, comment })\n if (comment) {\n const dc = doc.contents;\n if (afterDoc) {\n doc.comment = doc.comment ? `${doc.comment}\\n${comment}` : comment;\n }\n else if (afterEmptyLine || doc.directives.docStart || !dc) {\n doc.commentBefore = comment;\n }\n else if (Node.isCollection(dc) && !dc.flow && dc.items.length > 0) {\n let it = dc.items[0];\n if (Node.isPair(it))\n it = it.key;\n const cb = it.commentBefore;\n it.commentBefore = cb ? `${comment}\\n${cb}` : comment;\n }\n else {\n const cb = dc.commentBefore;\n dc.commentBefore = cb ? `${comment}\\n${cb}` : comment;\n }\n }\n if (afterDoc) {\n Array.prototype.push.apply(doc.errors, this.errors);\n Array.prototype.push.apply(doc.warnings, this.warnings);\n }\n else {\n doc.errors = this.errors;\n doc.warnings = this.warnings;\n }\n this.prelude = [];\n this.errors = [];\n this.warnings = [];\n }\n /**\n * Current stream status information.\n *\n * Mostly useful at the end of input for an empty stream.\n */\n streamInfo() {\n return {\n comment: parsePrelude(this.prelude).comment,\n directives: this.directives,\n errors: this.errors,\n warnings: this.warnings\n };\n }\n /**\n * Compose tokens into documents.\n *\n * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document.\n * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly.\n */\n *compose(tokens, forceDoc = false, endOffset = -1) {\n for (const token of tokens)\n yield* this.next(token);\n yield* this.end(forceDoc, endOffset);\n }\n /** Advance the composer by one CST token. 
*/\n *next(token) {\n if (process.env.LOG_STREAM)\n console.dir(token, { depth: null });\n switch (token.type) {\n case 'directive':\n this.directives.add(token.source, (offset, message, warning) => {\n const pos = getErrorPos(token);\n pos[0] += offset;\n this.onError(pos, 'BAD_DIRECTIVE', message, warning);\n });\n this.prelude.push(token.source);\n this.atDirectives = true;\n break;\n case 'document': {\n const doc = composeDoc.composeDoc(this.options, this.directives, token, this.onError);\n if (this.atDirectives && !doc.directives.docStart)\n this.onError(token, 'MISSING_CHAR', 'Missing directives-end/doc-start indicator line');\n this.decorate(doc, false);\n if (this.doc)\n yield this.doc;\n this.doc = doc;\n this.atDirectives = false;\n break;\n }\n case 'byte-order-mark':\n case 'space':\n break;\n case 'comment':\n case 'newline':\n this.prelude.push(token.source);\n break;\n case 'error': {\n const msg = token.source\n ? `${token.message}: ${JSON.stringify(token.source)}`\n : token.message;\n const error = new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg);\n if (this.atDirectives || !this.doc)\n this.errors.push(error);\n else\n this.doc.errors.push(error);\n break;\n }\n case 'doc-end': {\n if (!this.doc) {\n const msg = 'Unexpected doc-end without preceding document';\n this.errors.push(new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg));\n break;\n }\n this.doc.directives.docEnd = true;\n const end = resolveEnd.resolveEnd(token.end, token.offset + token.source.length, this.doc.options.strict, this.onError);\n this.decorate(this.doc, true);\n if (end.comment) {\n const dc = this.doc.comment;\n this.doc.comment = dc ? `${dc}\\n${end.comment}` : end.comment;\n }\n this.doc.range[2] = end.offset;\n break;\n }\n default:\n this.errors.push(new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', `Unsupported token ${token.type}`));\n }\n }\n /**\n * Call at end of input to yield any remaining document.\n *\n * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document.\n * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly.\n */\n *end(forceDoc = false, endOffset = -1) {\n if (this.doc) {\n this.decorate(this.doc, true);\n yield this.doc;\n this.doc = null;\n }\n else if (forceDoc) {\n const opts = Object.assign({ _directives: this.directives }, this.options);\n const doc = new Document.Document(undefined, opts);\n if (this.atDirectives)\n this.onError(endOffset, 'MISSING_CHAR', 'Missing directives-end indicator line');\n doc.range = [0, endOffset, endOffset];\n this.decorate(doc, false);\n yield doc;\n }\n }\n}\n\nexports.Composer = Composer;\n","'use strict';\n\nvar Pair = require('../nodes/Pair.js');\nvar YAMLMap = require('../nodes/YAMLMap.js');\nvar resolveProps = require('./resolve-props.js');\nvar utilContainsNewline = require('./util-contains-newline.js');\nvar utilFlowIndentCheck = require('./util-flow-indent-check.js');\nvar utilMapIncludes = require('./util-map-includes.js');\n\nconst startColMsg = 'All mapping items must start at the same column';\nfunction resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError) {\n const map = new YAMLMap.YAMLMap(ctx.schema);\n if (ctx.atRoot)\n ctx.atRoot = false;\n let offset = bm.offset;\n for (const collItem of bm.items) {\n const { start, key, sep, value } = collItem;\n // key properties\n 
const keyProps = resolveProps.resolveProps(start, {\n indicator: 'explicit-key-ind',\n next: key ?? sep?.[0],\n offset,\n onError,\n startOnNewline: true\n });\n const implicitKey = !keyProps.found;\n if (implicitKey) {\n if (key) {\n if (key.type === 'block-seq')\n onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'A block sequence may not be used as an implicit map key');\n else if ('indent' in key && key.indent !== bm.indent)\n onError(offset, 'BAD_INDENT', startColMsg);\n }\n if (!keyProps.anchor && !keyProps.tag && !sep) {\n // TODO: assert being at last item?\n if (keyProps.comment) {\n if (map.comment)\n map.comment += '\\n' + keyProps.comment;\n else\n map.comment = keyProps.comment;\n }\n continue;\n }\n if (keyProps.hasNewlineAfterProp || utilContainsNewline.containsNewline(key)) {\n onError(key ?? start[start.length - 1], 'MULTILINE_IMPLICIT_KEY', 'Implicit keys need to be on a single line');\n }\n }\n else if (keyProps.found?.indent !== bm.indent) {\n onError(offset, 'BAD_INDENT', startColMsg);\n }\n // key value\n const keyStart = keyProps.end;\n const keyNode = key\n ? composeNode(ctx, key, keyProps, onError)\n : composeEmptyNode(ctx, keyStart, start, null, keyProps, onError);\n if (ctx.schema.compat)\n utilFlowIndentCheck.flowIndentCheck(bm.indent, key, onError);\n if (utilMapIncludes.mapIncludes(ctx, map.items, keyNode))\n onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique');\n // value properties\n const valueProps = resolveProps.resolveProps(sep ?? [], {\n indicator: 'map-value-ind',\n next: value,\n offset: keyNode.range[2],\n onError,\n startOnNewline: !key || key.type === 'block-scalar'\n });\n offset = valueProps.end;\n if (valueProps.found) {\n if (implicitKey) {\n if (value?.type === 'block-map' && !valueProps.hasNewline)\n onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'Nested mappings are not allowed in compact mappings');\n if (ctx.options.strict &&\n keyProps.start < valueProps.found.offset - 1024)\n onError(keyNode.range, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit block mapping key');\n }\n // value value\n const valueNode = value\n ? composeNode(ctx, value, valueProps, onError)\n : composeEmptyNode(ctx, offset, sep, null, valueProps, onError);\n if (ctx.schema.compat)\n utilFlowIndentCheck.flowIndentCheck(bm.indent, value, onError);\n offset = valueNode.range[2];\n const pair = new Pair.Pair(keyNode, valueNode);\n if (ctx.options.keepSourceTokens)\n pair.srcToken = collItem;\n map.items.push(pair);\n }\n else {\n // key with no value\n if (implicitKey)\n onError(keyNode.range, 'MISSING_CHAR', 'Implicit map keys need to be followed by map values');\n if (valueProps.comment) {\n if (keyNode.comment)\n keyNode.comment += '\\n' + valueProps.comment;\n else\n keyNode.comment = valueProps.comment;\n }\n const pair = new Pair.Pair(keyNode);\n if (ctx.options.keepSourceTokens)\n pair.srcToken = collItem;\n map.items.push(pair);\n }\n }\n map.range = [bm.offset, offset, offset];\n return map;\n}\n\nexports.resolveBlockMap = resolveBlockMap;\n","'use strict';\n\nvar Scalar = require('../nodes/Scalar.js');\n\nfunction resolveBlockScalar(scalar, strict, onError) {\n const start = scalar.offset;\n const header = parseBlockScalarHeader(scalar, strict, onError);\n if (!header)\n return { value: '', type: null, comment: '', range: [start, start, start] };\n const type = header.mode === '>' ? Scalar.Scalar.BLOCK_FOLDED : Scalar.Scalar.BLOCK_LITERAL;\n const lines = scalar.source ? 
splitLines(scalar.source) : [];\n // determine the end of content & start of chomping\n let chompStart = lines.length;\n for (let i = lines.length - 1; i >= 0; --i) {\n const content = lines[i][1];\n if (content === '' || content === '\\r')\n chompStart = i;\n else\n break;\n }\n // shortcut for empty contents\n if (chompStart === 0) {\n const value = header.chomp === '+' && lines.length > 0\n ? '\\n'.repeat(Math.max(1, lines.length - 1))\n : '';\n let end = start + header.length;\n if (scalar.source)\n end += scalar.source.length;\n return { value, type, comment: header.comment, range: [start, end, end] };\n }\n // find the indentation level to trim from start\n let trimIndent = scalar.indent + header.indent;\n let offset = scalar.offset + header.length;\n let contentStart = 0;\n for (let i = 0; i < chompStart; ++i) {\n const [indent, content] = lines[i];\n if (content === '' || content === '\\r') {\n if (header.indent === 0 && indent.length > trimIndent)\n trimIndent = indent.length;\n }\n else {\n if (indent.length < trimIndent) {\n const message = 'Block scalars with more-indented leading empty lines must use an explicit indentation indicator';\n onError(offset + indent.length, 'MISSING_CHAR', message);\n }\n if (header.indent === 0)\n trimIndent = indent.length;\n contentStart = i;\n break;\n }\n offset += indent.length + content.length + 1;\n }\n // include trailing more-indented empty lines in content\n for (let i = lines.length - 1; i >= chompStart; --i) {\n if (lines[i][0].length > trimIndent)\n chompStart = i + 1;\n }\n let value = '';\n let sep = '';\n let prevMoreIndented = false;\n // leading whitespace is kept intact\n for (let i = 0; i < contentStart; ++i)\n value += lines[i][0].slice(trimIndent) + '\\n';\n for (let i = contentStart; i < chompStart; ++i) {\n let [indent, content] = lines[i];\n offset += indent.length + content.length + 1;\n const crlf = content[content.length - 1] === '\\r';\n if (crlf)\n content = content.slice(0, -1);\n /* istanbul ignore if already caught in lexer */\n if (content && indent.length < trimIndent) {\n const src = header.indent\n ? 'explicit indentation indicator'\n : 'first line';\n const message = `Block scalar lines must not be less indented than their ${src}`;\n onError(offset - content.length - (crlf ? 
2 : 1), 'BAD_INDENT', message);\n indent = '';\n }\n if (type === Scalar.Scalar.BLOCK_LITERAL) {\n value += sep + indent.slice(trimIndent) + content;\n sep = '\\n';\n }\n else if (indent.length > trimIndent || content[0] === '\\t') {\n // more-indented content within a folded block\n if (sep === ' ')\n sep = '\\n';\n else if (!prevMoreIndented && sep === '\\n')\n sep = '\\n\\n';\n value += sep + indent.slice(trimIndent) + content;\n sep = '\\n';\n prevMoreIndented = true;\n }\n else if (content === '') {\n // empty line\n if (sep === '\\n')\n value += '\\n';\n else\n sep = '\\n';\n }\n else {\n value += sep + content;\n sep = ' ';\n prevMoreIndented = false;\n }\n }\n switch (header.chomp) {\n case '-':\n break;\n case '+':\n for (let i = chompStart; i < lines.length; ++i)\n value += '\\n' + lines[i][0].slice(trimIndent);\n if (value[value.length - 1] !== '\\n')\n value += '\\n';\n break;\n default:\n value += '\\n';\n }\n const end = start + header.length + scalar.source.length;\n return { value, type, comment: header.comment, range: [start, end, end] };\n}\nfunction parseBlockScalarHeader({ offset, props }, strict, onError) {\n /* istanbul ignore if should not happen */\n if (props[0].type !== 'block-scalar-header') {\n onError(props[0], 'IMPOSSIBLE', 'Block scalar header not found');\n return null;\n }\n const { source } = props[0];\n const mode = source[0];\n let indent = 0;\n let chomp = '';\n let error = -1;\n for (let i = 1; i < source.length; ++i) {\n const ch = source[i];\n if (!chomp && (ch === '-' || ch === '+'))\n chomp = ch;\n else {\n const n = Number(ch);\n if (!indent && n)\n indent = n;\n else if (error === -1)\n error = offset + i;\n }\n }\n if (error !== -1)\n onError(error, 'UNEXPECTED_TOKEN', `Block scalar header includes extra characters: ${source}`);\n let hasSpace = false;\n let comment = '';\n let length = source.length;\n for (let i = 1; i < props.length; ++i) {\n const token = props[i];\n switch (token.type) {\n case 'space':\n hasSpace = true;\n // fallthrough\n case 'newline':\n length += token.source.length;\n break;\n case 'comment':\n if (strict && !hasSpace) {\n const message = 'Comments must be separated from other tokens by white space characters';\n onError(token, 'MISSING_CHAR', message);\n }\n length += token.source.length;\n comment = token.source.substring(1);\n break;\n case 'error':\n onError(token, 'UNEXPECTED_TOKEN', token.message);\n length += token.source.length;\n break;\n /* istanbul ignore next should not happen */\n default: {\n const message = `Unexpected token in block scalar header: ${token.type}`;\n onError(token, 'UNEXPECTED_TOKEN', message);\n const ts = token.source;\n if (ts && typeof ts === 'string')\n length += ts.length;\n }\n }\n }\n return { mode, indent, chomp, comment, length };\n}\n/** @returns Array of lines split up as `[indent, content]` */\nfunction splitLines(source) {\n const split = source.split(/\\n( *)/);\n const first = split[0];\n const m = first.match(/^( *)/);\n const line0 = m?.[1]\n ? 
[m[1], first.slice(m[1].length)]\n : ['', first];\n const lines = [line0];\n for (let i = 1; i < split.length; i += 2)\n lines.push([split[i], split[i + 1]]);\n return lines;\n}\n\nexports.resolveBlockScalar = resolveBlockScalar;\n","'use strict';\n\nvar YAMLSeq = require('../nodes/YAMLSeq.js');\nvar resolveProps = require('./resolve-props.js');\nvar utilFlowIndentCheck = require('./util-flow-indent-check.js');\n\nfunction resolveBlockSeq({ composeNode, composeEmptyNode }, ctx, bs, onError) {\n const seq = new YAMLSeq.YAMLSeq(ctx.schema);\n if (ctx.atRoot)\n ctx.atRoot = false;\n let offset = bs.offset;\n for (const { start, value } of bs.items) {\n const props = resolveProps.resolveProps(start, {\n indicator: 'seq-item-ind',\n next: value,\n offset,\n onError,\n startOnNewline: true\n });\n offset = props.end;\n if (!props.found) {\n if (props.anchor || props.tag || value) {\n if (value && value.type === 'block-seq')\n onError(offset, 'BAD_INDENT', 'All sequence items must start at the same column');\n else\n onError(offset, 'MISSING_CHAR', 'Sequence item without - indicator');\n }\n else {\n // TODO: assert being at last item?\n if (props.comment)\n seq.comment = props.comment;\n continue;\n }\n }\n const node = value\n ? composeNode(ctx, value, props, onError)\n : composeEmptyNode(ctx, offset, start, null, props, onError);\n if (ctx.schema.compat)\n utilFlowIndentCheck.flowIndentCheck(bs.indent, value, onError);\n offset = node.range[2];\n seq.items.push(node);\n }\n seq.range = [bs.offset, offset, offset];\n return seq;\n}\n\nexports.resolveBlockSeq = resolveBlockSeq;\n","'use strict';\n\nfunction resolveEnd(end, offset, reqSpace, onError) {\n let comment = '';\n if (end) {\n let hasSpace = false;\n let sep = '';\n for (const token of end) {\n const { source, type } = token;\n switch (type) {\n case 'space':\n hasSpace = true;\n break;\n case 'comment': {\n if (reqSpace && !hasSpace)\n onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters');\n const cb = source.substring(1) || ' ';\n if (!comment)\n comment = cb;\n else\n comment += sep + cb;\n sep = '';\n break;\n }\n case 'newline':\n if (comment)\n sep += source;\n hasSpace = true;\n break;\n default:\n onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${type} at node end`);\n }\n offset += source.length;\n }\n }\n return { comment, offset };\n}\n\nexports.resolveEnd = resolveEnd;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar Pair = require('../nodes/Pair.js');\nvar YAMLMap = require('../nodes/YAMLMap.js');\nvar YAMLSeq = require('../nodes/YAMLSeq.js');\nvar resolveEnd = require('./resolve-end.js');\nvar resolveProps = require('./resolve-props.js');\nvar utilContainsNewline = require('./util-contains-newline.js');\nvar utilMapIncludes = require('./util-map-includes.js');\n\nconst blockMsg = 'Block collections are not allowed within flow collections';\nconst isBlock = (token) => token && (token.type === 'block-map' || token.type === 'block-seq');\nfunction resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onError) {\n const isMap = fc.start.source === '{';\n const fcName = isMap ? 'flow map' : 'flow sequence';\n const coll = isMap\n ? 
new YAMLMap.YAMLMap(ctx.schema)\n : new YAMLSeq.YAMLSeq(ctx.schema);\n coll.flow = true;\n const atRoot = ctx.atRoot;\n if (atRoot)\n ctx.atRoot = false;\n let offset = fc.offset + fc.start.source.length;\n for (let i = 0; i < fc.items.length; ++i) {\n const collItem = fc.items[i];\n const { start, key, sep, value } = collItem;\n const props = resolveProps.resolveProps(start, {\n flow: fcName,\n indicator: 'explicit-key-ind',\n next: key ?? sep?.[0],\n offset,\n onError,\n startOnNewline: false\n });\n if (!props.found) {\n if (!props.anchor && !props.tag && !sep && !value) {\n if (i === 0 && props.comma)\n onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`);\n else if (i < fc.items.length - 1)\n onError(props.start, 'UNEXPECTED_TOKEN', `Unexpected empty item in ${fcName}`);\n if (props.comment) {\n if (coll.comment)\n coll.comment += '\\n' + props.comment;\n else\n coll.comment = props.comment;\n }\n offset = props.end;\n continue;\n }\n if (!isMap && ctx.options.strict && utilContainsNewline.containsNewline(key))\n onError(key, // checked by containsNewline()\n 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line');\n }\n if (i === 0) {\n if (props.comma)\n onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`);\n }\n else {\n if (!props.comma)\n onError(props.start, 'MISSING_CHAR', `Missing , between ${fcName} items`);\n if (props.comment) {\n let prevItemComment = '';\n loop: for (const st of start) {\n switch (st.type) {\n case 'comma':\n case 'space':\n break;\n case 'comment':\n prevItemComment = st.source.substring(1);\n break loop;\n default:\n break loop;\n }\n }\n if (prevItemComment) {\n let prev = coll.items[coll.items.length - 1];\n if (Node.isPair(prev))\n prev = prev.value ?? prev.key;\n if (prev.comment)\n prev.comment += '\\n' + prevItemComment;\n else\n prev.comment = prevItemComment;\n props.comment = props.comment.substring(prevItemComment.length + 1);\n }\n }\n }\n if (!isMap && !sep && !props.found) {\n // item is a value in a seq\n // → key & sep are empty, start does not include ? or :\n const valueNode = value\n ? composeNode(ctx, value, props, onError)\n : composeEmptyNode(ctx, props.end, sep, null, props, onError);\n coll.items.push(valueNode);\n offset = valueNode.range[2];\n if (isBlock(value))\n onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg);\n }\n else {\n // item is a key+value pair\n // key value\n const keyStart = props.end;\n const keyNode = key\n ? composeNode(ctx, key, props, onError)\n : composeEmptyNode(ctx, keyStart, start, null, props, onError);\n if (isBlock(key))\n onError(keyNode.range, 'BLOCK_IN_FLOW', blockMsg);\n // value properties\n const valueProps = resolveProps.resolveProps(sep ?? 
[], {\n flow: fcName,\n indicator: 'map-value-ind',\n next: value,\n offset: keyNode.range[2],\n onError,\n startOnNewline: false\n });\n if (valueProps.found) {\n if (!isMap && !props.found && ctx.options.strict) {\n if (sep)\n for (const st of sep) {\n if (st === valueProps.found)\n break;\n if (st.type === 'newline') {\n onError(st, 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line');\n break;\n }\n }\n if (props.start < valueProps.found.offset - 1024)\n onError(valueProps.found, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit flow sequence key');\n }\n }\n else if (value) {\n if ('source' in value && value.source && value.source[0] === ':')\n onError(value, 'MISSING_CHAR', `Missing space after : in ${fcName}`);\n else\n onError(valueProps.start, 'MISSING_CHAR', `Missing , or : between ${fcName} items`);\n }\n // value value\n const valueNode = value\n ? composeNode(ctx, value, valueProps, onError)\n : valueProps.found\n ? composeEmptyNode(ctx, valueProps.end, sep, null, valueProps, onError)\n : null;\n if (valueNode) {\n if (isBlock(value))\n onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg);\n }\n else if (valueProps.comment) {\n if (keyNode.comment)\n keyNode.comment += '\\n' + valueProps.comment;\n else\n keyNode.comment = valueProps.comment;\n }\n const pair = new Pair.Pair(keyNode, valueNode);\n if (ctx.options.keepSourceTokens)\n pair.srcToken = collItem;\n if (isMap) {\n const map = coll;\n if (utilMapIncludes.mapIncludes(ctx, map.items, keyNode))\n onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique');\n map.items.push(pair);\n }\n else {\n const map = new YAMLMap.YAMLMap(ctx.schema);\n map.flow = true;\n map.items.push(pair);\n coll.items.push(map);\n }\n offset = valueNode ? valueNode.range[2] : valueProps.end;\n }\n }\n const expectedEnd = isMap ? '}' : ']';\n const [ce, ...ee] = fc.end;\n let cePos = offset;\n if (ce && ce.source === expectedEnd)\n cePos = ce.offset + ce.source.length;\n else {\n const name = fcName[0].toUpperCase() + fcName.substring(1);\n const msg = atRoot\n ? `${name} must end with a ${expectedEnd}`\n : `${name} in block collection must be sufficiently indented and end with a ${expectedEnd}`;\n onError(offset, atRoot ? 
'MISSING_CHAR' : 'BAD_INDENT', msg);\n if (ce && ce.source.length !== 1)\n ee.unshift(ce);\n }\n if (ee.length > 0) {\n const end = resolveEnd.resolveEnd(ee, cePos, ctx.options.strict, onError);\n if (end.comment) {\n if (coll.comment)\n coll.comment += '\\n' + end.comment;\n else\n coll.comment = end.comment;\n }\n coll.range = [fc.offset, cePos, end.offset];\n }\n else {\n coll.range = [fc.offset, cePos, cePos];\n }\n return coll;\n}\n\nexports.resolveFlowCollection = resolveFlowCollection;\n","'use strict';\n\nvar Scalar = require('../nodes/Scalar.js');\nvar resolveEnd = require('./resolve-end.js');\n\nfunction resolveFlowScalar(scalar, strict, onError) {\n const { offset, type, source, end } = scalar;\n let _type;\n let value;\n const _onError = (rel, code, msg) => onError(offset + rel, code, msg);\n switch (type) {\n case 'scalar':\n _type = Scalar.Scalar.PLAIN;\n value = plainValue(source, _onError);\n break;\n case 'single-quoted-scalar':\n _type = Scalar.Scalar.QUOTE_SINGLE;\n value = singleQuotedValue(source, _onError);\n break;\n case 'double-quoted-scalar':\n _type = Scalar.Scalar.QUOTE_DOUBLE;\n value = doubleQuotedValue(source, _onError);\n break;\n /* istanbul ignore next should not happen */\n default:\n onError(scalar, 'UNEXPECTED_TOKEN', `Expected a flow scalar value, but found: ${type}`);\n return {\n value: '',\n type: null,\n comment: '',\n range: [offset, offset + source.length, offset + source.length]\n };\n }\n const valueEnd = offset + source.length;\n const re = resolveEnd.resolveEnd(end, valueEnd, strict, onError);\n return {\n value,\n type: _type,\n comment: re.comment,\n range: [offset, valueEnd, re.offset]\n };\n}\nfunction plainValue(source, onError) {\n let badChar = '';\n switch (source[0]) {\n /* istanbul ignore next should not happen */\n case '\\t':\n badChar = 'a tab character';\n break;\n case ',':\n badChar = 'flow indicator character ,';\n break;\n case '%':\n badChar = 'directive indicator character %';\n break;\n case '|':\n case '>': {\n badChar = `block scalar indicator ${source[0]}`;\n break;\n }\n case '@':\n case '`': {\n badChar = `reserved character ${source[0]}`;\n break;\n }\n }\n if (badChar)\n onError(0, 'BAD_SCALAR_START', `Plain value cannot start with ${badChar}`);\n return foldLines(source);\n}\nfunction singleQuotedValue(source, onError) {\n if (source[source.length - 1] !== \"'\" || source.length === 1)\n onError(source.length, 'MISSING_CHAR', \"Missing closing 'quote\");\n return foldLines(source.slice(1, -1)).replace(/''/g, \"'\");\n}\nfunction foldLines(source) {\n /**\n * The negative lookbehind here and in the `re` RegExp is to\n * prevent causing a polynomial search time in certain cases.\n *\n * The try-catch is for Safari, which doesn't support this yet:\n * https://caniuse.com/js-regexp-lookbehind\n */\n let first, line;\n try {\n first = new RegExp('(.*?)(? wsStart ? 
source.slice(wsStart, i + 1) : ch;\n }\n else {\n res += ch;\n }\n }\n if (source[source.length - 1] !== '\"' || source.length === 1)\n onError(source.length, 'MISSING_CHAR', 'Missing closing \"quote');\n return res;\n}\n/**\n * Fold a single newline into a space, multiple newlines to N - 1 newlines.\n * Presumes `source[offset] === '\\n'`\n */\nfunction foldNewline(source, offset) {\n let fold = '';\n let ch = source[offset + 1];\n while (ch === ' ' || ch === '\\t' || ch === '\\n' || ch === '\\r') {\n if (ch === '\\r' && source[offset + 2] !== '\\n')\n break;\n if (ch === '\\n')\n fold += '\\n';\n offset += 1;\n ch = source[offset + 1];\n }\n if (!fold)\n fold = ' ';\n return { fold, offset };\n}\nconst escapeCodes = {\n '0': '\\0',\n a: '\\x07',\n b: '\\b',\n e: '\\x1b',\n f: '\\f',\n n: '\\n',\n r: '\\r',\n t: '\\t',\n v: '\\v',\n N: '\\u0085',\n _: '\\u00a0',\n L: '\\u2028',\n P: '\\u2029',\n ' ': ' ',\n '\"': '\"',\n '/': '/',\n '\\\\': '\\\\',\n '\\t': '\\t'\n};\nfunction parseCharCode(source, offset, length, onError) {\n const cc = source.substr(offset, length);\n const ok = cc.length === length && /^[0-9a-fA-F]+$/.test(cc);\n const code = ok ? parseInt(cc, 16) : NaN;\n if (isNaN(code)) {\n const raw = source.substr(offset - 2, length + 2);\n onError(offset - 2, 'BAD_DQ_ESCAPE', `Invalid escape sequence ${raw}`);\n return raw;\n }\n return String.fromCodePoint(code);\n}\n\nexports.resolveFlowScalar = resolveFlowScalar;\n","'use strict';\n\nfunction resolveProps(tokens, { flow, indicator, next, offset, onError, startOnNewline }) {\n let spaceBefore = false;\n let atNewline = startOnNewline;\n let hasSpace = startOnNewline;\n let comment = '';\n let commentSep = '';\n let hasNewline = false;\n let hasNewlineAfterProp = false;\n let reqSpace = false;\n let anchor = null;\n let tag = null;\n let comma = null;\n let found = null;\n let start = null;\n for (const token of tokens) {\n if (reqSpace) {\n if (token.type !== 'space' &&\n token.type !== 'newline' &&\n token.type !== 'comma')\n onError(token.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space');\n reqSpace = false;\n }\n switch (token.type) {\n case 'space':\n // At the doc level, tabs at line start may be parsed\n // as leading white space rather than indentation.\n // In a flow collection, only the parser handles indent.\n if (!flow &&\n atNewline &&\n indicator !== 'doc-start' &&\n token.source[0] === '\\t')\n onError(token, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation');\n hasSpace = true;\n break;\n case 'comment': {\n if (!hasSpace)\n onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters');\n const cb = token.source.substring(1) || ' ';\n if (!comment)\n comment = cb;\n else\n comment += commentSep + cb;\n commentSep = '';\n atNewline = false;\n break;\n }\n case 'newline':\n if (atNewline) {\n if (comment)\n comment += token.source;\n else\n spaceBefore = true;\n }\n else\n commentSep += token.source;\n atNewline = true;\n hasNewline = true;\n if (anchor || tag)\n hasNewlineAfterProp = true;\n hasSpace = true;\n break;\n case 'anchor':\n if (anchor)\n onError(token, 'MULTIPLE_ANCHORS', 'A node can have at most one anchor');\n if (token.source.endsWith(':'))\n onError(token.offset + token.source.length - 1, 'BAD_ALIAS', 'Anchor ending in : is ambiguous', true);\n anchor = token;\n if (start === null)\n start = token.offset;\n atNewline = false;\n hasSpace = false;\n reqSpace = true;\n break;\n case 'tag': {\n if (tag)\n 
onError(token, 'MULTIPLE_TAGS', 'A node can have at most one tag');\n tag = token;\n if (start === null)\n start = token.offset;\n atNewline = false;\n hasSpace = false;\n reqSpace = true;\n break;\n }\n case indicator:\n // Could here handle preceding comments differently\n if (anchor || tag)\n onError(token, 'BAD_PROP_ORDER', `Anchors and tags must be after the ${token.source} indicator`);\n if (found)\n onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.source} in ${flow ?? 'collection'}`);\n found = token;\n atNewline = false;\n hasSpace = false;\n break;\n case 'comma':\n if (flow) {\n if (comma)\n onError(token, 'UNEXPECTED_TOKEN', `Unexpected , in ${flow}`);\n comma = token;\n atNewline = false;\n hasSpace = false;\n break;\n }\n // else fallthrough\n default:\n onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.type} token`);\n atNewline = false;\n hasSpace = false;\n }\n }\n const last = tokens[tokens.length - 1];\n const end = last ? last.offset + last.source.length : offset;\n if (reqSpace &&\n next &&\n next.type !== 'space' &&\n next.type !== 'newline' &&\n next.type !== 'comma' &&\n (next.type !== 'scalar' || next.source !== ''))\n onError(next.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space');\n return {\n comma,\n found,\n spaceBefore,\n comment,\n hasNewline,\n hasNewlineAfterProp,\n anchor,\n tag,\n end,\n start: start ?? end\n };\n}\n\nexports.resolveProps = resolveProps;\n","'use strict';\n\nfunction containsNewline(key) {\n if (!key)\n return null;\n switch (key.type) {\n case 'alias':\n case 'scalar':\n case 'double-quoted-scalar':\n case 'single-quoted-scalar':\n if (key.source.includes('\\n'))\n return true;\n if (key.end)\n for (const st of key.end)\n if (st.type === 'newline')\n return true;\n return false;\n case 'flow-collection':\n for (const it of key.items) {\n for (const st of it.start)\n if (st.type === 'newline')\n return true;\n if (it.sep)\n for (const st of it.sep)\n if (st.type === 'newline')\n return true;\n if (containsNewline(it.key) || containsNewline(it.value))\n return true;\n }\n return false;\n default:\n return true;\n }\n}\n\nexports.containsNewline = containsNewline;\n","'use strict';\n\nfunction emptyScalarPosition(offset, before, pos) {\n if (before) {\n if (pos === null)\n pos = before.length;\n for (let i = pos - 1; i >= 0; --i) {\n let st = before[i];\n switch (st.type) {\n case 'space':\n case 'comment':\n case 'newline':\n offset -= st.source.length;\n continue;\n }\n // Technically, an empty scalar is immediately after the last non-empty\n // node, but it's more useful to place it after any whitespace.\n st = before[++i];\n while (st?.type === 'space') {\n offset += st.source.length;\n st = before[++i];\n }\n break;\n }\n }\n return offset;\n}\n\nexports.emptyScalarPosition = emptyScalarPosition;\n","'use strict';\n\nvar utilContainsNewline = require('./util-contains-newline.js');\n\nfunction flowIndentCheck(indent, fc, onError) {\n if (fc?.type === 'flow-collection') {\n const end = fc.end[0];\n if (end.indent === indent &&\n (end.source === ']' || end.source === '}') &&\n utilContainsNewline.containsNewline(fc)) {\n const msg = 'Flow end indicator should be more indented than parent';\n onError(end, 'BAD_INDENT', msg, true);\n }\n }\n}\n\nexports.flowIndentCheck = flowIndentCheck;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\n\nfunction mapIncludes(ctx, items, search) {\n const { uniqueKeys } = ctx.options;\n if (uniqueKeys === false)\n return false;\n const 
isEqual = typeof uniqueKeys === 'function'\n ? uniqueKeys\n : (a, b) => a === b ||\n (Node.isScalar(a) &&\n Node.isScalar(b) &&\n a.value === b.value &&\n !(a.value === '<<' && ctx.schema.merge));\n return items.some(pair => isEqual(pair.key, search));\n}\n\nexports.mapIncludes = mapIncludes;\n","'use strict';\n\nvar Alias = require('../nodes/Alias.js');\nvar Collection = require('../nodes/Collection.js');\nvar Node = require('../nodes/Node.js');\nvar Pair = require('../nodes/Pair.js');\nvar toJS = require('../nodes/toJS.js');\nvar Schema = require('../schema/Schema.js');\nvar stringify = require('../stringify/stringify.js');\nvar stringifyDocument = require('../stringify/stringifyDocument.js');\nvar anchors = require('./anchors.js');\nvar applyReviver = require('./applyReviver.js');\nvar createNode = require('./createNode.js');\nvar directives = require('./directives.js');\n\nclass Document {\n constructor(value, replacer, options) {\n /** A comment before this Document */\n this.commentBefore = null;\n /** A comment immediately after this Document */\n this.comment = null;\n /** Errors encountered during parsing. */\n this.errors = [];\n /** Warnings encountered during parsing. */\n this.warnings = [];\n Object.defineProperty(this, Node.NODE_TYPE, { value: Node.DOC });\n let _replacer = null;\n if (typeof replacer === 'function' || Array.isArray(replacer)) {\n _replacer = replacer;\n }\n else if (options === undefined && replacer) {\n options = replacer;\n replacer = undefined;\n }\n const opt = Object.assign({\n intAsBigInt: false,\n keepSourceTokens: false,\n logLevel: 'warn',\n prettyErrors: true,\n strict: true,\n uniqueKeys: true,\n version: '1.2'\n }, options);\n this.options = opt;\n let { version } = opt;\n if (options?._directives) {\n this.directives = options._directives.atDocument();\n if (this.directives.yaml.explicit)\n version = this.directives.yaml.version;\n }\n else\n this.directives = new directives.Directives({ version });\n this.setSchema(version, options);\n if (value === undefined)\n this.contents = null;\n else {\n this.contents = this.createNode(value, _replacer, options);\n }\n }\n /**\n * Create a deep copy of this Document and its contents.\n *\n * Custom Node values that inherit from `Object` still refer to their original instances.\n */\n clone() {\n const copy = Object.create(Document.prototype, {\n [Node.NODE_TYPE]: { value: Node.DOC }\n });\n copy.commentBefore = this.commentBefore;\n copy.comment = this.comment;\n copy.errors = this.errors.slice();\n copy.warnings = this.warnings.slice();\n copy.options = Object.assign({}, this.options);\n if (this.directives)\n copy.directives = this.directives.clone();\n copy.schema = this.schema.clone();\n copy.contents = Node.isNode(this.contents)\n ? this.contents.clone(copy.schema)\n : this.contents;\n if (this.range)\n copy.range = this.range.slice();\n return copy;\n }\n /** Adds a value to the document. */\n add(value) {\n if (assertCollection(this.contents))\n this.contents.add(value);\n }\n /** Adds a value to the document. 
*/\n addIn(path, value) {\n if (assertCollection(this.contents))\n this.contents.addIn(path, value);\n }\n /**\n * Create a new `Alias` node, ensuring that the target `node` has the required anchor.\n *\n * If `node` already has an anchor, `name` is ignored.\n * Otherwise, the `node.anchor` value will be set to `name`,\n * or if an anchor with that name is already present in the document,\n * `name` will be used as a prefix for a new unique anchor.\n * If `name` is undefined, the generated anchor will use 'a' as a prefix.\n */\n createAlias(node, name) {\n if (!node.anchor) {\n const prev = anchors.anchorNames(this);\n node.anchor =\n // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing\n !name || prev.has(name) ? anchors.findNewAnchor(name || 'a', prev) : name;\n }\n return new Alias.Alias(node.anchor);\n }\n createNode(value, replacer, options) {\n let _replacer = undefined;\n if (typeof replacer === 'function') {\n value = replacer.call({ '': value }, '', value);\n _replacer = replacer;\n }\n else if (Array.isArray(replacer)) {\n const keyToStr = (v) => typeof v === 'number' || v instanceof String || v instanceof Number;\n const asStr = replacer.filter(keyToStr).map(String);\n if (asStr.length > 0)\n replacer = replacer.concat(asStr);\n _replacer = replacer;\n }\n else if (options === undefined && replacer) {\n options = replacer;\n replacer = undefined;\n }\n const { aliasDuplicateObjects, anchorPrefix, flow, keepUndefined, onTagObj, tag } = options ?? {};\n const { onAnchor, setAnchors, sourceObjects } = anchors.createNodeAnchors(this, \n // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing\n anchorPrefix || 'a');\n const ctx = {\n aliasDuplicateObjects: aliasDuplicateObjects ?? true,\n keepUndefined: keepUndefined ?? false,\n onAnchor,\n onTagObj,\n replacer: _replacer,\n schema: this.schema,\n sourceObjects\n };\n const node = createNode.createNode(value, tag, ctx);\n if (flow && Node.isCollection(node))\n node.flow = true;\n setAnchors();\n return node;\n }\n /**\n * Convert a key and a value into a `Pair` using the current schema,\n * recursively wrapping all values as `Scalar` or `Collection` nodes.\n */\n createPair(key, value, options = {}) {\n const k = this.createNode(key, null, options);\n const v = this.createNode(value, null, options);\n return new Pair.Pair(k, v);\n }\n /**\n * Removes a value from the document.\n * @returns `true` if the item was found and removed.\n */\n delete(key) {\n return assertCollection(this.contents) ? this.contents.delete(key) : false;\n }\n /**\n * Removes a value from the document.\n * @returns `true` if the item was found and removed.\n */\n deleteIn(path) {\n if (Collection.isEmptyPath(path)) {\n if (this.contents == null)\n return false;\n this.contents = null;\n return true;\n }\n return assertCollection(this.contents)\n ? this.contents.deleteIn(path)\n : false;\n }\n /**\n * Returns item at `key`, or `undefined` if not found. By default unwraps\n * scalar values from their surrounding node; to disable set `keepScalar` to\n * `true` (collections are always returned intact).\n */\n get(key, keepScalar) {\n return Node.isCollection(this.contents)\n ? this.contents.get(key, keepScalar)\n : undefined;\n }\n /**\n * Returns item at `path`, or `undefined` if not found. 
By default unwraps\n * scalar values from their surrounding node; to disable set `keepScalar` to\n * `true` (collections are always returned intact).\n */\n getIn(path, keepScalar) {\n if (Collection.isEmptyPath(path))\n return !keepScalar && Node.isScalar(this.contents)\n ? this.contents.value\n : this.contents;\n return Node.isCollection(this.contents)\n ? this.contents.getIn(path, keepScalar)\n : undefined;\n }\n /**\n * Checks if the document includes a value with the key `key`.\n */\n has(key) {\n return Node.isCollection(this.contents) ? this.contents.has(key) : false;\n }\n /**\n * Checks if the document includes a value at `path`.\n */\n hasIn(path) {\n if (Collection.isEmptyPath(path))\n return this.contents !== undefined;\n return Node.isCollection(this.contents) ? this.contents.hasIn(path) : false;\n }\n /**\n * Sets a value in this document. For `!!set`, `value` needs to be a\n * boolean to add/remove the item from the set.\n */\n set(key, value) {\n if (this.contents == null) {\n this.contents = Collection.collectionFromPath(this.schema, [key], value);\n }\n else if (assertCollection(this.contents)) {\n this.contents.set(key, value);\n }\n }\n /**\n * Sets a value in this document. For `!!set`, `value` needs to be a\n * boolean to add/remove the item from the set.\n */\n setIn(path, value) {\n if (Collection.isEmptyPath(path))\n this.contents = value;\n else if (this.contents == null) {\n this.contents = Collection.collectionFromPath(this.schema, Array.from(path), value);\n }\n else if (assertCollection(this.contents)) {\n this.contents.setIn(path, value);\n }\n }\n /**\n * Change the YAML version and schema used by the document.\n * A `null` version disables support for directives, explicit tags, anchors, and aliases.\n * It also requires the `schema` option to be given as a `Schema` instance value.\n *\n * Overrides all previously set schema options.\n */\n setSchema(version, options = {}) {\n if (typeof version === 'number')\n version = String(version);\n let opt;\n switch (version) {\n case '1.1':\n if (this.directives)\n this.directives.yaml.version = '1.1';\n else\n this.directives = new directives.Directives({ version: '1.1' });\n opt = { merge: true, resolveKnownTags: false, schema: 'yaml-1.1' };\n break;\n case '1.2':\n case 'next':\n if (this.directives)\n this.directives.yaml.version = version;\n else\n this.directives = new directives.Directives({ version });\n opt = { merge: false, resolveKnownTags: true, schema: 'core' };\n break;\n case null:\n if (this.directives)\n delete this.directives;\n opt = null;\n break;\n default: {\n const sv = JSON.stringify(version);\n throw new Error(`Expected '1.1', '1.2' or null as first argument, but found: ${sv}`);\n }\n }\n // Not using `instanceof Schema` to allow for duck typing\n if (options.schema instanceof Object)\n this.schema = options.schema;\n else if (opt)\n this.schema = new Schema.Schema(Object.assign(opt, options));\n else\n throw new Error(`With a null YAML version, the { schema: Schema } option is required`);\n }\n // json & jsonArg are only used from toJSON()\n toJS({ json, jsonArg, mapAsMap, maxAliasCount, onAnchor, reviver } = {}) {\n const ctx = {\n anchors: new Map(),\n doc: this,\n keep: !json,\n mapAsMap: mapAsMap === true,\n mapKeyWarned: false,\n maxAliasCount: typeof maxAliasCount === 'number' ? maxAliasCount : 100,\n stringify: stringify.stringify\n };\n const res = toJS.toJS(this.contents, jsonArg ?? 
'', ctx);\n if (typeof onAnchor === 'function')\n for (const { count, res } of ctx.anchors.values())\n onAnchor(res, count);\n return typeof reviver === 'function'\n ? applyReviver.applyReviver(reviver, { '': res }, '', res)\n : res;\n }\n /**\n * A JSON representation of the document `contents`.\n *\n * @param jsonArg Used by `JSON.stringify` to indicate the array index or\n * property name.\n */\n toJSON(jsonArg, onAnchor) {\n return this.toJS({ json: true, jsonArg, mapAsMap: false, onAnchor });\n }\n /** A YAML representation of the document. */\n toString(options = {}) {\n if (this.errors.length > 0)\n throw new Error('Document with errors cannot be stringified');\n if ('indent' in options &&\n (!Number.isInteger(options.indent) || Number(options.indent) <= 0)) {\n const s = JSON.stringify(options.indent);\n throw new Error(`\"indent\" option must be a positive integer, not ${s}`);\n }\n return stringifyDocument.stringifyDocument(this, options);\n }\n}\nfunction assertCollection(contents) {\n if (Node.isCollection(contents))\n return true;\n throw new Error('Expected a YAML collection as document contents');\n}\n\nexports.Document = Document;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar visit = require('../visit.js');\n\n/**\n * Verify that the input string is a valid anchor.\n *\n * Will throw on errors.\n */\nfunction anchorIsValid(anchor) {\n if (/[\\x00-\\x19\\s,[\\]{}]/.test(anchor)) {\n const sa = JSON.stringify(anchor);\n const msg = `Anchor must not contain whitespace or control characters: ${sa}`;\n throw new Error(msg);\n }\n return true;\n}\nfunction anchorNames(root) {\n const anchors = new Set();\n visit.visit(root, {\n Value(_key, node) {\n if (node.anchor)\n anchors.add(node.anchor);\n }\n });\n return anchors;\n}\n/** Find a new anchor name with the given `prefix` and a one-indexed suffix. */\nfunction findNewAnchor(prefix, exclude) {\n for (let i = 1; true; ++i) {\n const name = `${prefix}${i}`;\n if (!exclude.has(name))\n return name;\n }\n}\nfunction createNodeAnchors(doc, prefix) {\n const aliasObjects = [];\n const sourceObjects = new Map();\n let prevAnchors = null;\n return {\n onAnchor: (source) => {\n aliasObjects.push(source);\n if (!prevAnchors)\n prevAnchors = anchorNames(doc);\n const anchor = findNewAnchor(prefix, prevAnchors);\n prevAnchors.add(anchor);\n return anchor;\n },\n /**\n * With circular references, the source node is only resolved after all\n * of its child nodes are. 
This is why anchors are set only after all of\n * the nodes have been created.\n */\n setAnchors: () => {\n for (const source of aliasObjects) {\n const ref = sourceObjects.get(source);\n if (typeof ref === 'object' &&\n ref.anchor &&\n (Node.isScalar(ref.node) || Node.isCollection(ref.node))) {\n ref.node.anchor = ref.anchor;\n }\n else {\n const error = new Error('Failed to resolve repeated object (this should not happen)');\n error.source = source;\n throw error;\n }\n }\n },\n sourceObjects\n };\n}\n\nexports.anchorIsValid = anchorIsValid;\nexports.anchorNames = anchorNames;\nexports.createNodeAnchors = createNodeAnchors;\nexports.findNewAnchor = findNewAnchor;\n","'use strict';\n\n/**\n * Applies the JSON.parse reviver algorithm as defined in the ECMA-262 spec,\n * in section 24.5.1.1 \"Runtime Semantics: InternalizeJSONProperty\" of the\n * 2021 edition: https://tc39.es/ecma262/#sec-json.parse\n *\n * Includes extensions for handling Map and Set objects.\n */\nfunction applyReviver(reviver, obj, key, val) {\n if (val && typeof val === 'object') {\n if (Array.isArray(val)) {\n for (let i = 0, len = val.length; i < len; ++i) {\n const v0 = val[i];\n const v1 = applyReviver(reviver, val, String(i), v0);\n if (v1 === undefined)\n delete val[i];\n else if (v1 !== v0)\n val[i] = v1;\n }\n }\n else if (val instanceof Map) {\n for (const k of Array.from(val.keys())) {\n const v0 = val.get(k);\n const v1 = applyReviver(reviver, val, k, v0);\n if (v1 === undefined)\n val.delete(k);\n else if (v1 !== v0)\n val.set(k, v1);\n }\n }\n else if (val instanceof Set) {\n for (const v0 of Array.from(val)) {\n const v1 = applyReviver(reviver, val, v0, v0);\n if (v1 === undefined)\n val.delete(v0);\n else if (v1 !== v0) {\n val.delete(v0);\n val.add(v1);\n }\n }\n }\n else {\n for (const [k, v0] of Object.entries(val)) {\n const v1 = applyReviver(reviver, val, k, v0);\n if (v1 === undefined)\n delete val[k];\n else if (v1 !== v0)\n val[k] = v1;\n }\n }\n }\n return reviver.call(obj, key, val);\n}\n\nexports.applyReviver = applyReviver;\n","'use strict';\n\nvar Alias = require('../nodes/Alias.js');\nvar Node = require('../nodes/Node.js');\nvar Scalar = require('../nodes/Scalar.js');\n\nconst defaultTagPrefix = 'tag:yaml.org,2002:';\nfunction findTagObject(value, tagName, tags) {\n if (tagName) {\n const match = tags.filter(t => t.tag === tagName);\n const tagObj = match.find(t => !t.format) ?? match[0];\n if (!tagObj)\n throw new Error(`Tag ${tagName} not found`);\n return tagObj;\n }\n return tags.find(t => t.identify?.(value) && !t.format);\n}\nfunction createNode(value, tagName, ctx) {\n if (Node.isDocument(value))\n value = value.contents;\n if (Node.isNode(value))\n return value;\n if (Node.isPair(value)) {\n const map = ctx.schema[Node.MAP].createNode?.(ctx.schema, null, ctx);\n map.items.push(value);\n return map;\n }\n if (value instanceof String ||\n value instanceof Number ||\n value instanceof Boolean ||\n (typeof BigInt === 'function' && value instanceof BigInt) // not supported everywhere\n ) {\n // https://tc39.es/ecma262/#sec-serializejsonproperty\n value = value.valueOf();\n }\n const { aliasDuplicateObjects, onAnchor, onTagObj, schema, sourceObjects } = ctx;\n // Detect duplicate references to the same object & use Alias nodes for all\n // after first. 
The `ref` wrapper allows for circular references to resolve.\n let ref = undefined;\n if (aliasDuplicateObjects && value && typeof value === 'object') {\n ref = sourceObjects.get(value);\n if (ref) {\n if (!ref.anchor)\n ref.anchor = onAnchor(value);\n return new Alias.Alias(ref.anchor);\n }\n else {\n ref = { anchor: null, node: null };\n sourceObjects.set(value, ref);\n }\n }\n if (tagName?.startsWith('!!'))\n tagName = defaultTagPrefix + tagName.slice(2);\n let tagObj = findTagObject(value, tagName, schema.tags);\n if (!tagObj) {\n if (value && typeof value.toJSON === 'function') {\n // eslint-disable-next-line @typescript-eslint/no-unsafe-call\n value = value.toJSON();\n }\n if (!value || typeof value !== 'object') {\n const node = new Scalar.Scalar(value);\n if (ref)\n ref.node = node;\n return node;\n }\n tagObj =\n value instanceof Map\n ? schema[Node.MAP]\n : Symbol.iterator in Object(value)\n ? schema[Node.SEQ]\n : schema[Node.MAP];\n }\n if (onTagObj) {\n onTagObj(tagObj);\n delete ctx.onTagObj;\n }\n const node = tagObj?.createNode\n ? tagObj.createNode(ctx.schema, value, ctx)\n : new Scalar.Scalar(value);\n if (tagName)\n node.tag = tagName;\n if (ref)\n ref.node = node;\n return node;\n}\n\nexports.createNode = createNode;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar visit = require('../visit.js');\n\nconst escapeChars = {\n '!': '%21',\n ',': '%2C',\n '[': '%5B',\n ']': '%5D',\n '{': '%7B',\n '}': '%7D'\n};\nconst escapeTagName = (tn) => tn.replace(/[!,[\\]{}]/g, ch => escapeChars[ch]);\nclass Directives {\n constructor(yaml, tags) {\n /**\n * The directives-end/doc-start marker `---`. If `null`, a marker may still be\n * included in the document's stringified representation.\n */\n this.docStart = null;\n /** The doc-end marker `...`. 
*/\n this.docEnd = false;\n this.yaml = Object.assign({}, Directives.defaultYaml, yaml);\n this.tags = Object.assign({}, Directives.defaultTags, tags);\n }\n clone() {\n const copy = new Directives(this.yaml, this.tags);\n copy.docStart = this.docStart;\n return copy;\n }\n /**\n * During parsing, get a Directives instance for the current document and\n * update the stream state according to the current version's spec.\n */\n atDocument() {\n const res = new Directives(this.yaml, this.tags);\n switch (this.yaml.version) {\n case '1.1':\n this.atNextDocument = true;\n break;\n case '1.2':\n this.atNextDocument = false;\n this.yaml = {\n explicit: Directives.defaultYaml.explicit,\n version: '1.2'\n };\n this.tags = Object.assign({}, Directives.defaultTags);\n break;\n }\n return res;\n }\n /**\n * @param onError - May be called even if the action was successful\n * @returns `true` on success\n */\n add(line, onError) {\n if (this.atNextDocument) {\n this.yaml = { explicit: Directives.defaultYaml.explicit, version: '1.1' };\n this.tags = Object.assign({}, Directives.defaultTags);\n this.atNextDocument = false;\n }\n const parts = line.trim().split(/[ \\t]+/);\n const name = parts.shift();\n switch (name) {\n case '%TAG': {\n if (parts.length !== 2) {\n onError(0, '%TAG directive should contain exactly two parts');\n if (parts.length < 2)\n return false;\n }\n const [handle, prefix] = parts;\n this.tags[handle] = prefix;\n return true;\n }\n case '%YAML': {\n this.yaml.explicit = true;\n if (parts.length !== 1) {\n onError(0, '%YAML directive should contain exactly one part');\n return false;\n }\n const [version] = parts;\n if (version === '1.1' || version === '1.2') {\n this.yaml.version = version;\n return true;\n }\n else {\n const isValid = /^\\d+\\.\\d+$/.test(version);\n onError(6, `Unsupported YAML version ${version}`, isValid);\n return false;\n }\n }\n default:\n onError(0, `Unknown directive ${name}`, true);\n return false;\n }\n }\n /**\n * Resolves a tag, matching handles to those defined in %TAG directives.\n *\n * @returns Resolved tag, which may also be the non-specific tag `'!'` or a\n * `'!local'` tag, or `null` if unresolvable.\n */\n tagName(source, onError) {\n if (source === '!')\n return '!'; // non-specific tag\n if (source[0] !== '!') {\n onError(`Not a valid tag: ${source}`);\n return null;\n }\n if (source[1] === '<') {\n const verbatim = source.slice(2, -1);\n if (verbatim === '!' || verbatim === '!!') {\n onError(`Verbatim tags aren't resolved, so ${source} is invalid.`);\n return null;\n }\n if (source[source.length - 1] !== '>')\n onError('Verbatim tags must end with a >');\n return verbatim;\n }\n const [, handle, suffix] = source.match(/^(.*!)([^!]*)$/);\n if (!suffix)\n onError(`The ${source} tag has no suffix`);\n const prefix = this.tags[handle];\n if (prefix)\n return prefix + decodeURIComponent(suffix);\n if (handle === '!')\n return source; // local tag\n onError(`Could not resolve tag: ${source}`);\n return null;\n }\n /**\n * Given a fully resolved tag, returns its printable string form,\n * taking into account current tag prefixes and defaults.\n */\n tagString(tag) {\n for (const [handle, prefix] of Object.entries(this.tags)) {\n if (tag.startsWith(prefix))\n return handle + escapeTagName(tag.substring(prefix.length));\n }\n return tag[0] === '!' ? tag : `!<${tag}>`;\n }\n toString(doc) {\n const lines = this.yaml.explicit\n ? 
[`%YAML ${this.yaml.version || '1.2'}`]\n : [];\n const tagEntries = Object.entries(this.tags);\n let tagNames;\n if (doc && tagEntries.length > 0 && Node.isNode(doc.contents)) {\n const tags = {};\n visit.visit(doc.contents, (_key, node) => {\n if (Node.isNode(node) && node.tag)\n tags[node.tag] = true;\n });\n tagNames = Object.keys(tags);\n }\n else\n tagNames = [];\n for (const [handle, prefix] of tagEntries) {\n if (handle === '!!' && prefix === 'tag:yaml.org,2002:')\n continue;\n if (!doc || tagNames.some(tn => tn.startsWith(prefix)))\n lines.push(`%TAG ${handle} ${prefix}`);\n }\n return lines.join('\\n');\n }\n}\nDirectives.defaultYaml = { explicit: false, version: '1.2' };\nDirectives.defaultTags = { '!!': 'tag:yaml.org,2002:' };\n\nexports.Directives = Directives;\n","'use strict';\n\nclass YAMLError extends Error {\n constructor(name, pos, code, message) {\n super();\n this.name = name;\n this.code = code;\n this.message = message;\n this.pos = pos;\n }\n}\nclass YAMLParseError extends YAMLError {\n constructor(pos, code, message) {\n super('YAMLParseError', pos, code, message);\n }\n}\nclass YAMLWarning extends YAMLError {\n constructor(pos, code, message) {\n super('YAMLWarning', pos, code, message);\n }\n}\nconst prettifyError = (src, lc) => (error) => {\n if (error.pos[0] === -1)\n return;\n error.linePos = error.pos.map(pos => lc.linePos(pos));\n const { line, col } = error.linePos[0];\n error.message += ` at line ${line}, column ${col}`;\n let ci = col - 1;\n let lineStr = src\n .substring(lc.lineStarts[line - 1], lc.lineStarts[line])\n .replace(/[\\n\\r]+$/, '');\n // Trim to max 80 chars, keeping col position near the middle\n if (ci >= 60 && lineStr.length > 80) {\n const trimStart = Math.min(ci - 39, lineStr.length - 79);\n lineStr = '…' + lineStr.substring(trimStart);\n ci -= trimStart - 1;\n }\n if (lineStr.length > 80)\n lineStr = lineStr.substring(0, 79) + '…';\n // Include previous line in context if pointing at line start\n if (line > 1 && /^ *$/.test(lineStr.substring(0, ci))) {\n // Regexp won't match if start is trimmed\n let prev = src.substring(lc.lineStarts[line - 2], lc.lineStarts[line - 1]);\n if (prev.length > 80)\n prev = prev.substring(0, 79) + '…\\n';\n lineStr = prev + lineStr;\n }\n if (/[^ ]/.test(lineStr)) {\n let count = 1;\n const end = error.linePos[1];\n if (end && end.line === line && end.col > col) {\n count = Math.min(end.col - col, 80 - ci);\n }\n const pointer = ' '.repeat(ci) + '^'.repeat(count);\n error.message += `:\\n\\n${lineStr}\\n${pointer}\\n`;\n }\n};\n\nexports.YAMLError = YAMLError;\nexports.YAMLParseError = YAMLParseError;\nexports.YAMLWarning = YAMLWarning;\nexports.prettifyError = prettifyError;\n","'use strict';\n\nvar composer = require('./compose/composer.js');\nvar Document = require('./doc/Document.js');\nvar Schema = require('./schema/Schema.js');\nvar errors = require('./errors.js');\nvar Alias = require('./nodes/Alias.js');\nvar Node = require('./nodes/Node.js');\nvar Pair = require('./nodes/Pair.js');\nvar Scalar = require('./nodes/Scalar.js');\nvar YAMLMap = require('./nodes/YAMLMap.js');\nvar YAMLSeq = require('./nodes/YAMLSeq.js');\nvar cst = require('./parse/cst.js');\nvar lexer = require('./parse/lexer.js');\nvar lineCounter = require('./parse/line-counter.js');\nvar parser = require('./parse/parser.js');\nvar publicApi = require('./public-api.js');\nvar visit = require('./visit.js');\n\n\n\nexports.Composer = composer.Composer;\nexports.Document = Document.Document;\nexports.Schema = 
Schema.Schema;\nexports.YAMLError = errors.YAMLError;\nexports.YAMLParseError = errors.YAMLParseError;\nexports.YAMLWarning = errors.YAMLWarning;\nexports.Alias = Alias.Alias;\nexports.isAlias = Node.isAlias;\nexports.isCollection = Node.isCollection;\nexports.isDocument = Node.isDocument;\nexports.isMap = Node.isMap;\nexports.isNode = Node.isNode;\nexports.isPair = Node.isPair;\nexports.isScalar = Node.isScalar;\nexports.isSeq = Node.isSeq;\nexports.Pair = Pair.Pair;\nexports.Scalar = Scalar.Scalar;\nexports.YAMLMap = YAMLMap.YAMLMap;\nexports.YAMLSeq = YAMLSeq.YAMLSeq;\nexports.CST = cst;\nexports.Lexer = lexer.Lexer;\nexports.LineCounter = lineCounter.LineCounter;\nexports.Parser = parser.Parser;\nexports.parse = publicApi.parse;\nexports.parseAllDocuments = publicApi.parseAllDocuments;\nexports.parseDocument = publicApi.parseDocument;\nexports.stringify = publicApi.stringify;\nexports.visit = visit.visit;\nexports.visitAsync = visit.visitAsync;\n","'use strict';\n\nfunction debug(logLevel, ...messages) {\n if (logLevel === 'debug')\n console.log(...messages);\n}\nfunction warn(logLevel, warning) {\n if (logLevel === 'debug' || logLevel === 'warn') {\n if (typeof process !== 'undefined' && process.emitWarning)\n process.emitWarning(warning);\n else\n console.warn(warning);\n }\n}\n\nexports.debug = debug;\nexports.warn = warn;\n","'use strict';\n\nvar anchors = require('../doc/anchors.js');\nvar visit = require('../visit.js');\nvar Node = require('./Node.js');\n\nclass Alias extends Node.NodeBase {\n constructor(source) {\n super(Node.ALIAS);\n this.source = source;\n Object.defineProperty(this, 'tag', {\n set() {\n throw new Error('Alias nodes cannot have tags');\n }\n });\n }\n /**\n * Resolve the value of this alias within `doc`, finding the last\n * instance of the `source` anchor before this node.\n */\n resolve(doc) {\n let found = undefined;\n visit.visit(doc, {\n Node: (_key, node) => {\n if (node === this)\n return visit.visit.BREAK;\n if (node.anchor === this.source)\n found = node;\n }\n });\n return found;\n }\n toJSON(_arg, ctx) {\n if (!ctx)\n return { source: this.source };\n const { anchors, doc, maxAliasCount } = ctx;\n const source = this.resolve(doc);\n if (!source) {\n const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`;\n throw new ReferenceError(msg);\n }\n const data = anchors.get(source);\n /* istanbul ignore if */\n if (!data || data.res === undefined) {\n const msg = 'This should not happen: Alias anchor was not resolved?';\n throw new ReferenceError(msg);\n }\n if (maxAliasCount >= 0) {\n data.count += 1;\n if (data.aliasCount === 0)\n data.aliasCount = getAliasCount(doc, source, anchors);\n if (data.count * data.aliasCount > maxAliasCount) {\n const msg = 'Excessive alias count indicates a resource exhaustion attack';\n throw new ReferenceError(msg);\n }\n }\n return data.res;\n }\n toString(ctx, _onComment, _onChompKeep) {\n const src = `*${this.source}`;\n if (ctx) {\n anchors.anchorIsValid(this.source);\n if (ctx.options.verifyAliasOrder && !ctx.anchors.has(this.source)) {\n const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`;\n throw new Error(msg);\n }\n if (ctx.implicitKey)\n return `${src} `;\n }\n return src;\n }\n}\nfunction getAliasCount(doc, node, anchors) {\n if (Node.isAlias(node)) {\n const source = node.resolve(doc);\n const anchor = anchors && source && anchors.get(source);\n return anchor ? 
anchor.count * anchor.aliasCount : 0;\n }\n else if (Node.isCollection(node)) {\n let count = 0;\n for (const item of node.items) {\n const c = getAliasCount(doc, item, anchors);\n if (c > count)\n count = c;\n }\n return count;\n }\n else if (Node.isPair(node)) {\n const kc = getAliasCount(doc, node.key, anchors);\n const vc = getAliasCount(doc, node.value, anchors);\n return Math.max(kc, vc);\n }\n return 1;\n}\n\nexports.Alias = Alias;\n","'use strict';\n\nvar createNode = require('../doc/createNode.js');\nvar Node = require('./Node.js');\n\nfunction collectionFromPath(schema, path, value) {\n let v = value;\n for (let i = path.length - 1; i >= 0; --i) {\n const k = path[i];\n if (typeof k === 'number' && Number.isInteger(k) && k >= 0) {\n const a = [];\n a[k] = v;\n v = a;\n }\n else {\n v = new Map([[k, v]]);\n }\n }\n return createNode.createNode(v, undefined, {\n aliasDuplicateObjects: false,\n keepUndefined: false,\n onAnchor: () => {\n throw new Error('This should not happen, please report a bug.');\n },\n schema,\n sourceObjects: new Map()\n });\n}\n// Type guard is intentionally a little wrong so as to be more useful,\n// as it does not cover untypable empty non-string iterables (e.g. []).\nconst isEmptyPath = (path) => path == null ||\n (typeof path === 'object' && !!path[Symbol.iterator]().next().done);\nclass Collection extends Node.NodeBase {\n constructor(type, schema) {\n super(type);\n Object.defineProperty(this, 'schema', {\n value: schema,\n configurable: true,\n enumerable: false,\n writable: true\n });\n }\n /**\n * Create a copy of this collection.\n *\n * @param schema - If defined, overwrites the original's schema\n */\n clone(schema) {\n const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this));\n if (schema)\n copy.schema = schema;\n copy.items = copy.items.map(it => Node.isNode(it) || Node.isPair(it) ? it.clone(schema) : it);\n if (this.range)\n copy.range = this.range.slice();\n return copy;\n }\n /**\n * Adds a value to the collection. For `!!map` and `!!omap` the value must\n * be a Pair instance or a `{ key, value }` object, which may not have a key\n * that already exists in the map.\n */\n addIn(path, value) {\n if (isEmptyPath(path))\n this.add(value);\n else {\n const [key, ...rest] = path;\n const node = this.get(key, true);\n if (Node.isCollection(node))\n node.addIn(rest, value);\n else if (node === undefined && this.schema)\n this.set(key, collectionFromPath(this.schema, rest, value));\n else\n throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);\n }\n }\n /**\n * Removes a value from the collection.\n * @returns `true` if the item was found and removed.\n */\n deleteIn(path) {\n const [key, ...rest] = path;\n if (rest.length === 0)\n return this.delete(key);\n const node = this.get(key, true);\n if (Node.isCollection(node))\n return node.deleteIn(rest);\n else\n throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);\n }\n /**\n * Returns item at `key`, or `undefined` if not found. By default unwraps\n * scalar values from their surrounding node; to disable set `keepScalar` to\n * `true` (collections are always returned intact).\n */\n getIn(path, keepScalar) {\n const [key, ...rest] = path;\n const node = this.get(key, true);\n if (rest.length === 0)\n return !keepScalar && Node.isScalar(node) ? node.value : node;\n else\n return Node.isCollection(node) ? 
node.getIn(rest, keepScalar) : undefined;\n }\n hasAllNullValues(allowScalar) {\n return this.items.every(node => {\n if (!Node.isPair(node))\n return false;\n const n = node.value;\n return (n == null ||\n (allowScalar &&\n Node.isScalar(n) &&\n n.value == null &&\n !n.commentBefore &&\n !n.comment &&\n !n.tag));\n });\n }\n /**\n * Checks if the collection includes a value with the key `key`.\n */\n hasIn(path) {\n const [key, ...rest] = path;\n if (rest.length === 0)\n return this.has(key);\n const node = this.get(key, true);\n return Node.isCollection(node) ? node.hasIn(rest) : false;\n }\n /**\n * Sets a value in this collection. For `!!set`, `value` needs to be a\n * boolean to add/remove the item from the set.\n */\n setIn(path, value) {\n const [key, ...rest] = path;\n if (rest.length === 0) {\n this.set(key, value);\n }\n else {\n const node = this.get(key, true);\n if (Node.isCollection(node))\n node.setIn(rest, value);\n else if (node === undefined && this.schema)\n this.set(key, collectionFromPath(this.schema, rest, value));\n else\n throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);\n }\n }\n}\nCollection.maxFlowStringSingleLineLength = 60;\n\nexports.Collection = Collection;\nexports.collectionFromPath = collectionFromPath;\nexports.isEmptyPath = isEmptyPath;\n","'use strict';\n\nconst ALIAS = Symbol.for('yaml.alias');\nconst DOC = Symbol.for('yaml.document');\nconst MAP = Symbol.for('yaml.map');\nconst PAIR = Symbol.for('yaml.pair');\nconst SCALAR = Symbol.for('yaml.scalar');\nconst SEQ = Symbol.for('yaml.seq');\nconst NODE_TYPE = Symbol.for('yaml.node.type');\nconst isAlias = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === ALIAS;\nconst isDocument = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === DOC;\nconst isMap = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === MAP;\nconst isPair = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === PAIR;\nconst isScalar = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SCALAR;\nconst isSeq = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SEQ;\nfunction isCollection(node) {\n if (node && typeof node === 'object')\n switch (node[NODE_TYPE]) {\n case MAP:\n case SEQ:\n return true;\n }\n return false;\n}\nfunction isNode(node) {\n if (node && typeof node === 'object')\n switch (node[NODE_TYPE]) {\n case ALIAS:\n case MAP:\n case SCALAR:\n case SEQ:\n return true;\n }\n return false;\n}\nconst hasAnchor = (node) => (isScalar(node) || isCollection(node)) && !!node.anchor;\nclass NodeBase {\n constructor(type) {\n Object.defineProperty(this, NODE_TYPE, { value: type });\n }\n /** Create a copy of this node. 
*/\n clone() {\n const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this));\n if (this.range)\n copy.range = this.range.slice();\n return copy;\n }\n}\n\nexports.ALIAS = ALIAS;\nexports.DOC = DOC;\nexports.MAP = MAP;\nexports.NODE_TYPE = NODE_TYPE;\nexports.NodeBase = NodeBase;\nexports.PAIR = PAIR;\nexports.SCALAR = SCALAR;\nexports.SEQ = SEQ;\nexports.hasAnchor = hasAnchor;\nexports.isAlias = isAlias;\nexports.isCollection = isCollection;\nexports.isDocument = isDocument;\nexports.isMap = isMap;\nexports.isNode = isNode;\nexports.isPair = isPair;\nexports.isScalar = isScalar;\nexports.isSeq = isSeq;\n","'use strict';\n\nvar createNode = require('../doc/createNode.js');\nvar stringifyPair = require('../stringify/stringifyPair.js');\nvar addPairToJSMap = require('./addPairToJSMap.js');\nvar Node = require('./Node.js');\n\nfunction createPair(key, value, ctx) {\n const k = createNode.createNode(key, undefined, ctx);\n const v = createNode.createNode(value, undefined, ctx);\n return new Pair(k, v);\n}\nclass Pair {\n constructor(key, value = null) {\n Object.defineProperty(this, Node.NODE_TYPE, { value: Node.PAIR });\n this.key = key;\n this.value = value;\n }\n clone(schema) {\n let { key, value } = this;\n if (Node.isNode(key))\n key = key.clone(schema);\n if (Node.isNode(value))\n value = value.clone(schema);\n return new Pair(key, value);\n }\n toJSON(_, ctx) {\n const pair = ctx?.mapAsMap ? new Map() : {};\n return addPairToJSMap.addPairToJSMap(ctx, pair, this);\n }\n toString(ctx, onComment, onChompKeep) {\n return ctx?.doc\n ? stringifyPair.stringifyPair(this, ctx, onComment, onChompKeep)\n : JSON.stringify(this);\n }\n}\n\nexports.Pair = Pair;\nexports.createPair = createPair;\n","'use strict';\n\nvar Node = require('./Node.js');\nvar toJS = require('./toJS.js');\n\nconst isScalarValue = (value) => !value || (typeof value !== 'function' && typeof value !== 'object');\nclass Scalar extends Node.NodeBase {\n constructor(value) {\n super(Node.SCALAR);\n this.value = value;\n }\n toJSON(arg, ctx) {\n return ctx?.keep ? this.value : toJS.toJS(this.value, arg, ctx);\n }\n toString() {\n return String(this.value);\n }\n}\nScalar.BLOCK_FOLDED = 'BLOCK_FOLDED';\nScalar.BLOCK_LITERAL = 'BLOCK_LITERAL';\nScalar.PLAIN = 'PLAIN';\nScalar.QUOTE_DOUBLE = 'QUOTE_DOUBLE';\nScalar.QUOTE_SINGLE = 'QUOTE_SINGLE';\n\nexports.Scalar = Scalar;\nexports.isScalarValue = isScalarValue;\n","'use strict';\n\nvar stringifyCollection = require('../stringify/stringifyCollection.js');\nvar addPairToJSMap = require('./addPairToJSMap.js');\nvar Collection = require('./Collection.js');\nvar Node = require('./Node.js');\nvar Pair = require('./Pair.js');\nvar Scalar = require('./Scalar.js');\n\nfunction findPair(items, key) {\n const k = Node.isScalar(key) ? key.value : key;\n for (const it of items) {\n if (Node.isPair(it)) {\n if (it.key === key || it.key === k)\n return it;\n if (Node.isScalar(it.key) && it.key.value === k)\n return it;\n }\n }\n return undefined;\n}\nclass YAMLMap extends Collection.Collection {\n constructor(schema) {\n super(Node.MAP, schema);\n this.items = [];\n }\n static get tagName() {\n return 'tag:yaml.org,2002:map';\n }\n /**\n * Adds a value to the collection.\n *\n * @param overwrite - If not set `true`, using a key that is already in the\n * collection will throw. 
Otherwise, overwrites the previous value.\n */\n add(pair, overwrite) {\n let _pair;\n if (Node.isPair(pair))\n _pair = pair;\n else if (!pair || typeof pair !== 'object' || !('key' in pair)) {\n // In TypeScript, this never happens.\n _pair = new Pair.Pair(pair, pair?.value);\n }\n else\n _pair = new Pair.Pair(pair.key, pair.value);\n const prev = findPair(this.items, _pair.key);\n const sortEntries = this.schema?.sortMapEntries;\n if (prev) {\n if (!overwrite)\n throw new Error(`Key ${_pair.key} already set`);\n // For scalars, keep the old node & its comments and anchors\n if (Node.isScalar(prev.value) && Scalar.isScalarValue(_pair.value))\n prev.value.value = _pair.value;\n else\n prev.value = _pair.value;\n }\n else if (sortEntries) {\n const i = this.items.findIndex(item => sortEntries(_pair, item) < 0);\n if (i === -1)\n this.items.push(_pair);\n else\n this.items.splice(i, 0, _pair);\n }\n else {\n this.items.push(_pair);\n }\n }\n delete(key) {\n const it = findPair(this.items, key);\n if (!it)\n return false;\n const del = this.items.splice(this.items.indexOf(it), 1);\n return del.length > 0;\n }\n get(key, keepScalar) {\n const it = findPair(this.items, key);\n const node = it?.value;\n return (!keepScalar && Node.isScalar(node) ? node.value : node) ?? undefined;\n }\n has(key) {\n return !!findPair(this.items, key);\n }\n set(key, value) {\n this.add(new Pair.Pair(key, value), true);\n }\n /**\n * @param ctx - Conversion context, originally set in Document#toJS()\n * @param {Class} Type - If set, forces the returned collection type\n * @returns Instance of Type, Map, or Object\n */\n toJSON(_, ctx, Type) {\n const map = Type ? new Type() : ctx?.mapAsMap ? new Map() : {};\n if (ctx?.onCreate)\n ctx.onCreate(map);\n for (const item of this.items)\n addPairToJSMap.addPairToJSMap(ctx, map, item);\n return map;\n }\n toString(ctx, onComment, onChompKeep) {\n if (!ctx)\n return JSON.stringify(this);\n for (const item of this.items) {\n if (!Node.isPair(item))\n throw new Error(`Map items must all be pairs; found ${JSON.stringify(item)} instead`);\n }\n if (!ctx.allNullValues && this.hasAllNullValues(false))\n ctx = Object.assign({}, ctx, { allNullValues: true });\n return stringifyCollection.stringifyCollection(this, ctx, {\n blockItemPrefix: '',\n flowChars: { start: '{', end: '}' },\n itemIndent: ctx.indent || '',\n onChompKeep,\n onComment\n });\n }\n}\n\nexports.YAMLMap = YAMLMap;\nexports.findPair = findPair;\n","'use strict';\n\nvar stringifyCollection = require('../stringify/stringifyCollection.js');\nvar Collection = require('./Collection.js');\nvar Node = require('./Node.js');\nvar Scalar = require('./Scalar.js');\nvar toJS = require('./toJS.js');\n\nclass YAMLSeq extends Collection.Collection {\n constructor(schema) {\n super(Node.SEQ, schema);\n this.items = [];\n }\n static get tagName() {\n return 'tag:yaml.org,2002:seq';\n }\n add(value) {\n this.items.push(value);\n }\n /**\n * Removes a value from the collection.\n *\n * `key` must contain a representation of an integer for this to succeed.\n * It may be wrapped in a `Scalar`.\n *\n * @returns `true` if the item was found and removed.\n */\n delete(key) {\n const idx = asItemIndex(key);\n if (typeof idx !== 'number')\n return false;\n const del = this.items.splice(idx, 1);\n return del.length > 0;\n }\n get(key, keepScalar) {\n const idx = asItemIndex(key);\n if (typeof idx !== 'number')\n return undefined;\n const it = this.items[idx];\n return !keepScalar && Node.isScalar(it) ? 
it.value : it;\n }\n /**\n * Checks if the collection includes a value with the key `key`.\n *\n * `key` must contain a representation of an integer for this to succeed.\n * It may be wrapped in a `Scalar`.\n */\n has(key) {\n const idx = asItemIndex(key);\n return typeof idx === 'number' && idx < this.items.length;\n }\n /**\n * Sets a value in this collection. For `!!set`, `value` needs to be a\n * boolean to add/remove the item from the set.\n *\n * If `key` does not contain a representation of an integer, this will throw.\n * It may be wrapped in a `Scalar`.\n */\n set(key, value) {\n const idx = asItemIndex(key);\n if (typeof idx !== 'number')\n throw new Error(`Expected a valid index, not ${key}.`);\n const prev = this.items[idx];\n if (Node.isScalar(prev) && Scalar.isScalarValue(value))\n prev.value = value;\n else\n this.items[idx] = value;\n }\n toJSON(_, ctx) {\n const seq = [];\n if (ctx?.onCreate)\n ctx.onCreate(seq);\n let i = 0;\n for (const item of this.items)\n seq.push(toJS.toJS(item, String(i++), ctx));\n return seq;\n }\n toString(ctx, onComment, onChompKeep) {\n if (!ctx)\n return JSON.stringify(this);\n return stringifyCollection.stringifyCollection(this, ctx, {\n blockItemPrefix: '- ',\n flowChars: { start: '[', end: ']' },\n itemIndent: (ctx.indent || '') + ' ',\n onChompKeep,\n onComment\n });\n }\n}\nfunction asItemIndex(key) {\n let idx = Node.isScalar(key) ? key.value : key;\n if (idx && typeof idx === 'string')\n idx = Number(idx);\n return typeof idx === 'number' && Number.isInteger(idx) && idx >= 0\n ? idx\n : null;\n}\n\nexports.YAMLSeq = YAMLSeq;\n","'use strict';\n\nvar log = require('../log.js');\nvar stringify = require('../stringify/stringify.js');\nvar Node = require('./Node.js');\nvar Scalar = require('./Scalar.js');\nvar toJS = require('./toJS.js');\n\nconst MERGE_KEY = '<<';\nfunction addPairToJSMap(ctx, map, { key, value }) {\n if (ctx?.doc.schema.merge && isMergeKey(key)) {\n value = Node.isAlias(value) ? value.resolve(ctx.doc) : value;\n if (Node.isSeq(value))\n for (const it of value.items)\n mergeToJSMap(ctx, map, it);\n else if (Array.isArray(value))\n for (const it of value)\n mergeToJSMap(ctx, map, it);\n else\n mergeToJSMap(ctx, map, value);\n }\n else {\n const jsKey = toJS.toJS(key, '', ctx);\n if (map instanceof Map) {\n map.set(jsKey, toJS.toJS(value, jsKey, ctx));\n }\n else if (map instanceof Set) {\n map.add(jsKey);\n }\n else {\n const stringKey = stringifyKey(key, jsKey, ctx);\n const jsValue = toJS.toJS(value, stringKey, ctx);\n if (stringKey in map)\n Object.defineProperty(map, stringKey, {\n value: jsValue,\n writable: true,\n enumerable: true,\n configurable: true\n });\n else\n map[stringKey] = jsValue;\n }\n }\n return map;\n}\nconst isMergeKey = (key) => key === MERGE_KEY ||\n (Node.isScalar(key) &&\n key.value === MERGE_KEY &&\n (!key.type || key.type === Scalar.Scalar.PLAIN));\n// If the value associated with a merge key is a single mapping node, each of\n// its key/value pairs is inserted into the current mapping, unless the key\n// already exists in it. If the value associated with the merge key is a\n// sequence, then this sequence is expected to contain mapping nodes and each\n// of these nodes is merged in turn according to its order in the sequence.\n// Keys in mapping nodes earlier in the sequence override keys specified in\n// later mapping nodes. -- http://yaml.org/type/merge.html\nfunction mergeToJSMap(ctx, map, value) {\n const source = ctx && Node.isAlias(value) ? 
value.resolve(ctx.doc) : value;\n if (!Node.isMap(source))\n throw new Error('Merge sources must be maps or map aliases');\n const srcMap = source.toJSON(null, ctx, Map);\n for (const [key, value] of srcMap) {\n if (map instanceof Map) {\n if (!map.has(key))\n map.set(key, value);\n }\n else if (map instanceof Set) {\n map.add(key);\n }\n else if (!Object.prototype.hasOwnProperty.call(map, key)) {\n Object.defineProperty(map, key, {\n value,\n writable: true,\n enumerable: true,\n configurable: true\n });\n }\n }\n return map;\n}\nfunction stringifyKey(key, jsKey, ctx) {\n if (jsKey === null)\n return '';\n if (typeof jsKey !== 'object')\n return String(jsKey);\n if (Node.isNode(key) && ctx && ctx.doc) {\n const strCtx = stringify.createStringifyContext(ctx.doc, {});\n strCtx.anchors = new Set();\n for (const node of ctx.anchors.keys())\n strCtx.anchors.add(node.anchor);\n strCtx.inFlow = true;\n strCtx.inStringifyKey = true;\n const strKey = key.toString(strCtx);\n if (!ctx.mapKeyWarned) {\n let jsonStr = JSON.stringify(strKey);\n if (jsonStr.length > 40)\n jsonStr = jsonStr.substring(0, 36) + '...\"';\n log.warn(ctx.doc.options.logLevel, `Keys with collection values will be stringified due to JS Object restrictions: ${jsonStr}. Set mapAsMap: true to use object keys.`);\n ctx.mapKeyWarned = true;\n }\n return strKey;\n }\n return JSON.stringify(jsKey);\n}\n\nexports.addPairToJSMap = addPairToJSMap;\n","'use strict';\n\nvar Node = require('./Node.js');\n\n/**\n * Recursively convert any node or its contents to native JavaScript\n *\n * @param value - The input value\n * @param arg - If `value` defines a `toJSON()` method, use this\n * as its first argument\n * @param ctx - Conversion context, originally set in Document#toJS(). If\n * `{ keep: true }` is not set, output should be suitable for JSON\n * stringification.\n */\nfunction toJS(value, arg, ctx) {\n // eslint-disable-next-line @typescript-eslint/no-unsafe-return\n if (Array.isArray(value))\n return value.map((v, i) => toJS(v, String(i), ctx));\n if (value && typeof value.toJSON === 'function') {\n // eslint-disable-next-line @typescript-eslint/no-unsafe-call\n if (!ctx || !Node.hasAnchor(value))\n return value.toJSON(arg, ctx);\n const data = { aliasCount: 0, count: 1, res: undefined };\n ctx.anchors.set(value, data);\n ctx.onCreate = res => {\n data.res = res;\n delete ctx.onCreate;\n };\n const res = value.toJSON(arg, ctx);\n if (ctx.onCreate)\n ctx.onCreate(res);\n return res;\n }\n if (typeof value === 'bigint' && !ctx?.keep)\n return Number(value);\n return value;\n}\n\nexports.toJS = toJS;\n","'use strict';\n\nvar resolveBlockScalar = require('../compose/resolve-block-scalar.js');\nvar resolveFlowScalar = require('../compose/resolve-flow-scalar.js');\nvar errors = require('../errors.js');\nvar stringifyString = require('../stringify/stringifyString.js');\n\nfunction resolveAsScalar(token, strict = true, onError) {\n if (token) {\n const _onError = (pos, code, message) => {\n const offset = typeof pos === 'number' ? pos : Array.isArray(pos) ? 
pos[0] : pos.offset;\n if (onError)\n onError(offset, code, message);\n else\n throw new errors.YAMLParseError([offset, offset + 1], code, message);\n };\n switch (token.type) {\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar':\n return resolveFlowScalar.resolveFlowScalar(token, strict, _onError);\n case 'block-scalar':\n return resolveBlockScalar.resolveBlockScalar(token, strict, _onError);\n }\n }\n return null;\n}\n/**\n * Create a new scalar token with `value`\n *\n * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,\n * as this function does not support any schema operations and won't check for such conflicts.\n *\n * @param value The string representation of the value, which will have its content properly indented.\n * @param context.end Comments and whitespace after the end of the value, or after the block scalar header. If undefined, a newline will be added.\n * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.\n * @param context.indent The indent level of the token.\n * @param context.inFlow Is this scalar within a flow collection? This may affect the resolved type of the token's value.\n * @param context.offset The offset position of the token.\n * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.\n */\nfunction createScalarToken(value, context) {\n const { implicitKey = false, indent, inFlow = false, offset = -1, type = 'PLAIN' } = context;\n const source = stringifyString.stringifyString({ type, value }, {\n implicitKey,\n indent: indent > 0 ? ' '.repeat(indent) : '',\n inFlow,\n options: { blockQuote: true, lineWidth: -1 }\n });\n const end = context.end ?? [\n { type: 'newline', offset: -1, indent, source: '\\n' }\n ];\n switch (source[0]) {\n case '|':\n case '>': {\n const he = source.indexOf('\\n');\n const head = source.substring(0, he);\n const body = source.substring(he + 1) + '\\n';\n const props = [\n { type: 'block-scalar-header', offset, indent, source: head }\n ];\n if (!addEndtoBlockProps(props, end))\n props.push({ type: 'newline', offset: -1, indent, source: '\\n' });\n return { type: 'block-scalar', offset, indent, props, source: body };\n }\n case '\"':\n return { type: 'double-quoted-scalar', offset, indent, source, end };\n case \"'\":\n return { type: 'single-quoted-scalar', offset, indent, source, end };\n default:\n return { type: 'scalar', offset, indent, source, end };\n }\n}\n/**\n * Set the value of `token` to the given string `value`, overwriting any previous contents and type that it may have.\n *\n * Best efforts are made to retain any comments previously associated with the `token`,\n * though all contents within a collection's `items` will be overwritten.\n *\n * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,\n * as this function does not support any schema operations and won't check for such conflicts.\n *\n * @param token Any token. 
If it does not include an `indent` value, the value will be stringified as if it were an implicit key.\n * @param value The string representation of the value, which will have its content properly indented.\n * @param context.afterKey In most cases, values after a key should have an additional level of indentation.\n * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.\n * @param context.inFlow Being within a flow collection may affect the resolved type of the token's value.\n * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.\n */\nfunction setScalarValue(token, value, context = {}) {\n let { afterKey = false, implicitKey = false, inFlow = false, type } = context;\n let indent = 'indent' in token ? token.indent : null;\n if (afterKey && typeof indent === 'number')\n indent += 2;\n if (!type)\n switch (token.type) {\n case 'single-quoted-scalar':\n type = 'QUOTE_SINGLE';\n break;\n case 'double-quoted-scalar':\n type = 'QUOTE_DOUBLE';\n break;\n case 'block-scalar': {\n const header = token.props[0];\n if (header.type !== 'block-scalar-header')\n throw new Error('Invalid block scalar header');\n type = header.source[0] === '>' ? 'BLOCK_FOLDED' : 'BLOCK_LITERAL';\n break;\n }\n default:\n type = 'PLAIN';\n }\n const source = stringifyString.stringifyString({ type, value }, {\n implicitKey: implicitKey || indent === null,\n indent: indent !== null && indent > 0 ? ' '.repeat(indent) : '',\n inFlow,\n options: { blockQuote: true, lineWidth: -1 }\n });\n switch (source[0]) {\n case '|':\n case '>':\n setBlockScalarValue(token, source);\n break;\n case '\"':\n setFlowScalarValue(token, source, 'double-quoted-scalar');\n break;\n case \"'\":\n setFlowScalarValue(token, source, 'single-quoted-scalar');\n break;\n default:\n setFlowScalarValue(token, source, 'scalar');\n }\n}\nfunction setBlockScalarValue(token, source) {\n const he = source.indexOf('\\n');\n const head = source.substring(0, he);\n const body = source.substring(he + 1) + '\\n';\n if (token.type === 'block-scalar') {\n const header = token.props[0];\n if (header.type !== 'block-scalar-header')\n throw new Error('Invalid block scalar header');\n header.source = head;\n token.source = body;\n }\n else {\n const { offset } = token;\n const indent = 'indent' in token ? token.indent : -1;\n const props = [\n { type: 'block-scalar-header', offset, indent, source: head }\n ];\n if (!addEndtoBlockProps(props, 'end' in token ? 
token.end : undefined))\n props.push({ type: 'newline', offset: -1, indent, source: '\\n' });\n for (const key of Object.keys(token))\n if (key !== 'type' && key !== 'offset')\n delete token[key];\n Object.assign(token, { type: 'block-scalar', indent, props, source: body });\n }\n}\n/** @returns `true` if last token is a newline */\nfunction addEndtoBlockProps(props, end) {\n if (end)\n for (const st of end)\n switch (st.type) {\n case 'space':\n case 'comment':\n props.push(st);\n break;\n case 'newline':\n props.push(st);\n return true;\n }\n return false;\n}\nfunction setFlowScalarValue(token, source, type) {\n switch (token.type) {\n case 'scalar':\n case 'double-quoted-scalar':\n case 'single-quoted-scalar':\n token.type = type;\n token.source = source;\n break;\n case 'block-scalar': {\n const end = token.props.slice(1);\n let oa = source.length;\n if (token.props[0].type === 'block-scalar-header')\n oa -= token.props[0].source.length;\n for (const tok of end)\n tok.offset += oa;\n delete token.props;\n Object.assign(token, { type, source, end });\n break;\n }\n case 'block-map':\n case 'block-seq': {\n const offset = token.offset + source.length;\n const nl = { type: 'newline', offset, indent: token.indent, source: '\\n' };\n delete token.items;\n Object.assign(token, { type, source, end: [nl] });\n break;\n }\n default: {\n const indent = 'indent' in token ? token.indent : -1;\n const end = 'end' in token && Array.isArray(token.end)\n ? token.end.filter(st => st.type === 'space' ||\n st.type === 'comment' ||\n st.type === 'newline')\n : [];\n for (const key of Object.keys(token))\n if (key !== 'type' && key !== 'offset')\n delete token[key];\n Object.assign(token, { type, indent, source, end });\n }\n }\n}\n\nexports.createScalarToken = createScalarToken;\nexports.resolveAsScalar = resolveAsScalar;\nexports.setScalarValue = setScalarValue;\n","'use strict';\n\n/**\n * Stringify a CST document, token, or collection item\n *\n * Fair warning: This applies no validation whatsoever, and\n * simply concatenates the sources in their logical order.\n */\nconst stringify = (cst) => 'type' in cst ? 
stringifyToken(cst) : stringifyItem(cst);\nfunction stringifyToken(token) {\n switch (token.type) {\n case 'block-scalar': {\n let res = '';\n for (const tok of token.props)\n res += stringifyToken(tok);\n return res + token.source;\n }\n case 'block-map':\n case 'block-seq': {\n let res = '';\n for (const item of token.items)\n res += stringifyItem(item);\n return res;\n }\n case 'flow-collection': {\n let res = token.start.source;\n for (const item of token.items)\n res += stringifyItem(item);\n for (const st of token.end)\n res += st.source;\n return res;\n }\n case 'document': {\n let res = stringifyItem(token);\n if (token.end)\n for (const st of token.end)\n res += st.source;\n return res;\n }\n default: {\n let res = token.source;\n if ('end' in token && token.end)\n for (const st of token.end)\n res += st.source;\n return res;\n }\n }\n}\nfunction stringifyItem({ start, key, sep, value }) {\n let res = '';\n for (const st of start)\n res += st.source;\n if (key)\n res += stringifyToken(key);\n if (sep)\n for (const st of sep)\n res += st.source;\n if (value)\n res += stringifyToken(value);\n return res;\n}\n\nexports.stringify = stringify;\n","'use strict';\n\nconst BREAK = Symbol('break visit');\nconst SKIP = Symbol('skip children');\nconst REMOVE = Symbol('remove item');\n/**\n * Apply a visitor to a CST document or item.\n *\n * Walks through the tree (depth-first) starting from the root, calling a\n * `visitor` function with two arguments when entering each item:\n * - `item`: The current item, which included the following members:\n * - `start: SourceToken[]` – Source tokens before the key or value,\n * possibly including its anchor or tag.\n * - `key?: Token | null` – Set for pair values. May then be `null`, if\n * the key before the `:` separator is empty.\n * - `sep?: SourceToken[]` – Source tokens between the key and the value,\n * which should include the `:` map value indicator if `value` is set.\n * - `value?: Token` – The value of a sequence item, or of a map pair.\n * - `path`: The steps from the root to the current node, as an array of\n * `['key' | 'value', number]` tuples.\n *\n * The return value of the visitor may be used to control the traversal:\n * - `undefined` (default): Do nothing and continue\n * - `visit.SKIP`: Do not visit the children of this token, continue with\n * next sibling\n * - `visit.BREAK`: Terminate traversal completely\n * - `visit.REMOVE`: Remove the current item, then continue with the next one\n * - `number`: Set the index of the next step. This is useful especially if\n * the index of the current token has changed.\n * - `function`: Define the next visitor for this item. 
After the original\n * visitor is called on item entry, next visitors are called after handling\n * a non-empty `key` and when exiting the item.\n */\nfunction visit(cst, visitor) {\n if ('type' in cst && cst.type === 'document')\n cst = { start: cst.start, value: cst.value };\n _visit(Object.freeze([]), cst, visitor);\n}\n// Without the `as symbol` casts, TS declares these in the `visit`\n// namespace using `var`, but then complains about that because\n// `unique symbol` must be `const`.\n/** Terminate visit traversal completely */\nvisit.BREAK = BREAK;\n/** Do not visit the children of the current item */\nvisit.SKIP = SKIP;\n/** Remove the current item */\nvisit.REMOVE = REMOVE;\n/** Find the item at `path` from `cst` as the root */\nvisit.itemAtPath = (cst, path) => {\n let item = cst;\n for (const [field, index] of path) {\n const tok = item?.[field];\n if (tok && 'items' in tok) {\n item = tok.items[index];\n }\n else\n return undefined;\n }\n return item;\n};\n/**\n * Get the immediate parent collection of the item at `path` from `cst` as the root.\n *\n * Throws an error if the collection is not found, which should never happen if the item itself exists.\n */\nvisit.parentCollection = (cst, path) => {\n const parent = visit.itemAtPath(cst, path.slice(0, -1));\n const field = path[path.length - 1][0];\n const coll = parent?.[field];\n if (coll && 'items' in coll)\n return coll;\n throw new Error('Parent collection not found');\n};\nfunction _visit(path, item, visitor) {\n let ctrl = visitor(item, path);\n if (typeof ctrl === 'symbol')\n return ctrl;\n for (const field of ['key', 'value']) {\n const token = item[field];\n if (token && 'items' in token) {\n for (let i = 0; i < token.items.length; ++i) {\n const ci = _visit(Object.freeze(path.concat([[field, i]])), token.items[i], visitor);\n if (typeof ci === 'number')\n i = ci - 1;\n else if (ci === BREAK)\n return BREAK;\n else if (ci === REMOVE) {\n token.items.splice(i, 1);\n i -= 1;\n }\n }\n if (typeof ctrl === 'function' && field === 'key')\n ctrl = ctrl(item, path);\n }\n }\n return typeof ctrl === 'function' ? ctrl(item, path) : ctrl;\n}\n\nexports.visit = visit;\n","'use strict';\n\nvar cstScalar = require('./cst-scalar.js');\nvar cstStringify = require('./cst-stringify.js');\nvar cstVisit = require('./cst-visit.js');\n\n/** The byte order mark */\nconst BOM = '\\u{FEFF}';\n/** Start of doc-mode */\nconst DOCUMENT = '\\x02'; // C0: Start of Text\n/** Unexpected end of flow-mode */\nconst FLOW_END = '\\x18'; // C0: Cancel\n/** Next token is a scalar value */\nconst SCALAR = '\\x1f'; // C0: Unit Separator\n/** @returns `true` if `token` is a flow or block collection */\nconst isCollection = (token) => !!token && 'items' in token;\n/** @returns `true` if `token` is a flow or block scalar; not an alias */\nconst isScalar = (token) => !!token &&\n (token.type === 'scalar' ||\n token.type === 'single-quoted-scalar' ||\n token.type === 'double-quoted-scalar' ||\n token.type === 'block-scalar');\n/* istanbul ignore next */\n/** Get a printable representation of a lexer token */\nfunction prettyToken(token) {\n switch (token) {\n case BOM:\n return '';\n case DOCUMENT:\n return '';\n case FLOW_END:\n return '';\n case SCALAR:\n return '';\n default:\n return JSON.stringify(token);\n }\n}\n/** Identify the type of a lexer token. May return `null` for unknown tokens. 
*/\nfunction tokenType(source) {\n switch (source) {\n case BOM:\n return 'byte-order-mark';\n case DOCUMENT:\n return 'doc-mode';\n case FLOW_END:\n return 'flow-error-end';\n case SCALAR:\n return 'scalar';\n case '---':\n return 'doc-start';\n case '...':\n return 'doc-end';\n case '':\n case '\\n':\n case '\\r\\n':\n return 'newline';\n case '-':\n return 'seq-item-ind';\n case '?':\n return 'explicit-key-ind';\n case ':':\n return 'map-value-ind';\n case '{':\n return 'flow-map-start';\n case '}':\n return 'flow-map-end';\n case '[':\n return 'flow-seq-start';\n case ']':\n return 'flow-seq-end';\n case ',':\n return 'comma';\n }\n switch (source[0]) {\n case ' ':\n case '\\t':\n return 'space';\n case '#':\n return 'comment';\n case '%':\n return 'directive-line';\n case '*':\n return 'alias';\n case '&':\n return 'anchor';\n case '!':\n return 'tag';\n case \"'\":\n return 'single-quoted-scalar';\n case '\"':\n return 'double-quoted-scalar';\n case '|':\n case '>':\n return 'block-scalar-header';\n }\n return null;\n}\n\nexports.createScalarToken = cstScalar.createScalarToken;\nexports.resolveAsScalar = cstScalar.resolveAsScalar;\nexports.setScalarValue = cstScalar.setScalarValue;\nexports.stringify = cstStringify.stringify;\nexports.visit = cstVisit.visit;\nexports.BOM = BOM;\nexports.DOCUMENT = DOCUMENT;\nexports.FLOW_END = FLOW_END;\nexports.SCALAR = SCALAR;\nexports.isCollection = isCollection;\nexports.isScalar = isScalar;\nexports.prettyToken = prettyToken;\nexports.tokenType = tokenType;\n","'use strict';\n\nvar cst = require('./cst.js');\n\n/*\nSTART -> stream\n\nstream\n directive -> line-end -> stream\n indent + line-end -> stream\n [else] -> line-start\n\nline-end\n comment -> line-end\n newline -> .\n input-end -> END\n\nline-start\n doc-start -> doc\n doc-end -> stream\n [else] -> indent -> block-start\n\nblock-start\n seq-item-start -> block-start\n explicit-key-start -> block-start\n map-value-start -> block-start\n [else] -> doc\n\ndoc\n line-end -> line-start\n spaces -> doc\n anchor -> doc\n tag -> doc\n flow-start -> flow -> doc\n flow-end -> error -> doc\n seq-item-start -> error -> doc\n explicit-key-start -> error -> doc\n map-value-start -> doc\n alias -> doc\n quote-start -> quoted-scalar -> doc\n block-scalar-header -> line-end -> block-scalar(min) -> line-start\n [else] -> plain-scalar(false, min) -> doc\n\nflow\n line-end -> flow\n spaces -> flow\n anchor -> flow\n tag -> flow\n flow-start -> flow -> flow\n flow-end -> .\n seq-item-start -> error -> flow\n explicit-key-start -> flow\n map-value-start -> flow\n alias -> flow\n quote-start -> quoted-scalar -> flow\n comma -> flow\n [else] -> plain-scalar(true, 0) -> flow\n\nquoted-scalar\n quote-end -> .\n [else] -> quoted-scalar\n\nblock-scalar(min)\n newline + peek(indent < min) -> .\n [else] -> block-scalar(min)\n\nplain-scalar(is-flow, min)\n scalar-end(is-flow) -> .\n peek(newline + (indent < min)) -> .\n [else] -> plain-scalar(min)\n*/\nfunction isEmpty(ch) {\n switch (ch) {\n case undefined:\n case ' ':\n case '\\n':\n case '\\r':\n case '\\t':\n return true;\n default:\n return false;\n }\n}\nconst hexDigits = '0123456789ABCDEFabcdef'.split('');\nconst tagChars = \"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-#;/?:@&=+$_.!~*'()\".split('');\nconst invalidFlowScalarChars = ',[]{}'.split('');\nconst invalidAnchorChars = ' ,[]{}\\n\\r\\t'.split('');\nconst isNotAnchorChar = (ch) => !ch || invalidAnchorChars.includes(ch);\n/**\n * Splits an input string into lexical tokens, i.e. 
smaller strings that are\n * easily identifiable by `tokens.tokenType()`.\n *\n * Lexing starts always in a \"stream\" context. Incomplete input may be buffered\n * until a complete token can be emitted.\n *\n * In addition to slices of the original input, the following control characters\n * may also be emitted:\n *\n * - `\\x02` (Start of Text): A document starts with the next token\n * - `\\x18` (Cancel): Unexpected end of flow-mode (indicates an error)\n * - `\\x1f` (Unit Separator): Next token is a scalar value\n * - `\\u{FEFF}` (Byte order mark): Emitted separately outside documents\n */\nclass Lexer {\n constructor() {\n /**\n * Flag indicating whether the end of the current buffer marks the end of\n * all input\n */\n this.atEnd = false;\n /**\n * Explicit indent set in block scalar header, as an offset from the current\n * minimum indent, so e.g. set to 1 from a header `|2+`. Set to -1 if not\n * explicitly set.\n */\n this.blockScalarIndent = -1;\n /**\n * Block scalars that include a + (keep) chomping indicator in their header\n * include trailing empty lines, which are otherwise excluded from the\n * scalar's contents.\n */\n this.blockScalarKeep = false;\n /** Current input */\n this.buffer = '';\n /**\n * Flag noting whether the map value indicator : can immediately follow this\n * node within a flow context.\n */\n this.flowKey = false;\n /** Count of surrounding flow collection levels. */\n this.flowLevel = 0;\n /**\n * Minimum level of indentation required for next lines to be parsed as a\n * part of the current scalar value.\n */\n this.indentNext = 0;\n /** Indentation level of the current line. */\n this.indentValue = 0;\n /** Position of the next \\n character. */\n this.lineEndPos = null;\n /** Stores the state of the lexer if reaching the end of incpomplete input */\n this.next = null;\n /** A pointer to `buffer`; the current position of the lexer. */\n this.pos = 0;\n }\n /**\n * Generate YAML tokens from the `source` string. If `incomplete`,\n * a part of the last line may be left as a buffer for the next call.\n *\n * @returns A generator of lexical tokens\n */\n *lex(source, incomplete = false) {\n if (source) {\n this.buffer = this.buffer ? this.buffer + source : source;\n this.lineEndPos = null;\n }\n this.atEnd = !incomplete;\n let next = this.next ?? 'stream';\n while (next && (incomplete || this.hasChars(1)))\n next = yield* this.parseNext(next);\n }\n atLineEnd() {\n let i = this.pos;\n let ch = this.buffer[i];\n while (ch === ' ' || ch === '\\t')\n ch = this.buffer[++i];\n if (!ch || ch === '#' || ch === '\\n')\n return true;\n if (ch === '\\r')\n return this.buffer[i + 1] === '\\n';\n return false;\n }\n charAt(n) {\n return this.buffer[this.pos + n];\n }\n continueScalar(offset) {\n let ch = this.buffer[offset];\n if (this.indentNext > 0) {\n let indent = 0;\n while (ch === ' ')\n ch = this.buffer[++indent + offset];\n if (ch === '\\r') {\n const next = this.buffer[indent + offset + 1];\n if (next === '\\n' || (!next && !this.atEnd))\n return offset + indent + 1;\n }\n return ch === '\\n' || indent >= this.indentNext || (!ch && !this.atEnd)\n ? 
offset + indent\n : -1;\n }\n if (ch === '-' || ch === '.') {\n const dt = this.buffer.substr(offset, 3);\n if ((dt === '---' || dt === '...') && isEmpty(this.buffer[offset + 3]))\n return -1;\n }\n return offset;\n }\n getLine() {\n let end = this.lineEndPos;\n if (typeof end !== 'number' || (end !== -1 && end < this.pos)) {\n end = this.buffer.indexOf('\\n', this.pos);\n this.lineEndPos = end;\n }\n if (end === -1)\n return this.atEnd ? this.buffer.substring(this.pos) : null;\n if (this.buffer[end - 1] === '\\r')\n end -= 1;\n return this.buffer.substring(this.pos, end);\n }\n hasChars(n) {\n return this.pos + n <= this.buffer.length;\n }\n setNext(state) {\n this.buffer = this.buffer.substring(this.pos);\n this.pos = 0;\n this.lineEndPos = null;\n this.next = state;\n return null;\n }\n peek(n) {\n return this.buffer.substr(this.pos, n);\n }\n *parseNext(next) {\n switch (next) {\n case 'stream':\n return yield* this.parseStream();\n case 'line-start':\n return yield* this.parseLineStart();\n case 'block-start':\n return yield* this.parseBlockStart();\n case 'doc':\n return yield* this.parseDocument();\n case 'flow':\n return yield* this.parseFlowCollection();\n case 'quoted-scalar':\n return yield* this.parseQuotedScalar();\n case 'block-scalar':\n return yield* this.parseBlockScalar();\n case 'plain-scalar':\n return yield* this.parsePlainScalar();\n }\n }\n *parseStream() {\n let line = this.getLine();\n if (line === null)\n return this.setNext('stream');\n if (line[0] === cst.BOM) {\n yield* this.pushCount(1);\n line = line.substring(1);\n }\n if (line[0] === '%') {\n let dirEnd = line.length;\n const cs = line.indexOf('#');\n if (cs !== -1) {\n const ch = line[cs - 1];\n if (ch === ' ' || ch === '\\t')\n dirEnd = cs - 1;\n }\n while (true) {\n const ch = line[dirEnd - 1];\n if (ch === ' ' || ch === '\\t')\n dirEnd -= 1;\n else\n break;\n }\n const n = (yield* this.pushCount(dirEnd)) + (yield* this.pushSpaces(true));\n yield* this.pushCount(line.length - n); // possible comment\n this.pushNewline();\n return 'stream';\n }\n if (this.atLineEnd()) {\n const sp = yield* this.pushSpaces(true);\n yield* this.pushCount(line.length - sp);\n yield* this.pushNewline();\n return 'stream';\n }\n yield cst.DOCUMENT;\n return yield* this.parseLineStart();\n }\n *parseLineStart() {\n const ch = this.charAt(0);\n if (!ch && !this.atEnd)\n return this.setNext('line-start');\n if (ch === '-' || ch === '.') {\n if (!this.atEnd && !this.hasChars(4))\n return this.setNext('line-start');\n const s = this.peek(3);\n if (s === '---' && isEmpty(this.charAt(3))) {\n yield* this.pushCount(3);\n this.indentValue = 0;\n this.indentNext = 0;\n return 'doc';\n }\n else if (s === '...' && isEmpty(this.charAt(3))) {\n yield* this.pushCount(3);\n return 'stream';\n }\n }\n this.indentValue = yield* this.pushSpaces(false);\n if (this.indentNext > this.indentValue && !isEmpty(this.charAt(1)))\n this.indentNext = this.indentValue;\n return yield* this.parseBlockStart();\n }\n *parseBlockStart() {\n const [ch0, ch1] = this.peek(2);\n if (!ch1 && !this.atEnd)\n return this.setNext('block-start');\n if ((ch0 === '-' || ch0 === '?' 
|| ch0 === ':') && isEmpty(ch1)) {\n const n = (yield* this.pushCount(1)) + (yield* this.pushSpaces(true));\n this.indentNext = this.indentValue + 1;\n this.indentValue += n;\n return yield* this.parseBlockStart();\n }\n return 'doc';\n }\n *parseDocument() {\n yield* this.pushSpaces(true);\n const line = this.getLine();\n if (line === null)\n return this.setNext('doc');\n let n = yield* this.pushIndicators();\n switch (line[n]) {\n case '#':\n yield* this.pushCount(line.length - n);\n // fallthrough\n case undefined:\n yield* this.pushNewline();\n return yield* this.parseLineStart();\n case '{':\n case '[':\n yield* this.pushCount(1);\n this.flowKey = false;\n this.flowLevel = 1;\n return 'flow';\n case '}':\n case ']':\n // this is an error\n yield* this.pushCount(1);\n return 'doc';\n case '*':\n yield* this.pushUntil(isNotAnchorChar);\n return 'doc';\n case '\"':\n case \"'\":\n return yield* this.parseQuotedScalar();\n case '|':\n case '>':\n n += yield* this.parseBlockScalarHeader();\n n += yield* this.pushSpaces(true);\n yield* this.pushCount(line.length - n);\n yield* this.pushNewline();\n return yield* this.parseBlockScalar();\n default:\n return yield* this.parsePlainScalar();\n }\n }\n *parseFlowCollection() {\n let nl, sp;\n let indent = -1;\n do {\n nl = yield* this.pushNewline();\n if (nl > 0) {\n sp = yield* this.pushSpaces(false);\n this.indentValue = indent = sp;\n }\n else {\n sp = 0;\n }\n sp += yield* this.pushSpaces(true);\n } while (nl + sp > 0);\n const line = this.getLine();\n if (line === null)\n return this.setNext('flow');\n if ((indent !== -1 && indent < this.indentNext && line[0] !== '#') ||\n (indent === 0 &&\n (line.startsWith('---') || line.startsWith('...')) &&\n isEmpty(line[3]))) {\n // Allowing for the terminal ] or } at the same (rather than greater)\n // indent level as the initial [ or { is technically invalid, but\n // failing here would be surprising to users.\n const atFlowEndMarker = indent === this.indentNext - 1 &&\n this.flowLevel === 1 &&\n (line[0] === ']' || line[0] === '}');\n if (!atFlowEndMarker) {\n // this is an error\n this.flowLevel = 0;\n yield cst.FLOW_END;\n return yield* this.parseLineStart();\n }\n }\n let n = 0;\n while (line[n] === ',') {\n n += yield* this.pushCount(1);\n n += yield* this.pushSpaces(true);\n this.flowKey = false;\n }\n n += yield* this.pushIndicators();\n switch (line[n]) {\n case undefined:\n return 'flow';\n case '#':\n yield* this.pushCount(line.length - n);\n return 'flow';\n case '{':\n case '[':\n yield* this.pushCount(1);\n this.flowKey = false;\n this.flowLevel += 1;\n return 'flow';\n case '}':\n case ']':\n yield* this.pushCount(1);\n this.flowKey = true;\n this.flowLevel -= 1;\n return this.flowLevel ? 
'flow' : 'doc';\n case '*':\n yield* this.pushUntil(isNotAnchorChar);\n return 'flow';\n case '\"':\n case \"'\":\n this.flowKey = true;\n return yield* this.parseQuotedScalar();\n case ':': {\n const next = this.charAt(1);\n if (this.flowKey || isEmpty(next) || next === ',') {\n this.flowKey = false;\n yield* this.pushCount(1);\n yield* this.pushSpaces(true);\n return 'flow';\n }\n }\n // fallthrough\n default:\n this.flowKey = false;\n return yield* this.parsePlainScalar();\n }\n }\n *parseQuotedScalar() {\n const quote = this.charAt(0);\n let end = this.buffer.indexOf(quote, this.pos + 1);\n if (quote === \"'\") {\n while (end !== -1 && this.buffer[end + 1] === \"'\")\n end = this.buffer.indexOf(\"'\", end + 2);\n }\n else {\n // double-quote\n while (end !== -1) {\n let n = 0;\n while (this.buffer[end - 1 - n] === '\\\\')\n n += 1;\n if (n % 2 === 0)\n break;\n end = this.buffer.indexOf('\"', end + 1);\n }\n }\n // Only looking for newlines within the quotes\n const qb = this.buffer.substring(0, end);\n let nl = qb.indexOf('\\n', this.pos);\n if (nl !== -1) {\n while (nl !== -1) {\n const cs = this.continueScalar(nl + 1);\n if (cs === -1)\n break;\n nl = qb.indexOf('\\n', cs);\n }\n if (nl !== -1) {\n // this is an error caused by an unexpected unindent\n end = nl - (qb[nl - 1] === '\\r' ? 2 : 1);\n }\n }\n if (end === -1) {\n if (!this.atEnd)\n return this.setNext('quoted-scalar');\n end = this.buffer.length;\n }\n yield* this.pushToIndex(end + 1, false);\n return this.flowLevel ? 'flow' : 'doc';\n }\n *parseBlockScalarHeader() {\n this.blockScalarIndent = -1;\n this.blockScalarKeep = false;\n let i = this.pos;\n while (true) {\n const ch = this.buffer[++i];\n if (ch === '+')\n this.blockScalarKeep = true;\n else if (ch > '0' && ch <= '9')\n this.blockScalarIndent = Number(ch) - 1;\n else if (ch !== '-')\n break;\n }\n return yield* this.pushUntil(ch => isEmpty(ch) || ch === '#');\n }\n *parseBlockScalar() {\n let nl = this.pos - 1; // may be -1 if this.pos === 0\n let indent = 0;\n let ch;\n loop: for (let i = this.pos; (ch = this.buffer[i]); ++i) {\n switch (ch) {\n case ' ':\n indent += 1;\n break;\n case '\\n':\n nl = i;\n indent = 0;\n break;\n case '\\r': {\n const next = this.buffer[i + 1];\n if (!next && !this.atEnd)\n return this.setNext('block-scalar');\n if (next === '\\n')\n break;\n } // fallthrough\n default:\n break loop;\n }\n }\n if (!ch && !this.atEnd)\n return this.setNext('block-scalar');\n if (indent >= this.indentNext) {\n if (this.blockScalarIndent === -1)\n this.indentNext = indent;\n else\n this.indentNext += this.blockScalarIndent;\n do {\n const cs = this.continueScalar(nl + 1);\n if (cs === -1)\n break;\n nl = this.buffer.indexOf('\\n', cs);\n } while (nl !== -1);\n if (nl === -1) {\n if (!this.atEnd)\n return this.setNext('block-scalar');\n nl = this.buffer.length;\n }\n }\n if (!this.blockScalarKeep) {\n do {\n let i = nl - 1;\n let ch = this.buffer[i];\n if (ch === '\\r')\n ch = this.buffer[--i];\n const lastChar = i; // Drop the line if last char not more indented\n while (ch === ' ' || ch === '\\t')\n ch = this.buffer[--i];\n if (ch === '\\n' && i >= this.pos && i + 1 + indent > lastChar)\n nl = i;\n else\n break;\n } while (true);\n }\n yield cst.SCALAR;\n yield* this.pushToIndex(nl + 1, true);\n return yield* this.parseLineStart();\n }\n *parsePlainScalar() {\n const inFlow = this.flowLevel > 0;\n let end = this.pos - 1;\n let i = this.pos - 1;\n let ch;\n while ((ch = this.buffer[++i])) {\n if (ch === ':') {\n const next = this.buffer[i + 1];\n if 
(isEmpty(next) || (inFlow && next === ','))\n break;\n end = i;\n }\n else if (isEmpty(ch)) {\n let next = this.buffer[i + 1];\n if (ch === '\\r') {\n if (next === '\\n') {\n i += 1;\n ch = '\\n';\n next = this.buffer[i + 1];\n }\n else\n end = i;\n }\n if (next === '#' || (inFlow && invalidFlowScalarChars.includes(next)))\n break;\n if (ch === '\\n') {\n const cs = this.continueScalar(i + 1);\n if (cs === -1)\n break;\n i = Math.max(i, cs - 2); // to advance, but still account for ' #'\n }\n }\n else {\n if (inFlow && invalidFlowScalarChars.includes(ch))\n break;\n end = i;\n }\n }\n if (!ch && !this.atEnd)\n return this.setNext('plain-scalar');\n yield cst.SCALAR;\n yield* this.pushToIndex(end + 1, true);\n return inFlow ? 'flow' : 'doc';\n }\n *pushCount(n) {\n if (n > 0) {\n yield this.buffer.substr(this.pos, n);\n this.pos += n;\n return n;\n }\n return 0;\n }\n *pushToIndex(i, allowEmpty) {\n const s = this.buffer.slice(this.pos, i);\n if (s) {\n yield s;\n this.pos += s.length;\n return s.length;\n }\n else if (allowEmpty)\n yield '';\n return 0;\n }\n *pushIndicators() {\n switch (this.charAt(0)) {\n case '!':\n return ((yield* this.pushTag()) +\n (yield* this.pushSpaces(true)) +\n (yield* this.pushIndicators()));\n case '&':\n return ((yield* this.pushUntil(isNotAnchorChar)) +\n (yield* this.pushSpaces(true)) +\n (yield* this.pushIndicators()));\n case '-': // this is an error\n case '?': // this is an error outside flow collections\n case ':': {\n const inFlow = this.flowLevel > 0;\n const ch1 = this.charAt(1);\n if (isEmpty(ch1) || (inFlow && invalidFlowScalarChars.includes(ch1))) {\n if (!inFlow)\n this.indentNext = this.indentValue + 1;\n else if (this.flowKey)\n this.flowKey = false;\n return ((yield* this.pushCount(1)) +\n (yield* this.pushSpaces(true)) +\n (yield* this.pushIndicators()));\n }\n }\n }\n return 0;\n }\n *pushTag() {\n if (this.charAt(1) === '<') {\n let i = this.pos + 2;\n let ch = this.buffer[i];\n while (!isEmpty(ch) && ch !== '>')\n ch = this.buffer[++i];\n return yield* this.pushToIndex(ch === '>' ? i + 1 : i, false);\n }\n else {\n let i = this.pos + 1;\n let ch = this.buffer[i];\n while (ch) {\n if (tagChars.includes(ch))\n ch = this.buffer[++i];\n else if (ch === '%' &&\n hexDigits.includes(this.buffer[i + 1]) &&\n hexDigits.includes(this.buffer[i + 2])) {\n ch = this.buffer[(i += 3)];\n }\n else\n break;\n }\n return yield* this.pushToIndex(i, false);\n }\n }\n *pushNewline() {\n const ch = this.buffer[this.pos];\n if (ch === '\\n')\n return yield* this.pushCount(1);\n else if (ch === '\\r' && this.charAt(1) === '\\n')\n return yield* this.pushCount(2);\n else\n return 0;\n }\n *pushSpaces(allowTabs) {\n let i = this.pos - 1;\n let ch;\n do {\n ch = this.buffer[++i];\n } while (ch === ' ' || (allowTabs && ch === '\\t'));\n const n = i - this.pos;\n if (n > 0) {\n yield this.buffer.substr(this.pos, n);\n this.pos = i;\n }\n return n;\n }\n *pushUntil(test) {\n let i = this.pos;\n let ch = this.buffer[i];\n while (!test(ch))\n ch = this.buffer[++i];\n return yield* this.pushToIndex(i, false);\n }\n}\n\nexports.Lexer = Lexer;\n","'use strict';\n\n/**\n * Tracks newlines during parsing in order to provide an efficient API for\n * determining the one-indexed `{ line, col }` position for any offset\n * within the input.\n */\nclass LineCounter {\n constructor() {\n this.lineStarts = [];\n /**\n * Should be called in ascending order. 
Otherwise, call\n * `lineCounter.lineStarts.sort()` before calling `linePos()`.\n */\n this.addNewLine = (offset) => this.lineStarts.push(offset);\n /**\n * Performs a binary search and returns the 1-indexed { line, col }\n * position of `offset`. If `line === 0`, `addNewLine` has never been\n * called or `offset` is before the first known newline.\n */\n this.linePos = (offset) => {\n let low = 0;\n let high = this.lineStarts.length;\n while (low < high) {\n const mid = (low + high) >> 1; // Math.floor((low + high) / 2)\n if (this.lineStarts[mid] < offset)\n low = mid + 1;\n else\n high = mid;\n }\n if (this.lineStarts[low] === offset)\n return { line: low + 1, col: 1 };\n if (low === 0)\n return { line: 0, col: offset };\n const start = this.lineStarts[low - 1];\n return { line: low, col: offset - start + 1 };\n };\n }\n}\n\nexports.LineCounter = LineCounter;\n","'use strict';\n\nvar cst = require('./cst.js');\nvar lexer = require('./lexer.js');\n\nfunction includesToken(list, type) {\n for (let i = 0; i < list.length; ++i)\n if (list[i].type === type)\n return true;\n return false;\n}\nfunction findNonEmptyIndex(list) {\n for (let i = 0; i < list.length; ++i) {\n switch (list[i].type) {\n case 'space':\n case 'comment':\n case 'newline':\n break;\n default:\n return i;\n }\n }\n return -1;\n}\nfunction isFlowToken(token) {\n switch (token?.type) {\n case 'alias':\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar':\n case 'flow-collection':\n return true;\n default:\n return false;\n }\n}\nfunction getPrevProps(parent) {\n switch (parent.type) {\n case 'document':\n return parent.start;\n case 'block-map': {\n const it = parent.items[parent.items.length - 1];\n return it.sep ?? it.start;\n }\n case 'block-seq':\n return parent.items[parent.items.length - 1].start;\n /* istanbul ignore next should not happen */\n default:\n return [];\n }\n}\n/** Note: May modify input array */\nfunction getFirstKeyStartProps(prev) {\n if (prev.length === 0)\n return [];\n let i = prev.length;\n loop: while (--i >= 0) {\n switch (prev[i].type) {\n case 'doc-start':\n case 'explicit-key-ind':\n case 'map-value-ind':\n case 'seq-item-ind':\n case 'newline':\n break loop;\n }\n }\n while (prev[++i]?.type === 'space') {\n /* loop */\n }\n return prev.splice(i, prev.length);\n}\nfunction fixFlowSeqItems(fc) {\n if (fc.start.type === 'flow-seq-start') {\n for (const it of fc.items) {\n if (it.sep &&\n !it.value &&\n !includesToken(it.start, 'explicit-key-ind') &&\n !includesToken(it.sep, 'map-value-ind')) {\n if (it.key)\n it.value = it.key;\n delete it.key;\n if (isFlowToken(it.value)) {\n if (it.value.end)\n Array.prototype.push.apply(it.value.end, it.sep);\n else\n it.value.end = it.sep;\n }\n else\n Array.prototype.push.apply(it.start, it.sep);\n delete it.sep;\n }\n }\n }\n}\n/**\n * A YAML concrete syntax tree (CST) parser\n *\n * ```ts\n * const src: string = ...\n * for (const token of new Parser().parse(src)) {\n * // token: Token\n * }\n * ```\n *\n * To use the parser with a user-provided lexer:\n *\n * ```ts\n * function* parse(source: string, lexer: Lexer) {\n * const parser = new Parser()\n * for (const lexeme of lexer.lex(source))\n * yield* parser.next(lexeme)\n * yield* parser.end()\n * }\n *\n * const src: string = ...\n * const lexer = new Lexer()\n * for (const token of parse(src, lexer)) {\n * // token: Token\n * }\n * ```\n */\nclass Parser {\n /**\n * @param onNewLine - If defined, called separately with the start position of\n * each new line (in `parse()`, 
including the start of input).\n */\n constructor(onNewLine) {\n /** If true, space and sequence indicators count as indentation */\n this.atNewLine = true;\n /** If true, next token is a scalar value */\n this.atScalar = false;\n /** Current indentation level */\n this.indent = 0;\n /** Current offset since the start of parsing */\n this.offset = 0;\n /** On the same line with a block map key */\n this.onKeyLine = false;\n /** Top indicates the node that's currently being built */\n this.stack = [];\n /** The source of the current token, set in parse() */\n this.source = '';\n /** The type of the current token, set in parse() */\n this.type = '';\n // Must be defined after `next()`\n this.lexer = new lexer.Lexer();\n this.onNewLine = onNewLine;\n }\n /**\n * Parse `source` as a YAML stream.\n * If `incomplete`, a part of the last line may be left as a buffer for the next call.\n *\n * Errors are not thrown, but yielded as `{ type: 'error', message }` tokens.\n *\n * @returns A generator of tokens representing each directive, document, and other structure.\n */\n *parse(source, incomplete = false) {\n if (this.onNewLine && this.offset === 0)\n this.onNewLine(0);\n for (const lexeme of this.lexer.lex(source, incomplete))\n yield* this.next(lexeme);\n if (!incomplete)\n yield* this.end();\n }\n /**\n * Advance the parser by the `source` of one lexical token.\n */\n *next(source) {\n this.source = source;\n if (process.env.LOG_TOKENS)\n console.log('|', cst.prettyToken(source));\n if (this.atScalar) {\n this.atScalar = false;\n yield* this.step();\n this.offset += source.length;\n return;\n }\n const type = cst.tokenType(source);\n if (!type) {\n const message = `Not a YAML token: ${source}`;\n yield* this.pop({ type: 'error', offset: this.offset, message, source });\n this.offset += source.length;\n }\n else if (type === 'scalar') {\n this.atNewLine = false;\n this.atScalar = true;\n this.type = 'scalar';\n }\n else {\n this.type = type;\n yield* this.step();\n switch (type) {\n case 'newline':\n this.atNewLine = true;\n this.indent = 0;\n if (this.onNewLine)\n this.onNewLine(this.offset + source.length);\n break;\n case 'space':\n if (this.atNewLine && source[0] === ' ')\n this.indent += source.length;\n break;\n case 'explicit-key-ind':\n case 'map-value-ind':\n case 'seq-item-ind':\n if (this.atNewLine)\n this.indent += source.length;\n break;\n case 'doc-mode':\n case 'flow-error-end':\n return;\n default:\n this.atNewLine = false;\n }\n this.offset += source.length;\n }\n }\n /** Call at end of input to push out any remaining constructions */\n *end() {\n while (this.stack.length > 0)\n yield* this.pop();\n }\n get sourceToken() {\n const st = {\n type: this.type,\n offset: this.offset,\n indent: this.indent,\n source: this.source\n };\n return st;\n }\n *step() {\n const top = this.peek(1);\n if (this.type === 'doc-end' && (!top || top.type !== 'doc-end')) {\n while (this.stack.length > 0)\n yield* this.pop();\n this.stack.push({\n type: 'doc-end',\n offset: this.offset,\n source: this.source\n });\n return;\n }\n if (!top)\n return yield* this.stream();\n switch (top.type) {\n case 'document':\n return yield* this.document(top);\n case 'alias':\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar':\n return yield* this.scalar(top);\n case 'block-scalar':\n return yield* this.blockScalar(top);\n case 'block-map':\n return yield* this.blockMap(top);\n case 'block-seq':\n return yield* this.blockSequence(top);\n case 'flow-collection':\n return yield* 
this.flowCollection(top);\n case 'doc-end':\n return yield* this.documentEnd(top);\n }\n /* istanbul ignore next should not happen */\n yield* this.pop();\n }\n peek(n) {\n return this.stack[this.stack.length - n];\n }\n *pop(error) {\n const token = error ?? this.stack.pop();\n /* istanbul ignore if should not happen */\n if (!token) {\n const message = 'Tried to pop an empty stack';\n yield { type: 'error', offset: this.offset, source: '', message };\n }\n else if (this.stack.length === 0) {\n yield token;\n }\n else {\n const top = this.peek(1);\n if (token.type === 'block-scalar') {\n // Block scalars use their parent rather than header indent\n token.indent = 'indent' in top ? top.indent : 0;\n }\n else if (token.type === 'flow-collection' && top.type === 'document') {\n // Ignore all indent for top-level flow collections\n token.indent = 0;\n }\n if (token.type === 'flow-collection')\n fixFlowSeqItems(token);\n switch (top.type) {\n case 'document':\n top.value = token;\n break;\n case 'block-scalar':\n top.props.push(token); // error\n break;\n case 'block-map': {\n const it = top.items[top.items.length - 1];\n if (it.value) {\n top.items.push({ start: [], key: token, sep: [] });\n this.onKeyLine = true;\n return;\n }\n else if (it.sep) {\n it.value = token;\n }\n else {\n Object.assign(it, { key: token, sep: [] });\n this.onKeyLine = !includesToken(it.start, 'explicit-key-ind');\n return;\n }\n break;\n }\n case 'block-seq': {\n const it = top.items[top.items.length - 1];\n if (it.value)\n top.items.push({ start: [], value: token });\n else\n it.value = token;\n break;\n }\n case 'flow-collection': {\n const it = top.items[top.items.length - 1];\n if (!it || it.value)\n top.items.push({ start: [], key: token, sep: [] });\n else if (it.sep)\n it.value = token;\n else\n Object.assign(it, { key: token, sep: [] });\n return;\n }\n /* istanbul ignore next should not happen */\n default:\n yield* this.pop();\n yield* this.pop(token);\n }\n if ((top.type === 'document' ||\n top.type === 'block-map' ||\n top.type === 'block-seq') &&\n (token.type === 'block-map' || token.type === 'block-seq')) {\n const last = token.items[token.items.length - 1];\n if (last &&\n !last.sep &&\n !last.value &&\n last.start.length > 0 &&\n findNonEmptyIndex(last.start) === -1 &&\n (token.indent === 0 ||\n last.start.every(st => st.type !== 'comment' || st.indent < token.indent))) {\n if (top.type === 'document')\n top.end = last.start;\n else\n top.items.push({ start: last.start });\n token.items.splice(-1, 1);\n }\n }\n }\n }\n *stream() {\n switch (this.type) {\n case 'directive-line':\n yield { type: 'directive', offset: this.offset, source: this.source };\n return;\n case 'byte-order-mark':\n case 'space':\n case 'comment':\n case 'newline':\n yield this.sourceToken;\n return;\n case 'doc-mode':\n case 'doc-start': {\n const doc = {\n type: 'document',\n offset: this.offset,\n start: []\n };\n if (this.type === 'doc-start')\n doc.start.push(this.sourceToken);\n this.stack.push(doc);\n return;\n }\n }\n yield {\n type: 'error',\n offset: this.offset,\n message: `Unexpected ${this.type} token in YAML stream`,\n source: this.source\n };\n }\n *document(doc) {\n if (doc.value)\n return yield* this.lineEnd(doc);\n switch (this.type) {\n case 'doc-start': {\n if (findNonEmptyIndex(doc.start) !== -1) {\n yield* this.pop();\n yield* this.step();\n }\n else\n doc.start.push(this.sourceToken);\n return;\n }\n case 'anchor':\n case 'tag':\n case 'space':\n case 'comment':\n case 'newline':\n 
doc.start.push(this.sourceToken);\n return;\n }\n const bv = this.startBlockValue(doc);\n if (bv)\n this.stack.push(bv);\n else {\n yield {\n type: 'error',\n offset: this.offset,\n message: `Unexpected ${this.type} token in YAML document`,\n source: this.source\n };\n }\n }\n *scalar(scalar) {\n if (this.type === 'map-value-ind') {\n const prev = getPrevProps(this.peek(2));\n const start = getFirstKeyStartProps(prev);\n let sep;\n if (scalar.end) {\n sep = scalar.end;\n sep.push(this.sourceToken);\n delete scalar.end;\n }\n else\n sep = [this.sourceToken];\n const map = {\n type: 'block-map',\n offset: scalar.offset,\n indent: scalar.indent,\n items: [{ start, key: scalar, sep }]\n };\n this.onKeyLine = true;\n this.stack[this.stack.length - 1] = map;\n }\n else\n yield* this.lineEnd(scalar);\n }\n *blockScalar(scalar) {\n switch (this.type) {\n case 'space':\n case 'comment':\n case 'newline':\n scalar.props.push(this.sourceToken);\n return;\n case 'scalar':\n scalar.source = this.source;\n // block-scalar source includes trailing newline\n this.atNewLine = true;\n this.indent = 0;\n if (this.onNewLine) {\n let nl = this.source.indexOf('\\n') + 1;\n while (nl !== 0) {\n this.onNewLine(this.offset + nl);\n nl = this.source.indexOf('\\n', nl) + 1;\n }\n }\n yield* this.pop();\n break;\n /* istanbul ignore next should not happen */\n default:\n yield* this.pop();\n yield* this.step();\n }\n }\n *blockMap(map) {\n const it = map.items[map.items.length - 1];\n // it.sep is true-ish if pair already has key or : separator\n switch (this.type) {\n case 'newline':\n this.onKeyLine = false;\n if (it.value) {\n const end = 'end' in it.value ? it.value.end : undefined;\n const last = Array.isArray(end) ? end[end.length - 1] : undefined;\n if (last?.type === 'comment')\n end?.push(this.sourceToken);\n else\n map.items.push({ start: [this.sourceToken] });\n }\n else if (it.sep) {\n it.sep.push(this.sourceToken);\n }\n else {\n it.start.push(this.sourceToken);\n }\n return;\n case 'space':\n case 'comment':\n if (it.value) {\n map.items.push({ start: [this.sourceToken] });\n }\n else if (it.sep) {\n it.sep.push(this.sourceToken);\n }\n else {\n if (this.atIndentedComment(it.start, map.indent)) {\n const prev = map.items[map.items.length - 2];\n const end = prev?.value?.end;\n if (Array.isArray(end)) {\n Array.prototype.push.apply(end, it.start);\n end.push(this.sourceToken);\n map.items.pop();\n return;\n }\n }\n it.start.push(this.sourceToken);\n }\n return;\n }\n if (this.indent >= map.indent) {\n const atNextItem = !this.onKeyLine && this.indent === map.indent && it.sep;\n // For empty nodes, assign newline-separated not indented empty tokens to following node\n let start = [];\n if (atNextItem && it.sep && !it.value) {\n const nl = [];\n for (let i = 0; i < it.sep.length; ++i) {\n const st = it.sep[i];\n switch (st.type) {\n case 'newline':\n nl.push(i);\n break;\n case 'space':\n break;\n case 'comment':\n if (st.indent > map.indent)\n nl.length = 0;\n break;\n default:\n nl.length = 0;\n }\n }\n if (nl.length >= 2)\n start = it.sep.splice(nl[1]);\n }\n switch (this.type) {\n case 'anchor':\n case 'tag':\n if (atNextItem || it.value) {\n start.push(this.sourceToken);\n map.items.push({ start });\n this.onKeyLine = true;\n }\n else if (it.sep) {\n it.sep.push(this.sourceToken);\n }\n else {\n it.start.push(this.sourceToken);\n }\n return;\n case 'explicit-key-ind':\n if (!it.sep && !includesToken(it.start, 'explicit-key-ind')) {\n it.start.push(this.sourceToken);\n }\n else if (atNextItem || 
it.value) {\n start.push(this.sourceToken);\n map.items.push({ start });\n }\n else {\n this.stack.push({\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start: [this.sourceToken] }]\n });\n }\n this.onKeyLine = true;\n return;\n case 'map-value-ind':\n if (includesToken(it.start, 'explicit-key-ind')) {\n if (!it.sep) {\n if (includesToken(it.start, 'newline')) {\n Object.assign(it, { key: null, sep: [this.sourceToken] });\n }\n else {\n const start = getFirstKeyStartProps(it.start);\n this.stack.push({\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start, key: null, sep: [this.sourceToken] }]\n });\n }\n }\n else if (it.value) {\n map.items.push({ start: [], key: null, sep: [this.sourceToken] });\n }\n else if (includesToken(it.sep, 'map-value-ind')) {\n this.stack.push({\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start, key: null, sep: [this.sourceToken] }]\n });\n }\n else if (isFlowToken(it.key) &&\n !includesToken(it.sep, 'newline')) {\n const start = getFirstKeyStartProps(it.start);\n const key = it.key;\n const sep = it.sep;\n sep.push(this.sourceToken);\n // @ts-expect-error type guard is wrong here\n delete it.key, delete it.sep;\n this.stack.push({\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start, key, sep }]\n });\n }\n else if (start.length > 0) {\n // Not actually at next item\n it.sep = it.sep.concat(start, this.sourceToken);\n }\n else {\n it.sep.push(this.sourceToken);\n }\n }\n else {\n if (!it.sep) {\n Object.assign(it, { key: null, sep: [this.sourceToken] });\n }\n else if (it.value || atNextItem) {\n map.items.push({ start, key: null, sep: [this.sourceToken] });\n }\n else if (includesToken(it.sep, 'map-value-ind')) {\n this.stack.push({\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start: [], key: null, sep: [this.sourceToken] }]\n });\n }\n else {\n it.sep.push(this.sourceToken);\n }\n }\n this.onKeyLine = true;\n return;\n case 'alias':\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar': {\n const fs = this.flowScalar(this.type);\n if (atNextItem || it.value) {\n map.items.push({ start, key: fs, sep: [] });\n this.onKeyLine = true;\n }\n else if (it.sep) {\n this.stack.push(fs);\n }\n else {\n Object.assign(it, { key: fs, sep: [] });\n this.onKeyLine = true;\n }\n return;\n }\n default: {\n const bv = this.startBlockValue(map);\n if (bv) {\n if (atNextItem &&\n bv.type !== 'block-seq' &&\n includesToken(it.start, 'explicit-key-ind')) {\n map.items.push({ start });\n }\n this.stack.push(bv);\n return;\n }\n }\n }\n }\n yield* this.pop();\n yield* this.step();\n }\n *blockSequence(seq) {\n const it = seq.items[seq.items.length - 1];\n switch (this.type) {\n case 'newline':\n if (it.value) {\n const end = 'end' in it.value ? it.value.end : undefined;\n const last = Array.isArray(end) ? 
end[end.length - 1] : undefined;\n if (last?.type === 'comment')\n end?.push(this.sourceToken);\n else\n seq.items.push({ start: [this.sourceToken] });\n }\n else\n it.start.push(this.sourceToken);\n return;\n case 'space':\n case 'comment':\n if (it.value)\n seq.items.push({ start: [this.sourceToken] });\n else {\n if (this.atIndentedComment(it.start, seq.indent)) {\n const prev = seq.items[seq.items.length - 2];\n const end = prev?.value?.end;\n if (Array.isArray(end)) {\n Array.prototype.push.apply(end, it.start);\n end.push(this.sourceToken);\n seq.items.pop();\n return;\n }\n }\n it.start.push(this.sourceToken);\n }\n return;\n case 'anchor':\n case 'tag':\n if (it.value || this.indent <= seq.indent)\n break;\n it.start.push(this.sourceToken);\n return;\n case 'seq-item-ind':\n if (this.indent !== seq.indent)\n break;\n if (it.value || includesToken(it.start, 'seq-item-ind'))\n seq.items.push({ start: [this.sourceToken] });\n else\n it.start.push(this.sourceToken);\n return;\n }\n if (this.indent > seq.indent) {\n const bv = this.startBlockValue(seq);\n if (bv) {\n this.stack.push(bv);\n return;\n }\n }\n yield* this.pop();\n yield* this.step();\n }\n *flowCollection(fc) {\n const it = fc.items[fc.items.length - 1];\n if (this.type === 'flow-error-end') {\n let top;\n do {\n yield* this.pop();\n top = this.peek(1);\n } while (top && top.type === 'flow-collection');\n }\n else if (fc.end.length === 0) {\n switch (this.type) {\n case 'comma':\n case 'explicit-key-ind':\n if (!it || it.sep)\n fc.items.push({ start: [this.sourceToken] });\n else\n it.start.push(this.sourceToken);\n return;\n case 'map-value-ind':\n if (!it || it.value)\n fc.items.push({ start: [], key: null, sep: [this.sourceToken] });\n else if (it.sep)\n it.sep.push(this.sourceToken);\n else\n Object.assign(it, { key: null, sep: [this.sourceToken] });\n return;\n case 'space':\n case 'comment':\n case 'newline':\n case 'anchor':\n case 'tag':\n if (!it || it.value)\n fc.items.push({ start: [this.sourceToken] });\n else if (it.sep)\n it.sep.push(this.sourceToken);\n else\n it.start.push(this.sourceToken);\n return;\n case 'alias':\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar': {\n const fs = this.flowScalar(this.type);\n if (!it || it.value)\n fc.items.push({ start: [], key: fs, sep: [] });\n else if (it.sep)\n this.stack.push(fs);\n else\n Object.assign(it, { key: fs, sep: [] });\n return;\n }\n case 'flow-map-end':\n case 'flow-seq-end':\n fc.end.push(this.sourceToken);\n return;\n }\n const bv = this.startBlockValue(fc);\n /* istanbul ignore else should not happen */\n if (bv)\n this.stack.push(bv);\n else {\n yield* this.pop();\n yield* this.step();\n }\n }\n else {\n const parent = this.peek(2);\n if (parent.type === 'block-map' &&\n ((this.type === 'map-value-ind' && parent.indent === fc.indent) ||\n (this.type === 'newline' &&\n !parent.items[parent.items.length - 1].sep))) {\n yield* this.pop();\n yield* this.step();\n }\n else if (this.type === 'map-value-ind' &&\n parent.type !== 'flow-collection') {\n const prev = getPrevProps(parent);\n const start = getFirstKeyStartProps(prev);\n fixFlowSeqItems(fc);\n const sep = fc.end.splice(1, fc.end.length);\n sep.push(this.sourceToken);\n const map = {\n type: 'block-map',\n offset: fc.offset,\n indent: fc.indent,\n items: [{ start, key: fc, sep }]\n };\n this.onKeyLine = true;\n this.stack[this.stack.length - 1] = map;\n }\n else {\n yield* this.lineEnd(fc);\n }\n }\n }\n flowScalar(type) {\n if (this.onNewLine) {\n let nl = 
this.source.indexOf('\\n') + 1;\n while (nl !== 0) {\n this.onNewLine(this.offset + nl);\n nl = this.source.indexOf('\\n', nl) + 1;\n }\n }\n return {\n type,\n offset: this.offset,\n indent: this.indent,\n source: this.source\n };\n }\n startBlockValue(parent) {\n switch (this.type) {\n case 'alias':\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar':\n return this.flowScalar(this.type);\n case 'block-scalar-header':\n return {\n type: 'block-scalar',\n offset: this.offset,\n indent: this.indent,\n props: [this.sourceToken],\n source: ''\n };\n case 'flow-map-start':\n case 'flow-seq-start':\n return {\n type: 'flow-collection',\n offset: this.offset,\n indent: this.indent,\n start: this.sourceToken,\n items: [],\n end: []\n };\n case 'seq-item-ind':\n return {\n type: 'block-seq',\n offset: this.offset,\n indent: this.indent,\n items: [{ start: [this.sourceToken] }]\n };\n case 'explicit-key-ind': {\n this.onKeyLine = true;\n const prev = getPrevProps(parent);\n const start = getFirstKeyStartProps(prev);\n start.push(this.sourceToken);\n return {\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start }]\n };\n }\n case 'map-value-ind': {\n this.onKeyLine = true;\n const prev = getPrevProps(parent);\n const start = getFirstKeyStartProps(prev);\n return {\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start, key: null, sep: [this.sourceToken] }]\n };\n }\n }\n return null;\n }\n atIndentedComment(start, indent) {\n if (this.type !== 'comment')\n return false;\n if (this.indent <= indent)\n return false;\n return start.every(st => st.type === 'newline' || st.type === 'space');\n }\n *documentEnd(docEnd) {\n if (this.type !== 'doc-mode') {\n if (docEnd.end)\n docEnd.end.push(this.sourceToken);\n else\n docEnd.end = [this.sourceToken];\n if (this.type === 'newline')\n yield* this.pop();\n }\n }\n *lineEnd(token) {\n switch (this.type) {\n case 'comma':\n case 'doc-start':\n case 'doc-end':\n case 'flow-seq-end':\n case 'flow-map-end':\n case 'map-value-ind':\n yield* this.pop();\n yield* this.step();\n break;\n case 'newline':\n this.onKeyLine = false;\n // fallthrough\n case 'space':\n case 'comment':\n default:\n // all other values are errors\n if (token.end)\n token.end.push(this.sourceToken);\n else\n token.end = [this.sourceToken];\n if (this.type === 'newline')\n yield* this.pop();\n }\n }\n}\n\nexports.Parser = Parser;\n","'use strict';\n\nvar composer = require('./compose/composer.js');\nvar Document = require('./doc/Document.js');\nvar errors = require('./errors.js');\nvar log = require('./log.js');\nvar lineCounter = require('./parse/line-counter.js');\nvar parser = require('./parse/parser.js');\n\nfunction parseOptions(options) {\n const prettyErrors = options.prettyErrors !== false;\n const lineCounter$1 = options.lineCounter || (prettyErrors && new lineCounter.LineCounter()) || null;\n return { lineCounter: lineCounter$1, prettyErrors };\n}\n/**\n * Parse the input as a stream of YAML documents.\n *\n * Documents should be separated from each other by `...` or `---` marker lines.\n *\n * @returns If an empty `docs` array is returned, it will be of type\n * EmptyStream and contain additional stream information. 
In\n * TypeScript, you should use `'empty' in docs` as a type guard for it.\n */\nfunction parseAllDocuments(source, options = {}) {\n const { lineCounter, prettyErrors } = parseOptions(options);\n const parser$1 = new parser.Parser(lineCounter?.addNewLine);\n const composer$1 = new composer.Composer(options);\n const docs = Array.from(composer$1.compose(parser$1.parse(source)));\n if (prettyErrors && lineCounter)\n for (const doc of docs) {\n doc.errors.forEach(errors.prettifyError(source, lineCounter));\n doc.warnings.forEach(errors.prettifyError(source, lineCounter));\n }\n if (docs.length > 0)\n return docs;\n return Object.assign([], { empty: true }, composer$1.streamInfo());\n}\n/** Parse an input string into a single YAML.Document */\nfunction parseDocument(source, options = {}) {\n const { lineCounter, prettyErrors } = parseOptions(options);\n const parser$1 = new parser.Parser(lineCounter?.addNewLine);\n const composer$1 = new composer.Composer(options);\n // `doc` is always set by compose.end(true) at the very latest\n let doc = null;\n for (const _doc of composer$1.compose(parser$1.parse(source), true, source.length)) {\n if (!doc)\n doc = _doc;\n else if (doc.options.logLevel !== 'silent') {\n doc.errors.push(new errors.YAMLParseError(_doc.range.slice(0, 2), 'MULTIPLE_DOCS', 'Source contains multiple documents; please use YAML.parseAllDocuments()'));\n break;\n }\n }\n if (prettyErrors && lineCounter) {\n doc.errors.forEach(errors.prettifyError(source, lineCounter));\n doc.warnings.forEach(errors.prettifyError(source, lineCounter));\n }\n return doc;\n}\nfunction parse(src, reviver, options) {\n let _reviver = undefined;\n if (typeof reviver === 'function') {\n _reviver = reviver;\n }\n else if (options === undefined && reviver && typeof reviver === 'object') {\n options = reviver;\n }\n const doc = parseDocument(src, options);\n if (!doc)\n return null;\n doc.warnings.forEach(warning => log.warn(doc.options.logLevel, warning));\n if (doc.errors.length > 0) {\n if (doc.options.logLevel !== 'silent')\n throw doc.errors[0];\n else\n doc.errors = [];\n }\n return doc.toJS(Object.assign({ reviver: _reviver }, options));\n}\nfunction stringify(value, replacer, options) {\n let _replacer = null;\n if (typeof replacer === 'function' || Array.isArray(replacer)) {\n _replacer = replacer;\n }\n else if (options === undefined && replacer) {\n options = replacer;\n }\n if (typeof options === 'string')\n options = options.length;\n if (typeof options === 'number') {\n const indent = Math.round(options);\n options = indent < 1 ? undefined : indent > 8 ? { indent: 8 } : { indent };\n }\n if (value === undefined) {\n const { keepUndefined } = options ?? replacer ?? {};\n if (!keepUndefined)\n return undefined;\n }\n return new Document.Document(value, _replacer, options).toString(options);\n}\n\nexports.parse = parse;\nexports.parseAllDocuments = parseAllDocuments;\nexports.parseDocument = parseDocument;\nexports.stringify = stringify;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar map = require('./common/map.js');\nvar seq = require('./common/seq.js');\nvar string = require('./common/string.js');\nvar tags = require('./tags.js');\n\nconst sortMapEntriesByKey = (a, b) => a.key < b.key ? -1 : a.key > b.key ? 1 : 0;\nclass Schema {\n constructor({ compat, customTags, merge, resolveKnownTags, schema, sortMapEntries, toStringDefaults }) {\n this.compat = Array.isArray(compat)\n ? tags.getTags(compat, 'compat')\n : compat\n ? 
tags.getTags(null, compat)\n : null;\n this.merge = !!merge;\n this.name = (typeof schema === 'string' && schema) || 'core';\n this.knownTags = resolveKnownTags ? tags.coreKnownTags : {};\n this.tags = tags.getTags(customTags, this.name);\n this.toStringOptions = toStringDefaults ?? null;\n Object.defineProperty(this, Node.MAP, { value: map.map });\n Object.defineProperty(this, Node.SCALAR, { value: string.string });\n Object.defineProperty(this, Node.SEQ, { value: seq.seq });\n // Used by createMap()\n this.sortMapEntries =\n typeof sortMapEntries === 'function'\n ? sortMapEntries\n : sortMapEntries === true\n ? sortMapEntriesByKey\n : null;\n }\n clone() {\n const copy = Object.create(Schema.prototype, Object.getOwnPropertyDescriptors(this));\n copy.tags = this.tags.slice();\n return copy;\n }\n}\n\nexports.Schema = Schema;\n","'use strict';\n\nvar Node = require('../../nodes/Node.js');\nvar Pair = require('../../nodes/Pair.js');\nvar YAMLMap = require('../../nodes/YAMLMap.js');\n\nfunction createMap(schema, obj, ctx) {\n const { keepUndefined, replacer } = ctx;\n const map = new YAMLMap.YAMLMap(schema);\n const add = (key, value) => {\n if (typeof replacer === 'function')\n value = replacer.call(obj, key, value);\n else if (Array.isArray(replacer) && !replacer.includes(key))\n return;\n if (value !== undefined || keepUndefined)\n map.items.push(Pair.createPair(key, value, ctx));\n };\n if (obj instanceof Map) {\n for (const [key, value] of obj)\n add(key, value);\n }\n else if (obj && typeof obj === 'object') {\n for (const key of Object.keys(obj))\n add(key, obj[key]);\n }\n if (typeof schema.sortMapEntries === 'function') {\n map.items.sort(schema.sortMapEntries);\n }\n return map;\n}\nconst map = {\n collection: 'map',\n createNode: createMap,\n default: true,\n nodeClass: YAMLMap.YAMLMap,\n tag: 'tag:yaml.org,2002:map',\n resolve(map, onError) {\n if (!Node.isMap(map))\n onError('Expected a mapping for this tag');\n return map;\n }\n};\n\nexports.map = map;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\n\nconst nullTag = {\n identify: value => value == null,\n createNode: () => new Scalar.Scalar(null),\n default: true,\n tag: 'tag:yaml.org,2002:null',\n test: /^(?:~|[Nn]ull|NULL)?$/,\n resolve: () => new Scalar.Scalar(null),\n stringify: ({ source }, ctx) => typeof source === 'string' && nullTag.test.test(source)\n ? source\n : ctx.options.nullStr\n};\n\nexports.nullTag = nullTag;\n","'use strict';\n\nvar createNode = require('../../doc/createNode.js');\nvar Node = require('../../nodes/Node.js');\nvar YAMLSeq = require('../../nodes/YAMLSeq.js');\n\nfunction createSeq(schema, obj, ctx) {\n const { replacer } = ctx;\n const seq = new YAMLSeq.YAMLSeq(schema);\n if (obj && Symbol.iterator in Object(obj)) {\n let i = 0;\n for (let it of obj) {\n if (typeof replacer === 'function') {\n const key = obj instanceof Set ? 
it : String(i++);\n it = replacer.call(obj, key, it);\n }\n seq.items.push(createNode.createNode(it, undefined, ctx));\n }\n }\n return seq;\n}\nconst seq = {\n collection: 'seq',\n createNode: createSeq,\n default: true,\n nodeClass: YAMLSeq.YAMLSeq,\n tag: 'tag:yaml.org,2002:seq',\n resolve(seq, onError) {\n if (!Node.isSeq(seq))\n onError('Expected a sequence for this tag');\n return seq;\n }\n};\n\nexports.seq = seq;\n","'use strict';\n\nvar stringifyString = require('../../stringify/stringifyString.js');\n\nconst string = {\n identify: value => typeof value === 'string',\n default: true,\n tag: 'tag:yaml.org,2002:str',\n resolve: str => str,\n stringify(item, ctx, onComment, onChompKeep) {\n ctx = Object.assign({ actualString: true }, ctx);\n return stringifyString.stringifyString(item, ctx, onComment, onChompKeep);\n }\n};\n\nexports.string = string;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\n\nconst boolTag = {\n identify: value => typeof value === 'boolean',\n default: true,\n tag: 'tag:yaml.org,2002:bool',\n test: /^(?:[Tt]rue|TRUE|[Ff]alse|FALSE)$/,\n resolve: str => new Scalar.Scalar(str[0] === 't' || str[0] === 'T'),\n stringify({ source, value }, ctx) {\n if (source && boolTag.test.test(source)) {\n const sv = source[0] === 't' || source[0] === 'T';\n if (value === sv)\n return source;\n }\n return value ? ctx.options.trueStr : ctx.options.falseStr;\n }\n};\n\nexports.boolTag = boolTag;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\nvar stringifyNumber = require('../../stringify/stringifyNumber.js');\n\nconst floatNaN = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n test: /^(?:[-+]?\\.(?:inf|Inf|INF|nan|NaN|NAN))$/,\n resolve: str => str.slice(-3).toLowerCase() === 'nan'\n ? NaN\n : str[0] === '-'\n ? Number.NEGATIVE_INFINITY\n : Number.POSITIVE_INFINITY,\n stringify: stringifyNumber.stringifyNumber\n};\nconst floatExp = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n format: 'EXP',\n test: /^[-+]?(?:\\.[0-9]+|[0-9]+(?:\\.[0-9]*)?)[eE][-+]?[0-9]+$/,\n resolve: str => parseFloat(str),\n stringify(node) {\n const num = Number(node.value);\n return isFinite(num) ? num.toExponential() : stringifyNumber.stringifyNumber(node);\n }\n};\nconst float = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n test: /^[-+]?(?:\\.[0-9]+|[0-9]+\\.[0-9]*)$/,\n resolve(str) {\n const node = new Scalar.Scalar(parseFloat(str));\n const dot = str.indexOf('.');\n if (dot !== -1 && str[str.length - 1] === '0')\n node.minFractionDigits = str.length - dot - 1;\n return node;\n },\n stringify: stringifyNumber.stringifyNumber\n};\n\nexports.float = float;\nexports.floatExp = floatExp;\nexports.floatNaN = floatNaN;\n","'use strict';\n\nvar stringifyNumber = require('../../stringify/stringifyNumber.js');\n\nconst intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value);\nconst intResolve = (str, offset, radix, { intAsBigInt }) => (intAsBigInt ? 
BigInt(str) : parseInt(str.substring(offset), radix));\nfunction intStringify(node, radix, prefix) {\n const { value } = node;\n if (intIdentify(value) && value >= 0)\n return prefix + value.toString(radix);\n return stringifyNumber.stringifyNumber(node);\n}\nconst intOct = {\n identify: value => intIdentify(value) && value >= 0,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'OCT',\n test: /^0o[0-7]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 2, 8, opt),\n stringify: node => intStringify(node, 8, '0o')\n};\nconst int = {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n test: /^[-+]?[0-9]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt),\n stringify: stringifyNumber.stringifyNumber\n};\nconst intHex = {\n identify: value => intIdentify(value) && value >= 0,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'HEX',\n test: /^0x[0-9a-fA-F]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt),\n stringify: node => intStringify(node, 16, '0x')\n};\n\nexports.int = int;\nexports.intHex = intHex;\nexports.intOct = intOct;\n","'use strict';\n\nvar map = require('../common/map.js');\nvar _null = require('../common/null.js');\nvar seq = require('../common/seq.js');\nvar string = require('../common/string.js');\nvar bool = require('./bool.js');\nvar float = require('./float.js');\nvar int = require('./int.js');\n\nconst schema = [\n map.map,\n seq.seq,\n string.string,\n _null.nullTag,\n bool.boolTag,\n int.intOct,\n int.int,\n int.intHex,\n float.floatNaN,\n float.floatExp,\n float.float\n];\n\nexports.schema = schema;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\nvar map = require('../common/map.js');\nvar seq = require('../common/seq.js');\n\nfunction intIdentify(value) {\n return typeof value === 'bigint' || Number.isInteger(value);\n}\nconst stringifyJSON = ({ value }) => JSON.stringify(value);\nconst jsonScalars = [\n {\n identify: value => typeof value === 'string',\n default: true,\n tag: 'tag:yaml.org,2002:str',\n resolve: str => str,\n stringify: stringifyJSON\n },\n {\n identify: value => value == null,\n createNode: () => new Scalar.Scalar(null),\n default: true,\n tag: 'tag:yaml.org,2002:null',\n test: /^null$/,\n resolve: () => null,\n stringify: stringifyJSON\n },\n {\n identify: value => typeof value === 'boolean',\n default: true,\n tag: 'tag:yaml.org,2002:bool',\n test: /^true|false$/,\n resolve: str => str === 'true',\n stringify: stringifyJSON\n },\n {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n test: /^-?(?:0|[1-9][0-9]*)$/,\n resolve: (str, _onError, { intAsBigInt }) => intAsBigInt ? BigInt(str) : parseInt(str, 10),\n stringify: ({ value }) => intIdentify(value) ? 
value.toString() : JSON.stringify(value)\n },\n {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n test: /^-?(?:0|[1-9][0-9]*)(?:\\.[0-9]*)?(?:[eE][-+]?[0-9]+)?$/,\n resolve: str => parseFloat(str),\n stringify: stringifyJSON\n }\n];\nconst jsonError = {\n default: true,\n tag: '',\n test: /^/,\n resolve(str, onError) {\n onError(`Unresolved plain scalar ${JSON.stringify(str)}`);\n return str;\n }\n};\nconst schema = [map.map, seq.seq].concat(jsonScalars, jsonError);\n\nexports.schema = schema;\n","'use strict';\n\nvar map = require('./common/map.js');\nvar _null = require('./common/null.js');\nvar seq = require('./common/seq.js');\nvar string = require('./common/string.js');\nvar bool = require('./core/bool.js');\nvar float = require('./core/float.js');\nvar int = require('./core/int.js');\nvar schema = require('./core/schema.js');\nvar schema$1 = require('./json/schema.js');\nvar binary = require('./yaml-1.1/binary.js');\nvar omap = require('./yaml-1.1/omap.js');\nvar pairs = require('./yaml-1.1/pairs.js');\nvar schema$2 = require('./yaml-1.1/schema.js');\nvar set = require('./yaml-1.1/set.js');\nvar timestamp = require('./yaml-1.1/timestamp.js');\n\nconst schemas = new Map([\n ['core', schema.schema],\n ['failsafe', [map.map, seq.seq, string.string]],\n ['json', schema$1.schema],\n ['yaml11', schema$2.schema],\n ['yaml-1.1', schema$2.schema]\n]);\nconst tagsByName = {\n binary: binary.binary,\n bool: bool.boolTag,\n float: float.float,\n floatExp: float.floatExp,\n floatNaN: float.floatNaN,\n floatTime: timestamp.floatTime,\n int: int.int,\n intHex: int.intHex,\n intOct: int.intOct,\n intTime: timestamp.intTime,\n map: map.map,\n null: _null.nullTag,\n omap: omap.omap,\n pairs: pairs.pairs,\n seq: seq.seq,\n set: set.set,\n timestamp: timestamp.timestamp\n};\nconst coreKnownTags = {\n 'tag:yaml.org,2002:binary': binary.binary,\n 'tag:yaml.org,2002:omap': omap.omap,\n 'tag:yaml.org,2002:pairs': pairs.pairs,\n 'tag:yaml.org,2002:set': set.set,\n 'tag:yaml.org,2002:timestamp': timestamp.timestamp\n};\nfunction getTags(customTags, schemaName) {\n let tags = schemas.get(schemaName);\n if (!tags) {\n if (Array.isArray(customTags))\n tags = [];\n else {\n const keys = Array.from(schemas.keys())\n .filter(key => key !== 'yaml11')\n .map(key => JSON.stringify(key))\n .join(', ');\n throw new Error(`Unknown schema \"${schemaName}\"; use one of ${keys} or define customTags array`);\n }\n }\n if (Array.isArray(customTags)) {\n for (const tag of customTags)\n tags = tags.concat(tag);\n }\n else if (typeof customTags === 'function') {\n tags = customTags(tags.slice());\n }\n return tags.map(tag => {\n if (typeof tag !== 'string')\n return tag;\n const tagObj = tagsByName[tag];\n if (tagObj)\n return tagObj;\n const keys = Object.keys(tagsByName)\n .map(key => JSON.stringify(key))\n .join(', ');\n throw new Error(`Unknown custom tag \"${tag}\"; use one of ${keys}`);\n });\n}\n\nexports.coreKnownTags = coreKnownTags;\nexports.getTags = getTags;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\nvar stringifyString = require('../../stringify/stringifyString.js');\n\nconst binary = {\n identify: value => value instanceof Uint8Array,\n default: false,\n tag: 'tag:yaml.org,2002:binary',\n /**\n * Returns a Buffer in node and an Uint8Array in browsers\n *\n * To use the resulting buffer as an image, you'll want to do something like:\n *\n * const blob = new Blob([buffer], { type: 'image/jpeg' })\n * document.querySelector('#photo').src = 
URL.createObjectURL(blob)\n */\n resolve(src, onError) {\n if (typeof Buffer === 'function') {\n return Buffer.from(src, 'base64');\n }\n else if (typeof atob === 'function') {\n // On IE 11, atob() can't handle newlines\n const str = atob(src.replace(/[\\n\\r]/g, ''));\n const buffer = new Uint8Array(str.length);\n for (let i = 0; i < str.length; ++i)\n buffer[i] = str.charCodeAt(i);\n return buffer;\n }\n else {\n onError('This environment does not support reading binary tags; either Buffer or atob is required');\n return src;\n }\n },\n stringify({ comment, type, value }, ctx, onComment, onChompKeep) {\n const buf = value; // checked earlier by binary.identify()\n let str;\n if (typeof Buffer === 'function') {\n str =\n buf instanceof Buffer\n ? buf.toString('base64')\n : Buffer.from(buf.buffer).toString('base64');\n }\n else if (typeof btoa === 'function') {\n let s = '';\n for (let i = 0; i < buf.length; ++i)\n s += String.fromCharCode(buf[i]);\n str = btoa(s);\n }\n else {\n throw new Error('This environment does not support writing binary tags; either Buffer or btoa is required');\n }\n if (!type)\n type = Scalar.Scalar.BLOCK_LITERAL;\n if (type !== Scalar.Scalar.QUOTE_DOUBLE) {\n const lineWidth = Math.max(ctx.options.lineWidth - ctx.indent.length, ctx.options.minContentWidth);\n const n = Math.ceil(str.length / lineWidth);\n const lines = new Array(n);\n for (let i = 0, o = 0; i < n; ++i, o += lineWidth) {\n lines[i] = str.substr(o, lineWidth);\n }\n str = lines.join(type === Scalar.Scalar.BLOCK_LITERAL ? '\\n' : ' ');\n }\n return stringifyString.stringifyString({ comment, type, value: str }, ctx, onComment, onChompKeep);\n }\n};\n\nexports.binary = binary;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\n\nfunction boolStringify({ value, source }, ctx) {\n const boolObj = value ? trueTag : falseTag;\n if (source && boolObj.test.test(source))\n return source;\n return value ? ctx.options.trueStr : ctx.options.falseStr;\n}\nconst trueTag = {\n identify: value => value === true,\n default: true,\n tag: 'tag:yaml.org,2002:bool',\n test: /^(?:Y|y|[Yy]es|YES|[Tt]rue|TRUE|[Oo]n|ON)$/,\n resolve: () => new Scalar.Scalar(true),\n stringify: boolStringify\n};\nconst falseTag = {\n identify: value => value === false,\n default: true,\n tag: 'tag:yaml.org,2002:bool',\n test: /^(?:N|n|[Nn]o|NO|[Ff]alse|FALSE|[Oo]ff|OFF)$/i,\n resolve: () => new Scalar.Scalar(false),\n stringify: boolStringify\n};\n\nexports.falseTag = falseTag;\nexports.trueTag = trueTag;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\nvar stringifyNumber = require('../../stringify/stringifyNumber.js');\n\nconst floatNaN = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n test: /^[-+]?\\.(?:inf|Inf|INF|nan|NaN|NAN)$/,\n resolve: (str) => str.slice(-3).toLowerCase() === 'nan'\n ? NaN\n : str[0] === '-'\n ? Number.NEGATIVE_INFINITY\n : Number.POSITIVE_INFINITY,\n stringify: stringifyNumber.stringifyNumber\n};\nconst floatExp = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n format: 'EXP',\n test: /^[-+]?(?:[0-9][0-9_]*)?(?:\\.[0-9_]*)?[eE][-+]?[0-9]+$/,\n resolve: (str) => parseFloat(str.replace(/_/g, '')),\n stringify(node) {\n const num = Number(node.value);\n return isFinite(num) ? 
num.toExponential() : stringifyNumber.stringifyNumber(node);\n }\n};\nconst float = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n test: /^[-+]?(?:[0-9][0-9_]*)?\\.[0-9_]*$/,\n resolve(str) {\n const node = new Scalar.Scalar(parseFloat(str.replace(/_/g, '')));\n const dot = str.indexOf('.');\n if (dot !== -1) {\n const f = str.substring(dot + 1).replace(/_/g, '');\n if (f[f.length - 1] === '0')\n node.minFractionDigits = f.length;\n }\n return node;\n },\n stringify: stringifyNumber.stringifyNumber\n};\n\nexports.float = float;\nexports.floatExp = floatExp;\nexports.floatNaN = floatNaN;\n","'use strict';\n\nvar stringifyNumber = require('../../stringify/stringifyNumber.js');\n\nconst intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value);\nfunction intResolve(str, offset, radix, { intAsBigInt }) {\n const sign = str[0];\n if (sign === '-' || sign === '+')\n offset += 1;\n str = str.substring(offset).replace(/_/g, '');\n if (intAsBigInt) {\n switch (radix) {\n case 2:\n str = `0b${str}`;\n break;\n case 8:\n str = `0o${str}`;\n break;\n case 16:\n str = `0x${str}`;\n break;\n }\n const n = BigInt(str);\n return sign === '-' ? BigInt(-1) * n : n;\n }\n const n = parseInt(str, radix);\n return sign === '-' ? -1 * n : n;\n}\nfunction intStringify(node, radix, prefix) {\n const { value } = node;\n if (intIdentify(value)) {\n const str = value.toString(radix);\n return value < 0 ? '-' + prefix + str.substr(1) : prefix + str;\n }\n return stringifyNumber.stringifyNumber(node);\n}\nconst intBin = {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'BIN',\n test: /^[-+]?0b[0-1_]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 2, 2, opt),\n stringify: node => intStringify(node, 2, '0b')\n};\nconst intOct = {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'OCT',\n test: /^[-+]?0[0-7_]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 1, 8, opt),\n stringify: node => intStringify(node, 8, '0')\n};\nconst int = {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n test: /^[-+]?[0-9][0-9_]*$/,\n resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt),\n stringify: stringifyNumber.stringifyNumber\n};\nconst intHex = {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'HEX',\n test: /^[-+]?0x[0-9a-fA-F_]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt),\n stringify: node => intStringify(node, 16, '0x')\n};\n\nexports.int = int;\nexports.intBin = intBin;\nexports.intHex = intHex;\nexports.intOct = intOct;\n","'use strict';\n\nvar YAMLSeq = require('../../nodes/YAMLSeq.js');\nvar toJS = require('../../nodes/toJS.js');\nvar Node = require('../../nodes/Node.js');\nvar YAMLMap = require('../../nodes/YAMLMap.js');\nvar pairs = require('./pairs.js');\n\nclass YAMLOMap extends YAMLSeq.YAMLSeq {\n constructor() {\n super();\n this.add = YAMLMap.YAMLMap.prototype.add.bind(this);\n this.delete = YAMLMap.YAMLMap.prototype.delete.bind(this);\n this.get = YAMLMap.YAMLMap.prototype.get.bind(this);\n this.has = YAMLMap.YAMLMap.prototype.has.bind(this);\n this.set = YAMLMap.YAMLMap.prototype.set.bind(this);\n this.tag = YAMLOMap.tag;\n }\n /**\n * If `ctx` is given, the return type is actually `Map`,\n * but TypeScript won't allow widening the signature of a child method.\n */\n toJSON(_, ctx) {\n if (!ctx)\n return super.toJSON(_);\n const map = new Map();\n if (ctx?.onCreate)\n 
ctx.onCreate(map);\n for (const pair of this.items) {\n let key, value;\n if (Node.isPair(pair)) {\n key = toJS.toJS(pair.key, '', ctx);\n value = toJS.toJS(pair.value, key, ctx);\n }\n else {\n key = toJS.toJS(pair, '', ctx);\n }\n if (map.has(key))\n throw new Error('Ordered maps must not include duplicate keys');\n map.set(key, value);\n }\n return map;\n }\n}\nYAMLOMap.tag = 'tag:yaml.org,2002:omap';\nconst omap = {\n collection: 'seq',\n identify: value => value instanceof Map,\n nodeClass: YAMLOMap,\n default: false,\n tag: 'tag:yaml.org,2002:omap',\n resolve(seq, onError) {\n const pairs$1 = pairs.resolvePairs(seq, onError);\n const seenKeys = [];\n for (const { key } of pairs$1.items) {\n if (Node.isScalar(key)) {\n if (seenKeys.includes(key.value)) {\n onError(`Ordered maps must not include duplicate keys: ${key.value}`);\n }\n else {\n seenKeys.push(key.value);\n }\n }\n }\n return Object.assign(new YAMLOMap(), pairs$1);\n },\n createNode(schema, iterable, ctx) {\n const pairs$1 = pairs.createPairs(schema, iterable, ctx);\n const omap = new YAMLOMap();\n omap.items = pairs$1.items;\n return omap;\n }\n};\n\nexports.YAMLOMap = YAMLOMap;\nexports.omap = omap;\n","'use strict';\n\nvar Node = require('../../nodes/Node.js');\nvar Pair = require('../../nodes/Pair.js');\nvar Scalar = require('../../nodes/Scalar.js');\nvar YAMLSeq = require('../../nodes/YAMLSeq.js');\n\nfunction resolvePairs(seq, onError) {\n if (Node.isSeq(seq)) {\n for (let i = 0; i < seq.items.length; ++i) {\n let item = seq.items[i];\n if (Node.isPair(item))\n continue;\n else if (Node.isMap(item)) {\n if (item.items.length > 1)\n onError('Each pair must have its own sequence indicator');\n const pair = item.items[0] || new Pair.Pair(new Scalar.Scalar(null));\n if (item.commentBefore)\n pair.key.commentBefore = pair.key.commentBefore\n ? `${item.commentBefore}\\n${pair.key.commentBefore}`\n : item.commentBefore;\n if (item.comment) {\n const cn = pair.value ?? pair.key;\n cn.comment = cn.comment\n ? `${item.comment}\\n${cn.comment}`\n : item.comment;\n }\n item = pair;\n }\n seq.items[i] = Node.isPair(item) ? 
item : new Pair.Pair(item);\n }\n }\n else\n onError('Expected a sequence for this tag');\n return seq;\n}\nfunction createPairs(schema, iterable, ctx) {\n const { replacer } = ctx;\n const pairs = new YAMLSeq.YAMLSeq(schema);\n pairs.tag = 'tag:yaml.org,2002:pairs';\n let i = 0;\n if (iterable && Symbol.iterator in Object(iterable))\n for (let it of iterable) {\n if (typeof replacer === 'function')\n it = replacer.call(iterable, String(i++), it);\n let key, value;\n if (Array.isArray(it)) {\n if (it.length === 2) {\n key = it[0];\n value = it[1];\n }\n else\n throw new TypeError(`Expected [key, value] tuple: ${it}`);\n }\n else if (it && it instanceof Object) {\n const keys = Object.keys(it);\n if (keys.length === 1) {\n key = keys[0];\n value = it[key];\n }\n else\n throw new TypeError(`Expected { key: value } tuple: ${it}`);\n }\n else {\n key = it;\n }\n pairs.items.push(Pair.createPair(key, value, ctx));\n }\n return pairs;\n}\nconst pairs = {\n collection: 'seq',\n default: false,\n tag: 'tag:yaml.org,2002:pairs',\n resolve: resolvePairs,\n createNode: createPairs\n};\n\nexports.createPairs = createPairs;\nexports.pairs = pairs;\nexports.resolvePairs = resolvePairs;\n","'use strict';\n\nvar map = require('../common/map.js');\nvar _null = require('../common/null.js');\nvar seq = require('../common/seq.js');\nvar string = require('../common/string.js');\nvar binary = require('./binary.js');\nvar bool = require('./bool.js');\nvar float = require('./float.js');\nvar int = require('./int.js');\nvar omap = require('./omap.js');\nvar pairs = require('./pairs.js');\nvar set = require('./set.js');\nvar timestamp = require('./timestamp.js');\n\nconst schema = [\n map.map,\n seq.seq,\n string.string,\n _null.nullTag,\n bool.trueTag,\n bool.falseTag,\n int.intBin,\n int.intOct,\n int.int,\n int.intHex,\n float.floatNaN,\n float.floatExp,\n float.float,\n binary.binary,\n omap.omap,\n pairs.pairs,\n set.set,\n timestamp.intTime,\n timestamp.floatTime,\n timestamp.timestamp\n];\n\nexports.schema = schema;\n","'use strict';\n\nvar Node = require('../../nodes/Node.js');\nvar Pair = require('../../nodes/Pair.js');\nvar YAMLMap = require('../../nodes/YAMLMap.js');\n\nclass YAMLSet extends YAMLMap.YAMLMap {\n constructor(schema) {\n super(schema);\n this.tag = YAMLSet.tag;\n }\n add(key) {\n let pair;\n if (Node.isPair(key))\n pair = key;\n else if (typeof key === 'object' &&\n 'key' in key &&\n 'value' in key &&\n key.value === null)\n pair = new Pair.Pair(key.key, null);\n else\n pair = new Pair.Pair(key, null);\n const prev = YAMLMap.findPair(this.items, pair.key);\n if (!prev)\n this.items.push(pair);\n }\n /**\n * If `keepPair` is `true`, returns the Pair matching `key`.\n * Otherwise, returns the value of that Pair's key.\n */\n get(key, keepPair) {\n const pair = YAMLMap.findPair(this.items, key);\n return !keepPair && Node.isPair(pair)\n ? Node.isScalar(pair.key)\n ? 
pair.key.value\n : pair.key\n : pair;\n }\n set(key, value) {\n if (typeof value !== 'boolean')\n throw new Error(`Expected boolean value for set(key, value) in a YAML set, not ${typeof value}`);\n const prev = YAMLMap.findPair(this.items, key);\n if (prev && !value) {\n this.items.splice(this.items.indexOf(prev), 1);\n }\n else if (!prev && value) {\n this.items.push(new Pair.Pair(key));\n }\n }\n toJSON(_, ctx) {\n return super.toJSON(_, ctx, Set);\n }\n toString(ctx, onComment, onChompKeep) {\n if (!ctx)\n return JSON.stringify(this);\n if (this.hasAllNullValues(true))\n return super.toString(Object.assign({}, ctx, { allNullValues: true }), onComment, onChompKeep);\n else\n throw new Error('Set items must all have null values');\n }\n}\nYAMLSet.tag = 'tag:yaml.org,2002:set';\nconst set = {\n collection: 'map',\n identify: value => value instanceof Set,\n nodeClass: YAMLSet,\n default: false,\n tag: 'tag:yaml.org,2002:set',\n resolve(map, onError) {\n if (Node.isMap(map)) {\n if (map.hasAllNullValues(true))\n return Object.assign(new YAMLSet(), map);\n else\n onError('Set items must all have null values');\n }\n else\n onError('Expected a mapping for this tag');\n return map;\n },\n createNode(schema, iterable, ctx) {\n const { replacer } = ctx;\n const set = new YAMLSet(schema);\n if (iterable && Symbol.iterator in Object(iterable))\n for (let value of iterable) {\n if (typeof replacer === 'function')\n value = replacer.call(iterable, value, value);\n set.items.push(Pair.createPair(value, null, ctx));\n }\n return set;\n }\n};\n\nexports.YAMLSet = YAMLSet;\nexports.set = set;\n","'use strict';\n\nvar stringifyNumber = require('../../stringify/stringifyNumber.js');\n\n/** Internal types handle bigint as number, because TS can't figure it out. */\nfunction parseSexagesimal(str, asBigInt) {\n const sign = str[0];\n const parts = sign === '-' || sign === '+' ? str.substring(1) : str;\n const num = (n) => asBigInt ? BigInt(n) : Number(n);\n const res = parts\n .replace(/_/g, '')\n .split(':')\n .reduce((res, p) => res * num(60) + num(p), num(0));\n return (sign === '-' ? num(-1) * res : res);\n}\n/**\n * hhhh:mm:ss.sss\n *\n * Internal types handle bigint as number, because TS can't figure it out.\n */\nfunction stringifySexagesimal(node) {\n let { value } = node;\n let num = (n) => n;\n if (typeof value === 'bigint')\n num = n => BigInt(n);\n else if (isNaN(value) || !isFinite(value))\n return stringifyNumber.stringifyNumber(node);\n let sign = '';\n if (value < 0) {\n sign = '-';\n value *= num(-1);\n }\n const _60 = num(60);\n const parts = [value % _60]; // seconds, including ms\n if (value < 60) {\n parts.unshift(0); // at least one : is required\n }\n else {\n value = (value - parts[0]) / _60;\n parts.unshift(value % _60); // minutes\n if (value >= 60) {\n value = (value - parts[0]) / _60;\n parts.unshift(value); // hours\n }\n }\n return (sign +\n parts\n .map(n => (n < 10 ? 
'0' + String(n) : String(n)))\n .join(':')\n .replace(/000000\\d*$/, '') // % 60 may introduce error\n );\n}\nconst intTime = {\n identify: value => typeof value === 'bigint' || Number.isInteger(value),\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'TIME',\n test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+$/,\n resolve: (str, _onError, { intAsBigInt }) => parseSexagesimal(str, intAsBigInt),\n stringify: stringifySexagesimal\n};\nconst floatTime = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n format: 'TIME',\n test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\\.[0-9_]*$/,\n resolve: str => parseSexagesimal(str, false),\n stringify: stringifySexagesimal\n};\nconst timestamp = {\n identify: value => value instanceof Date,\n default: true,\n tag: 'tag:yaml.org,2002:timestamp',\n // If the time zone is omitted, the timestamp is assumed to be specified in UTC. The time part\n // may be omitted altogether, resulting in a date format. In such a case, the time part is\n // assumed to be 00:00:00Z (start of day, UTC).\n test: RegExp('^([0-9]{4})-([0-9]{1,2})-([0-9]{1,2})' + // YYYY-Mm-Dd\n '(?:' + // time is optional\n '(?:t|T|[ \\\\t]+)' + // t | T | whitespace\n '([0-9]{1,2}):([0-9]{1,2}):([0-9]{1,2}(\\\\.[0-9]+)?)' + // Hh:Mm:Ss(.ss)?\n '(?:[ \\\\t]*(Z|[-+][012]?[0-9](?::[0-9]{2})?))?' + // Z | +5 | -03:30\n ')?$'),\n resolve(str) {\n const match = str.match(timestamp.test);\n if (!match)\n throw new Error('!!timestamp expects a date, starting with yyyy-mm-dd');\n const [, year, month, day, hour, minute, second] = match.map(Number);\n const millisec = match[7] ? Number((match[7] + '00').substr(1, 3)) : 0;\n let date = Date.UTC(year, month - 1, day, hour || 0, minute || 0, second || 0, millisec);\n const tz = match[8];\n if (tz && tz !== 'Z') {\n let d = parseSexagesimal(tz, false);\n if (Math.abs(d) < 30)\n d *= 60;\n date -= 60000 * d;\n }\n return new Date(date);\n },\n stringify: ({ value }) => value.toISOString().replace(/((T00:00)?:00)?\\.000Z$/, '')\n};\n\nexports.floatTime = floatTime;\nexports.intTime = intTime;\nexports.timestamp = timestamp;\n","'use strict';\n\nconst FOLD_FLOW = 'flow';\nconst FOLD_BLOCK = 'block';\nconst FOLD_QUOTED = 'quoted';\n/**\n * Tries to keep input at up to `lineWidth` characters, splitting only on spaces\n * not followed by newlines or spaces unless `mode` is `'quoted'`. 
Lines are\n * terminated with `\\n` and started with `indent`.\n */\nfunction foldFlowLines(text, indent, mode = 'flow', { indentAtStart, lineWidth = 80, minContentWidth = 20, onFold, onOverflow } = {}) {\n if (!lineWidth || lineWidth < 0)\n return text;\n const endStep = Math.max(1 + minContentWidth, 1 + lineWidth - indent.length);\n if (text.length <= endStep)\n return text;\n const folds = [];\n const escapedFolds = {};\n let end = lineWidth - indent.length;\n if (typeof indentAtStart === 'number') {\n if (indentAtStart > lineWidth - Math.max(2, minContentWidth))\n folds.push(0);\n else\n end = lineWidth - indentAtStart;\n }\n let split = undefined;\n let prev = undefined;\n let overflow = false;\n let i = -1;\n let escStart = -1;\n let escEnd = -1;\n if (mode === FOLD_BLOCK) {\n i = consumeMoreIndentedLines(text, i);\n if (i !== -1)\n end = i + endStep;\n }\n for (let ch; (ch = text[(i += 1)]);) {\n if (mode === FOLD_QUOTED && ch === '\\\\') {\n escStart = i;\n switch (text[i + 1]) {\n case 'x':\n i += 3;\n break;\n case 'u':\n i += 5;\n break;\n case 'U':\n i += 9;\n break;\n default:\n i += 1;\n }\n escEnd = i;\n }\n if (ch === '\\n') {\n if (mode === FOLD_BLOCK)\n i = consumeMoreIndentedLines(text, i);\n end = i + endStep;\n split = undefined;\n }\n else {\n if (ch === ' ' &&\n prev &&\n prev !== ' ' &&\n prev !== '\\n' &&\n prev !== '\\t') {\n // space surrounded by non-space can be replaced with newline + indent\n const next = text[i + 1];\n if (next && next !== ' ' && next !== '\\n' && next !== '\\t')\n split = i;\n }\n if (i >= end) {\n if (split) {\n folds.push(split);\n end = split + endStep;\n split = undefined;\n }\n else if (mode === FOLD_QUOTED) {\n // white-space collected at end may stretch past lineWidth\n while (prev === ' ' || prev === '\\t') {\n prev = ch;\n ch = text[(i += 1)];\n overflow = true;\n }\n // Account for newline escape, but don't break preceding escape\n const j = i > escEnd + 1 ? 
i - 2 : escStart - 1;\n // Bail out if lineWidth & minContentWidth are shorter than an escape string\n if (escapedFolds[j])\n return text;\n folds.push(j);\n escapedFolds[j] = true;\n end = j + endStep;\n split = undefined;\n }\n else {\n overflow = true;\n }\n }\n }\n prev = ch;\n }\n if (overflow && onOverflow)\n onOverflow();\n if (folds.length === 0)\n return text;\n if (onFold)\n onFold();\n let res = text.slice(0, folds[0]);\n for (let i = 0; i < folds.length; ++i) {\n const fold = folds[i];\n const end = folds[i + 1] || text.length;\n if (fold === 0)\n res = `\\n${indent}${text.slice(0, end)}`;\n else {\n if (mode === FOLD_QUOTED && escapedFolds[fold])\n res += `${text[fold]}\\\\`;\n res += `\\n${indent}${text.slice(fold + 1, end)}`;\n }\n }\n return res;\n}\n/**\n * Presumes `i + 1` is at the start of a line\n * @returns index of last newline in more-indented block\n */\nfunction consumeMoreIndentedLines(text, i) {\n let ch = text[i + 1];\n while (ch === ' ' || ch === '\\t') {\n do {\n ch = text[(i += 1)];\n } while (ch && ch !== '\\n');\n ch = text[i + 1];\n }\n return i;\n}\n\nexports.FOLD_BLOCK = FOLD_BLOCK;\nexports.FOLD_FLOW = FOLD_FLOW;\nexports.FOLD_QUOTED = FOLD_QUOTED;\nexports.foldFlowLines = foldFlowLines;\n","'use strict';\n\nvar anchors = require('../doc/anchors.js');\nvar Node = require('../nodes/Node.js');\nvar stringifyComment = require('./stringifyComment.js');\nvar stringifyString = require('./stringifyString.js');\n\nfunction createStringifyContext(doc, options) {\n const opt = Object.assign({\n blockQuote: true,\n commentString: stringifyComment.stringifyComment,\n defaultKeyType: null,\n defaultStringType: 'PLAIN',\n directives: null,\n doubleQuotedAsJSON: false,\n doubleQuotedMinMultiLineLength: 40,\n falseStr: 'false',\n indentSeq: true,\n lineWidth: 80,\n minContentWidth: 20,\n nullStr: 'null',\n simpleKeys: false,\n singleQuote: null,\n trueStr: 'true',\n verifyAliasOrder: true\n }, doc.schema.toStringOptions, options);\n let inFlow;\n switch (opt.collectionStyle) {\n case 'block':\n inFlow = false;\n break;\n case 'flow':\n inFlow = true;\n break;\n default:\n inFlow = null;\n }\n return {\n anchors: new Set(),\n doc,\n indent: '',\n indentStep: typeof opt.indent === 'number' ? ' '.repeat(opt.indent) : ' ',\n inFlow,\n options: opt\n };\n}\nfunction getTagObject(tags, item) {\n if (item.tag) {\n const match = tags.filter(t => t.tag === item.tag);\n if (match.length > 0)\n return match.find(t => t.format === item.format) ?? match[0];\n }\n let tagObj = undefined;\n let obj;\n if (Node.isScalar(item)) {\n obj = item.value;\n const match = tags.filter(t => t.identify?.(obj));\n tagObj =\n match.find(t => t.format === item.format) ?? match.find(t => !t.format);\n }\n else {\n obj = item;\n tagObj = tags.find(t => t.nodeClass && obj instanceof t.nodeClass);\n }\n if (!tagObj) {\n const name = obj?.constructor?.name ?? typeof obj;\n throw new Error(`Tag not resolved for ${name} value`);\n }\n return tagObj;\n}\n// needs to be called before value stringifier to allow for circular anchor refs\nfunction stringifyProps(node, tagObj, { anchors: anchors$1, doc }) {\n if (!doc.directives)\n return '';\n const props = [];\n const anchor = (Node.isScalar(node) || Node.isCollection(node)) && node.anchor;\n if (anchor && anchors.anchorIsValid(anchor)) {\n anchors$1.add(anchor);\n props.push(`&${anchor}`);\n }\n const tag = node.tag ? node.tag : tagObj.default ? 
null : tagObj.tag;\n if (tag)\n props.push(doc.directives.tagString(tag));\n return props.join(' ');\n}\nfunction stringify(item, ctx, onComment, onChompKeep) {\n if (Node.isPair(item))\n return item.toString(ctx, onComment, onChompKeep);\n if (Node.isAlias(item)) {\n if (ctx.doc.directives)\n return item.toString(ctx);\n if (ctx.resolvedAliases?.has(item)) {\n throw new TypeError(`Cannot stringify circular structure without alias nodes`);\n }\n else {\n if (ctx.resolvedAliases)\n ctx.resolvedAliases.add(item);\n else\n ctx.resolvedAliases = new Set([item]);\n item = item.resolve(ctx.doc);\n }\n }\n let tagObj = undefined;\n const node = Node.isNode(item)\n ? item\n : ctx.doc.createNode(item, { onTagObj: o => (tagObj = o) });\n if (!tagObj)\n tagObj = getTagObject(ctx.doc.schema.tags, node);\n const props = stringifyProps(node, tagObj, ctx);\n if (props.length > 0)\n ctx.indentAtStart = (ctx.indentAtStart ?? 0) + props.length + 1;\n const str = typeof tagObj.stringify === 'function'\n ? tagObj.stringify(node, ctx, onComment, onChompKeep)\n : Node.isScalar(node)\n ? stringifyString.stringifyString(node, ctx, onComment, onChompKeep)\n : node.toString(ctx, onComment, onChompKeep);\n if (!props)\n return str;\n return Node.isScalar(node) || str[0] === '{' || str[0] === '['\n ? `${props} ${str}`\n : `${props}\\n${ctx.indent}${str}`;\n}\n\nexports.createStringifyContext = createStringifyContext;\nexports.stringify = stringify;\n","'use strict';\n\nvar Collection = require('../nodes/Collection.js');\nvar Node = require('../nodes/Node.js');\nvar stringify = require('./stringify.js');\nvar stringifyComment = require('./stringifyComment.js');\n\nfunction stringifyCollection(collection, ctx, options) {\n const flow = ctx.inFlow ?? collection.flow;\n const stringify = flow ? stringifyFlowCollection : stringifyBlockCollection;\n return stringify(collection, ctx, options);\n}\nfunction stringifyBlockCollection({ comment, items }, ctx, { blockItemPrefix, flowChars, itemIndent, onChompKeep, onComment }) {\n const { indent, options: { commentString } } = ctx;\n const itemCtx = Object.assign({}, ctx, { indent: itemIndent, type: null });\n let chompKeep = false; // flag for the preceding node's status\n const lines = [];\n for (let i = 0; i < items.length; ++i) {\n const item = items[i];\n let comment = null;\n if (Node.isNode(item)) {\n if (!chompKeep && item.spaceBefore)\n lines.push('');\n addCommentBefore(ctx, lines, item.commentBefore, chompKeep);\n if (item.comment)\n comment = item.comment;\n }\n else if (Node.isPair(item)) {\n const ik = Node.isNode(item.key) ? item.key : null;\n if (ik) {\n if (!chompKeep && ik.spaceBefore)\n lines.push('');\n addCommentBefore(ctx, lines, ik.commentBefore, chompKeep);\n }\n }\n chompKeep = false;\n let str = stringify.stringify(item, itemCtx, () => (comment = null), () => (chompKeep = true));\n if (comment)\n str += stringifyComment.lineComment(str, itemIndent, commentString(comment));\n if (chompKeep && comment)\n chompKeep = false;\n lines.push(blockItemPrefix + str);\n }\n let str;\n if (lines.length === 0) {\n str = flowChars.start + flowChars.end;\n }\n else {\n str = lines[0];\n for (let i = 1; i < lines.length; ++i) {\n const line = lines[i];\n str += line ? 
`\\n${indent}${line}` : '\\n';\n }\n }\n if (comment) {\n str += '\\n' + stringifyComment.indentComment(commentString(comment), indent);\n if (onComment)\n onComment();\n }\n else if (chompKeep && onChompKeep)\n onChompKeep();\n return str;\n}\nfunction stringifyFlowCollection({ comment, items }, ctx, { flowChars, itemIndent, onComment }) {\n const { indent, indentStep, options: { commentString } } = ctx;\n itemIndent += indentStep;\n const itemCtx = Object.assign({}, ctx, {\n indent: itemIndent,\n inFlow: true,\n type: null\n });\n let reqNewline = false;\n let linesAtValue = 0;\n const lines = [];\n for (let i = 0; i < items.length; ++i) {\n const item = items[i];\n let comment = null;\n if (Node.isNode(item)) {\n if (item.spaceBefore)\n lines.push('');\n addCommentBefore(ctx, lines, item.commentBefore, false);\n if (item.comment)\n comment = item.comment;\n }\n else if (Node.isPair(item)) {\n const ik = Node.isNode(item.key) ? item.key : null;\n if (ik) {\n if (ik.spaceBefore)\n lines.push('');\n addCommentBefore(ctx, lines, ik.commentBefore, false);\n if (ik.comment)\n reqNewline = true;\n }\n const iv = Node.isNode(item.value) ? item.value : null;\n if (iv) {\n if (iv.comment)\n comment = iv.comment;\n if (iv.commentBefore)\n reqNewline = true;\n }\n else if (item.value == null && ik && ik.comment) {\n comment = ik.comment;\n }\n }\n if (comment)\n reqNewline = true;\n let str = stringify.stringify(item, itemCtx, () => (comment = null));\n if (i < items.length - 1)\n str += ',';\n if (comment)\n str += stringifyComment.lineComment(str, itemIndent, commentString(comment));\n if (!reqNewline && (lines.length > linesAtValue || str.includes('\\n')))\n reqNewline = true;\n lines.push(str);\n linesAtValue = lines.length;\n }\n let str;\n const { start, end } = flowChars;\n if (lines.length === 0) {\n str = start + end;\n }\n else {\n if (!reqNewline) {\n const len = lines.reduce((sum, line) => sum + line.length + 2, 2);\n reqNewline = len > Collection.Collection.maxFlowStringSingleLineLength;\n }\n if (reqNewline) {\n str = start;\n for (const line of lines)\n str += line ? `\\n${indentStep}${indent}${line}` : '\\n';\n str += `\\n${indent}${end}`;\n }\n else {\n str = `${start} ${lines.join(' ')} ${end}`;\n }\n }\n if (comment) {\n str += stringifyComment.lineComment(str, commentString(comment), indent);\n if (onComment)\n onComment();\n }\n return str;\n}\nfunction addCommentBefore({ indent, options: { commentString } }, lines, comment, chompKeep) {\n if (comment && chompKeep)\n comment = comment.replace(/^\\n+/, '');\n if (comment) {\n const ic = stringifyComment.indentComment(commentString(comment), indent);\n lines.push(ic.trimStart()); // Avoid double indent on first line\n }\n}\n\nexports.stringifyCollection = stringifyCollection;\n","'use strict';\n\n/**\n * Stringifies a comment.\n *\n * Empty comment lines are left empty,\n * lines consisting of a single space are replaced by `#`,\n * and all other lines are prefixed with a `#`.\n */\nconst stringifyComment = (str) => str.replace(/^(?!$)(?: $)?/gm, '#');\nfunction indentComment(comment, indent) {\n if (/^\\n+$/.test(comment))\n return comment.substring(1);\n return indent ? comment.replace(/^(?! *$)/gm, indent) : comment;\n}\nconst lineComment = (str, indent, comment) => str.endsWith('\\n')\n ? indentComment(comment, indent)\n : comment.includes('\\n')\n ? '\\n' + indentComment(comment, indent)\n : (str.endsWith(' ') ? 
'' : ' ') + comment;\n\nexports.indentComment = indentComment;\nexports.lineComment = lineComment;\nexports.stringifyComment = stringifyComment;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar stringify = require('./stringify.js');\nvar stringifyComment = require('./stringifyComment.js');\n\nfunction stringifyDocument(doc, options) {\n const lines = [];\n let hasDirectives = options.directives === true;\n if (options.directives !== false && doc.directives) {\n const dir = doc.directives.toString(doc);\n if (dir) {\n lines.push(dir);\n hasDirectives = true;\n }\n else if (doc.directives.docStart)\n hasDirectives = true;\n }\n if (hasDirectives)\n lines.push('---');\n const ctx = stringify.createStringifyContext(doc, options);\n const { commentString } = ctx.options;\n if (doc.commentBefore) {\n if (lines.length !== 1)\n lines.unshift('');\n const cs = commentString(doc.commentBefore);\n lines.unshift(stringifyComment.indentComment(cs, ''));\n }\n let chompKeep = false;\n let contentComment = null;\n if (doc.contents) {\n if (Node.isNode(doc.contents)) {\n if (doc.contents.spaceBefore && hasDirectives)\n lines.push('');\n if (doc.contents.commentBefore) {\n const cs = commentString(doc.contents.commentBefore);\n lines.push(stringifyComment.indentComment(cs, ''));\n }\n // top-level block scalars need to be indented if followed by a comment\n ctx.forceBlockIndent = !!doc.comment;\n contentComment = doc.contents.comment;\n }\n const onChompKeep = contentComment ? undefined : () => (chompKeep = true);\n let body = stringify.stringify(doc.contents, ctx, () => (contentComment = null), onChompKeep);\n if (contentComment)\n body += stringifyComment.lineComment(body, '', commentString(contentComment));\n if ((body[0] === '|' || body[0] === '>') &&\n lines[lines.length - 1] === '---') {\n // Top-level block scalars with a preceding doc marker ought to use the\n // same line for their header.\n lines[lines.length - 1] = `--- ${body}`;\n }\n else\n lines.push(body);\n }\n else {\n lines.push(stringify.stringify(doc.contents, ctx));\n }\n if (doc.directives?.docEnd) {\n if (doc.comment) {\n const cs = commentString(doc.comment);\n if (cs.includes('\\n')) {\n lines.push('...');\n lines.push(stringifyComment.indentComment(cs, ''));\n }\n else {\n lines.push(`... ${cs}`);\n }\n }\n else {\n lines.push('...');\n }\n }\n else {\n let dc = doc.comment;\n if (dc && chompKeep)\n dc = dc.replace(/^\\n+/, '');\n if (dc) {\n if ((!chompKeep || contentComment) && lines[lines.length - 1] !== '')\n lines.push('');\n lines.push(stringifyComment.indentComment(commentString(dc), ''));\n }\n }\n return lines.join('\\n') + '\\n';\n}\n\nexports.stringifyDocument = stringifyDocument;\n","'use strict';\n\nfunction stringifyNumber({ format, minFractionDigits, tag, value }) {\n if (typeof value === 'bigint')\n return String(value);\n const num = typeof value === 'number' ? value : Number(value);\n if (!isFinite(num))\n return isNaN(num) ? '.nan' : num < 0 ? 
'-.inf' : '.inf';\n let n = JSON.stringify(value);\n if (!format &&\n minFractionDigits &&\n (!tag || tag === 'tag:yaml.org,2002:float') &&\n /^\\d/.test(n)) {\n let i = n.indexOf('.');\n if (i < 0) {\n i = n.length;\n n += '.';\n }\n let d = minFractionDigits - (n.length - i - 1);\n while (d-- > 0)\n n += '0';\n }\n return n;\n}\n\nexports.stringifyNumber = stringifyNumber;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar Scalar = require('../nodes/Scalar.js');\nvar stringify = require('./stringify.js');\nvar stringifyComment = require('./stringifyComment.js');\n\nfunction stringifyPair({ key, value }, ctx, onComment, onChompKeep) {\n const { allNullValues, doc, indent, indentStep, options: { commentString, indentSeq, simpleKeys } } = ctx;\n let keyComment = (Node.isNode(key) && key.comment) || null;\n if (simpleKeys) {\n if (keyComment) {\n throw new Error('With simple keys, key nodes cannot have comments');\n }\n if (Node.isCollection(key)) {\n const msg = 'With simple keys, collection cannot be used as a key value';\n throw new Error(msg);\n }\n }\n let explicitKey = !simpleKeys &&\n (!key ||\n (keyComment && value == null && !ctx.inFlow) ||\n Node.isCollection(key) ||\n (Node.isScalar(key)\n ? key.type === Scalar.Scalar.BLOCK_FOLDED || key.type === Scalar.Scalar.BLOCK_LITERAL\n : typeof key === 'object'));\n ctx = Object.assign({}, ctx, {\n allNullValues: false,\n implicitKey: !explicitKey && (simpleKeys || !allNullValues),\n indent: indent + indentStep\n });\n let keyCommentDone = false;\n let chompKeep = false;\n let str = stringify.stringify(key, ctx, () => (keyCommentDone = true), () => (chompKeep = true));\n if (!explicitKey && !ctx.inFlow && str.length > 1024) {\n if (simpleKeys)\n throw new Error('With simple keys, single line scalar must not span more than 1024 characters');\n explicitKey = true;\n }\n if (ctx.inFlow) {\n if (allNullValues || value == null) {\n if (keyCommentDone && onComment)\n onComment();\n return str === '' ? '?' : explicitKey ? `? ${str}` : str;\n }\n }\n else if ((allNullValues && !simpleKeys) || (value == null && explicitKey)) {\n str = `? ${str}`;\n if (keyComment && !keyCommentDone) {\n str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment));\n }\n else if (chompKeep && onChompKeep)\n onChompKeep();\n return str;\n }\n if (keyCommentDone)\n keyComment = null;\n if (explicitKey) {\n if (keyComment)\n str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment));\n str = `? 
${str}\\n${indent}:`;\n }\n else {\n str = `${str}:`;\n if (keyComment)\n str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment));\n }\n let vcb = '';\n let valueComment = null;\n if (Node.isNode(value)) {\n if (value.spaceBefore)\n vcb = '\\n';\n if (value.commentBefore) {\n const cs = commentString(value.commentBefore);\n vcb += `\\n${stringifyComment.indentComment(cs, ctx.indent)}`;\n }\n valueComment = value.comment;\n }\n else if (value && typeof value === 'object') {\n value = doc.createNode(value);\n }\n ctx.implicitKey = false;\n if (!explicitKey && !keyComment && Node.isScalar(value))\n ctx.indentAtStart = str.length + 1;\n chompKeep = false;\n if (!indentSeq &&\n indentStep.length >= 2 &&\n !ctx.inFlow &&\n !explicitKey &&\n Node.isSeq(value) &&\n !value.flow &&\n !value.tag &&\n !value.anchor) {\n // If indentSeq === false, consider '- ' as part of indentation where possible\n ctx.indent = ctx.indent.substr(2);\n }\n let valueCommentDone = false;\n const valueStr = stringify.stringify(value, ctx, () => (valueCommentDone = true), () => (chompKeep = true));\n let ws = ' ';\n if (vcb || keyComment) {\n if (valueStr === '' && !ctx.inFlow)\n ws = vcb === '\\n' ? '\\n\\n' : vcb;\n else\n ws = `${vcb}\\n${ctx.indent}`;\n }\n else if (!explicitKey && Node.isCollection(value)) {\n const flow = valueStr[0] === '[' || valueStr[0] === '{';\n if (!flow || valueStr.includes('\\n'))\n ws = `\\n${ctx.indent}`;\n }\n else if (valueStr === '' || valueStr[0] === '\\n')\n ws = '';\n str += ws + valueStr;\n if (ctx.inFlow) {\n if (valueCommentDone && onComment)\n onComment();\n }\n else if (valueComment && !valueCommentDone) {\n str += stringifyComment.lineComment(str, ctx.indent, commentString(valueComment));\n }\n else if (chompKeep && onChompKeep) {\n onChompKeep();\n }\n return str;\n}\n\nexports.stringifyPair = stringifyPair;\n","'use strict';\n\nvar Scalar = require('../nodes/Scalar.js');\nvar foldFlowLines = require('./foldFlowLines.js');\n\nconst getFoldOptions = (ctx) => ({\n indentAtStart: ctx.indentAtStart,\n lineWidth: ctx.options.lineWidth,\n minContentWidth: ctx.options.minContentWidth\n});\n// Also checks for lines starting with %, as parsing the output as YAML 1.1 will\n// presume that's starting a new document.\nconst containsDocumentMarker = (str) => /^(%|---|\\.\\.\\.)/m.test(str);\nfunction lineLengthOverLimit(str, lineWidth, indentLength) {\n if (!lineWidth || lineWidth < 0)\n return false;\n const limit = lineWidth - indentLength;\n const strLen = str.length;\n if (strLen <= limit)\n return false;\n for (let i = 0, start = 0; i < strLen; ++i) {\n if (str[i] === '\\n') {\n if (i - start > limit)\n return true;\n start = i + 1;\n if (strLen - start <= limit)\n return false;\n }\n }\n return true;\n}\nfunction doubleQuotedString(value, ctx) {\n const json = JSON.stringify(value);\n if (ctx.options.doubleQuotedAsJSON)\n return json;\n const { implicitKey } = ctx;\n const minMultiLineLength = ctx.options.doubleQuotedMinMultiLineLength;\n const indent = ctx.indent || (containsDocumentMarker(value) ? 
' ' : '');\n let str = '';\n let start = 0;\n for (let i = 0, ch = json[i]; ch; ch = json[++i]) {\n if (ch === ' ' && json[i + 1] === '\\\\' && json[i + 2] === 'n') {\n // space before newline needs to be escaped to not be folded\n str += json.slice(start, i) + '\\\\ ';\n i += 1;\n start = i;\n ch = '\\\\';\n }\n if (ch === '\\\\')\n switch (json[i + 1]) {\n case 'u':\n {\n str += json.slice(start, i);\n const code = json.substr(i + 2, 4);\n switch (code) {\n case '0000':\n str += '\\\\0';\n break;\n case '0007':\n str += '\\\\a';\n break;\n case '000b':\n str += '\\\\v';\n break;\n case '001b':\n str += '\\\\e';\n break;\n case '0085':\n str += '\\\\N';\n break;\n case '00a0':\n str += '\\\\_';\n break;\n case '2028':\n str += '\\\\L';\n break;\n case '2029':\n str += '\\\\P';\n break;\n default:\n if (code.substr(0, 2) === '00')\n str += '\\\\x' + code.substr(2);\n else\n str += json.substr(i, 6);\n }\n i += 5;\n start = i + 1;\n }\n break;\n case 'n':\n if (implicitKey ||\n json[i + 2] === '\"' ||\n json.length < minMultiLineLength) {\n i += 1;\n }\n else {\n // folding will eat first newline\n str += json.slice(start, i) + '\\n\\n';\n while (json[i + 2] === '\\\\' &&\n json[i + 3] === 'n' &&\n json[i + 4] !== '\"') {\n str += '\\n';\n i += 2;\n }\n str += indent;\n // space after newline needs to be escaped to not be folded\n if (json[i + 2] === ' ')\n str += '\\\\';\n i += 1;\n start = i + 1;\n }\n break;\n default:\n i += 1;\n }\n }\n str = start ? str + json.slice(start) : json;\n return implicitKey\n ? str\n : foldFlowLines.foldFlowLines(str, indent, foldFlowLines.FOLD_QUOTED, getFoldOptions(ctx));\n}\nfunction singleQuotedString(value, ctx) {\n if (ctx.options.singleQuote === false ||\n (ctx.implicitKey && value.includes('\\n')) ||\n /[ \\t]\\n|\\n[ \\t]/.test(value) // single quoted string can't have leading or trailing whitespace around newline\n )\n return doubleQuotedString(value, ctx);\n const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : '');\n const res = \"'\" + value.replace(/'/g, \"''\").replace(/\\n+/g, `$&\\n${indent}`) + \"'\";\n return ctx.implicitKey\n ? res\n : foldFlowLines.foldFlowLines(res, indent, foldFlowLines.FOLD_FLOW, getFoldOptions(ctx));\n}\nfunction quotedString(value, ctx) {\n const { singleQuote } = ctx.options;\n let qs;\n if (singleQuote === false)\n qs = doubleQuotedString;\n else {\n const hasDouble = value.includes('\"');\n const hasSingle = value.includes(\"'\");\n if (hasDouble && !hasSingle)\n qs = singleQuotedString;\n else if (hasSingle && !hasDouble)\n qs = doubleQuotedString;\n else\n qs = singleQuote ? singleQuotedString : doubleQuotedString;\n }\n return qs(value, ctx);\n}\nfunction blockString({ comment, type, value }, ctx, onComment, onChompKeep) {\n const { blockQuote, commentString, lineWidth } = ctx.options;\n // 1. Block can't end in whitespace unless the last line is non-empty.\n // 2. Strings consisting of only whitespace are best rendered explicitly.\n if (!blockQuote || /\\n[\\t ]+$/.test(value) || /^\\s*$/.test(value)) {\n return quotedString(value, ctx);\n }\n const indent = ctx.indent ||\n (ctx.forceBlockIndent || containsDocumentMarker(value) ? ' ' : '');\n const literal = blockQuote === 'literal'\n ? true\n : blockQuote === 'folded' || type === Scalar.Scalar.BLOCK_FOLDED\n ? false\n : type === Scalar.Scalar.BLOCK_LITERAL\n ? true\n : !lineLengthOverLimit(value, lineWidth, indent.length);\n if (!value)\n return literal ? 
'|\\n' : '>\\n';\n // determine chomping from whitespace at value end\n let chomp;\n let endStart;\n for (endStart = value.length; endStart > 0; --endStart) {\n const ch = value[endStart - 1];\n if (ch !== '\\n' && ch !== '\\t' && ch !== ' ')\n break;\n }\n let end = value.substring(endStart);\n const endNlPos = end.indexOf('\\n');\n if (endNlPos === -1) {\n chomp = '-'; // strip\n }\n else if (value === end || endNlPos !== end.length - 1) {\n chomp = '+'; // keep\n if (onChompKeep)\n onChompKeep();\n }\n else {\n chomp = ''; // clip\n }\n if (end) {\n value = value.slice(0, -end.length);\n if (end[end.length - 1] === '\\n')\n end = end.slice(0, -1);\n end = end.replace(/\\n+(?!\\n|$)/g, `$&${indent}`);\n }\n // determine indent indicator from whitespace at value start\n let startWithSpace = false;\n let startEnd;\n let startNlPos = -1;\n for (startEnd = 0; startEnd < value.length; ++startEnd) {\n const ch = value[startEnd];\n if (ch === ' ')\n startWithSpace = true;\n else if (ch === '\\n')\n startNlPos = startEnd;\n else\n break;\n }\n let start = value.substring(0, startNlPos < startEnd ? startNlPos + 1 : startEnd);\n if (start) {\n value = value.substring(start.length);\n start = start.replace(/\\n+/g, `$&${indent}`);\n }\n const indentSize = indent ? '2' : '1'; // root is at -1\n let header = (literal ? '|' : '>') + (startWithSpace ? indentSize : '') + chomp;\n if (comment) {\n header += ' ' + commentString(comment.replace(/ ?[\\r\\n]+/g, ' '));\n if (onComment)\n onComment();\n }\n if (literal) {\n value = value.replace(/\\n+/g, `$&${indent}`);\n return `${header}\\n${indent}${start}${value}${end}`;\n }\n value = value\n .replace(/\\n+/g, '\\n$&')\n .replace(/(?:^|\\n)([\\t ].*)(?:([\\n\\t ]*)\\n(?![\\n\\t ]))?/g, '$1$2') // more-indented lines aren't folded\n // ^ more-ind. ^ empty ^ capture next empty lines only at end of indent\n .replace(/\\n+/g, `$&${indent}`);\n const body = foldFlowLines.foldFlowLines(`${start}${value}${end}`, indent, foldFlowLines.FOLD_BLOCK, getFoldOptions(ctx));\n return `${header}\\n${indent}${body}`;\n}\nfunction plainString(item, ctx, onComment, onChompKeep) {\n const { type, value } = item;\n const { actualString, implicitKey, indent, inFlow } = ctx;\n if ((implicitKey && /[\\n[\\]{},]/.test(value)) ||\n (inFlow && /[[\\]{},]/.test(value))) {\n return quotedString(value, ctx);\n }\n if (!value ||\n /^[\\n\\t ,[\\]{}#&*!|>'\"%@`]|^[?-]$|^[?-][ \\t]|[\\n:][ \\t]|[ \\t]\\n|[\\n\\t ]#|[\\n\\t :]$/.test(value)) {\n // not allowed:\n // - empty string, '-' or '?'\n // - start with an indicator character (except [?:-]) or /[?-] /\n // - '\\n ', ': ' or ' \\n' anywhere\n // - '#' not preceded by a non-space char\n // - end with ' ' or ':'\n return implicitKey || inFlow || !value.includes('\\n')\n ? quotedString(value, ctx)\n : blockString(item, ctx, onComment, onChompKeep);\n }\n if (!implicitKey &&\n !inFlow &&\n type !== Scalar.Scalar.PLAIN &&\n value.includes('\\n')) {\n // Where allowed & type not set explicitly, prefer block style for multiline strings\n return blockString(item, ctx, onComment, onChompKeep);\n }\n if (indent === '' && containsDocumentMarker(value)) {\n ctx.forceBlockIndent = true;\n return blockString(item, ctx, onComment, onChompKeep);\n }\n const str = value.replace(/\\n+/g, `$&\\n${indent}`);\n // Verify that output will be parsed as a string, as e.g. plain numbers and\n // booleans get parsed with those types in v1.2 (e.g. 
'42', 'true' & '0.9e-3'),\n // and others in v1.1.\n if (actualString) {\n const test = (tag) => tag.default && tag.tag !== 'tag:yaml.org,2002:str' && tag.test?.test(str);\n const { compat, tags } = ctx.doc.schema;\n if (tags.some(test) || compat?.some(test))\n return quotedString(value, ctx);\n }\n return implicitKey\n ? str\n : foldFlowLines.foldFlowLines(str, indent, foldFlowLines.FOLD_FLOW, getFoldOptions(ctx));\n}\nfunction stringifyString(item, ctx, onComment, onChompKeep) {\n const { implicitKey, inFlow } = ctx;\n const ss = typeof item.value === 'string'\n ? item\n : Object.assign({}, item, { value: String(item.value) });\n let { type } = item;\n if (type !== Scalar.Scalar.QUOTE_DOUBLE) {\n // force double quotes on control characters & unpaired surrogates\n if (/[\\x00-\\x08\\x0b-\\x1f\\x7f-\\x9f\\u{D800}-\\u{DFFF}]/u.test(ss.value))\n type = Scalar.Scalar.QUOTE_DOUBLE;\n }\n const _stringify = (_type) => {\n switch (_type) {\n case Scalar.Scalar.BLOCK_FOLDED:\n case Scalar.Scalar.BLOCK_LITERAL:\n return implicitKey || inFlow\n ? quotedString(ss.value, ctx) // blocks are not valid inside flow containers\n : blockString(ss, ctx, onComment, onChompKeep);\n case Scalar.Scalar.QUOTE_DOUBLE:\n return doubleQuotedString(ss.value, ctx);\n case Scalar.Scalar.QUOTE_SINGLE:\n return singleQuotedString(ss.value, ctx);\n case Scalar.Scalar.PLAIN:\n return plainString(ss, ctx, onComment, onChompKeep);\n default:\n return null;\n }\n };\n let res = _stringify(type);\n if (res === null) {\n const { defaultKeyType, defaultStringType } = ctx.options;\n const t = (implicitKey && defaultKeyType) || defaultStringType;\n res = _stringify(t);\n if (res === null)\n throw new Error(`Unsupported default string type ${t}`);\n }\n return res;\n}\n\nexports.stringifyString = stringifyString;\n","'use strict';\n\nvar Node = require('./nodes/Node.js');\n\nconst BREAK = Symbol('break visit');\nconst SKIP = Symbol('skip children');\nconst REMOVE = Symbol('remove node');\n/**\n * Apply a visitor to an AST node or document.\n *\n * Walks through the tree (depth-first) starting from `node`, calling a\n * `visitor` function with three arguments:\n * - `key`: For sequence values and map `Pair`, the node's index in the\n * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly.\n * `null` for the root node.\n * - `node`: The current node.\n * - `path`: The ancestry of the current node.\n *\n * The return value of the visitor may be used to control the traversal:\n * - `undefined` (default): Do nothing and continue\n * - `visit.SKIP`: Do not visit the children of this node, continue with next\n * sibling\n * - `visit.BREAK`: Terminate traversal completely\n * - `visit.REMOVE`: Remove the current node, then continue with the next one\n * - `Node`: Replace the current node, then continue by visiting it\n * - `number`: While iterating the items of a sequence or map, set the index\n * of the next step. This is useful especially if the index of the current\n * node has changed.\n *\n * If `visitor` is a single function, it will be called with all values\n * encountered in the tree, including e.g. `null` values. Alternatively,\n * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`,\n * `Alias` and `Scalar` node. To define the same visitor function for more than\n * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar)\n * and `Node` (alias, map, seq & scalar) targets. 
Of all these, only the most\n * specific defined one will be used for each node.\n */\nfunction visit(node, visitor) {\n const visitor_ = initVisitor(visitor);\n if (Node.isDocument(node)) {\n const cd = visit_(null, node.contents, visitor_, Object.freeze([node]));\n if (cd === REMOVE)\n node.contents = null;\n }\n else\n visit_(null, node, visitor_, Object.freeze([]));\n}\n// Without the `as symbol` casts, TS declares these in the `visit`\n// namespace using `var`, but then complains about that because\n// `unique symbol` must be `const`.\n/** Terminate visit traversal completely */\nvisit.BREAK = BREAK;\n/** Do not visit the children of the current node */\nvisit.SKIP = SKIP;\n/** Remove the current node */\nvisit.REMOVE = REMOVE;\nfunction visit_(key, node, visitor, path) {\n const ctrl = callVisitor(key, node, visitor, path);\n if (Node.isNode(ctrl) || Node.isPair(ctrl)) {\n replaceNode(key, path, ctrl);\n return visit_(key, ctrl, visitor, path);\n }\n if (typeof ctrl !== 'symbol') {\n if (Node.isCollection(node)) {\n path = Object.freeze(path.concat(node));\n for (let i = 0; i < node.items.length; ++i) {\n const ci = visit_(i, node.items[i], visitor, path);\n if (typeof ci === 'number')\n i = ci - 1;\n else if (ci === BREAK)\n return BREAK;\n else if (ci === REMOVE) {\n node.items.splice(i, 1);\n i -= 1;\n }\n }\n }\n else if (Node.isPair(node)) {\n path = Object.freeze(path.concat(node));\n const ck = visit_('key', node.key, visitor, path);\n if (ck === BREAK)\n return BREAK;\n else if (ck === REMOVE)\n node.key = null;\n const cv = visit_('value', node.value, visitor, path);\n if (cv === BREAK)\n return BREAK;\n else if (cv === REMOVE)\n node.value = null;\n }\n }\n return ctrl;\n}\n/**\n * Apply an async visitor to an AST node or document.\n *\n * Walks through the tree (depth-first) starting from `node`, calling a\n * `visitor` function with three arguments:\n * - `key`: For sequence values and map `Pair`, the node's index in the\n * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly.\n * `null` for the root node.\n * - `node`: The current node.\n * - `path`: The ancestry of the current node.\n *\n * The return value of the visitor may be used to control the traversal:\n * - `Promise`: Must resolve to one of the following values\n * - `undefined` (default): Do nothing and continue\n * - `visit.SKIP`: Do not visit the children of this node, continue with next\n * sibling\n * - `visit.BREAK`: Terminate traversal completely\n * - `visit.REMOVE`: Remove the current node, then continue with the next one\n * - `Node`: Replace the current node, then continue by visiting it\n * - `number`: While iterating the items of a sequence or map, set the index\n * of the next step. This is useful especially if the index of the current\n * node has changed.\n *\n * If `visitor` is a single function, it will be called with all values\n * encountered in the tree, including e.g. `null` values. Alternatively,\n * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`,\n * `Alias` and `Scalar` node. To define the same visitor function for more than\n * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar)\n * and `Node` (alias, map, seq & scalar) targets. 
Of all these, only the most\n * specific defined one will be used for each node.\n */\nasync function visitAsync(node, visitor) {\n const visitor_ = initVisitor(visitor);\n if (Node.isDocument(node)) {\n const cd = await visitAsync_(null, node.contents, visitor_, Object.freeze([node]));\n if (cd === REMOVE)\n node.contents = null;\n }\n else\n await visitAsync_(null, node, visitor_, Object.freeze([]));\n}\n// Without the `as symbol` casts, TS declares these in the `visit`\n// namespace using `var`, but then complains about that because\n// `unique symbol` must be `const`.\n/** Terminate visit traversal completely */\nvisitAsync.BREAK = BREAK;\n/** Do not visit the children of the current node */\nvisitAsync.SKIP = SKIP;\n/** Remove the current node */\nvisitAsync.REMOVE = REMOVE;\nasync function visitAsync_(key, node, visitor, path) {\n const ctrl = await callVisitor(key, node, visitor, path);\n if (Node.isNode(ctrl) || Node.isPair(ctrl)) {\n replaceNode(key, path, ctrl);\n return visitAsync_(key, ctrl, visitor, path);\n }\n if (typeof ctrl !== 'symbol') {\n if (Node.isCollection(node)) {\n path = Object.freeze(path.concat(node));\n for (let i = 0; i < node.items.length; ++i) {\n const ci = await visitAsync_(i, node.items[i], visitor, path);\n if (typeof ci === 'number')\n i = ci - 1;\n else if (ci === BREAK)\n return BREAK;\n else if (ci === REMOVE) {\n node.items.splice(i, 1);\n i -= 1;\n }\n }\n }\n else if (Node.isPair(node)) {\n path = Object.freeze(path.concat(node));\n const ck = await visitAsync_('key', node.key, visitor, path);\n if (ck === BREAK)\n return BREAK;\n else if (ck === REMOVE)\n node.key = null;\n const cv = await visitAsync_('value', node.value, visitor, path);\n if (cv === BREAK)\n return BREAK;\n else if (cv === REMOVE)\n node.value = null;\n }\n }\n return ctrl;\n}\nfunction initVisitor(visitor) {\n if (typeof visitor === 'object' &&\n (visitor.Collection || visitor.Node || visitor.Value)) {\n return Object.assign({\n Alias: visitor.Node,\n Map: visitor.Node,\n Scalar: visitor.Node,\n Seq: visitor.Node\n }, visitor.Value && {\n Map: visitor.Value,\n Scalar: visitor.Value,\n Seq: visitor.Value\n }, visitor.Collection && {\n Map: visitor.Collection,\n Seq: visitor.Collection\n }, visitor);\n }\n return visitor;\n}\nfunction callVisitor(key, node, visitor, path) {\n if (typeof visitor === 'function')\n return visitor(key, node, path);\n if (Node.isMap(node))\n return visitor.Map?.(key, node, path);\n if (Node.isSeq(node))\n return visitor.Seq?.(key, node, path);\n if (Node.isPair(node))\n return visitor.Pair?.(key, node, path);\n if (Node.isScalar(node))\n return visitor.Scalar?.(key, node, path);\n if (Node.isAlias(node))\n return visitor.Alias?.(key, node, path);\n return undefined;\n}\nfunction replaceNode(key, path, node) {\n const parent = path[path.length - 1];\n if (Node.isCollection(parent)) {\n parent.items[key] = node;\n }\n else if (Node.isPair(parent)) {\n if (key === 'key')\n parent.key = node;\n else\n parent.value = node;\n }\n else if (Node.isDocument(parent)) {\n parent.contents = node;\n }\n else {\n const pt = Node.isAlias(parent) ? 
'alias' : 'scalar';\n throw new Error(`Cannot replace node with ${pt} parent`);\n }\n}\n\nexports.visit = visit;\nexports.visitAsync = visitAsync;\n",null,"module.exports = require(\"assert\");","module.exports = require(\"constants\");","module.exports = require(\"events\");","module.exports = require(\"fs\");","module.exports = require(\"http\");","module.exports = require(\"https\");","module.exports = require(\"net\");","module.exports = require(\"os\");","module.exports = require(\"path\");","module.exports = require(\"punycode\");","module.exports = require(\"stream\");","module.exports = require(\"tls\");","module.exports = require(\"url\");","module.exports = require(\"util\");","module.exports = require(\"zlib\");","// The module cache\nvar __webpack_module_cache__ = {};\n\n// The require function\nfunction __webpack_require__(moduleId) {\n\t// Check if module is in cache\n\tvar cachedModule = __webpack_module_cache__[moduleId];\n\tif (cachedModule !== undefined) {\n\t\treturn cachedModule.exports;\n\t}\n\t// Create a new module (and put it into the cache)\n\tvar module = __webpack_module_cache__[moduleId] = {\n\t\t// no module.id needed\n\t\t// no module.loaded needed\n\t\texports: {}\n\t};\n\n\t// Execute the module function\n\tvar threw = true;\n\ttry {\n\t\t__webpack_modules__[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\t\tthrew = false;\n\t} finally {\n\t\tif(threw) delete __webpack_module_cache__[moduleId];\n\t}\n\n\t// Return the exports of the module\n\treturn module.exports;\n}\n\n","\nif (typeof __webpack_require__ !== 'undefined') __webpack_require__.ab = __dirname + \"/\";","// startup\n// Load entry module and return exports\n// This entry module is referenced by other modules so it can't be inlined\nvar __webpack_exports__ = 
__webpack_require__(3109);\n"],"mappings":";;;;;;;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC7JA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AChDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;A;;;;;;AC3FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;A;;;;;;ACxUA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;A;;;;;;ACzCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;A;;;;;;AC5EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;A;;;;;;ACzDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;A;;;;;;AC1RA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;A;;;;;;ACvCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;A;;;;;;ACrDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;A;;;;;;ACnCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;A;;;;;;AC1CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;A;;;;;;ACrDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;A;;;;;;AChFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;A;;;;;;AC5lBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;A;;;;;;AC5DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACtDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC/KA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACrYA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACl6CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC5LA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACvMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACnMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AChxCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;ACnBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AChCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC5DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACzCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC5FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACjFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC5NA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC5GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACnMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC9CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACtCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACzMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AChOA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACvIA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACnCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC5BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AClBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC7UA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC5EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACvDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACtFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC1KA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC7DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACjDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC/FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACtJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACjEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACtCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACzBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACtHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AClGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACzGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACtCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACzNA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC9DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AClGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC/GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC9rBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACxCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACz7BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACvGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACvCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC3CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AClCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACpBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC9CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACzCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACxBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC/DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACrFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACnEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC5BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACjDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC3EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC3EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AChFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACtCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC7FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACxGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC3IA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC5HA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACxJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACvBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACtFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACzBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC9HA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC3TA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;AC3OA;;;A;;;;;;A;;A;;;;;;A;;A;;;;;;A;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AC7BA;AACA;;;;ACDA;AACA;AACA;AACA;;;;A","sourceRoot":""} \ No newline at end of file diff --git a/.github/actions/verify-chart-changelog/dist/licenses.txt b/.github/actions/verify-chart-changelog/dist/licenses.txt deleted file mode 100644 index 0e13f96b..00000000 --- a/.github/actions/verify-chart-changelog/dist/licenses.txt +++ /dev/null @@ -1,836 +0,0 @@ -@actions/core -MIT -The MIT License (MIT) - -Copyright 2019 GitHub - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated 
documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -@actions/github -MIT -The MIT License (MIT) - -Copyright 2019 GitHub - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -@actions/http-client -MIT -Actions Http Client for Node.js - -Copyright (c) GitHub, Inc. - -All rights reserved. - -MIT License - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and -associated documentation files (the "Software"), to deal in the Software without restriction, -including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT -LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
- - -@octokit/auth-token -MIT -The MIT License - -Copyright (c) 2019 Octokit contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. - - -@octokit/core -MIT -The MIT License - -Copyright (c) 2019 Octokit contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. - - -@octokit/endpoint -MIT -The MIT License - -Copyright (c) 2018 Octokit contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
- - -@octokit/graphql -MIT -The MIT License - -Copyright (c) 2018 Octokit contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. - - -@octokit/plugin-paginate-rest -MIT -MIT License Copyright (c) 2019 Octokit contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - - -@octokit/plugin-rest-endpoint-methods -MIT -MIT License Copyright (c) 2019 Octokit contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
- - -@octokit/request -MIT -The MIT License - -Copyright (c) 2018 Octokit contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. - - -@octokit/request-error -MIT -The MIT License - -Copyright (c) 2019 Octokit contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. - - -@vercel/ncc -MIT -Copyright 2018 ZEIT, Inc. - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
- -ajv -MIT -The MIT License (MIT) - -Copyright (c) 2015-2021 Evgeny Poberezkin - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - - - -ajv-formats -MIT -MIT License - -Copyright (c) 2020 Evgeny Poberezkin - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - - -before-after-hook -Apache-2.0 - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. 
- - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2018 Gregor Martynus and other contributors. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - - -deprecation -ISC -The ISC License - -Copyright (c) Gregor Martynus and contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- - -fast-deep-equal -MIT -MIT License - -Copyright (c) 2017 Evgeny Poberezkin - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - - -fs-extra -MIT -(The MIT License) - -Copyright (c) 2011-2017 JP Richardson - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files -(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, - merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS -OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, - ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - - -graceful-fs -ISC -The ISC License - -Copyright (c) 2011-2022 Isaac Z. Schlueter, Ben Noordhuis, and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - - -is-plain-object -MIT -The MIT License (MIT) - -Copyright (c) 2014-2017, Jon Schlinkert. 
- -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. - - -json-schema-traverse -MIT -MIT License - -Copyright (c) 2017 Evgeny Poberezkin - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - - -jsonfile -MIT -(The MIT License) - -Copyright (c) 2012-2015, JP Richardson - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files -(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, - merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS -OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, - ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
- - -node-fetch -MIT -The MIT License (MIT) - -Copyright (c) 2016 David Frank - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - - - -once -ISC -The ISC License - -Copyright (c) Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - - -tr46 -MIT - -tunnel -MIT -The MIT License (MIT) - -Copyright (c) 2012 Koichi Kobayashi - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. - - -universal-user-agent -ISC -# [ISC License](https://spdx.org/licenses/ISC) - -Copyright (c) 2018, Gregor Martynus (https://github.com/gr2m) - -Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. 
- -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - - -universalify -MIT -(The MIT License) - -Copyright (c) 2017, Ryan Zimmerman - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the 'Software'), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - - -uri-js -BSD-2-Clause -Copyright 2011 Gary Court. All rights reserved. - -Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - -1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY GARY COURT "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL GARY COURT OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of Gary Court. - - -webidl-conversions -BSD-2-Clause -# The BSD 2-Clause License - -Copyright (c) 2014, Domenic Denicola -All rights reserved. - -Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - -1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - -2. 
Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - - -whatwg-url -MIT -The MIT License (MIT) - -Copyright (c) 2015–2016 Sebastian Mayr - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. - - -wrappy -ISC -The ISC License - -Copyright (c) Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - - -yaml -ISC -Copyright Eemeli Aro - -Permission to use, copy, modify, and/or distribute this software for any purpose -with or without fee is hereby granted, provided that the above copyright notice -and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH -REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND -FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER -TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF -THIS SOFTWARE. diff --git a/.github/actions/verify-chart-changelog/dist/sourcemap-register.js b/.github/actions/verify-chart-changelog/dist/sourcemap-register.js deleted file mode 100644 index 56566f1d..00000000 --- a/.github/actions/verify-chart-changelog/dist/sourcemap-register.js +++ /dev/null @@ -1 +0,0 @@ -(()=>{var e={650:e=>{var r=Object.prototype.toString;var n=typeof Buffer.alloc==="function"&&typeof Buffer.allocUnsafe==="function"&&typeof Buffer.from==="function";function isArrayBuffer(e){return r.call(e).slice(8,-1)==="ArrayBuffer"}function fromArrayBuffer(e,r,t){r>>>=0;var o=e.byteLength-r;if(o<0){throw new RangeError("'offset' is out of bounds")}if(t===undefined){t=o}else{t>>>=0;if(t>o){throw new RangeError("'length' is out of bounds")}}return n?Buffer.from(e.slice(r,r+t)):new Buffer(new Uint8Array(e.slice(r,r+t)))}function fromString(e,r){if(typeof r!=="string"||r===""){r="utf8"}if(!Buffer.isEncoding(r)){throw new TypeError('"encoding" must be a valid string encoding')}return n?Buffer.from(e,r):new Buffer(e,r)}function bufferFrom(e,r,t){if(typeof e==="number"){throw new TypeError('"value" argument must not be a number')}if(isArrayBuffer(e)){return fromArrayBuffer(e,r,t)}if(typeof e==="string"){return fromString(e,r)}return n?Buffer.from(e):new Buffer(e)}e.exports=bufferFrom},284:(e,r,n)=>{e=n.nmd(e);var t=n(596).SourceMapConsumer;var o=n(622);var i;try{i=n(747);if(!i.existsSync||!i.readFileSync){i=null}}catch(e){}var a=n(650);function dynamicRequire(e,r){return e.require(r)}var u=false;var s=false;var l=false;var c="auto";var p={};var f={};var g=/^data:application\/json[^,]+base64,/;var h=[];var d=[];function isInBrowser(){if(c==="browser")return true;if(c==="node")return false;return typeof window!=="undefined"&&typeof XMLHttpRequest==="function"&&!(window.require&&window.module&&window.process&&window.process.type==="renderer")}function hasGlobalProcessEventEmitter(){return typeof process==="object"&&process!==null&&typeof process.on==="function"}function handlerExec(e){return function(r){for(var n=0;n"}var n=this.getLineNumber();if(n!=null){r+=":"+n;var t=this.getColumnNumber();if(t){r+=":"+t}}}var o="";var i=this.getFunctionName();var a=true;var u=this.isConstructor();var s=!(this.isToplevel()||u);if(s){var l=this.getTypeName();if(l==="[object Object]"){l="null"}var c=this.getMethodName();if(i){if(l&&i.indexOf(l)!=0){o+=l+"."}o+=i;if(c&&i.indexOf("."+c)!=i.length-c.length-1){o+=" [as "+c+"]"}}else{o+=l+"."+(c||"")}}else if(u){o+="new "+(i||"")}else if(i){o+=i}else{o+=r;a=false}if(a){o+=" ("+r+")"}return o}function cloneCallSite(e){var r={};Object.getOwnPropertyNames(Object.getPrototypeOf(e)).forEach((function(n){r[n]=/^(?:is|get)/.test(n)?function(){return e[n].call(e)}:e[n]}));r.toString=CallSiteToString;return r}function wrapCallSite(e,r){if(r===undefined){r={nextPosition:null,curPosition:null}}if(e.isNative()){r.curPosition=null;return e}var n=e.getFileName()||e.getScriptNameOrSourceURL();if(n){var t=e.getLineNumber();var o=e.getColumnNumber()-1;var i=/^v(10\.1[6-9]|10\.[2-9][0-9]|10\.[0-9]{3,}|1[2-9]\d*|[2-9]\d|\d{3,}|11\.11)/;var a=i.test(process.version)?0:62;if(t===1&&o>a&&!isInBrowser()&&!e.isEval()){o-=a}var 
u=mapSourcePosition({source:n,line:t,column:o});r.curPosition=u;e=cloneCallSite(e);var s=e.getFunctionName;e.getFunctionName=function(){if(r.nextPosition==null){return s()}return r.nextPosition.name||s()};e.getFileName=function(){return u.source};e.getLineNumber=function(){return u.line};e.getColumnNumber=function(){return u.column+1};e.getScriptNameOrSourceURL=function(){return u.source};return e}var l=e.isEval()&&e.getEvalOrigin();if(l){l=mapEvalOrigin(l);e=cloneCallSite(e);e.getEvalOrigin=function(){return l};return e}return e}function prepareStackTrace(e,r){if(l){p={};f={}}var n=e.name||"Error";var t=e.message||"";var o=n+": "+t;var i={nextPosition:null,curPosition:null};var a=[];for(var u=r.length-1;u>=0;u--){a.push("\n at "+wrapCallSite(r[u],i));i.nextPosition=i.curPosition}i.curPosition=i.nextPosition=null;return o+a.reverse().join("")}function getErrorSource(e){var r=/\n at [^(]+ \((.*):(\d+):(\d+)\)/.exec(e.stack);if(r){var n=r[1];var t=+r[2];var o=+r[3];var a=p[n];if(!a&&i&&i.existsSync(n)){try{a=i.readFileSync(n,"utf8")}catch(e){a=""}}if(a){var u=a.split(/(?:\r\n|\r|\n)/)[t-1];if(u){return n+":"+t+"\n"+u+"\n"+new Array(o).join(" ")+"^"}}}return null}function printErrorAndExit(e){var r=getErrorSource(e);if(process.stderr._handle&&process.stderr._handle.setBlocking){process.stderr._handle.setBlocking(true)}if(r){console.error();console.error(r)}console.error(e.stack);process.exit(1)}function shimEmitUncaughtException(){var e=process.emit;process.emit=function(r){if(r==="uncaughtException"){var n=arguments[1]&&arguments[1].stack;var t=this.listeners(r).length>0;if(n&&!t){return printErrorAndExit(arguments[1])}}return e.apply(this,arguments)}}var S=h.slice(0);var _=d.slice(0);r.wrapCallSite=wrapCallSite;r.getErrorSource=getErrorSource;r.mapSourcePosition=mapSourcePosition;r.retrieveSourceMap=v;r.install=function(r){r=r||{};if(r.environment){c=r.environment;if(["node","browser","auto"].indexOf(c)===-1){throw new Error("environment "+c+" was unknown. 
Available options are {auto, browser, node}")}}if(r.retrieveFile){if(r.overrideRetrieveFile){h.length=0}h.unshift(r.retrieveFile)}if(r.retrieveSourceMap){if(r.overrideRetrieveSourceMap){d.length=0}d.unshift(r.retrieveSourceMap)}if(r.hookRequire&&!isInBrowser()){var n=dynamicRequire(e,"module");var t=n.prototype._compile;if(!t.__sourceMapSupport){n.prototype._compile=function(e,r){p[r]=e;f[r]=undefined;return t.call(this,e,r)};n.prototype._compile.__sourceMapSupport=true}}if(!l){l="emptyCacheBetweenOperations"in r?r.emptyCacheBetweenOperations:false}if(!u){u=true;Error.prepareStackTrace=prepareStackTrace}if(!s){var o="handleUncaughtExceptions"in r?r.handleUncaughtExceptions:true;try{var i=dynamicRequire(e,"worker_threads");if(i.isMainThread===false){o=false}}catch(e){}if(o&&hasGlobalProcessEventEmitter()){s=true;shimEmitUncaughtException()}}};r.resetRetrieveHandlers=function(){h.length=0;d.length=0;h=S.slice(0);d=_.slice(0);v=handlerExec(d);m=handlerExec(h)}},837:(e,r,n)=>{var t=n(983);var o=Object.prototype.hasOwnProperty;var i=typeof Map!=="undefined";function ArraySet(){this._array=[];this._set=i?new Map:Object.create(null)}ArraySet.fromArray=function ArraySet_fromArray(e,r){var n=new ArraySet;for(var t=0,o=e.length;t=0){return r}}else{var n=t.toSetString(e);if(o.call(this._set,n)){return this._set[n]}}throw new Error('"'+e+'" is not in the set.')};ArraySet.prototype.at=function ArraySet_at(e){if(e>=0&&e{var t=n(537);var o=5;var i=1<>1;return r?-n:n}r.encode=function base64VLQ_encode(e){var r="";var n;var i=toVLQSigned(e);do{n=i&a;i>>>=o;if(i>0){n|=u}r+=t.encode(n)}while(i>0);return r};r.decode=function base64VLQ_decode(e,r,n){var i=e.length;var s=0;var l=0;var c,p;do{if(r>=i){throw new Error("Expected more digits in base 64 VLQ value.")}p=t.decode(e.charCodeAt(r++));if(p===-1){throw new Error("Invalid base64 digit: "+e.charAt(r-1))}c=!!(p&u);p&=a;s=s+(p<{var n="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".split("");r.encode=function(e){if(0<=e&&e{r.GREATEST_LOWER_BOUND=1;r.LEAST_UPPER_BOUND=2;function recursiveSearch(e,n,t,o,i,a){var u=Math.floor((n-e)/2)+e;var s=i(t,o[u],true);if(s===0){return u}else if(s>0){if(n-u>1){return recursiveSearch(u,n,t,o,i,a)}if(a==r.LEAST_UPPER_BOUND){return n1){return recursiveSearch(e,u,t,o,i,a)}if(a==r.LEAST_UPPER_BOUND){return u}else{return e<0?-1:e}}}r.search=function search(e,n,t,o){if(n.length===0){return-1}var i=recursiveSearch(-1,n.length,e,n,t,o||r.GREATEST_LOWER_BOUND);if(i<0){return-1}while(i-1>=0){if(t(n[i],n[i-1],true)!==0){break}--i}return i}},740:(e,r,n)=>{var t=n(983);function generatedPositionAfter(e,r){var n=e.generatedLine;var o=r.generatedLine;var i=e.generatedColumn;var a=r.generatedColumn;return o>n||o==n&&a>=i||t.compareByGeneratedPositionsInflated(e,r)<=0}function MappingList(){this._array=[];this._sorted=true;this._last={generatedLine:-1,generatedColumn:0}}MappingList.prototype.unsortedForEach=function MappingList_forEach(e,r){this._array.forEach(e,r)};MappingList.prototype.add=function MappingList_add(e){if(generatedPositionAfter(this._last,e)){this._last=e;this._array.push(e)}else{this._sorted=false;this._array.push(e)}};MappingList.prototype.toArray=function MappingList_toArray(){if(!this._sorted){this._array.sort(t.compareByGeneratedPositionsInflated);this._sorted=true}return this._array};r.H=MappingList},226:(e,r)=>{function swap(e,r,n){var t=e[r];e[r]=e[n];e[n]=t}function randomIntInRange(e,r){return Math.round(e+Math.random()*(r-e))}function doQuickSort(e,r,n,t){if(n{var t;var o=n(983);var i=n(164);var 
a=n(837).I;var u=n(215);var s=n(226).U;function SourceMapConsumer(e,r){var n=e;if(typeof e==="string"){n=o.parseSourceMapInput(e)}return n.sections!=null?new IndexedSourceMapConsumer(n,r):new BasicSourceMapConsumer(n,r)}SourceMapConsumer.fromSourceMap=function(e,r){return BasicSourceMapConsumer.fromSourceMap(e,r)};SourceMapConsumer.prototype._version=3;SourceMapConsumer.prototype.__generatedMappings=null;Object.defineProperty(SourceMapConsumer.prototype,"_generatedMappings",{configurable:true,enumerable:true,get:function(){if(!this.__generatedMappings){this._parseMappings(this._mappings,this.sourceRoot)}return this.__generatedMappings}});SourceMapConsumer.prototype.__originalMappings=null;Object.defineProperty(SourceMapConsumer.prototype,"_originalMappings",{configurable:true,enumerable:true,get:function(){if(!this.__originalMappings){this._parseMappings(this._mappings,this.sourceRoot)}return this.__originalMappings}});SourceMapConsumer.prototype._charIsMappingSeparator=function SourceMapConsumer_charIsMappingSeparator(e,r){var n=e.charAt(r);return n===";"||n===","};SourceMapConsumer.prototype._parseMappings=function SourceMapConsumer_parseMappings(e,r){throw new Error("Subclasses must implement _parseMappings")};SourceMapConsumer.GENERATED_ORDER=1;SourceMapConsumer.ORIGINAL_ORDER=2;SourceMapConsumer.GREATEST_LOWER_BOUND=1;SourceMapConsumer.LEAST_UPPER_BOUND=2;SourceMapConsumer.prototype.eachMapping=function SourceMapConsumer_eachMapping(e,r,n){var t=r||null;var i=n||SourceMapConsumer.GENERATED_ORDER;var a;switch(i){case SourceMapConsumer.GENERATED_ORDER:a=this._generatedMappings;break;case SourceMapConsumer.ORIGINAL_ORDER:a=this._originalMappings;break;default:throw new Error("Unknown order of iteration.")}var u=this.sourceRoot;a.map((function(e){var r=e.source===null?null:this._sources.at(e.source);r=o.computeSourceURL(u,r,this._sourceMapURL);return{source:r,generatedLine:e.generatedLine,generatedColumn:e.generatedColumn,originalLine:e.originalLine,originalColumn:e.originalColumn,name:e.name===null?null:this._names.at(e.name)}}),this).forEach(e,t)};SourceMapConsumer.prototype.allGeneratedPositionsFor=function SourceMapConsumer_allGeneratedPositionsFor(e){var r=o.getArg(e,"line");var n={source:o.getArg(e,"source"),originalLine:r,originalColumn:o.getArg(e,"column",0)};n.source=this._findSourceIndex(n.source);if(n.source<0){return[]}var t=[];var a=this._findMapping(n,this._originalMappings,"originalLine","originalColumn",o.compareByOriginalPositions,i.LEAST_UPPER_BOUND);if(a>=0){var u=this._originalMappings[a];if(e.column===undefined){var s=u.originalLine;while(u&&u.originalLine===s){t.push({line:o.getArg(u,"generatedLine",null),column:o.getArg(u,"generatedColumn",null),lastColumn:o.getArg(u,"lastGeneratedColumn",null)});u=this._originalMappings[++a]}}else{var l=u.originalColumn;while(u&&u.originalLine===r&&u.originalColumn==l){t.push({line:o.getArg(u,"generatedLine",null),column:o.getArg(u,"generatedColumn",null),lastColumn:o.getArg(u,"lastGeneratedColumn",null)});u=this._originalMappings[++a]}}}return t};r.SourceMapConsumer=SourceMapConsumer;function BasicSourceMapConsumer(e,r){var n=e;if(typeof e==="string"){n=o.parseSourceMapInput(e)}var t=o.getArg(n,"version");var i=o.getArg(n,"sources");var u=o.getArg(n,"names",[]);var s=o.getArg(n,"sourceRoot",null);var l=o.getArg(n,"sourcesContent",null);var c=o.getArg(n,"mappings");var p=o.getArg(n,"file",null);if(t!=this._version){throw new Error("Unsupported version: 
"+t)}if(s){s=o.normalize(s)}i=i.map(String).map(o.normalize).map((function(e){return s&&o.isAbsolute(s)&&o.isAbsolute(e)?o.relative(s,e):e}));this._names=a.fromArray(u.map(String),true);this._sources=a.fromArray(i,true);this._absoluteSources=this._sources.toArray().map((function(e){return o.computeSourceURL(s,e,r)}));this.sourceRoot=s;this.sourcesContent=l;this._mappings=c;this._sourceMapURL=r;this.file=p}BasicSourceMapConsumer.prototype=Object.create(SourceMapConsumer.prototype);BasicSourceMapConsumer.prototype.consumer=SourceMapConsumer;BasicSourceMapConsumer.prototype._findSourceIndex=function(e){var r=e;if(this.sourceRoot!=null){r=o.relative(this.sourceRoot,r)}if(this._sources.has(r)){return this._sources.indexOf(r)}var n;for(n=0;n1){v.source=l+_[1];l+=_[1];v.originalLine=i+_[2];i=v.originalLine;v.originalLine+=1;v.originalColumn=a+_[3];a=v.originalColumn;if(_.length>4){v.name=c+_[4];c+=_[4]}}m.push(v);if(typeof v.originalLine==="number"){d.push(v)}}}s(m,o.compareByGeneratedPositionsDeflated);this.__generatedMappings=m;s(d,o.compareByOriginalPositions);this.__originalMappings=d};BasicSourceMapConsumer.prototype._findMapping=function SourceMapConsumer_findMapping(e,r,n,t,o,a){if(e[n]<=0){throw new TypeError("Line must be greater than or equal to 1, got "+e[n])}if(e[t]<0){throw new TypeError("Column must be greater than or equal to 0, got "+e[t])}return i.search(e,r,o,a)};BasicSourceMapConsumer.prototype.computeColumnSpans=function SourceMapConsumer_computeColumnSpans(){for(var e=0;e=0){var t=this._generatedMappings[n];if(t.generatedLine===r.generatedLine){var i=o.getArg(t,"source",null);if(i!==null){i=this._sources.at(i);i=o.computeSourceURL(this.sourceRoot,i,this._sourceMapURL)}var a=o.getArg(t,"name",null);if(a!==null){a=this._names.at(a)}return{source:i,line:o.getArg(t,"originalLine",null),column:o.getArg(t,"originalColumn",null),name:a}}}return{source:null,line:null,column:null,name:null}};BasicSourceMapConsumer.prototype.hasContentsOfAllSources=function BasicSourceMapConsumer_hasContentsOfAllSources(){if(!this.sourcesContent){return false}return this.sourcesContent.length>=this._sources.size()&&!this.sourcesContent.some((function(e){return e==null}))};BasicSourceMapConsumer.prototype.sourceContentFor=function SourceMapConsumer_sourceContentFor(e,r){if(!this.sourcesContent){return null}var n=this._findSourceIndex(e);if(n>=0){return this.sourcesContent[n]}var t=e;if(this.sourceRoot!=null){t=o.relative(this.sourceRoot,t)}var i;if(this.sourceRoot!=null&&(i=o.urlParse(this.sourceRoot))){var a=t.replace(/^file:\/\//,"");if(i.scheme=="file"&&this._sources.has(a)){return this.sourcesContent[this._sources.indexOf(a)]}if((!i.path||i.path=="/")&&this._sources.has("/"+t)){return this.sourcesContent[this._sources.indexOf("/"+t)]}}if(r){return null}else{throw new Error('"'+t+'" is not in the SourceMap.')}};BasicSourceMapConsumer.prototype.generatedPositionFor=function SourceMapConsumer_generatedPositionFor(e){var r=o.getArg(e,"source");r=this._findSourceIndex(r);if(r<0){return{line:null,column:null,lastColumn:null}}var n={source:r,originalLine:o.getArg(e,"line"),originalColumn:o.getArg(e,"column")};var t=this._findMapping(n,this._originalMappings,"originalLine","originalColumn",o.compareByOriginalPositions,o.getArg(e,"bias",SourceMapConsumer.GREATEST_LOWER_BOUND));if(t>=0){var 
i=this._originalMappings[t];if(i.source===n.source){return{line:o.getArg(i,"generatedLine",null),column:o.getArg(i,"generatedColumn",null),lastColumn:o.getArg(i,"lastGeneratedColumn",null)}}}return{line:null,column:null,lastColumn:null}};t=BasicSourceMapConsumer;function IndexedSourceMapConsumer(e,r){var n=e;if(typeof e==="string"){n=o.parseSourceMapInput(e)}var t=o.getArg(n,"version");var i=o.getArg(n,"sections");if(t!=this._version){throw new Error("Unsupported version: "+t)}this._sources=new a;this._names=new a;var u={line:-1,column:0};this._sections=i.map((function(e){if(e.url){throw new Error("Support for url field in sections not implemented.")}var n=o.getArg(e,"offset");var t=o.getArg(n,"line");var i=o.getArg(n,"column");if(t{var t=n(215);var o=n(983);var i=n(837).I;var a=n(740).H;function SourceMapGenerator(e){if(!e){e={}}this._file=o.getArg(e,"file",null);this._sourceRoot=o.getArg(e,"sourceRoot",null);this._skipValidation=o.getArg(e,"skipValidation",false);this._sources=new i;this._names=new i;this._mappings=new a;this._sourcesContents=null}SourceMapGenerator.prototype._version=3;SourceMapGenerator.fromSourceMap=function SourceMapGenerator_fromSourceMap(e){var r=e.sourceRoot;var n=new SourceMapGenerator({file:e.file,sourceRoot:r});e.eachMapping((function(e){var t={generated:{line:e.generatedLine,column:e.generatedColumn}};if(e.source!=null){t.source=e.source;if(r!=null){t.source=o.relative(r,t.source)}t.original={line:e.originalLine,column:e.originalColumn};if(e.name!=null){t.name=e.name}}n.addMapping(t)}));e.sources.forEach((function(t){var i=t;if(r!==null){i=o.relative(r,t)}if(!n._sources.has(i)){n._sources.add(i)}var a=e.sourceContentFor(t);if(a!=null){n.setSourceContent(t,a)}}));return n};SourceMapGenerator.prototype.addMapping=function SourceMapGenerator_addMapping(e){var r=o.getArg(e,"generated");var n=o.getArg(e,"original",null);var t=o.getArg(e,"source",null);var i=o.getArg(e,"name",null);if(!this._skipValidation){this._validateMapping(r,n,t,i)}if(t!=null){t=String(t);if(!this._sources.has(t)){this._sources.add(t)}}if(i!=null){i=String(i);if(!this._names.has(i)){this._names.add(i)}}this._mappings.add({generatedLine:r.line,generatedColumn:r.column,originalLine:n!=null&&n.line,originalColumn:n!=null&&n.column,source:t,name:i})};SourceMapGenerator.prototype.setSourceContent=function SourceMapGenerator_setSourceContent(e,r){var n=e;if(this._sourceRoot!=null){n=o.relative(this._sourceRoot,n)}if(r!=null){if(!this._sourcesContents){this._sourcesContents=Object.create(null)}this._sourcesContents[o.toSetString(n)]=r}else if(this._sourcesContents){delete this._sourcesContents[o.toSetString(n)];if(Object.keys(this._sourcesContents).length===0){this._sourcesContents=null}}};SourceMapGenerator.prototype.applySourceMap=function SourceMapGenerator_applySourceMap(e,r,n){var t=r;if(r==null){if(e.file==null){throw new Error("SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, "+'or the source map\'s "file" property. 
Both were omitted.')}t=e.file}var a=this._sourceRoot;if(a!=null){t=o.relative(a,t)}var u=new i;var s=new i;this._mappings.unsortedForEach((function(r){if(r.source===t&&r.originalLine!=null){var i=e.originalPositionFor({line:r.originalLine,column:r.originalColumn});if(i.source!=null){r.source=i.source;if(n!=null){r.source=o.join(n,r.source)}if(a!=null){r.source=o.relative(a,r.source)}r.originalLine=i.line;r.originalColumn=i.column;if(i.name!=null){r.name=i.name}}}var l=r.source;if(l!=null&&!u.has(l)){u.add(l)}var c=r.name;if(c!=null&&!s.has(c)){s.add(c)}}),this);this._sources=u;this._names=s;e.sources.forEach((function(r){var t=e.sourceContentFor(r);if(t!=null){if(n!=null){r=o.join(n,r)}if(a!=null){r=o.relative(a,r)}this.setSourceContent(r,t)}}),this)};SourceMapGenerator.prototype._validateMapping=function SourceMapGenerator_validateMapping(e,r,n,t){if(r&&typeof r.line!=="number"&&typeof r.column!=="number"){throw new Error("original.line and original.column are not numbers -- you probably meant to omit "+"the original mapping entirely and only map the generated position. If so, pass "+"null for the original mapping instead of an object with empty or null values.")}if(e&&"line"in e&&"column"in e&&e.line>0&&e.column>=0&&!r&&!n&&!t){return}else if(e&&"line"in e&&"column"in e&&r&&"line"in r&&"column"in r&&e.line>0&&e.column>=0&&r.line>0&&r.column>=0&&n){return}else{throw new Error("Invalid mapping: "+JSON.stringify({generated:e,source:n,original:r,name:t}))}};SourceMapGenerator.prototype._serializeMappings=function SourceMapGenerator_serializeMappings(){var e=0;var r=1;var n=0;var i=0;var a=0;var u=0;var s="";var l;var c;var p;var f;var g=this._mappings.toArray();for(var h=0,d=g.length;h0){if(!o.compareByGeneratedPositionsInflated(c,g[h-1])){continue}l+=","}}l+=t.encode(c.generatedColumn-e);e=c.generatedColumn;if(c.source!=null){f=this._sources.indexOf(c.source);l+=t.encode(f-u);u=f;l+=t.encode(c.originalLine-1-i);i=c.originalLine-1;l+=t.encode(c.originalColumn-n);n=c.originalColumn;if(c.name!=null){p=this._names.indexOf(c.name);l+=t.encode(p-a);a=p}}s+=l}return s};SourceMapGenerator.prototype._generateSourcesContent=function SourceMapGenerator_generateSourcesContent(e,r){return e.map((function(e){if(!this._sourcesContents){return null}if(r!=null){e=o.relative(r,e)}var n=o.toSetString(e);return Object.prototype.hasOwnProperty.call(this._sourcesContents,n)?this._sourcesContents[n]:null}),this)};SourceMapGenerator.prototype.toJSON=function SourceMapGenerator_toJSON(){var e={version:this._version,sources:this._sources.toArray(),names:this._names.toArray(),mappings:this._serializeMappings()};if(this._file!=null){e.file=this._file}if(this._sourceRoot!=null){e.sourceRoot=this._sourceRoot}if(this._sourcesContents){e.sourcesContent=this._generateSourcesContent(e.sources,e.sourceRoot)}return e};SourceMapGenerator.prototype.toString=function SourceMapGenerator_toString(){return JSON.stringify(this.toJSON())};r.h=SourceMapGenerator},990:(e,r,n)=>{var t;var o=n(341).h;var i=n(983);var a=/(\r?\n)/;var u=10;var s="$$$isSourceNode$$$";function SourceNode(e,r,n,t,o){this.children=[];this.sourceContents={};this.line=e==null?null:e;this.column=r==null?null:r;this.source=n==null?null:n;this.name=o==null?null:o;this[s]=true;if(t!=null)this.add(t)}SourceNode.fromStringWithSourceMap=function SourceNode_fromStringWithSourceMap(e,r,n){var t=new SourceNode;var o=e.split(a);var u=0;var shiftNextLine=function(){var e=getNextLine();var r=getNextLine()||"";return e+r;function getNextLine(){return 
u=0;r--){this.prepend(e[r])}}else if(e[s]||typeof e==="string"){this.children.unshift(e)}else{throw new TypeError("Expected a SourceNode, string, or an array of SourceNodes and strings. Got "+e)}return this};SourceNode.prototype.walk=function SourceNode_walk(e){var r;for(var n=0,t=this.children.length;n0){r=[];for(n=0;n{function getArg(e,r,n){if(r in e){return e[r]}else if(arguments.length===3){return n}else{throw new Error('"'+r+'" is a required argument.')}}r.getArg=getArg;var n=/^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.-]*)(?::(\d+))?(.*)$/;var t=/^data:.+\,.+$/;function urlParse(e){var r=e.match(n);if(!r){return null}return{scheme:r[1],auth:r[2],host:r[3],port:r[4],path:r[5]}}r.urlParse=urlParse;function urlGenerate(e){var r="";if(e.scheme){r+=e.scheme+":"}r+="//";if(e.auth){r+=e.auth+"@"}if(e.host){r+=e.host}if(e.port){r+=":"+e.port}if(e.path){r+=e.path}return r}r.urlGenerate=urlGenerate;function normalize(e){var n=e;var t=urlParse(e);if(t){if(!t.path){return e}n=t.path}var o=r.isAbsolute(n);var i=n.split(/\/+/);for(var a,u=0,s=i.length-1;s>=0;s--){a=i[s];if(a==="."){i.splice(s,1)}else if(a===".."){u++}else if(u>0){if(a===""){i.splice(s+1,u);u=0}else{i.splice(s,2);u--}}}n=i.join("/");if(n===""){n=o?"/":"."}if(t){t.path=n;return urlGenerate(t)}return n}r.normalize=normalize;function join(e,r){if(e===""){e="."}if(r===""){r="."}var n=urlParse(r);var o=urlParse(e);if(o){e=o.path||"/"}if(n&&!n.scheme){if(o){n.scheme=o.scheme}return urlGenerate(n)}if(n||r.match(t)){return r}if(o&&!o.host&&!o.path){o.host=r;return urlGenerate(o)}var i=r.charAt(0)==="/"?r:normalize(e.replace(/\/+$/,"")+"/"+r);if(o){o.path=i;return urlGenerate(o)}return i}r.join=join;r.isAbsolute=function(e){return e.charAt(0)==="/"||n.test(e)};function relative(e,r){if(e===""){e="."}e=e.replace(/\/$/,"");var n=0;while(r.indexOf(e+"/")!==0){var t=e.lastIndexOf("/");if(t<0){return r}e=e.slice(0,t);if(e.match(/^([^\/]+:\/)?\/*$/)){return r}++n}return Array(n+1).join("../")+r.substr(e.length+1)}r.relative=relative;var o=function(){var e=Object.create(null);return!("__proto__"in e)}();function identity(e){return e}function toSetString(e){if(isProtoString(e)){return"$"+e}return e}r.toSetString=o?identity:toSetString;function fromSetString(e){if(isProtoString(e)){return e.slice(1)}return e}r.fromSetString=o?identity:fromSetString;function isProtoString(e){if(!e){return false}var r=e.length;if(r<9){return false}if(e.charCodeAt(r-1)!==95||e.charCodeAt(r-2)!==95||e.charCodeAt(r-3)!==111||e.charCodeAt(r-4)!==116||e.charCodeAt(r-5)!==111||e.charCodeAt(r-6)!==114||e.charCodeAt(r-7)!==112||e.charCodeAt(r-8)!==95||e.charCodeAt(r-9)!==95){return false}for(var n=r-10;n>=0;n--){if(e.charCodeAt(n)!==36){return false}}return true}function compareByOriginalPositions(e,r,n){var t=strcmp(e.source,r.source);if(t!==0){return t}t=e.originalLine-r.originalLine;if(t!==0){return t}t=e.originalColumn-r.originalColumn;if(t!==0||n){return t}t=e.generatedColumn-r.generatedColumn;if(t!==0){return t}t=e.generatedLine-r.generatedLine;if(t!==0){return t}return strcmp(e.name,r.name)}r.compareByOriginalPositions=compareByOriginalPositions;function compareByGeneratedPositionsDeflated(e,r,n){var t=e.generatedLine-r.generatedLine;if(t!==0){return t}t=e.generatedColumn-r.generatedColumn;if(t!==0||n){return t}t=strcmp(e.source,r.source);if(t!==0){return t}t=e.originalLine-r.originalLine;if(t!==0){return t}t=e.originalColumn-r.originalColumn;if(t!==0){return t}return 
strcmp(e.name,r.name)}r.compareByGeneratedPositionsDeflated=compareByGeneratedPositionsDeflated;function strcmp(e,r){if(e===r){return 0}if(e===null){return 1}if(r===null){return-1}if(e>r){return 1}return-1}function compareByGeneratedPositionsInflated(e,r){var n=e.generatedLine-r.generatedLine;if(n!==0){return n}n=e.generatedColumn-r.generatedColumn;if(n!==0){return n}n=strcmp(e.source,r.source);if(n!==0){return n}n=e.originalLine-r.originalLine;if(n!==0){return n}n=e.originalColumn-r.originalColumn;if(n!==0){return n}return strcmp(e.name,r.name)}r.compareByGeneratedPositionsInflated=compareByGeneratedPositionsInflated;function parseSourceMapInput(e){return JSON.parse(e.replace(/^\)]}'[^\n]*\n/,""))}r.parseSourceMapInput=parseSourceMapInput;function computeSourceURL(e,r,n){r=r||"";if(e){if(e[e.length-1]!=="/"&&r[0]!=="/"){e+="/"}r=e+r}if(n){var t=urlParse(n);if(!t){throw new Error("sourceMapURL could not be parsed")}if(t.path){var o=t.path.lastIndexOf("/");if(o>=0){t.path=t.path.substring(0,o+1)}}r=join(urlGenerate(t),r)}return normalize(r)}r.computeSourceURL=computeSourceURL},596:(e,r,n)=>{n(341).h;r.SourceMapConsumer=n(327).SourceMapConsumer;n(990)},747:e=>{"use strict";e.exports=require("fs")},622:e=>{"use strict";e.exports=require("path")}};var r={};function __webpack_require__(n){var t=r[n];if(t!==undefined){return t.exports}var o=r[n]={id:n,loaded:false,exports:{}};var i=true;try{e[n](o,o.exports,__webpack_require__);i=false}finally{if(i)delete r[n]}o.loaded=true;return o.exports}(()=>{__webpack_require__.nmd=e=>{e.paths=[];if(!e.children)e.children=[];return e}})();if(typeof __webpack_require__!=="undefined")__webpack_require__.ab=__dirname+"/";var n={};(()=>{__webpack_require__(284).install()})();module.exports=n})(); \ No newline at end of file diff --git a/.github/actions/verify-chart-changelog/package-lock.json b/.github/actions/verify-chart-changelog/package-lock.json deleted file mode 100644 index bcaa78fc..00000000 --- a/.github/actions/verify-chart-changelog/package-lock.json +++ /dev/null @@ -1,803 +0,0 @@ -{ - "name": "verify-chart-version-bump", - "version": "1.0.0", - "lockfileVersion": 2, - "requires": true, - "packages": { - "": { - "name": "verify-chart-version-bump", - "version": "1.0.0", - "license": "ISC", - "dependencies": { - "@actions/core": "^1.9.0", - "@actions/github": "^5.0.3", - "ajv": "^8.11.0", - "ajv-formats": "^2.1.1", - "fs-extra": "^10.1.0", - "json-schema-to-ts": "^2.5.5", - "semver": "^7.3.7", - "yaml": "^2.1.1" - }, - "devDependencies": { - "@types/fs-extra": "9.0.13", - "@types/node": "17.0.45", - "@types/semver": "7.3.12", - "@vercel/ncc": "0.34.0", - "typescript": "4.7.4" - } - }, - "node_modules/@actions/core": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.9.0.tgz", - "integrity": "sha512-5pbM693Ih59ZdUhgk+fts+bUWTnIdHV3kwOSr+QIoFHMLg7Gzhwm0cifDY/AG68ekEJAkHnQVpcy4f6GjmzBCA==", - "dependencies": { - "@actions/http-client": "^2.0.1" - } - }, - "node_modules/@actions/github": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/@actions/github/-/github-5.0.3.tgz", - "integrity": "sha512-myjA/pdLQfhUGLtRZC/J4L1RXOG4o6aYdiEq+zr5wVVKljzbFld+xv10k1FX6IkIJtNxbAq44BdwSNpQ015P0A==", - "dependencies": { - "@actions/http-client": "^2.0.1", - "@octokit/core": "^3.6.0", - "@octokit/plugin-paginate-rest": "^2.17.0", - "@octokit/plugin-rest-endpoint-methods": "^5.13.0" - } - }, - "node_modules/@actions/http-client": { - "version": "2.0.1", - "resolved": 
"https://registry.npmjs.org/@actions/http-client/-/http-client-2.0.1.tgz", - "integrity": "sha512-PIXiMVtz6VvyaRsGY268qvj57hXQEpsYogYOu2nrQhlf+XCGmZstmuZBbAybUl1nQGnvS1k1eEsQ69ZoD7xlSw==", - "dependencies": { - "tunnel": "^0.0.6" - } - }, - "node_modules/@octokit/auth-token": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-2.5.0.tgz", - "integrity": "sha512-r5FVUJCOLl19AxiuZD2VRZ/ORjp/4IN98Of6YJoJOkY75CIBuYfmiNHGrDwXr+aLGG55igl9QrxX3hbiXlLb+g==", - "dependencies": { - "@octokit/types": "^6.0.3" - } - }, - "node_modules/@octokit/core": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/@octokit/core/-/core-3.6.0.tgz", - "integrity": "sha512-7RKRKuA4xTjMhY+eG3jthb3hlZCsOwg3rztWh75Xc+ShDWOfDDATWbeZpAHBNRpm4Tv9WgBMOy1zEJYXG6NJ7Q==", - "dependencies": { - "@octokit/auth-token": "^2.4.4", - "@octokit/graphql": "^4.5.8", - "@octokit/request": "^5.6.3", - "@octokit/request-error": "^2.0.5", - "@octokit/types": "^6.0.3", - "before-after-hook": "^2.2.0", - "universal-user-agent": "^6.0.0" - } - }, - "node_modules/@octokit/endpoint": { - "version": "6.0.12", - "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-6.0.12.tgz", - "integrity": "sha512-lF3puPwkQWGfkMClXb4k/eUT/nZKQfxinRWJrdZaJO85Dqwo/G0yOC434Jr2ojwafWJMYqFGFa5ms4jJUgujdA==", - "dependencies": { - "@octokit/types": "^6.0.3", - "is-plain-object": "^5.0.0", - "universal-user-agent": "^6.0.0" - } - }, - "node_modules/@octokit/graphql": { - "version": "4.8.0", - "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-4.8.0.tgz", - "integrity": "sha512-0gv+qLSBLKF0z8TKaSKTsS39scVKF9dbMxJpj3U0vC7wjNWFuIpL/z76Qe2fiuCbDRcJSavkXsVtMS6/dtQQsg==", - "dependencies": { - "@octokit/request": "^5.6.0", - "@octokit/types": "^6.0.3", - "universal-user-agent": "^6.0.0" - } - }, - "node_modules/@octokit/openapi-types": { - "version": "12.11.0", - "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-12.11.0.tgz", - "integrity": "sha512-VsXyi8peyRq9PqIz/tpqiL2w3w80OgVMwBHltTml3LmVvXiphgeqmY9mvBw9Wu7e0QWk/fqD37ux8yP5uVekyQ==" - }, - "node_modules/@octokit/plugin-paginate-rest": { - "version": "2.21.3", - "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-2.21.3.tgz", - "integrity": "sha512-aCZTEf0y2h3OLbrgKkrfFdjRL6eSOo8komneVQJnYecAxIej7Bafor2xhuDJOIFau4pk0i/P28/XgtbyPF0ZHw==", - "dependencies": { - "@octokit/types": "^6.40.0" - }, - "peerDependencies": { - "@octokit/core": ">=2" - } - }, - "node_modules/@octokit/plugin-rest-endpoint-methods": { - "version": "5.16.2", - "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-5.16.2.tgz", - "integrity": "sha512-8QFz29Fg5jDuTPXVtey05BLm7OB+M8fnvE64RNegzX7U+5NUXcOcnpTIK0YfSHBg8gYd0oxIq3IZTe9SfPZiRw==", - "dependencies": { - "@octokit/types": "^6.39.0", - "deprecation": "^2.3.1" - }, - "peerDependencies": { - "@octokit/core": ">=3" - } - }, - "node_modules/@octokit/request": { - "version": "5.6.3", - "resolved": "https://registry.npmjs.org/@octokit/request/-/request-5.6.3.tgz", - "integrity": "sha512-bFJl0I1KVc9jYTe9tdGGpAMPy32dLBXXo1dS/YwSCTL/2nd9XeHsY616RE3HPXDVk+a+dBuzyz5YdlXwcDTr2A==", - "dependencies": { - "@octokit/endpoint": "^6.0.1", - "@octokit/request-error": "^2.1.0", - "@octokit/types": "^6.16.1", - "is-plain-object": "^5.0.0", - "node-fetch": "^2.6.7", - "universal-user-agent": "^6.0.0" - } - }, - "node_modules/@octokit/request-error": { - "version": "2.1.0", - "resolved": 
"https://registry.npmjs.org/@octokit/request-error/-/request-error-2.1.0.tgz", - "integrity": "sha512-1VIvgXxs9WHSjicsRwq8PlR2LR2x6DwsJAaFgzdi0JfJoGSO8mYI/cHJQ+9FbN21aa+DrgNLnwObmyeSC8Rmpg==", - "dependencies": { - "@octokit/types": "^6.0.3", - "deprecation": "^2.0.0", - "once": "^1.4.0" - } - }, - "node_modules/@octokit/types": { - "version": "6.41.0", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-6.41.0.tgz", - "integrity": "sha512-eJ2jbzjdijiL3B4PrSQaSjuF2sPEQPVCPzBvTHJD9Nz+9dw2SGH4K4xeQJ77YfTq5bRQ+bD8wT11JbeDPmxmGg==", - "dependencies": { - "@octokit/openapi-types": "^12.11.0" - } - }, - "node_modules/@types/fs-extra": { - "version": "9.0.13", - "resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-9.0.13.tgz", - "integrity": "sha512-nEnwB++1u5lVDM2UI4c1+5R+FYaKfaAzS4OococimjVm3nQw3TuzH5UNsocrcTBbhnerblyHj4A49qXbIiZdpA==", - "dev": true, - "dependencies": { - "@types/node": "*" - } - }, - "node_modules/@types/json-schema": { - "version": "7.0.11", - "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.11.tgz", - "integrity": "sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ==" - }, - "node_modules/@types/node": { - "version": "17.0.45", - "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.45.tgz", - "integrity": "sha512-w+tIMs3rq2afQdsPJlODhoUEKzFP1ayaoyl1CcnwtIlsVe7K7bA1NGm4s3PraqTLlXnbIN84zuBlxBWo1u9BLw==", - "dev": true - }, - "node_modules/@types/semver": { - "version": "7.3.12", - "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.3.12.tgz", - "integrity": "sha512-WwA1MW0++RfXmCr12xeYOOC5baSC9mSb0ZqCquFzKhcoF4TvHu5MKOuXsncgZcpVFhB1pXd5hZmM0ryAoCp12A==", - "dev": true - }, - "node_modules/@vercel/ncc": { - "version": "0.34.0", - "resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.34.0.tgz", - "integrity": "sha512-G9h5ZLBJ/V57Ou9vz5hI8pda/YQX5HQszCs3AmIus3XzsmRn/0Ptic5otD3xVST8QLKk7AMk7AqpsyQGN7MZ9A==", - "dev": true, - "bin": { - "ncc": "dist/ncc/cli.js" - } - }, - "node_modules/ajv": { - "version": "8.11.0", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.11.0.tgz", - "integrity": "sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==", - "dependencies": { - "fast-deep-equal": "^3.1.1", - "json-schema-traverse": "^1.0.0", - "require-from-string": "^2.0.2", - "uri-js": "^4.2.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, - "node_modules/ajv-formats": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ajv-formats/-/ajv-formats-2.1.1.tgz", - "integrity": "sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==", - "dependencies": { - "ajv": "^8.0.0" - }, - "peerDependencies": { - "ajv": "^8.0.0" - }, - "peerDependenciesMeta": { - "ajv": { - "optional": true - } - } - }, - "node_modules/before-after-hook": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.2.tgz", - "integrity": "sha512-3pZEU3NT5BFUo/AD5ERPWOgQOCZITni6iavr5AUw5AUwQjMlI0kzu5btnyD39AF0gUEsDPwJT+oY1ORBJijPjQ==" - }, - "node_modules/deprecation": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", - "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" - }, - "node_modules/fast-deep-equal": { - "version": "3.1.3", - "resolved": 
"https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", - "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==" - }, - "node_modules/fs-extra": { - "version": "10.1.0", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz", - "integrity": "sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==", - "dependencies": { - "graceful-fs": "^4.2.0", - "jsonfile": "^6.0.1", - "universalify": "^2.0.0" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/graceful-fs": { - "version": "4.2.10", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz", - "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==" - }, - "node_modules/is-plain-object": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz", - "integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/json-schema-to-ts": { - "version": "2.5.5", - "resolved": "https://registry.npmjs.org/json-schema-to-ts/-/json-schema-to-ts-2.5.5.tgz", - "integrity": "sha512-GFD5t0fUnX/B0gE9xbHjxv2BwFXRJND2+OKoLoMElJ3XRJ7dOBlLT7KXpg96aETeZ0RJbAZOfqHALBf5k4aIIA==", - "dependencies": { - "@types/json-schema": "^7.0.9", - "ts-algebra": "^1.1.1", - "ts-toolbelt": "^9.6.0" - } - }, - "node_modules/json-schema-traverse": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", - "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" - }, - "node_modules/jsonfile": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", - "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", - "dependencies": { - "universalify": "^2.0.0" - }, - "optionalDependencies": { - "graceful-fs": "^4.1.6" - } - }, - "node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/node-fetch": { - "version": "2.6.7", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", - "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", - "dependencies": { - "whatwg-url": "^5.0.0" - }, - "engines": { - "node": "4.x || >=6.0.0" - }, - "peerDependencies": { - "encoding": "^0.1.0" - }, - "peerDependenciesMeta": { - "encoding": { - "optional": true - } - } - }, - "node_modules/once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", - "dependencies": { - "wrappy": "1" - } - }, - "node_modules/punycode": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", - "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", - "engines": { - "node": ">=6" - } - }, - "node_modules/require-from-string": { - "version": 
"2.0.2", - "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", - "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/semver": { - "version": "7.3.7", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.7.tgz", - "integrity": "sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==", - "dependencies": { - "lru-cache": "^6.0.0" - }, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/tr46": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", - "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" - }, - "node_modules/ts-algebra": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/ts-algebra/-/ts-algebra-1.1.1.tgz", - "integrity": "sha512-W43a3/BN0Tp4SgRNERQF/QPVuY1rnHkgCr/fISLY0Ycu05P0NWPYRuViU8JFn+pFZuY6/zp9TgET1fxMzppR/Q==", - "dependencies": { - "ts-toolbelt": "^9.6.0" - } - }, - "node_modules/ts-toolbelt": { - "version": "9.6.0", - "resolved": "https://registry.npmjs.org/ts-toolbelt/-/ts-toolbelt-9.6.0.tgz", - "integrity": "sha512-nsZd8ZeNUzukXPlJmTBwUAuABDe/9qtVDelJeT/qW0ow3ZS3BsQJtNkan1802aM9Uf68/Y8ljw86Hu0h5IUW3w==" - }, - "node_modules/tunnel": { - "version": "0.0.6", - "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", - "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==", - "engines": { - "node": ">=0.6.11 <=0.7.0 || >=0.7.3" - } - }, - "node_modules/typescript": { - "version": "4.7.4", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.7.4.tgz", - "integrity": "sha512-C0WQT0gezHuw6AdY1M2jxUO83Rjf0HP7Sk1DtXj6j1EwkQNZrHAg2XPWlq62oqEhYvONq5pkC2Y9oPljWToLmQ==", - "dev": true, - "bin": { - "tsc": "bin/tsc", - "tsserver": "bin/tsserver" - }, - "engines": { - "node": ">=4.2.0" - } - }, - "node_modules/universal-user-agent": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.0.tgz", - "integrity": "sha512-isyNax3wXoKaulPDZWHQqbmIx1k2tb9fb3GGDBRxCscfYV2Ch7WxPArBsFEG8s/safwXTT7H4QGhaIkTp9447w==" - }, - "node_modules/universalify": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz", - "integrity": "sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==", - "engines": { - "node": ">= 10.0.0" - } - }, - "node_modules/uri-js": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", - "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", - "dependencies": { - "punycode": "^2.1.0" - } - }, - "node_modules/webidl-conversions": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", - "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" - }, - "node_modules/whatwg-url": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", - "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", - "dependencies": { - "tr46": "~0.0.3", - "webidl-conversions": "^3.0.0" - } - }, - 
"node_modules/wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" - }, - "node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" - }, - "node_modules/yaml": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.1.1.tgz", - "integrity": "sha512-o96x3OPo8GjWeSLF+wOAbrPfhFOGY0W00GNaxCDv+9hkcDJEnev1yh8S7pgHF0ik6zc8sQLuL8hjHjJULZp8bw==", - "engines": { - "node": ">= 14" - } - } - }, - "dependencies": { - "@actions/core": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.9.0.tgz", - "integrity": "sha512-5pbM693Ih59ZdUhgk+fts+bUWTnIdHV3kwOSr+QIoFHMLg7Gzhwm0cifDY/AG68ekEJAkHnQVpcy4f6GjmzBCA==", - "requires": { - "@actions/http-client": "^2.0.1" - } - }, - "@actions/github": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/@actions/github/-/github-5.0.3.tgz", - "integrity": "sha512-myjA/pdLQfhUGLtRZC/J4L1RXOG4o6aYdiEq+zr5wVVKljzbFld+xv10k1FX6IkIJtNxbAq44BdwSNpQ015P0A==", - "requires": { - "@actions/http-client": "^2.0.1", - "@octokit/core": "^3.6.0", - "@octokit/plugin-paginate-rest": "^2.17.0", - "@octokit/plugin-rest-endpoint-methods": "^5.13.0" - } - }, - "@actions/http-client": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.0.1.tgz", - "integrity": "sha512-PIXiMVtz6VvyaRsGY268qvj57hXQEpsYogYOu2nrQhlf+XCGmZstmuZBbAybUl1nQGnvS1k1eEsQ69ZoD7xlSw==", - "requires": { - "tunnel": "^0.0.6" - } - }, - "@octokit/auth-token": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-2.5.0.tgz", - "integrity": "sha512-r5FVUJCOLl19AxiuZD2VRZ/ORjp/4IN98Of6YJoJOkY75CIBuYfmiNHGrDwXr+aLGG55igl9QrxX3hbiXlLb+g==", - "requires": { - "@octokit/types": "^6.0.3" - } - }, - "@octokit/core": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/@octokit/core/-/core-3.6.0.tgz", - "integrity": "sha512-7RKRKuA4xTjMhY+eG3jthb3hlZCsOwg3rztWh75Xc+ShDWOfDDATWbeZpAHBNRpm4Tv9WgBMOy1zEJYXG6NJ7Q==", - "requires": { - "@octokit/auth-token": "^2.4.4", - "@octokit/graphql": "^4.5.8", - "@octokit/request": "^5.6.3", - "@octokit/request-error": "^2.0.5", - "@octokit/types": "^6.0.3", - "before-after-hook": "^2.2.0", - "universal-user-agent": "^6.0.0" - } - }, - "@octokit/endpoint": { - "version": "6.0.12", - "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-6.0.12.tgz", - "integrity": "sha512-lF3puPwkQWGfkMClXb4k/eUT/nZKQfxinRWJrdZaJO85Dqwo/G0yOC434Jr2ojwafWJMYqFGFa5ms4jJUgujdA==", - "requires": { - "@octokit/types": "^6.0.3", - "is-plain-object": "^5.0.0", - "universal-user-agent": "^6.0.0" - } - }, - "@octokit/graphql": { - "version": "4.8.0", - "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-4.8.0.tgz", - "integrity": "sha512-0gv+qLSBLKF0z8TKaSKTsS39scVKF9dbMxJpj3U0vC7wjNWFuIpL/z76Qe2fiuCbDRcJSavkXsVtMS6/dtQQsg==", - "requires": { - "@octokit/request": "^5.6.0", - "@octokit/types": "^6.0.3", - "universal-user-agent": "^6.0.0" - } - }, - "@octokit/openapi-types": { - "version": "12.11.0", - "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-12.11.0.tgz", - "integrity": 
"sha512-VsXyi8peyRq9PqIz/tpqiL2w3w80OgVMwBHltTml3LmVvXiphgeqmY9mvBw9Wu7e0QWk/fqD37ux8yP5uVekyQ==" - }, - "@octokit/plugin-paginate-rest": { - "version": "2.21.3", - "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-2.21.3.tgz", - "integrity": "sha512-aCZTEf0y2h3OLbrgKkrfFdjRL6eSOo8komneVQJnYecAxIej7Bafor2xhuDJOIFau4pk0i/P28/XgtbyPF0ZHw==", - "requires": { - "@octokit/types": "^6.40.0" - } - }, - "@octokit/plugin-rest-endpoint-methods": { - "version": "5.16.2", - "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-5.16.2.tgz", - "integrity": "sha512-8QFz29Fg5jDuTPXVtey05BLm7OB+M8fnvE64RNegzX7U+5NUXcOcnpTIK0YfSHBg8gYd0oxIq3IZTe9SfPZiRw==", - "requires": { - "@octokit/types": "^6.39.0", - "deprecation": "^2.3.1" - } - }, - "@octokit/request": { - "version": "5.6.3", - "resolved": "https://registry.npmjs.org/@octokit/request/-/request-5.6.3.tgz", - "integrity": "sha512-bFJl0I1KVc9jYTe9tdGGpAMPy32dLBXXo1dS/YwSCTL/2nd9XeHsY616RE3HPXDVk+a+dBuzyz5YdlXwcDTr2A==", - "requires": { - "@octokit/endpoint": "^6.0.1", - "@octokit/request-error": "^2.1.0", - "@octokit/types": "^6.16.1", - "is-plain-object": "^5.0.0", - "node-fetch": "^2.6.7", - "universal-user-agent": "^6.0.0" - } - }, - "@octokit/request-error": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-2.1.0.tgz", - "integrity": "sha512-1VIvgXxs9WHSjicsRwq8PlR2LR2x6DwsJAaFgzdi0JfJoGSO8mYI/cHJQ+9FbN21aa+DrgNLnwObmyeSC8Rmpg==", - "requires": { - "@octokit/types": "^6.0.3", - "deprecation": "^2.0.0", - "once": "^1.4.0" - } - }, - "@octokit/types": { - "version": "6.41.0", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-6.41.0.tgz", - "integrity": "sha512-eJ2jbzjdijiL3B4PrSQaSjuF2sPEQPVCPzBvTHJD9Nz+9dw2SGH4K4xeQJ77YfTq5bRQ+bD8wT11JbeDPmxmGg==", - "requires": { - "@octokit/openapi-types": "^12.11.0" - } - }, - "@types/fs-extra": { - "version": "9.0.13", - "resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-9.0.13.tgz", - "integrity": "sha512-nEnwB++1u5lVDM2UI4c1+5R+FYaKfaAzS4OococimjVm3nQw3TuzH5UNsocrcTBbhnerblyHj4A49qXbIiZdpA==", - "dev": true, - "requires": { - "@types/node": "*" - } - }, - "@types/json-schema": { - "version": "7.0.11", - "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.11.tgz", - "integrity": "sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ==" - }, - "@types/node": { - "version": "17.0.45", - "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.45.tgz", - "integrity": "sha512-w+tIMs3rq2afQdsPJlODhoUEKzFP1ayaoyl1CcnwtIlsVe7K7bA1NGm4s3PraqTLlXnbIN84zuBlxBWo1u9BLw==", - "dev": true - }, - "@types/semver": { - "version": "7.3.12", - "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.3.12.tgz", - "integrity": "sha512-WwA1MW0++RfXmCr12xeYOOC5baSC9mSb0ZqCquFzKhcoF4TvHu5MKOuXsncgZcpVFhB1pXd5hZmM0ryAoCp12A==", - "dev": true - }, - "@vercel/ncc": { - "version": "0.34.0", - "resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.34.0.tgz", - "integrity": "sha512-G9h5ZLBJ/V57Ou9vz5hI8pda/YQX5HQszCs3AmIus3XzsmRn/0Ptic5otD3xVST8QLKk7AMk7AqpsyQGN7MZ9A==", - "dev": true - }, - "ajv": { - "version": "8.11.0", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.11.0.tgz", - "integrity": "sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==", - "requires": { - "fast-deep-equal": "^3.1.1", - "json-schema-traverse": 
"^1.0.0", - "require-from-string": "^2.0.2", - "uri-js": "^4.2.2" - } - }, - "ajv-formats": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ajv-formats/-/ajv-formats-2.1.1.tgz", - "integrity": "sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==", - "requires": { - "ajv": "^8.0.0" - } - }, - "before-after-hook": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.2.tgz", - "integrity": "sha512-3pZEU3NT5BFUo/AD5ERPWOgQOCZITni6iavr5AUw5AUwQjMlI0kzu5btnyD39AF0gUEsDPwJT+oY1ORBJijPjQ==" - }, - "deprecation": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", - "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" - }, - "fast-deep-equal": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", - "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==" - }, - "fs-extra": { - "version": "10.1.0", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz", - "integrity": "sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==", - "requires": { - "graceful-fs": "^4.2.0", - "jsonfile": "^6.0.1", - "universalify": "^2.0.0" - } - }, - "graceful-fs": { - "version": "4.2.10", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz", - "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==" - }, - "is-plain-object": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz", - "integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==" - }, - "json-schema-to-ts": { - "version": "2.5.5", - "resolved": "https://registry.npmjs.org/json-schema-to-ts/-/json-schema-to-ts-2.5.5.tgz", - "integrity": "sha512-GFD5t0fUnX/B0gE9xbHjxv2BwFXRJND2+OKoLoMElJ3XRJ7dOBlLT7KXpg96aETeZ0RJbAZOfqHALBf5k4aIIA==", - "requires": { - "@types/json-schema": "^7.0.9", - "ts-algebra": "^1.1.1", - "ts-toolbelt": "^9.6.0" - } - }, - "json-schema-traverse": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", - "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" - }, - "jsonfile": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", - "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", - "requires": { - "graceful-fs": "^4.1.6", - "universalify": "^2.0.0" - } - }, - "lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "requires": { - "yallist": "^4.0.0" - } - }, - "node-fetch": { - "version": "2.6.7", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", - "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", - "requires": { - "whatwg-url": "^5.0.0" - } - }, - "once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": 
"sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", - "requires": { - "wrappy": "1" - } - }, - "punycode": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", - "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==" - }, - "require-from-string": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", - "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==" - }, - "semver": { - "version": "7.3.7", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.7.tgz", - "integrity": "sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==", - "requires": { - "lru-cache": "^6.0.0" - } - }, - "tr46": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", - "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" - }, - "ts-algebra": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/ts-algebra/-/ts-algebra-1.1.1.tgz", - "integrity": "sha512-W43a3/BN0Tp4SgRNERQF/QPVuY1rnHkgCr/fISLY0Ycu05P0NWPYRuViU8JFn+pFZuY6/zp9TgET1fxMzppR/Q==", - "requires": { - "ts-toolbelt": "^9.6.0" - } - }, - "ts-toolbelt": { - "version": "9.6.0", - "resolved": "https://registry.npmjs.org/ts-toolbelt/-/ts-toolbelt-9.6.0.tgz", - "integrity": "sha512-nsZd8ZeNUzukXPlJmTBwUAuABDe/9qtVDelJeT/qW0ow3ZS3BsQJtNkan1802aM9Uf68/Y8ljw86Hu0h5IUW3w==" - }, - "tunnel": { - "version": "0.0.6", - "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", - "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==" - }, - "typescript": { - "version": "4.7.4", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.7.4.tgz", - "integrity": "sha512-C0WQT0gezHuw6AdY1M2jxUO83Rjf0HP7Sk1DtXj6j1EwkQNZrHAg2XPWlq62oqEhYvONq5pkC2Y9oPljWToLmQ==", - "dev": true - }, - "universal-user-agent": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.0.tgz", - "integrity": "sha512-isyNax3wXoKaulPDZWHQqbmIx1k2tb9fb3GGDBRxCscfYV2Ch7WxPArBsFEG8s/safwXTT7H4QGhaIkTp9447w==" - }, - "universalify": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz", - "integrity": "sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==" - }, - "uri-js": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", - "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", - "requires": { - "punycode": "^2.1.0" - } - }, - "webidl-conversions": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", - "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" - }, - "whatwg-url": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", - "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", - "requires": { - "tr46": "~0.0.3", - "webidl-conversions": "^3.0.0" - } - }, - "wrappy": { - "version": "1.0.2", - "resolved": 
"https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" - }, - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" - }, - "yaml": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.1.1.tgz", - "integrity": "sha512-o96x3OPo8GjWeSLF+wOAbrPfhFOGY0W00GNaxCDv+9hkcDJEnev1yh8S7pgHF0ik6zc8sQLuL8hjHjJULZp8bw==" - } - } -} diff --git a/.github/actions/verify-chart-changelog/package.json b/.github/actions/verify-chart-changelog/package.json deleted file mode 100644 index 44a8ff6d..00000000 --- a/.github/actions/verify-chart-changelog/package.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "name": "verify-chart-version-bump", - "version": "1.0.0", - "description": "", - "main": "lib/main.js", - "scripts": { - "build": "tsc", - "package": "npm run build && NODE_OPTIONS=--openssl-legacy-provider ncc build --source-map --license licenses.txt" - }, - "keywords": [], - "author": "", - "license": "ISC", - "dependencies": { - "@actions/core": "^1.9.0", - "@actions/github": "^5.0.3", - "ajv": "^8.11.0", - "ajv-formats": "^2.1.1", - "fs-extra": "^10.1.0", - "json-schema-to-ts": "^2.5.5", - "semver": "^7.3.7", - "yaml": "^2.1.1" - }, - "devDependencies": { - "@types/fs-extra": "9.0.13", - "@types/node": "17.0.45", - "@types/semver": "7.3.12", - "@vercel/ncc": "0.34.0", - "typescript": "4.7.4" - } -} diff --git a/.github/actions/verify-chart-changelog/src/main.ts b/.github/actions/verify-chart-changelog/src/main.ts deleted file mode 100644 index b51abc64..00000000 --- a/.github/actions/verify-chart-changelog/src/main.ts +++ /dev/null @@ -1,149 +0,0 @@ -import * as core from "@actions/core"; -import * as github from "@actions/github"; -import * as YAML from "yaml"; -import { validateAgainstJsonSchema, changelogEntrySchema } from "./schemas"; -import { FromSchema } from "json-schema-to-ts"; -import * as fs from "fs-extra"; - -function getErrorMessage(error: unknown) { - if (error instanceof Error) return error.message; - return String(error); -} - -function getChangelogFromYaml(chartYaml: any): string | undefined { - const changelogAnnotation = "artifacthub.io/changes"; - if (chartYaml.annotations) { - if (chartYaml.annotations[changelogAnnotation]) { - return chartYaml.annotations[changelogAnnotation]; - } - } - return undefined; -} - -async function getChartYamlFromRepo( - path: string, - ref: string, - token: string -): Promise { - let result = {}; - const octokit = github.getOctokit(token); - const chartYamlFile = await octokit.rest.repos.getContent({ - owner: github.context.repo.owner, - repo: github.context.repo.repo, - path: `${path}`, - ref: ref, - }); - - if (chartYamlFile && "content" in chartYamlFile.data) { - const originalChartYamlContent = Buffer.from( - chartYamlFile.data.content, - "base64" - ).toString("utf-8"); - result = await YAML.parse(originalChartYamlContent); - } - return result; -} - -async function getChartYamlFromFile(path: string): Promise { - const chartYamlFile = await fs.readFile(path, "utf8"); - return YAML.parse(chartYamlFile); -} - -async function checkRefExists(ref: string, token: string): Promise { - const octokit = github.getOctokit(token); - try { - await octokit.rest.git.getRef({ - owner: github.context.repo.owner, - repo: github.context.repo.repo, - ref: ref, - }); - } catch 
(error) { - core.setFailed(`Ref ${ref} was not found for this repository!`); - return false; - } - return true; -} - -type changelogEntry = FromSchema; - -async function run() { - try { - if (github.context.eventName !== "pull_request") { - core.setFailed("This action can only run on pull requests!"); - return; - } - - // Gather inputs - const githubToken = core.getInput("token"); - const chart = core.getInput("chart", { required: true }); - const base = core.getInput("base", { required: false }); - - // Verify the base ref exists - if (base && !(await checkRefExists(base, githubToken))) { - core.setFailed(`Ref ${base} was not found for this repository!`); - return; - } - - // Determine the default branch - const defaultBranch = github.context.payload.repository?.default_branch; - - // Validate the chart - const chartYamlPath = `${chart}/Chart.yaml`; - - if (!(await fs.pathExists(chartYamlPath))) { - core.setFailed(`${chart} is not a valid Helm chart folder!`); - return; - } - - var originalChartYaml; - var originalChartChangelog: string | undefined; - try { - originalChartYaml = await getChartYamlFromRepo( - chartYamlPath, - base || `heads/${defaultBranch}`, - githubToken - ); - originalChartChangelog = getChangelogFromYaml(originalChartYaml); - } catch (error) { - core.warning( - `Could not find original Chart.yaml for ${chart}, assuming this is a new chart.` - ); - } - - const updatedChartYaml = await getChartYamlFromFile(chartYamlPath); - const updatedChartChangelog = getChangelogFromYaml(updatedChartYaml); - if (!updatedChartChangelog) { - core.setFailed(`${chartYamlPath} does not contain a changelog!`); - return; - } - - // Check if the changelog was updated - if (originalChartChangelog) { - if (updatedChartChangelog == originalChartChangelog) { - core.setFailed( - `Chart changelog has not been updated!` - ); - return; - } - } - - // Validate the changelog entries - const changelogEntries: changelogEntry[] = YAML.parse( - updatedChartChangelog - ); - changelogEntries.forEach((entry: changelogEntry) => { - const validator = validateAgainstJsonSchema(entry, changelogEntrySchema); - if (!validator.valid) { - validator.errors?.forEach((error: any) => { - core.setFailed( - `${chart} changelog validation failed: ${JSON.stringify(error)}` - ); - }); - } - }); - } catch (error) { - core.setFailed(getErrorMessage(error)); - } -} - -run(); diff --git a/.github/actions/verify-chart-changelog/src/schemas.ts b/.github/actions/verify-chart-changelog/src/schemas.ts deleted file mode 100644 index 906a900d..00000000 --- a/.github/actions/verify-chart-changelog/src/schemas.ts +++ /dev/null @@ -1,44 +0,0 @@ -import Ajv from "ajv"; -import addFormats from "ajv-formats"; - -export const changelogEntrySchema = { - type: "object", - properties: { - kind: { - type: "string", - enum: ["added", "changed", "deprecated", "removed", "fixed", "security"], - }, - description: { type: "string" }, - links: { - type: "array", - items: { - type: "object", - properties: { - name: { type: "string" }, - url: { $ref: "#/definitions/saneUrl" }, - }, - required: ["name", "url"], - additionalProperties: false, - }, - }, - }, - required: ["kind", "description"], - additionalProperties: false, - definitions: { - saneUrl: { - type: "string", - format: "uri", - pattern: "^https?://", - }, - }, -} as const; - -export function validateAgainstJsonSchema(object: any, schema: typeof changelogEntrySchema) { - const ajv = new Ajv(); - addFormats(ajv); - const validator = ajv.compile(schema); - return { - valid: validator(object), - 
errors: validator.errors, - }; -} diff --git a/.github/actions/verify-chart-changelog/tsconfig.json b/.github/actions/verify-chart-changelog/tsconfig.json deleted file mode 100644 index f6e7cb5b..00000000 --- a/.github/actions/verify-chart-changelog/tsconfig.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "compilerOptions": { - "target": "es6", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */ - "module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */ - "outDir": "./lib", /* Redirect output structure to the directory. */ - "rootDir": "./src", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */ - "strict": true, /* Enable all strict type-checking options. */ - "noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */ - "esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */ - }, - "exclude": ["node_modules", "**/*.test.ts"] -} diff --git a/.github/actions/verify-chart-version/.gitignore b/.github/actions/verify-chart-version/.gitignore deleted file mode 100644 index 3b4e8dcf..00000000 --- a/.github/actions/verify-chart-version/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -lib/ -node_modules/ diff --git a/.github/actions/verify-chart-version/action.yml b/.github/actions/verify-chart-version/action.yml deleted file mode 100644 index 67dca9aa..00000000 --- a/.github/actions/verify-chart-version/action.yml +++ /dev/null @@ -1,16 +0,0 @@ -name: 'Verify Helm chart version bump' -description: 'Checks if the Helm chart version number has been bumped' -inputs: - chart: - description: 'Which chart to check' - required: true - base: - description: 'Ref to compare against' - required: false - token: - description: 'GitHub token' - default: '${{ github.token }}' - required: true -runs: - using: 'node16' - main: 'dist/index.js' diff --git a/.github/actions/verify-chart-version/dist/index.js b/.github/actions/verify-chart-version/dist/index.js deleted file mode 100644 index f378d80c..00000000 --- a/.github/actions/verify-chart-version/dist/index.js +++ /dev/null @@ -1,23659 +0,0 @@ -require('./sourcemap-register.js');/******/ (() => { // webpackBootstrap -/******/ var __webpack_modules__ = ({ - -/***/ 3109: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const core = __importStar(__nccwpck_require__(2186)); -const github = __importStar(__nccwpck_require__(5438)); -const YAML = __importStar(__nccwpck_require__(5065)); -const semver = __importStar(__nccwpck_require__(1383)); -const fs = __importStar(__nccwpck_require__(5630)); -function getErrorMessage(error) { - if (error instanceof Error) - return error.message; - return String(error); -} -function run() { - var _a; - return __awaiter(this, void 0, void 0, function* () { - try { - if (github.context.eventName !== "pull_request") { - core.setFailed("This action can only run on pull requests!"); - return; - } - const githubToken = core.getInput("token"); - const chart = core.getInput("chart", { required: true }); - const base = core.getInput("base", { required: false }); - const chartYamlPath = `${chart}/Chart.yaml`; - const defaultBranch = (_a = github.context.payload.repository) === null || _a === void 0 ? 
void 0 : _a.default_branch; - const octokit = github.getOctokit(githubToken); - if (!(yield fs.pathExists(chartYamlPath))) { - core.setFailed(`${chart} is not a valid Helm chart folder!`); - return; - } - if (base) { - try { - yield octokit.rest.git.getRef({ - owner: github.context.repo.owner, - repo: github.context.repo.repo, - ref: base, - }); - } - catch (error) { - core.setFailed(`Ref ${base} was not found for this repository!`); - return; - } - } - var originalChartYamlFile; - var originalChartVersion; - try { - originalChartYamlFile = yield octokit.rest.repos.getContent({ - owner: github.context.repo.owner, - repo: github.context.repo.repo, - path: `${chartYamlPath}`, - ref: base || `heads/${defaultBranch}`, - }); - } - catch (error) { - core.warning(`Could not find original Chart.yaml for ${chart}, assuming this is a new chart.`); - } - if (originalChartYamlFile && "content" in originalChartYamlFile.data) { - const originalChartYamlContent = Buffer.from(originalChartYamlFile.data.content, "base64").toString("utf-8"); - const originalChartYaml = yield YAML.parse(originalChartYamlContent); - originalChartVersion = originalChartYaml.version; - } - const updatedChartYamlContent = yield fs.readFile(chartYamlPath, "utf8"); - const updatedChartYaml = yield YAML.parse(updatedChartYamlContent); - if (!updatedChartYaml.version) { - core.setFailed(`${chartYamlPath} does not contain a version!`); - return; - } - const updatedChartVersion = updatedChartYaml.version; - if (!semver.valid(updatedChartVersion)) { - core.setFailed(`${updatedChartVersion} is not a valid SemVer version!`); - return; - } - if (originalChartVersion) { - if (updatedChartVersion == originalChartVersion) { - core.setFailed(`Chart version has not been updated!`); - return; - } - if (!semver.gt(updatedChartVersion, originalChartVersion)) { - core.setFailed(`Updated chart version ${updatedChartVersion} is < ${originalChartVersion}!`); - return; - } - core.info(`Old chart version: ${originalChartVersion}`); - } - core.info(`New chart version: ${updatedChartVersion}`); - core.info(`New chart version verified succesfully.`); - } - catch (error) { - core.setFailed(getErrorMessage(error)); - } - }); -} -run(); - - -/***/ }), - -/***/ 7351: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.issue = exports.issueCommand = void 0; -const os = __importStar(__nccwpck_require__(2087)); -const utils_1 = __nccwpck_require__(5278); -/** - * Commands - * - * Command Format: - * ::name key=value,key=value::message - * - * Examples: - * ::warning::This is the message - * ::set-env name=MY_VAR::some value - */ -function issueCommand(command, properties, message) { - const cmd = new Command(command, properties, message); - process.stdout.write(cmd.toString() + os.EOL); -} -exports.issueCommand = issueCommand; -function issue(name, message = '') { - issueCommand(name, {}, message); -} -exports.issue = issue; -const CMD_STRING = '::'; -class Command { - constructor(command, properties, message) { - if (!command) { - command = 'missing.command'; - } - this.command = command; - this.properties = properties; - this.message = message; - } - toString() { - let cmdStr = CMD_STRING + this.command; - if (this.properties && Object.keys(this.properties).length > 0) { - cmdStr += ' '; - let first = true; - for (const key in this.properties) { - if (this.properties.hasOwnProperty(key)) { - const val = this.properties[key]; - if (val) { - if (first) { - first = false; - } - else { - cmdStr += ','; - } - cmdStr += `${key}=${escapeProperty(val)}`; - } - } - } - } - cmdStr += `${CMD_STRING}${escapeData(this.message)}`; - return cmdStr; - } -} -function escapeData(s) { - return utils_1.toCommandValue(s) - .replace(/%/g, '%25') - .replace(/\r/g, '%0D') - .replace(/\n/g, '%0A'); -} -function escapeProperty(s) { - return utils_1.toCommandValue(s) - .replace(/%/g, '%25') - .replace(/\r/g, '%0D') - .replace(/\n/g, '%0A') - .replace(/:/g, '%3A') - .replace(/,/g, '%2C'); -} -//# sourceMappingURL=command.js.map - -/***/ }), - -/***/ 2186: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0; -const command_1 = __nccwpck_require__(7351); -const file_command_1 = __nccwpck_require__(717); -const utils_1 = __nccwpck_require__(5278); -const os = __importStar(__nccwpck_require__(2087)); -const path = __importStar(__nccwpck_require__(5622)); -const oidc_utils_1 = __nccwpck_require__(8041); -/** - * The code to exit an action - */ -var ExitCode; -(function (ExitCode) { - /** - * A code indicating that the action was successful - */ - ExitCode[ExitCode["Success"] = 0] = "Success"; - /** - * A code indicating that the action was a failure - */ - ExitCode[ExitCode["Failure"] = 1] = "Failure"; -})(ExitCode = exports.ExitCode || (exports.ExitCode = {})); -//----------------------------------------------------------------------- -// Variables -//----------------------------------------------------------------------- -/** - * Sets env variable for this action and future actions in the job - * @param name the name of the variable to set - * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify - */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function exportVariable(name, val) { - const convertedVal = utils_1.toCommandValue(val); - process.env[name] = convertedVal; - const filePath = process.env['GITHUB_ENV'] || ''; - if (filePath) { - const delimiter = '_GitHubActionsFileCommandDelimeter_'; - const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`; - file_command_1.issueCommand('ENV', commandValue); - } - else { - command_1.issueCommand('set-env', { name }, convertedVal); - } -} -exports.exportVariable = exportVariable; -/** - * Registers a secret which will get masked from logs - * @param secret value of the secret - */ -function setSecret(secret) { - command_1.issueCommand('add-mask', {}, secret); -} -exports.setSecret = setSecret; -/** - * Prepends inputPath to the PATH (for this action and future actions) - * @param inputPath - */ -function addPath(inputPath) { - const filePath = process.env['GITHUB_PATH'] || ''; - if (filePath) { - file_command_1.issueCommand('PATH', inputPath); - } - else { - command_1.issueCommand('add-path', {}, inputPath); - } - process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`; -} -exports.addPath = addPath; -/** - * Gets the value of an input. - * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed. - * Returns an empty string if the value is not defined. 
- * - * @param name name of the input to get - * @param options optional. See InputOptions. - * @returns string - */ -function getInput(name, options) { - const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || ''; - if (options && options.required && !val) { - throw new Error(`Input required and not supplied: ${name}`); - } - if (options && options.trimWhitespace === false) { - return val; - } - return val.trim(); -} -exports.getInput = getInput; -/** - * Gets the values of an multiline input. Each value is also trimmed. - * - * @param name name of the input to get - * @param options optional. See InputOptions. - * @returns string[] - * - */ -function getMultilineInput(name, options) { - const inputs = getInput(name, options) - .split('\n') - .filter(x => x !== ''); - return inputs; -} -exports.getMultilineInput = getMultilineInput; -/** - * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification. - * Support boolean input list: `true | True | TRUE | false | False | FALSE` . - * The return value is also in boolean type. - * ref: https://yaml.org/spec/1.2/spec.html#id2804923 - * - * @param name name of the input to get - * @param options optional. See InputOptions. - * @returns boolean - */ -function getBooleanInput(name, options) { - const trueValue = ['true', 'True', 'TRUE']; - const falseValue = ['false', 'False', 'FALSE']; - const val = getInput(name, options); - if (trueValue.includes(val)) - return true; - if (falseValue.includes(val)) - return false; - throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` + - `Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); -} -exports.getBooleanInput = getBooleanInput; -/** - * Sets the value of an output. - * - * @param name name of the output to set - * @param value value to store. Non-string values will be converted to a string via JSON.stringify - */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function setOutput(name, value) { - process.stdout.write(os.EOL); - command_1.issueCommand('set-output', { name }, value); -} -exports.setOutput = setOutput; -/** - * Enables or disables the echoing of commands into stdout for the rest of the step. - * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set. - * - */ -function setCommandEcho(enabled) { - command_1.issue('echo', enabled ? 'on' : 'off'); -} -exports.setCommandEcho = setCommandEcho; -//----------------------------------------------------------------------- -// Results -//----------------------------------------------------------------------- -/** - * Sets the action status to failed. - * When the action exits it will be with an exit code of 1 - * @param message add error issue message - */ -function setFailed(message) { - process.exitCode = ExitCode.Failure; - error(message); -} -exports.setFailed = setFailed; -//----------------------------------------------------------------------- -// Logging Commands -//----------------------------------------------------------------------- -/** - * Gets whether Actions Step Debug is on or not - */ -function isDebug() { - return process.env['RUNNER_DEBUG'] === '1'; -} -exports.isDebug = isDebug; -/** - * Writes debug message to user log - * @param message debug message - */ -function debug(message) { - command_1.issueCommand('debug', {}, message); -} -exports.debug = debug; -/** - * Adds an error issue - * @param message error issue message. 
Errors will be converted to string via toString() - * @param properties optional properties to add to the annotation. - */ -function error(message, properties = {}) { - command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); -} -exports.error = error; -/** - * Adds a warning issue - * @param message warning issue message. Errors will be converted to string via toString() - * @param properties optional properties to add to the annotation. - */ -function warning(message, properties = {}) { - command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); -} -exports.warning = warning; -/** - * Adds a notice issue - * @param message notice issue message. Errors will be converted to string via toString() - * @param properties optional properties to add to the annotation. - */ -function notice(message, properties = {}) { - command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); -} -exports.notice = notice; -/** - * Writes info to log with console.log. - * @param message info message - */ -function info(message) { - process.stdout.write(message + os.EOL); -} -exports.info = info; -/** - * Begin an output group. - * - * Output until the next `groupEnd` will be foldable in this group - * - * @param name The name of the output group - */ -function startGroup(name) { - command_1.issue('group', name); -} -exports.startGroup = startGroup; -/** - * End an output group. - */ -function endGroup() { - command_1.issue('endgroup'); -} -exports.endGroup = endGroup; -/** - * Wrap an asynchronous function call in a group. - * - * Returns the same type as the function itself. - * - * @param name The name of the group - * @param fn The function to wrap in the group - */ -function group(name, fn) { - return __awaiter(this, void 0, void 0, function* () { - startGroup(name); - let result; - try { - result = yield fn(); - } - finally { - endGroup(); - } - return result; - }); -} -exports.group = group; -//----------------------------------------------------------------------- -// Wrapper action state -//----------------------------------------------------------------------- -/** - * Saves state for current action, the state can only be retrieved by this action's post job execution. - * - * @param name name of the state to store - * @param value value to store. Non-string values will be converted to a string via JSON.stringify - */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function saveState(name, value) { - command_1.issueCommand('save-state', { name }, value); -} -exports.saveState = saveState; -/** - * Gets the value of an state set by this action's main execution. 
- * - * @param name name of the state to get - * @returns string - */ -function getState(name) { - return process.env[`STATE_${name}`] || ''; -} -exports.getState = getState; -function getIDToken(aud) { - return __awaiter(this, void 0, void 0, function* () { - return yield oidc_utils_1.OidcClient.getIDToken(aud); - }); -} -exports.getIDToken = getIDToken; -/** - * Summary exports - */ -var summary_1 = __nccwpck_require__(1327); -Object.defineProperty(exports, "summary", ({ enumerable: true, get: function () { return summary_1.summary; } })); -/** - * @deprecated use core.summary - */ -var summary_2 = __nccwpck_require__(1327); -Object.defineProperty(exports, "markdownSummary", ({ enumerable: true, get: function () { return summary_2.markdownSummary; } })); -/** - * Path exports - */ -var path_utils_1 = __nccwpck_require__(2981); -Object.defineProperty(exports, "toPosixPath", ({ enumerable: true, get: function () { return path_utils_1.toPosixPath; } })); -Object.defineProperty(exports, "toWin32Path", ({ enumerable: true, get: function () { return path_utils_1.toWin32Path; } })); -Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: function () { return path_utils_1.toPlatformPath; } })); -//# sourceMappingURL=core.js.map - -/***/ }), - -/***/ 717: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -// For internal use, subject to change. -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.issueCommand = void 0; -// We use any as a valid input type -/* eslint-disable @typescript-eslint/no-explicit-any */ -const fs = __importStar(__nccwpck_require__(5747)); -const os = __importStar(__nccwpck_require__(2087)); -const utils_1 = __nccwpck_require__(5278); -function issueCommand(command, message) { - const filePath = process.env[`GITHUB_${command}`]; - if (!filePath) { - throw new Error(`Unable to find environment variable for file command ${command}`); - } - if (!fs.existsSync(filePath)) { - throw new Error(`Missing file at path: ${filePath}`); - } - fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, { - encoding: 'utf8' - }); -} -exports.issueCommand = issueCommand; -//# sourceMappingURL=file-command.js.map - -/***/ }), - -/***/ 8041: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.OidcClient = void 0; -const http_client_1 = __nccwpck_require__(6255); -const auth_1 = __nccwpck_require__(5526); -const core_1 = __nccwpck_require__(2186); -class OidcClient { - static createHttpClient(allowRetry = true, maxRetry = 10) { - const requestOptions = { - allowRetries: allowRetry, - maxRetries: maxRetry - }; - return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions); - } - static getRequestToken() { - const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN']; - if (!token) { - throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable'); - } - return token; - } - static getIDTokenUrl() { - const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL']; - if (!runtimeUrl) { - throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable'); - } - return runtimeUrl; - } - static getCall(id_token_url) { - var _a; - return __awaiter(this, void 0, void 0, function* () { - const httpclient = OidcClient.createHttpClient(); - const res = yield httpclient - .getJson(id_token_url) - .catch(error => { - throw new Error(`Failed to get ID Token. \n - Error Code : ${error.statusCode}\n - Error Message: ${error.result.message}`); - }); - const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value; - if (!id_token) { - throw new Error('Response json body do not have ID Token field'); - } - return id_token; - }); - } - static getIDToken(audience) { - return __awaiter(this, void 0, void 0, function* () { - try { - // New ID Token is requested from action service - let id_token_url = OidcClient.getIDTokenUrl(); - if (audience) { - const encodedAudience = encodeURIComponent(audience); - id_token_url = `${id_token_url}&audience=${encodedAudience}`; - } - core_1.debug(`ID token url is ${id_token_url}`); - const id_token = yield OidcClient.getCall(id_token_url); - core_1.setSecret(id_token); - return id_token; - } - catch (error) { - throw new Error(`Error message: ${error.message}`); - } - }); - } -} -exports.OidcClient = OidcClient; -//# sourceMappingURL=oidc-utils.js.map - -/***/ }), - -/***/ 2981: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0; -const path = __importStar(__nccwpck_require__(5622)); -/** - * toPosixPath converts the given path to the posix form. On Windows, \\ will be - * replaced with /. - * - * @param pth. Path to transform. - * @return string Posix path. - */ -function toPosixPath(pth) { - return pth.replace(/[\\]/g, '/'); -} -exports.toPosixPath = toPosixPath; -/** - * toWin32Path converts the given path to the win32 form. On Linux, / will be - * replaced with \\. - * - * @param pth. Path to transform. - * @return string Win32 path. - */ -function toWin32Path(pth) { - return pth.replace(/[/]/g, '\\'); -} -exports.toWin32Path = toWin32Path; -/** - * toPlatformPath converts the given path to a platform-specific path. It does - * this by replacing instances of / and \ with the platform-specific path - * separator. - * - * @param pth The path to platformize. - * @return string The platform-specific path. - */ -function toPlatformPath(pth) { - return pth.replace(/[/\\]/g, path.sep); -} -exports.toPlatformPath = toPlatformPath; -//# sourceMappingURL=path-utils.js.map - -/***/ }), - -/***/ 1327: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0; -const os_1 = __nccwpck_require__(2087); -const fs_1 = __nccwpck_require__(5747); -const { access, appendFile, writeFile } = fs_1.promises; -exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY'; -exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary'; -class Summary { - constructor() { - this._buffer = ''; - } - /** - * Finds the summary file path from the environment, rejects if env var is not found or file does not exist - * Also checks r/w permissions. - * - * @returns step summary file path - */ - filePath() { - return __awaiter(this, void 0, void 0, function* () { - if (this._filePath) { - return this._filePath; - } - const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR]; - if (!pathFromEnv) { - throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. 
Check if your runtime environment supports job summaries.`); - } - try { - yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK); - } - catch (_a) { - throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`); - } - this._filePath = pathFromEnv; - return this._filePath; - }); - } - /** - * Wraps content in an HTML tag, adding any HTML attributes - * - * @param {string} tag HTML tag to wrap - * @param {string | null} content content within the tag - * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add - * - * @returns {string} content wrapped in HTML element - */ - wrap(tag, content, attrs = {}) { - const htmlAttrs = Object.entries(attrs) - .map(([key, value]) => ` ${key}="${value}"`) - .join(''); - if (!content) { - return `<${tag}${htmlAttrs}>`; - } - return `<${tag}${htmlAttrs}>${content}`; - } - /** - * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default. - * - * @param {SummaryWriteOptions} [options] (optional) options for write operation - * - * @returns {Promise} summary instance - */ - write(options) { - return __awaiter(this, void 0, void 0, function* () { - const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite); - const filePath = yield this.filePath(); - const writeFunc = overwrite ? writeFile : appendFile; - yield writeFunc(filePath, this._buffer, { encoding: 'utf8' }); - return this.emptyBuffer(); - }); - } - /** - * Clears the summary buffer and wipes the summary file - * - * @returns {Summary} summary instance - */ - clear() { - return __awaiter(this, void 0, void 0, function* () { - return this.emptyBuffer().write({ overwrite: true }); - }); - } - /** - * Returns the current summary buffer as a string - * - * @returns {string} string of summary buffer - */ - stringify() { - return this._buffer; - } - /** - * If the summary buffer is empty - * - * @returns {boolen} true if the buffer is empty - */ - isEmptyBuffer() { - return this._buffer.length === 0; - } - /** - * Resets the summary buffer without writing to summary file - * - * @returns {Summary} summary instance - */ - emptyBuffer() { - this._buffer = ''; - return this; - } - /** - * Adds raw text to the summary buffer - * - * @param {string} text content to add - * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false) - * - * @returns {Summary} summary instance - */ - addRaw(text, addEOL = false) { - this._buffer += text; - return addEOL ? this.addEOL() : this; - } - /** - * Adds the operating system-specific end-of-line marker to the buffer - * - * @returns {Summary} summary instance - */ - addEOL() { - return this.addRaw(os_1.EOL); - } - /** - * Adds an HTML codeblock to the summary buffer - * - * @param {string} code content to render within fenced code block - * @param {string} lang (optional) language to syntax highlight code - * - * @returns {Summary} summary instance - */ - addCodeBlock(code, lang) { - const attrs = Object.assign({}, (lang && { lang })); - const element = this.wrap('pre', this.wrap('code', code), attrs); - return this.addRaw(element).addEOL(); - } - /** - * Adds an HTML list to the summary buffer - * - * @param {string[]} items list of items to render - * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false) - * - * @returns {Summary} summary instance - */ - addList(items, ordered = false) { - const tag = ordered ? 
'ol' : 'ul'; - const listItems = items.map(item => this.wrap('li', item)).join(''); - const element = this.wrap(tag, listItems); - return this.addRaw(element).addEOL(); - } - /** - * Adds an HTML table to the summary buffer - * - * @param {SummaryTableCell[]} rows table rows - * - * @returns {Summary} summary instance - */ - addTable(rows) { - const tableBody = rows - .map(row => { - const cells = row - .map(cell => { - if (typeof cell === 'string') { - return this.wrap('td', cell); - } - const { header, data, colspan, rowspan } = cell; - const tag = header ? 'th' : 'td'; - const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan })); - return this.wrap(tag, data, attrs); - }) - .join(''); - return this.wrap('tr', cells); - }) - .join(''); - const element = this.wrap('table', tableBody); - return this.addRaw(element).addEOL(); - } - /** - * Adds a collapsable HTML details element to the summary buffer - * - * @param {string} label text for the closed state - * @param {string} content collapsable content - * - * @returns {Summary} summary instance - */ - addDetails(label, content) { - const element = this.wrap('details', this.wrap('summary', label) + content); - return this.addRaw(element).addEOL(); - } - /** - * Adds an HTML image tag to the summary buffer - * - * @param {string} src path to the image you to embed - * @param {string} alt text description of the image - * @param {SummaryImageOptions} options (optional) addition image attributes - * - * @returns {Summary} summary instance - */ - addImage(src, alt, options) { - const { width, height } = options || {}; - const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height })); - const element = this.wrap('img', null, Object.assign({ src, alt }, attrs)); - return this.addRaw(element).addEOL(); - } - /** - * Adds an HTML section heading element - * - * @param {string} text heading text - * @param {number | string} [level=1] (optional) the heading level, default: 1 - * - * @returns {Summary} summary instance - */ - addHeading(text, level) { - const tag = `h${level}`; - const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag) - ? tag - : 'h1'; - const element = this.wrap(allowedTag, text); - return this.addRaw(element).addEOL(); - } - /** - * Adds an HTML thematic break (
<hr>) to the summary buffer - * - * @returns {Summary} summary instance - */ - addSeparator() { - const element = this.wrap('hr', null); - return this.addRaw(element).addEOL(); - } - /** - * Adds an HTML line break (<br>
) to the summary buffer - * - * @returns {Summary} summary instance - */ - addBreak() { - const element = this.wrap('br', null); - return this.addRaw(element).addEOL(); - } - /** - * Adds an HTML blockquote to the summary buffer - * - * @param {string} text quote text - * @param {string} cite (optional) citation url - * - * @returns {Summary} summary instance - */ - addQuote(text, cite) { - const attrs = Object.assign({}, (cite && { cite })); - const element = this.wrap('blockquote', text, attrs); - return this.addRaw(element).addEOL(); - } - /** - * Adds an HTML anchor tag to the summary buffer - * - * @param {string} text link text/content - * @param {string} href hyperlink - * - * @returns {Summary} summary instance - */ - addLink(text, href) { - const element = this.wrap('a', text, { href }); - return this.addRaw(element).addEOL(); - } -} -const _summary = new Summary(); -/** - * @deprecated use `core.summary` - */ -exports.markdownSummary = _summary; -exports.summary = _summary; -//# sourceMappingURL=summary.js.map - -/***/ }), - -/***/ 5278: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -// We use any as a valid input type -/* eslint-disable @typescript-eslint/no-explicit-any */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.toCommandProperties = exports.toCommandValue = void 0; -/** - * Sanitizes an input into a string so it can be passed into issueCommand safely - * @param input input to sanitize into a string - */ -function toCommandValue(input) { - if (input === null || input === undefined) { - return ''; - } - else if (typeof input === 'string' || input instanceof String) { - return input; - } - return JSON.stringify(input); -} -exports.toCommandValue = toCommandValue; -/** - * - * @param annotationProperties - * @returns The command properties to send with the actual annotation command - * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646 - */ -function toCommandProperties(annotationProperties) { - if (!Object.keys(annotationProperties).length) { - return {}; - } - return { - title: annotationProperties.title, - file: annotationProperties.file, - line: annotationProperties.startLine, - endLine: annotationProperties.endLine, - col: annotationProperties.startColumn, - endColumn: annotationProperties.endColumn - }; -} -exports.toCommandProperties = toCommandProperties; -//# sourceMappingURL=utils.js.map - -/***/ }), - -/***/ 4087: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Context = void 0; -const fs_1 = __nccwpck_require__(5747); -const os_1 = __nccwpck_require__(2087); -class Context { - /** - * Hydrate the context from the environment - */ - constructor() { - var _a, _b, _c; - this.payload = {}; - if (process.env.GITHUB_EVENT_PATH) { - if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) { - this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' })); - } - else { - const path = process.env.GITHUB_EVENT_PATH; - process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`); - } - } - this.eventName = process.env.GITHUB_EVENT_NAME; - this.sha = process.env.GITHUB_SHA; - this.ref = process.env.GITHUB_REF; - this.workflow = process.env.GITHUB_WORKFLOW; - this.action = process.env.GITHUB_ACTION; - this.actor = process.env.GITHUB_ACTOR; - this.job = process.env.GITHUB_JOB; - this.runNumber = 
parseInt(process.env.GITHUB_RUN_NUMBER, 10); - this.runId = parseInt(process.env.GITHUB_RUN_ID, 10); - this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`; - this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`; - this.graphqlUrl = (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`; - } - get issue() { - const payload = this.payload; - return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number }); - } - get repo() { - if (process.env.GITHUB_REPOSITORY) { - const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/'); - return { owner, repo }; - } - if (this.payload.repository) { - return { - owner: this.payload.repository.owner.login, - repo: this.payload.repository.name - }; - } - throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'"); - } -} -exports.Context = Context; -//# sourceMappingURL=context.js.map - -/***/ }), - -/***/ 5438: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getOctokit = exports.context = void 0; -const Context = __importStar(__nccwpck_require__(4087)); -const utils_1 = __nccwpck_require__(3030); -exports.context = new Context.Context(); -/** - * Returns a hydrated octokit ready to use for GitHub Actions - * - * @param token the repo PAT or GITHUB_TOKEN - * @param options other options to set - */ -function getOctokit(token, options) { - return new utils_1.GitHub(utils_1.getOctokitOptions(token, options)); -} -exports.getOctokit = getOctokit; -//# sourceMappingURL=github.js.map - -/***/ }), - -/***/ 7914: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0; -const httpClient = __importStar(__nccwpck_require__(6255)); -function getAuthString(token, options) { - if (!token && !options.auth) { - throw new Error('Parameter token or opts.auth is required'); - } - else if (token && options.auth) { - throw new Error('Parameters token and opts.auth may not both be specified'); - } - return typeof options.auth === 'string' ? options.auth : `token ${token}`; -} -exports.getAuthString = getAuthString; -function getProxyAgent(destinationUrl) { - const hc = new httpClient.HttpClient(); - return hc.getAgent(destinationUrl); -} -exports.getProxyAgent = getProxyAgent; -function getApiBaseUrl() { - return process.env['GITHUB_API_URL'] || 'https://api.github.com'; -} -exports.getApiBaseUrl = getApiBaseUrl; -//# sourceMappingURL=utils.js.map - -/***/ }), - -/***/ 3030: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getOctokitOptions = exports.GitHub = exports.context = void 0; -const Context = __importStar(__nccwpck_require__(4087)); -const Utils = __importStar(__nccwpck_require__(7914)); -// octokit + plugins -const core_1 = __nccwpck_require__(6762); -const plugin_rest_endpoint_methods_1 = __nccwpck_require__(3044); -const plugin_paginate_rest_1 = __nccwpck_require__(4193); -exports.context = new Context.Context(); -const baseUrl = Utils.getApiBaseUrl(); -const defaults = { - baseUrl, - request: { - agent: Utils.getProxyAgent(baseUrl) - } -}; -exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(defaults); -/** - * Convience function to correctly format Octokit Options to pass into the constructor. 
- * - * @param token the repo PAT or GITHUB_TOKEN - * @param options other options to set - */ -function getOctokitOptions(token, options) { - const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller - // Auth - const auth = Utils.getAuthString(token, opts); - if (auth) { - opts.auth = auth; - } - return opts; -} -exports.getOctokitOptions = getOctokitOptions; -//# sourceMappingURL=utils.js.map - -/***/ }), - -/***/ 5526: -/***/ (function(__unused_webpack_module, exports) { - -"use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0; -class BasicCredentialHandler { - constructor(username, password) { - this.username = username; - this.password = password; - } - prepareRequest(options) { - if (!options.headers) { - throw Error('The request has no headers'); - } - options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`; - } - // This handler cannot handle 401 - canHandleAuthentication() { - return false; - } - handleAuthentication() { - return __awaiter(this, void 0, void 0, function* () { - throw new Error('not implemented'); - }); - } -} -exports.BasicCredentialHandler = BasicCredentialHandler; -class BearerCredentialHandler { - constructor(token) { - this.token = token; - } - // currently implements pre-authorization - // TODO: support preAuth = false where it hooks on 401 - prepareRequest(options) { - if (!options.headers) { - throw Error('The request has no headers'); - } - options.headers['Authorization'] = `Bearer ${this.token}`; - } - // This handler cannot handle 401 - canHandleAuthentication() { - return false; - } - handleAuthentication() { - return __awaiter(this, void 0, void 0, function* () { - throw new Error('not implemented'); - }); - } -} -exports.BearerCredentialHandler = BearerCredentialHandler; -class PersonalAccessTokenCredentialHandler { - constructor(token) { - this.token = token; - } - // currently implements pre-authorization - // TODO: support preAuth = false where it hooks on 401 - prepareRequest(options) { - if (!options.headers) { - throw Error('The request has no headers'); - } - options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`; - } - // This handler cannot handle 401 - canHandleAuthentication() { - return false; - } - handleAuthentication() { - return __awaiter(this, void 0, void 0, function* () { - throw new Error('not implemented'); - }); - } -} -exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; -//# sourceMappingURL=auth.js.map - -/***/ }), - -/***/ 6255: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -/* eslint-disable 
@typescript-eslint/no-explicit-any */ -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0; -const http = __importStar(__nccwpck_require__(8605)); -const https = __importStar(__nccwpck_require__(7211)); -const pm = __importStar(__nccwpck_require__(9835)); -const tunnel = __importStar(__nccwpck_require__(4294)); -var HttpCodes; -(function (HttpCodes) { - HttpCodes[HttpCodes["OK"] = 200] = "OK"; - HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; - HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; - HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; - HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; - HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; - HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; - HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; - HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; - HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; - HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; - HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; - HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; - HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; - HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; - HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; - HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; - HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; - HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; - HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; - HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; - HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; - HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; - HttpCodes[HttpCodes["NotImplemented"] = 
501] = "NotImplemented"; - HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; - HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; - HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; -})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {})); -var Headers; -(function (Headers) { - Headers["Accept"] = "accept"; - Headers["ContentType"] = "content-type"; -})(Headers = exports.Headers || (exports.Headers = {})); -var MediaTypes; -(function (MediaTypes) { - MediaTypes["ApplicationJson"] = "application/json"; -})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {})); -/** - * Returns the proxy URL, depending upon the supplied url and proxy environment variables. - * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com - */ -function getProxyUrl(serverUrl) { - const proxyUrl = pm.getProxyUrl(new URL(serverUrl)); - return proxyUrl ? proxyUrl.href : ''; -} -exports.getProxyUrl = getProxyUrl; -const HttpRedirectCodes = [ - HttpCodes.MovedPermanently, - HttpCodes.ResourceMoved, - HttpCodes.SeeOther, - HttpCodes.TemporaryRedirect, - HttpCodes.PermanentRedirect -]; -const HttpResponseRetryCodes = [ - HttpCodes.BadGateway, - HttpCodes.ServiceUnavailable, - HttpCodes.GatewayTimeout -]; -const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; -const ExponentialBackoffCeiling = 10; -const ExponentialBackoffTimeSlice = 5; -class HttpClientError extends Error { - constructor(message, statusCode) { - super(message); - this.name = 'HttpClientError'; - this.statusCode = statusCode; - Object.setPrototypeOf(this, HttpClientError.prototype); - } -} -exports.HttpClientError = HttpClientError; -class HttpClientResponse { - constructor(message) { - this.message = message; - } - readBody() { - return __awaiter(this, void 0, void 0, function* () { - return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { - let output = Buffer.alloc(0); - this.message.on('data', (chunk) => { - output = Buffer.concat([output, chunk]); - }); - this.message.on('end', () => { - resolve(output.toString()); - }); - })); - }); - } -} -exports.HttpClientResponse = HttpClientResponse; -function isHttps(requestUrl) { - const parsedUrl = new URL(requestUrl); - return parsedUrl.protocol === 'https:'; -} -exports.isHttps = isHttps; -class HttpClient { - constructor(userAgent, handlers, requestOptions) { - this._ignoreSslError = false; - this._allowRedirects = true; - this._allowRedirectDowngrade = false; - this._maxRedirects = 50; - this._allowRetries = false; - this._maxRetries = 1; - this._keepAlive = false; - this._disposed = false; - this.userAgent = userAgent; - this.handlers = handlers || []; - this.requestOptions = requestOptions; - if (requestOptions) { - if (requestOptions.ignoreSslError != null) { - this._ignoreSslError = requestOptions.ignoreSslError; - } - this._socketTimeout = requestOptions.socketTimeout; - if (requestOptions.allowRedirects != null) { - this._allowRedirects = requestOptions.allowRedirects; - } - if (requestOptions.allowRedirectDowngrade != null) { - this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; - } - if (requestOptions.maxRedirects != null) { - this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); - } - if (requestOptions.keepAlive != null) { - this._keepAlive = requestOptions.keepAlive; - } - if (requestOptions.allowRetries != null) { - this._allowRetries = requestOptions.allowRetries; - } - if (requestOptions.maxRetries != null) { - this._maxRetries = 
requestOptions.maxRetries; - } - } - } - options(requestUrl, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); - }); - } - get(requestUrl, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('GET', requestUrl, null, additionalHeaders || {}); - }); - } - del(requestUrl, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('DELETE', requestUrl, null, additionalHeaders || {}); - }); - } - post(requestUrl, data, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('POST', requestUrl, data, additionalHeaders || {}); - }); - } - patch(requestUrl, data, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('PATCH', requestUrl, data, additionalHeaders || {}); - }); - } - put(requestUrl, data, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('PUT', requestUrl, data, additionalHeaders || {}); - }); - } - head(requestUrl, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('HEAD', requestUrl, null, additionalHeaders || {}); - }); - } - sendStream(verb, requestUrl, stream, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request(verb, requestUrl, stream, additionalHeaders); - }); - } - /** - * Gets a typed object from an endpoint - * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise - */ - getJson(requestUrl, additionalHeaders = {}) { - return __awaiter(this, void 0, void 0, function* () { - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - const res = yield this.get(requestUrl, additionalHeaders); - return this._processResponse(res, this.requestOptions); - }); - } - postJson(requestUrl, obj, additionalHeaders = {}) { - return __awaiter(this, void 0, void 0, function* () { - const data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - const res = yield this.post(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - }); - } - putJson(requestUrl, obj, additionalHeaders = {}) { - return __awaiter(this, void 0, void 0, function* () { - const data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - const res = yield this.put(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - }); - } - patchJson(requestUrl, obj, additionalHeaders = {}) { - return __awaiter(this, void 0, void 0, function* () { - const data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, 
Headers.ContentType, MediaTypes.ApplicationJson); - const res = yield this.patch(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - }); - } - /** - * Makes a raw http request. - * All other methods such as get, post, patch, and request ultimately call this. - * Prefer get, del, post and patch - */ - request(verb, requestUrl, data, headers) { - return __awaiter(this, void 0, void 0, function* () { - if (this._disposed) { - throw new Error('Client has already been disposed.'); - } - const parsedUrl = new URL(requestUrl); - let info = this._prepareRequest(verb, parsedUrl, headers); - // Only perform retries on reads since writes may not be idempotent. - const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb) - ? this._maxRetries + 1 - : 1; - let numTries = 0; - let response; - do { - response = yield this.requestRaw(info, data); - // Check if it's an authentication challenge - if (response && - response.message && - response.message.statusCode === HttpCodes.Unauthorized) { - let authenticationHandler; - for (const handler of this.handlers) { - if (handler.canHandleAuthentication(response)) { - authenticationHandler = handler; - break; - } - } - if (authenticationHandler) { - return authenticationHandler.handleAuthentication(this, info, data); - } - else { - // We have received an unauthorized response but have no handlers to handle it. - // Let the response return to the caller. - return response; - } - } - let redirectsRemaining = this._maxRedirects; - while (response.message.statusCode && - HttpRedirectCodes.includes(response.message.statusCode) && - this._allowRedirects && - redirectsRemaining > 0) { - const redirectUrl = response.message.headers['location']; - if (!redirectUrl) { - // if there's no location to redirect to, we won't - break; - } - const parsedRedirectUrl = new URL(redirectUrl); - if (parsedUrl.protocol === 'https:' && - parsedUrl.protocol !== parsedRedirectUrl.protocol && - !this._allowRedirectDowngrade) { - throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); - } - // we need to finish reading the response before reassigning response - // which will leak the open socket. - yield response.readBody(); - // strip authorization header if redirected to a different hostname - if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { - for (const header in headers) { - // header names are case insensitive - if (header.toLowerCase() === 'authorization') { - delete headers[header]; - } - } - } - // let's make the request with the new redirectUrl - info = this._prepareRequest(verb, parsedRedirectUrl, headers); - response = yield this.requestRaw(info, data); - redirectsRemaining--; - } - if (!response.message.statusCode || - !HttpResponseRetryCodes.includes(response.message.statusCode)) { - // If not a retry code, return immediately instead of retrying - return response; - } - numTries += 1; - if (numTries < maxTries) { - yield response.readBody(); - yield this._performExponentialBackoff(numTries); - } - } while (numTries < maxTries); - return response; - }); - } - /** - * Needs to be called if keepAlive is set to true in request options. - */ - dispose() { - if (this._agent) { - this._agent.destroy(); - } - this._disposed = true; - } - /** - * Raw request. 
- * @param info - * @param data - */ - requestRaw(info, data) { - return __awaiter(this, void 0, void 0, function* () { - return new Promise((resolve, reject) => { - function callbackForResult(err, res) { - if (err) { - reject(err); - } - else if (!res) { - // If `err` is not passed, then `res` must be passed. - reject(new Error('Unknown error')); - } - else { - resolve(res); - } - } - this.requestRawWithCallback(info, data, callbackForResult); - }); - }); - } - /** - * Raw request with callback. - * @param info - * @param data - * @param onResult - */ - requestRawWithCallback(info, data, onResult) { - if (typeof data === 'string') { - if (!info.options.headers) { - info.options.headers = {}; - } - info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); - } - let callbackCalled = false; - function handleResult(err, res) { - if (!callbackCalled) { - callbackCalled = true; - onResult(err, res); - } - } - const req = info.httpModule.request(info.options, (msg) => { - const res = new HttpClientResponse(msg); - handleResult(undefined, res); - }); - let socket; - req.on('socket', sock => { - socket = sock; - }); - // If we ever get disconnected, we want the socket to timeout eventually - req.setTimeout(this._socketTimeout || 3 * 60000, () => { - if (socket) { - socket.end(); - } - handleResult(new Error(`Request timeout: ${info.options.path}`)); - }); - req.on('error', function (err) { - // err has statusCode property - // res should have headers - handleResult(err); - }); - if (data && typeof data === 'string') { - req.write(data, 'utf8'); - } - if (data && typeof data !== 'string') { - data.on('close', function () { - req.end(); - }); - data.pipe(req); - } - else { - req.end(); - } - } - /** - * Gets an http agent. This function is useful when you need an http agent that handles - * routing through a proxy server - depending upon the url and proxy environment variables. - * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com - */ - getAgent(serverUrl) { - const parsedUrl = new URL(serverUrl); - return this._getAgent(parsedUrl); - } - _prepareRequest(method, requestUrl, headers) { - const info = {}; - info.parsedUrl = requestUrl; - const usingSsl = info.parsedUrl.protocol === 'https:'; - info.httpModule = usingSsl ? https : http; - const defaultPort = usingSsl ? 443 : 80; - info.options = {}; - info.options.host = info.parsedUrl.hostname; - info.options.port = info.parsedUrl.port - ? 
parseInt(info.parsedUrl.port) - : defaultPort; - info.options.path = - (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); - info.options.method = method; - info.options.headers = this._mergeHeaders(headers); - if (this.userAgent != null) { - info.options.headers['user-agent'] = this.userAgent; - } - info.options.agent = this._getAgent(info.parsedUrl); - // gives handlers an opportunity to participate - if (this.handlers) { - for (const handler of this.handlers) { - handler.prepareRequest(info.options); - } - } - return info; - } - _mergeHeaders(headers) { - if (this.requestOptions && this.requestOptions.headers) { - return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {})); - } - return lowercaseKeys(headers || {}); - } - _getExistingOrDefaultHeader(additionalHeaders, header, _default) { - let clientHeader; - if (this.requestOptions && this.requestOptions.headers) { - clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; - } - return additionalHeaders[header] || clientHeader || _default; - } - _getAgent(parsedUrl) { - let agent; - const proxyUrl = pm.getProxyUrl(parsedUrl); - const useProxy = proxyUrl && proxyUrl.hostname; - if (this._keepAlive && useProxy) { - agent = this._proxyAgent; - } - if (this._keepAlive && !useProxy) { - agent = this._agent; - } - // if agent is already assigned use that agent. - if (agent) { - return agent; - } - const usingSsl = parsedUrl.protocol === 'https:'; - let maxSockets = 100; - if (this.requestOptions) { - maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; - } - // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis. - if (proxyUrl && proxyUrl.hostname) { - const agentOptions = { - maxSockets, - keepAlive: this._keepAlive, - proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && { - proxyAuth: `${proxyUrl.username}:${proxyUrl.password}` - })), { host: proxyUrl.hostname, port: proxyUrl.port }) - }; - let tunnelAgent; - const overHttps = proxyUrl.protocol === 'https:'; - if (usingSsl) { - tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; - } - else { - tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; - } - agent = tunnelAgent(agentOptions); - this._proxyAgent = agent; - } - // if reusing agent across request and tunneling agent isn't assigned create a new agent - if (this._keepAlive && !agent) { - const options = { keepAlive: this._keepAlive, maxSockets }; - agent = usingSsl ? new https.Agent(options) : new http.Agent(options); - this._agent = agent; - } - // if not using private agent and tunnel agent isn't setup then use global agent - if (!agent) { - agent = usingSsl ? 
https.globalAgent : http.globalAgent; - } - if (usingSsl && this._ignoreSslError) { - // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process - // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options - // we have to cast it to any and change it directly - agent.options = Object.assign(agent.options || {}, { - rejectUnauthorized: false - }); - } - return agent; - } - _performExponentialBackoff(retryNumber) { - return __awaiter(this, void 0, void 0, function* () { - retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); - const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); - return new Promise(resolve => setTimeout(() => resolve(), ms)); - }); - } - _processResponse(res, options) { - return __awaiter(this, void 0, void 0, function* () { - return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { - const statusCode = res.message.statusCode || 0; - const response = { - statusCode, - result: null, - headers: {} - }; - // not found leads to null obj returned - if (statusCode === HttpCodes.NotFound) { - resolve(response); - } - // get the result from the body - function dateTimeDeserializer(key, value) { - if (typeof value === 'string') { - const a = new Date(value); - if (!isNaN(a.valueOf())) { - return a; - } - } - return value; - } - let obj; - let contents; - try { - contents = yield res.readBody(); - if (contents && contents.length > 0) { - if (options && options.deserializeDates) { - obj = JSON.parse(contents, dateTimeDeserializer); - } - else { - obj = JSON.parse(contents); - } - response.result = obj; - } - response.headers = res.message.headers; - } - catch (err) { - // Invalid resource (contents not json); leaving result obj null - } - // note that 3xx redirects are handled by the http layer. 
- if (statusCode > 299) { - let msg; - // if exception/error in body, attempt to get better error - if (obj && obj.message) { - msg = obj.message; - } - else if (contents && contents.length > 0) { - // it may be the case that the exception is in the body message as string - msg = contents; - } - else { - msg = `Failed request: (${statusCode})`; - } - const err = new HttpClientError(msg, statusCode); - err.result = response.result; - reject(err); - } - else { - resolve(response); - } - })); - }); - } -} -exports.HttpClient = HttpClient; -const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); -//# sourceMappingURL=index.js.map - -/***/ }), - -/***/ 9835: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.checkBypass = exports.getProxyUrl = void 0; -function getProxyUrl(reqUrl) { - const usingSsl = reqUrl.protocol === 'https:'; - if (checkBypass(reqUrl)) { - return undefined; - } - const proxyVar = (() => { - if (usingSsl) { - return process.env['https_proxy'] || process.env['HTTPS_PROXY']; - } - else { - return process.env['http_proxy'] || process.env['HTTP_PROXY']; - } - })(); - if (proxyVar) { - return new URL(proxyVar); - } - else { - return undefined; - } -} -exports.getProxyUrl = getProxyUrl; -function checkBypass(reqUrl) { - if (!reqUrl.hostname) { - return false; - } - const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; - if (!noProxy) { - return false; - } - // Determine the request port - let reqPort; - if (reqUrl.port) { - reqPort = Number(reqUrl.port); - } - else if (reqUrl.protocol === 'http:') { - reqPort = 80; - } - else if (reqUrl.protocol === 'https:') { - reqPort = 443; - } - // Format the request hostname and hostname with port - const upperReqHosts = [reqUrl.hostname.toUpperCase()]; - if (typeof reqPort === 'number') { - upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); - } - // Compare request host against noproxy - for (const upperNoProxyItem of noProxy - .split(',') - .map(x => x.trim().toUpperCase()) - .filter(x => x)) { - if (upperReqHosts.some(x => x === upperNoProxyItem)) { - return true; - } - } - return false; -} -exports.checkBypass = checkBypass; -//# sourceMappingURL=proxy.js.map - -/***/ }), - -/***/ 334: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -const REGEX_IS_INSTALLATION_LEGACY = /^v1\./; -const REGEX_IS_INSTALLATION = /^ghs_/; -const REGEX_IS_USER_TO_SERVER = /^ghu_/; -async function auth(token) { - const isApp = token.split(/\./).length === 3; - const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token); - const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token); - const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? 
"user-to-server" : "oauth"; - return { - type: "token", - token: token, - tokenType - }; -} - -/** - * Prefix token for usage in the Authorization header - * - * @param token OAuth token or JSON Web Token - */ -function withAuthorizationPrefix(token) { - if (token.split(/\./).length === 3) { - return `bearer ${token}`; - } - - return `token ${token}`; -} - -async function hook(token, request, route, parameters) { - const endpoint = request.endpoint.merge(route, parameters); - endpoint.headers.authorization = withAuthorizationPrefix(token); - return request(endpoint); -} - -const createTokenAuth = function createTokenAuth(token) { - if (!token) { - throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); - } - - if (typeof token !== "string") { - throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string"); - } - - token = token.replace(/^(token|bearer) +/i, ""); - return Object.assign(auth.bind(null, token), { - hook: hook.bind(null, token) - }); -}; - -exports.createTokenAuth = createTokenAuth; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 6762: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -var universalUserAgent = __nccwpck_require__(5030); -var beforeAfterHook = __nccwpck_require__(3682); -var request = __nccwpck_require__(6234); -var graphql = __nccwpck_require__(8467); -var authToken = __nccwpck_require__(334); - -function _objectWithoutPropertiesLoose(source, excluded) { - if (source == null) return {}; - var target = {}; - var sourceKeys = Object.keys(source); - var key, i; - - for (i = 0; i < sourceKeys.length; i++) { - key = sourceKeys[i]; - if (excluded.indexOf(key) >= 0) continue; - target[key] = source[key]; - } - - return target; -} - -function _objectWithoutProperties(source, excluded) { - if (source == null) return {}; - - var target = _objectWithoutPropertiesLoose(source, excluded); - - var key, i; - - if (Object.getOwnPropertySymbols) { - var sourceSymbolKeys = Object.getOwnPropertySymbols(source); - - for (i = 0; i < sourceSymbolKeys.length; i++) { - key = sourceSymbolKeys[i]; - if (excluded.indexOf(key) >= 0) continue; - if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue; - target[key] = source[key]; - } - } - - return target; -} - -const VERSION = "3.6.0"; - -const _excluded = ["authStrategy"]; -class Octokit { - constructor(options = {}) { - const hook = new beforeAfterHook.Collection(); - const requestDefaults = { - baseUrl: request.request.endpoint.DEFAULTS.baseUrl, - headers: {}, - request: Object.assign({}, options.request, { - // @ts-ignore internal usage only, no need to type - hook: hook.bind(null, "request") - }), - mediaType: { - previews: [], - format: "" - } - }; // prepend default user agent with `options.userAgent` if set - - requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" "); - - if (options.baseUrl) { - requestDefaults.baseUrl = options.baseUrl; - } - - if (options.previews) { - requestDefaults.mediaType.previews = options.previews; - } - - if (options.timeZone) { - requestDefaults.headers["time-zone"] = options.timeZone; - } - - this.request = request.request.defaults(requestDefaults); - this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults); - this.log = Object.assign({ - debug: () => {}, - info: () => {}, - warn: 
console.warn.bind(console), - error: console.error.bind(console) - }, options.log); - this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance - // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered. - // (2) If only `options.auth` is set, use the default token authentication strategy. - // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance. - // TODO: type `options.auth` based on `options.authStrategy`. - - if (!options.authStrategy) { - if (!options.auth) { - // (1) - this.auth = async () => ({ - type: "unauthenticated" - }); - } else { - // (2) - const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯ - - hook.wrap("request", auth.hook); - this.auth = auth; - } - } else { - const { - authStrategy - } = options, - otherOptions = _objectWithoutProperties(options, _excluded); - - const auth = authStrategy(Object.assign({ - request: this.request, - log: this.log, - // we pass the current octokit instance as well as its constructor options - // to allow for authentication strategies that return a new octokit instance - // that shares the same internal state as the current one. The original - // requirement for this was the "event-octokit" authentication strategy - // of https://github.com/probot/octokit-auth-probot. - octokit: this, - octokitOptions: otherOptions - }, options.auth)); // @ts-ignore ¯\_(ツ)_/¯ - - hook.wrap("request", auth.hook); - this.auth = auth; - } // apply plugins - // https://stackoverflow.com/a/16345172 - - - const classConstructor = this.constructor; - classConstructor.plugins.forEach(plugin => { - Object.assign(this, plugin(this, options)); - }); - } - - static defaults(defaults) { - const OctokitWithDefaults = class extends this { - constructor(...args) { - const options = args[0] || {}; - - if (typeof defaults === "function") { - super(defaults(options)); - return; - } - - super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? { - userAgent: `${options.userAgent} ${defaults.userAgent}` - } : null)); - } - - }; - return OctokitWithDefaults; - } - /** - * Attach a plugin (or many) to your Octokit instance. - * - * @example - * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) 
- */ - - - static plugin(...newPlugins) { - var _a; - - const currentPlugins = this.plugins; - const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a); - return NewOctokit; - } - -} -Octokit.VERSION = VERSION; -Octokit.plugins = []; - -exports.Octokit = Octokit; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 9440: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -var isPlainObject = __nccwpck_require__(3287); -var universalUserAgent = __nccwpck_require__(5030); - -function lowercaseKeys(object) { - if (!object) { - return {}; - } - - return Object.keys(object).reduce((newObj, key) => { - newObj[key.toLowerCase()] = object[key]; - return newObj; - }, {}); -} - -function mergeDeep(defaults, options) { - const result = Object.assign({}, defaults); - Object.keys(options).forEach(key => { - if (isPlainObject.isPlainObject(options[key])) { - if (!(key in defaults)) Object.assign(result, { - [key]: options[key] - });else result[key] = mergeDeep(defaults[key], options[key]); - } else { - Object.assign(result, { - [key]: options[key] - }); - } - }); - return result; -} - -function removeUndefinedProperties(obj) { - for (const key in obj) { - if (obj[key] === undefined) { - delete obj[key]; - } - } - - return obj; -} - -function merge(defaults, route, options) { - if (typeof route === "string") { - let [method, url] = route.split(" "); - options = Object.assign(url ? { - method, - url - } : { - url: method - }, options); - } else { - options = Object.assign({}, route); - } // lowercase header names before merging with defaults to avoid duplicates - - - options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging - - removeUndefinedProperties(options); - removeUndefinedProperties(options.headers); - const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten - - if (defaults && defaults.mediaType.previews.length) { - mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews); - } - - mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, "")); - return mergedOptions; -} - -function addQueryParameters(url, parameters) { - const separator = /\?/.test(url) ? 
"&" : "?"; - const names = Object.keys(parameters); - - if (names.length === 0) { - return url; - } - - return url + separator + names.map(name => { - if (name === "q") { - return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); - } - - return `${name}=${encodeURIComponent(parameters[name])}`; - }).join("&"); -} - -const urlVariableRegex = /\{[^}]+\}/g; - -function removeNonChars(variableName) { - return variableName.replace(/^\W+|\W+$/g, "").split(/,/); -} - -function extractUrlVariableNames(url) { - const matches = url.match(urlVariableRegex); - - if (!matches) { - return []; - } - - return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); -} - -function omit(object, keysToOmit) { - return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => { - obj[key] = object[key]; - return obj; - }, {}); -} - -// Based on https://github.com/bramstein/url-template, licensed under BSD -// TODO: create separate package. -// -// Copyright (c) 2012-2014, Bram Stein -// All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// 1. Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// 2. Redistributions in binary form must reproduce the above copyright -// notice, this list of conditions and the following disclaimer in the -// documentation and/or other materials provided with the distribution. -// 3. The name of the author may not be used to endorse or promote products -// derived from this software without specific prior written permission. -// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED -// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF -// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO -// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, -// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY -// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, -// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -/* istanbul ignore file */ -function encodeReserved(str) { - return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) { - if (!/%[0-9A-Fa-f]/.test(part)) { - part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); - } - - return part; - }).join(""); -} - -function encodeUnreserved(str) { - return encodeURIComponent(str).replace(/[!'()*]/g, function (c) { - return "%" + c.charCodeAt(0).toString(16).toUpperCase(); - }); -} - -function encodeValue(operator, value, key) { - value = operator === "+" || operator === "#" ? 
encodeReserved(value) : encodeUnreserved(value); - - if (key) { - return encodeUnreserved(key) + "=" + value; - } else { - return value; - } -} - -function isDefined(value) { - return value !== undefined && value !== null; -} - -function isKeyOperator(operator) { - return operator === ";" || operator === "&" || operator === "?"; -} - -function getValues(context, operator, key, modifier) { - var value = context[key], - result = []; - - if (isDefined(value) && value !== "") { - if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { - value = value.toString(); - - if (modifier && modifier !== "*") { - value = value.substring(0, parseInt(modifier, 10)); - } - - result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); - } else { - if (modifier === "*") { - if (Array.isArray(value)) { - value.filter(isDefined).forEach(function (value) { - result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); - }); - } else { - Object.keys(value).forEach(function (k) { - if (isDefined(value[k])) { - result.push(encodeValue(operator, value[k], k)); - } - }); - } - } else { - const tmp = []; - - if (Array.isArray(value)) { - value.filter(isDefined).forEach(function (value) { - tmp.push(encodeValue(operator, value)); - }); - } else { - Object.keys(value).forEach(function (k) { - if (isDefined(value[k])) { - tmp.push(encodeUnreserved(k)); - tmp.push(encodeValue(operator, value[k].toString())); - } - }); - } - - if (isKeyOperator(operator)) { - result.push(encodeUnreserved(key) + "=" + tmp.join(",")); - } else if (tmp.length !== 0) { - result.push(tmp.join(",")); - } - } - } - } else { - if (operator === ";") { - if (isDefined(value)) { - result.push(encodeUnreserved(key)); - } - } else if (value === "" && (operator === "&" || operator === "?")) { - result.push(encodeUnreserved(key) + "="); - } else if (value === "") { - result.push(""); - } - } - - return result; -} - -function parseUrl(template) { - return { - expand: expand.bind(null, template) - }; -} - -function expand(template, context) { - var operators = ["+", "#", ".", "/", ";", "?", "&"]; - return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) { - if (expression) { - let operator = ""; - const values = []; - - if (operators.indexOf(expression.charAt(0)) !== -1) { - operator = expression.charAt(0); - expression = expression.substr(1); - } - - expression.split(/,/g).forEach(function (variable) { - var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); - values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); - }); - - if (operator && operator !== "+") { - var separator = ","; - - if (operator === "?") { - separator = "&"; - } else if (operator !== "#") { - separator = operator; - } - - return (values.length !== 0 ? 
operator : "") + values.join(separator); - } else { - return values.join(","); - } - } else { - return encodeReserved(literal); - } - }); -} - -function parse(options) { - // https://fetch.spec.whatwg.org/#methods - let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible - - let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); - let headers = Object.assign({}, options.headers); - let body; - let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later - - const urlVariableNames = extractUrlVariableNames(url); - url = parseUrl(url).expand(parameters); - - if (!/^http/.test(url)) { - url = options.baseUrl + url; - } - - const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl"); - const remainingParameters = omit(parameters, omittedParameters); - const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); - - if (!isBinaryRequest) { - if (options.mediaType.format) { - // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw - headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(","); - } - - if (options.mediaType.previews.length) { - const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; - headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => { - const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json"; - return `application/vnd.github.${preview}-preview${format}`; - }).join(","); - } - } // for GET/HEAD requests, set URL query parameters from remaining parameters - // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters - - - if (["GET", "HEAD"].includes(method)) { - url = addQueryParameters(url, remainingParameters); - } else { - if ("data" in remainingParameters) { - body = remainingParameters.data; - } else { - if (Object.keys(remainingParameters).length) { - body = remainingParameters; - } else { - headers["content-length"] = 0; - } - } - } // default content-type for JSON if body is set - - - if (!headers["content-type"] && typeof body !== "undefined") { - headers["content-type"] = "application/json; charset=utf-8"; - } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body. - // fetch does not allow to set `content-length` header, but we can set body to an empty string - - - if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { - body = ""; - } // Only return body/request keys if present - - - return Object.assign({ - method, - url, - headers - }, typeof body !== "undefined" ? { - body - } : null, options.request ? 
{ - request: options.request - } : null); -} - -function endpointWithDefaults(defaults, route, options) { - return parse(merge(defaults, route, options)); -} - -function withDefaults(oldDefaults, newDefaults) { - const DEFAULTS = merge(oldDefaults, newDefaults); - const endpoint = endpointWithDefaults.bind(null, DEFAULTS); - return Object.assign(endpoint, { - DEFAULTS, - defaults: withDefaults.bind(null, DEFAULTS), - merge: merge.bind(null, DEFAULTS), - parse - }); -} - -const VERSION = "6.0.12"; - -const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url. -// So we use RequestParameters and add method as additional required property. - -const DEFAULTS = { - method: "GET", - baseUrl: "https://api.github.com", - headers: { - accept: "application/vnd.github.v3+json", - "user-agent": userAgent - }, - mediaType: { - format: "", - previews: [] - } -}; - -const endpoint = withDefaults(null, DEFAULTS); - -exports.endpoint = endpoint; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 8467: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -var request = __nccwpck_require__(6234); -var universalUserAgent = __nccwpck_require__(5030); - -const VERSION = "4.8.0"; - -function _buildMessageForResponseErrors(data) { - return `Request failed due to following response errors:\n` + data.errors.map(e => ` - ${e.message}`).join("\n"); -} - -class GraphqlResponseError extends Error { - constructor(request, headers, response) { - super(_buildMessageForResponseErrors(response)); - this.request = request; - this.headers = headers; - this.response = response; - this.name = "GraphqlResponseError"; // Expose the errors and response data in their shorthand properties. - - this.errors = response.errors; - this.data = response.data; // Maintains proper stack trace (only available on V8) - - /* istanbul ignore next */ - - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } - } - -} - -const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query", "mediaType"]; -const FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"]; -const GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; -function graphql(request, query, options) { - if (options) { - if (typeof query === "string" && "query" in options) { - return Promise.reject(new Error(`[@octokit/graphql] "query" cannot be used as variable name`)); - } - - for (const key in options) { - if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) continue; - return Promise.reject(new Error(`[@octokit/graphql] "${key}" cannot be used as variable name`)); - } - } - - const parsedOptions = typeof query === "string" ? 
Object.assign({ - query - }, options) : query; - const requestOptions = Object.keys(parsedOptions).reduce((result, key) => { - if (NON_VARIABLE_OPTIONS.includes(key)) { - result[key] = parsedOptions[key]; - return result; - } - - if (!result.variables) { - result.variables = {}; - } - - result.variables[key] = parsedOptions[key]; - return result; - }, {}); // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix - // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451 - - const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl; - - if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { - requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); - } - - return request(requestOptions).then(response => { - if (response.data.errors) { - const headers = {}; - - for (const key of Object.keys(response.headers)) { - headers[key] = response.headers[key]; - } - - throw new GraphqlResponseError(requestOptions, headers, response.data); - } - - return response.data.data; - }); -} - -function withDefaults(request$1, newDefaults) { - const newRequest = request$1.defaults(newDefaults); - - const newApi = (query, options) => { - return graphql(newRequest, query, options); - }; - - return Object.assign(newApi, { - defaults: withDefaults.bind(null, newRequest), - endpoint: request.request.endpoint - }); -} - -const graphql$1 = withDefaults(request.request, { - headers: { - "user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}` - }, - method: "POST", - url: "/graphql" -}); -function withCustomRequest(customRequest) { - return withDefaults(customRequest, { - method: "POST", - url: "/graphql" - }); -} - -exports.GraphqlResponseError = GraphqlResponseError; -exports.graphql = graphql$1; -exports.withCustomRequest = withCustomRequest; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 4193: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -const VERSION = "2.21.3"; - -function ownKeys(object, enumerableOnly) { - var keys = Object.keys(object); - - if (Object.getOwnPropertySymbols) { - var symbols = Object.getOwnPropertySymbols(object); - enumerableOnly && (symbols = symbols.filter(function (sym) { - return Object.getOwnPropertyDescriptor(object, sym).enumerable; - })), keys.push.apply(keys, symbols); - } - - return keys; -} - -function _objectSpread2(target) { - for (var i = 1; i < arguments.length; i++) { - var source = null != arguments[i] ? arguments[i] : {}; - i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { - _defineProperty(target, key, source[key]); - }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { - Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); - }); - } - - return target; -} - -function _defineProperty(obj, key, value) { - if (key in obj) { - Object.defineProperty(obj, key, { - value: value, - enumerable: true, - configurable: true, - writable: true - }); - } else { - obj[key] = value; - } - - return obj; -} - -/** - * Some “list” response that can be paginated have a different response structure - * - * They have a `total_count` key in the response (search also has `incomplete_results`, - * /installation/repositories also has `repository_selection`), as well as a key with - * the list of the items which name varies from endpoint to endpoint. 
- * - * Octokit normalizes these responses so that paginated results are always returned following - * the same structure. One challenge is that if the list response has only one page, no Link - * header is provided, so this header alone is not sufficient to check wether a response is - * paginated or not. - * - * We check if a "total_count" key is present in the response data, but also make sure that - * a "url" property is not, as the "Get the combined status for a specific ref" endpoint would - * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref - */ -function normalizePaginatedListResponse(response) { - // endpoints can respond with 204 if repository is empty - if (!response.data) { - return _objectSpread2(_objectSpread2({}, response), {}, { - data: [] - }); - } - - const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); - if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way - // to retrieve the same information. - - const incompleteResults = response.data.incomplete_results; - const repositorySelection = response.data.repository_selection; - const totalCount = response.data.total_count; - delete response.data.incomplete_results; - delete response.data.repository_selection; - delete response.data.total_count; - const namespaceKey = Object.keys(response.data)[0]; - const data = response.data[namespaceKey]; - response.data = data; - - if (typeof incompleteResults !== "undefined") { - response.data.incomplete_results = incompleteResults; - } - - if (typeof repositorySelection !== "undefined") { - response.data.repository_selection = repositorySelection; - } - - response.data.total_count = totalCount; - return response; -} - -function iterator(octokit, route, parameters) { - const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); - const requestMethod = typeof route === "function" ? route : octokit.request; - const method = options.method; - const headers = options.headers; - let url = options.url; - return { - [Symbol.asyncIterator]: () => ({ - async next() { - if (!url) return { - done: true - }; - - try { - const response = await requestMethod({ - method, - url, - headers - }); - const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format: - // '; rel="next", ; rel="last"' - // sets `url` to undefined if "next" URL is not present or `link` header is not set - - url = ((normalizedResponse.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1]; - return { - value: normalizedResponse - }; - } catch (error) { - if (error.status !== 409) throw error; - url = ""; - return { - value: { - status: 200, - headers: {}, - data: [] - } - }; - } - } - - }) - }; -} - -function paginate(octokit, route, parameters, mapFn) { - if (typeof parameters === "function") { - mapFn = parameters; - parameters = undefined; - } - - return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn); -} - -function gather(octokit, results, iterator, mapFn) { - return iterator.next().then(result => { - if (result.done) { - return results; - } - - let earlyExit = false; - - function done() { - earlyExit = true; - } - - results = results.concat(mapFn ? 
mapFn(result.value, done) : result.value.data); - - if (earlyExit) { - return results; - } - - return gather(octokit, results, iterator, mapFn); - }); -} - -const composePaginateRest = Object.assign(paginate, { - iterator -}); - -const paginatingEndpoints = ["GET /app/hook/deliveries", "GET /app/installations", "GET /applications/grants", "GET /authorizations", "GET /enterprises/{enterprise}/actions/permissions/organizations", "GET /enterprises/{enterprise}/actions/runner-groups", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners", "GET /enterprises/{enterprise}/actions/runners", "GET /enterprises/{enterprise}/audit-log", "GET /enterprises/{enterprise}/secret-scanning/alerts", "GET /enterprises/{enterprise}/settings/billing/advanced-security", "GET /events", "GET /gists", "GET /gists/public", "GET /gists/starred", "GET /gists/{gist_id}/comments", "GET /gists/{gist_id}/commits", "GET /gists/{gist_id}/forks", "GET /installation/repositories", "GET /issues", "GET /licenses", "GET /marketplace_listing/plans", "GET /marketplace_listing/plans/{plan_id}/accounts", "GET /marketplace_listing/stubbed/plans", "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", "GET /networks/{owner}/{repo}/events", "GET /notifications", "GET /organizations", "GET /orgs/{org}/actions/cache/usage-by-repository", "GET /orgs/{org}/actions/permissions/repositories", "GET /orgs/{org}/actions/runner-groups", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners", "GET /orgs/{org}/actions/runners", "GET /orgs/{org}/actions/secrets", "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", "GET /orgs/{org}/audit-log", "GET /orgs/{org}/blocks", "GET /orgs/{org}/code-scanning/alerts", "GET /orgs/{org}/codespaces", "GET /orgs/{org}/credential-authorizations", "GET /orgs/{org}/dependabot/secrets", "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", "GET /orgs/{org}/events", "GET /orgs/{org}/external-groups", "GET /orgs/{org}/failed_invitations", "GET /orgs/{org}/hooks", "GET /orgs/{org}/hooks/{hook_id}/deliveries", "GET /orgs/{org}/installations", "GET /orgs/{org}/invitations", "GET /orgs/{org}/invitations/{invitation_id}/teams", "GET /orgs/{org}/issues", "GET /orgs/{org}/members", "GET /orgs/{org}/migrations", "GET /orgs/{org}/migrations/{migration_id}/repositories", "GET /orgs/{org}/outside_collaborators", "GET /orgs/{org}/packages", "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", "GET /orgs/{org}/projects", "GET /orgs/{org}/public_members", "GET /orgs/{org}/repos", "GET /orgs/{org}/secret-scanning/alerts", "GET /orgs/{org}/settings/billing/advanced-security", "GET /orgs/{org}/team-sync/groups", "GET /orgs/{org}/teams", "GET /orgs/{org}/teams/{team_slug}/discussions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/invitations", "GET /orgs/{org}/teams/{team_slug}/members", "GET /orgs/{org}/teams/{team_slug}/projects", "GET /orgs/{org}/teams/{team_slug}/repos", "GET /orgs/{org}/teams/{team_slug}/teams", "GET /projects/columns/{column_id}/cards", "GET /projects/{project_id}/collaborators", "GET /projects/{project_id}/columns", "GET 
/repos/{owner}/{repo}/actions/artifacts", "GET /repos/{owner}/{repo}/actions/caches", "GET /repos/{owner}/{repo}/actions/runners", "GET /repos/{owner}/{repo}/actions/runs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", "GET /repos/{owner}/{repo}/actions/secrets", "GET /repos/{owner}/{repo}/actions/workflows", "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", "GET /repos/{owner}/{repo}/assignees", "GET /repos/{owner}/{repo}/branches", "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", "GET /repos/{owner}/{repo}/code-scanning/alerts", "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", "GET /repos/{owner}/{repo}/code-scanning/analyses", "GET /repos/{owner}/{repo}/codespaces", "GET /repos/{owner}/{repo}/codespaces/devcontainers", "GET /repos/{owner}/{repo}/codespaces/secrets", "GET /repos/{owner}/{repo}/collaborators", "GET /repos/{owner}/{repo}/comments", "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/commits", "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", "GET /repos/{owner}/{repo}/commits/{ref}/status", "GET /repos/{owner}/{repo}/commits/{ref}/statuses", "GET /repos/{owner}/{repo}/contributors", "GET /repos/{owner}/{repo}/dependabot/secrets", "GET /repos/{owner}/{repo}/deployments", "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", "GET /repos/{owner}/{repo}/environments", "GET /repos/{owner}/{repo}/events", "GET /repos/{owner}/{repo}/forks", "GET /repos/{owner}/{repo}/git/matching-refs/{ref}", "GET /repos/{owner}/{repo}/hooks", "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", "GET /repos/{owner}/{repo}/invitations", "GET /repos/{owner}/{repo}/issues", "GET /repos/{owner}/{repo}/issues/comments", "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/issues/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", "GET /repos/{owner}/{repo}/issues/{issue_number}/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", "GET /repos/{owner}/{repo}/keys", "GET /repos/{owner}/{repo}/labels", "GET /repos/{owner}/{repo}/milestones", "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", "GET /repos/{owner}/{repo}/notifications", "GET /repos/{owner}/{repo}/pages/builds", "GET /repos/{owner}/{repo}/projects", "GET /repos/{owner}/{repo}/pulls", "GET /repos/{owner}/{repo}/pulls/comments", "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", "GET /repos/{owner}/{repo}/releases", "GET /repos/{owner}/{repo}/releases/{release_id}/assets", "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", "GET 
/repos/{owner}/{repo}/secret-scanning/alerts", "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", "GET /repos/{owner}/{repo}/stargazers", "GET /repos/{owner}/{repo}/subscribers", "GET /repos/{owner}/{repo}/tags", "GET /repos/{owner}/{repo}/teams", "GET /repos/{owner}/{repo}/topics", "GET /repositories", "GET /repositories/{repository_id}/environments/{environment_name}/secrets", "GET /search/code", "GET /search/commits", "GET /search/issues", "GET /search/labels", "GET /search/repositories", "GET /search/topics", "GET /search/users", "GET /teams/{team_id}/discussions", "GET /teams/{team_id}/discussions/{discussion_number}/comments", "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /teams/{team_id}/discussions/{discussion_number}/reactions", "GET /teams/{team_id}/invitations", "GET /teams/{team_id}/members", "GET /teams/{team_id}/projects", "GET /teams/{team_id}/repos", "GET /teams/{team_id}/teams", "GET /user/blocks", "GET /user/codespaces", "GET /user/codespaces/secrets", "GET /user/emails", "GET /user/followers", "GET /user/following", "GET /user/gpg_keys", "GET /user/installations", "GET /user/installations/{installation_id}/repositories", "GET /user/issues", "GET /user/keys", "GET /user/marketplace_purchases", "GET /user/marketplace_purchases/stubbed", "GET /user/memberships/orgs", "GET /user/migrations", "GET /user/migrations/{migration_id}/repositories", "GET /user/orgs", "GET /user/packages", "GET /user/packages/{package_type}/{package_name}/versions", "GET /user/public_emails", "GET /user/repos", "GET /user/repository_invitations", "GET /user/starred", "GET /user/subscriptions", "GET /user/teams", "GET /users", "GET /users/{username}/events", "GET /users/{username}/events/orgs/{org}", "GET /users/{username}/events/public", "GET /users/{username}/followers", "GET /users/{username}/following", "GET /users/{username}/gists", "GET /users/{username}/gpg_keys", "GET /users/{username}/keys", "GET /users/{username}/orgs", "GET /users/{username}/packages", "GET /users/{username}/projects", "GET /users/{username}/received_events", "GET /users/{username}/received_events/public", "GET /users/{username}/repos", "GET /users/{username}/starred", "GET /users/{username}/subscriptions"]; - -function isPaginatingEndpoint(arg) { - if (typeof arg === "string") { - return paginatingEndpoints.includes(arg); - } else { - return false; - } -} - -/** - * @param octokit Octokit instance - * @param options Options passed to Octokit constructor - */ - -function paginateRest(octokit) { - return { - paginate: Object.assign(paginate.bind(null, octokit), { - iterator: iterator.bind(null, octokit) - }) - }; -} -paginateRest.VERSION = VERSION; - -exports.composePaginateRest = composePaginateRest; -exports.isPaginatingEndpoint = isPaginatingEndpoint; -exports.paginateRest = paginateRest; -exports.paginatingEndpoints = paginatingEndpoints; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 3044: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -function ownKeys(object, enumerableOnly) { - var keys = Object.keys(object); - - if (Object.getOwnPropertySymbols) { - var symbols = Object.getOwnPropertySymbols(object); - - if (enumerableOnly) { - symbols = symbols.filter(function (sym) { - return Object.getOwnPropertyDescriptor(object, sym).enumerable; - }); - } - - keys.push.apply(keys, symbols); - } - - return keys; -} - -function _objectSpread2(target) { 
- for (var i = 1; i < arguments.length; i++) { - var source = arguments[i] != null ? arguments[i] : {}; - - if (i % 2) { - ownKeys(Object(source), true).forEach(function (key) { - _defineProperty(target, key, source[key]); - }); - } else if (Object.getOwnPropertyDescriptors) { - Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); - } else { - ownKeys(Object(source)).forEach(function (key) { - Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); - }); - } - } - - return target; -} - -function _defineProperty(obj, key, value) { - if (key in obj) { - Object.defineProperty(obj, key, { - value: value, - enumerable: true, - configurable: true, - writable: true - }); - } else { - obj[key] = value; - } - - return obj; -} - -const Endpoints = { - actions: { - addCustomLabelsToSelfHostedRunnerForOrg: ["POST /orgs/{org}/actions/runners/{runner_id}/labels"], - addCustomLabelsToSelfHostedRunnerForRepo: ["POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], - addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], - approveWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve"], - cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"], - createOrUpdateEnvironmentSecret: ["PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], - createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], - createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - createRegistrationTokenForOrg: ["POST /orgs/{org}/actions/runners/registration-token"], - createRegistrationTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/registration-token"], - createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], - createRemoveTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/remove-token"], - createWorkflowDispatch: ["POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"], - deleteActionsCacheById: ["DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}"], - deleteActionsCacheByKey: ["DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}"], - deleteArtifact: ["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], - deleteEnvironmentSecret: ["DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], - deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], - deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - deleteSelfHostedRunnerFromOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}"], - deleteSelfHostedRunnerFromRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"], - deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], - deleteWorkflowRunLogs: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], - disableSelectedRepositoryGithubActionsOrganization: ["DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}"], - disableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable"], - downloadArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"], - downloadJobLogsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"], - downloadWorkflowRunAttemptLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs"], - downloadWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], - 
enableSelectedRepositoryGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories/{repository_id}"], - enableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable"], - getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"], - getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"], - getActionsCacheUsageByRepoForOrg: ["GET /orgs/{org}/actions/cache/usage-by-repository"], - getActionsCacheUsageForEnterprise: ["GET /enterprises/{enterprise}/actions/cache/usage"], - getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"], - getAllowedActionsOrganization: ["GET /orgs/{org}/actions/permissions/selected-actions"], - getAllowedActionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions/selected-actions"], - getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], - getEnvironmentPublicKey: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key"], - getEnvironmentSecret: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], - getGithubActionsDefaultWorkflowPermissionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/workflow"], - getGithubActionsDefaultWorkflowPermissionsOrganization: ["GET /orgs/{org}/actions/permissions/workflow"], - getGithubActionsDefaultWorkflowPermissionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions/workflow"], - getGithubActionsPermissionsOrganization: ["GET /orgs/{org}/actions/permissions"], - getGithubActionsPermissionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions"], - getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], - getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], - getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], - getPendingDeploymentsForRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"], - getRepoPermissions: ["GET /repos/{owner}/{repo}/actions/permissions", {}, { - renamed: ["actions", "getGithubActionsPermissionsRepository"] - }], - getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], - getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - getReviewsForRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals"], - getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], - getSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}"], - getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], - getWorkflowAccessToRepository: ["GET /repos/{owner}/{repo}/actions/permissions/access"], - getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], - getWorkflowRunAttempt: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}"], - getWorkflowRunUsage: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"], - getWorkflowUsage: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"], - listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], - listEnvironmentSecrets: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets"], - listJobsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"], - listJobsForWorkflowRunAttempt: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs"], - listLabelsForSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}/labels"], - listLabelsForSelfHostedRunnerForRepo: ["GET 
/repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], - listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], - listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], - listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], - listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], - listRunnerApplicationsForRepo: ["GET /repos/{owner}/{repo}/actions/runners/downloads"], - listSelectedReposForOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"], - listSelectedRepositoriesEnabledGithubActionsOrganization: ["GET /orgs/{org}/actions/permissions/repositories"], - listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], - listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], - listWorkflowRunArtifacts: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"], - listWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"], - listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], - reRunJobForWorkflowRun: ["POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun"], - reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], - reRunWorkflowFailedJobs: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs"], - removeAllCustomLabelsFromSelfHostedRunnerForOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}/labels"], - removeAllCustomLabelsFromSelfHostedRunnerForRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], - removeCustomLabelFromSelfHostedRunnerForOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}"], - removeCustomLabelFromSelfHostedRunnerForRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}"], - removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], - reviewPendingDeploymentsForRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"], - setAllowedActionsOrganization: ["PUT /orgs/{org}/actions/permissions/selected-actions"], - setAllowedActionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/selected-actions"], - setCustomLabelsForSelfHostedRunnerForOrg: ["PUT /orgs/{org}/actions/runners/{runner_id}/labels"], - setCustomLabelsForSelfHostedRunnerForRepo: ["PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], - setGithubActionsDefaultWorkflowPermissionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/workflow"], - setGithubActionsDefaultWorkflowPermissionsOrganization: ["PUT /orgs/{org}/actions/permissions/workflow"], - setGithubActionsDefaultWorkflowPermissionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/workflow"], - setGithubActionsPermissionsOrganization: ["PUT /orgs/{org}/actions/permissions"], - setGithubActionsPermissionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions"], - setSelectedReposForOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"], - setSelectedRepositoriesEnabledGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories"], - setWorkflowAccessToRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/access"] - }, - activity: { - checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], - deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], - deleteThreadSubscription: ["DELETE /notifications/threads/{thread_id}/subscription"], - getFeeds: ["GET /feeds"], - getRepoSubscription: ["GET 
/repos/{owner}/{repo}/subscription"], - getThread: ["GET /notifications/threads/{thread_id}"], - getThreadSubscriptionForAuthenticatedUser: ["GET /notifications/threads/{thread_id}/subscription"], - listEventsForAuthenticatedUser: ["GET /users/{username}/events"], - listNotificationsForAuthenticatedUser: ["GET /notifications"], - listOrgEventsForAuthenticatedUser: ["GET /users/{username}/events/orgs/{org}"], - listPublicEvents: ["GET /events"], - listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], - listPublicEventsForUser: ["GET /users/{username}/events/public"], - listPublicOrgEvents: ["GET /orgs/{org}/events"], - listReceivedEventsForUser: ["GET /users/{username}/received_events"], - listReceivedPublicEventsForUser: ["GET /users/{username}/received_events/public"], - listRepoEvents: ["GET /repos/{owner}/{repo}/events"], - listRepoNotificationsForAuthenticatedUser: ["GET /repos/{owner}/{repo}/notifications"], - listReposStarredByAuthenticatedUser: ["GET /user/starred"], - listReposStarredByUser: ["GET /users/{username}/starred"], - listReposWatchedByUser: ["GET /users/{username}/subscriptions"], - listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], - listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], - listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"], - markNotificationsAsRead: ["PUT /notifications"], - markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], - markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], - setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], - setThreadSubscription: ["PUT /notifications/threads/{thread_id}/subscription"], - starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], - unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] - }, - apps: { - addRepoToInstallation: ["PUT /user/installations/{installation_id}/repositories/{repository_id}", {}, { - renamed: ["apps", "addRepoToInstallationForAuthenticatedUser"] - }], - addRepoToInstallationForAuthenticatedUser: ["PUT /user/installations/{installation_id}/repositories/{repository_id}"], - checkToken: ["POST /applications/{client_id}/token"], - createFromManifest: ["POST /app-manifests/{code}/conversions"], - createInstallationAccessToken: ["POST /app/installations/{installation_id}/access_tokens"], - deleteAuthorization: ["DELETE /applications/{client_id}/grant"], - deleteInstallation: ["DELETE /app/installations/{installation_id}"], - deleteToken: ["DELETE /applications/{client_id}/token"], - getAuthenticated: ["GET /app"], - getBySlug: ["GET /apps/{app_slug}"], - getInstallation: ["GET /app/installations/{installation_id}"], - getOrgInstallation: ["GET /orgs/{org}/installation"], - getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"], - getSubscriptionPlanForAccount: ["GET /marketplace_listing/accounts/{account_id}"], - getSubscriptionPlanForAccountStubbed: ["GET /marketplace_listing/stubbed/accounts/{account_id}"], - getUserInstallation: ["GET /users/{username}/installation"], - getWebhookConfigForApp: ["GET /app/hook/config"], - getWebhookDelivery: ["GET /app/hook/deliveries/{delivery_id}"], - listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], - listAccountsForPlanStubbed: ["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"], - listInstallationReposForAuthenticatedUser: ["GET /user/installations/{installation_id}/repositories"], - listInstallations: ["GET /app/installations"], - listInstallationsForAuthenticatedUser: ["GET 
/user/installations"], - listPlans: ["GET /marketplace_listing/plans"], - listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], - listReposAccessibleToInstallation: ["GET /installation/repositories"], - listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], - listSubscriptionsForAuthenticatedUserStubbed: ["GET /user/marketplace_purchases/stubbed"], - listWebhookDeliveries: ["GET /app/hook/deliveries"], - redeliverWebhookDelivery: ["POST /app/hook/deliveries/{delivery_id}/attempts"], - removeRepoFromInstallation: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}", {}, { - renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"] - }], - removeRepoFromInstallationForAuthenticatedUser: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}"], - resetToken: ["PATCH /applications/{client_id}/token"], - revokeInstallationAccessToken: ["DELETE /installation/token"], - scopeToken: ["POST /applications/{client_id}/token/scoped"], - suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], - unsuspendInstallation: ["DELETE /app/installations/{installation_id}/suspended"], - updateWebhookConfigForApp: ["PATCH /app/hook/config"] - }, - billing: { - getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"], - getGithubActionsBillingUser: ["GET /users/{username}/settings/billing/actions"], - getGithubAdvancedSecurityBillingGhe: ["GET /enterprises/{enterprise}/settings/billing/advanced-security"], - getGithubAdvancedSecurityBillingOrg: ["GET /orgs/{org}/settings/billing/advanced-security"], - getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"], - getGithubPackagesBillingUser: ["GET /users/{username}/settings/billing/packages"], - getSharedStorageBillingOrg: ["GET /orgs/{org}/settings/billing/shared-storage"], - getSharedStorageBillingUser: ["GET /users/{username}/settings/billing/shared-storage"] - }, - checks: { - create: ["POST /repos/{owner}/{repo}/check-runs"], - createSuite: ["POST /repos/{owner}/{repo}/check-suites"], - get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"], - getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"], - listAnnotations: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations"], - listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"], - listForSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs"], - listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"], - rerequestRun: ["POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest"], - rerequestSuite: ["POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest"], - setSuitesPreferences: ["PATCH /repos/{owner}/{repo}/check-suites/preferences"], - update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"] - }, - codeScanning: { - deleteAnalysis: ["DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}"], - getAlert: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", {}, { - renamedParameters: { - alert_id: "alert_number" - } - }], - getAnalysis: ["GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}"], - getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"], - listAlertInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"], - listAlertsForOrg: ["GET /orgs/{org}/code-scanning/alerts"], - listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"], - 
listAlertsInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", {}, { - renamed: ["codeScanning", "listAlertInstances"] - }], - listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"], - updateAlert: ["PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}"], - uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"] - }, - codesOfConduct: { - getAllCodesOfConduct: ["GET /codes_of_conduct"], - getConductCode: ["GET /codes_of_conduct/{key}"] - }, - codespaces: { - addRepositoryForSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"], - codespaceMachinesForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}/machines"], - createForAuthenticatedUser: ["POST /user/codespaces"], - createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"], - createOrUpdateSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}"], - createWithPrForAuthenticatedUser: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces"], - createWithRepoForAuthenticatedUser: ["POST /repos/{owner}/{repo}/codespaces"], - deleteForAuthenticatedUser: ["DELETE /user/codespaces/{codespace_name}"], - deleteFromOrganization: ["DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}"], - deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"], - deleteSecretForAuthenticatedUser: ["DELETE /user/codespaces/secrets/{secret_name}"], - exportForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/exports"], - getExportDetailsForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}/exports/{export_id}"], - getForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}"], - getPublicKeyForAuthenticatedUser: ["GET /user/codespaces/secrets/public-key"], - getRepoPublicKey: ["GET /repos/{owner}/{repo}/codespaces/secrets/public-key"], - getRepoSecret: ["GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"], - getSecretForAuthenticatedUser: ["GET /user/codespaces/secrets/{secret_name}"], - listDevcontainersInRepositoryForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces/devcontainers"], - listForAuthenticatedUser: ["GET /user/codespaces"], - listInOrganization: ["GET /orgs/{org}/codespaces", {}, { - renamedParameters: { - org_id: "org" - } - }], - listInRepositoryForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces"], - listRepoSecrets: ["GET /repos/{owner}/{repo}/codespaces/secrets"], - listRepositoriesForSecretForAuthenticatedUser: ["GET /user/codespaces/secrets/{secret_name}/repositories"], - listSecretsForAuthenticatedUser: ["GET /user/codespaces/secrets"], - removeRepositoryForSecretForAuthenticatedUser: ["DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"], - repoMachinesForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces/machines"], - setRepositoriesForSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}/repositories"], - startForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/start"], - stopForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/stop"], - stopInOrganization: ["POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop"], - updateForAuthenticatedUser: ["PATCH /user/codespaces/{codespace_name}"] - }, - dependabot: { - addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"], - createOrUpdateOrgSecret: ["PUT 
/orgs/{org}/dependabot/secrets/{secret_name}"], - createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"], - deleteOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}"], - deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"], - getOrgPublicKey: ["GET /orgs/{org}/dependabot/secrets/public-key"], - getOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}"], - getRepoPublicKey: ["GET /repos/{owner}/{repo}/dependabot/secrets/public-key"], - getRepoSecret: ["GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"], - listOrgSecrets: ["GET /orgs/{org}/dependabot/secrets"], - listRepoSecrets: ["GET /repos/{owner}/{repo}/dependabot/secrets"], - listSelectedReposForOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories"], - removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"], - setSelectedReposForOrgSecret: ["PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories"] - }, - dependencyGraph: { - createRepositorySnapshot: ["POST /repos/{owner}/{repo}/dependency-graph/snapshots"], - diffRange: ["GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}"] - }, - emojis: { - get: ["GET /emojis"] - }, - enterpriseAdmin: { - addCustomLabelsToSelfHostedRunnerForEnterprise: ["POST /enterprises/{enterprise}/actions/runners/{runner_id}/labels"], - disableSelectedOrganizationGithubActionsEnterprise: ["DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"], - enableSelectedOrganizationGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"], - getAllowedActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/selected-actions"], - getGithubActionsPermissionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions"], - getServerStatistics: ["GET /enterprise-installation/{enterprise_or_org}/server-statistics"], - listLabelsForSelfHostedRunnerForEnterprise: ["GET /enterprises/{enterprise}/actions/runners/{runner_id}/labels"], - listSelectedOrganizationsEnabledGithubActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/organizations"], - removeAllCustomLabelsFromSelfHostedRunnerForEnterprise: ["DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels"], - removeCustomLabelFromSelfHostedRunnerForEnterprise: ["DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels/{name}"], - setAllowedActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/selected-actions"], - setCustomLabelsForSelfHostedRunnerForEnterprise: ["PUT /enterprises/{enterprise}/actions/runners/{runner_id}/labels"], - setGithubActionsPermissionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions"], - setSelectedOrganizationsEnabledGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations"] - }, - gists: { - checkIsStarred: ["GET /gists/{gist_id}/star"], - create: ["POST /gists"], - createComment: ["POST /gists/{gist_id}/comments"], - delete: ["DELETE /gists/{gist_id}"], - deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"], - fork: ["POST /gists/{gist_id}/forks"], - get: ["GET /gists/{gist_id}"], - getComment: ["GET /gists/{gist_id}/comments/{comment_id}"], - getRevision: ["GET /gists/{gist_id}/{sha}"], - list: ["GET /gists"], - listComments: ["GET /gists/{gist_id}/comments"], - listCommits: ["GET /gists/{gist_id}/commits"], - listForUser: ["GET /users/{username}/gists"], - 
listForks: ["GET /gists/{gist_id}/forks"], - listPublic: ["GET /gists/public"], - listStarred: ["GET /gists/starred"], - star: ["PUT /gists/{gist_id}/star"], - unstar: ["DELETE /gists/{gist_id}/star"], - update: ["PATCH /gists/{gist_id}"], - updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"] - }, - git: { - createBlob: ["POST /repos/{owner}/{repo}/git/blobs"], - createCommit: ["POST /repos/{owner}/{repo}/git/commits"], - createRef: ["POST /repos/{owner}/{repo}/git/refs"], - createTag: ["POST /repos/{owner}/{repo}/git/tags"], - createTree: ["POST /repos/{owner}/{repo}/git/trees"], - deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"], - getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"], - getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"], - getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"], - getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"], - getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"], - listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"], - updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"] - }, - gitignore: { - getAllTemplates: ["GET /gitignore/templates"], - getTemplate: ["GET /gitignore/templates/{name}"] - }, - interactions: { - getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"], - getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"], - getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"], - getRestrictionsForYourPublicRepos: ["GET /user/interaction-limits", {}, { - renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] - }], - removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"], - removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"], - removeRestrictionsForRepo: ["DELETE /repos/{owner}/{repo}/interaction-limits"], - removeRestrictionsForYourPublicRepos: ["DELETE /user/interaction-limits", {}, { - renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"] - }], - setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"], - setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"], - setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"], - setRestrictionsForYourPublicRepos: ["PUT /user/interaction-limits", {}, { - renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] - }] - }, - issues: { - addAssignees: ["POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"], - addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], - checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], - create: ["POST /repos/{owner}/{repo}/issues"], - createComment: ["POST /repos/{owner}/{repo}/issues/{issue_number}/comments"], - createLabel: ["POST /repos/{owner}/{repo}/labels"], - createMilestone: ["POST /repos/{owner}/{repo}/milestones"], - deleteComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"], - deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], - deleteMilestone: ["DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"], - get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], - getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], - getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], - getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"], - getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"], - list: ["GET /issues"], - listAssignees: ["GET /repos/{owner}/{repo}/assignees"], - listComments: ["GET 
/repos/{owner}/{repo}/issues/{issue_number}/comments"], - listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], - listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], - listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], - listEventsForTimeline: ["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline"], - listForAuthenticatedUser: ["GET /user/issues"], - listForOrg: ["GET /orgs/{org}/issues"], - listForRepo: ["GET /repos/{owner}/{repo}/issues"], - listLabelsForMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"], - listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], - listLabelsOnIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/labels"], - listMilestones: ["GET /repos/{owner}/{repo}/milestones"], - lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], - removeAllLabels: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"], - removeAssignees: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"], - removeLabel: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"], - setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], - unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], - update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], - updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], - updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], - updateMilestone: ["PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"] - }, - licenses: { - get: ["GET /licenses/{license}"], - getAllCommonlyUsed: ["GET /licenses"], - getForRepo: ["GET /repos/{owner}/{repo}/license"] - }, - markdown: { - render: ["POST /markdown"], - renderRaw: ["POST /markdown/raw", { - headers: { - "content-type": "text/plain; charset=utf-8" - } - }] - }, - meta: { - get: ["GET /meta"], - getOctocat: ["GET /octocat"], - getZen: ["GET /zen"], - root: ["GET /"] - }, - migrations: { - cancelImport: ["DELETE /repos/{owner}/{repo}/import"], - deleteArchiveForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/archive"], - deleteArchiveForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/archive"], - downloadArchiveForOrg: ["GET /orgs/{org}/migrations/{migration_id}/archive"], - getArchiveForAuthenticatedUser: ["GET /user/migrations/{migration_id}/archive"], - getCommitAuthors: ["GET /repos/{owner}/{repo}/import/authors"], - getImportStatus: ["GET /repos/{owner}/{repo}/import"], - getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"], - getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}"], - getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}"], - listForAuthenticatedUser: ["GET /user/migrations"], - listForOrg: ["GET /orgs/{org}/migrations"], - listReposForAuthenticatedUser: ["GET /user/migrations/{migration_id}/repositories"], - listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories"], - listReposForUser: ["GET /user/migrations/{migration_id}/repositories", {}, { - renamed: ["migrations", "listReposForAuthenticatedUser"] - }], - mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"], - setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"], - startForAuthenticatedUser: ["POST /user/migrations"], - startForOrg: ["POST /orgs/{org}/migrations"], - startImport: ["PUT /repos/{owner}/{repo}/import"], - unlockRepoForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock"], - unlockRepoForOrg: ["DELETE 
/orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock"], - updateImport: ["PATCH /repos/{owner}/{repo}/import"] - }, - orgs: { - blockUser: ["PUT /orgs/{org}/blocks/{username}"], - cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"], - checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], - checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], - checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], - convertMemberToOutsideCollaborator: ["PUT /orgs/{org}/outside_collaborators/{username}"], - createInvitation: ["POST /orgs/{org}/invitations"], - createWebhook: ["POST /orgs/{org}/hooks"], - deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], - get: ["GET /orgs/{org}"], - getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], - getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], - getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], - getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"], - getWebhookDelivery: ["GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}"], - list: ["GET /organizations"], - listAppInstallations: ["GET /orgs/{org}/installations"], - listBlockedUsers: ["GET /orgs/{org}/blocks"], - listCustomRoles: ["GET /organizations/{organization_id}/custom_roles"], - listFailedInvitations: ["GET /orgs/{org}/failed_invitations"], - listForAuthenticatedUser: ["GET /user/orgs"], - listForUser: ["GET /users/{username}/orgs"], - listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], - listMembers: ["GET /orgs/{org}/members"], - listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], - listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], - listPendingInvitations: ["GET /orgs/{org}/invitations"], - listPublicMembers: ["GET /orgs/{org}/public_members"], - listWebhookDeliveries: ["GET /orgs/{org}/hooks/{hook_id}/deliveries"], - listWebhooks: ["GET /orgs/{org}/hooks"], - pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], - redeliverWebhookDelivery: ["POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"], - removeMember: ["DELETE /orgs/{org}/members/{username}"], - removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], - removeOutsideCollaborator: ["DELETE /orgs/{org}/outside_collaborators/{username}"], - removePublicMembershipForAuthenticatedUser: ["DELETE /orgs/{org}/public_members/{username}"], - setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], - setPublicMembershipForAuthenticatedUser: ["PUT /orgs/{org}/public_members/{username}"], - unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], - update: ["PATCH /orgs/{org}"], - updateMembershipForAuthenticatedUser: ["PATCH /user/memberships/orgs/{org}"], - updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"], - updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"] - }, - packages: { - deletePackageForAuthenticatedUser: ["DELETE /user/packages/{package_type}/{package_name}"], - deletePackageForOrg: ["DELETE /orgs/{org}/packages/{package_type}/{package_name}"], - deletePackageForUser: ["DELETE /users/{username}/packages/{package_type}/{package_name}"], - deletePackageVersionForAuthenticatedUser: ["DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}"], - deletePackageVersionForOrg: ["DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"], - deletePackageVersionForUser: ["DELETE 
/users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"], - getAllPackageVersionsForAPackageOwnedByAnOrg: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions", {}, { - renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] - }], - getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions", {}, { - renamed: ["packages", "getAllPackageVersionsForPackageOwnedByAuthenticatedUser"] - }], - getAllPackageVersionsForPackageOwnedByAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions"], - getAllPackageVersionsForPackageOwnedByOrg: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions"], - getAllPackageVersionsForPackageOwnedByUser: ["GET /users/{username}/packages/{package_type}/{package_name}/versions"], - getPackageForAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}"], - getPackageForOrganization: ["GET /orgs/{org}/packages/{package_type}/{package_name}"], - getPackageForUser: ["GET /users/{username}/packages/{package_type}/{package_name}"], - getPackageVersionForAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}"], - getPackageVersionForOrganization: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"], - getPackageVersionForUser: ["GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"], - listPackagesForAuthenticatedUser: ["GET /user/packages"], - listPackagesForOrganization: ["GET /orgs/{org}/packages"], - listPackagesForUser: ["GET /users/{username}/packages"], - restorePackageForAuthenticatedUser: ["POST /user/packages/{package_type}/{package_name}/restore{?token}"], - restorePackageForOrg: ["POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}"], - restorePackageForUser: ["POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}"], - restorePackageVersionForAuthenticatedUser: ["POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"], - restorePackageVersionForOrg: ["POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"], - restorePackageVersionForUser: ["POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"] - }, - projects: { - addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}"], - createCard: ["POST /projects/columns/{column_id}/cards"], - createColumn: ["POST /projects/{project_id}/columns"], - createForAuthenticatedUser: ["POST /user/projects"], - createForOrg: ["POST /orgs/{org}/projects"], - createForRepo: ["POST /repos/{owner}/{repo}/projects"], - delete: ["DELETE /projects/{project_id}"], - deleteCard: ["DELETE /projects/columns/cards/{card_id}"], - deleteColumn: ["DELETE /projects/columns/{column_id}"], - get: ["GET /projects/{project_id}"], - getCard: ["GET /projects/columns/cards/{card_id}"], - getColumn: ["GET /projects/columns/{column_id}"], - getPermissionForUser: ["GET /projects/{project_id}/collaborators/{username}/permission"], - listCards: ["GET /projects/columns/{column_id}/cards"], - listCollaborators: ["GET /projects/{project_id}/collaborators"], - listColumns: ["GET /projects/{project_id}/columns"], - listForOrg: ["GET /orgs/{org}/projects"], - listForRepo: ["GET /repos/{owner}/{repo}/projects"], - listForUser: ["GET /users/{username}/projects"], - moveCard: ["POST 
/projects/columns/cards/{card_id}/moves"], - moveColumn: ["POST /projects/columns/{column_id}/moves"], - removeCollaborator: ["DELETE /projects/{project_id}/collaborators/{username}"], - update: ["PATCH /projects/{project_id}"], - updateCard: ["PATCH /projects/columns/cards/{card_id}"], - updateColumn: ["PATCH /projects/columns/{column_id}"] - }, - pulls: { - checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], - create: ["POST /repos/{owner}/{repo}/pulls"], - createReplyForReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies"], - createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], - createReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments"], - deletePendingReview: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], - deleteReviewComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}"], - dismissReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals"], - get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], - getReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], - getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], - list: ["GET /repos/{owner}/{repo}/pulls"], - listCommentsForReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"], - listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], - listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], - listRequestedReviewers: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], - listReviewComments: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"], - listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], - listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], - merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], - removeRequestedReviewers: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], - requestReviewers: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], - submitReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"], - update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], - updateBranch: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch"], - updateReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], - updateReviewComment: ["PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"] - }, - rateLimit: { - get: ["GET /rate_limit"] - }, - reactions: { - createForCommitComment: ["POST /repos/{owner}/{repo}/comments/{comment_id}/reactions"], - createForIssue: ["POST /repos/{owner}/{repo}/issues/{issue_number}/reactions"], - createForIssueComment: ["POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"], - createForPullRequestReviewComment: ["POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"], - createForRelease: ["POST /repos/{owner}/{repo}/releases/{release_id}/reactions"], - createForTeamDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"], - createForTeamDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"], - deleteForCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}"], - deleteForIssue: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}"], - 
deleteForIssueComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}"], - deleteForPullRequestComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}"], - deleteForRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}"], - deleteForTeamDiscussion: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}"], - deleteForTeamDiscussionComment: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}"], - listForCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions"], - listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"], - listForIssueComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"], - listForPullRequestReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"], - listForRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}/reactions"], - listForTeamDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"], - listForTeamDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"] - }, - repos: { - acceptInvitation: ["PATCH /user/repository_invitations/{invitation_id}", {}, { - renamed: ["repos", "acceptInvitationForAuthenticatedUser"] - }], - acceptInvitationForAuthenticatedUser: ["PATCH /user/repository_invitations/{invitation_id}"], - addAppAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps" - }], - addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], - addStatusCheckContexts: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts" - }], - addTeamAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams" - }], - addUserAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users" - }], - checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], - checkVulnerabilityAlerts: ["GET /repos/{owner}/{repo}/vulnerability-alerts"], - codeownersErrors: ["GET /repos/{owner}/{repo}/codeowners/errors"], - compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], - compareCommitsWithBasehead: ["GET /repos/{owner}/{repo}/compare/{basehead}"], - createAutolink: ["POST /repos/{owner}/{repo}/autolinks"], - createCommitComment: ["POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"], - createCommitSignatureProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"], - createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], - createDeployKey: ["POST /repos/{owner}/{repo}/keys"], - createDeployment: ["POST /repos/{owner}/{repo}/deployments"], - createDeploymentStatus: ["POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], - createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], - createForAuthenticatedUser: ["POST /user/repos"], - createFork: ["POST /repos/{owner}/{repo}/forks"], - createInOrg: ["POST /orgs/{org}/repos"], - createOrUpdateEnvironment: ["PUT /repos/{owner}/{repo}/environments/{environment_name}"], - createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], - createPagesSite: ["POST 
/repos/{owner}/{repo}/pages"], - createRelease: ["POST /repos/{owner}/{repo}/releases"], - createTagProtection: ["POST /repos/{owner}/{repo}/tags/protection"], - createUsingTemplate: ["POST /repos/{template_owner}/{template_repo}/generate"], - createWebhook: ["POST /repos/{owner}/{repo}/hooks"], - declineInvitation: ["DELETE /user/repository_invitations/{invitation_id}", {}, { - renamed: ["repos", "declineInvitationForAuthenticatedUser"] - }], - declineInvitationForAuthenticatedUser: ["DELETE /user/repository_invitations/{invitation_id}"], - delete: ["DELETE /repos/{owner}/{repo}"], - deleteAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], - deleteAdminBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], - deleteAnEnvironment: ["DELETE /repos/{owner}/{repo}/environments/{environment_name}"], - deleteAutolink: ["DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}"], - deleteBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection"], - deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], - deleteCommitSignatureProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"], - deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], - deleteDeployment: ["DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"], - deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], - deleteInvitation: ["DELETE /repos/{owner}/{repo}/invitations/{invitation_id}"], - deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages"], - deletePullRequestReviewProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], - deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], - deleteReleaseAsset: ["DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"], - deleteTagProtection: ["DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}"], - deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], - disableAutomatedSecurityFixes: ["DELETE /repos/{owner}/{repo}/automated-security-fixes"], - disableLfsForRepo: ["DELETE /repos/{owner}/{repo}/lfs"], - disableVulnerabilityAlerts: ["DELETE /repos/{owner}/{repo}/vulnerability-alerts"], - downloadArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}", {}, { - renamed: ["repos", "downloadZipballArchive"] - }], - downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"], - downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"], - enableAutomatedSecurityFixes: ["PUT /repos/{owner}/{repo}/automated-security-fixes"], - enableLfsForRepo: ["PUT /repos/{owner}/{repo}/lfs"], - enableVulnerabilityAlerts: ["PUT /repos/{owner}/{repo}/vulnerability-alerts"], - generateReleaseNotes: ["POST /repos/{owner}/{repo}/releases/generate-notes"], - get: ["GET /repos/{owner}/{repo}"], - getAccessRestrictions: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], - getAdminBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], - getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"], - getAllStatusCheckContexts: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"], - getAllTopics: ["GET /repos/{owner}/{repo}/topics"], - getAppsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"], - getAutolink: ["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"], - getBranch: ["GET 
/repos/{owner}/{repo}/branches/{branch}"], - getBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection"], - getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], - getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], - getCollaboratorPermissionLevel: ["GET /repos/{owner}/{repo}/collaborators/{username}/permission"], - getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], - getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], - getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], - getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], - getCommitSignatureProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"], - getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], - getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], - getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], - getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], - getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], - getDeploymentStatus: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}"], - getEnvironment: ["GET /repos/{owner}/{repo}/environments/{environment_name}"], - getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], - getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], - getPages: ["GET /repos/{owner}/{repo}/pages"], - getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], - getPagesHealthCheck: ["GET /repos/{owner}/{repo}/pages/health"], - getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], - getPullRequestReviewProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], - getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], - getReadme: ["GET /repos/{owner}/{repo}/readme"], - getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"], - getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], - getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], - getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], - getStatusChecksProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], - getTeamsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"], - getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], - getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], - getUsersWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"], - getViews: ["GET /repos/{owner}/{repo}/traffic/views"], - getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], - getWebhookConfigForRepo: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/config"], - getWebhookDelivery: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}"], - listAutolinks: ["GET /repos/{owner}/{repo}/autolinks"], - listBranches: ["GET /repos/{owner}/{repo}/branches"], - listBranchesForHeadCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head"], - listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], - listCommentsForCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"], - listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], - listCommitStatusesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/statuses"], - listCommits: ["GET 
/repos/{owner}/{repo}/commits"], - listContributors: ["GET /repos/{owner}/{repo}/contributors"], - listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], - listDeploymentStatuses: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], - listDeployments: ["GET /repos/{owner}/{repo}/deployments"], - listForAuthenticatedUser: ["GET /user/repos"], - listForOrg: ["GET /orgs/{org}/repos"], - listForUser: ["GET /users/{username}/repos"], - listForks: ["GET /repos/{owner}/{repo}/forks"], - listInvitations: ["GET /repos/{owner}/{repo}/invitations"], - listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"], - listLanguages: ["GET /repos/{owner}/{repo}/languages"], - listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], - listPublic: ["GET /repositories"], - listPullRequestsAssociatedWithCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls"], - listReleaseAssets: ["GET /repos/{owner}/{repo}/releases/{release_id}/assets"], - listReleases: ["GET /repos/{owner}/{repo}/releases"], - listTagProtection: ["GET /repos/{owner}/{repo}/tags/protection"], - listTags: ["GET /repos/{owner}/{repo}/tags"], - listTeams: ["GET /repos/{owner}/{repo}/teams"], - listWebhookDeliveries: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries"], - listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], - merge: ["POST /repos/{owner}/{repo}/merges"], - mergeUpstream: ["POST /repos/{owner}/{repo}/merge-upstream"], - pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], - redeliverWebhookDelivery: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"], - removeAppAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps" - }], - removeCollaborator: ["DELETE /repos/{owner}/{repo}/collaborators/{username}"], - removeStatusCheckContexts: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts" - }], - removeStatusCheckProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], - removeTeamAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams" - }], - removeUserAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users" - }], - renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"], - replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics"], - requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], - setAdminBranchProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], - setAppAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps" - }], - setStatusCheckContexts: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts" - }], - setTeamAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams" - }], - setUserAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users" - }], - testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], - transfer: ["POST /repos/{owner}/{repo}/transfer"], - update: ["PATCH /repos/{owner}/{repo}"], - updateBranchProtection: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection"], - 
updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], - updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], - updateInvitation: ["PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"], - updatePullRequestReviewProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], - updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], - updateReleaseAsset: ["PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"], - updateStatusCheckPotection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", {}, { - renamed: ["repos", "updateStatusCheckProtection"] - }], - updateStatusCheckProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], - updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], - updateWebhookConfigForRepo: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config"], - uploadReleaseAsset: ["POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", { - baseUrl: "https://uploads.github.com" - }] - }, - search: { - code: ["GET /search/code"], - commits: ["GET /search/commits"], - issuesAndPullRequests: ["GET /search/issues"], - labels: ["GET /search/labels"], - repos: ["GET /search/repositories"], - topics: ["GET /search/topics"], - users: ["GET /search/users"] - }, - secretScanning: { - getAlert: ["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"], - listAlertsForEnterprise: ["GET /enterprises/{enterprise}/secret-scanning/alerts"], - listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"], - listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"], - listLocationsForAlert: ["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations"], - updateAlert: ["PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"] - }, - teams: { - addOrUpdateMembershipForUserInOrg: ["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"], - addOrUpdateProjectPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}"], - addOrUpdateRepoPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], - checkPermissionsForProjectInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}"], - checkPermissionsForRepoInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], - create: ["POST /orgs/{org}/teams"], - createDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], - createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], - deleteDiscussionCommentInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], - deleteDiscussionInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], - deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], - getByName: ["GET /orgs/{org}/teams/{team_slug}"], - getDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], - getDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], - getMembershipForUserInOrg: ["GET /orgs/{org}/teams/{team_slug}/memberships/{username}"], - list: ["GET /orgs/{org}/teams"], - listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], - listDiscussionCommentsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], - listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], - 
listForAuthenticatedUser: ["GET /user/teams"], - listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], - listPendingInvitationsInOrg: ["GET /orgs/{org}/teams/{team_slug}/invitations"], - listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects"], - listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], - removeMembershipForUserInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"], - removeProjectInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}"], - removeRepoInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], - updateDiscussionCommentInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], - updateDiscussionInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], - updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"] - }, - users: { - addEmailForAuthenticated: ["POST /user/emails", {}, { - renamed: ["users", "addEmailForAuthenticatedUser"] - }], - addEmailForAuthenticatedUser: ["POST /user/emails"], - block: ["PUT /user/blocks/{username}"], - checkBlocked: ["GET /user/blocks/{username}"], - checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], - checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], - createGpgKeyForAuthenticated: ["POST /user/gpg_keys", {}, { - renamed: ["users", "createGpgKeyForAuthenticatedUser"] - }], - createGpgKeyForAuthenticatedUser: ["POST /user/gpg_keys"], - createPublicSshKeyForAuthenticated: ["POST /user/keys", {}, { - renamed: ["users", "createPublicSshKeyForAuthenticatedUser"] - }], - createPublicSshKeyForAuthenticatedUser: ["POST /user/keys"], - deleteEmailForAuthenticated: ["DELETE /user/emails", {}, { - renamed: ["users", "deleteEmailForAuthenticatedUser"] - }], - deleteEmailForAuthenticatedUser: ["DELETE /user/emails"], - deleteGpgKeyForAuthenticated: ["DELETE /user/gpg_keys/{gpg_key_id}", {}, { - renamed: ["users", "deleteGpgKeyForAuthenticatedUser"] - }], - deleteGpgKeyForAuthenticatedUser: ["DELETE /user/gpg_keys/{gpg_key_id}"], - deletePublicSshKeyForAuthenticated: ["DELETE /user/keys/{key_id}", {}, { - renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"] - }], - deletePublicSshKeyForAuthenticatedUser: ["DELETE /user/keys/{key_id}"], - follow: ["PUT /user/following/{username}"], - getAuthenticated: ["GET /user"], - getByUsername: ["GET /users/{username}"], - getContextForUser: ["GET /users/{username}/hovercard"], - getGpgKeyForAuthenticated: ["GET /user/gpg_keys/{gpg_key_id}", {}, { - renamed: ["users", "getGpgKeyForAuthenticatedUser"] - }], - getGpgKeyForAuthenticatedUser: ["GET /user/gpg_keys/{gpg_key_id}"], - getPublicSshKeyForAuthenticated: ["GET /user/keys/{key_id}", {}, { - renamed: ["users", "getPublicSshKeyForAuthenticatedUser"] - }], - getPublicSshKeyForAuthenticatedUser: ["GET /user/keys/{key_id}"], - list: ["GET /users"], - listBlockedByAuthenticated: ["GET /user/blocks", {}, { - renamed: ["users", "listBlockedByAuthenticatedUser"] - }], - listBlockedByAuthenticatedUser: ["GET /user/blocks"], - listEmailsForAuthenticated: ["GET /user/emails", {}, { - renamed: ["users", "listEmailsForAuthenticatedUser"] - }], - listEmailsForAuthenticatedUser: ["GET /user/emails"], - listFollowedByAuthenticated: ["GET /user/following", {}, { - renamed: ["users", "listFollowedByAuthenticatedUser"] - }], - listFollowedByAuthenticatedUser: ["GET /user/following"], - listFollowersForAuthenticatedUser: ["GET /user/followers"], - listFollowersForUser: ["GET 
/users/{username}/followers"], - listFollowingForUser: ["GET /users/{username}/following"], - listGpgKeysForAuthenticated: ["GET /user/gpg_keys", {}, { - renamed: ["users", "listGpgKeysForAuthenticatedUser"] - }], - listGpgKeysForAuthenticatedUser: ["GET /user/gpg_keys"], - listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], - listPublicEmailsForAuthenticated: ["GET /user/public_emails", {}, { - renamed: ["users", "listPublicEmailsForAuthenticatedUser"] - }], - listPublicEmailsForAuthenticatedUser: ["GET /user/public_emails"], - listPublicKeysForUser: ["GET /users/{username}/keys"], - listPublicSshKeysForAuthenticated: ["GET /user/keys", {}, { - renamed: ["users", "listPublicSshKeysForAuthenticatedUser"] - }], - listPublicSshKeysForAuthenticatedUser: ["GET /user/keys"], - setPrimaryEmailVisibilityForAuthenticated: ["PATCH /user/email/visibility", {}, { - renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"] - }], - setPrimaryEmailVisibilityForAuthenticatedUser: ["PATCH /user/email/visibility"], - unblock: ["DELETE /user/blocks/{username}"], - unfollow: ["DELETE /user/following/{username}"], - updateAuthenticated: ["PATCH /user"] - } -}; - -const VERSION = "5.16.2"; - -function endpointsToMethods(octokit, endpointsMap) { - const newMethods = {}; - - for (const [scope, endpoints] of Object.entries(endpointsMap)) { - for (const [methodName, endpoint] of Object.entries(endpoints)) { - const [route, defaults, decorations] = endpoint; - const [method, url] = route.split(/ /); - const endpointDefaults = Object.assign({ - method, - url - }, defaults); - - if (!newMethods[scope]) { - newMethods[scope] = {}; - } - - const scopeMethods = newMethods[scope]; - - if (decorations) { - scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations); - continue; - } - - scopeMethods[methodName] = octokit.request.defaults(endpointDefaults); - } - } - - return newMethods; -} - -function decorate(octokit, scope, methodName, defaults, decorations) { - const requestWithDefaults = octokit.request.defaults(defaults); - /* istanbul ignore next */ - - function withDecorations(...args) { - // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData` - - if (decorations.mapToData) { - options = Object.assign({}, options, { - data: options[decorations.mapToData], - [decorations.mapToData]: undefined - }); - return requestWithDefaults(options); - } - - if (decorations.renamed) { - const [newScope, newMethodName] = decorations.renamed; - octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`); - } - - if (decorations.deprecated) { - octokit.log.warn(decorations.deprecated); - } - - if (decorations.renamedParameters) { - // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - const options = requestWithDefaults.endpoint.merge(...args); - - for (const [name, alias] of Object.entries(decorations.renamedParameters)) { - if (name in options) { - octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". 
Use "${alias}" instead`); - - if (!(alias in options)) { - options[alias] = options[name]; - } - - delete options[name]; - } - } - - return requestWithDefaults(options); - } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - - - return requestWithDefaults(...args); - } - - return Object.assign(withDecorations, requestWithDefaults); -} - -function restEndpointMethods(octokit) { - const api = endpointsToMethods(octokit, Endpoints); - return { - rest: api - }; -} -restEndpointMethods.VERSION = VERSION; -function legacyRestEndpointMethods(octokit) { - const api = endpointsToMethods(octokit, Endpoints); - return _objectSpread2(_objectSpread2({}, api), {}, { - rest: api - }); -} -legacyRestEndpointMethods.VERSION = VERSION; - -exports.legacyRestEndpointMethods = legacyRestEndpointMethods; -exports.restEndpointMethods = restEndpointMethods; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 537: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } - -var deprecation = __nccwpck_require__(8932); -var once = _interopDefault(__nccwpck_require__(1223)); - -const logOnceCode = once(deprecation => console.warn(deprecation)); -const logOnceHeaders = once(deprecation => console.warn(deprecation)); -/** - * Error with extra properties to help with debugging - */ - -class RequestError extends Error { - constructor(message, statusCode, options) { - super(message); // Maintains proper stack trace (only available on V8) - - /* istanbul ignore next */ - - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } - - this.name = "HttpError"; - this.status = statusCode; - let headers; - - if ("headers" in options && typeof options.headers !== "undefined") { - headers = options.headers; - } - - if ("response" in options) { - this.response = options.response; - headers = options.response.headers; - } // redact request credentials without mutating original request options - - - const requestCopy = Object.assign({}, options.request); - - if (options.request.headers.authorization) { - requestCopy.headers = Object.assign({}, options.request.headers, { - authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]") - }); - } - - requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit - // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications - .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended - // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header - .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); - this.request = requestCopy; // deprecations - - Object.defineProperty(this, "code", { - get() { - logOnceCode(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); - return statusCode; - } - - }); - Object.defineProperty(this, "headers", { - get() { - logOnceHeaders(new deprecation.Deprecation("[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.")); - return headers || {}; - } - - }); - } - -} - -exports.RequestError = RequestError; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 6234: -/***/ 
((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } - -var endpoint = __nccwpck_require__(9440); -var universalUserAgent = __nccwpck_require__(5030); -var isPlainObject = __nccwpck_require__(3287); -var nodeFetch = _interopDefault(__nccwpck_require__(467)); -var requestError = __nccwpck_require__(537); - -const VERSION = "5.6.3"; - -function getBufferResponse(response) { - return response.arrayBuffer(); -} - -function fetchWrapper(requestOptions) { - const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console; - - if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { - requestOptions.body = JSON.stringify(requestOptions.body); - } - - let headers = {}; - let status; - let url; - const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch; - return fetch(requestOptions.url, Object.assign({ - method: requestOptions.method, - body: requestOptions.body, - headers: requestOptions.headers, - redirect: requestOptions.redirect - }, // `requestOptions.request.agent` type is incompatible - // see https://github.com/octokit/types.ts/pull/264 - requestOptions.request)).then(async response => { - url = response.url; - status = response.status; - - for (const keyAndValue of response.headers) { - headers[keyAndValue[0]] = keyAndValue[1]; - } - - if ("deprecation" in headers) { - const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/); - const deprecationLink = matches && matches.pop(); - log.warn(`[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. 
See ${deprecationLink}` : ""}`); - } - - if (status === 204 || status === 205) { - return; - } // GitHub API returns 200 for HEAD requests - - - if (requestOptions.method === "HEAD") { - if (status < 400) { - return; - } - - throw new requestError.RequestError(response.statusText, status, { - response: { - url, - status, - headers, - data: undefined - }, - request: requestOptions - }); - } - - if (status === 304) { - throw new requestError.RequestError("Not modified", status, { - response: { - url, - status, - headers, - data: await getResponseData(response) - }, - request: requestOptions - }); - } - - if (status >= 400) { - const data = await getResponseData(response); - const error = new requestError.RequestError(toErrorMessage(data), status, { - response: { - url, - status, - headers, - data - }, - request: requestOptions - }); - throw error; - } - - return getResponseData(response); - }).then(data => { - return { - status, - url, - headers, - data - }; - }).catch(error => { - if (error instanceof requestError.RequestError) throw error; - throw new requestError.RequestError(error.message, 500, { - request: requestOptions - }); - }); -} - -async function getResponseData(response) { - const contentType = response.headers.get("content-type"); - - if (/application\/json/.test(contentType)) { - return response.json(); - } - - if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { - return response.text(); - } - - return getBufferResponse(response); -} - -function toErrorMessage(data) { - if (typeof data === "string") return data; // istanbul ignore else - just in case - - if ("message" in data) { - if (Array.isArray(data.errors)) { - return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}`; - } - - return data.message; - } // istanbul ignore next - just in case - - - return `Unknown error: ${JSON.stringify(data)}`; -} - -function withDefaults(oldEndpoint, newDefaults) { - const endpoint = oldEndpoint.defaults(newDefaults); - - const newApi = function (route, parameters) { - const endpointOptions = endpoint.merge(route, parameters); - - if (!endpointOptions.request || !endpointOptions.request.hook) { - return fetchWrapper(endpoint.parse(endpointOptions)); - } - - const request = (route, parameters) => { - return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters))); - }; - - Object.assign(request, { - endpoint, - defaults: withDefaults.bind(null, endpoint) - }); - return endpointOptions.request.hook(request, endpointOptions); - }; - - return Object.assign(newApi, { - endpoint, - defaults: withDefaults.bind(null, endpoint) - }); -} - -const request = withDefaults(endpoint.endpoint, { - headers: { - "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}` - } -}); - -exports.request = request; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 3682: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var register = __nccwpck_require__(4670) -var addHook = __nccwpck_require__(5549) -var removeHook = __nccwpck_require__(6819) - -// bind with array of arguments: https://stackoverflow.com/a/21792913 -var bind = Function.bind -var bindable = bind.bind(bind) - -function bindApi (hook, state, name) { - var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state]) - hook.api = { remove: removeHookRef } - hook.remove = removeHookRef - - ;['before', 'error', 'after', 'wrap'].forEach(function (kind) { - var args = name ? 
[state, kind, name] : [state, kind] - hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args) - }) -} - -function HookSingular () { - var singularHookName = 'h' - var singularHookState = { - registry: {} - } - var singularHook = register.bind(null, singularHookState, singularHookName) - bindApi(singularHook, singularHookState, singularHookName) - return singularHook -} - -function HookCollection () { - var state = { - registry: {} - } - - var hook = register.bind(null, state) - bindApi(hook, state) - - return hook -} - -var collectionHookDeprecationMessageDisplayed = false -function Hook () { - if (!collectionHookDeprecationMessageDisplayed) { - console.warn('[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4') - collectionHookDeprecationMessageDisplayed = true - } - return HookCollection() -} - -Hook.Singular = HookSingular.bind() -Hook.Collection = HookCollection.bind() - -module.exports = Hook -// expose constructors as a named property for TypeScript -module.exports.Hook = Hook -module.exports.Singular = Hook.Singular -module.exports.Collection = Hook.Collection - - -/***/ }), - -/***/ 5549: -/***/ ((module) => { - -module.exports = addHook; - -function addHook(state, kind, name, hook) { - var orig = hook; - if (!state.registry[name]) { - state.registry[name] = []; - } - - if (kind === "before") { - hook = function (method, options) { - return Promise.resolve() - .then(orig.bind(null, options)) - .then(method.bind(null, options)); - }; - } - - if (kind === "after") { - hook = function (method, options) { - var result; - return Promise.resolve() - .then(method.bind(null, options)) - .then(function (result_) { - result = result_; - return orig(result, options); - }) - .then(function () { - return result; - }); - }; - } - - if (kind === "error") { - hook = function (method, options) { - return Promise.resolve() - .then(method.bind(null, options)) - .catch(function (error) { - return orig(error, options); - }); - }; - } - - state.registry[name].push({ - hook: hook, - orig: orig, - }); -} - - -/***/ }), - -/***/ 4670: -/***/ ((module) => { - -module.exports = register; - -function register(state, name, method, options) { - if (typeof method !== "function") { - throw new Error("method for before hook must be a function"); - } - - if (!options) { - options = {}; - } - - if (Array.isArray(name)) { - return name.reverse().reduce(function (callback, name) { - return register.bind(null, state, name, callback, options); - }, method)(); - } - - return Promise.resolve().then(function () { - if (!state.registry[name]) { - return method(options); - } - - return state.registry[name].reduce(function (method, registered) { - return registered.hook.bind(null, method, options); - }, method)(); - }); -} - - -/***/ }), - -/***/ 6819: -/***/ ((module) => { - -module.exports = removeHook; - -function removeHook(state, name, method) { - if (!state.registry[name]) { - return; - } - - var index = state.registry[name] - .map(function (registered) { - return registered.orig; - }) - .indexOf(method); - - if (index === -1) { - return; - } - - state.registry[name].splice(index, 1); -} - - -/***/ }), - -/***/ 8932: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -class Deprecation extends Error { - constructor(message) { - super(message); // Maintains proper stack trace (only available on V8) - - /* istanbul ignore next */ - - if 
(Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } - - this.name = 'Deprecation'; - } - -} - -exports.Deprecation = Deprecation; - - -/***/ }), - -/***/ 9618: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(7758) -const path = __nccwpck_require__(5622) -const mkdirsSync = __nccwpck_require__(2915).mkdirsSync -const utimesMillisSync = __nccwpck_require__(2548).utimesMillisSync -const stat = __nccwpck_require__(3901) - -function copySync (src, dest, opts) { - if (typeof opts === 'function') { - opts = { filter: opts } - } - - opts = opts || {} - opts.clobber = 'clobber' in opts ? !!opts.clobber : true // default to true for now - opts.overwrite = 'overwrite' in opts ? !!opts.overwrite : opts.clobber // overwrite falls back to clobber - - // Warn about using preserveTimestamps on 32-bit node - if (opts.preserveTimestamps && process.arch === 'ia32') { - process.emitWarning( - 'Using the preserveTimestamps option in 32-bit node is not recommended;\n\n' + - '\tsee https://github.com/jprichardson/node-fs-extra/issues/269', - 'Warning', 'fs-extra-WARN0002' - ) - } - - const { srcStat, destStat } = stat.checkPathsSync(src, dest, 'copy', opts) - stat.checkParentPathsSync(src, srcStat, dest, 'copy') - return handleFilterAndCopy(destStat, src, dest, opts) -} - -function handleFilterAndCopy (destStat, src, dest, opts) { - if (opts.filter && !opts.filter(src, dest)) return - const destParent = path.dirname(dest) - if (!fs.existsSync(destParent)) mkdirsSync(destParent) - return getStats(destStat, src, dest, opts) -} - -function startCopy (destStat, src, dest, opts) { - if (opts.filter && !opts.filter(src, dest)) return - return getStats(destStat, src, dest, opts) -} - -function getStats (destStat, src, dest, opts) { - const statSync = opts.dereference ? 
fs.statSync : fs.lstatSync - const srcStat = statSync(src) - - if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts) - else if (srcStat.isFile() || - srcStat.isCharacterDevice() || - srcStat.isBlockDevice()) return onFile(srcStat, destStat, src, dest, opts) - else if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts) - else if (srcStat.isSocket()) throw new Error(`Cannot copy a socket file: ${src}`) - else if (srcStat.isFIFO()) throw new Error(`Cannot copy a FIFO pipe: ${src}`) - throw new Error(`Unknown file: ${src}`) -} - -function onFile (srcStat, destStat, src, dest, opts) { - if (!destStat) return copyFile(srcStat, src, dest, opts) - return mayCopyFile(srcStat, src, dest, opts) -} - -function mayCopyFile (srcStat, src, dest, opts) { - if (opts.overwrite) { - fs.unlinkSync(dest) - return copyFile(srcStat, src, dest, opts) - } else if (opts.errorOnExist) { - throw new Error(`'${dest}' already exists`) - } -} - -function copyFile (srcStat, src, dest, opts) { - fs.copyFileSync(src, dest) - if (opts.preserveTimestamps) handleTimestamps(srcStat.mode, src, dest) - return setDestMode(dest, srcStat.mode) -} - -function handleTimestamps (srcMode, src, dest) { - // Make sure the file is writable before setting the timestamp - // otherwise open fails with EPERM when invoked with 'r+' - // (through utimes call) - if (fileIsNotWritable(srcMode)) makeFileWritable(dest, srcMode) - return setDestTimestamps(src, dest) -} - -function fileIsNotWritable (srcMode) { - return (srcMode & 0o200) === 0 -} - -function makeFileWritable (dest, srcMode) { - return setDestMode(dest, srcMode | 0o200) -} - -function setDestMode (dest, srcMode) { - return fs.chmodSync(dest, srcMode) -} - -function setDestTimestamps (src, dest) { - // The initial srcStat.atime cannot be trusted - // because it is modified by the read(2) system call - // (See https://nodejs.org/api/fs.html#fs_stat_time_values) - const updatedSrcStat = fs.statSync(src) - return utimesMillisSync(dest, updatedSrcStat.atime, updatedSrcStat.mtime) -} - -function onDir (srcStat, destStat, src, dest, opts) { - if (!destStat) return mkDirAndCopy(srcStat.mode, src, dest, opts) - return copyDir(src, dest, opts) -} - -function mkDirAndCopy (srcMode, src, dest, opts) { - fs.mkdirSync(dest) - copyDir(src, dest, opts) - return setDestMode(dest, srcMode) -} - -function copyDir (src, dest, opts) { - fs.readdirSync(src).forEach(item => copyDirItem(item, src, dest, opts)) -} - -function copyDirItem (item, src, dest, opts) { - const srcItem = path.join(src, item) - const destItem = path.join(dest, item) - const { destStat } = stat.checkPathsSync(srcItem, destItem, 'copy', opts) - return startCopy(destStat, srcItem, destItem, opts) -} - -function onLink (destStat, src, dest, opts) { - let resolvedSrc = fs.readlinkSync(src) - if (opts.dereference) { - resolvedSrc = path.resolve(process.cwd(), resolvedSrc) - } - - if (!destStat) { - return fs.symlinkSync(resolvedSrc, dest) - } else { - let resolvedDest - try { - resolvedDest = fs.readlinkSync(dest) - } catch (err) { - // dest exists and is a regular file or directory, - // Windows may throw UNKNOWN error. If dest already exists, - // fs throws error anyway, so no need to guard against it here. 
- if (err.code === 'EINVAL' || err.code === 'UNKNOWN') return fs.symlinkSync(resolvedSrc, dest) - throw err - } - if (opts.dereference) { - resolvedDest = path.resolve(process.cwd(), resolvedDest) - } - if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) { - throw new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`) - } - - // prevent copy if src is a subdir of dest since unlinking - // dest in this case would result in removing src contents - // and therefore a broken symlink would be created. - if (fs.statSync(dest).isDirectory() && stat.isSrcSubdir(resolvedDest, resolvedSrc)) { - throw new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`) - } - return copyLink(resolvedSrc, dest) - } -} - -function copyLink (resolvedSrc, dest) { - fs.unlinkSync(dest) - return fs.symlinkSync(resolvedSrc, dest) -} - -module.exports = copySync - - -/***/ }), - -/***/ 8834: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(7758) -const path = __nccwpck_require__(5622) -const mkdirs = __nccwpck_require__(2915).mkdirs -const pathExists = __nccwpck_require__(3835).pathExists -const utimesMillis = __nccwpck_require__(2548).utimesMillis -const stat = __nccwpck_require__(3901) - -function copy (src, dest, opts, cb) { - if (typeof opts === 'function' && !cb) { - cb = opts - opts = {} - } else if (typeof opts === 'function') { - opts = { filter: opts } - } - - cb = cb || function () {} - opts = opts || {} - - opts.clobber = 'clobber' in opts ? !!opts.clobber : true // default to true for now - opts.overwrite = 'overwrite' in opts ? !!opts.overwrite : opts.clobber // overwrite falls back to clobber - - // Warn about using preserveTimestamps on 32-bit node - if (opts.preserveTimestamps && process.arch === 'ia32') { - process.emitWarning( - 'Using the preserveTimestamps option in 32-bit node is not recommended;\n\n' + - '\tsee https://github.com/jprichardson/node-fs-extra/issues/269', - 'Warning', 'fs-extra-WARN0001' - ) - } - - stat.checkPaths(src, dest, 'copy', opts, (err, stats) => { - if (err) return cb(err) - const { srcStat, destStat } = stats - stat.checkParentPaths(src, srcStat, dest, 'copy', err => { - if (err) return cb(err) - if (opts.filter) return handleFilter(checkParentDir, destStat, src, dest, opts, cb) - return checkParentDir(destStat, src, dest, opts, cb) - }) - }) -} - -function checkParentDir (destStat, src, dest, opts, cb) { - const destParent = path.dirname(dest) - pathExists(destParent, (err, dirExists) => { - if (err) return cb(err) - if (dirExists) return getStats(destStat, src, dest, opts, cb) - mkdirs(destParent, err => { - if (err) return cb(err) - return getStats(destStat, src, dest, opts, cb) - }) - }) -} - -function handleFilter (onInclude, destStat, src, dest, opts, cb) { - Promise.resolve(opts.filter(src, dest)).then(include => { - if (include) return onInclude(destStat, src, dest, opts, cb) - return cb() - }, error => cb(error)) -} - -function startCopy (destStat, src, dest, opts, cb) { - if (opts.filter) return handleFilter(getStats, destStat, src, dest, opts, cb) - return getStats(destStat, src, dest, opts, cb) -} - -function getStats (destStat, src, dest, opts, cb) { - const stat = opts.dereference ? 
fs.stat : fs.lstat - stat(src, (err, srcStat) => { - if (err) return cb(err) - - if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts, cb) - else if (srcStat.isFile() || - srcStat.isCharacterDevice() || - srcStat.isBlockDevice()) return onFile(srcStat, destStat, src, dest, opts, cb) - else if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts, cb) - else if (srcStat.isSocket()) return cb(new Error(`Cannot copy a socket file: ${src}`)) - else if (srcStat.isFIFO()) return cb(new Error(`Cannot copy a FIFO pipe: ${src}`)) - return cb(new Error(`Unknown file: ${src}`)) - }) -} - -function onFile (srcStat, destStat, src, dest, opts, cb) { - if (!destStat) return copyFile(srcStat, src, dest, opts, cb) - return mayCopyFile(srcStat, src, dest, opts, cb) -} - -function mayCopyFile (srcStat, src, dest, opts, cb) { - if (opts.overwrite) { - fs.unlink(dest, err => { - if (err) return cb(err) - return copyFile(srcStat, src, dest, opts, cb) - }) - } else if (opts.errorOnExist) { - return cb(new Error(`'${dest}' already exists`)) - } else return cb() -} - -function copyFile (srcStat, src, dest, opts, cb) { - fs.copyFile(src, dest, err => { - if (err) return cb(err) - if (opts.preserveTimestamps) return handleTimestampsAndMode(srcStat.mode, src, dest, cb) - return setDestMode(dest, srcStat.mode, cb) - }) -} - -function handleTimestampsAndMode (srcMode, src, dest, cb) { - // Make sure the file is writable before setting the timestamp - // otherwise open fails with EPERM when invoked with 'r+' - // (through utimes call) - if (fileIsNotWritable(srcMode)) { - return makeFileWritable(dest, srcMode, err => { - if (err) return cb(err) - return setDestTimestampsAndMode(srcMode, src, dest, cb) - }) - } - return setDestTimestampsAndMode(srcMode, src, dest, cb) -} - -function fileIsNotWritable (srcMode) { - return (srcMode & 0o200) === 0 -} - -function makeFileWritable (dest, srcMode, cb) { - return setDestMode(dest, srcMode | 0o200, cb) -} - -function setDestTimestampsAndMode (srcMode, src, dest, cb) { - setDestTimestamps(src, dest, err => { - if (err) return cb(err) - return setDestMode(dest, srcMode, cb) - }) -} - -function setDestMode (dest, srcMode, cb) { - return fs.chmod(dest, srcMode, cb) -} - -function setDestTimestamps (src, dest, cb) { - // The initial srcStat.atime cannot be trusted - // because it is modified by the read(2) system call - // (See https://nodejs.org/api/fs.html#fs_stat_time_values) - fs.stat(src, (err, updatedSrcStat) => { - if (err) return cb(err) - return utimesMillis(dest, updatedSrcStat.atime, updatedSrcStat.mtime, cb) - }) -} - -function onDir (srcStat, destStat, src, dest, opts, cb) { - if (!destStat) return mkDirAndCopy(srcStat.mode, src, dest, opts, cb) - return copyDir(src, dest, opts, cb) -} - -function mkDirAndCopy (srcMode, src, dest, opts, cb) { - fs.mkdir(dest, err => { - if (err) return cb(err) - copyDir(src, dest, opts, err => { - if (err) return cb(err) - return setDestMode(dest, srcMode, cb) - }) - }) -} - -function copyDir (src, dest, opts, cb) { - fs.readdir(src, (err, items) => { - if (err) return cb(err) - return copyDirItems(items, src, dest, opts, cb) - }) -} - -function copyDirItems (items, src, dest, opts, cb) { - const item = items.pop() - if (!item) return cb() - return copyDirItem(items, item, src, dest, opts, cb) -} - -function copyDirItem (items, item, src, dest, opts, cb) { - const srcItem = path.join(src, item) - const destItem = path.join(dest, item) - stat.checkPaths(srcItem, destItem, 'copy', opts, (err, stats) => { 
- if (err) return cb(err) - const { destStat } = stats - startCopy(destStat, srcItem, destItem, opts, err => { - if (err) return cb(err) - return copyDirItems(items, src, dest, opts, cb) - }) - }) -} - -function onLink (destStat, src, dest, opts, cb) { - fs.readlink(src, (err, resolvedSrc) => { - if (err) return cb(err) - if (opts.dereference) { - resolvedSrc = path.resolve(process.cwd(), resolvedSrc) - } - - if (!destStat) { - return fs.symlink(resolvedSrc, dest, cb) - } else { - fs.readlink(dest, (err, resolvedDest) => { - if (err) { - // dest exists and is a regular file or directory, - // Windows may throw UNKNOWN error. If dest already exists, - // fs throws error anyway, so no need to guard against it here. - if (err.code === 'EINVAL' || err.code === 'UNKNOWN') return fs.symlink(resolvedSrc, dest, cb) - return cb(err) - } - if (opts.dereference) { - resolvedDest = path.resolve(process.cwd(), resolvedDest) - } - if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) { - return cb(new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`)) - } - - // do not copy if src is a subdir of dest since unlinking - // dest in this case would result in removing src contents - // and therefore a broken symlink would be created. - if (destStat.isDirectory() && stat.isSrcSubdir(resolvedDest, resolvedSrc)) { - return cb(new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`)) - } - return copyLink(resolvedSrc, dest, cb) - }) - } - }) -} - -function copyLink (resolvedSrc, dest, cb) { - fs.unlink(dest, err => { - if (err) return cb(err) - return fs.symlink(resolvedSrc, dest, cb) - }) -} - -module.exports = copy - - -/***/ }), - -/***/ 1335: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const u = __nccwpck_require__(9046).fromCallback -module.exports = { - copy: u(__nccwpck_require__(8834)), - copySync: __nccwpck_require__(9618) -} - - -/***/ }), - -/***/ 6970: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const u = __nccwpck_require__(9046).fromPromise -const fs = __nccwpck_require__(1176) -const path = __nccwpck_require__(5622) -const mkdir = __nccwpck_require__(2915) -const remove = __nccwpck_require__(7357) - -const emptyDir = u(async function emptyDir (dir) { - let items - try { - items = await fs.readdir(dir) - } catch { - return mkdir.mkdirs(dir) - } - - return Promise.all(items.map(item => remove.remove(path.join(dir, item)))) -}) - -function emptyDirSync (dir) { - let items - try { - items = fs.readdirSync(dir) - } catch { - return mkdir.mkdirsSync(dir) - } - - items.forEach(item => { - item = path.join(dir, item) - remove.removeSync(item) - }) -} - -module.exports = { - emptyDirSync, - emptydirSync: emptyDirSync, - emptyDir, - emptydir: emptyDir -} - - -/***/ }), - -/***/ 2164: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const u = __nccwpck_require__(9046).fromCallback -const path = __nccwpck_require__(5622) -const fs = __nccwpck_require__(7758) -const mkdir = __nccwpck_require__(2915) - -function createFile (file, callback) { - function makeFile () { - fs.writeFile(file, '', err => { - if (err) return callback(err) - callback() - }) - } - - fs.stat(file, (err, stats) => { // eslint-disable-line handle-callback-err - if (!err && stats.isFile()) return callback() - const dir = path.dirname(file) - fs.stat(dir, (err, stats) => { - if (err) { - // if the directory doesn't exist, make it - if (err.code === 'ENOENT') { - 
return mkdir.mkdirs(dir, err => { - if (err) return callback(err) - makeFile() - }) - } - return callback(err) - } - - if (stats.isDirectory()) makeFile() - else { - // parent is not a directory - // This is just to cause an internal ENOTDIR error to be thrown - fs.readdir(dir, err => { - if (err) return callback(err) - }) - } - }) - }) -} - -function createFileSync (file) { - let stats - try { - stats = fs.statSync(file) - } catch {} - if (stats && stats.isFile()) return - - const dir = path.dirname(file) - try { - if (!fs.statSync(dir).isDirectory()) { - // parent is not a directory - // This is just to cause an internal ENOTDIR error to be thrown - fs.readdirSync(dir) - } - } catch (err) { - // If the stat call above failed because the directory doesn't exist, create it - if (err && err.code === 'ENOENT') mkdir.mkdirsSync(dir) - else throw err - } - - fs.writeFileSync(file, '') -} - -module.exports = { - createFile: u(createFile), - createFileSync -} - - -/***/ }), - -/***/ 55: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const { createFile, createFileSync } = __nccwpck_require__(2164) -const { createLink, createLinkSync } = __nccwpck_require__(3797) -const { createSymlink, createSymlinkSync } = __nccwpck_require__(2549) - -module.exports = { - // file - createFile, - createFileSync, - ensureFile: createFile, - ensureFileSync: createFileSync, - // link - createLink, - createLinkSync, - ensureLink: createLink, - ensureLinkSync: createLinkSync, - // symlink - createSymlink, - createSymlinkSync, - ensureSymlink: createSymlink, - ensureSymlinkSync: createSymlinkSync -} - - -/***/ }), - -/***/ 3797: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const u = __nccwpck_require__(9046).fromCallback -const path = __nccwpck_require__(5622) -const fs = __nccwpck_require__(7758) -const mkdir = __nccwpck_require__(2915) -const pathExists = __nccwpck_require__(3835).pathExists -const { areIdentical } = __nccwpck_require__(3901) - -function createLink (srcpath, dstpath, callback) { - function makeLink (srcpath, dstpath) { - fs.link(srcpath, dstpath, err => { - if (err) return callback(err) - callback(null) - }) - } - - fs.lstat(dstpath, (_, dstStat) => { - fs.lstat(srcpath, (err, srcStat) => { - if (err) { - err.message = err.message.replace('lstat', 'ensureLink') - return callback(err) - } - if (dstStat && areIdentical(srcStat, dstStat)) return callback(null) - - const dir = path.dirname(dstpath) - pathExists(dir, (err, dirExists) => { - if (err) return callback(err) - if (dirExists) return makeLink(srcpath, dstpath) - mkdir.mkdirs(dir, err => { - if (err) return callback(err) - makeLink(srcpath, dstpath) - }) - }) - }) - }) -} - -function createLinkSync (srcpath, dstpath) { - let dstStat - try { - dstStat = fs.lstatSync(dstpath) - } catch {} - - try { - const srcStat = fs.lstatSync(srcpath) - if (dstStat && areIdentical(srcStat, dstStat)) return - } catch (err) { - err.message = err.message.replace('lstat', 'ensureLink') - throw err - } - - const dir = path.dirname(dstpath) - const dirExists = fs.existsSync(dir) - if (dirExists) return fs.linkSync(srcpath, dstpath) - mkdir.mkdirsSync(dir) - - return fs.linkSync(srcpath, dstpath) -} - -module.exports = { - createLink: u(createLink), - createLinkSync -} - - -/***/ }), - -/***/ 3727: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const path = __nccwpck_require__(5622) -const fs = __nccwpck_require__(7758) -const pathExists 
= __nccwpck_require__(3835).pathExists - -/** - * Function that returns two types of paths, one relative to symlink, and one - * relative to the current working directory. Checks if path is absolute or - * relative. If the path is relative, this function checks if the path is - * relative to symlink or relative to current working directory. This is an - * initiative to find a smarter `srcpath` to supply when building symlinks. - * This allows you to determine which path to use out of one of three possible - * types of source paths. The first is an absolute path. This is detected by - * `path.isAbsolute()`. When an absolute path is provided, it is checked to - * see if it exists. If it does it's used, if not an error is returned - * (callback)/ thrown (sync). The other two options for `srcpath` are a - * relative url. By default Node's `fs.symlink` works by creating a symlink - * using `dstpath` and expects the `srcpath` to be relative to the newly - * created symlink. If you provide a `srcpath` that does not exist on the file - * system it results in a broken symlink. To minimize this, the function - * checks to see if the 'relative to symlink' source file exists, and if it - * does it will use it. If it does not, it checks if there's a file that - * exists that is relative to the current working directory, if does its used. - * This preserves the expectations of the original fs.symlink spec and adds - * the ability to pass in `relative to current working direcotry` paths. - */ - -function symlinkPaths (srcpath, dstpath, callback) { - if (path.isAbsolute(srcpath)) { - return fs.lstat(srcpath, (err) => { - if (err) { - err.message = err.message.replace('lstat', 'ensureSymlink') - return callback(err) - } - return callback(null, { - toCwd: srcpath, - toDst: srcpath - }) - }) - } else { - const dstdir = path.dirname(dstpath) - const relativeToDst = path.join(dstdir, srcpath) - return pathExists(relativeToDst, (err, exists) => { - if (err) return callback(err) - if (exists) { - return callback(null, { - toCwd: relativeToDst, - toDst: srcpath - }) - } else { - return fs.lstat(srcpath, (err) => { - if (err) { - err.message = err.message.replace('lstat', 'ensureSymlink') - return callback(err) - } - return callback(null, { - toCwd: srcpath, - toDst: path.relative(dstdir, srcpath) - }) - }) - } - }) - } -} - -function symlinkPathsSync (srcpath, dstpath) { - let exists - if (path.isAbsolute(srcpath)) { - exists = fs.existsSync(srcpath) - if (!exists) throw new Error('absolute srcpath does not exist') - return { - toCwd: srcpath, - toDst: srcpath - } - } else { - const dstdir = path.dirname(dstpath) - const relativeToDst = path.join(dstdir, srcpath) - exists = fs.existsSync(relativeToDst) - if (exists) { - return { - toCwd: relativeToDst, - toDst: srcpath - } - } else { - exists = fs.existsSync(srcpath) - if (!exists) throw new Error('relative srcpath does not exist') - return { - toCwd: srcpath, - toDst: path.relative(dstdir, srcpath) - } - } - } -} - -module.exports = { - symlinkPaths, - symlinkPathsSync -} - - -/***/ }), - -/***/ 8254: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(7758) - -function symlinkType (srcpath, type, callback) { - callback = (typeof type === 'function') ? type : callback - type = (typeof type === 'function') ? false : type - if (type) return callback(null, type) - fs.lstat(srcpath, (err, stats) => { - if (err) return callback(null, 'file') - type = (stats && stats.isDirectory()) ? 
'dir' : 'file' - callback(null, type) - }) -} - -function symlinkTypeSync (srcpath, type) { - let stats - - if (type) return type - try { - stats = fs.lstatSync(srcpath) - } catch { - return 'file' - } - return (stats && stats.isDirectory()) ? 'dir' : 'file' -} - -module.exports = { - symlinkType, - symlinkTypeSync -} - - -/***/ }), - -/***/ 2549: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const u = __nccwpck_require__(9046).fromCallback -const path = __nccwpck_require__(5622) -const fs = __nccwpck_require__(1176) -const _mkdirs = __nccwpck_require__(2915) -const mkdirs = _mkdirs.mkdirs -const mkdirsSync = _mkdirs.mkdirsSync - -const _symlinkPaths = __nccwpck_require__(3727) -const symlinkPaths = _symlinkPaths.symlinkPaths -const symlinkPathsSync = _symlinkPaths.symlinkPathsSync - -const _symlinkType = __nccwpck_require__(8254) -const symlinkType = _symlinkType.symlinkType -const symlinkTypeSync = _symlinkType.symlinkTypeSync - -const pathExists = __nccwpck_require__(3835).pathExists - -const { areIdentical } = __nccwpck_require__(3901) - -function createSymlink (srcpath, dstpath, type, callback) { - callback = (typeof type === 'function') ? type : callback - type = (typeof type === 'function') ? false : type - - fs.lstat(dstpath, (err, stats) => { - if (!err && stats.isSymbolicLink()) { - Promise.all([ - fs.stat(srcpath), - fs.stat(dstpath) - ]).then(([srcStat, dstStat]) => { - if (areIdentical(srcStat, dstStat)) return callback(null) - _createSymlink(srcpath, dstpath, type, callback) - }) - } else _createSymlink(srcpath, dstpath, type, callback) - }) -} - -function _createSymlink (srcpath, dstpath, type, callback) { - symlinkPaths(srcpath, dstpath, (err, relative) => { - if (err) return callback(err) - srcpath = relative.toDst - symlinkType(relative.toCwd, type, (err, type) => { - if (err) return callback(err) - const dir = path.dirname(dstpath) - pathExists(dir, (err, dirExists) => { - if (err) return callback(err) - if (dirExists) return fs.symlink(srcpath, dstpath, type, callback) - mkdirs(dir, err => { - if (err) return callback(err) - fs.symlink(srcpath, dstpath, type, callback) - }) - }) - }) - }) -} - -function createSymlinkSync (srcpath, dstpath, type) { - let stats - try { - stats = fs.lstatSync(dstpath) - } catch {} - if (stats && stats.isSymbolicLink()) { - const srcStat = fs.statSync(srcpath) - const dstStat = fs.statSync(dstpath) - if (areIdentical(srcStat, dstStat)) return - } - - const relative = symlinkPathsSync(srcpath, dstpath) - srcpath = relative.toDst - type = symlinkTypeSync(relative.toCwd, type) - const dir = path.dirname(dstpath) - const exists = fs.existsSync(dir) - if (exists) return fs.symlinkSync(srcpath, dstpath, type) - mkdirsSync(dir) - return fs.symlinkSync(srcpath, dstpath, type) -} - -module.exports = { - createSymlink: u(createSymlink), - createSymlinkSync -} - - -/***/ }), - -/***/ 1176: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// This is adapted from https://github.com/normalize/mz -// Copyright (c) 2014-2016 Jonathan Ong me@jongleberry.com and Contributors -const u = __nccwpck_require__(9046).fromCallback -const fs = __nccwpck_require__(7758) - -const api = [ - 'access', - 'appendFile', - 'chmod', - 'chown', - 'close', - 'copyFile', - 'fchmod', - 'fchown', - 'fdatasync', - 'fstat', - 'fsync', - 'ftruncate', - 'futimes', - 'lchmod', - 'lchown', - 'link', - 'lstat', - 'mkdir', - 'mkdtemp', - 'open', - 'opendir', - 'readdir', - 'readFile', - 'readlink', - 
'realpath', - 'rename', - 'rm', - 'rmdir', - 'stat', - 'symlink', - 'truncate', - 'unlink', - 'utimes', - 'writeFile' -].filter(key => { - // Some commands are not available on some systems. Ex: - // fs.opendir was added in Node.js v12.12.0 - // fs.rm was added in Node.js v14.14.0 - // fs.lchown is not available on at least some Linux - return typeof fs[key] === 'function' -}) - -// Export cloned fs: -Object.assign(exports, fs) - -// Universalify async methods: -api.forEach(method => { - exports[method] = u(fs[method]) -}) - -// We differ from mz/fs in that we still ship the old, broken, fs.exists() -// since we are a drop-in replacement for the native module -exports.exists = function (filename, callback) { - if (typeof callback === 'function') { - return fs.exists(filename, callback) - } - return new Promise(resolve => { - return fs.exists(filename, resolve) - }) -} - -// fs.read(), fs.write(), & fs.writev() need special treatment due to multiple callback args - -exports.read = function (fd, buffer, offset, length, position, callback) { - if (typeof callback === 'function') { - return fs.read(fd, buffer, offset, length, position, callback) - } - return new Promise((resolve, reject) => { - fs.read(fd, buffer, offset, length, position, (err, bytesRead, buffer) => { - if (err) return reject(err) - resolve({ bytesRead, buffer }) - }) - }) -} - -// Function signature can be -// fs.write(fd, buffer[, offset[, length[, position]]], callback) -// OR -// fs.write(fd, string[, position[, encoding]], callback) -// We need to handle both cases, so we use ...args -exports.write = function (fd, buffer, ...args) { - if (typeof args[args.length - 1] === 'function') { - return fs.write(fd, buffer, ...args) - } - - return new Promise((resolve, reject) => { - fs.write(fd, buffer, ...args, (err, bytesWritten, buffer) => { - if (err) return reject(err) - resolve({ bytesWritten, buffer }) - }) - }) -} - -// fs.writev only available in Node v12.9.0+ -if (typeof fs.writev === 'function') { - // Function signature is - // s.writev(fd, buffers[, position], callback) - // We need to handle the optional arg, so we use ...args - exports.writev = function (fd, buffers, ...args) { - if (typeof args[args.length - 1] === 'function') { - return fs.writev(fd, buffers, ...args) - } - - return new Promise((resolve, reject) => { - fs.writev(fd, buffers, ...args, (err, bytesWritten, buffers) => { - if (err) return reject(err) - resolve({ bytesWritten, buffers }) - }) - }) - } -} - -// fs.realpath.native sometimes not available if fs is monkey-patched -if (typeof fs.realpath.native === 'function') { - exports.realpath.native = u(fs.realpath.native) -} else { - process.emitWarning( - 'fs.realpath.native is not a function. 
Is fs being monkey-patched?', - 'Warning', 'fs-extra-WARN0003' - ) -} - - -/***/ }), - -/***/ 5630: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -module.exports = { - // Export promiseified graceful-fs: - ...__nccwpck_require__(1176), - // Export extra methods: - ...__nccwpck_require__(1335), - ...__nccwpck_require__(6970), - ...__nccwpck_require__(55), - ...__nccwpck_require__(213), - ...__nccwpck_require__(2915), - ...__nccwpck_require__(1497), - ...__nccwpck_require__(1832), - ...__nccwpck_require__(3835), - ...__nccwpck_require__(7357) -} - - -/***/ }), - -/***/ 213: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const u = __nccwpck_require__(9046).fromPromise -const jsonFile = __nccwpck_require__(8970) - -jsonFile.outputJson = u(__nccwpck_require__(531)) -jsonFile.outputJsonSync = __nccwpck_require__(9421) -// aliases -jsonFile.outputJSON = jsonFile.outputJson -jsonFile.outputJSONSync = jsonFile.outputJsonSync -jsonFile.writeJSON = jsonFile.writeJson -jsonFile.writeJSONSync = jsonFile.writeJsonSync -jsonFile.readJSON = jsonFile.readJson -jsonFile.readJSONSync = jsonFile.readJsonSync - -module.exports = jsonFile - - -/***/ }), - -/***/ 8970: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const jsonFile = __nccwpck_require__(6160) - -module.exports = { - // jsonfile exports - readJson: jsonFile.readFile, - readJsonSync: jsonFile.readFileSync, - writeJson: jsonFile.writeFile, - writeJsonSync: jsonFile.writeFileSync -} - - -/***/ }), - -/***/ 9421: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const { stringify } = __nccwpck_require__(5902) -const { outputFileSync } = __nccwpck_require__(1832) - -function outputJsonSync (file, data, options) { - const str = stringify(data, options) - - outputFileSync(file, str, options) -} - -module.exports = outputJsonSync - - -/***/ }), - -/***/ 531: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const { stringify } = __nccwpck_require__(5902) -const { outputFile } = __nccwpck_require__(1832) - -async function outputJson (file, data, options = {}) { - const str = stringify(data, options) - - await outputFile(file, str, options) -} - -module.exports = outputJson - - -/***/ }), - -/***/ 2915: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - -const u = __nccwpck_require__(9046).fromPromise -const { makeDir: _makeDir, makeDirSync } = __nccwpck_require__(2751) -const makeDir = u(_makeDir) - -module.exports = { - mkdirs: makeDir, - mkdirsSync: makeDirSync, - // alias - mkdirp: makeDir, - mkdirpSync: makeDirSync, - ensureDir: makeDir, - ensureDirSync: makeDirSync -} - - -/***/ }), - -/***/ 2751: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - -const fs = __nccwpck_require__(1176) -const { checkPath } = __nccwpck_require__(9907) - -const getMode = options => { - const defaults = { mode: 0o777 } - if (typeof options === 'number') return options - return ({ ...defaults, ...options }).mode -} - -module.exports.makeDir = async (dir, options) => { - checkPath(dir) - - return fs.mkdir(dir, { - mode: getMode(options), - recursive: true - }) -} - -module.exports.makeDirSync = (dir, options) => { - checkPath(dir) - - return fs.mkdirSync(dir, { - mode: getMode(options), - recursive: true - }) -} - - -/***/ }), - -/***/ 9907: -/***/ ((module, __unused_webpack_exports, 
__nccwpck_require__) => { - -"use strict"; -// Adapted from https://github.com/sindresorhus/make-dir -// Copyright (c) Sindre Sorhus (sindresorhus.com) -// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: -// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -const path = __nccwpck_require__(5622) - -// https://github.com/nodejs/node/issues/8987 -// https://github.com/libuv/libuv/pull/1088 -module.exports.checkPath = function checkPath (pth) { - if (process.platform === 'win32') { - const pathHasInvalidWinCharacters = /[<>:"|?*]/.test(pth.replace(path.parse(pth).root, '')) - - if (pathHasInvalidWinCharacters) { - const error = new Error(`Path contains invalid characters: ${pth}`) - error.code = 'EINVAL' - throw error - } - } -} - - -/***/ }), - -/***/ 1497: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const u = __nccwpck_require__(9046).fromCallback -module.exports = { - move: u(__nccwpck_require__(2231)), - moveSync: __nccwpck_require__(2047) -} - - -/***/ }), - -/***/ 2047: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(7758) -const path = __nccwpck_require__(5622) -const copySync = __nccwpck_require__(1335).copySync -const removeSync = __nccwpck_require__(7357).removeSync -const mkdirpSync = __nccwpck_require__(2915).mkdirpSync -const stat = __nccwpck_require__(3901) - -function moveSync (src, dest, opts) { - opts = opts || {} - const overwrite = opts.overwrite || opts.clobber || false - - const { srcStat, isChangingCase = false } = stat.checkPathsSync(src, dest, 'move', opts) - stat.checkParentPathsSync(src, srcStat, dest, 'move') - if (!isParentRoot(dest)) mkdirpSync(path.dirname(dest)) - return doRename(src, dest, overwrite, isChangingCase) -} - -function isParentRoot (dest) { - const parent = path.dirname(dest) - const parsedPath = path.parse(parent) - return parsedPath.root === parent -} - -function doRename (src, dest, overwrite, isChangingCase) { - if (isChangingCase) return rename(src, dest, overwrite) - if (overwrite) { - removeSync(dest) - return rename(src, dest, overwrite) - } - if (fs.existsSync(dest)) throw new Error('dest already exists.') - return rename(src, dest, overwrite) -} - -function rename (src, dest, overwrite) { - try { - fs.renameSync(src, dest) - } catch (err) { - if (err.code !== 'EXDEV') throw err - return moveAcrossDevice(src, dest, overwrite) - } -} - -function moveAcrossDevice (src, dest, overwrite) { - const opts = { - overwrite, - errorOnExist: true - } - copySync(src, dest, opts) - return removeSync(src) -} - 
-module.exports = moveSync - - -/***/ }), - -/***/ 2231: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(7758) -const path = __nccwpck_require__(5622) -const copy = __nccwpck_require__(1335).copy -const remove = __nccwpck_require__(7357).remove -const mkdirp = __nccwpck_require__(2915).mkdirp -const pathExists = __nccwpck_require__(3835).pathExists -const stat = __nccwpck_require__(3901) - -function move (src, dest, opts, cb) { - if (typeof opts === 'function') { - cb = opts - opts = {} - } - - opts = opts || {} - - const overwrite = opts.overwrite || opts.clobber || false - - stat.checkPaths(src, dest, 'move', opts, (err, stats) => { - if (err) return cb(err) - const { srcStat, isChangingCase = false } = stats - stat.checkParentPaths(src, srcStat, dest, 'move', err => { - if (err) return cb(err) - if (isParentRoot(dest)) return doRename(src, dest, overwrite, isChangingCase, cb) - mkdirp(path.dirname(dest), err => { - if (err) return cb(err) - return doRename(src, dest, overwrite, isChangingCase, cb) - }) - }) - }) -} - -function isParentRoot (dest) { - const parent = path.dirname(dest) - const parsedPath = path.parse(parent) - return parsedPath.root === parent -} - -function doRename (src, dest, overwrite, isChangingCase, cb) { - if (isChangingCase) return rename(src, dest, overwrite, cb) - if (overwrite) { - return remove(dest, err => { - if (err) return cb(err) - return rename(src, dest, overwrite, cb) - }) - } - pathExists(dest, (err, destExists) => { - if (err) return cb(err) - if (destExists) return cb(new Error('dest already exists.')) - return rename(src, dest, overwrite, cb) - }) -} - -function rename (src, dest, overwrite, cb) { - fs.rename(src, dest, err => { - if (!err) return cb() - if (err.code !== 'EXDEV') return cb(err) - return moveAcrossDevice(src, dest, overwrite, cb) - }) -} - -function moveAcrossDevice (src, dest, overwrite, cb) { - const opts = { - overwrite, - errorOnExist: true - } - copy(src, dest, opts, err => { - if (err) return cb(err) - return remove(src, cb) - }) -} - -module.exports = move - - -/***/ }), - -/***/ 1832: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const u = __nccwpck_require__(9046).fromCallback -const fs = __nccwpck_require__(7758) -const path = __nccwpck_require__(5622) -const mkdir = __nccwpck_require__(2915) -const pathExists = __nccwpck_require__(3835).pathExists - -function outputFile (file, data, encoding, callback) { - if (typeof encoding === 'function') { - callback = encoding - encoding = 'utf8' - } - - const dir = path.dirname(file) - pathExists(dir, (err, itDoes) => { - if (err) return callback(err) - if (itDoes) return fs.writeFile(file, data, encoding, callback) - - mkdir.mkdirs(dir, err => { - if (err) return callback(err) - - fs.writeFile(file, data, encoding, callback) - }) - }) -} - -function outputFileSync (file, ...args) { - const dir = path.dirname(file) - if (fs.existsSync(dir)) { - return fs.writeFileSync(file, ...args) - } - mkdir.mkdirsSync(dir) - fs.writeFileSync(file, ...args) -} - -module.exports = { - outputFile: u(outputFile), - outputFileSync -} - - -/***/ }), - -/***/ 3835: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - -const u = __nccwpck_require__(9046).fromPromise -const fs = __nccwpck_require__(1176) - -function pathExists (path) { - return fs.access(path).then(() => true).catch(() => false) -} - -module.exports = { - pathExists: u(pathExists), - 
pathExistsSync: fs.existsSync -} - - -/***/ }), - -/***/ 7357: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(7758) -const u = __nccwpck_require__(9046).fromCallback -const rimraf = __nccwpck_require__(7247) - -function remove (path, callback) { - // Node 14.14.0+ - if (fs.rm) return fs.rm(path, { recursive: true, force: true }, callback) - rimraf(path, callback) -} - -function removeSync (path) { - // Node 14.14.0+ - if (fs.rmSync) return fs.rmSync(path, { recursive: true, force: true }) - rimraf.sync(path) -} - -module.exports = { - remove: u(remove), - removeSync -} - - -/***/ }), - -/***/ 7247: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(7758) -const path = __nccwpck_require__(5622) -const assert = __nccwpck_require__(2357) - -const isWindows = (process.platform === 'win32') - -function defaults (options) { - const methods = [ - 'unlink', - 'chmod', - 'stat', - 'lstat', - 'rmdir', - 'readdir' - ] - methods.forEach(m => { - options[m] = options[m] || fs[m] - m = m + 'Sync' - options[m] = options[m] || fs[m] - }) - - options.maxBusyTries = options.maxBusyTries || 3 -} - -function rimraf (p, options, cb) { - let busyTries = 0 - - if (typeof options === 'function') { - cb = options - options = {} - } - - assert(p, 'rimraf: missing path') - assert.strictEqual(typeof p, 'string', 'rimraf: path should be a string') - assert.strictEqual(typeof cb, 'function', 'rimraf: callback function required') - assert(options, 'rimraf: invalid options argument provided') - assert.strictEqual(typeof options, 'object', 'rimraf: options should be object') - - defaults(options) - - rimraf_(p, options, function CB (er) { - if (er) { - if ((er.code === 'EBUSY' || er.code === 'ENOTEMPTY' || er.code === 'EPERM') && - busyTries < options.maxBusyTries) { - busyTries++ - const time = busyTries * 100 - // try again, with the same exact callback as this one. - return setTimeout(() => rimraf_(p, options, CB), time) - } - - // already gone - if (er.code === 'ENOENT') er = null - } - - cb(er) - }) -} - -// Two possible strategies. -// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR -// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR -// -// Both result in an extra syscall when you guess wrong. However, there -// are likely far more normal files in the world than directories. This -// is based on the assumption that a the average number of files per -// directory is >= 1. -// -// If anyone ever complains about this, then I guess the strategy could -// be made configurable somehow. But until then, YAGNI. -function rimraf_ (p, options, cb) { - assert(p) - assert(options) - assert(typeof cb === 'function') - - // sunos lets the root user unlink directories, which is... weird. - // so we have to lstat here and make sure it's not a dir. - options.lstat(p, (er, st) => { - if (er && er.code === 'ENOENT') { - return cb(null) - } - - // Windows can EPERM on stat. Life is suffering. - if (er && er.code === 'EPERM' && isWindows) { - return fixWinEPERM(p, options, er, cb) - } - - if (st && st.isDirectory()) { - return rmdir(p, options, er, cb) - } - - options.unlink(p, er => { - if (er) { - if (er.code === 'ENOENT') { - return cb(null) - } - if (er.code === 'EPERM') { - return (isWindows) - ? 
fixWinEPERM(p, options, er, cb) - : rmdir(p, options, er, cb) - } - if (er.code === 'EISDIR') { - return rmdir(p, options, er, cb) - } - } - return cb(er) - }) - }) -} - -function fixWinEPERM (p, options, er, cb) { - assert(p) - assert(options) - assert(typeof cb === 'function') - - options.chmod(p, 0o666, er2 => { - if (er2) { - cb(er2.code === 'ENOENT' ? null : er) - } else { - options.stat(p, (er3, stats) => { - if (er3) { - cb(er3.code === 'ENOENT' ? null : er) - } else if (stats.isDirectory()) { - rmdir(p, options, er, cb) - } else { - options.unlink(p, cb) - } - }) - } - }) -} - -function fixWinEPERMSync (p, options, er) { - let stats - - assert(p) - assert(options) - - try { - options.chmodSync(p, 0o666) - } catch (er2) { - if (er2.code === 'ENOENT') { - return - } else { - throw er - } - } - - try { - stats = options.statSync(p) - } catch (er3) { - if (er3.code === 'ENOENT') { - return - } else { - throw er - } - } - - if (stats.isDirectory()) { - rmdirSync(p, options, er) - } else { - options.unlinkSync(p) - } -} - -function rmdir (p, options, originalEr, cb) { - assert(p) - assert(options) - assert(typeof cb === 'function') - - // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS) - // if we guessed wrong, and it's not a directory, then - // raise the original error. - options.rmdir(p, er => { - if (er && (er.code === 'ENOTEMPTY' || er.code === 'EEXIST' || er.code === 'EPERM')) { - rmkids(p, options, cb) - } else if (er && er.code === 'ENOTDIR') { - cb(originalEr) - } else { - cb(er) - } - }) -} - -function rmkids (p, options, cb) { - assert(p) - assert(options) - assert(typeof cb === 'function') - - options.readdir(p, (er, files) => { - if (er) return cb(er) - - let n = files.length - let errState - - if (n === 0) return options.rmdir(p, cb) - - files.forEach(f => { - rimraf(path.join(p, f), options, er => { - if (errState) { - return - } - if (er) return cb(errState = er) - if (--n === 0) { - options.rmdir(p, cb) - } - }) - }) - }) -} - -// this looks simpler, and is strictly *faster*, but will -// tie up the JavaScript thread and fail on excessively -// deep directory trees. -function rimrafSync (p, options) { - let st - - options = options || {} - defaults(options) - - assert(p, 'rimraf: missing path') - assert.strictEqual(typeof p, 'string', 'rimraf: path should be a string') - assert(options, 'rimraf: missing options') - assert.strictEqual(typeof options, 'object', 'rimraf: options should be object') - - try { - st = options.lstatSync(p) - } catch (er) { - if (er.code === 'ENOENT') { - return - } - - // Windows can EPERM on stat. Life is suffering. - if (er.code === 'EPERM' && isWindows) { - fixWinEPERMSync(p, options, er) - } - } - - try { - // sunos lets the root user unlink directories, which is... weird. - if (st && st.isDirectory()) { - rmdirSync(p, options, null) - } else { - options.unlinkSync(p) - } - } catch (er) { - if (er.code === 'ENOENT') { - return - } else if (er.code === 'EPERM') { - return isWindows ? 
fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er) - } else if (er.code !== 'EISDIR') { - throw er - } - rmdirSync(p, options, er) - } -} - -function rmdirSync (p, options, originalEr) { - assert(p) - assert(options) - - try { - options.rmdirSync(p) - } catch (er) { - if (er.code === 'ENOTDIR') { - throw originalEr - } else if (er.code === 'ENOTEMPTY' || er.code === 'EEXIST' || er.code === 'EPERM') { - rmkidsSync(p, options) - } else if (er.code !== 'ENOENT') { - throw er - } - } -} - -function rmkidsSync (p, options) { - assert(p) - assert(options) - options.readdirSync(p).forEach(f => rimrafSync(path.join(p, f), options)) - - if (isWindows) { - // We only end up here once we got ENOTEMPTY at least once, and - // at this point, we are guaranteed to have removed all the kids. - // So, we know that it won't be ENOENT or ENOTDIR or anything else. - // try really hard to delete stuff on windows, because it has a - // PROFOUNDLY annoying habit of not closing handles promptly when - // files are deleted, resulting in spurious ENOTEMPTY errors. - const startTime = Date.now() - do { - try { - const ret = options.rmdirSync(p, options) - return ret - } catch {} - } while (Date.now() - startTime < 500) // give up after 500ms - } else { - const ret = options.rmdirSync(p, options) - return ret - } -} - -module.exports = rimraf -rimraf.sync = rimrafSync - - -/***/ }), - -/***/ 3901: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(1176) -const path = __nccwpck_require__(5622) -const util = __nccwpck_require__(1669) - -function getStats (src, dest, opts) { - const statFunc = opts.dereference - ? (file) => fs.stat(file, { bigint: true }) - : (file) => fs.lstat(file, { bigint: true }) - return Promise.all([ - statFunc(src), - statFunc(dest).catch(err => { - if (err.code === 'ENOENT') return null - throw err - }) - ]).then(([srcStat, destStat]) => ({ srcStat, destStat })) -} - -function getStatsSync (src, dest, opts) { - let destStat - const statFunc = opts.dereference - ? 
(file) => fs.statSync(file, { bigint: true }) - : (file) => fs.lstatSync(file, { bigint: true }) - const srcStat = statFunc(src) - try { - destStat = statFunc(dest) - } catch (err) { - if (err.code === 'ENOENT') return { srcStat, destStat: null } - throw err - } - return { srcStat, destStat } -} - -function checkPaths (src, dest, funcName, opts, cb) { - util.callbackify(getStats)(src, dest, opts, (err, stats) => { - if (err) return cb(err) - const { srcStat, destStat } = stats - - if (destStat) { - if (areIdentical(srcStat, destStat)) { - const srcBaseName = path.basename(src) - const destBaseName = path.basename(dest) - if (funcName === 'move' && - srcBaseName !== destBaseName && - srcBaseName.toLowerCase() === destBaseName.toLowerCase()) { - return cb(null, { srcStat, destStat, isChangingCase: true }) - } - return cb(new Error('Source and destination must not be the same.')) - } - if (srcStat.isDirectory() && !destStat.isDirectory()) { - return cb(new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`)) - } - if (!srcStat.isDirectory() && destStat.isDirectory()) { - return cb(new Error(`Cannot overwrite directory '${dest}' with non-directory '${src}'.`)) - } - } - - if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { - return cb(new Error(errMsg(src, dest, funcName))) - } - return cb(null, { srcStat, destStat }) - }) -} - -function checkPathsSync (src, dest, funcName, opts) { - const { srcStat, destStat } = getStatsSync(src, dest, opts) - - if (destStat) { - if (areIdentical(srcStat, destStat)) { - const srcBaseName = path.basename(src) - const destBaseName = path.basename(dest) - if (funcName === 'move' && - srcBaseName !== destBaseName && - srcBaseName.toLowerCase() === destBaseName.toLowerCase()) { - return { srcStat, destStat, isChangingCase: true } - } - throw new Error('Source and destination must not be the same.') - } - if (srcStat.isDirectory() && !destStat.isDirectory()) { - throw new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`) - } - if (!srcStat.isDirectory() && destStat.isDirectory()) { - throw new Error(`Cannot overwrite directory '${dest}' with non-directory '${src}'.`) - } - } - - if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { - throw new Error(errMsg(src, dest, funcName)) - } - return { srcStat, destStat } -} - -// recursively check if dest parent is a subdirectory of src. -// It works for all file types including symlinks since it -// checks the src and dest inodes. It starts from the deepest -// parent and stops once it reaches the src parent or the root path. 
-function checkParentPaths (src, srcStat, dest, funcName, cb) { - const srcParent = path.resolve(path.dirname(src)) - const destParent = path.resolve(path.dirname(dest)) - if (destParent === srcParent || destParent === path.parse(destParent).root) return cb() - fs.stat(destParent, { bigint: true }, (err, destStat) => { - if (err) { - if (err.code === 'ENOENT') return cb() - return cb(err) - } - if (areIdentical(srcStat, destStat)) { - return cb(new Error(errMsg(src, dest, funcName))) - } - return checkParentPaths(src, srcStat, destParent, funcName, cb) - }) -} - -function checkParentPathsSync (src, srcStat, dest, funcName) { - const srcParent = path.resolve(path.dirname(src)) - const destParent = path.resolve(path.dirname(dest)) - if (destParent === srcParent || destParent === path.parse(destParent).root) return - let destStat - try { - destStat = fs.statSync(destParent, { bigint: true }) - } catch (err) { - if (err.code === 'ENOENT') return - throw err - } - if (areIdentical(srcStat, destStat)) { - throw new Error(errMsg(src, dest, funcName)) - } - return checkParentPathsSync(src, srcStat, destParent, funcName) -} - -function areIdentical (srcStat, destStat) { - return destStat.ino && destStat.dev && destStat.ino === srcStat.ino && destStat.dev === srcStat.dev -} - -// return true if dest is a subdir of src, otherwise false. -// It only checks the path strings. -function isSrcSubdir (src, dest) { - const srcArr = path.resolve(src).split(path.sep).filter(i => i) - const destArr = path.resolve(dest).split(path.sep).filter(i => i) - return srcArr.reduce((acc, cur, i) => acc && destArr[i] === cur, true) -} - -function errMsg (src, dest, funcName) { - return `Cannot ${funcName} '${src}' to a subdirectory of itself, '${dest}'.` -} - -module.exports = { - checkPaths, - checkPathsSync, - checkParentPaths, - checkParentPathsSync, - isSrcSubdir, - areIdentical -} - - -/***/ }), - -/***/ 2548: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(7758) - -function utimesMillis (path, atime, mtime, callback) { - // if (!HAS_MILLIS_RES) return fs.utimes(path, atime, mtime, callback) - fs.open(path, 'r+', (err, fd) => { - if (err) return callback(err) - fs.futimes(fd, atime, mtime, futimesErr => { - fs.close(fd, closeErr => { - if (callback) callback(futimesErr || closeErr) - }) - }) - }) -} - -function utimesMillisSync (path, atime, mtime) { - const fd = fs.openSync(path, 'r+') - fs.futimesSync(fd, atime, mtime) - return fs.closeSync(fd) -} - -module.exports = { - utimesMillis, - utimesMillisSync -} - - -/***/ }), - -/***/ 7356: -/***/ ((module) => { - -"use strict"; - - -module.exports = clone - -var getPrototypeOf = Object.getPrototypeOf || function (obj) { - return obj.__proto__ -} - -function clone (obj) { - if (obj === null || typeof obj !== 'object') - return obj - - if (obj instanceof Object) - var copy = { __proto__: getPrototypeOf(obj) } - else - var copy = Object.create(null) - - Object.getOwnPropertyNames(obj).forEach(function (key) { - Object.defineProperty(copy, key, Object.getOwnPropertyDescriptor(obj, key)) - }) - - return copy -} - - -/***/ }), - -/***/ 7758: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var fs = __nccwpck_require__(5747) -var polyfills = __nccwpck_require__(263) -var legacy = __nccwpck_require__(3086) -var clone = __nccwpck_require__(7356) - -var util = __nccwpck_require__(1669) - -/* istanbul ignore next - node 0.x polyfill */ -var gracefulQueue -var previousSymbol - -/* 
istanbul ignore else - node 0.x polyfill */ -if (typeof Symbol === 'function' && typeof Symbol.for === 'function') { - gracefulQueue = Symbol.for('graceful-fs.queue') - // This is used in testing by future versions - previousSymbol = Symbol.for('graceful-fs.previous') -} else { - gracefulQueue = '___graceful-fs.queue' - previousSymbol = '___graceful-fs.previous' -} - -function noop () {} - -function publishQueue(context, queue) { - Object.defineProperty(context, gracefulQueue, { - get: function() { - return queue - } - }) -} - -var debug = noop -if (util.debuglog) - debug = util.debuglog('gfs4') -else if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) - debug = function() { - var m = util.format.apply(util, arguments) - m = 'GFS4: ' + m.split(/\n/).join('\nGFS4: ') - console.error(m) - } - -// Once time initialization -if (!fs[gracefulQueue]) { - // This queue can be shared by multiple loaded instances - var queue = global[gracefulQueue] || [] - publishQueue(fs, queue) - - // Patch fs.close/closeSync to shared queue version, because we need - // to retry() whenever a close happens *anywhere* in the program. - // This is essential when multiple graceful-fs instances are - // in play at the same time. - fs.close = (function (fs$close) { - function close (fd, cb) { - return fs$close.call(fs, fd, function (err) { - // This function uses the graceful-fs shared queue - if (!err) { - resetQueue() - } - - if (typeof cb === 'function') - cb.apply(this, arguments) - }) - } - - Object.defineProperty(close, previousSymbol, { - value: fs$close - }) - return close - })(fs.close) - - fs.closeSync = (function (fs$closeSync) { - function closeSync (fd) { - // This function uses the graceful-fs shared queue - fs$closeSync.apply(fs, arguments) - resetQueue() - } - - Object.defineProperty(closeSync, previousSymbol, { - value: fs$closeSync - }) - return closeSync - })(fs.closeSync) - - if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) { - process.on('exit', function() { - debug(fs[gracefulQueue]) - __nccwpck_require__(2357).equal(fs[gracefulQueue].length, 0) - }) - } -} - -if (!global[gracefulQueue]) { - publishQueue(global, fs[gracefulQueue]); -} - -module.exports = patch(clone(fs)) -if (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !fs.__patched) { - module.exports = patch(fs) - fs.__patched = true; -} - -function patch (fs) { - // Everything that references the open() function needs to be in here - polyfills(fs) - fs.gracefulify = patch - - fs.createReadStream = createReadStream - fs.createWriteStream = createWriteStream - var fs$readFile = fs.readFile - fs.readFile = readFile - function readFile (path, options, cb) { - if (typeof options === 'function') - cb = options, options = null - - return go$readFile(path, options, cb) - - function go$readFile (path, options, cb, startTime) { - return fs$readFile(path, options, function (err) { - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([go$readFile, [path, options, cb], err, startTime || Date.now(), Date.now()]) - else { - if (typeof cb === 'function') - cb.apply(this, arguments) - } - }) - } - } - - var fs$writeFile = fs.writeFile - fs.writeFile = writeFile - function writeFile (path, data, options, cb) { - if (typeof options === 'function') - cb = options, options = null - - return go$writeFile(path, data, options, cb) - - function go$writeFile (path, data, options, cb, startTime) { - return fs$writeFile(path, data, options, function (err) { - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([go$writeFile, 
[path, data, options, cb], err, startTime || Date.now(), Date.now()]) - else { - if (typeof cb === 'function') - cb.apply(this, arguments) - } - }) - } - } - - var fs$appendFile = fs.appendFile - if (fs$appendFile) - fs.appendFile = appendFile - function appendFile (path, data, options, cb) { - if (typeof options === 'function') - cb = options, options = null - - return go$appendFile(path, data, options, cb) - - function go$appendFile (path, data, options, cb, startTime) { - return fs$appendFile(path, data, options, function (err) { - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([go$appendFile, [path, data, options, cb], err, startTime || Date.now(), Date.now()]) - else { - if (typeof cb === 'function') - cb.apply(this, arguments) - } - }) - } - } - - var fs$copyFile = fs.copyFile - if (fs$copyFile) - fs.copyFile = copyFile - function copyFile (src, dest, flags, cb) { - if (typeof flags === 'function') { - cb = flags - flags = 0 - } - return go$copyFile(src, dest, flags, cb) - - function go$copyFile (src, dest, flags, cb, startTime) { - return fs$copyFile(src, dest, flags, function (err) { - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([go$copyFile, [src, dest, flags, cb], err, startTime || Date.now(), Date.now()]) - else { - if (typeof cb === 'function') - cb.apply(this, arguments) - } - }) - } - } - - var fs$readdir = fs.readdir - fs.readdir = readdir - var noReaddirOptionVersions = /^v[0-5]\./ - function readdir (path, options, cb) { - if (typeof options === 'function') - cb = options, options = null - - var go$readdir = noReaddirOptionVersions.test(process.version) - ? function go$readdir (path, options, cb, startTime) { - return fs$readdir(path, fs$readdirCallback( - path, options, cb, startTime - )) - } - : function go$readdir (path, options, cb, startTime) { - return fs$readdir(path, options, fs$readdirCallback( - path, options, cb, startTime - )) - } - - return go$readdir(path, options, cb) - - function fs$readdirCallback (path, options, cb, startTime) { - return function (err, files) { - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([ - go$readdir, - [path, options, cb], - err, - startTime || Date.now(), - Date.now() - ]) - else { - if (files && files.sort) - files.sort() - - if (typeof cb === 'function') - cb.call(this, err, files) - } - } - } - } - - if (process.version.substr(0, 4) === 'v0.8') { - var legStreams = legacy(fs) - ReadStream = legStreams.ReadStream - WriteStream = legStreams.WriteStream - } - - var fs$ReadStream = fs.ReadStream - if (fs$ReadStream) { - ReadStream.prototype = Object.create(fs$ReadStream.prototype) - ReadStream.prototype.open = ReadStream$open - } - - var fs$WriteStream = fs.WriteStream - if (fs$WriteStream) { - WriteStream.prototype = Object.create(fs$WriteStream.prototype) - WriteStream.prototype.open = WriteStream$open - } - - Object.defineProperty(fs, 'ReadStream', { - get: function () { - return ReadStream - }, - set: function (val) { - ReadStream = val - }, - enumerable: true, - configurable: true - }) - Object.defineProperty(fs, 'WriteStream', { - get: function () { - return WriteStream - }, - set: function (val) { - WriteStream = val - }, - enumerable: true, - configurable: true - }) - - // legacy names - var FileReadStream = ReadStream - Object.defineProperty(fs, 'FileReadStream', { - get: function () { - return FileReadStream - }, - set: function (val) { - FileReadStream = val - }, - enumerable: true, - configurable: true - }) - var FileWriteStream = WriteStream - 
Object.defineProperty(fs, 'FileWriteStream', { - get: function () { - return FileWriteStream - }, - set: function (val) { - FileWriteStream = val - }, - enumerable: true, - configurable: true - }) - - function ReadStream (path, options) { - if (this instanceof ReadStream) - return fs$ReadStream.apply(this, arguments), this - else - return ReadStream.apply(Object.create(ReadStream.prototype), arguments) - } - - function ReadStream$open () { - var that = this - open(that.path, that.flags, that.mode, function (err, fd) { - if (err) { - if (that.autoClose) - that.destroy() - - that.emit('error', err) - } else { - that.fd = fd - that.emit('open', fd) - that.read() - } - }) - } - - function WriteStream (path, options) { - if (this instanceof WriteStream) - return fs$WriteStream.apply(this, arguments), this - else - return WriteStream.apply(Object.create(WriteStream.prototype), arguments) - } - - function WriteStream$open () { - var that = this - open(that.path, that.flags, that.mode, function (err, fd) { - if (err) { - that.destroy() - that.emit('error', err) - } else { - that.fd = fd - that.emit('open', fd) - } - }) - } - - function createReadStream (path, options) { - return new fs.ReadStream(path, options) - } - - function createWriteStream (path, options) { - return new fs.WriteStream(path, options) - } - - var fs$open = fs.open - fs.open = open - function open (path, flags, mode, cb) { - if (typeof mode === 'function') - cb = mode, mode = null - - return go$open(path, flags, mode, cb) - - function go$open (path, flags, mode, cb, startTime) { - return fs$open(path, flags, mode, function (err, fd) { - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([go$open, [path, flags, mode, cb], err, startTime || Date.now(), Date.now()]) - else { - if (typeof cb === 'function') - cb.apply(this, arguments) - } - }) - } - } - - return fs -} - -function enqueue (elem) { - debug('ENQUEUE', elem[0].name, elem[1]) - fs[gracefulQueue].push(elem) - retry() -} - -// keep track of the timeout between retry() calls -var retryTimer - -// reset the startTime and lastTime to now -// this resets the start of the 60 second overall timeout as well as the -// delay between attempts so that we'll retry these jobs sooner -function resetQueue () { - var now = Date.now() - for (var i = 0; i < fs[gracefulQueue].length; ++i) { - // entries that are only a length of 2 are from an older version, don't - // bother modifying those since they'll be retried anyway. 
- if (fs[gracefulQueue][i].length > 2) { - fs[gracefulQueue][i][3] = now // startTime - fs[gracefulQueue][i][4] = now // lastTime - } - } - // call retry to make sure we're actively processing the queue - retry() -} - -function retry () { - // clear the timer and remove it to help prevent unintended concurrency - clearTimeout(retryTimer) - retryTimer = undefined - - if (fs[gracefulQueue].length === 0) - return - - var elem = fs[gracefulQueue].shift() - var fn = elem[0] - var args = elem[1] - // these items may be unset if they were added by an older graceful-fs - var err = elem[2] - var startTime = elem[3] - var lastTime = elem[4] - - // if we don't have a startTime we have no way of knowing if we've waited - // long enough, so go ahead and retry this item now - if (startTime === undefined) { - debug('RETRY', fn.name, args) - fn.apply(null, args) - } else if (Date.now() - startTime >= 60000) { - // it's been more than 60 seconds total, bail now - debug('TIMEOUT', fn.name, args) - var cb = args.pop() - if (typeof cb === 'function') - cb.call(null, err) - } else { - // the amount of time between the last attempt and right now - var sinceAttempt = Date.now() - lastTime - // the amount of time between when we first tried, and when we last tried - // rounded up to at least 1 - var sinceStart = Math.max(lastTime - startTime, 1) - // backoff. wait longer than the total time we've been retrying, but only - // up to a maximum of 100ms - var desiredDelay = Math.min(sinceStart * 1.2, 100) - // it's been long enough since the last retry, do it again - if (sinceAttempt >= desiredDelay) { - debug('RETRY', fn.name, args) - fn.apply(null, args.concat([startTime])) - } else { - // if we can't do this job yet, push it to the end of the queue - // and let the next iteration check again - fs[gracefulQueue].push(elem) - } - } - - // schedule our next run if one isn't already scheduled - if (retryTimer === undefined) { - retryTimer = setTimeout(retry, 0) - } -} - - -/***/ }), - -/***/ 3086: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var Stream = __nccwpck_require__(2413).Stream - -module.exports = legacy - -function legacy (fs) { - return { - ReadStream: ReadStream, - WriteStream: WriteStream - } - - function ReadStream (path, options) { - if (!(this instanceof ReadStream)) return new ReadStream(path, options); - - Stream.call(this); - - var self = this; - - this.path = path; - this.fd = null; - this.readable = true; - this.paused = false; - - this.flags = 'r'; - this.mode = 438; /*=0666*/ - this.bufferSize = 64 * 1024; - - options = options || {}; - - // Mixin options into this - var keys = Object.keys(options); - for (var index = 0, length = keys.length; index < length; index++) { - var key = keys[index]; - this[key] = options[key]; - } - - if (this.encoding) this.setEncoding(this.encoding); - - if (this.start !== undefined) { - if ('number' !== typeof this.start) { - throw TypeError('start must be a Number'); - } - if (this.end === undefined) { - this.end = Infinity; - } else if ('number' !== typeof this.end) { - throw TypeError('end must be a Number'); - } - - if (this.start > this.end) { - throw new Error('start must be <= end'); - } - - this.pos = this.start; - } - - if (this.fd !== null) { - process.nextTick(function() { - self._read(); - }); - return; - } - - fs.open(this.path, this.flags, this.mode, function (err, fd) { - if (err) { - self.emit('error', err); - self.readable = false; - return; - } - - self.fd = fd; - self.emit('open', fd); - self._read(); - }) - } - - 
function WriteStream (path, options) { - if (!(this instanceof WriteStream)) return new WriteStream(path, options); - - Stream.call(this); - - this.path = path; - this.fd = null; - this.writable = true; - - this.flags = 'w'; - this.encoding = 'binary'; - this.mode = 438; /*=0666*/ - this.bytesWritten = 0; - - options = options || {}; - - // Mixin options into this - var keys = Object.keys(options); - for (var index = 0, length = keys.length; index < length; index++) { - var key = keys[index]; - this[key] = options[key]; - } - - if (this.start !== undefined) { - if ('number' !== typeof this.start) { - throw TypeError('start must be a Number'); - } - if (this.start < 0) { - throw new Error('start must be >= zero'); - } - - this.pos = this.start; - } - - this.busy = false; - this._queue = []; - - if (this.fd === null) { - this._open = fs.open; - this._queue.push([this._open, this.path, this.flags, this.mode, undefined]); - this.flush(); - } - } -} - - -/***/ }), - -/***/ 263: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var constants = __nccwpck_require__(7619) - -var origCwd = process.cwd -var cwd = null - -var platform = process.env.GRACEFUL_FS_PLATFORM || process.platform - -process.cwd = function() { - if (!cwd) - cwd = origCwd.call(process) - return cwd -} -try { - process.cwd() -} catch (er) {} - -// This check is needed until node.js 12 is required -if (typeof process.chdir === 'function') { - var chdir = process.chdir - process.chdir = function (d) { - cwd = null - chdir.call(process, d) - } - if (Object.setPrototypeOf) Object.setPrototypeOf(process.chdir, chdir) -} - -module.exports = patch - -function patch (fs) { - // (re-)implement some things that are known busted or missing. - - // lchmod, broken prior to 0.6.2 - // back-port the fix here. - if (constants.hasOwnProperty('O_SYMLINK') && - process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) { - patchLchmod(fs) - } - - // lutimes implementation, or no-op - if (!fs.lutimes) { - patchLutimes(fs) - } - - // https://github.com/isaacs/node-graceful-fs/issues/4 - // Chown should not fail on einval or eperm if non-root. - // It should not fail on enosys ever, as this just indicates - // that a fs doesn't support the intended operation. - - fs.chown = chownFix(fs.chown) - fs.fchown = chownFix(fs.fchown) - fs.lchown = chownFix(fs.lchown) - - fs.chmod = chmodFix(fs.chmod) - fs.fchmod = chmodFix(fs.fchmod) - fs.lchmod = chmodFix(fs.lchmod) - - fs.chownSync = chownFixSync(fs.chownSync) - fs.fchownSync = chownFixSync(fs.fchownSync) - fs.lchownSync = chownFixSync(fs.lchownSync) - - fs.chmodSync = chmodFixSync(fs.chmodSync) - fs.fchmodSync = chmodFixSync(fs.fchmodSync) - fs.lchmodSync = chmodFixSync(fs.lchmodSync) - - fs.stat = statFix(fs.stat) - fs.fstat = statFix(fs.fstat) - fs.lstat = statFix(fs.lstat) - - fs.statSync = statFixSync(fs.statSync) - fs.fstatSync = statFixSync(fs.fstatSync) - fs.lstatSync = statFixSync(fs.lstatSync) - - // if lchmod/lchown do not exist, then make them no-ops - if (fs.chmod && !fs.lchmod) { - fs.lchmod = function (path, mode, cb) { - if (cb) process.nextTick(cb) - } - fs.lchmodSync = function () {} - } - if (fs.chown && !fs.lchown) { - fs.lchown = function (path, uid, gid, cb) { - if (cb) process.nextTick(cb) - } - fs.lchownSync = function () {} - } - - // on Windows, A/V software can lock the directory, causing this - // to fail with an EACCES or EPERM if the directory contains newly - // created files. Try again on failure, for up to 60 seconds. 
- - // Set the timeout this long because some Windows Anti-Virus, such as Parity - // bit9, may lock files for up to a minute, causing npm package install - // failures. Also, take care to yield the scheduler. Windows scheduling gives - // CPU to a busy looping process, which can cause the program causing the lock - // contention to be starved of CPU by node, so the contention doesn't resolve. - if (platform === "win32") { - fs.rename = typeof fs.rename !== 'function' ? fs.rename - : (function (fs$rename) { - function rename (from, to, cb) { - var start = Date.now() - var backoff = 0; - fs$rename(from, to, function CB (er) { - if (er - && (er.code === "EACCES" || er.code === "EPERM") - && Date.now() - start < 60000) { - setTimeout(function() { - fs.stat(to, function (stater, st) { - if (stater && stater.code === "ENOENT") - fs$rename(from, to, CB); - else - cb(er) - }) - }, backoff) - if (backoff < 100) - backoff += 10; - return; - } - if (cb) cb(er) - }) - } - if (Object.setPrototypeOf) Object.setPrototypeOf(rename, fs$rename) - return rename - })(fs.rename) - } - - // if read() returns EAGAIN, then just try it again. - fs.read = typeof fs.read !== 'function' ? fs.read - : (function (fs$read) { - function read (fd, buffer, offset, length, position, callback_) { - var callback - if (callback_ && typeof callback_ === 'function') { - var eagCounter = 0 - callback = function (er, _, __) { - if (er && er.code === 'EAGAIN' && eagCounter < 10) { - eagCounter ++ - return fs$read.call(fs, fd, buffer, offset, length, position, callback) - } - callback_.apply(this, arguments) - } - } - return fs$read.call(fs, fd, buffer, offset, length, position, callback) - } - - // This ensures `util.promisify` works as it does for native `fs.read`. - if (Object.setPrototypeOf) Object.setPrototypeOf(read, fs$read) - return read - })(fs.read) - - fs.readSync = typeof fs.readSync !== 'function' ? fs.readSync - : (function (fs$readSync) { return function (fd, buffer, offset, length, position) { - var eagCounter = 0 - while (true) { - try { - return fs$readSync.call(fs, fd, buffer, offset, length, position) - } catch (er) { - if (er.code === 'EAGAIN' && eagCounter < 10) { - eagCounter ++ - continue - } - throw er - } - } - }})(fs.readSync) - - function patchLchmod (fs) { - fs.lchmod = function (path, mode, callback) { - fs.open( path - , constants.O_WRONLY | constants.O_SYMLINK - , mode - , function (err, fd) { - if (err) { - if (callback) callback(err) - return - } - // prefer to return the chmod error, if one occurs, - // but still try to close, and report closing errors if they occur. - fs.fchmod(fd, mode, function (err) { - fs.close(fd, function(err2) { - if (callback) callback(err || err2) - }) - }) - }) - } - - fs.lchmodSync = function (path, mode) { - var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode) - - // prefer to return the chmod error, if one occurs, - // but still try to close, and report closing errors if they occur. 
- var threw = true - var ret - try { - ret = fs.fchmodSync(fd, mode) - threw = false - } finally { - if (threw) { - try { - fs.closeSync(fd) - } catch (er) {} - } else { - fs.closeSync(fd) - } - } - return ret - } - } - - function patchLutimes (fs) { - if (constants.hasOwnProperty("O_SYMLINK") && fs.futimes) { - fs.lutimes = function (path, at, mt, cb) { - fs.open(path, constants.O_SYMLINK, function (er, fd) { - if (er) { - if (cb) cb(er) - return - } - fs.futimes(fd, at, mt, function (er) { - fs.close(fd, function (er2) { - if (cb) cb(er || er2) - }) - }) - }) - } - - fs.lutimesSync = function (path, at, mt) { - var fd = fs.openSync(path, constants.O_SYMLINK) - var ret - var threw = true - try { - ret = fs.futimesSync(fd, at, mt) - threw = false - } finally { - if (threw) { - try { - fs.closeSync(fd) - } catch (er) {} - } else { - fs.closeSync(fd) - } - } - return ret - } - - } else if (fs.futimes) { - fs.lutimes = function (_a, _b, _c, cb) { if (cb) process.nextTick(cb) } - fs.lutimesSync = function () {} - } - } - - function chmodFix (orig) { - if (!orig) return orig - return function (target, mode, cb) { - return orig.call(fs, target, mode, function (er) { - if (chownErOk(er)) er = null - if (cb) cb.apply(this, arguments) - }) - } - } - - function chmodFixSync (orig) { - if (!orig) return orig - return function (target, mode) { - try { - return orig.call(fs, target, mode) - } catch (er) { - if (!chownErOk(er)) throw er - } - } - } - - - function chownFix (orig) { - if (!orig) return orig - return function (target, uid, gid, cb) { - return orig.call(fs, target, uid, gid, function (er) { - if (chownErOk(er)) er = null - if (cb) cb.apply(this, arguments) - }) - } - } - - function chownFixSync (orig) { - if (!orig) return orig - return function (target, uid, gid) { - try { - return orig.call(fs, target, uid, gid) - } catch (er) { - if (!chownErOk(er)) throw er - } - } - } - - function statFix (orig) { - if (!orig) return orig - // Older versions of Node erroneously returned signed integers for - // uid + gid. - return function (target, options, cb) { - if (typeof options === 'function') { - cb = options - options = null - } - function callback (er, stats) { - if (stats) { - if (stats.uid < 0) stats.uid += 0x100000000 - if (stats.gid < 0) stats.gid += 0x100000000 - } - if (cb) cb.apply(this, arguments) - } - return options ? orig.call(fs, target, options, callback) - : orig.call(fs, target, callback) - } - } - - function statFixSync (orig) { - if (!orig) return orig - // Older versions of Node erroneously returned signed integers for - // uid + gid. - return function (target, options) { - var stats = options ? orig.call(fs, target, options) - : orig.call(fs, target) - if (stats) { - if (stats.uid < 0) stats.uid += 0x100000000 - if (stats.gid < 0) stats.gid += 0x100000000 - } - return stats; - } - } - - // ENOSYS means that the fs doesn't support the op. Just ignore - // that, because it doesn't matter. - // - // if there's no getuid, or if getuid() is something other - // than 0, and the error is EINVAL or EPERM, then just ignore - // it. - // - // This specific case is a silent failure in cp, install, tar, - // and most other unix tools that manage permissions. - // - // When running as root, or if other types of errors are - // encountered, then it's strict. 
- function chownErOk (er) { - if (!er) - return true - - if (er.code === "ENOSYS") - return true - - var nonroot = !process.getuid || process.getuid() !== 0 - if (nonroot) { - if (er.code === "EINVAL" || er.code === "EPERM") - return true - } - - return false - } -} - - -/***/ }), - -/***/ 3287: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -/*! - * is-plain-object - * - * Copyright (c) 2014-2017, Jon Schlinkert. - * Released under the MIT License. - */ - -function isObject(o) { - return Object.prototype.toString.call(o) === '[object Object]'; -} - -function isPlainObject(o) { - var ctor,prot; - - if (isObject(o) === false) return false; - - // If has modified constructor - ctor = o.constructor; - if (ctor === undefined) return true; - - // If has modified prototype - prot = ctor.prototype; - if (isObject(prot) === false) return false; - - // If constructor does not have an Object-specific method - if (prot.hasOwnProperty('isPrototypeOf') === false) { - return false; - } - - // Most likely a plain Object - return true; -} - -exports.isPlainObject = isPlainObject; - - -/***/ }), - -/***/ 6160: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -let _fs -try { - _fs = __nccwpck_require__(7758) -} catch (_) { - _fs = __nccwpck_require__(5747) -} -const universalify = __nccwpck_require__(9046) -const { stringify, stripBom } = __nccwpck_require__(5902) - -async function _readFile (file, options = {}) { - if (typeof options === 'string') { - options = { encoding: options } - } - - const fs = options.fs || _fs - - const shouldThrow = 'throws' in options ? options.throws : true - - let data = await universalify.fromCallback(fs.readFile)(file, options) - - data = stripBom(data) - - let obj - try { - obj = JSON.parse(data, options ? options.reviver : null) - } catch (err) { - if (shouldThrow) { - err.message = `${file}: ${err.message}` - throw err - } else { - return null - } - } - - return obj -} - -const readFile = universalify.fromPromise(_readFile) - -function readFileSync (file, options = {}) { - if (typeof options === 'string') { - options = { encoding: options } - } - - const fs = options.fs || _fs - - const shouldThrow = 'throws' in options ? options.throws : true - - try { - let content = fs.readFileSync(file, options) - content = stripBom(content) - return JSON.parse(content, options.reviver) - } catch (err) { - if (shouldThrow) { - err.message = `${file}: ${err.message}` - throw err - } else { - return null - } - } -} - -async function _writeFile (file, obj, options = {}) { - const fs = options.fs || _fs - - const str = stringify(obj, options) - - await universalify.fromCallback(fs.writeFile)(file, str, options) -} - -const writeFile = universalify.fromPromise(_writeFile) - -function writeFileSync (file, obj, options = {}) { - const fs = options.fs || _fs - - const str = stringify(obj, options) - // not sure if fs.writeFileSync returns anything, but just in case - return fs.writeFileSync(file, str, options) -} - -const jsonfile = { - readFile, - readFileSync, - writeFile, - writeFileSync -} - -module.exports = jsonfile - - -/***/ }), - -/***/ 5902: -/***/ ((module) => { - -function stringify (obj, { EOL = '\n', finalEOL = true, replacer = null, spaces } = {}) { - const EOF = finalEOL ? 
EOL : '' - const str = JSON.stringify(obj, replacer, spaces) - - return str.replace(/\n/g, EOL) + EOF -} - -function stripBom (content) { - // we do this because JSON.parse would convert it to a utf8 string if encoding wasn't specified - if (Buffer.isBuffer(content)) content = content.toString('utf8') - return content.replace(/^\uFEFF/, '') -} - -module.exports = { stringify, stripBom } - - -/***/ }), - -/***/ 7129: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -// A linked list to keep track of recently-used-ness -const Yallist = __nccwpck_require__(665) - -const MAX = Symbol('max') -const LENGTH = Symbol('length') -const LENGTH_CALCULATOR = Symbol('lengthCalculator') -const ALLOW_STALE = Symbol('allowStale') -const MAX_AGE = Symbol('maxAge') -const DISPOSE = Symbol('dispose') -const NO_DISPOSE_ON_SET = Symbol('noDisposeOnSet') -const LRU_LIST = Symbol('lruList') -const CACHE = Symbol('cache') -const UPDATE_AGE_ON_GET = Symbol('updateAgeOnGet') - -const naiveLength = () => 1 - -// lruList is a yallist where the head is the youngest -// item, and the tail is the oldest. the list contains the Hit -// objects as the entries. -// Each Hit object has a reference to its Yallist.Node. This -// never changes. -// -// cache is a Map (or PseudoMap) that matches the keys to -// the Yallist.Node object. -class LRUCache { - constructor (options) { - if (typeof options === 'number') - options = { max: options } - - if (!options) - options = {} - - if (options.max && (typeof options.max !== 'number' || options.max < 0)) - throw new TypeError('max must be a non-negative number') - // Kind of weird to have a default max of Infinity, but oh well. - const max = this[MAX] = options.max || Infinity - - const lc = options.length || naiveLength - this[LENGTH_CALCULATOR] = (typeof lc !== 'function') ? naiveLength : lc - this[ALLOW_STALE] = options.stale || false - if (options.maxAge && typeof options.maxAge !== 'number') - throw new TypeError('maxAge must be a number') - this[MAX_AGE] = options.maxAge || 0 - this[DISPOSE] = options.dispose - this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false - this[UPDATE_AGE_ON_GET] = options.updateAgeOnGet || false - this.reset() - } - - // resize the cache when the max changes. - set max (mL) { - if (typeof mL !== 'number' || mL < 0) - throw new TypeError('max must be a non-negative number') - - this[MAX] = mL || Infinity - trim(this) - } - get max () { - return this[MAX] - } - - set allowStale (allowStale) { - this[ALLOW_STALE] = !!allowStale - } - get allowStale () { - return this[ALLOW_STALE] - } - - set maxAge (mA) { - if (typeof mA !== 'number') - throw new TypeError('maxAge must be a non-negative number') - - this[MAX_AGE] = mA - trim(this) - } - get maxAge () { - return this[MAX_AGE] - } - - // resize the cache when the lengthCalculator changes. 
- set lengthCalculator (lC) { - if (typeof lC !== 'function') - lC = naiveLength - - if (lC !== this[LENGTH_CALCULATOR]) { - this[LENGTH_CALCULATOR] = lC - this[LENGTH] = 0 - this[LRU_LIST].forEach(hit => { - hit.length = this[LENGTH_CALCULATOR](hit.value, hit.key) - this[LENGTH] += hit.length - }) - } - trim(this) - } - get lengthCalculator () { return this[LENGTH_CALCULATOR] } - - get length () { return this[LENGTH] } - get itemCount () { return this[LRU_LIST].length } - - rforEach (fn, thisp) { - thisp = thisp || this - for (let walker = this[LRU_LIST].tail; walker !== null;) { - const prev = walker.prev - forEachStep(this, fn, walker, thisp) - walker = prev - } - } - - forEach (fn, thisp) { - thisp = thisp || this - for (let walker = this[LRU_LIST].head; walker !== null;) { - const next = walker.next - forEachStep(this, fn, walker, thisp) - walker = next - } - } - - keys () { - return this[LRU_LIST].toArray().map(k => k.key) - } - - values () { - return this[LRU_LIST].toArray().map(k => k.value) - } - - reset () { - if (this[DISPOSE] && - this[LRU_LIST] && - this[LRU_LIST].length) { - this[LRU_LIST].forEach(hit => this[DISPOSE](hit.key, hit.value)) - } - - this[CACHE] = new Map() // hash of items by key - this[LRU_LIST] = new Yallist() // list of items in order of use recency - this[LENGTH] = 0 // length of items in the list - } - - dump () { - return this[LRU_LIST].map(hit => - isStale(this, hit) ? false : { - k: hit.key, - v: hit.value, - e: hit.now + (hit.maxAge || 0) - }).toArray().filter(h => h) - } - - dumpLru () { - return this[LRU_LIST] - } - - set (key, value, maxAge) { - maxAge = maxAge || this[MAX_AGE] - - if (maxAge && typeof maxAge !== 'number') - throw new TypeError('maxAge must be a number') - - const now = maxAge ? Date.now() : 0 - const len = this[LENGTH_CALCULATOR](value, key) - - if (this[CACHE].has(key)) { - if (len > this[MAX]) { - del(this, this[CACHE].get(key)) - return false - } - - const node = this[CACHE].get(key) - const item = node.value - - // dispose of the old one before overwriting - // split out into 2 ifs for better coverage tracking - if (this[DISPOSE]) { - if (!this[NO_DISPOSE_ON_SET]) - this[DISPOSE](key, item.value) - } - - item.now = now - item.maxAge = maxAge - item.value = value - this[LENGTH] += len - item.length - item.length = len - this.get(key) - trim(this) - return true - } - - const hit = new Entry(key, value, len, now, maxAge) - - // oversized objects fall out of cache automatically. 
- if (hit.length > this[MAX]) { - if (this[DISPOSE]) - this[DISPOSE](key, value) - - return false - } - - this[LENGTH] += hit.length - this[LRU_LIST].unshift(hit) - this[CACHE].set(key, this[LRU_LIST].head) - trim(this) - return true - } - - has (key) { - if (!this[CACHE].has(key)) return false - const hit = this[CACHE].get(key).value - return !isStale(this, hit) - } - - get (key) { - return get(this, key, true) - } - - peek (key) { - return get(this, key, false) - } - - pop () { - const node = this[LRU_LIST].tail - if (!node) - return null - - del(this, node) - return node.value - } - - del (key) { - del(this, this[CACHE].get(key)) - } - - load (arr) { - // reset the cache - this.reset() - - const now = Date.now() - // A previous serialized cache has the most recent items first - for (let l = arr.length - 1; l >= 0; l--) { - const hit = arr[l] - const expiresAt = hit.e || 0 - if (expiresAt === 0) - // the item was created without expiration in a non aged cache - this.set(hit.k, hit.v) - else { - const maxAge = expiresAt - now - // dont add already expired items - if (maxAge > 0) { - this.set(hit.k, hit.v, maxAge) - } - } - } - } - - prune () { - this[CACHE].forEach((value, key) => get(this, key, false)) - } -} - -const get = (self, key, doUse) => { - const node = self[CACHE].get(key) - if (node) { - const hit = node.value - if (isStale(self, hit)) { - del(self, node) - if (!self[ALLOW_STALE]) - return undefined - } else { - if (doUse) { - if (self[UPDATE_AGE_ON_GET]) - node.value.now = Date.now() - self[LRU_LIST].unshiftNode(node) - } - } - return hit.value - } -} - -const isStale = (self, hit) => { - if (!hit || (!hit.maxAge && !self[MAX_AGE])) - return false - - const diff = Date.now() - hit.now - return hit.maxAge ? diff > hit.maxAge - : self[MAX_AGE] && (diff > self[MAX_AGE]) -} - -const trim = self => { - if (self[LENGTH] > self[MAX]) { - for (let walker = self[LRU_LIST].tail; - self[LENGTH] > self[MAX] && walker !== null;) { - // We know that we're about to delete this one, and also - // what the next least recently used key will be, so just - // go ahead and set it now. - const prev = walker.prev - del(self, walker) - walker = prev - } - } -} - -const del = (self, node) => { - if (node) { - const hit = node.value - if (self[DISPOSE]) - self[DISPOSE](hit.key, hit.value) - - self[LENGTH] -= hit.length - self[CACHE].delete(hit.key) - self[LRU_LIST].removeNode(node) - } -} - -class Entry { - constructor (key, value, length, now, maxAge) { - this.key = key - this.value = value - this.length = length - this.now = now - this.maxAge = maxAge || 0 - } -} - -const forEachStep = (self, fn, node, thisp) => { - let hit = node.value - if (isStale(self, hit)) { - del(self, node) - if (!self[ALLOW_STALE]) - hit = undefined - } - if (hit) - fn.call(thisp, hit.value, hit.key, self) -} - -module.exports = LRUCache - - -/***/ }), - -/***/ 467: -/***/ ((module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } - -var Stream = _interopDefault(__nccwpck_require__(2413)); -var http = _interopDefault(__nccwpck_require__(8605)); -var Url = _interopDefault(__nccwpck_require__(8835)); -var whatwgUrl = _interopDefault(__nccwpck_require__(8665)); -var https = _interopDefault(__nccwpck_require__(7211)); -var zlib = _interopDefault(__nccwpck_require__(8761)); - -// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js - -// fix for "Readable" isn't a named export issue -const Readable = Stream.Readable; - -const BUFFER = Symbol('buffer'); -const TYPE = Symbol('type'); - -class Blob { - constructor() { - this[TYPE] = ''; - - const blobParts = arguments[0]; - const options = arguments[1]; - - const buffers = []; - let size = 0; - - if (blobParts) { - const a = blobParts; - const length = Number(a.length); - for (let i = 0; i < length; i++) { - const element = a[i]; - let buffer; - if (element instanceof Buffer) { - buffer = element; - } else if (ArrayBuffer.isView(element)) { - buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); - } else if (element instanceof ArrayBuffer) { - buffer = Buffer.from(element); - } else if (element instanceof Blob) { - buffer = element[BUFFER]; - } else { - buffer = Buffer.from(typeof element === 'string' ? element : String(element)); - } - size += buffer.length; - buffers.push(buffer); - } - } - - this[BUFFER] = Buffer.concat(buffers); - - let type = options && options.type !== undefined && String(options.type).toLowerCase(); - if (type && !/[^\u0020-\u007E]/.test(type)) { - this[TYPE] = type; - } - } - get size() { - return this[BUFFER].length; - } - get type() { - return this[TYPE]; - } - text() { - return Promise.resolve(this[BUFFER].toString()); - } - arrayBuffer() { - const buf = this[BUFFER]; - const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); - return Promise.resolve(ab); - } - stream() { - const readable = new Readable(); - readable._read = function () {}; - readable.push(this[BUFFER]); - readable.push(null); - return readable; - } - toString() { - return '[object Blob]'; - } - slice() { - const size = this.size; - - const start = arguments[0]; - const end = arguments[1]; - let relativeStart, relativeEnd; - if (start === undefined) { - relativeStart = 0; - } else if (start < 0) { - relativeStart = Math.max(size + start, 0); - } else { - relativeStart = Math.min(start, size); - } - if (end === undefined) { - relativeEnd = size; - } else if (end < 0) { - relativeEnd = Math.max(size + end, 0); - } else { - relativeEnd = Math.min(end, size); - } - const span = Math.max(relativeEnd - relativeStart, 0); - - const buffer = this[BUFFER]; - const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); - const blob = new Blob([], { type: arguments[2] }); - blob[BUFFER] = slicedBuffer; - return blob; - } -} - -Object.defineProperties(Blob.prototype, { - size: { enumerable: true }, - type: { enumerable: true }, - slice: { enumerable: true } -}); - -Object.defineProperty(Blob.prototype, Symbol.toStringTag, { - value: 'Blob', - writable: false, - enumerable: false, - configurable: true -}); - -/** - * fetch-error.js - * - * FetchError interface for operational errors - */ - -/** - * Create FetchError instance - * - * @param String message Error message for human - * @param String type Error type for machine - * @param String systemError For Node.js system error - * @return FetchError - */ -function FetchError(message, type, systemError) { - 
Error.call(this, message); - - this.message = message; - this.type = type; - - // when err.type is `system`, err.code contains system error code - if (systemError) { - this.code = this.errno = systemError.code; - } - - // hide custom error implementation details from end-users - Error.captureStackTrace(this, this.constructor); -} - -FetchError.prototype = Object.create(Error.prototype); -FetchError.prototype.constructor = FetchError; -FetchError.prototype.name = 'FetchError'; - -let convert; -try { - convert = __nccwpck_require__(2877).convert; -} catch (e) {} - -const INTERNALS = Symbol('Body internals'); - -// fix an issue where "PassThrough" isn't a named export for node <10 -const PassThrough = Stream.PassThrough; - -/** - * Body mixin - * - * Ref: https://fetch.spec.whatwg.org/#body - * - * @param Stream body Readable stream - * @param Object opts Response options - * @return Void - */ -function Body(body) { - var _this = this; - - var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, - _ref$size = _ref.size; - - let size = _ref$size === undefined ? 0 : _ref$size; - var _ref$timeout = _ref.timeout; - let timeout = _ref$timeout === undefined ? 0 : _ref$timeout; - - if (body == null) { - // body is undefined or null - body = null; - } else if (isURLSearchParams(body)) { - // body is a URLSearchParams - body = Buffer.from(body.toString()); - } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { - // body is ArrayBuffer - body = Buffer.from(body); - } else if (ArrayBuffer.isView(body)) { - // body is ArrayBufferView - body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); - } else if (body instanceof Stream) ; else { - // none of the above - // coerce to string then buffer - body = Buffer.from(String(body)); - } - this[INTERNALS] = { - body, - disturbed: false, - error: null - }; - this.size = size; - this.timeout = timeout; - - if (body instanceof Stream) { - body.on('error', function (err) { - const error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); - _this[INTERNALS].error = error; - }); - } -} - -Body.prototype = { - get body() { - return this[INTERNALS].body; - }, - - get bodyUsed() { - return this[INTERNALS].disturbed; - }, - - /** - * Decode response as ArrayBuffer - * - * @return Promise - */ - arrayBuffer() { - return consumeBody.call(this).then(function (buf) { - return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); - }); - }, - - /** - * Return raw response as Blob - * - * @return Promise - */ - blob() { - let ct = this.headers && this.headers.get('content-type') || ''; - return consumeBody.call(this).then(function (buf) { - return Object.assign( - // Prevent copying - new Blob([], { - type: ct.toLowerCase() - }), { - [BUFFER]: buf - }); - }); - }, - - /** - * Decode response as json - * - * @return Promise - */ - json() { - var _this2 = this; - - return consumeBody.call(this).then(function (buffer) { - try { - return JSON.parse(buffer.toString()); - } catch (err) { - return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); - } - }); - }, - - /** - * Decode response as text - * - * @return Promise - */ - text() { - return consumeBody.call(this).then(function (buffer) { - return buffer.toString(); - }); - }, - - /** - * Decode response as buffer (non-spec api) - * - * @return Promise - */ - buffer() { - return consumeBody.call(this); - }, - - /** - * Decode response as text, while automatically detecting the encoding and - * trying to decode to UTF-8 (non-spec api) - * - * @return Promise - */ - textConverted() { - var _this3 = this; - - return consumeBody.call(this).then(function (buffer) { - return convertBody(buffer, _this3.headers); - }); - } -}; - -// In browsers, all properties are enumerable. -Object.defineProperties(Body.prototype, { - body: { enumerable: true }, - bodyUsed: { enumerable: true }, - arrayBuffer: { enumerable: true }, - blob: { enumerable: true }, - json: { enumerable: true }, - text: { enumerable: true } -}); - -Body.mixIn = function (proto) { - for (const name of Object.getOwnPropertyNames(Body.prototype)) { - // istanbul ignore else: future proof - if (!(name in proto)) { - const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); - Object.defineProperty(proto, name, desc); - } - } -}; - -/** - * Consume and convert an entire Body to a Buffer. 
- * - * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body - * - * @return Promise - */ -function consumeBody() { - var _this4 = this; - - if (this[INTERNALS].disturbed) { - return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); - } - - this[INTERNALS].disturbed = true; - - if (this[INTERNALS].error) { - return Body.Promise.reject(this[INTERNALS].error); - } - - let body = this.body; - - // body is null - if (body === null) { - return Body.Promise.resolve(Buffer.alloc(0)); - } - - // body is blob - if (isBlob(body)) { - body = body.stream(); - } - - // body is buffer - if (Buffer.isBuffer(body)) { - return Body.Promise.resolve(body); - } - - // istanbul ignore if: should never happen - if (!(body instanceof Stream)) { - return Body.Promise.resolve(Buffer.alloc(0)); - } - - // body is stream - // get ready to actually consume the body - let accum = []; - let accumBytes = 0; - let abort = false; - - return new Body.Promise(function (resolve, reject) { - let resTimeout; - - // allow timeout on slow response body - if (_this4.timeout) { - resTimeout = setTimeout(function () { - abort = true; - reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); - }, _this4.timeout); - } - - // handle stream errors - body.on('error', function (err) { - if (err.name === 'AbortError') { - // if the request was aborted, reject with this Error - abort = true; - reject(err); - } else { - // other errors, such as incorrect content-encoding - reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); - } - }); - - body.on('data', function (chunk) { - if (abort || chunk === null) { - return; - } - - if (_this4.size && accumBytes + chunk.length > _this4.size) { - abort = true; - reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); - return; - } - - accumBytes += chunk.length; - accum.push(chunk); - }); - - body.on('end', function () { - if (abort) { - return; - } - - clearTimeout(resTimeout); - - try { - resolve(Buffer.concat(accum, accumBytes)); - } catch (err) { - // handle streams that have accumulated too much data (issue #414) - reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); - } - }); - }); -} - -/** - * Detect buffer encoding and convert to target encoding - * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding - * - * @param Buffer buffer Incoming buffer - * @param String encoding Target encoding - * @return String - */ -function convertBody(buffer, headers) { - if (typeof convert !== 'function') { - throw new Error('The package `encoding` must be installed to use the textConverted() function'); - } - - const ct = headers.get('content-type'); - let charset = 'utf-8'; - let res, str; - - // header - if (ct) { - res = /charset=([^;]*)/i.exec(ct); - } - - // no charset in content type, peek at response body for at most 1024 bytes - str = buffer.slice(0, 1024).toString(); - - // html5 - if (!res && str) { - res = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined; - - this[MAP] = Object.create(null); - - if (init instanceof Headers) { - const rawHeaders = init.raw(); - const headerNames = Object.keys(rawHeaders); - - for (const headerName of headerNames) { - for (const value of rawHeaders[headerName]) { - this.append(headerName, value); - } - } - - return; - } - - // We don't worry about converting prop to ByteString here as append() - // will handle it. - if (init == null) ; else if (typeof init === 'object') { - const method = init[Symbol.iterator]; - if (method != null) { - if (typeof method !== 'function') { - throw new TypeError('Header pairs must be iterable'); - } - - // sequence> - // Note: per spec we have to first exhaust the lists then process them - const pairs = []; - for (const pair of init) { - if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { - throw new TypeError('Each header pair must be iterable'); - } - pairs.push(Array.from(pair)); - } - - for (const pair of pairs) { - if (pair.length !== 2) { - throw new TypeError('Each header pair must be a name/value tuple'); - } - this.append(pair[0], pair[1]); - } - } else { - // record - for (const key of Object.keys(init)) { - const value = init[key]; - this.append(key, value); - } - } - } else { - throw new TypeError('Provided initializer must be an object'); - } - } - - /** - * Return combined header value given name - * - * @param String name Header name - * @return Mixed - */ - get(name) { - name = `${name}`; - validateName(name); - const key = find(this[MAP], name); - if (key === undefined) { - return null; - } - - return this[MAP][key].join(', '); - } - - /** - * Iterate over all headers - * - * @param Function callback Executed for each item with parameters (value, name, thisArg) - * @param Boolean thisArg `this` context for callback function - * @return Void - */ - forEach(callback) { - let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; - - let pairs = getHeaders(this); - let i = 0; - while (i < pairs.length) { - var _pairs$i = pairs[i]; - const name = _pairs$i[0], - value = _pairs$i[1]; - - callback.call(thisArg, value, name, this); - pairs = getHeaders(this); - i++; - } - } - - /** - * Overwrite header values given name - * - * @param String name Header name - * @param String value Header value - * @return Void - */ - set(name, value) { - name = `${name}`; - value = `${value}`; - validateName(name); - validateValue(value); - const key = find(this[MAP], name); - this[MAP][key !== undefined ? 
key : name] = [value]; - } - - /** - * Append a value onto existing header - * - * @param String name Header name - * @param String value Header value - * @return Void - */ - append(name, value) { - name = `${name}`; - value = `${value}`; - validateName(name); - validateValue(value); - const key = find(this[MAP], name); - if (key !== undefined) { - this[MAP][key].push(value); - } else { - this[MAP][name] = [value]; - } - } - - /** - * Check for header name existence - * - * @param String name Header name - * @return Boolean - */ - has(name) { - name = `${name}`; - validateName(name); - return find(this[MAP], name) !== undefined; - } - - /** - * Delete all header values given name - * - * @param String name Header name - * @return Void - */ - delete(name) { - name = `${name}`; - validateName(name); - const key = find(this[MAP], name); - if (key !== undefined) { - delete this[MAP][key]; - } - } - - /** - * Return raw headers (non-spec api) - * - * @return Object - */ - raw() { - return this[MAP]; - } - - /** - * Get an iterator on keys. - * - * @return Iterator - */ - keys() { - return createHeadersIterator(this, 'key'); - } - - /** - * Get an iterator on values. - * - * @return Iterator - */ - values() { - return createHeadersIterator(this, 'value'); - } - - /** - * Get an iterator on entries. - * - * This is the default iterator of the Headers object. - * - * @return Iterator - */ - [Symbol.iterator]() { - return createHeadersIterator(this, 'key+value'); - } -} -Headers.prototype.entries = Headers.prototype[Symbol.iterator]; - -Object.defineProperty(Headers.prototype, Symbol.toStringTag, { - value: 'Headers', - writable: false, - enumerable: false, - configurable: true -}); - -Object.defineProperties(Headers.prototype, { - get: { enumerable: true }, - forEach: { enumerable: true }, - set: { enumerable: true }, - append: { enumerable: true }, - has: { enumerable: true }, - delete: { enumerable: true }, - keys: { enumerable: true }, - values: { enumerable: true }, - entries: { enumerable: true } -}); - -function getHeaders(headers) { - let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; - - const keys = Object.keys(headers[MAP]).sort(); - return keys.map(kind === 'key' ? function (k) { - return k.toLowerCase(); - } : kind === 'value' ? 
function (k) { - return headers[MAP][k].join(', '); - } : function (k) { - return [k.toLowerCase(), headers[MAP][k].join(', ')]; - }); -} - -const INTERNAL = Symbol('internal'); - -function createHeadersIterator(target, kind) { - const iterator = Object.create(HeadersIteratorPrototype); - iterator[INTERNAL] = { - target, - kind, - index: 0 - }; - return iterator; -} - -const HeadersIteratorPrototype = Object.setPrototypeOf({ - next() { - // istanbul ignore if - if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { - throw new TypeError('Value of `this` is not a HeadersIterator'); - } - - var _INTERNAL = this[INTERNAL]; - const target = _INTERNAL.target, - kind = _INTERNAL.kind, - index = _INTERNAL.index; - - const values = getHeaders(target, kind); - const len = values.length; - if (index >= len) { - return { - value: undefined, - done: true - }; - } - - this[INTERNAL].index = index + 1; - - return { - value: values[index], - done: false - }; - } -}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); - -Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { - value: 'HeadersIterator', - writable: false, - enumerable: false, - configurable: true -}); - -/** - * Export the Headers object in a form that Node.js can consume. - * - * @param Headers headers - * @return Object - */ -function exportNodeCompatibleHeaders(headers) { - const obj = Object.assign({ __proto__: null }, headers[MAP]); - - // http.request() only supports string as Host header. This hack makes - // specifying custom Host header possible. - const hostHeaderKey = find(headers[MAP], 'Host'); - if (hostHeaderKey !== undefined) { - obj[hostHeaderKey] = obj[hostHeaderKey][0]; - } - - return obj; -} - -/** - * Create a Headers object from an object of headers, ignoring those that do - * not conform to HTTP grammar productions. - * - * @param Object obj Object of headers - * @return Headers - */ -function createHeadersLenient(obj) { - const headers = new Headers(); - for (const name of Object.keys(obj)) { - if (invalidTokenRegex.test(name)) { - continue; - } - if (Array.isArray(obj[name])) { - for (const val of obj[name]) { - if (invalidHeaderCharRegex.test(val)) { - continue; - } - if (headers[MAP][name] === undefined) { - headers[MAP][name] = [val]; - } else { - headers[MAP][name].push(val); - } - } - } else if (!invalidHeaderCharRegex.test(obj[name])) { - headers[MAP][name] = [obj[name]]; - } - } - return headers; -} - -const INTERNALS$1 = Symbol('Response internals'); - -// fix an issue where "STATUS_CODES" aren't a named export for node <10 -const STATUS_CODES = http.STATUS_CODES; - -/** - * Response class - * - * @param Stream body Readable stream - * @param Object opts Response options - * @return Void - */ -class Response { - constructor() { - let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; - let opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; - - Body.call(this, body, opts); - - const status = opts.status || 200; - const headers = new Headers(opts.headers); - - if (body != null && !headers.has('Content-Type')) { - const contentType = extractContentType(body); - if (contentType) { - headers.append('Content-Type', contentType); - } - } - - this[INTERNALS$1] = { - url: opts.url, - status, - statusText: opts.statusText || STATUS_CODES[status], - headers, - counter: opts.counter - }; - } - - get url() { - return this[INTERNALS$1].url || ''; - } - - get status() { - return this[INTERNALS$1].status; - } - - /** - * Convenience property representing if the request ended normally - */ - get ok() { - return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; - } - - get redirected() { - return this[INTERNALS$1].counter > 0; - } - - get statusText() { - return this[INTERNALS$1].statusText; - } - - get headers() { - return this[INTERNALS$1].headers; - } - - /** - * Clone this response - * - * @return Response - */ - clone() { - return new Response(clone(this), { - url: this.url, - status: this.status, - statusText: this.statusText, - headers: this.headers, - ok: this.ok, - redirected: this.redirected - }); - } -} - -Body.mixIn(Response.prototype); - -Object.defineProperties(Response.prototype, { - url: { enumerable: true }, - status: { enumerable: true }, - ok: { enumerable: true }, - redirected: { enumerable: true }, - statusText: { enumerable: true }, - headers: { enumerable: true }, - clone: { enumerable: true } -}); - -Object.defineProperty(Response.prototype, Symbol.toStringTag, { - value: 'Response', - writable: false, - enumerable: false, - configurable: true -}); - -const INTERNALS$2 = Symbol('Request internals'); -const URL = Url.URL || whatwgUrl.URL; - -// fix an issue where "format", "parse" aren't a named export for node <10 -const parse_url = Url.parse; -const format_url = Url.format; - -/** - * Wrapper around `new URL` to handle arbitrary URLs - * - * @param {string} urlStr - * @return {void} - */ -function parseURL(urlStr) { - /* - Check whether the URL is absolute or not - Scheme: https://tools.ietf.org/html/rfc3986#section-3.1 - Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3 - */ - if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) { - urlStr = new URL(urlStr).toString(); - } - - // Fallback to old implementation for arbitrary URLs - return parse_url(urlStr); -} - -const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; - -/** - * Check if a value is an instance of Request. - * - * @param Mixed input - * @return Boolean - */ -function isRequest(input) { - return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; -} - -function isAbortSignal(signal) { - const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); - return !!(proto && proto.constructor.name === 'AbortSignal'); -} - -/** - * Request class - * - * @param Mixed input Url or Request instance - * @param Object init Custom options - * @return Void - */ -class Request { - constructor(input) { - let init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; - - let parsedURL; - - // normalize input - if (!isRequest(input)) { - if (input && input.href) { - // in order to support Node.js' Url objects; though WHATWG's URL objects - // will fall into this branch also (since their `toString()` will return - // `href` property anyway) - parsedURL = parseURL(input.href); - } else { - // coerce input to a string before attempting to parse - parsedURL = parseURL(`${input}`); - } - input = {}; - } else { - parsedURL = parseURL(input.url); - } - - let method = init.method || input.method || 'GET'; - method = method.toUpperCase(); - - if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { - throw new TypeError('Request with GET/HEAD method cannot have body'); - } - - let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null; - - Body.call(this, inputBody, { - timeout: init.timeout || input.timeout || 0, - size: init.size || input.size || 0 - }); - - const headers = new Headers(init.headers || input.headers || {}); - - if (inputBody != null && !headers.has('Content-Type')) { - const contentType = extractContentType(inputBody); - if (contentType) { - headers.append('Content-Type', contentType); - } - } - - let signal = isRequest(input) ? input.signal : null; - if ('signal' in init) signal = init.signal; - - if (signal != null && !isAbortSignal(signal)) { - throw new TypeError('Expected signal to be an instanceof AbortSignal'); - } - - this[INTERNALS$2] = { - method, - redirect: init.redirect || input.redirect || 'follow', - headers, - parsedURL, - signal - }; - - // node-fetch-only options - this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; - this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; - this.counter = init.counter || input.counter || 0; - this.agent = init.agent || input.agent; - } - - get method() { - return this[INTERNALS$2].method; - } - - get url() { - return format_url(this[INTERNALS$2].parsedURL); - } - - get headers() { - return this[INTERNALS$2].headers; - } - - get redirect() { - return this[INTERNALS$2].redirect; - } - - get signal() { - return this[INTERNALS$2].signal; - } - - /** - * Clone this request - * - * @return Request - */ - clone() { - return new Request(this); - } -} - -Body.mixIn(Request.prototype); - -Object.defineProperty(Request.prototype, Symbol.toStringTag, { - value: 'Request', - writable: false, - enumerable: false, - configurable: true -}); - -Object.defineProperties(Request.prototype, { - method: { enumerable: true }, - url: { enumerable: true }, - headers: { enumerable: true }, - redirect: { enumerable: true }, - clone: { enumerable: true }, - signal: { enumerable: true } -}); - -/** - * Convert a Request to Node.js http request options. 
- * - * @param Request A Request instance - * @return Object The options object to be passed to http.request - */ -function getNodeRequestOptions(request) { - const parsedURL = request[INTERNALS$2].parsedURL; - const headers = new Headers(request[INTERNALS$2].headers); - - // fetch step 1.3 - if (!headers.has('Accept')) { - headers.set('Accept', '*/*'); - } - - // Basic fetch - if (!parsedURL.protocol || !parsedURL.hostname) { - throw new TypeError('Only absolute URLs are supported'); - } - - if (!/^https?:$/.test(parsedURL.protocol)) { - throw new TypeError('Only HTTP(S) protocols are supported'); - } - - if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { - throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); - } - - // HTTP-network-or-cache fetch steps 2.4-2.7 - let contentLengthValue = null; - if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { - contentLengthValue = '0'; - } - if (request.body != null) { - const totalBytes = getTotalBytes(request); - if (typeof totalBytes === 'number') { - contentLengthValue = String(totalBytes); - } - } - if (contentLengthValue) { - headers.set('Content-Length', contentLengthValue); - } - - // HTTP-network-or-cache fetch step 2.11 - if (!headers.has('User-Agent')) { - headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); - } - - // HTTP-network-or-cache fetch step 2.15 - if (request.compress && !headers.has('Accept-Encoding')) { - headers.set('Accept-Encoding', 'gzip,deflate'); - } - - let agent = request.agent; - if (typeof agent === 'function') { - agent = agent(parsedURL); - } - - if (!headers.has('Connection') && !agent) { - headers.set('Connection', 'close'); - } - - // HTTP-network fetch step 4.2 - // chunked encoding is handled by Node.js - - return Object.assign({}, parsedURL, { - method: request.method, - headers: exportNodeCompatibleHeaders(headers), - agent - }); -} - -/** - * abort-error.js - * - * AbortError interface for cancelled requests - */ - -/** - * Create AbortError instance - * - * @param String message Error message for human - * @return AbortError - */ -function AbortError(message) { - Error.call(this, message); - - this.type = 'aborted'; - this.message = message; - - // hide custom error implementation details from end-users - Error.captureStackTrace(this, this.constructor); -} - -AbortError.prototype = Object.create(Error.prototype); -AbortError.prototype.constructor = AbortError; -AbortError.prototype.name = 'AbortError'; - -const URL$1 = Url.URL || whatwgUrl.URL; - -// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 -const PassThrough$1 = Stream.PassThrough; - -const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) { - const orig = new URL$1(original).hostname; - const dest = new URL$1(destination).hostname; - - return orig === dest || orig[orig.length - dest.length - 1] === '.' 
&& orig.endsWith(dest); -}; - -/** - * Fetch function - * - * @param Mixed url Absolute url or Request instance - * @param Object opts Fetch options - * @return Promise - */ -function fetch(url, opts) { - - // allow custom promise - if (!fetch.Promise) { - throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); - } - - Body.Promise = fetch.Promise; - - // wrap http.request into fetch - return new fetch.Promise(function (resolve, reject) { - // build request object - const request = new Request(url, opts); - const options = getNodeRequestOptions(request); - - const send = (options.protocol === 'https:' ? https : http).request; - const signal = request.signal; - - let response = null; - - const abort = function abort() { - let error = new AbortError('The user aborted a request.'); - reject(error); - if (request.body && request.body instanceof Stream.Readable) { - request.body.destroy(error); - } - if (!response || !response.body) return; - response.body.emit('error', error); - }; - - if (signal && signal.aborted) { - abort(); - return; - } - - const abortAndFinalize = function abortAndFinalize() { - abort(); - finalize(); - }; - - // send request - const req = send(options); - let reqTimeout; - - if (signal) { - signal.addEventListener('abort', abortAndFinalize); - } - - function finalize() { - req.abort(); - if (signal) signal.removeEventListener('abort', abortAndFinalize); - clearTimeout(reqTimeout); - } - - if (request.timeout) { - req.once('socket', function (socket) { - reqTimeout = setTimeout(function () { - reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); - finalize(); - }, request.timeout); - }); - } - - req.on('error', function (err) { - reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); - finalize(); - }); - - req.on('response', function (res) { - clearTimeout(reqTimeout); - - const headers = createHeadersLenient(res.headers); - - // HTTP fetch step 5 - if (fetch.isRedirect(res.statusCode)) { - // HTTP fetch step 5.2 - const location = headers.get('Location'); - - // HTTP fetch step 5.3 - let locationURL = null; - try { - locationURL = location === null ? null : new URL$1(location, request.url).toString(); - } catch (err) { - // error here can only be invalid URL in Location: header - // do not throw when options.redirect == manual - // let the user extract the errorneous redirect URL - if (request.redirect !== 'manual') { - reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')); - finalize(); - return; - } - } - - // HTTP fetch step 5.5 - switch (request.redirect) { - case 'error': - reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); - finalize(); - return; - case 'manual': - // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. 
- if (locationURL !== null) { - // handle corrupted header - try { - headers.set('Location', locationURL); - } catch (err) { - // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request - reject(err); - } - } - break; - case 'follow': - // HTTP-redirect fetch step 2 - if (locationURL === null) { - break; - } - - // HTTP-redirect fetch step 5 - if (request.counter >= request.follow) { - reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); - finalize(); - return; - } - - // HTTP-redirect fetch step 6 (counter increment) - // Create a new Request object. - const requestOpts = { - headers: new Headers(request.headers), - follow: request.follow, - counter: request.counter + 1, - agent: request.agent, - compress: request.compress, - method: request.method, - body: request.body, - signal: request.signal, - timeout: request.timeout, - size: request.size - }; - - if (!isDomainOrSubdomain(request.url, locationURL)) { - for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) { - requestOpts.headers.delete(name); - } - } - - // HTTP-redirect fetch step 9 - if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { - reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); - finalize(); - return; - } - - // HTTP-redirect fetch step 11 - if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { - requestOpts.method = 'GET'; - requestOpts.body = undefined; - requestOpts.headers.delete('content-length'); - } - - // HTTP-redirect fetch step 15 - resolve(fetch(new Request(locationURL, requestOpts))); - finalize(); - return; - } - } - - // prepare response - res.once('end', function () { - if (signal) signal.removeEventListener('abort', abortAndFinalize); - }); - let body = res.pipe(new PassThrough$1()); - - const response_options = { - url: request.url, - status: res.statusCode, - statusText: res.statusMessage, - headers: headers, - size: request.size, - timeout: request.timeout, - counter: request.counter - }; - - // HTTP-network fetch step 12.1.1.3 - const codings = headers.get('Content-Encoding'); - - // HTTP-network fetch step 12.1.1.4: handle content codings - - // in following scenarios we ignore compression support - // 1. compression support is disabled - // 2. HEAD request - // 3. no Content-Encoding header - // 4. no content response (204) - // 5. content not modified response (304) - if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { - response = new Response(body, response_options); - resolve(response); - return; - } - - // For Node v6+ - // Be less strict when decoding compressed responses, since sometimes - // servers send slightly invalid responses that are still accepted - // by common browsers. - // Always using Z_SYNC_FLUSH is what cURL does. 
- const zlibOptions = { - flush: zlib.Z_SYNC_FLUSH, - finishFlush: zlib.Z_SYNC_FLUSH - }; - - // for gzip - if (codings == 'gzip' || codings == 'x-gzip') { - body = body.pipe(zlib.createGunzip(zlibOptions)); - response = new Response(body, response_options); - resolve(response); - return; - } - - // for deflate - if (codings == 'deflate' || codings == 'x-deflate') { - // handle the infamous raw deflate response from old servers - // a hack for old IIS and Apache servers - const raw = res.pipe(new PassThrough$1()); - raw.once('data', function (chunk) { - // see http://stackoverflow.com/questions/37519828 - if ((chunk[0] & 0x0F) === 0x08) { - body = body.pipe(zlib.createInflate()); - } else { - body = body.pipe(zlib.createInflateRaw()); - } - response = new Response(body, response_options); - resolve(response); - }); - return; - } - - // for br - if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { - body = body.pipe(zlib.createBrotliDecompress()); - response = new Response(body, response_options); - resolve(response); - return; - } - - // otherwise, use response as-is - response = new Response(body, response_options); - resolve(response); - }); - - writeToStream(req, request); - }); -} -/** - * Redirect code matching - * - * @param Number code Status code - * @return Boolean - */ -fetch.isRedirect = function (code) { - return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; -}; - -// expose Promise -fetch.Promise = global.Promise; - -module.exports = exports = fetch; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.default = exports; -exports.Headers = Headers; -exports.Request = Request; -exports.Response = Response; -exports.FetchError = FetchError; - - -/***/ }), - -/***/ 1223: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var wrappy = __nccwpck_require__(2940) -module.exports = wrappy(once) -module.exports.strict = wrappy(onceStrict) - -once.proto = once(function () { - Object.defineProperty(Function.prototype, 'once', { - value: function () { - return once(this) - }, - configurable: true - }) - - Object.defineProperty(Function.prototype, 'onceStrict', { - value: function () { - return onceStrict(this) - }, - configurable: true - }) -}) - -function once (fn) { - var f = function () { - if (f.called) return f.value - f.called = true - return f.value = fn.apply(this, arguments) - } - f.called = false - return f -} - -function onceStrict (fn) { - var f = function () { - if (f.called) - throw new Error(f.onceError) - f.called = true - return f.value = fn.apply(this, arguments) - } - var name = fn.name || 'Function wrapped with `once`' - f.onceError = name + " shouldn't be called more than once" - f.called = false - return f -} - - -/***/ }), - -/***/ 1532: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const ANY = Symbol('SemVer ANY') -// hoisted class for cyclic dependency -class Comparator { - static get ANY () { - return ANY - } - - constructor (comp, options) { - options = parseOptions(options) - - if (comp instanceof Comparator) { - if (comp.loose === !!options.loose) { - return comp - } else { - comp = comp.value - } - } - - debug('comparator', comp, options) - this.options = options - this.loose = !!options.loose - this.parse(comp) - - if (this.semver === ANY) { - this.value = '' - } else { - this.value = this.operator + this.semver.version - } - - debug('comp', this) - } - - parse (comp) { - const r = this.options.loose ? 
re[t.COMPARATORLOOSE] : re[t.COMPARATOR] - const m = comp.match(r) - - if (!m) { - throw new TypeError(`Invalid comparator: ${comp}`) - } - - this.operator = m[1] !== undefined ? m[1] : '' - if (this.operator === '=') { - this.operator = '' - } - - // if it literally is just '>' or '' then allow anything. - if (!m[2]) { - this.semver = ANY - } else { - this.semver = new SemVer(m[2], this.options.loose) - } - } - - toString () { - return this.value - } - - test (version) { - debug('Comparator.test', version, this.options.loose) - - if (this.semver === ANY || version === ANY) { - return true - } - - if (typeof version === 'string') { - try { - version = new SemVer(version, this.options) - } catch (er) { - return false - } - } - - return cmp(version, this.operator, this.semver, this.options) - } - - intersects (comp, options) { - if (!(comp instanceof Comparator)) { - throw new TypeError('a Comparator is required') - } - - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false, - } - } - - if (this.operator === '') { - if (this.value === '') { - return true - } - return new Range(comp.value, options).test(this.value) - } else if (comp.operator === '') { - if (comp.value === '') { - return true - } - return new Range(this.value, options).test(comp.semver) - } - - const sameDirectionIncreasing = - (this.operator === '>=' || this.operator === '>') && - (comp.operator === '>=' || comp.operator === '>') - const sameDirectionDecreasing = - (this.operator === '<=' || this.operator === '<') && - (comp.operator === '<=' || comp.operator === '<') - const sameSemVer = this.semver.version === comp.semver.version - const differentDirectionsInclusive = - (this.operator === '>=' || this.operator === '<=') && - (comp.operator === '>=' || comp.operator === '<=') - const oppositeDirectionsLessThan = - cmp(this.semver, '<', comp.semver, options) && - (this.operator === '>=' || this.operator === '>') && - (comp.operator === '<=' || comp.operator === '<') - const oppositeDirectionsGreaterThan = - cmp(this.semver, '>', comp.semver, options) && - (this.operator === '<=' || this.operator === '<') && - (comp.operator === '>=' || comp.operator === '>') - - return ( - sameDirectionIncreasing || - sameDirectionDecreasing || - (sameSemVer && differentDirectionsInclusive) || - oppositeDirectionsLessThan || - oppositeDirectionsGreaterThan - ) - } -} - -module.exports = Comparator - -const parseOptions = __nccwpck_require__(785) -const { re, t } = __nccwpck_require__(9523) -const cmp = __nccwpck_require__(5098) -const debug = __nccwpck_require__(427) -const SemVer = __nccwpck_require__(8088) -const Range = __nccwpck_require__(9828) - - -/***/ }), - -/***/ 9828: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -// hoisted class for cyclic dependency -class Range { - constructor (range, options) { - options = parseOptions(options) - - if (range instanceof Range) { - if ( - range.loose === !!options.loose && - range.includePrerelease === !!options.includePrerelease - ) { - return range - } else { - return new Range(range.raw, options) - } - } - - if (range instanceof Comparator) { - // just put it in the set and return - this.raw = range.value - this.set = [[range]] - this.format() - return this - } - - this.options = options - this.loose = !!options.loose - this.includePrerelease = !!options.includePrerelease - - // First, split based on boolean or || - this.raw = range - this.set = range - .split('||') - // map the range to a 2d array of comparators - 
.map(r => this.parseRange(r.trim())) - // throw out any comparator lists that are empty - // this generally means that it was not a valid range, which is allowed - // in loose mode, but will still throw if the WHOLE range is invalid. - .filter(c => c.length) - - if (!this.set.length) { - throw new TypeError(`Invalid SemVer Range: ${range}`) - } - - // if we have any that are not the null set, throw out null sets. - if (this.set.length > 1) { - // keep the first one, in case they're all null sets - const first = this.set[0] - this.set = this.set.filter(c => !isNullSet(c[0])) - if (this.set.length === 0) { - this.set = [first] - } else if (this.set.length > 1) { - // if we have any that are *, then the range is just * - for (const c of this.set) { - if (c.length === 1 && isAny(c[0])) { - this.set = [c] - break - } - } - } - } - - this.format() - } - - format () { - this.range = this.set - .map((comps) => { - return comps.join(' ').trim() - }) - .join('||') - .trim() - return this.range - } - - toString () { - return this.range - } - - parseRange (range) { - range = range.trim() - - // memoize range parsing for performance. - // this is a very hot path, and fully deterministic. - const memoOpts = Object.keys(this.options).join(',') - const memoKey = `parseRange:${memoOpts}:${range}` - const cached = cache.get(memoKey) - if (cached) { - return cached - } - - const loose = this.options.loose - // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` - const hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE] - range = range.replace(hr, hyphenReplace(this.options.includePrerelease)) - debug('hyphen replace', range) - // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` - range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace) - debug('comparator trim', range) - - // `~ 1.2.3` => `~1.2.3` - range = range.replace(re[t.TILDETRIM], tildeTrimReplace) - - // `^ 1.2.3` => `^1.2.3` - range = range.replace(re[t.CARETTRIM], caretTrimReplace) - - // normalize spaces - range = range.split(/\s+/).join(' ') - - // At this point, the range is completely trimmed and - // ready to be split into comparators. 
- - let rangeList = range - .split(' ') - .map(comp => parseComparator(comp, this.options)) - .join(' ') - .split(/\s+/) - // >=0.0.0 is equivalent to * - .map(comp => replaceGTE0(comp, this.options)) - - if (loose) { - // in loose mode, throw out any that are not valid comparators - rangeList = rangeList.filter(comp => { - debug('loose invalid filter', comp, this.options) - return !!comp.match(re[t.COMPARATORLOOSE]) - }) - } - debug('range list', rangeList) - - // if any comparators are the null set, then replace with JUST null set - // if more than one comparator, remove any * comparators - // also, don't include the same comparator more than once - const rangeMap = new Map() - const comparators = rangeList.map(comp => new Comparator(comp, this.options)) - for (const comp of comparators) { - if (isNullSet(comp)) { - return [comp] - } - rangeMap.set(comp.value, comp) - } - if (rangeMap.size > 1 && rangeMap.has('')) { - rangeMap.delete('') - } - - const result = [...rangeMap.values()] - cache.set(memoKey, result) - return result - } - - intersects (range, options) { - if (!(range instanceof Range)) { - throw new TypeError('a Range is required') - } - - return this.set.some((thisComparators) => { - return ( - isSatisfiable(thisComparators, options) && - range.set.some((rangeComparators) => { - return ( - isSatisfiable(rangeComparators, options) && - thisComparators.every((thisComparator) => { - return rangeComparators.every((rangeComparator) => { - return thisComparator.intersects(rangeComparator, options) - }) - }) - ) - }) - ) - }) - } - - // if ANY of the sets match ALL of its comparators, then pass - test (version) { - if (!version) { - return false - } - - if (typeof version === 'string') { - try { - version = new SemVer(version, this.options) - } catch (er) { - return false - } - } - - for (let i = 0; i < this.set.length; i++) { - if (testSet(this.set[i], version, this.options)) { - return true - } - } - return false - } -} -module.exports = Range - -const LRU = __nccwpck_require__(7129) -const cache = new LRU({ max: 1000 }) - -const parseOptions = __nccwpck_require__(785) -const Comparator = __nccwpck_require__(1532) -const debug = __nccwpck_require__(427) -const SemVer = __nccwpck_require__(8088) -const { - re, - t, - comparatorTrimReplace, - tildeTrimReplace, - caretTrimReplace, -} = __nccwpck_require__(9523) - -const isNullSet = c => c.value === '<0.0.0-0' -const isAny = c => c.value === '' - -// take a set of comparators and determine whether there -// exists a version which can satisfy it -const isSatisfiable = (comparators, options) => { - let result = true - const remainingComparators = comparators.slice() - let testComparator = remainingComparators.pop() - - while (result && remainingComparators.length) { - result = remainingComparators.every((otherComparator) => { - return testComparator.intersects(otherComparator, options) - }) - - testComparator = remainingComparators.pop() - } - - return result -} - -// comprised of xranges, tildes, stars, and gtlt's at this point. -// already replaced the hyphen ranges -// turn into a set of JUST comparators. 
-const parseComparator = (comp, options) => { - debug('comp', comp, options) - comp = replaceCarets(comp, options) - debug('caret', comp) - comp = replaceTildes(comp, options) - debug('tildes', comp) - comp = replaceXRanges(comp, options) - debug('xrange', comp) - comp = replaceStars(comp, options) - debug('stars', comp) - return comp -} - -const isX = id => !id || id.toLowerCase() === 'x' || id === '*' - -// ~, ~> --> * (any, kinda silly) -// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0-0 -// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0-0 -// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0-0 -// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0-0 -// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0-0 -const replaceTildes = (comp, options) => - comp.trim().split(/\s+/).map((c) => { - return replaceTilde(c, options) - }).join(' ') - -const replaceTilde = (comp, options) => { - const r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE] - return comp.replace(r, (_, M, m, p, pr) => { - debug('tilde', comp, _, M, m, p, pr) - let ret - - if (isX(M)) { - ret = '' - } else if (isX(m)) { - ret = `>=${M}.0.0 <${+M + 1}.0.0-0` - } else if (isX(p)) { - // ~1.2 == >=1.2.0 <1.3.0-0 - ret = `>=${M}.${m}.0 <${M}.${+m + 1}.0-0` - } else if (pr) { - debug('replaceTilde pr', pr) - ret = `>=${M}.${m}.${p}-${pr - } <${M}.${+m + 1}.0-0` - } else { - // ~1.2.3 == >=1.2.3 <1.3.0-0 - ret = `>=${M}.${m}.${p - } <${M}.${+m + 1}.0-0` - } - - debug('tilde return', ret) - return ret - }) -} - -// ^ --> * (any, kinda silly) -// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0-0 -// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0-0 -// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0-0 -// ^1.2.3 --> >=1.2.3 <2.0.0-0 -// ^1.2.0 --> >=1.2.0 <2.0.0-0 -const replaceCarets = (comp, options) => - comp.trim().split(/\s+/).map((c) => { - return replaceCaret(c, options) - }).join(' ') - -const replaceCaret = (comp, options) => { - debug('caret', comp, options) - const r = options.loose ? re[t.CARETLOOSE] : re[t.CARET] - const z = options.includePrerelease ? '-0' : '' - return comp.replace(r, (_, M, m, p, pr) => { - debug('caret', comp, _, M, m, p, pr) - let ret - - if (isX(M)) { - ret = '' - } else if (isX(m)) { - ret = `>=${M}.0.0${z} <${+M + 1}.0.0-0` - } else if (isX(p)) { - if (M === '0') { - ret = `>=${M}.${m}.0${z} <${M}.${+m + 1}.0-0` - } else { - ret = `>=${M}.${m}.0${z} <${+M + 1}.0.0-0` - } - } else if (pr) { - debug('replaceCaret pr', pr) - if (M === '0') { - if (m === '0') { - ret = `>=${M}.${m}.${p}-${pr - } <${M}.${m}.${+p + 1}-0` - } else { - ret = `>=${M}.${m}.${p}-${pr - } <${M}.${+m + 1}.0-0` - } - } else { - ret = `>=${M}.${m}.${p}-${pr - } <${+M + 1}.0.0-0` - } - } else { - debug('no pr') - if (M === '0') { - if (m === '0') { - ret = `>=${M}.${m}.${p - }${z} <${M}.${m}.${+p + 1}-0` - } else { - ret = `>=${M}.${m}.${p - }${z} <${M}.${+m + 1}.0-0` - } - } else { - ret = `>=${M}.${m}.${p - } <${+M + 1}.0.0-0` - } - } - - debug('caret return', ret) - return ret - }) -} - -const replaceXRanges = (comp, options) => { - debug('replaceXRanges', comp, options) - return comp.split(/\s+/).map((c) => { - return replaceXRange(c, options) - }).join(' ') -} - -const replaceXRange = (comp, options) => { - comp = comp.trim() - const r = options.loose ? 
re[t.XRANGELOOSE] : re[t.XRANGE] - return comp.replace(r, (ret, gtlt, M, m, p, pr) => { - debug('xRange', comp, ret, gtlt, M, m, p, pr) - const xM = isX(M) - const xm = xM || isX(m) - const xp = xm || isX(p) - const anyX = xp - - if (gtlt === '=' && anyX) { - gtlt = '' - } - - // if we're including prereleases in the match, then we need - // to fix this to -0, the lowest possible prerelease value - pr = options.includePrerelease ? '-0' : '' - - if (xM) { - if (gtlt === '>' || gtlt === '<') { - // nothing is allowed - ret = '<0.0.0-0' - } else { - // nothing is forbidden - ret = '*' - } - } else if (gtlt && anyX) { - // we know patch is an x, because we have any x at all. - // replace X with 0 - if (xm) { - m = 0 - } - p = 0 - - if (gtlt === '>') { - // >1 => >=2.0.0 - // >1.2 => >=1.3.0 - gtlt = '>=' - if (xm) { - M = +M + 1 - m = 0 - p = 0 - } else { - m = +m + 1 - p = 0 - } - } else if (gtlt === '<=') { - // <=0.7.x is actually <0.8.0, since any 0.7.x should - // pass. Similarly, <=7.x is actually <8.0.0, etc. - gtlt = '<' - if (xm) { - M = +M + 1 - } else { - m = +m + 1 - } - } - - if (gtlt === '<') { - pr = '-0' - } - - ret = `${gtlt + M}.${m}.${p}${pr}` - } else if (xm) { - ret = `>=${M}.0.0${pr} <${+M + 1}.0.0-0` - } else if (xp) { - ret = `>=${M}.${m}.0${pr - } <${M}.${+m + 1}.0-0` - } - - debug('xRange return', ret) - - return ret - }) -} - -// Because * is AND-ed with everything else in the comparator, -// and '' means "any version", just remove the *s entirely. -const replaceStars = (comp, options) => { - debug('replaceStars', comp, options) - // Looseness is ignored here. star is always as loose as it gets! - return comp.trim().replace(re[t.STAR], '') -} - -const replaceGTE0 = (comp, options) => { - debug('replaceGTE0', comp, options) - return comp.trim() - .replace(re[options.includePrerelease ? t.GTE0PRE : t.GTE0], '') -} - -// This function is passed to string.replace(re[t.HYPHENRANGE]) -// M, m, patch, prerelease, build -// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 -// 1.2.3 - 3.4 => >=1.2.0 <3.5.0-0 Any 3.4.x will do -// 1.2 - 3.4 => >=1.2.0 <3.5.0-0 -const hyphenReplace = incPr => ($0, - from, fM, fm, fp, fpr, fb, - to, tM, tm, tp, tpr, tb) => { - if (isX(fM)) { - from = '' - } else if (isX(fm)) { - from = `>=${fM}.0.0${incPr ? '-0' : ''}` - } else if (isX(fp)) { - from = `>=${fM}.${fm}.0${incPr ? '-0' : ''}` - } else if (fpr) { - from = `>=${from}` - } else { - from = `>=${from}${incPr ? '-0' : ''}` - } - - if (isX(tM)) { - to = '' - } else if (isX(tm)) { - to = `<${+tM + 1}.0.0-0` - } else if (isX(tp)) { - to = `<${tM}.${+tm + 1}.0-0` - } else if (tpr) { - to = `<=${tM}.${tm}.${tp}-${tpr}` - } else if (incPr) { - to = `<${tM}.${tm}.${+tp + 1}-0` - } else { - to = `<=${to}` - } - - return (`${from} ${to}`).trim() -} - -const testSet = (set, version, options) => { - for (let i = 0; i < set.length; i++) { - if (!set[i].test(version)) { - return false - } - } - - if (version.prerelease.length && !options.includePrerelease) { - // Find the set of versions that are allowed to have prereleases - // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 - // That should allow `1.2.3-pr.2` to pass. - // However, `1.2.4-alpha.notready` should NOT be allowed, - // even though it's within the range set by the comparators. 
- for (let i = 0; i < set.length; i++) { - debug(set[i].semver) - if (set[i].semver === Comparator.ANY) { - continue - } - - if (set[i].semver.prerelease.length > 0) { - const allowed = set[i].semver - if (allowed.major === version.major && - allowed.minor === version.minor && - allowed.patch === version.patch) { - return true - } - } - } - - // Version has a -pre, but it's not one of the ones we like. - return false - } - - return true -} - - -/***/ }), - -/***/ 8088: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const debug = __nccwpck_require__(427) -const { MAX_LENGTH, MAX_SAFE_INTEGER } = __nccwpck_require__(2293) -const { re, t } = __nccwpck_require__(9523) - -const parseOptions = __nccwpck_require__(785) -const { compareIdentifiers } = __nccwpck_require__(2463) -class SemVer { - constructor (version, options) { - options = parseOptions(options) - - if (version instanceof SemVer) { - if (version.loose === !!options.loose && - version.includePrerelease === !!options.includePrerelease) { - return version - } else { - version = version.version - } - } else if (typeof version !== 'string') { - throw new TypeError(`Invalid Version: ${version}`) - } - - if (version.length > MAX_LENGTH) { - throw new TypeError( - `version is longer than ${MAX_LENGTH} characters` - ) - } - - debug('SemVer', version, options) - this.options = options - this.loose = !!options.loose - // this isn't actually relevant for versions, but keep it so that we - // don't run into trouble passing this.options around. - this.includePrerelease = !!options.includePrerelease - - const m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL]) - - if (!m) { - throw new TypeError(`Invalid Version: ${version}`) - } - - this.raw = version - - // these are actually numbers - this.major = +m[1] - this.minor = +m[2] - this.patch = +m[3] - - if (this.major > MAX_SAFE_INTEGER || this.major < 0) { - throw new TypeError('Invalid major version') - } - - if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { - throw new TypeError('Invalid minor version') - } - - if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { - throw new TypeError('Invalid patch version') - } - - // numberify any prerelease numeric ids - if (!m[4]) { - this.prerelease = [] - } else { - this.prerelease = m[4].split('.').map((id) => { - if (/^[0-9]+$/.test(id)) { - const num = +id - if (num >= 0 && num < MAX_SAFE_INTEGER) { - return num - } - } - return id - }) - } - - this.build = m[5] ? 
m[5].split('.') : [] - this.format() - } - - format () { - this.version = `${this.major}.${this.minor}.${this.patch}` - if (this.prerelease.length) { - this.version += `-${this.prerelease.join('.')}` - } - return this.version - } - - toString () { - return this.version - } - - compare (other) { - debug('SemVer.compare', this.version, this.options, other) - if (!(other instanceof SemVer)) { - if (typeof other === 'string' && other === this.version) { - return 0 - } - other = new SemVer(other, this.options) - } - - if (other.version === this.version) { - return 0 - } - - return this.compareMain(other) || this.comparePre(other) - } - - compareMain (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } - - return ( - compareIdentifiers(this.major, other.major) || - compareIdentifiers(this.minor, other.minor) || - compareIdentifiers(this.patch, other.patch) - ) - } - - comparePre (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } - - // NOT having a prerelease is > having one - if (this.prerelease.length && !other.prerelease.length) { - return -1 - } else if (!this.prerelease.length && other.prerelease.length) { - return 1 - } else if (!this.prerelease.length && !other.prerelease.length) { - return 0 - } - - let i = 0 - do { - const a = this.prerelease[i] - const b = other.prerelease[i] - debug('prerelease compare', i, a, b) - if (a === undefined && b === undefined) { - return 0 - } else if (b === undefined) { - return 1 - } else if (a === undefined) { - return -1 - } else if (a === b) { - continue - } else { - return compareIdentifiers(a, b) - } - } while (++i) - } - - compareBuild (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } - - let i = 0 - do { - const a = this.build[i] - const b = other.build[i] - debug('prerelease compare', i, a, b) - if (a === undefined && b === undefined) { - return 0 - } else if (b === undefined) { - return 1 - } else if (a === undefined) { - return -1 - } else if (a === b) { - continue - } else { - return compareIdentifiers(a, b) - } - } while (++i) - } - - // preminor will bump the version up to the next minor release, and immediately - // down to pre-release. premajor and prepatch work the same way. - inc (release, identifier) { - switch (release) { - case 'premajor': - this.prerelease.length = 0 - this.patch = 0 - this.minor = 0 - this.major++ - this.inc('pre', identifier) - break - case 'preminor': - this.prerelease.length = 0 - this.patch = 0 - this.minor++ - this.inc('pre', identifier) - break - case 'prepatch': - // If this is already a prerelease, it will bump to the next version - // drop any prereleases that might already exist, since they are not - // relevant at this point. - this.prerelease.length = 0 - this.inc('patch', identifier) - this.inc('pre', identifier) - break - // If the input is a non-prerelease version, this acts the same as - // prepatch. - case 'prerelease': - if (this.prerelease.length === 0) { - this.inc('patch', identifier) - } - this.inc('pre', identifier) - break - - case 'major': - // If this is a pre-major version, bump up to the same major version. - // Otherwise increment major. - // 1.0.0-5 bumps to 1.0.0 - // 1.1.0 bumps to 2.0.0 - if ( - this.minor !== 0 || - this.patch !== 0 || - this.prerelease.length === 0 - ) { - this.major++ - } - this.minor = 0 - this.patch = 0 - this.prerelease = [] - break - case 'minor': - // If this is a pre-minor version, bump up to the same minor version. 
- // Otherwise increment minor. - // 1.2.0-5 bumps to 1.2.0 - // 1.2.1 bumps to 1.3.0 - if (this.patch !== 0 || this.prerelease.length === 0) { - this.minor++ - } - this.patch = 0 - this.prerelease = [] - break - case 'patch': - // If this is not a pre-release version, it will increment the patch. - // If it is a pre-release it will bump up to the same patch version. - // 1.2.0-5 patches to 1.2.0 - // 1.2.0 patches to 1.2.1 - if (this.prerelease.length === 0) { - this.patch++ - } - this.prerelease = [] - break - // This probably shouldn't be used publicly. - // 1.0.0 'pre' would become 1.0.0-0 which is the wrong direction. - case 'pre': - if (this.prerelease.length === 0) { - this.prerelease = [0] - } else { - let i = this.prerelease.length - while (--i >= 0) { - if (typeof this.prerelease[i] === 'number') { - this.prerelease[i]++ - i = -2 - } - } - if (i === -1) { - // didn't increment anything - this.prerelease.push(0) - } - } - if (identifier) { - // 1.2.0-beta.1 bumps to 1.2.0-beta.2, - // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 - if (compareIdentifiers(this.prerelease[0], identifier) === 0) { - if (isNaN(this.prerelease[1])) { - this.prerelease = [identifier, 0] - } - } else { - this.prerelease = [identifier, 0] - } - } - break - - default: - throw new Error(`invalid increment argument: ${release}`) - } - this.format() - this.raw = this.version - return this - } -} - -module.exports = SemVer - - -/***/ }), - -/***/ 8848: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const parse = __nccwpck_require__(5925) -const clean = (version, options) => { - const s = parse(version.trim().replace(/^[=v]+/, ''), options) - return s ? s.version : null -} -module.exports = clean - - -/***/ }), - -/***/ 5098: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const eq = __nccwpck_require__(1898) -const neq = __nccwpck_require__(6017) -const gt = __nccwpck_require__(4123) -const gte = __nccwpck_require__(5522) -const lt = __nccwpck_require__(194) -const lte = __nccwpck_require__(7520) - -const cmp = (a, op, b, loose) => { - switch (op) { - case '===': - if (typeof a === 'object') { - a = a.version - } - if (typeof b === 'object') { - b = b.version - } - return a === b - - case '!==': - if (typeof a === 'object') { - a = a.version - } - if (typeof b === 'object') { - b = b.version - } - return a !== b - - case '': - case '=': - case '==': - return eq(a, b, loose) - - case '!=': - return neq(a, b, loose) - - case '>': - return gt(a, b, loose) - - case '>=': - return gte(a, b, loose) - - case '<': - return lt(a, b, loose) - - case '<=': - return lte(a, b, loose) - - default: - throw new TypeError(`Invalid operator: ${op}`) - } -} -module.exports = cmp - - -/***/ }), - -/***/ 3466: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const SemVer = __nccwpck_require__(8088) -const parse = __nccwpck_require__(5925) -const { re, t } = __nccwpck_require__(9523) - -const coerce = (version, options) => { - if (version instanceof SemVer) { - return version - } - - if (typeof version === 'number') { - version = String(version) - } - - if (typeof version !== 'string') { - return null - } - - options = options || {} - - let match = null - if (!options.rtl) { - match = version.match(re[t.COERCE]) - } else { - // Find the right-most coercible string that does not share - // a terminus with a more left-ward coercible string. 
- // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4' - // - // Walk through the string checking with a /g regexp - // Manually set the index so as to pick up overlapping matches. - // Stop when we get a match that ends at the string end, since no - // coercible string can be more right-ward without the same terminus. - let next - while ((next = re[t.COERCERTL].exec(version)) && - (!match || match.index + match[0].length !== version.length) - ) { - if (!match || - next.index + next[0].length !== match.index + match[0].length) { - match = next - } - re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length - } - // leave it in a clean state - re[t.COERCERTL].lastIndex = -1 - } - - if (match === null) { - return null - } - - return parse(`${match[2]}.${match[3] || '0'}.${match[4] || '0'}`, options) -} -module.exports = coerce - - -/***/ }), - -/***/ 2156: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const SemVer = __nccwpck_require__(8088) -const compareBuild = (a, b, loose) => { - const versionA = new SemVer(a, loose) - const versionB = new SemVer(b, loose) - return versionA.compare(versionB) || versionA.compareBuild(versionB) -} -module.exports = compareBuild - - -/***/ }), - -/***/ 2804: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const compare = __nccwpck_require__(4309) -const compareLoose = (a, b) => compare(a, b, true) -module.exports = compareLoose - - -/***/ }), - -/***/ 4309: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const SemVer = __nccwpck_require__(8088) -const compare = (a, b, loose) => - new SemVer(a, loose).compare(new SemVer(b, loose)) - -module.exports = compare - - -/***/ }), - -/***/ 4297: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const parse = __nccwpck_require__(5925) -const eq = __nccwpck_require__(1898) - -const diff = (version1, version2) => { - if (eq(version1, version2)) { - return null - } else { - const v1 = parse(version1) - const v2 = parse(version2) - const hasPre = v1.prerelease.length || v2.prerelease.length - const prefix = hasPre ? 'pre' : '' - const defaultResult = hasPre ? 'prerelease' : '' - for (const key in v1) { - if (key === 'major' || key === 'minor' || key === 'patch') { - if (v1[key] !== v2[key]) { - return prefix + key - } - } - } - return defaultResult // may be undefined - } -} -module.exports = diff - - -/***/ }), - -/***/ 1898: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const compare = __nccwpck_require__(4309) -const eq = (a, b, loose) => compare(a, b, loose) === 0 -module.exports = eq - - -/***/ }), - -/***/ 4123: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const compare = __nccwpck_require__(4309) -const gt = (a, b, loose) => compare(a, b, loose) > 0 -module.exports = gt - - -/***/ }), - -/***/ 5522: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const compare = __nccwpck_require__(4309) -const gte = (a, b, loose) => compare(a, b, loose) >= 0 -module.exports = gte - - -/***/ }), - -/***/ 900: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const SemVer = __nccwpck_require__(8088) - -const inc = (version, release, options, identifier) => { - if (typeof (options) === 'string') { - identifier = options - options = undefined - } - - try { - return new SemVer( - version instanceof SemVer ? 
version.version : version, - options - ).inc(release, identifier).version - } catch (er) { - return null - } -} -module.exports = inc - - -/***/ }), - -/***/ 194: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const compare = __nccwpck_require__(4309) -const lt = (a, b, loose) => compare(a, b, loose) < 0 -module.exports = lt - - -/***/ }), - -/***/ 7520: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const compare = __nccwpck_require__(4309) -const lte = (a, b, loose) => compare(a, b, loose) <= 0 -module.exports = lte - - -/***/ }), - -/***/ 6688: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const SemVer = __nccwpck_require__(8088) -const major = (a, loose) => new SemVer(a, loose).major -module.exports = major - - -/***/ }), - -/***/ 8447: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const SemVer = __nccwpck_require__(8088) -const minor = (a, loose) => new SemVer(a, loose).minor -module.exports = minor - - -/***/ }), - -/***/ 6017: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const compare = __nccwpck_require__(4309) -const neq = (a, b, loose) => compare(a, b, loose) !== 0 -module.exports = neq - - -/***/ }), - -/***/ 5925: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const { MAX_LENGTH } = __nccwpck_require__(2293) -const { re, t } = __nccwpck_require__(9523) -const SemVer = __nccwpck_require__(8088) - -const parseOptions = __nccwpck_require__(785) -const parse = (version, options) => { - options = parseOptions(options) - - if (version instanceof SemVer) { - return version - } - - if (typeof version !== 'string') { - return null - } - - if (version.length > MAX_LENGTH) { - return null - } - - const r = options.loose ? re[t.LOOSE] : re[t.FULL] - if (!r.test(version)) { - return null - } - - try { - return new SemVer(version, options) - } catch (er) { - return null - } -} - -module.exports = parse - - -/***/ }), - -/***/ 2866: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const SemVer = __nccwpck_require__(8088) -const patch = (a, loose) => new SemVer(a, loose).patch -module.exports = patch - - -/***/ }), - -/***/ 6014: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const parse = __nccwpck_require__(5925) -const prerelease = (version, options) => { - const parsed = parse(version, options) - return (parsed && parsed.prerelease.length) ? 
parsed.prerelease : null -} -module.exports = prerelease - - -/***/ }), - -/***/ 6417: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const compare = __nccwpck_require__(4309) -const rcompare = (a, b, loose) => compare(b, a, loose) -module.exports = rcompare - - -/***/ }), - -/***/ 8701: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const compareBuild = __nccwpck_require__(2156) -const rsort = (list, loose) => list.sort((a, b) => compareBuild(b, a, loose)) -module.exports = rsort - - -/***/ }), - -/***/ 6055: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const Range = __nccwpck_require__(9828) -const satisfies = (version, range, options) => { - try { - range = new Range(range, options) - } catch (er) { - return false - } - return range.test(version) -} -module.exports = satisfies - - -/***/ }), - -/***/ 1426: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const compareBuild = __nccwpck_require__(2156) -const sort = (list, loose) => list.sort((a, b) => compareBuild(a, b, loose)) -module.exports = sort - - -/***/ }), - -/***/ 9601: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const parse = __nccwpck_require__(5925) -const valid = (version, options) => { - const v = parse(version, options) - return v ? v.version : null -} -module.exports = valid - - -/***/ }), - -/***/ 1383: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -// just pre-load all the stuff that index.js lazily exports -const internalRe = __nccwpck_require__(9523) -module.exports = { - re: internalRe.re, - src: internalRe.src, - tokens: internalRe.t, - SEMVER_SPEC_VERSION: __nccwpck_require__(2293).SEMVER_SPEC_VERSION, - SemVer: __nccwpck_require__(8088), - compareIdentifiers: __nccwpck_require__(2463).compareIdentifiers, - rcompareIdentifiers: __nccwpck_require__(2463).rcompareIdentifiers, - parse: __nccwpck_require__(5925), - valid: __nccwpck_require__(9601), - clean: __nccwpck_require__(8848), - inc: __nccwpck_require__(900), - diff: __nccwpck_require__(4297), - major: __nccwpck_require__(6688), - minor: __nccwpck_require__(8447), - patch: __nccwpck_require__(2866), - prerelease: __nccwpck_require__(6014), - compare: __nccwpck_require__(4309), - rcompare: __nccwpck_require__(6417), - compareLoose: __nccwpck_require__(2804), - compareBuild: __nccwpck_require__(2156), - sort: __nccwpck_require__(1426), - rsort: __nccwpck_require__(8701), - gt: __nccwpck_require__(4123), - lt: __nccwpck_require__(194), - eq: __nccwpck_require__(1898), - neq: __nccwpck_require__(6017), - gte: __nccwpck_require__(5522), - lte: __nccwpck_require__(7520), - cmp: __nccwpck_require__(5098), - coerce: __nccwpck_require__(3466), - Comparator: __nccwpck_require__(1532), - Range: __nccwpck_require__(9828), - satisfies: __nccwpck_require__(6055), - toComparators: __nccwpck_require__(2706), - maxSatisfying: __nccwpck_require__(579), - minSatisfying: __nccwpck_require__(832), - minVersion: __nccwpck_require__(4179), - validRange: __nccwpck_require__(2098), - outside: __nccwpck_require__(420), - gtr: __nccwpck_require__(9380), - ltr: __nccwpck_require__(3323), - intersects: __nccwpck_require__(7008), - simplifyRange: __nccwpck_require__(5297), - subset: __nccwpck_require__(7863), -} - - -/***/ }), - -/***/ 2293: -/***/ ((module) => { - -// Note: this is the semver.org version of the spec that it implements -// Not necessarily the package version of this code. 
-const SEMVER_SPEC_VERSION = '2.0.0' - -const MAX_LENGTH = 256 -const MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || -/* istanbul ignore next */ 9007199254740991 - -// Max safe segment length for coercion. -const MAX_SAFE_COMPONENT_LENGTH = 16 - -module.exports = { - SEMVER_SPEC_VERSION, - MAX_LENGTH, - MAX_SAFE_INTEGER, - MAX_SAFE_COMPONENT_LENGTH, -} - - -/***/ }), - -/***/ 427: -/***/ ((module) => { - -const debug = ( - typeof process === 'object' && - process.env && - process.env.NODE_DEBUG && - /\bsemver\b/i.test(process.env.NODE_DEBUG) -) ? (...args) => console.error('SEMVER', ...args) - : () => {} - -module.exports = debug - - -/***/ }), - -/***/ 2463: -/***/ ((module) => { - -const numeric = /^[0-9]+$/ -const compareIdentifiers = (a, b) => { - const anum = numeric.test(a) - const bnum = numeric.test(b) - - if (anum && bnum) { - a = +a - b = +b - } - - return a === b ? 0 - : (anum && !bnum) ? -1 - : (bnum && !anum) ? 1 - : a < b ? -1 - : 1 -} - -const rcompareIdentifiers = (a, b) => compareIdentifiers(b, a) - -module.exports = { - compareIdentifiers, - rcompareIdentifiers, -} - - -/***/ }), - -/***/ 785: -/***/ ((module) => { - -// parse out just the options we care about so we always get a consistent -// obj with keys in a consistent order. -const opts = ['includePrerelease', 'loose', 'rtl'] -const parseOptions = options => - !options ? {} - : typeof options !== 'object' ? { loose: true } - : opts.filter(k => options[k]).reduce((o, k) => { - o[k] = true - return o - }, {}) -module.exports = parseOptions - - -/***/ }), - -/***/ 9523: -/***/ ((module, exports, __nccwpck_require__) => { - -const { MAX_SAFE_COMPONENT_LENGTH } = __nccwpck_require__(2293) -const debug = __nccwpck_require__(427) -exports = module.exports = {} - -// The actual regexps go on exports.re -const re = exports.re = [] -const src = exports.src = [] -const t = exports.t = {} -let R = 0 - -const createToken = (name, value, isGlobal) => { - const index = R++ - debug(name, index, value) - t[name] = index - src[index] = value - re[index] = new RegExp(value, isGlobal ? 'g' : undefined) -} - -// The following Regular Expressions can be used for tokenizing, -// validating, and parsing SemVer version strings. - -// ## Numeric Identifier -// A single `0`, or a non-zero digit followed by zero or more digits. - -createToken('NUMERICIDENTIFIER', '0|[1-9]\\d*') -createToken('NUMERICIDENTIFIERLOOSE', '[0-9]+') - -// ## Non-numeric Identifier -// Zero or more digits, followed by a letter or hyphen, and then zero or -// more letters, digits, or hyphens. - -createToken('NONNUMERICIDENTIFIER', '\\d*[a-zA-Z-][a-zA-Z0-9-]*') - -// ## Main Version -// Three dot-separated numeric identifiers. - -createToken('MAINVERSION', `(${src[t.NUMERICIDENTIFIER]})\\.` + - `(${src[t.NUMERICIDENTIFIER]})\\.` + - `(${src[t.NUMERICIDENTIFIER]})`) - -createToken('MAINVERSIONLOOSE', `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` + - `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` + - `(${src[t.NUMERICIDENTIFIERLOOSE]})`) - -// ## Pre-release Version Identifier -// A numeric identifier, or a non-numeric identifier. - -createToken('PRERELEASEIDENTIFIER', `(?:${src[t.NUMERICIDENTIFIER] -}|${src[t.NONNUMERICIDENTIFIER]})`) - -createToken('PRERELEASEIDENTIFIERLOOSE', `(?:${src[t.NUMERICIDENTIFIERLOOSE] -}|${src[t.NONNUMERICIDENTIFIER]})`) - -// ## Pre-release Version -// Hyphen, followed by one or more dot-separated pre-release version -// identifiers. 
- -createToken('PRERELEASE', `(?:-(${src[t.PRERELEASEIDENTIFIER] -}(?:\\.${src[t.PRERELEASEIDENTIFIER]})*))`) - -createToken('PRERELEASELOOSE', `(?:-?(${src[t.PRERELEASEIDENTIFIERLOOSE] -}(?:\\.${src[t.PRERELEASEIDENTIFIERLOOSE]})*))`) - -// ## Build Metadata Identifier -// Any combination of digits, letters, or hyphens. - -createToken('BUILDIDENTIFIER', '[0-9A-Za-z-]+') - -// ## Build Metadata -// Plus sign, followed by one or more period-separated build metadata -// identifiers. - -createToken('BUILD', `(?:\\+(${src[t.BUILDIDENTIFIER] -}(?:\\.${src[t.BUILDIDENTIFIER]})*))`) - -// ## Full Version String -// A main version, followed optionally by a pre-release version and -// build metadata. - -// Note that the only major, minor, patch, and pre-release sections of -// the version string are capturing groups. The build metadata is not a -// capturing group, because it should not ever be used in version -// comparison. - -createToken('FULLPLAIN', `v?${src[t.MAINVERSION] -}${src[t.PRERELEASE]}?${ - src[t.BUILD]}?`) - -createToken('FULL', `^${src[t.FULLPLAIN]}$`) - -// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. -// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty -// common in the npm registry. -createToken('LOOSEPLAIN', `[v=\\s]*${src[t.MAINVERSIONLOOSE] -}${src[t.PRERELEASELOOSE]}?${ - src[t.BUILD]}?`) - -createToken('LOOSE', `^${src[t.LOOSEPLAIN]}$`) - -createToken('GTLT', '((?:<|>)?=?)') - -// Something like "2.*" or "1.2.x". -// Note that "x.x" is a valid xRange identifer, meaning "any version" -// Only the first item is strictly required. -createToken('XRANGEIDENTIFIERLOOSE', `${src[t.NUMERICIDENTIFIERLOOSE]}|x|X|\\*`) -createToken('XRANGEIDENTIFIER', `${src[t.NUMERICIDENTIFIER]}|x|X|\\*`) - -createToken('XRANGEPLAIN', `[v=\\s]*(${src[t.XRANGEIDENTIFIER]})` + - `(?:\\.(${src[t.XRANGEIDENTIFIER]})` + - `(?:\\.(${src[t.XRANGEIDENTIFIER]})` + - `(?:${src[t.PRERELEASE]})?${ - src[t.BUILD]}?` + - `)?)?`) - -createToken('XRANGEPLAINLOOSE', `[v=\\s]*(${src[t.XRANGEIDENTIFIERLOOSE]})` + - `(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` + - `(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` + - `(?:${src[t.PRERELEASELOOSE]})?${ - src[t.BUILD]}?` + - `)?)?`) - -createToken('XRANGE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAIN]}$`) -createToken('XRANGELOOSE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAINLOOSE]}$`) - -// Coercion. -// Extract anything that could conceivably be a part of a valid semver -createToken('COERCE', `${'(^|[^\\d])' + - '(\\d{1,'}${MAX_SAFE_COMPONENT_LENGTH}})` + - `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` + - `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` + - `(?:$|[^\\d])`) -createToken('COERCERTL', src[t.COERCE], true) - -// Tilde ranges. -// Meaning is "reasonably at or greater than" -createToken('LONETILDE', '(?:~>?)') - -createToken('TILDETRIM', `(\\s*)${src[t.LONETILDE]}\\s+`, true) -exports.tildeTrimReplace = '$1~' - -createToken('TILDE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAIN]}$`) -createToken('TILDELOOSE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAINLOOSE]}$`) - -// Caret ranges. 
-// Meaning is "at least and backwards compatible with" -createToken('LONECARET', '(?:\\^)') - -createToken('CARETTRIM', `(\\s*)${src[t.LONECARET]}\\s+`, true) -exports.caretTrimReplace = '$1^' - -createToken('CARET', `^${src[t.LONECARET]}${src[t.XRANGEPLAIN]}$`) -createToken('CARETLOOSE', `^${src[t.LONECARET]}${src[t.XRANGEPLAINLOOSE]}$`) - -// A simple gt/lt/eq thing, or just "" to indicate "any version" -createToken('COMPARATORLOOSE', `^${src[t.GTLT]}\\s*(${src[t.LOOSEPLAIN]})$|^$`) -createToken('COMPARATOR', `^${src[t.GTLT]}\\s*(${src[t.FULLPLAIN]})$|^$`) - -// An expression to strip any whitespace between the gtlt and the thing -// it modifies, so that `> 1.2.3` ==> `>1.2.3` -createToken('COMPARATORTRIM', `(\\s*)${src[t.GTLT] -}\\s*(${src[t.LOOSEPLAIN]}|${src[t.XRANGEPLAIN]})`, true) -exports.comparatorTrimReplace = '$1$2$3' - -// Something like `1.2.3 - 1.2.4` -// Note that these all use the loose form, because they'll be -// checked against either the strict or loose comparator form -// later. -createToken('HYPHENRANGE', `^\\s*(${src[t.XRANGEPLAIN]})` + - `\\s+-\\s+` + - `(${src[t.XRANGEPLAIN]})` + - `\\s*$`) - -createToken('HYPHENRANGELOOSE', `^\\s*(${src[t.XRANGEPLAINLOOSE]})` + - `\\s+-\\s+` + - `(${src[t.XRANGEPLAINLOOSE]})` + - `\\s*$`) - -// Star ranges basically just allow anything at all. -createToken('STAR', '(<|>)?=?\\s*\\*') -// >=0.0.0 is like a star -createToken('GTE0', '^\\s*>=\\s*0\\.0\\.0\\s*$') -createToken('GTE0PRE', '^\\s*>=\\s*0\\.0\\.0-0\\s*$') - - -/***/ }), - -/***/ 9380: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -// Determine if version is greater than all the versions possible in the range. -const outside = __nccwpck_require__(420) -const gtr = (version, range, options) => outside(version, range, '>', options) -module.exports = gtr - - -/***/ }), - -/***/ 7008: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const Range = __nccwpck_require__(9828) -const intersects = (r1, r2, options) => { - r1 = new Range(r1, options) - r2 = new Range(r2, options) - return r1.intersects(r2) -} -module.exports = intersects - - -/***/ }), - -/***/ 3323: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const outside = __nccwpck_require__(420) -// Determine if version is less than all the versions possible in the range -const ltr = (version, range, options) => outside(version, range, '<', options) -module.exports = ltr - - -/***/ }), - -/***/ 579: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const SemVer = __nccwpck_require__(8088) -const Range = __nccwpck_require__(9828) - -const maxSatisfying = (versions, range, options) => { - let max = null - let maxSV = null - let rangeObj = null - try { - rangeObj = new Range(range, options) - } catch (er) { - return null - } - versions.forEach((v) => { - if (rangeObj.test(v)) { - // satisfies(v, range, options) - if (!max || maxSV.compare(v) === -1) { - // compare(max, v, true) - max = v - maxSV = new SemVer(max, options) - } - } - }) - return max -} -module.exports = maxSatisfying - - -/***/ }), - -/***/ 832: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const SemVer = __nccwpck_require__(8088) -const Range = __nccwpck_require__(9828) -const minSatisfying = (versions, range, options) => { - let min = null - let minSV = null - let rangeObj = null - try { - rangeObj = new Range(range, options) - } catch (er) { - return null - } - versions.forEach((v) => { - if (rangeObj.test(v)) { - // satisfies(v, range, 
options) - if (!min || minSV.compare(v) === 1) { - // compare(min, v, true) - min = v - minSV = new SemVer(min, options) - } - } - }) - return min -} -module.exports = minSatisfying - - -/***/ }), - -/***/ 4179: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const SemVer = __nccwpck_require__(8088) -const Range = __nccwpck_require__(9828) -const gt = __nccwpck_require__(4123) - -const minVersion = (range, loose) => { - range = new Range(range, loose) - - let minver = new SemVer('0.0.0') - if (range.test(minver)) { - return minver - } - - minver = new SemVer('0.0.0-0') - if (range.test(minver)) { - return minver - } - - minver = null - for (let i = 0; i < range.set.length; ++i) { - const comparators = range.set[i] - - let setMin = null - comparators.forEach((comparator) => { - // Clone to avoid manipulating the comparator's semver object. - const compver = new SemVer(comparator.semver.version) - switch (comparator.operator) { - case '>': - if (compver.prerelease.length === 0) { - compver.patch++ - } else { - compver.prerelease.push(0) - } - compver.raw = compver.format() - /* fallthrough */ - case '': - case '>=': - if (!setMin || gt(compver, setMin)) { - setMin = compver - } - break - case '<': - case '<=': - /* Ignore maximum versions */ - break - /* istanbul ignore next */ - default: - throw new Error(`Unexpected operation: ${comparator.operator}`) - } - }) - if (setMin && (!minver || gt(minver, setMin))) { - minver = setMin - } - } - - if (minver && range.test(minver)) { - return minver - } - - return null -} -module.exports = minVersion - - -/***/ }), - -/***/ 420: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const SemVer = __nccwpck_require__(8088) -const Comparator = __nccwpck_require__(1532) -const { ANY } = Comparator -const Range = __nccwpck_require__(9828) -const satisfies = __nccwpck_require__(6055) -const gt = __nccwpck_require__(4123) -const lt = __nccwpck_require__(194) -const lte = __nccwpck_require__(7520) -const gte = __nccwpck_require__(5522) - -const outside = (version, range, hilo, options) => { - version = new SemVer(version, options) - range = new Range(range, options) - - let gtfn, ltefn, ltfn, comp, ecomp - switch (hilo) { - case '>': - gtfn = gt - ltefn = lte - ltfn = lt - comp = '>' - ecomp = '>=' - break - case '<': - gtfn = lt - ltefn = gte - ltfn = gt - comp = '<' - ecomp = '<=' - break - default: - throw new TypeError('Must provide a hilo val of "<" or ">"') - } - - // If it satisfies the range it is not outside - if (satisfies(version, range, options)) { - return false - } - - // From now on, variable terms are as if we're in "gtr" mode. - // but note that everything is flipped for the "ltr" function. 
- - for (let i = 0; i < range.set.length; ++i) { - const comparators = range.set[i] - - let high = null - let low = null - - comparators.forEach((comparator) => { - if (comparator.semver === ANY) { - comparator = new Comparator('>=0.0.0') - } - high = high || comparator - low = low || comparator - if (gtfn(comparator.semver, high.semver, options)) { - high = comparator - } else if (ltfn(comparator.semver, low.semver, options)) { - low = comparator - } - }) - - // If the edge version comparator has a operator then our version - // isn't outside it - if (high.operator === comp || high.operator === ecomp) { - return false - } - - // If the lowest version comparator has an operator and our version - // is less than it then it isn't higher than the range - if ((!low.operator || low.operator === comp) && - ltefn(version, low.semver)) { - return false - } else if (low.operator === ecomp && ltfn(version, low.semver)) { - return false - } - } - return true -} - -module.exports = outside - - -/***/ }), - -/***/ 5297: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -// given a set of versions and a range, create a "simplified" range -// that includes the same versions that the original range does -// If the original range is shorter than the simplified one, return that. -const satisfies = __nccwpck_require__(6055) -const compare = __nccwpck_require__(4309) -module.exports = (versions, range, options) => { - const set = [] - let first = null - let prev = null - const v = versions.sort((a, b) => compare(a, b, options)) - for (const version of v) { - const included = satisfies(version, range, options) - if (included) { - prev = version - if (!first) { - first = version - } - } else { - if (prev) { - set.push([first, prev]) - } - prev = null - first = null - } - } - if (first) { - set.push([first, null]) - } - - const ranges = [] - for (const [min, max] of set) { - if (min === max) { - ranges.push(min) - } else if (!max && min === v[0]) { - ranges.push('*') - } else if (!max) { - ranges.push(`>=${min}`) - } else if (min === v[0]) { - ranges.push(`<=${max}`) - } else { - ranges.push(`${min} - ${max}`) - } - } - const simplified = ranges.join(' || ') - const original = typeof range.raw === 'string' ? range.raw : String(range) - return simplified.length < original.length ? 
simplified : range -} - - -/***/ }), - -/***/ 7863: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const Range = __nccwpck_require__(9828) -const Comparator = __nccwpck_require__(1532) -const { ANY } = Comparator -const satisfies = __nccwpck_require__(6055) -const compare = __nccwpck_require__(4309) - -// Complex range `r1 || r2 || ...` is a subset of `R1 || R2 || ...` iff: -// - Every simple range `r1, r2, ...` is a null set, OR -// - Every simple range `r1, r2, ...` which is not a null set is a subset of -// some `R1, R2, ...` -// -// Simple range `c1 c2 ...` is a subset of simple range `C1 C2 ...` iff: -// - If c is only the ANY comparator -// - If C is only the ANY comparator, return true -// - Else if in prerelease mode, return false -// - else replace c with `[>=0.0.0]` -// - If C is only the ANY comparator -// - if in prerelease mode, return true -// - else replace C with `[>=0.0.0]` -// - Let EQ be the set of = comparators in c -// - If EQ is more than one, return true (null set) -// - Let GT be the highest > or >= comparator in c -// - Let LT be the lowest < or <= comparator in c -// - If GT and LT, and GT.semver > LT.semver, return true (null set) -// - If any C is a = range, and GT or LT are set, return false -// - If EQ -// - If GT, and EQ does not satisfy GT, return true (null set) -// - If LT, and EQ does not satisfy LT, return true (null set) -// - If EQ satisfies every C, return true -// - Else return false -// - If GT -// - If GT.semver is lower than any > or >= comp in C, return false -// - If GT is >=, and GT.semver does not satisfy every C, return false -// - If GT.semver has a prerelease, and not in prerelease mode -// - If no C has a prerelease and the GT.semver tuple, return false -// - If LT -// - If LT.semver is greater than any < or <= comp in C, return false -// - If LT is <=, and LT.semver does not satisfy every C, return false -// - If GT.semver has a prerelease, and not in prerelease mode -// - If no C has a prerelease and the LT.semver tuple, return false -// - Else return true - -const subset = (sub, dom, options = {}) => { - if (sub === dom) { - return true - } - - sub = new Range(sub, options) - dom = new Range(dom, options) - let sawNonNull = false - - OUTER: for (const simpleSub of sub.set) { - for (const simpleDom of dom.set) { - const isSub = simpleSubset(simpleSub, simpleDom, options) - sawNonNull = sawNonNull || isSub !== null - if (isSub) { - continue OUTER - } - } - // the null set is a subset of everything, but null simple ranges in - // a complex range should be ignored. so if we saw a non-null range, - // then we know this isn't a subset, but if EVERY simple range was null, - // then it is a subset. 
- if (sawNonNull) { - return false - } - } - return true -} - -const simpleSubset = (sub, dom, options) => { - if (sub === dom) { - return true - } - - if (sub.length === 1 && sub[0].semver === ANY) { - if (dom.length === 1 && dom[0].semver === ANY) { - return true - } else if (options.includePrerelease) { - sub = [new Comparator('>=0.0.0-0')] - } else { - sub = [new Comparator('>=0.0.0')] - } - } - - if (dom.length === 1 && dom[0].semver === ANY) { - if (options.includePrerelease) { - return true - } else { - dom = [new Comparator('>=0.0.0')] - } - } - - const eqSet = new Set() - let gt, lt - for (const c of sub) { - if (c.operator === '>' || c.operator === '>=') { - gt = higherGT(gt, c, options) - } else if (c.operator === '<' || c.operator === '<=') { - lt = lowerLT(lt, c, options) - } else { - eqSet.add(c.semver) - } - } - - if (eqSet.size > 1) { - return null - } - - let gtltComp - if (gt && lt) { - gtltComp = compare(gt.semver, lt.semver, options) - if (gtltComp > 0) { - return null - } else if (gtltComp === 0 && (gt.operator !== '>=' || lt.operator !== '<=')) { - return null - } - } - - // will iterate one or zero times - for (const eq of eqSet) { - if (gt && !satisfies(eq, String(gt), options)) { - return null - } - - if (lt && !satisfies(eq, String(lt), options)) { - return null - } - - for (const c of dom) { - if (!satisfies(eq, String(c), options)) { - return false - } - } - - return true - } - - let higher, lower - let hasDomLT, hasDomGT - // if the subset has a prerelease, we need a comparator in the superset - // with the same tuple and a prerelease, or it's not a subset - let needDomLTPre = lt && - !options.includePrerelease && - lt.semver.prerelease.length ? lt.semver : false - let needDomGTPre = gt && - !options.includePrerelease && - gt.semver.prerelease.length ? gt.semver : false - // exception: <1.2.3-0 is the same as <1.2.3 - if (needDomLTPre && needDomLTPre.prerelease.length === 1 && - lt.operator === '<' && needDomLTPre.prerelease[0] === 0) { - needDomLTPre = false - } - - for (const c of dom) { - hasDomGT = hasDomGT || c.operator === '>' || c.operator === '>=' - hasDomLT = hasDomLT || c.operator === '<' || c.operator === '<=' - if (gt) { - if (needDomGTPre) { - if (c.semver.prerelease && c.semver.prerelease.length && - c.semver.major === needDomGTPre.major && - c.semver.minor === needDomGTPre.minor && - c.semver.patch === needDomGTPre.patch) { - needDomGTPre = false - } - } - if (c.operator === '>' || c.operator === '>=') { - higher = higherGT(gt, c, options) - if (higher === c && higher !== gt) { - return false - } - } else if (gt.operator === '>=' && !satisfies(gt.semver, String(c), options)) { - return false - } - } - if (lt) { - if (needDomLTPre) { - if (c.semver.prerelease && c.semver.prerelease.length && - c.semver.major === needDomLTPre.major && - c.semver.minor === needDomLTPre.minor && - c.semver.patch === needDomLTPre.patch) { - needDomLTPre = false - } - } - if (c.operator === '<' || c.operator === '<=') { - lower = lowerLT(lt, c, options) - if (lower === c && lower !== lt) { - return false - } - } else if (lt.operator === '<=' && !satisfies(lt.semver, String(c), options)) { - return false - } - } - if (!c.operator && (lt || gt) && gtltComp !== 0) { - return false - } - } - - // if there was a < or >, and nothing in the dom, then must be false - // UNLESS it was limited by another range in the other direction. 
- // Eg, >1.0.0 <1.0.1 is still a subset of <2.0.0 - if (gt && hasDomLT && !lt && gtltComp !== 0) { - return false - } - - if (lt && hasDomGT && !gt && gtltComp !== 0) { - return false - } - - // we needed a prerelease range in a specific tuple, but didn't get one - // then this isn't a subset. eg >=1.2.3-pre is not a subset of >=1.0.0, - // because it includes prereleases in the 1.2.3 tuple - if (needDomGTPre || needDomLTPre) { - return false - } - - return true -} - -// >=1.2.3 is lower than >1.2.3 -const higherGT = (a, b, options) => { - if (!a) { - return b - } - const comp = compare(a.semver, b.semver, options) - return comp > 0 ? a - : comp < 0 ? b - : b.operator === '>' && a.operator === '>=' ? b - : a -} - -// <=1.2.3 is higher than <1.2.3 -const lowerLT = (a, b, options) => { - if (!a) { - return b - } - const comp = compare(a.semver, b.semver, options) - return comp < 0 ? a - : comp > 0 ? b - : b.operator === '<' && a.operator === '<=' ? b - : a -} - -module.exports = subset - - -/***/ }), - -/***/ 2706: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const Range = __nccwpck_require__(9828) - -// Mostly just for testing and legacy API reasons -const toComparators = (range, options) => - new Range(range, options).set - .map(comp => comp.map(c => c.value).join(' ').trim().split(' ')) - -module.exports = toComparators - - -/***/ }), - -/***/ 2098: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const Range = __nccwpck_require__(9828) -const validRange = (range, options) => { - try { - // Return '*' instead of '' so that truthiness works. - // This will throw if it's invalid anyway - return new Range(range, options).range || '*' - } catch (er) { - return null - } -} -module.exports = validRange - - -/***/ }), - -/***/ 4256: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -var punycode = __nccwpck_require__(4213); -var mappingTable = __nccwpck_require__(68); - -var PROCESSING_OPTIONS = { - TRANSITIONAL: 0, - NONTRANSITIONAL: 1 -}; - -function normalize(str) { // fix bug in v8 - return str.split('\u0000').map(function (s) { return s.normalize('NFC'); }).join('\u0000'); -} - -function findStatus(val) { - var start = 0; - var end = mappingTable.length - 1; - - while (start <= end) { - var mid = Math.floor((start + end) / 2); - - var target = mappingTable[mid]; - if (target[0][0] <= val && target[0][1] >= val) { - return target; - } else if (target[0][0] > val) { - end = mid - 1; - } else { - start = mid + 1; - } - } - - return null; -} - -var regexAstralSymbols = /[\uD800-\uDBFF][\uDC00-\uDFFF]/g; - -function countSymbols(string) { - return string - // replace every surrogate pair with a BMP symbol - .replace(regexAstralSymbols, '_') - // then get the length - .length; -} - -function mapChars(domain_name, useSTD3, processing_option) { - var hasError = false; - var processed = ""; - - var len = countSymbols(domain_name); - for (var i = 0; i < len; ++i) { - var codePoint = domain_name.codePointAt(i); - var status = findStatus(codePoint); - - switch (status[1]) { - case "disallowed": - hasError = true; - processed += String.fromCodePoint(codePoint); - break; - case "ignored": - break; - case "mapped": - processed += String.fromCodePoint.apply(String, status[2]); - break; - case "deviation": - if (processing_option === PROCESSING_OPTIONS.TRANSITIONAL) { - processed += String.fromCodePoint.apply(String, status[2]); - } else { - processed += String.fromCodePoint(codePoint); - } - break; - case "valid": - 
processed += String.fromCodePoint(codePoint); - break; - case "disallowed_STD3_mapped": - if (useSTD3) { - hasError = true; - processed += String.fromCodePoint(codePoint); - } else { - processed += String.fromCodePoint.apply(String, status[2]); - } - break; - case "disallowed_STD3_valid": - if (useSTD3) { - hasError = true; - } - - processed += String.fromCodePoint(codePoint); - break; - } - } - - return { - string: processed, - error: hasError - }; -} - -var combiningMarksRegex = /[\u0300-\u036F\u0483-\u0489\u0591-\u05BD\u05BF\u05C1\u05C2\u05C4\u05C5\u05C7\u0610-\u061A\u064B-\u065F\u0670\u06D6-\u06DC\u06DF-\u06E4\u06E7\u06E8\u06EA-\u06ED\u0711\u0730-\u074A\u07A6-\u07B0\u07EB-\u07F3\u0816-\u0819\u081B-\u0823\u0825-\u0827\u0829-\u082D\u0859-\u085B\u08E4-\u0903\u093A-\u093C\u093E-\u094F\u0951-\u0957\u0962\u0963\u0981-\u0983\u09BC\u09BE-\u09C4\u09C7\u09C8\u09CB-\u09CD\u09D7\u09E2\u09E3\u0A01-\u0A03\u0A3C\u0A3E-\u0A42\u0A47\u0A48\u0A4B-\u0A4D\u0A51\u0A70\u0A71\u0A75\u0A81-\u0A83\u0ABC\u0ABE-\u0AC5\u0AC7-\u0AC9\u0ACB-\u0ACD\u0AE2\u0AE3\u0B01-\u0B03\u0B3C\u0B3E-\u0B44\u0B47\u0B48\u0B4B-\u0B4D\u0B56\u0B57\u0B62\u0B63\u0B82\u0BBE-\u0BC2\u0BC6-\u0BC8\u0BCA-\u0BCD\u0BD7\u0C00-\u0C03\u0C3E-\u0C44\u0C46-\u0C48\u0C4A-\u0C4D\u0C55\u0C56\u0C62\u0C63\u0C81-\u0C83\u0CBC\u0CBE-\u0CC4\u0CC6-\u0CC8\u0CCA-\u0CCD\u0CD5\u0CD6\u0CE2\u0CE3\u0D01-\u0D03\u0D3E-\u0D44\u0D46-\u0D48\u0D4A-\u0D4D\u0D57\u0D62\u0D63\u0D82\u0D83\u0DCA\u0DCF-\u0DD4\u0DD6\u0DD8-\u0DDF\u0DF2\u0DF3\u0E31\u0E34-\u0E3A\u0E47-\u0E4E\u0EB1\u0EB4-\u0EB9\u0EBB\u0EBC\u0EC8-\u0ECD\u0F18\u0F19\u0F35\u0F37\u0F39\u0F3E\u0F3F\u0F71-\u0F84\u0F86\u0F87\u0F8D-\u0F97\u0F99-\u0FBC\u0FC6\u102B-\u103E\u1056-\u1059\u105E-\u1060\u1062-\u1064\u1067-\u106D\u1071-\u1074\u1082-\u108D\u108F\u109A-\u109D\u135D-\u135F\u1712-\u1714\u1732-\u1734\u1752\u1753\u1772\u1773\u17B4-\u17D3\u17DD\u180B-\u180D\u18A9\u1920-\u192B\u1930-\u193B\u19B0-\u19C0\u19C8\u19C9\u1A17-\u1A1B\u1A55-\u1A5E\u1A60-\u1A7C\u1A7F\u1AB0-\u1ABE\u1B00-\u1B04\u1B34-\u1B44\u1B6B-\u1B73\u1B80-\u1B82\u1BA1-\u1BAD\u1BE6-\u1BF3\u1C24-\u1C37\u1CD0-\u1CD2\u1CD4-\u1CE8\u1CED\u1CF2-\u1CF4\u1CF8\u1CF9\u1DC0-\u1DF5\u1DFC-\u1DFF\u20D0-\u20F0\u2CEF-\u2CF1\u2D7F\u2DE0-\u2DFF\u302A-\u302F\u3099\u309A\uA66F-\uA672\uA674-\uA67D\uA69F\uA6F0\uA6F1\uA802\uA806\uA80B\uA823-\uA827\uA880\uA881\uA8B4-\uA8C4\uA8E0-\uA8F1\uA926-\uA92D\uA947-\uA953\uA980-\uA983\uA9B3-\uA9C0\uA9E5\uAA29-\uAA36\uAA43\uAA4C\uAA4D\uAA7B-\uAA7D\uAAB0\uAAB2-\uAAB4\uAAB7\uAAB8\uAABE\uAABF\uAAC1\uAAEB-\uAAEF\uAAF5\uAAF6\uABE3-\uABEA\uABEC\uABED\uFB1E\uFE00-\uFE0F\uFE20-\uFE2D]|\uD800[\uDDFD\uDEE0\uDF76-\uDF7A]|\uD802[\uDE01-\uDE03\uDE05\uDE06\uDE0C-\uDE0F\uDE38-\uDE3A\uDE3F\uDEE5\uDEE6]|\uD804[\uDC00-\uDC02\uDC38-\uDC46\uDC7F-\uDC82\uDCB0-\uDCBA\uDD00-\uDD02\uDD27-\uDD34\uDD73\uDD80-\uDD82\uDDB3-\uDDC0\uDE2C-\uDE37\uDEDF-\uDEEA\uDF01-\uDF03\uDF3C\uDF3E-\uDF44\uDF47\uDF48\uDF4B-\uDF4D\uDF57\uDF62\uDF63\uDF66-\uDF6C\uDF70-\uDF74]|\uD805[\uDCB0-\uDCC3\uDDAF-\uDDB5\uDDB8-\uDDC0\uDE30-\uDE40\uDEAB-\uDEB7]|\uD81A[\uDEF0-\uDEF4\uDF30-\uDF36]|\uD81B[\uDF51-\uDF7E\uDF8F-\uDF92]|\uD82F[\uDC9D\uDC9E]|\uD834[\uDD65-\uDD69\uDD6D-\uDD72\uDD7B-\uDD82\uDD85-\uDD8B\uDDAA-\uDDAD\uDE42-\uDE44]|\uD83A[\uDCD0-\uDCD6]|\uDB40[\uDD00-\uDDEF]/; - -function validateLabel(label, processing_option) { - if (label.substr(0, 4) === "xn--") { - label = punycode.toUnicode(label); - processing_option = PROCESSING_OPTIONS.NONTRANSITIONAL; - } - - var error = false; - - if (normalize(label) !== label || - (label[3] === "-" && label[4] === "-") || - label[0] === "-" || label[label.length - 1] 
=== "-" || - label.indexOf(".") !== -1 || - label.search(combiningMarksRegex) === 0) { - error = true; - } - - var len = countSymbols(label); - for (var i = 0; i < len; ++i) { - var status = findStatus(label.codePointAt(i)); - if ((processing === PROCESSING_OPTIONS.TRANSITIONAL && status[1] !== "valid") || - (processing === PROCESSING_OPTIONS.NONTRANSITIONAL && - status[1] !== "valid" && status[1] !== "deviation")) { - error = true; - break; - } - } - - return { - label: label, - error: error - }; -} - -function processing(domain_name, useSTD3, processing_option) { - var result = mapChars(domain_name, useSTD3, processing_option); - result.string = normalize(result.string); - - var labels = result.string.split("."); - for (var i = 0; i < labels.length; ++i) { - try { - var validation = validateLabel(labels[i]); - labels[i] = validation.label; - result.error = result.error || validation.error; - } catch(e) { - result.error = true; - } - } - - return { - string: labels.join("."), - error: result.error - }; -} - -module.exports.toASCII = function(domain_name, useSTD3, processing_option, verifyDnsLength) { - var result = processing(domain_name, useSTD3, processing_option); - var labels = result.string.split("."); - labels = labels.map(function(l) { - try { - return punycode.toASCII(l); - } catch(e) { - result.error = true; - return l; - } - }); - - if (verifyDnsLength) { - var total = labels.slice(0, labels.length - 1).join(".").length; - if (total.length > 253 || total.length === 0) { - result.error = true; - } - - for (var i=0; i < labels.length; ++i) { - if (labels.length > 63 || labels.length === 0) { - result.error = true; - break; - } - } - } - - if (result.error) return null; - return labels.join("."); -}; - -module.exports.toUnicode = function(domain_name, useSTD3) { - var result = processing(domain_name, useSTD3, PROCESSING_OPTIONS.NONTRANSITIONAL); - - return { - domain: result.string, - error: result.error - }; -}; - -module.exports.PROCESSING_OPTIONS = PROCESSING_OPTIONS; - - -/***/ }), - -/***/ 4294: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -module.exports = __nccwpck_require__(4219); - - -/***/ }), - -/***/ 4219: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var net = __nccwpck_require__(1631); -var tls = __nccwpck_require__(4016); -var http = __nccwpck_require__(8605); -var https = __nccwpck_require__(7211); -var events = __nccwpck_require__(8614); -var assert = __nccwpck_require__(2357); -var util = __nccwpck_require__(1669); - - -exports.httpOverHttp = httpOverHttp; -exports.httpsOverHttp = httpsOverHttp; -exports.httpOverHttps = httpOverHttps; -exports.httpsOverHttps = httpsOverHttps; - - -function httpOverHttp(options) { - var agent = new TunnelingAgent(options); - agent.request = http.request; - return agent; -} - -function httpsOverHttp(options) { - var agent = new TunnelingAgent(options); - agent.request = http.request; - agent.createSocket = createSecureSocket; - agent.defaultPort = 443; - return agent; -} - -function httpOverHttps(options) { - var agent = new TunnelingAgent(options); - agent.request = https.request; - return agent; -} - -function httpsOverHttps(options) { - var agent = new TunnelingAgent(options); - agent.request = https.request; - agent.createSocket = createSecureSocket; - agent.defaultPort = 443; - return agent; -} - - -function TunnelingAgent(options) { - var self = this; - self.options = options || {}; - self.proxyOptions = self.options.proxy || {}; - self.maxSockets = 
self.options.maxSockets || http.Agent.defaultMaxSockets; - self.requests = []; - self.sockets = []; - - self.on('free', function onFree(socket, host, port, localAddress) { - var options = toOptions(host, port, localAddress); - for (var i = 0, len = self.requests.length; i < len; ++i) { - var pending = self.requests[i]; - if (pending.host === options.host && pending.port === options.port) { - // Detect the request to connect same origin server, - // reuse the connection. - self.requests.splice(i, 1); - pending.request.onSocket(socket); - return; - } - } - socket.destroy(); - self.removeSocket(socket); - }); -} -util.inherits(TunnelingAgent, events.EventEmitter); - -TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) { - var self = this; - var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress)); - - if (self.sockets.length >= this.maxSockets) { - // We are over limit so we'll add it to the queue. - self.requests.push(options); - return; - } - - // If we are under maxSockets create a new one. - self.createSocket(options, function(socket) { - socket.on('free', onFree); - socket.on('close', onCloseOrRemove); - socket.on('agentRemove', onCloseOrRemove); - req.onSocket(socket); - - function onFree() { - self.emit('free', socket, options); - } - - function onCloseOrRemove(err) { - self.removeSocket(socket); - socket.removeListener('free', onFree); - socket.removeListener('close', onCloseOrRemove); - socket.removeListener('agentRemove', onCloseOrRemove); - } - }); -}; - -TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { - var self = this; - var placeholder = {}; - self.sockets.push(placeholder); - - var connectOptions = mergeOptions({}, self.proxyOptions, { - method: 'CONNECT', - path: options.host + ':' + options.port, - agent: false, - headers: { - host: options.host + ':' + options.port - } - }); - if (options.localAddress) { - connectOptions.localAddress = options.localAddress; - } - if (connectOptions.proxyAuth) { - connectOptions.headers = connectOptions.headers || {}; - connectOptions.headers['Proxy-Authorization'] = 'Basic ' + - new Buffer(connectOptions.proxyAuth).toString('base64'); - } - - debug('making CONNECT request'); - var connectReq = self.request(connectOptions); - connectReq.useChunkedEncodingByDefault = false; // for v0.6 - connectReq.once('response', onResponse); // for v0.6 - connectReq.once('upgrade', onUpgrade); // for v0.6 - connectReq.once('connect', onConnect); // for v0.7 or later - connectReq.once('error', onError); - connectReq.end(); - - function onResponse(res) { - // Very hacky. This is necessary to avoid http-parser leaks. - res.upgrade = true; - } - - function onUpgrade(res, socket, head) { - // Hacky. 
- process.nextTick(function() { - onConnect(res, socket, head); - }); - } - - function onConnect(res, socket, head) { - connectReq.removeAllListeners(); - socket.removeAllListeners(); - - if (res.statusCode !== 200) { - debug('tunneling socket could not be established, statusCode=%d', - res.statusCode); - socket.destroy(); - var error = new Error('tunneling socket could not be established, ' + - 'statusCode=' + res.statusCode); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - return; - } - if (head.length > 0) { - debug('got illegal response body from proxy'); - socket.destroy(); - var error = new Error('got illegal response body from proxy'); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - return; - } - debug('tunneling connection has established'); - self.sockets[self.sockets.indexOf(placeholder)] = socket; - return cb(socket); - } - - function onError(cause) { - connectReq.removeAllListeners(); - - debug('tunneling socket could not be established, cause=%s\n', - cause.message, cause.stack); - var error = new Error('tunneling socket could not be established, ' + - 'cause=' + cause.message); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - } -}; - -TunnelingAgent.prototype.removeSocket = function removeSocket(socket) { - var pos = this.sockets.indexOf(socket) - if (pos === -1) { - return; - } - this.sockets.splice(pos, 1); - - var pending = this.requests.shift(); - if (pending) { - // If we have pending requests and a socket gets closed a new one - // needs to be created to take over in the pool for the one that closed. - this.createSocket(pending, function(socket) { - pending.request.onSocket(socket); - }); - } -}; - -function createSecureSocket(options, cb) { - var self = this; - TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { - var hostHeader = options.request.getHeader('host'); - var tlsOptions = mergeOptions({}, self.options, { - socket: socket, - servername: hostHeader ? 
hostHeader.replace(/:.*$/, '') : options.host - }); - - // 0 is dummy port for v0.6 - var secureSocket = tls.connect(0, tlsOptions); - self.sockets[self.sockets.indexOf(socket)] = secureSocket; - cb(secureSocket); - }); -} - - -function toOptions(host, port, localAddress) { - if (typeof host === 'string') { // since v0.10 - return { - host: host, - port: port, - localAddress: localAddress - }; - } - return host; // for v0.11 or later -} - -function mergeOptions(target) { - for (var i = 1, len = arguments.length; i < len; ++i) { - var overrides = arguments[i]; - if (typeof overrides === 'object') { - var keys = Object.keys(overrides); - for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { - var k = keys[j]; - if (overrides[k] !== undefined) { - target[k] = overrides[k]; - } - } - } - } - return target; -} - - -var debug; -if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { - debug = function() { - var args = Array.prototype.slice.call(arguments); - if (typeof args[0] === 'string') { - args[0] = 'TUNNEL: ' + args[0]; - } else { - args.unshift('TUNNEL:'); - } - console.error.apply(console, args); - } -} else { - debug = function() {}; -} -exports.debug = debug; // for test - - -/***/ }), - -/***/ 5030: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -function getUserAgent() { - if (typeof navigator === "object" && "userAgent" in navigator) { - return navigator.userAgent; - } - - if (typeof process === "object" && "version" in process) { - return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`; - } - - return ""; -} - -exports.getUserAgent = getUserAgent; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 9046: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -exports.fromCallback = function (fn) { - return Object.defineProperty(function (...args) { - if (typeof args[args.length - 1] === 'function') fn.apply(this, args) - else { - return new Promise((resolve, reject) => { - fn.call( - this, - ...args, - (err, res) => (err != null) ? reject(err) : resolve(res) - ) - }) - } - }, 'name', { value: fn.name }) -} - -exports.fromPromise = function (fn) { - return Object.defineProperty(function (...args) { - const cb = args[args.length - 1] - if (typeof cb !== 'function') return fn.apply(this, args) - else fn.apply(this, args.slice(0, -1)).then(r => cb(null, r), cb) - }, 'name', { value: fn.name }) -} - - -/***/ }), - -/***/ 4886: -/***/ ((module) => { - -"use strict"; - - -var conversions = {}; -module.exports = conversions; - -function sign(x) { - return x < 0 ? -1 : 1; -} - -function evenRound(x) { - // Round x to the nearest integer, choosing the even integer if it lies halfway between two. - if ((x % 1) === 0.5 && (x & 1) === 0) { // [even number].5; round down (i.e. floor) - return Math.floor(x); - } else { - return Math.round(x); - } -} - -function createNumberConversion(bitLength, typeOpts) { - if (!typeOpts.unsigned) { - --bitLength; - } - const lowerBound = typeOpts.unsigned ? 0 : -Math.pow(2, bitLength); - const upperBound = Math.pow(2, bitLength) - 1; - - const moduloVal = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength) : Math.pow(2, bitLength); - const moduloBound = typeOpts.moduloBitLength ? 
Math.pow(2, typeOpts.moduloBitLength - 1) : Math.pow(2, bitLength - 1); - - return function(V, opts) { - if (!opts) opts = {}; - - let x = +V; - - if (opts.enforceRange) { - if (!Number.isFinite(x)) { - throw new TypeError("Argument is not a finite number"); - } - - x = sign(x) * Math.floor(Math.abs(x)); - if (x < lowerBound || x > upperBound) { - throw new TypeError("Argument is not in byte range"); - } - - return x; - } - - if (!isNaN(x) && opts.clamp) { - x = evenRound(x); - - if (x < lowerBound) x = lowerBound; - if (x > upperBound) x = upperBound; - return x; - } - - if (!Number.isFinite(x) || x === 0) { - return 0; - } - - x = sign(x) * Math.floor(Math.abs(x)); - x = x % moduloVal; - - if (!typeOpts.unsigned && x >= moduloBound) { - return x - moduloVal; - } else if (typeOpts.unsigned) { - if (x < 0) { - x += moduloVal; - } else if (x === -0) { // don't return negative zero - return 0; - } - } - - return x; - } -} - -conversions["void"] = function () { - return undefined; -}; - -conversions["boolean"] = function (val) { - return !!val; -}; - -conversions["byte"] = createNumberConversion(8, { unsigned: false }); -conversions["octet"] = createNumberConversion(8, { unsigned: true }); - -conversions["short"] = createNumberConversion(16, { unsigned: false }); -conversions["unsigned short"] = createNumberConversion(16, { unsigned: true }); - -conversions["long"] = createNumberConversion(32, { unsigned: false }); -conversions["unsigned long"] = createNumberConversion(32, { unsigned: true }); - -conversions["long long"] = createNumberConversion(32, { unsigned: false, moduloBitLength: 64 }); -conversions["unsigned long long"] = createNumberConversion(32, { unsigned: true, moduloBitLength: 64 }); - -conversions["double"] = function (V) { - const x = +V; - - if (!Number.isFinite(x)) { - throw new TypeError("Argument is not a finite floating-point value"); - } - - return x; -}; - -conversions["unrestricted double"] = function (V) { - const x = +V; - - if (isNaN(x)) { - throw new TypeError("Argument is NaN"); - } - - return x; -}; - -// not quite valid, but good enough for JS -conversions["float"] = conversions["double"]; -conversions["unrestricted float"] = conversions["unrestricted double"]; - -conversions["DOMString"] = function (V, opts) { - if (!opts) opts = {}; - - if (opts.treatNullAsEmptyString && V === null) { - return ""; - } - - return String(V); -}; - -conversions["ByteString"] = function (V, opts) { - const x = String(V); - let c = undefined; - for (let i = 0; (c = x.codePointAt(i)) !== undefined; ++i) { - if (c > 255) { - throw new TypeError("Argument is not a valid bytestring"); - } - } - - return x; -}; - -conversions["USVString"] = function (V) { - const S = String(V); - const n = S.length; - const U = []; - for (let i = 0; i < n; ++i) { - const c = S.charCodeAt(i); - if (c < 0xD800 || c > 0xDFFF) { - U.push(String.fromCodePoint(c)); - } else if (0xDC00 <= c && c <= 0xDFFF) { - U.push(String.fromCodePoint(0xFFFD)); - } else { - if (i === n - 1) { - U.push(String.fromCodePoint(0xFFFD)); - } else { - const d = S.charCodeAt(i + 1); - if (0xDC00 <= d && d <= 0xDFFF) { - const a = c & 0x3FF; - const b = d & 0x3FF; - U.push(String.fromCodePoint((2 << 15) + (2 << 9) * a + b)); - ++i; - } else { - U.push(String.fromCodePoint(0xFFFD)); - } - } - } - } - - return U.join(''); -}; - -conversions["Date"] = function (V, opts) { - if (!(V instanceof Date)) { - throw new TypeError("Argument is not a Date object"); - } - if (isNaN(V)) { - return undefined; - } - - return V; -}; - 
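// --- Editor's illustrative sketch; not part of the original bundled file being deleted above. ---
// The number converters defined just above implement WebIDL-style integer coercion:
// out-of-range values wrap modulo 2^bits unless `enforceRange` or `clamp` is requested.
// Assuming this bundled module corresponds to the standalone "webidl-conversions" package
// (an assumption; the bundle itself does not name it), the behaviour could be exercised like so:
const webidlConversions = require('webidl-conversions');
console.log(webidlConversions['octet'](300));                  // 44  (300 mod 256)
console.log(webidlConversions['octet'](-1));                   // 255 (wraps into the unsigned byte range)
console.log(webidlConversions['octet'](300, { clamp: true })); // 255 (clamped instead of wrapped)
console.log(webidlConversions['USVString']('a\uD800b'));       // 'a\uFFFDb' (lone surrogate replaced)
// webidlConversions['octet'](300, { enforceRange: true }) would throw a TypeError instead of wrapping.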
-conversions["RegExp"] = function (V, opts) { - if (!(V instanceof RegExp)) { - V = new RegExp(V); - } - - return V; -}; - - -/***/ }), - -/***/ 7537: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -const usm = __nccwpck_require__(2158); - -exports.implementation = class URLImpl { - constructor(constructorArgs) { - const url = constructorArgs[0]; - const base = constructorArgs[1]; - - let parsedBase = null; - if (base !== undefined) { - parsedBase = usm.basicURLParse(base); - if (parsedBase === "failure") { - throw new TypeError("Invalid base URL"); - } - } - - const parsedURL = usm.basicURLParse(url, { baseURL: parsedBase }); - if (parsedURL === "failure") { - throw new TypeError("Invalid URL"); - } - - this._url = parsedURL; - - // TODO: query stuff - } - - get href() { - return usm.serializeURL(this._url); - } - - set href(v) { - const parsedURL = usm.basicURLParse(v); - if (parsedURL === "failure") { - throw new TypeError("Invalid URL"); - } - - this._url = parsedURL; - } - - get origin() { - return usm.serializeURLOrigin(this._url); - } - - get protocol() { - return this._url.scheme + ":"; - } - - set protocol(v) { - usm.basicURLParse(v + ":", { url: this._url, stateOverride: "scheme start" }); - } - - get username() { - return this._url.username; - } - - set username(v) { - if (usm.cannotHaveAUsernamePasswordPort(this._url)) { - return; - } - - usm.setTheUsername(this._url, v); - } - - get password() { - return this._url.password; - } - - set password(v) { - if (usm.cannotHaveAUsernamePasswordPort(this._url)) { - return; - } - - usm.setThePassword(this._url, v); - } - - get host() { - const url = this._url; - - if (url.host === null) { - return ""; - } - - if (url.port === null) { - return usm.serializeHost(url.host); - } - - return usm.serializeHost(url.host) + ":" + usm.serializeInteger(url.port); - } - - set host(v) { - if (this._url.cannotBeABaseURL) { - return; - } - - usm.basicURLParse(v, { url: this._url, stateOverride: "host" }); - } - - get hostname() { - if (this._url.host === null) { - return ""; - } - - return usm.serializeHost(this._url.host); - } - - set hostname(v) { - if (this._url.cannotBeABaseURL) { - return; - } - - usm.basicURLParse(v, { url: this._url, stateOverride: "hostname" }); - } - - get port() { - if (this._url.port === null) { - return ""; - } - - return usm.serializeInteger(this._url.port); - } - - set port(v) { - if (usm.cannotHaveAUsernamePasswordPort(this._url)) { - return; - } - - if (v === "") { - this._url.port = null; - } else { - usm.basicURLParse(v, { url: this._url, stateOverride: "port" }); - } - } - - get pathname() { - if (this._url.cannotBeABaseURL) { - return this._url.path[0]; - } - - if (this._url.path.length === 0) { - return ""; - } - - return "/" + this._url.path.join("/"); - } - - set pathname(v) { - if (this._url.cannotBeABaseURL) { - return; - } - - this._url.path = []; - usm.basicURLParse(v, { url: this._url, stateOverride: "path start" }); - } - - get search() { - if (this._url.query === null || this._url.query === "") { - return ""; - } - - return "?" + this._url.query; - } - - set search(v) { - // TODO: query stuff - - const url = this._url; - - if (v === "") { - url.query = null; - return; - } - - const input = v[0] === "?" ? 
v.substring(1) : v; - url.query = ""; - usm.basicURLParse(input, { url, stateOverride: "query" }); - } - - get hash() { - if (this._url.fragment === null || this._url.fragment === "") { - return ""; - } - - return "#" + this._url.fragment; - } - - set hash(v) { - if (v === "") { - this._url.fragment = null; - return; - } - - const input = v[0] === "#" ? v.substring(1) : v; - this._url.fragment = ""; - usm.basicURLParse(input, { url: this._url, stateOverride: "fragment" }); - } - - toJSON() { - return this.href; - } -}; - - -/***/ }), - -/***/ 3394: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const conversions = __nccwpck_require__(4886); -const utils = __nccwpck_require__(3185); -const Impl = __nccwpck_require__(7537); - -const impl = utils.implSymbol; - -function URL(url) { - if (!this || this[impl] || !(this instanceof URL)) { - throw new TypeError("Failed to construct 'URL': Please use the 'new' operator, this DOM object constructor cannot be called as a function."); - } - if (arguments.length < 1) { - throw new TypeError("Failed to construct 'URL': 1 argument required, but only " + arguments.length + " present."); - } - const args = []; - for (let i = 0; i < arguments.length && i < 2; ++i) { - args[i] = arguments[i]; - } - args[0] = conversions["USVString"](args[0]); - if (args[1] !== undefined) { - args[1] = conversions["USVString"](args[1]); - } - - module.exports.setup(this, args); -} - -URL.prototype.toJSON = function toJSON() { - if (!this || !module.exports.is(this)) { - throw new TypeError("Illegal invocation"); - } - const args = []; - for (let i = 0; i < arguments.length && i < 0; ++i) { - args[i] = arguments[i]; - } - return this[impl].toJSON.apply(this[impl], args); -}; -Object.defineProperty(URL.prototype, "href", { - get() { - return this[impl].href; - }, - set(V) { - V = conversions["USVString"](V); - this[impl].href = V; - }, - enumerable: true, - configurable: true -}); - -URL.prototype.toString = function () { - if (!this || !module.exports.is(this)) { - throw new TypeError("Illegal invocation"); - } - return this.href; -}; - -Object.defineProperty(URL.prototype, "origin", { - get() { - return this[impl].origin; - }, - enumerable: true, - configurable: true -}); - -Object.defineProperty(URL.prototype, "protocol", { - get() { - return this[impl].protocol; - }, - set(V) { - V = conversions["USVString"](V); - this[impl].protocol = V; - }, - enumerable: true, - configurable: true -}); - -Object.defineProperty(URL.prototype, "username", { - get() { - return this[impl].username; - }, - set(V) { - V = conversions["USVString"](V); - this[impl].username = V; - }, - enumerable: true, - configurable: true -}); - -Object.defineProperty(URL.prototype, "password", { - get() { - return this[impl].password; - }, - set(V) { - V = conversions["USVString"](V); - this[impl].password = V; - }, - enumerable: true, - configurable: true -}); - -Object.defineProperty(URL.prototype, "host", { - get() { - return this[impl].host; - }, - set(V) { - V = conversions["USVString"](V); - this[impl].host = V; - }, - enumerable: true, - configurable: true -}); - -Object.defineProperty(URL.prototype, "hostname", { - get() { - return this[impl].hostname; - }, - set(V) { - V = conversions["USVString"](V); - this[impl].hostname = V; - }, - enumerable: true, - configurable: true -}); - -Object.defineProperty(URL.prototype, "port", { - get() { - return this[impl].port; - }, - set(V) { - V = conversions["USVString"](V); - this[impl].port = V; - }, - enumerable: 
true, - configurable: true -}); - -Object.defineProperty(URL.prototype, "pathname", { - get() { - return this[impl].pathname; - }, - set(V) { - V = conversions["USVString"](V); - this[impl].pathname = V; - }, - enumerable: true, - configurable: true -}); - -Object.defineProperty(URL.prototype, "search", { - get() { - return this[impl].search; - }, - set(V) { - V = conversions["USVString"](V); - this[impl].search = V; - }, - enumerable: true, - configurable: true -}); - -Object.defineProperty(URL.prototype, "hash", { - get() { - return this[impl].hash; - }, - set(V) { - V = conversions["USVString"](V); - this[impl].hash = V; - }, - enumerable: true, - configurable: true -}); - - -module.exports = { - is(obj) { - return !!obj && obj[impl] instanceof Impl.implementation; - }, - create(constructorArgs, privateData) { - let obj = Object.create(URL.prototype); - this.setup(obj, constructorArgs, privateData); - return obj; - }, - setup(obj, constructorArgs, privateData) { - if (!privateData) privateData = {}; - privateData.wrapper = obj; - - obj[impl] = new Impl.implementation(constructorArgs, privateData); - obj[impl][utils.wrapperSymbol] = obj; - }, - interface: URL, - expose: { - Window: { URL: URL }, - Worker: { URL: URL } - } -}; - - - -/***/ }), - -/***/ 8665: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -exports.URL = __nccwpck_require__(3394).interface; -exports.serializeURL = __nccwpck_require__(2158).serializeURL; -exports.serializeURLOrigin = __nccwpck_require__(2158).serializeURLOrigin; -exports.basicURLParse = __nccwpck_require__(2158).basicURLParse; -exports.setTheUsername = __nccwpck_require__(2158).setTheUsername; -exports.setThePassword = __nccwpck_require__(2158).setThePassword; -exports.serializeHost = __nccwpck_require__(2158).serializeHost; -exports.serializeInteger = __nccwpck_require__(2158).serializeInteger; -exports.parseURL = __nccwpck_require__(2158).parseURL; - - -/***/ }), - -/***/ 2158: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - -const punycode = __nccwpck_require__(4213); -const tr46 = __nccwpck_require__(4256); - -const specialSchemes = { - ftp: 21, - file: null, - gopher: 70, - http: 80, - https: 443, - ws: 80, - wss: 443 -}; - -const failure = Symbol("failure"); - -function countSymbols(str) { - return punycode.ucs2.decode(str).length; -} - -function at(input, idx) { - const c = input[idx]; - return isNaN(c) ? undefined : String.fromCodePoint(c); -} - -function isASCIIDigit(c) { - return c >= 0x30 && c <= 0x39; -} - -function isASCIIAlpha(c) { - return (c >= 0x41 && c <= 0x5A) || (c >= 0x61 && c <= 0x7A); -} - -function isASCIIAlphanumeric(c) { - return isASCIIAlpha(c) || isASCIIDigit(c); -} - -function isASCIIHex(c) { - return isASCIIDigit(c) || (c >= 0x41 && c <= 0x46) || (c >= 0x61 && c <= 0x66); -} - -function isSingleDot(buffer) { - return buffer === "." || buffer.toLowerCase() === "%2e"; -} - -function isDoubleDot(buffer) { - buffer = buffer.toLowerCase(); - return buffer === ".." || buffer === "%2e." 
|| buffer === ".%2e" || buffer === "%2e%2e"; -} - -function isWindowsDriveLetterCodePoints(cp1, cp2) { - return isASCIIAlpha(cp1) && (cp2 === 58 || cp2 === 124); -} - -function isWindowsDriveLetterString(string) { - return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && (string[1] === ":" || string[1] === "|"); -} - -function isNormalizedWindowsDriveLetterString(string) { - return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && string[1] === ":"; -} - -function containsForbiddenHostCodePoint(string) { - return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|%|\/|:|\?|@|\[|\\|\]/) !== -1; -} - -function containsForbiddenHostCodePointExcludingPercent(string) { - return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|\/|:|\?|@|\[|\\|\]/) !== -1; -} - -function isSpecialScheme(scheme) { - return specialSchemes[scheme] !== undefined; -} - -function isSpecial(url) { - return isSpecialScheme(url.scheme); -} - -function defaultPort(scheme) { - return specialSchemes[scheme]; -} - -function percentEncode(c) { - let hex = c.toString(16).toUpperCase(); - if (hex.length === 1) { - hex = "0" + hex; - } - - return "%" + hex; -} - -function utf8PercentEncode(c) { - const buf = new Buffer(c); - - let str = ""; - - for (let i = 0; i < buf.length; ++i) { - str += percentEncode(buf[i]); - } - - return str; -} - -function utf8PercentDecode(str) { - const input = new Buffer(str); - const output = []; - for (let i = 0; i < input.length; ++i) { - if (input[i] !== 37) { - output.push(input[i]); - } else if (input[i] === 37 && isASCIIHex(input[i + 1]) && isASCIIHex(input[i + 2])) { - output.push(parseInt(input.slice(i + 1, i + 3).toString(), 16)); - i += 2; - } else { - output.push(input[i]); - } - } - return new Buffer(output).toString(); -} - -function isC0ControlPercentEncode(c) { - return c <= 0x1F || c > 0x7E; -} - -const extraPathPercentEncodeSet = new Set([32, 34, 35, 60, 62, 63, 96, 123, 125]); -function isPathPercentEncode(c) { - return isC0ControlPercentEncode(c) || extraPathPercentEncodeSet.has(c); -} - -const extraUserinfoPercentEncodeSet = - new Set([47, 58, 59, 61, 64, 91, 92, 93, 94, 124]); -function isUserinfoPercentEncode(c) { - return isPathPercentEncode(c) || extraUserinfoPercentEncodeSet.has(c); -} - -function percentEncodeChar(c, encodeSetPredicate) { - const cStr = String.fromCodePoint(c); - - if (encodeSetPredicate(c)) { - return utf8PercentEncode(cStr); - } - - return cStr; -} - -function parseIPv4Number(input) { - let R = 10; - - if (input.length >= 2 && input.charAt(0) === "0" && input.charAt(1).toLowerCase() === "x") { - input = input.substring(2); - R = 16; - } else if (input.length >= 2 && input.charAt(0) === "0") { - input = input.substring(1); - R = 8; - } - - if (input === "") { - return 0; - } - - const regex = R === 10 ? /[^0-9]/ : (R === 16 ? 
/[^0-9A-Fa-f]/ : /[^0-7]/); - if (regex.test(input)) { - return failure; - } - - return parseInt(input, R); -} - -function parseIPv4(input) { - const parts = input.split("."); - if (parts[parts.length - 1] === "") { - if (parts.length > 1) { - parts.pop(); - } - } - - if (parts.length > 4) { - return input; - } - - const numbers = []; - for (const part of parts) { - if (part === "") { - return input; - } - const n = parseIPv4Number(part); - if (n === failure) { - return input; - } - - numbers.push(n); - } - - for (let i = 0; i < numbers.length - 1; ++i) { - if (numbers[i] > 255) { - return failure; - } - } - if (numbers[numbers.length - 1] >= Math.pow(256, 5 - numbers.length)) { - return failure; - } - - let ipv4 = numbers.pop(); - let counter = 0; - - for (const n of numbers) { - ipv4 += n * Math.pow(256, 3 - counter); - ++counter; - } - - return ipv4; -} - -function serializeIPv4(address) { - let output = ""; - let n = address; - - for (let i = 1; i <= 4; ++i) { - output = String(n % 256) + output; - if (i !== 4) { - output = "." + output; - } - n = Math.floor(n / 256); - } - - return output; -} - -function parseIPv6(input) { - const address = [0, 0, 0, 0, 0, 0, 0, 0]; - let pieceIndex = 0; - let compress = null; - let pointer = 0; - - input = punycode.ucs2.decode(input); - - if (input[pointer] === 58) { - if (input[pointer + 1] !== 58) { - return failure; - } - - pointer += 2; - ++pieceIndex; - compress = pieceIndex; - } - - while (pointer < input.length) { - if (pieceIndex === 8) { - return failure; - } - - if (input[pointer] === 58) { - if (compress !== null) { - return failure; - } - ++pointer; - ++pieceIndex; - compress = pieceIndex; - continue; - } - - let value = 0; - let length = 0; - - while (length < 4 && isASCIIHex(input[pointer])) { - value = value * 0x10 + parseInt(at(input, pointer), 16); - ++pointer; - ++length; - } - - if (input[pointer] === 46) { - if (length === 0) { - return failure; - } - - pointer -= length; - - if (pieceIndex > 6) { - return failure; - } - - let numbersSeen = 0; - - while (input[pointer] !== undefined) { - let ipv4Piece = null; - - if (numbersSeen > 0) { - if (input[pointer] === 46 && numbersSeen < 4) { - ++pointer; - } else { - return failure; - } - } - - if (!isASCIIDigit(input[pointer])) { - return failure; - } - - while (isASCIIDigit(input[pointer])) { - const number = parseInt(at(input, pointer)); - if (ipv4Piece === null) { - ipv4Piece = number; - } else if (ipv4Piece === 0) { - return failure; - } else { - ipv4Piece = ipv4Piece * 10 + number; - } - if (ipv4Piece > 255) { - return failure; - } - ++pointer; - } - - address[pieceIndex] = address[pieceIndex] * 0x100 + ipv4Piece; - - ++numbersSeen; - - if (numbersSeen === 2 || numbersSeen === 4) { - ++pieceIndex; - } - } - - if (numbersSeen !== 4) { - return failure; - } - - break; - } else if (input[pointer] === 58) { - ++pointer; - if (input[pointer] === undefined) { - return failure; - } - } else if (input[pointer] !== undefined) { - return failure; - } - - address[pieceIndex] = value; - ++pieceIndex; - } - - if (compress !== null) { - let swaps = pieceIndex - compress; - pieceIndex = 7; - while (pieceIndex !== 0 && swaps > 0) { - const temp = address[compress + swaps - 1]; - address[compress + swaps - 1] = address[pieceIndex]; - address[pieceIndex] = temp; - --pieceIndex; - --swaps; - } - } else if (compress === null && pieceIndex !== 8) { - return failure; - } - - return address; -} - -function serializeIPv6(address) { - let output = ""; - const seqResult = findLongestZeroSequence(address); - 
const compress = seqResult.idx; - let ignore0 = false; - - for (let pieceIndex = 0; pieceIndex <= 7; ++pieceIndex) { - if (ignore0 && address[pieceIndex] === 0) { - continue; - } else if (ignore0) { - ignore0 = false; - } - - if (compress === pieceIndex) { - const separator = pieceIndex === 0 ? "::" : ":"; - output += separator; - ignore0 = true; - continue; - } - - output += address[pieceIndex].toString(16); - - if (pieceIndex !== 7) { - output += ":"; - } - } - - return output; -} - -function parseHost(input, isSpecialArg) { - if (input[0] === "[") { - if (input[input.length - 1] !== "]") { - return failure; - } - - return parseIPv6(input.substring(1, input.length - 1)); - } - - if (!isSpecialArg) { - return parseOpaqueHost(input); - } - - const domain = utf8PercentDecode(input); - const asciiDomain = tr46.toASCII(domain, false, tr46.PROCESSING_OPTIONS.NONTRANSITIONAL, false); - if (asciiDomain === null) { - return failure; - } - - if (containsForbiddenHostCodePoint(asciiDomain)) { - return failure; - } - - const ipv4Host = parseIPv4(asciiDomain); - if (typeof ipv4Host === "number" || ipv4Host === failure) { - return ipv4Host; - } - - return asciiDomain; -} - -function parseOpaqueHost(input) { - if (containsForbiddenHostCodePointExcludingPercent(input)) { - return failure; - } - - let output = ""; - const decoded = punycode.ucs2.decode(input); - for (let i = 0; i < decoded.length; ++i) { - output += percentEncodeChar(decoded[i], isC0ControlPercentEncode); - } - return output; -} - -function findLongestZeroSequence(arr) { - let maxIdx = null; - let maxLen = 1; // only find elements > 1 - let currStart = null; - let currLen = 0; - - for (let i = 0; i < arr.length; ++i) { - if (arr[i] !== 0) { - if (currLen > maxLen) { - maxIdx = currStart; - maxLen = currLen; - } - - currStart = null; - currLen = 0; - } else { - if (currStart === null) { - currStart = i; - } - ++currLen; - } - } - - // if trailing zeros - if (currLen > maxLen) { - maxIdx = currStart; - maxLen = currLen; - } - - return { - idx: maxIdx, - len: maxLen - }; -} - -function serializeHost(host) { - if (typeof host === "number") { - return serializeIPv4(host); - } - - // IPv6 serializer - if (host instanceof Array) { - return "[" + serializeIPv6(host) + "]"; - } - - return host; -} - -function trimControlChars(url) { - return url.replace(/^[\u0000-\u001F\u0020]+|[\u0000-\u001F\u0020]+$/g, ""); -} - -function trimTabAndNewline(url) { - return url.replace(/\u0009|\u000A|\u000D/g, ""); -} - -function shortenPath(url) { - const path = url.path; - if (path.length === 0) { - return; - } - if (url.scheme === "file" && path.length === 1 && isNormalizedWindowsDriveLetter(path[0])) { - return; - } - - path.pop(); -} - -function includesCredentials(url) { - return url.username !== "" || url.password !== ""; -} - -function cannotHaveAUsernamePasswordPort(url) { - return url.host === null || url.host === "" || url.cannotBeABaseURL || url.scheme === "file"; -} - -function isNormalizedWindowsDriveLetter(string) { - return /^[A-Za-z]:$/.test(string); -} - -function URLStateMachine(input, base, encodingOverride, url, stateOverride) { - this.pointer = 0; - this.input = input; - this.base = base || null; - this.encodingOverride = encodingOverride || "utf-8"; - this.stateOverride = stateOverride; - this.url = url; - this.failure = false; - this.parseError = false; - - if (!this.url) { - this.url = { - scheme: "", - username: "", - password: "", - host: null, - port: null, - path: [], - query: null, - fragment: null, - - cannotBeABaseURL: false - }; - 
- const res = trimControlChars(this.input); - if (res !== this.input) { - this.parseError = true; - } - this.input = res; - } - - const res = trimTabAndNewline(this.input); - if (res !== this.input) { - this.parseError = true; - } - this.input = res; - - this.state = stateOverride || "scheme start"; - - this.buffer = ""; - this.atFlag = false; - this.arrFlag = false; - this.passwordTokenSeenFlag = false; - - this.input = punycode.ucs2.decode(this.input); - - for (; this.pointer <= this.input.length; ++this.pointer) { - const c = this.input[this.pointer]; - const cStr = isNaN(c) ? undefined : String.fromCodePoint(c); - - // exec state machine - const ret = this["parse " + this.state](c, cStr); - if (!ret) { - break; // terminate algorithm - } else if (ret === failure) { - this.failure = true; - break; - } - } -} - -URLStateMachine.prototype["parse scheme start"] = function parseSchemeStart(c, cStr) { - if (isASCIIAlpha(c)) { - this.buffer += cStr.toLowerCase(); - this.state = "scheme"; - } else if (!this.stateOverride) { - this.state = "no scheme"; - --this.pointer; - } else { - this.parseError = true; - return failure; - } - - return true; -}; - -URLStateMachine.prototype["parse scheme"] = function parseScheme(c, cStr) { - if (isASCIIAlphanumeric(c) || c === 43 || c === 45 || c === 46) { - this.buffer += cStr.toLowerCase(); - } else if (c === 58) { - if (this.stateOverride) { - if (isSpecial(this.url) && !isSpecialScheme(this.buffer)) { - return false; - } - - if (!isSpecial(this.url) && isSpecialScheme(this.buffer)) { - return false; - } - - if ((includesCredentials(this.url) || this.url.port !== null) && this.buffer === "file") { - return false; - } - - if (this.url.scheme === "file" && (this.url.host === "" || this.url.host === null)) { - return false; - } - } - this.url.scheme = this.buffer; - this.buffer = ""; - if (this.stateOverride) { - return false; - } - if (this.url.scheme === "file") { - if (this.input[this.pointer + 1] !== 47 || this.input[this.pointer + 2] !== 47) { - this.parseError = true; - } - this.state = "file"; - } else if (isSpecial(this.url) && this.base !== null && this.base.scheme === this.url.scheme) { - this.state = "special relative or authority"; - } else if (isSpecial(this.url)) { - this.state = "special authority slashes"; - } else if (this.input[this.pointer + 1] === 47) { - this.state = "path or authority"; - ++this.pointer; - } else { - this.url.cannotBeABaseURL = true; - this.url.path.push(""); - this.state = "cannot-be-a-base-URL path"; - } - } else if (!this.stateOverride) { - this.buffer = ""; - this.state = "no scheme"; - this.pointer = -1; - } else { - this.parseError = true; - return failure; - } - - return true; -}; - -URLStateMachine.prototype["parse no scheme"] = function parseNoScheme(c) { - if (this.base === null || (this.base.cannotBeABaseURL && c !== 35)) { - return failure; - } else if (this.base.cannotBeABaseURL && c === 35) { - this.url.scheme = this.base.scheme; - this.url.path = this.base.path.slice(); - this.url.query = this.base.query; - this.url.fragment = ""; - this.url.cannotBeABaseURL = true; - this.state = "fragment"; - } else if (this.base.scheme === "file") { - this.state = "file"; - --this.pointer; - } else { - this.state = "relative"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse special relative or authority"] = function parseSpecialRelativeOrAuthority(c) { - if (c === 47 && this.input[this.pointer + 1] === 47) { - this.state = "special authority ignore slashes"; - ++this.pointer; - } else { - 
this.parseError = true; - this.state = "relative"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse path or authority"] = function parsePathOrAuthority(c) { - if (c === 47) { - this.state = "authority"; - } else { - this.state = "path"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse relative"] = function parseRelative(c) { - this.url.scheme = this.base.scheme; - if (isNaN(c)) { - this.url.username = this.base.username; - this.url.password = this.base.password; - this.url.host = this.base.host; - this.url.port = this.base.port; - this.url.path = this.base.path.slice(); - this.url.query = this.base.query; - } else if (c === 47) { - this.state = "relative slash"; - } else if (c === 63) { - this.url.username = this.base.username; - this.url.password = this.base.password; - this.url.host = this.base.host; - this.url.port = this.base.port; - this.url.path = this.base.path.slice(); - this.url.query = ""; - this.state = "query"; - } else if (c === 35) { - this.url.username = this.base.username; - this.url.password = this.base.password; - this.url.host = this.base.host; - this.url.port = this.base.port; - this.url.path = this.base.path.slice(); - this.url.query = this.base.query; - this.url.fragment = ""; - this.state = "fragment"; - } else if (isSpecial(this.url) && c === 92) { - this.parseError = true; - this.state = "relative slash"; - } else { - this.url.username = this.base.username; - this.url.password = this.base.password; - this.url.host = this.base.host; - this.url.port = this.base.port; - this.url.path = this.base.path.slice(0, this.base.path.length - 1); - - this.state = "path"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse relative slash"] = function parseRelativeSlash(c) { - if (isSpecial(this.url) && (c === 47 || c === 92)) { - if (c === 92) { - this.parseError = true; - } - this.state = "special authority ignore slashes"; - } else if (c === 47) { - this.state = "authority"; - } else { - this.url.username = this.base.username; - this.url.password = this.base.password; - this.url.host = this.base.host; - this.url.port = this.base.port; - this.state = "path"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse special authority slashes"] = function parseSpecialAuthoritySlashes(c) { - if (c === 47 && this.input[this.pointer + 1] === 47) { - this.state = "special authority ignore slashes"; - ++this.pointer; - } else { - this.parseError = true; - this.state = "special authority ignore slashes"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse special authority ignore slashes"] = function parseSpecialAuthorityIgnoreSlashes(c) { - if (c !== 47 && c !== 92) { - this.state = "authority"; - --this.pointer; - } else { - this.parseError = true; - } - - return true; -}; - -URLStateMachine.prototype["parse authority"] = function parseAuthority(c, cStr) { - if (c === 64) { - this.parseError = true; - if (this.atFlag) { - this.buffer = "%40" + this.buffer; - } - this.atFlag = true; - - // careful, this is based on buffer and has its own pointer (this.pointer != pointer) and inner chars - const len = countSymbols(this.buffer); - for (let pointer = 0; pointer < len; ++pointer) { - const codePoint = this.buffer.codePointAt(pointer); - - if (codePoint === 58 && !this.passwordTokenSeenFlag) { - this.passwordTokenSeenFlag = true; - continue; - } - const encodedCodePoints = percentEncodeChar(codePoint, isUserinfoPercentEncode); - if 
(this.passwordTokenSeenFlag) { - this.url.password += encodedCodePoints; - } else { - this.url.username += encodedCodePoints; - } - } - this.buffer = ""; - } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || - (isSpecial(this.url) && c === 92)) { - if (this.atFlag && this.buffer === "") { - this.parseError = true; - return failure; - } - this.pointer -= countSymbols(this.buffer) + 1; - this.buffer = ""; - this.state = "host"; - } else { - this.buffer += cStr; - } - - return true; -}; - -URLStateMachine.prototype["parse hostname"] = -URLStateMachine.prototype["parse host"] = function parseHostName(c, cStr) { - if (this.stateOverride && this.url.scheme === "file") { - --this.pointer; - this.state = "file host"; - } else if (c === 58 && !this.arrFlag) { - if (this.buffer === "") { - this.parseError = true; - return failure; - } - - const host = parseHost(this.buffer, isSpecial(this.url)); - if (host === failure) { - return failure; - } - - this.url.host = host; - this.buffer = ""; - this.state = "port"; - if (this.stateOverride === "hostname") { - return false; - } - } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || - (isSpecial(this.url) && c === 92)) { - --this.pointer; - if (isSpecial(this.url) && this.buffer === "") { - this.parseError = true; - return failure; - } else if (this.stateOverride && this.buffer === "" && - (includesCredentials(this.url) || this.url.port !== null)) { - this.parseError = true; - return false; - } - - const host = parseHost(this.buffer, isSpecial(this.url)); - if (host === failure) { - return failure; - } - - this.url.host = host; - this.buffer = ""; - this.state = "path start"; - if (this.stateOverride) { - return false; - } - } else { - if (c === 91) { - this.arrFlag = true; - } else if (c === 93) { - this.arrFlag = false; - } - this.buffer += cStr; - } - - return true; -}; - -URLStateMachine.prototype["parse port"] = function parsePort(c, cStr) { - if (isASCIIDigit(c)) { - this.buffer += cStr; - } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || - (isSpecial(this.url) && c === 92) || - this.stateOverride) { - if (this.buffer !== "") { - const port = parseInt(this.buffer); - if (port > Math.pow(2, 16) - 1) { - this.parseError = true; - return failure; - } - this.url.port = port === defaultPort(this.url.scheme) ? 
null : port; - this.buffer = ""; - } - if (this.stateOverride) { - return false; - } - this.state = "path start"; - --this.pointer; - } else { - this.parseError = true; - return failure; - } - - return true; -}; - -const fileOtherwiseCodePoints = new Set([47, 92, 63, 35]); - -URLStateMachine.prototype["parse file"] = function parseFile(c) { - this.url.scheme = "file"; - - if (c === 47 || c === 92) { - if (c === 92) { - this.parseError = true; - } - this.state = "file slash"; - } else if (this.base !== null && this.base.scheme === "file") { - if (isNaN(c)) { - this.url.host = this.base.host; - this.url.path = this.base.path.slice(); - this.url.query = this.base.query; - } else if (c === 63) { - this.url.host = this.base.host; - this.url.path = this.base.path.slice(); - this.url.query = ""; - this.state = "query"; - } else if (c === 35) { - this.url.host = this.base.host; - this.url.path = this.base.path.slice(); - this.url.query = this.base.query; - this.url.fragment = ""; - this.state = "fragment"; - } else { - if (this.input.length - this.pointer - 1 === 0 || // remaining consists of 0 code points - !isWindowsDriveLetterCodePoints(c, this.input[this.pointer + 1]) || - (this.input.length - this.pointer - 1 >= 2 && // remaining has at least 2 code points - !fileOtherwiseCodePoints.has(this.input[this.pointer + 2]))) { - this.url.host = this.base.host; - this.url.path = this.base.path.slice(); - shortenPath(this.url); - } else { - this.parseError = true; - } - - this.state = "path"; - --this.pointer; - } - } else { - this.state = "path"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse file slash"] = function parseFileSlash(c) { - if (c === 47 || c === 92) { - if (c === 92) { - this.parseError = true; - } - this.state = "file host"; - } else { - if (this.base !== null && this.base.scheme === "file") { - if (isNormalizedWindowsDriveLetterString(this.base.path[0])) { - this.url.path.push(this.base.path[0]); - } else { - this.url.host = this.base.host; - } - } - this.state = "path"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse file host"] = function parseFileHost(c, cStr) { - if (isNaN(c) || c === 47 || c === 92 || c === 63 || c === 35) { - --this.pointer; - if (!this.stateOverride && isWindowsDriveLetterString(this.buffer)) { - this.parseError = true; - this.state = "path"; - } else if (this.buffer === "") { - this.url.host = ""; - if (this.stateOverride) { - return false; - } - this.state = "path start"; - } else { - let host = parseHost(this.buffer, isSpecial(this.url)); - if (host === failure) { - return failure; - } - if (host === "localhost") { - host = ""; - } - this.url.host = host; - - if (this.stateOverride) { - return false; - } - - this.buffer = ""; - this.state = "path start"; - } - } else { - this.buffer += cStr; - } - - return true; -}; - -URLStateMachine.prototype["parse path start"] = function parsePathStart(c) { - if (isSpecial(this.url)) { - if (c === 92) { - this.parseError = true; - } - this.state = "path"; - - if (c !== 47 && c !== 92) { - --this.pointer; - } - } else if (!this.stateOverride && c === 63) { - this.url.query = ""; - this.state = "query"; - } else if (!this.stateOverride && c === 35) { - this.url.fragment = ""; - this.state = "fragment"; - } else if (c !== undefined) { - this.state = "path"; - if (c !== 47) { - --this.pointer; - } - } - - return true; -}; - -URLStateMachine.prototype["parse path"] = function parsePath(c) { - if (isNaN(c) || c === 47 || (isSpecial(this.url) && c === 92) || - 
(!this.stateOverride && (c === 63 || c === 35))) { - if (isSpecial(this.url) && c === 92) { - this.parseError = true; - } - - if (isDoubleDot(this.buffer)) { - shortenPath(this.url); - if (c !== 47 && !(isSpecial(this.url) && c === 92)) { - this.url.path.push(""); - } - } else if (isSingleDot(this.buffer) && c !== 47 && - !(isSpecial(this.url) && c === 92)) { - this.url.path.push(""); - } else if (!isSingleDot(this.buffer)) { - if (this.url.scheme === "file" && this.url.path.length === 0 && isWindowsDriveLetterString(this.buffer)) { - if (this.url.host !== "" && this.url.host !== null) { - this.parseError = true; - this.url.host = ""; - } - this.buffer = this.buffer[0] + ":"; - } - this.url.path.push(this.buffer); - } - this.buffer = ""; - if (this.url.scheme === "file" && (c === undefined || c === 63 || c === 35)) { - while (this.url.path.length > 1 && this.url.path[0] === "") { - this.parseError = true; - this.url.path.shift(); - } - } - if (c === 63) { - this.url.query = ""; - this.state = "query"; - } - if (c === 35) { - this.url.fragment = ""; - this.state = "fragment"; - } - } else { - // TODO: If c is not a URL code point and not "%", parse error. - - if (c === 37 && - (!isASCIIHex(this.input[this.pointer + 1]) || - !isASCIIHex(this.input[this.pointer + 2]))) { - this.parseError = true; - } - - this.buffer += percentEncodeChar(c, isPathPercentEncode); - } - - return true; -}; - -URLStateMachine.prototype["parse cannot-be-a-base-URL path"] = function parseCannotBeABaseURLPath(c) { - if (c === 63) { - this.url.query = ""; - this.state = "query"; - } else if (c === 35) { - this.url.fragment = ""; - this.state = "fragment"; - } else { - // TODO: Add: not a URL code point - if (!isNaN(c) && c !== 37) { - this.parseError = true; - } - - if (c === 37 && - (!isASCIIHex(this.input[this.pointer + 1]) || - !isASCIIHex(this.input[this.pointer + 2]))) { - this.parseError = true; - } - - if (!isNaN(c)) { - this.url.path[0] = this.url.path[0] + percentEncodeChar(c, isC0ControlPercentEncode); - } - } - - return true; -}; - -URLStateMachine.prototype["parse query"] = function parseQuery(c, cStr) { - if (isNaN(c) || (!this.stateOverride && c === 35)) { - if (!isSpecial(this.url) || this.url.scheme === "ws" || this.url.scheme === "wss") { - this.encodingOverride = "utf-8"; - } - - const buffer = new Buffer(this.buffer); // TODO: Use encoding override instead - for (let i = 0; i < buffer.length; ++i) { - if (buffer[i] < 0x21 || buffer[i] > 0x7E || buffer[i] === 0x22 || buffer[i] === 0x23 || - buffer[i] === 0x3C || buffer[i] === 0x3E) { - this.url.query += percentEncode(buffer[i]); - } else { - this.url.query += String.fromCodePoint(buffer[i]); - } - } - - this.buffer = ""; - if (c === 35) { - this.url.fragment = ""; - this.state = "fragment"; - } - } else { - // TODO: If c is not a URL code point and not "%", parse error. - if (c === 37 && - (!isASCIIHex(this.input[this.pointer + 1]) || - !isASCIIHex(this.input[this.pointer + 2]))) { - this.parseError = true; - } - - this.buffer += cStr; - } - - return true; -}; - -URLStateMachine.prototype["parse fragment"] = function parseFragment(c) { - if (isNaN(c)) { // do nothing - } else if (c === 0x0) { - this.parseError = true; - } else { - // TODO: If c is not a URL code point and not "%", parse error. 
- if (c === 37 && - (!isASCIIHex(this.input[this.pointer + 1]) || - !isASCIIHex(this.input[this.pointer + 2]))) { - this.parseError = true; - } - - this.url.fragment += percentEncodeChar(c, isC0ControlPercentEncode); - } - - return true; -}; - -function serializeURL(url, excludeFragment) { - let output = url.scheme + ":"; - if (url.host !== null) { - output += "//"; - - if (url.username !== "" || url.password !== "") { - output += url.username; - if (url.password !== "") { - output += ":" + url.password; - } - output += "@"; - } - - output += serializeHost(url.host); - - if (url.port !== null) { - output += ":" + url.port; - } - } else if (url.host === null && url.scheme === "file") { - output += "//"; - } - - if (url.cannotBeABaseURL) { - output += url.path[0]; - } else { - for (const string of url.path) { - output += "/" + string; - } - } - - if (url.query !== null) { - output += "?" + url.query; - } - - if (!excludeFragment && url.fragment !== null) { - output += "#" + url.fragment; - } - - return output; -} - -function serializeOrigin(tuple) { - let result = tuple.scheme + "://"; - result += serializeHost(tuple.host); - - if (tuple.port !== null) { - result += ":" + tuple.port; - } - - return result; -} - -module.exports.serializeURL = serializeURL; - -module.exports.serializeURLOrigin = function (url) { - // https://url.spec.whatwg.org/#concept-url-origin - switch (url.scheme) { - case "blob": - try { - return module.exports.serializeURLOrigin(module.exports.parseURL(url.path[0])); - } catch (e) { - // serializing an opaque origin returns "null" - return "null"; - } - case "ftp": - case "gopher": - case "http": - case "https": - case "ws": - case "wss": - return serializeOrigin({ - scheme: url.scheme, - host: url.host, - port: url.port - }); - case "file": - // spec says "exercise to the reader", chrome says "file://" - return "file://"; - default: - // serializing an opaque origin returns "null" - return "null"; - } -}; - -module.exports.basicURLParse = function (input, options) { - if (options === undefined) { - options = {}; - } - - const usm = new URLStateMachine(input, options.baseURL, options.encodingOverride, options.url, options.stateOverride); - if (usm.failure) { - return "failure"; - } - - return usm.url; -}; - -module.exports.setTheUsername = function (url, username) { - url.username = ""; - const decoded = punycode.ucs2.decode(username); - for (let i = 0; i < decoded.length; ++i) { - url.username += percentEncodeChar(decoded[i], isUserinfoPercentEncode); - } -}; - -module.exports.setThePassword = function (url, password) { - url.password = ""; - const decoded = punycode.ucs2.decode(password); - for (let i = 0; i < decoded.length; ++i) { - url.password += percentEncodeChar(decoded[i], isUserinfoPercentEncode); - } -}; - -module.exports.serializeHost = serializeHost; - -module.exports.cannotHaveAUsernamePasswordPort = cannotHaveAUsernamePasswordPort; - -module.exports.serializeInteger = function (integer) { - return String(integer); -}; - -module.exports.parseURL = function (input, options) { - if (options === undefined) { - options = {}; - } - - // We don't handle blobs, so this just delegates: - return module.exports.basicURLParse(input, { baseURL: options.baseURL, encodingOverride: options.encodingOverride }); -}; - - -/***/ }), - -/***/ 3185: -/***/ ((module) => { - -"use strict"; - - -module.exports.mixin = function mixin(target, source) { - const keys = Object.getOwnPropertyNames(source); - for (let i = 0; i < keys.length; ++i) { - Object.defineProperty(target, 
keys[i], Object.getOwnPropertyDescriptor(source, keys[i])); - } -}; - -module.exports.wrapperSymbol = Symbol("wrapper"); -module.exports.implSymbol = Symbol("impl"); - -module.exports.wrapperForImpl = function (impl) { - return impl[module.exports.wrapperSymbol]; -}; - -module.exports.implForWrapper = function (wrapper) { - return wrapper[module.exports.implSymbol]; -}; - - - -/***/ }), - -/***/ 2940: -/***/ ((module) => { - -// Returns a wrapper function that returns a wrapped callback -// The wrapper function should do some stuff, and return a -// presumably different callback function. -// This makes sure that own properties are retained, so that -// decorations and such are not lost along the way. -module.exports = wrappy -function wrappy (fn, cb) { - if (fn && cb) return wrappy(fn)(cb) - - if (typeof fn !== 'function') - throw new TypeError('need wrapper function') - - Object.keys(fn).forEach(function (k) { - wrapper[k] = fn[k] - }) - - return wrapper - - function wrapper() { - var args = new Array(arguments.length) - for (var i = 0; i < args.length; i++) { - args[i] = arguments[i] - } - var ret = fn.apply(this, args) - var cb = args[args.length-1] - if (typeof ret === 'function' && ret !== cb) { - Object.keys(cb).forEach(function (k) { - ret[k] = cb[k] - }) - } - return ret - } -} - - -/***/ }), - -/***/ 4091: -/***/ ((module) => { - -"use strict"; - -module.exports = function (Yallist) { - Yallist.prototype[Symbol.iterator] = function* () { - for (let walker = this.head; walker; walker = walker.next) { - yield walker.value - } - } -} - - -/***/ }), - -/***/ 665: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - -module.exports = Yallist - -Yallist.Node = Node -Yallist.create = Yallist - -function Yallist (list) { - var self = this - if (!(self instanceof Yallist)) { - self = new Yallist() - } - - self.tail = null - self.head = null - self.length = 0 - - if (list && typeof list.forEach === 'function') { - list.forEach(function (item) { - self.push(item) - }) - } else if (arguments.length > 0) { - for (var i = 0, l = arguments.length; i < l; i++) { - self.push(arguments[i]) - } - } - - return self -} - -Yallist.prototype.removeNode = function (node) { - if (node.list !== this) { - throw new Error('removing node which does not belong to this list') - } - - var next = node.next - var prev = node.prev - - if (next) { - next.prev = prev - } - - if (prev) { - prev.next = next - } - - if (node === this.head) { - this.head = next - } - if (node === this.tail) { - this.tail = prev - } - - node.list.length-- - node.next = null - node.prev = null - node.list = null - - return next -} - -Yallist.prototype.unshiftNode = function (node) { - if (node === this.head) { - return - } - - if (node.list) { - node.list.removeNode(node) - } - - var head = this.head - node.list = this - node.next = head - if (head) { - head.prev = node - } - - this.head = node - if (!this.tail) { - this.tail = node - } - this.length++ -} - -Yallist.prototype.pushNode = function (node) { - if (node === this.tail) { - return - } - - if (node.list) { - node.list.removeNode(node) - } - - var tail = this.tail - node.list = this - node.prev = tail - if (tail) { - tail.next = node - } - - this.tail = node - if (!this.head) { - this.head = node - } - this.length++ -} - -Yallist.prototype.push = function () { - for (var i = 0, l = arguments.length; i < l; i++) { - push(this, arguments[i]) - } - return this.length -} - -Yallist.prototype.unshift = function () { - for (var i = 0, l = 
arguments.length; i < l; i++) { - unshift(this, arguments[i]) - } - return this.length -} - -Yallist.prototype.pop = function () { - if (!this.tail) { - return undefined - } - - var res = this.tail.value - this.tail = this.tail.prev - if (this.tail) { - this.tail.next = null - } else { - this.head = null - } - this.length-- - return res -} - -Yallist.prototype.shift = function () { - if (!this.head) { - return undefined - } - - var res = this.head.value - this.head = this.head.next - if (this.head) { - this.head.prev = null - } else { - this.tail = null - } - this.length-- - return res -} - -Yallist.prototype.forEach = function (fn, thisp) { - thisp = thisp || this - for (var walker = this.head, i = 0; walker !== null; i++) { - fn.call(thisp, walker.value, i, this) - walker = walker.next - } -} - -Yallist.prototype.forEachReverse = function (fn, thisp) { - thisp = thisp || this - for (var walker = this.tail, i = this.length - 1; walker !== null; i--) { - fn.call(thisp, walker.value, i, this) - walker = walker.prev - } -} - -Yallist.prototype.get = function (n) { - for (var i = 0, walker = this.head; walker !== null && i < n; i++) { - // abort out of the list early if we hit a cycle - walker = walker.next - } - if (i === n && walker !== null) { - return walker.value - } -} - -Yallist.prototype.getReverse = function (n) { - for (var i = 0, walker = this.tail; walker !== null && i < n; i++) { - // abort out of the list early if we hit a cycle - walker = walker.prev - } - if (i === n && walker !== null) { - return walker.value - } -} - -Yallist.prototype.map = function (fn, thisp) { - thisp = thisp || this - var res = new Yallist() - for (var walker = this.head; walker !== null;) { - res.push(fn.call(thisp, walker.value, this)) - walker = walker.next - } - return res -} - -Yallist.prototype.mapReverse = function (fn, thisp) { - thisp = thisp || this - var res = new Yallist() - for (var walker = this.tail; walker !== null;) { - res.push(fn.call(thisp, walker.value, this)) - walker = walker.prev - } - return res -} - -Yallist.prototype.reduce = function (fn, initial) { - var acc - var walker = this.head - if (arguments.length > 1) { - acc = initial - } else if (this.head) { - walker = this.head.next - acc = this.head.value - } else { - throw new TypeError('Reduce of empty list with no initial value') - } - - for (var i = 0; walker !== null; i++) { - acc = fn(acc, walker.value, i) - walker = walker.next - } - - return acc -} - -Yallist.prototype.reduceReverse = function (fn, initial) { - var acc - var walker = this.tail - if (arguments.length > 1) { - acc = initial - } else if (this.tail) { - walker = this.tail.prev - acc = this.tail.value - } else { - throw new TypeError('Reduce of empty list with no initial value') - } - - for (var i = this.length - 1; walker !== null; i--) { - acc = fn(acc, walker.value, i) - walker = walker.prev - } - - return acc -} - -Yallist.prototype.toArray = function () { - var arr = new Array(this.length) - for (var i = 0, walker = this.head; walker !== null; i++) { - arr[i] = walker.value - walker = walker.next - } - return arr -} - -Yallist.prototype.toArrayReverse = function () { - var arr = new Array(this.length) - for (var i = 0, walker = this.tail; walker !== null; i++) { - arr[i] = walker.value - walker = walker.prev - } - return arr -} - -Yallist.prototype.slice = function (from, to) { - to = to || this.length - if (to < 0) { - to += this.length - } - from = from || 0 - if (from < 0) { - from += this.length - } - var ret = new Yallist() - if (to < from || to < 
0) { - return ret - } - if (from < 0) { - from = 0 - } - if (to > this.length) { - to = this.length - } - for (var i = 0, walker = this.head; walker !== null && i < from; i++) { - walker = walker.next - } - for (; walker !== null && i < to; i++, walker = walker.next) { - ret.push(walker.value) - } - return ret -} - -Yallist.prototype.sliceReverse = function (from, to) { - to = to || this.length - if (to < 0) { - to += this.length - } - from = from || 0 - if (from < 0) { - from += this.length - } - var ret = new Yallist() - if (to < from || to < 0) { - return ret - } - if (from < 0) { - from = 0 - } - if (to > this.length) { - to = this.length - } - for (var i = this.length, walker = this.tail; walker !== null && i > to; i--) { - walker = walker.prev - } - for (; walker !== null && i > from; i--, walker = walker.prev) { - ret.push(walker.value) - } - return ret -} - -Yallist.prototype.splice = function (start, deleteCount, ...nodes) { - if (start > this.length) { - start = this.length - 1 - } - if (start < 0) { - start = this.length + start; - } - - for (var i = 0, walker = this.head; walker !== null && i < start; i++) { - walker = walker.next - } - - var ret = [] - for (var i = 0; walker && i < deleteCount; i++) { - ret.push(walker.value) - walker = this.removeNode(walker) - } - if (walker === null) { - walker = this.tail - } - - if (walker !== this.head && walker !== this.tail) { - walker = walker.prev - } - - for (var i = 0; i < nodes.length; i++) { - walker = insert(this, walker, nodes[i]) - } - return ret; -} - -Yallist.prototype.reverse = function () { - var head = this.head - var tail = this.tail - for (var walker = head; walker !== null; walker = walker.prev) { - var p = walker.prev - walker.prev = walker.next - walker.next = p - } - this.head = tail - this.tail = head - return this -} - -function insert (self, node, value) { - var inserted = node === self.head ? 
- new Node(value, null, node, self) : - new Node(value, node, node.next, self) - - if (inserted.next === null) { - self.tail = inserted - } - if (inserted.prev === null) { - self.head = inserted - } - - self.length++ - - return inserted -} - -function push (self, item) { - self.tail = new Node(item, self.tail, null, self) - if (!self.head) { - self.head = self.tail - } - self.length++ -} - -function unshift (self, item) { - self.head = new Node(item, null, self.head, self) - if (!self.tail) { - self.tail = self.head - } - self.length++ -} - -function Node (value, prev, next, list) { - if (!(this instanceof Node)) { - return new Node(value, prev, next, list) - } - - this.list = list - this.value = value - - if (prev) { - prev.next = this - this.prev = prev - } else { - this.prev = null - } - - if (next) { - next.prev = this - this.next = next - } else { - this.next = null - } -} - -try { - // add if support for Symbol.iterator is present - __nccwpck_require__(4091)(Yallist) -} catch (er) {} - - -/***/ }), - -/***/ 7786: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); -var Scalar = __nccwpck_require__(8160); -var resolveBlockMap = __nccwpck_require__(6638); -var resolveBlockSeq = __nccwpck_require__(2257); -var resolveFlowCollection = __nccwpck_require__(6085); - -function composeCollection(CN, ctx, token, tagToken, onError) { - let coll; - switch (token.type) { - case 'block-map': { - coll = resolveBlockMap.resolveBlockMap(CN, ctx, token, onError); - break; - } - case 'block-seq': { - coll = resolveBlockSeq.resolveBlockSeq(CN, ctx, token, onError); - break; - } - case 'flow-collection': { - coll = resolveFlowCollection.resolveFlowCollection(CN, ctx, token, onError); - break; - } - } - if (!tagToken) - return coll; - const tagName = ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg)); - if (!tagName) - return coll; - // Cast needed due to: https://github.com/Microsoft/TypeScript/issues/3841 - const Coll = coll.constructor; - if (tagName === '!' || tagName === Coll.tagName) { - coll.tag = Coll.tagName; - return coll; - } - const expType = Node.isMap(coll) ? 'map' : 'seq'; - let tag = ctx.schema.tags.find(t => t.collection === expType && t.tag === tagName); - if (!tag) { - const kt = ctx.schema.knownTags[tagName]; - if (kt && kt.collection === expType) { - ctx.schema.tags.push(Object.assign({}, kt, { default: false })); - tag = kt; - } - else { - onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, true); - coll.tag = tagName; - return coll; - } - } - const res = tag.resolve(coll, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg), ctx.options); - const node = Node.isNode(res) - ? 
res - : new Scalar.Scalar(res); - node.range = coll.range; - node.tag = tagName; - if (tag?.format) - node.format = tag.format; - return node; -} - -exports.composeCollection = composeCollection; - - -/***/ }), - -/***/ 9157: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Document = __nccwpck_require__(1785); -var composeNode = __nccwpck_require__(6572); -var resolveEnd = __nccwpck_require__(1789); -var resolveProps = __nccwpck_require__(9663); - -function composeDoc(options, directives, { offset, start, value, end }, onError) { - const opts = Object.assign({ _directives: directives }, options); - const doc = new Document.Document(undefined, opts); - const ctx = { - atRoot: true, - directives: doc.directives, - options: doc.options, - schema: doc.schema - }; - const props = resolveProps.resolveProps(start, { - indicator: 'doc-start', - next: value ?? end?.[0], - offset, - onError, - startOnNewline: true - }); - if (props.found) { - doc.directives.docStart = true; - if (value && - (value.type === 'block-map' || value.type === 'block-seq') && - !props.hasNewline) - onError(props.end, 'MISSING_CHAR', 'Block collection cannot start on same line with directives-end marker'); - } - doc.contents = value - ? composeNode.composeNode(ctx, value, props, onError) - : composeNode.composeEmptyNode(ctx, props.end, start, null, props, onError); - const contentEnd = doc.contents.range[2]; - const re = resolveEnd.resolveEnd(end, contentEnd, false, onError); - if (re.comment) - doc.comment = re.comment; - doc.range = [offset, contentEnd, re.offset]; - return doc; -} - -exports.composeDoc = composeDoc; - - -/***/ }), - -/***/ 6572: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Alias = __nccwpck_require__(9712); -var composeCollection = __nccwpck_require__(7786); -var composeScalar = __nccwpck_require__(1490); -var resolveEnd = __nccwpck_require__(1789); -var utilEmptyScalarPosition = __nccwpck_require__(8517); - -const CN = { composeNode, composeEmptyNode }; -function composeNode(ctx, token, props, onError) { - const { spaceBefore, comment, anchor, tag } = props; - let node; - let isSrcToken = true; - switch (token.type) { - case 'alias': - node = composeAlias(ctx, token, onError); - if (anchor || tag) - onError(token, 'ALIAS_PROPS', 'An alias node must not specify any properties'); - break; - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': - case 'block-scalar': - node = composeScalar.composeScalar(ctx, token, tag, onError); - if (anchor) - node.anchor = anchor.source.substring(1); - break; - case 'block-map': - case 'block-seq': - case 'flow-collection': - node = composeCollection.composeCollection(CN, ctx, token, tag, onError); - if (anchor) - node.anchor = anchor.source.substring(1); - break; - default: { - const message = token.type === 'error' - ? 
token.message - : `Unsupported token (type: ${token.type})`; - onError(token, 'UNEXPECTED_TOKEN', message); - node = composeEmptyNode(ctx, token.offset, undefined, null, props, onError); - isSrcToken = false; - } - } - if (anchor && node.anchor === '') - onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string'); - if (spaceBefore) - node.spaceBefore = true; - if (comment) { - if (token.type === 'scalar' && token.source === '') - node.comment = comment; - else - node.commentBefore = comment; - } - // @ts-expect-error Type checking misses meaning of isSrcToken - if (ctx.options.keepSourceTokens && isSrcToken) - node.srcToken = token; - return node; -} -function composeEmptyNode(ctx, offset, before, pos, { spaceBefore, comment, anchor, tag }, onError) { - const token = { - type: 'scalar', - offset: utilEmptyScalarPosition.emptyScalarPosition(offset, before, pos), - indent: -1, - source: '' - }; - const node = composeScalar.composeScalar(ctx, token, tag, onError); - if (anchor) { - node.anchor = anchor.source.substring(1); - if (node.anchor === '') - onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string'); - } - if (spaceBefore) - node.spaceBefore = true; - if (comment) - node.comment = comment; - return node; -} -function composeAlias({ options }, { offset, source, end }, onError) { - const alias = new Alias.Alias(source.substring(1)); - if (alias.source === '') - onError(offset, 'BAD_ALIAS', 'Alias cannot be an empty string'); - if (alias.source.endsWith(':')) - onError(offset + source.length - 1, 'BAD_ALIAS', 'Alias ending in : is ambiguous', true); - const valueEnd = offset + source.length; - const re = resolveEnd.resolveEnd(end, valueEnd, options.strict, onError); - alias.range = [offset, valueEnd, re.offset]; - if (re.comment) - alias.comment = re.comment; - return alias; -} - -exports.composeEmptyNode = composeEmptyNode; -exports.composeNode = composeNode; - - -/***/ }), - -/***/ 1490: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); -var Scalar = __nccwpck_require__(8160); -var resolveBlockScalar = __nccwpck_require__(8172); -var resolveFlowScalar = __nccwpck_require__(6390); - -function composeScalar(ctx, token, tagToken, onError) { - const { value, type, comment, range } = token.type === 'block-scalar' - ? resolveBlockScalar.resolveBlockScalar(token, ctx.options.strict, onError) - : resolveFlowScalar.resolveFlowScalar(token, ctx.options.strict, onError); - const tagName = tagToken - ? ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg)) - : null; - const tag = tagToken && tagName - ? findScalarTagByName(ctx.schema, value, tagName, tagToken, onError) - : token.type === 'scalar' - ? findScalarTagByTest(ctx, value, token, onError) - : ctx.schema[Node.SCALAR]; - let scalar; - try { - const res = tag.resolve(value, msg => onError(tagToken ?? token, 'TAG_RESOLVE_FAILED', msg), ctx.options); - scalar = Node.isScalar(res) ? res : new Scalar.Scalar(res); - } - catch (error) { - const msg = error instanceof Error ? error.message : String(error); - onError(tagToken ?? 
token, 'TAG_RESOLVE_FAILED', msg); - scalar = new Scalar.Scalar(value); - } - scalar.range = range; - scalar.source = value; - if (type) - scalar.type = type; - if (tagName) - scalar.tag = tagName; - if (tag.format) - scalar.format = tag.format; - if (comment) - scalar.comment = comment; - return scalar; -} -function findScalarTagByName(schema, value, tagName, tagToken, onError) { - if (tagName === '!') - return schema[Node.SCALAR]; // non-specific tag - const matchWithTest = []; - for (const tag of schema.tags) { - if (!tag.collection && tag.tag === tagName) { - if (tag.default && tag.test) - matchWithTest.push(tag); - else - return tag; - } - } - for (const tag of matchWithTest) - if (tag.test?.test(value)) - return tag; - const kt = schema.knownTags[tagName]; - if (kt && !kt.collection) { - // Ensure that the known tag is available for stringifying, - // but does not get used by default. - schema.tags.push(Object.assign({}, kt, { default: false, test: undefined })); - return kt; - } - onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, tagName !== 'tag:yaml.org,2002:str'); - return schema[Node.SCALAR]; -} -function findScalarTagByTest({ directives, schema }, value, token, onError) { - const tag = schema.tags.find(tag => tag.default && tag.test?.test(value)) || schema[Node.SCALAR]; - if (schema.compat) { - const compat = schema.compat.find(tag => tag.default && tag.test?.test(value)) ?? - schema[Node.SCALAR]; - if (tag.tag !== compat.tag) { - const ts = directives.tagString(tag.tag); - const cs = directives.tagString(compat.tag); - const msg = `Value may be parsed as either ${ts} or ${cs}`; - onError(token, 'TAG_RESOLVE_FAILED', msg, true); - } - } - return tag; -} - -exports.composeScalar = composeScalar; - - -/***/ }), - -/***/ 9386: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var directives = __nccwpck_require__(8729); -var Document = __nccwpck_require__(1785); -var errors = __nccwpck_require__(3305); -var Node = __nccwpck_require__(9752); -var composeDoc = __nccwpck_require__(9157); -var resolveEnd = __nccwpck_require__(1789); - -function getErrorPos(src) { - if (typeof src === 'number') - return [src, src + 1]; - if (Array.isArray(src)) - return src.length === 2 ? src : [src[0], src[1]]; - const { offset, source } = src; - return [offset, offset + (typeof source === 'string' ? source.length : 1)]; -} -function parsePrelude(prelude) { - let comment = ''; - let atComment = false; - let afterEmptyLine = false; - for (let i = 0; i < prelude.length; ++i) { - const source = prelude[i]; - switch (source[0]) { - case '#': - comment += - (comment === '' ? '' : afterEmptyLine ? '\n\n' : '\n') + - (source.substring(1) || ' '); - atComment = true; - afterEmptyLine = false; - break; - case '%': - if (prelude[i + 1]?.[0] !== '#') - i += 1; - atComment = false; - break; - default: - // This may be wrong after doc-end, but in that case it doesn't matter - if (!atComment) - afterEmptyLine = true; - atComment = false; - } - } - return { comment, afterEmptyLine }; -} -/** - * Compose a stream of CST nodes into a stream of YAML Documents. - * - * ```ts - * import { Composer, Parser } from 'yaml' - * - * const src: string = ... 
- * const tokens = new Parser().parse(src) - * const docs = new Composer().compose(tokens) - * ``` - */ -class Composer { - constructor(options = {}) { - this.doc = null; - this.atDirectives = false; - this.prelude = []; - this.errors = []; - this.warnings = []; - this.onError = (source, code, message, warning) => { - const pos = getErrorPos(source); - if (warning) - this.warnings.push(new errors.YAMLWarning(pos, code, message)); - else - this.errors.push(new errors.YAMLParseError(pos, code, message)); - }; - // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing - this.directives = new directives.Directives({ version: options.version || '1.2' }); - this.options = options; - } - decorate(doc, afterDoc) { - const { comment, afterEmptyLine } = parsePrelude(this.prelude); - //console.log({ dc: doc.comment, prelude, comment }) - if (comment) { - const dc = doc.contents; - if (afterDoc) { - doc.comment = doc.comment ? `${doc.comment}\n${comment}` : comment; - } - else if (afterEmptyLine || doc.directives.docStart || !dc) { - doc.commentBefore = comment; - } - else if (Node.isCollection(dc) && !dc.flow && dc.items.length > 0) { - let it = dc.items[0]; - if (Node.isPair(it)) - it = it.key; - const cb = it.commentBefore; - it.commentBefore = cb ? `${comment}\n${cb}` : comment; - } - else { - const cb = dc.commentBefore; - dc.commentBefore = cb ? `${comment}\n${cb}` : comment; - } - } - if (afterDoc) { - Array.prototype.push.apply(doc.errors, this.errors); - Array.prototype.push.apply(doc.warnings, this.warnings); - } - else { - doc.errors = this.errors; - doc.warnings = this.warnings; - } - this.prelude = []; - this.errors = []; - this.warnings = []; - } - /** - * Current stream status information. - * - * Mostly useful at the end of input for an empty stream. - */ - streamInfo() { - return { - comment: parsePrelude(this.prelude).comment, - directives: this.directives, - errors: this.errors, - warnings: this.warnings - }; - } - /** - * Compose tokens into documents. - * - * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document. - * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly. - */ - *compose(tokens, forceDoc = false, endOffset = -1) { - for (const token of tokens) - yield* this.next(token); - yield* this.end(forceDoc, endOffset); - } - /** Advance the composer by one CST token. */ - *next(token) { - if (process.env.LOG_STREAM) - console.dir(token, { depth: null }); - switch (token.type) { - case 'directive': - this.directives.add(token.source, (offset, message, warning) => { - const pos = getErrorPos(token); - pos[0] += offset; - this.onError(pos, 'BAD_DIRECTIVE', message, warning); - }); - this.prelude.push(token.source); - this.atDirectives = true; - break; - case 'document': { - const doc = composeDoc.composeDoc(this.options, this.directives, token, this.onError); - if (this.atDirectives && !doc.directives.docStart) - this.onError(token, 'MISSING_CHAR', 'Missing directives-end/doc-start indicator line'); - this.decorate(doc, false); - if (this.doc) - yield this.doc; - this.doc = doc; - this.atDirectives = false; - break; - } - case 'byte-order-mark': - case 'space': - break; - case 'comment': - case 'newline': - this.prelude.push(token.source); - break; - case 'error': { - const msg = token.source - ? 
`${token.message}: ${JSON.stringify(token.source)}` - : token.message; - const error = new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg); - if (this.atDirectives || !this.doc) - this.errors.push(error); - else - this.doc.errors.push(error); - break; - } - case 'doc-end': { - if (!this.doc) { - const msg = 'Unexpected doc-end without preceding document'; - this.errors.push(new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg)); - break; - } - this.doc.directives.docEnd = true; - const end = resolveEnd.resolveEnd(token.end, token.offset + token.source.length, this.doc.options.strict, this.onError); - this.decorate(this.doc, true); - if (end.comment) { - const dc = this.doc.comment; - this.doc.comment = dc ? `${dc}\n${end.comment}` : end.comment; - } - this.doc.range[2] = end.offset; - break; - } - default: - this.errors.push(new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', `Unsupported token ${token.type}`)); - } - } - /** - * Call at end of input to yield any remaining document. - * - * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document. - * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly. - */ - *end(forceDoc = false, endOffset = -1) { - if (this.doc) { - this.decorate(this.doc, true); - yield this.doc; - this.doc = null; - } - else if (forceDoc) { - const opts = Object.assign({ _directives: this.directives }, this.options); - const doc = new Document.Document(undefined, opts); - if (this.atDirectives) - this.onError(endOffset, 'MISSING_CHAR', 'Missing directives-end indicator line'); - doc.range = [0, endOffset, endOffset]; - this.decorate(doc, false); - yield doc; - } - } -} - -exports.Composer = Composer; - - -/***/ }), - -/***/ 6638: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Pair = __nccwpck_require__(3497); -var YAMLMap = __nccwpck_require__(6761); -var resolveProps = __nccwpck_require__(9663); -var utilContainsNewline = __nccwpck_require__(7801); -var utilFlowIndentCheck = __nccwpck_require__(7013); -var utilMapIncludes = __nccwpck_require__(379); - -const startColMsg = 'All mapping items must start at the same column'; -function resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError) { - const map = new YAMLMap.YAMLMap(ctx.schema); - if (ctx.atRoot) - ctx.atRoot = false; - let offset = bm.offset; - for (const collItem of bm.items) { - const { start, key, sep, value } = collItem; - // key properties - const keyProps = resolveProps.resolveProps(start, { - indicator: 'explicit-key-ind', - next: key ?? sep?.[0], - offset, - onError, - startOnNewline: true - }); - const implicitKey = !keyProps.found; - if (implicitKey) { - if (key) { - if (key.type === 'block-seq') - onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'A block sequence may not be used as an implicit map key'); - else if ('indent' in key && key.indent !== bm.indent) - onError(offset, 'BAD_INDENT', startColMsg); - } - if (!keyProps.anchor && !keyProps.tag && !sep) { - // TODO: assert being at last item? - if (keyProps.comment) { - if (map.comment) - map.comment += '\n' + keyProps.comment; - else - map.comment = keyProps.comment; - } - continue; - } - if (keyProps.hasNewlineAfterProp || utilContainsNewline.containsNewline(key)) { - onError(key ?? 
start[start.length - 1], 'MULTILINE_IMPLICIT_KEY', 'Implicit keys need to be on a single line'); - } - } - else if (keyProps.found?.indent !== bm.indent) { - onError(offset, 'BAD_INDENT', startColMsg); - } - // key value - const keyStart = keyProps.end; - const keyNode = key - ? composeNode(ctx, key, keyProps, onError) - : composeEmptyNode(ctx, keyStart, start, null, keyProps, onError); - if (ctx.schema.compat) - utilFlowIndentCheck.flowIndentCheck(bm.indent, key, onError); - if (utilMapIncludes.mapIncludes(ctx, map.items, keyNode)) - onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique'); - // value properties - const valueProps = resolveProps.resolveProps(sep ?? [], { - indicator: 'map-value-ind', - next: value, - offset: keyNode.range[2], - onError, - startOnNewline: !key || key.type === 'block-scalar' - }); - offset = valueProps.end; - if (valueProps.found) { - if (implicitKey) { - if (value?.type === 'block-map' && !valueProps.hasNewline) - onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'Nested mappings are not allowed in compact mappings'); - if (ctx.options.strict && - keyProps.start < valueProps.found.offset - 1024) - onError(keyNode.range, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit block mapping key'); - } - // value value - const valueNode = value - ? composeNode(ctx, value, valueProps, onError) - : composeEmptyNode(ctx, offset, sep, null, valueProps, onError); - if (ctx.schema.compat) - utilFlowIndentCheck.flowIndentCheck(bm.indent, value, onError); - offset = valueNode.range[2]; - const pair = new Pair.Pair(keyNode, valueNode); - if (ctx.options.keepSourceTokens) - pair.srcToken = collItem; - map.items.push(pair); - } - else { - // key with no value - if (implicitKey) - onError(keyNode.range, 'MISSING_CHAR', 'Implicit map keys need to be followed by map values'); - if (valueProps.comment) { - if (keyNode.comment) - keyNode.comment += '\n' + valueProps.comment; - else - keyNode.comment = valueProps.comment; - } - const pair = new Pair.Pair(keyNode); - if (ctx.options.keepSourceTokens) - pair.srcToken = collItem; - map.items.push(pair); - } - } - map.range = [bm.offset, offset, offset]; - return map; -} - -exports.resolveBlockMap = resolveBlockMap; - - -/***/ }), - -/***/ 8172: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Scalar = __nccwpck_require__(8160); - -function resolveBlockScalar(scalar, strict, onError) { - const start = scalar.offset; - const header = parseBlockScalarHeader(scalar, strict, onError); - if (!header) - return { value: '', type: null, comment: '', range: [start, start, start] }; - const type = header.mode === '>' ? Scalar.Scalar.BLOCK_FOLDED : Scalar.Scalar.BLOCK_LITERAL; - const lines = scalar.source ? splitLines(scalar.source) : []; - // determine the end of content & start of chomping - let chompStart = lines.length; - for (let i = lines.length - 1; i >= 0; --i) { - const content = lines[i][1]; - if (content === '' || content === '\r') - chompStart = i; - else - break; - } - // shortcut for empty contents - if (chompStart === 0) { - const value = header.chomp === '+' && lines.length > 0 - ? 
'\n'.repeat(Math.max(1, lines.length - 1)) - : ''; - let end = start + header.length; - if (scalar.source) - end += scalar.source.length; - return { value, type, comment: header.comment, range: [start, end, end] }; - } - // find the indentation level to trim from start - let trimIndent = scalar.indent + header.indent; - let offset = scalar.offset + header.length; - let contentStart = 0; - for (let i = 0; i < chompStart; ++i) { - const [indent, content] = lines[i]; - if (content === '' || content === '\r') { - if (header.indent === 0 && indent.length > trimIndent) - trimIndent = indent.length; - } - else { - if (indent.length < trimIndent) { - const message = 'Block scalars with more-indented leading empty lines must use an explicit indentation indicator'; - onError(offset + indent.length, 'MISSING_CHAR', message); - } - if (header.indent === 0) - trimIndent = indent.length; - contentStart = i; - break; - } - offset += indent.length + content.length + 1; - } - // include trailing more-indented empty lines in content - for (let i = lines.length - 1; i >= chompStart; --i) { - if (lines[i][0].length > trimIndent) - chompStart = i + 1; - } - let value = ''; - let sep = ''; - let prevMoreIndented = false; - // leading whitespace is kept intact - for (let i = 0; i < contentStart; ++i) - value += lines[i][0].slice(trimIndent) + '\n'; - for (let i = contentStart; i < chompStart; ++i) { - let [indent, content] = lines[i]; - offset += indent.length + content.length + 1; - const crlf = content[content.length - 1] === '\r'; - if (crlf) - content = content.slice(0, -1); - /* istanbul ignore if already caught in lexer */ - if (content && indent.length < trimIndent) { - const src = header.indent - ? 'explicit indentation indicator' - : 'first line'; - const message = `Block scalar lines must not be less indented than their ${src}`; - onError(offset - content.length - (crlf ? 
2 : 1), 'BAD_INDENT', message); - indent = ''; - } - if (type === Scalar.Scalar.BLOCK_LITERAL) { - value += sep + indent.slice(trimIndent) + content; - sep = '\n'; - } - else if (indent.length > trimIndent || content[0] === '\t') { - // more-indented content within a folded block - if (sep === ' ') - sep = '\n'; - else if (!prevMoreIndented && sep === '\n') - sep = '\n\n'; - value += sep + indent.slice(trimIndent) + content; - sep = '\n'; - prevMoreIndented = true; - } - else if (content === '') { - // empty line - if (sep === '\n') - value += '\n'; - else - sep = '\n'; - } - else { - value += sep + content; - sep = ' '; - prevMoreIndented = false; - } - } - switch (header.chomp) { - case '-': - break; - case '+': - for (let i = chompStart; i < lines.length; ++i) - value += '\n' + lines[i][0].slice(trimIndent); - if (value[value.length - 1] !== '\n') - value += '\n'; - break; - default: - value += '\n'; - } - const end = start + header.length + scalar.source.length; - return { value, type, comment: header.comment, range: [start, end, end] }; -} -function parseBlockScalarHeader({ offset, props }, strict, onError) { - /* istanbul ignore if should not happen */ - if (props[0].type !== 'block-scalar-header') { - onError(props[0], 'IMPOSSIBLE', 'Block scalar header not found'); - return null; - } - const { source } = props[0]; - const mode = source[0]; - let indent = 0; - let chomp = ''; - let error = -1; - for (let i = 1; i < source.length; ++i) { - const ch = source[i]; - if (!chomp && (ch === '-' || ch === '+')) - chomp = ch; - else { - const n = Number(ch); - if (!indent && n) - indent = n; - else if (error === -1) - error = offset + i; - } - } - if (error !== -1) - onError(error, 'UNEXPECTED_TOKEN', `Block scalar header includes extra characters: ${source}`); - let hasSpace = false; - let comment = ''; - let length = source.length; - for (let i = 1; i < props.length; ++i) { - const token = props[i]; - switch (token.type) { - case 'space': - hasSpace = true; - // fallthrough - case 'newline': - length += token.source.length; - break; - case 'comment': - if (strict && !hasSpace) { - const message = 'Comments must be separated from other tokens by white space characters'; - onError(token, 'MISSING_CHAR', message); - } - length += token.source.length; - comment = token.source.substring(1); - break; - case 'error': - onError(token, 'UNEXPECTED_TOKEN', token.message); - length += token.source.length; - break; - /* istanbul ignore next should not happen */ - default: { - const message = `Unexpected token in block scalar header: ${token.type}`; - onError(token, 'UNEXPECTED_TOKEN', message); - const ts = token.source; - if (ts && typeof ts === 'string') - length += ts.length; - } - } - } - return { mode, indent, chomp, comment, length }; -} -/** @returns Array of lines split up as `[indent, content]` */ -function splitLines(source) { - const split = source.split(/\n( *)/); - const first = split[0]; - const m = first.match(/^( *)/); - const line0 = m?.[1] - ? 
[m[1], first.slice(m[1].length)] - : ['', first]; - const lines = [line0]; - for (let i = 1; i < split.length; i += 2) - lines.push([split[i], split[i + 1]]); - return lines; -} - -exports.resolveBlockScalar = resolveBlockScalar; - - -/***/ }), - -/***/ 2257: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var YAMLSeq = __nccwpck_require__(3810); -var resolveProps = __nccwpck_require__(9663); -var utilFlowIndentCheck = __nccwpck_require__(7013); - -function resolveBlockSeq({ composeNode, composeEmptyNode }, ctx, bs, onError) { - const seq = new YAMLSeq.YAMLSeq(ctx.schema); - if (ctx.atRoot) - ctx.atRoot = false; - let offset = bs.offset; - for (const { start, value } of bs.items) { - const props = resolveProps.resolveProps(start, { - indicator: 'seq-item-ind', - next: value, - offset, - onError, - startOnNewline: true - }); - offset = props.end; - if (!props.found) { - if (props.anchor || props.tag || value) { - if (value && value.type === 'block-seq') - onError(offset, 'BAD_INDENT', 'All sequence items must start at the same column'); - else - onError(offset, 'MISSING_CHAR', 'Sequence item without - indicator'); - } - else { - // TODO: assert being at last item? - if (props.comment) - seq.comment = props.comment; - continue; - } - } - const node = value - ? composeNode(ctx, value, props, onError) - : composeEmptyNode(ctx, offset, start, null, props, onError); - if (ctx.schema.compat) - utilFlowIndentCheck.flowIndentCheck(bs.indent, value, onError); - offset = node.range[2]; - seq.items.push(node); - } - seq.range = [bs.offset, offset, offset]; - return seq; -} - -exports.resolveBlockSeq = resolveBlockSeq; - - -/***/ }), - -/***/ 1789: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -function resolveEnd(end, offset, reqSpace, onError) { - let comment = ''; - if (end) { - let hasSpace = false; - let sep = ''; - for (const token of end) { - const { source, type } = token; - switch (type) { - case 'space': - hasSpace = true; - break; - case 'comment': { - if (reqSpace && !hasSpace) - onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters'); - const cb = source.substring(1) || ' '; - if (!comment) - comment = cb; - else - comment += sep + cb; - sep = ''; - break; - } - case 'newline': - if (comment) - sep += source; - hasSpace = true; - break; - default: - onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${type} at node end`); - } - offset += source.length; - } - } - return { comment, offset }; -} - -exports.resolveEnd = resolveEnd; - - -/***/ }), - -/***/ 6085: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); -var Pair = __nccwpck_require__(3497); -var YAMLMap = __nccwpck_require__(6761); -var YAMLSeq = __nccwpck_require__(3810); -var resolveEnd = __nccwpck_require__(1789); -var resolveProps = __nccwpck_require__(9663); -var utilContainsNewline = __nccwpck_require__(7801); -var utilMapIncludes = __nccwpck_require__(379); - -const blockMsg = 'Block collections are not allowed within flow collections'; -const isBlock = (token) => token && (token.type === 'block-map' || token.type === 'block-seq'); -function resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onError) { - const isMap = fc.start.source === '{'; - const fcName = isMap ? 'flow map' : 'flow sequence'; - const coll = isMap - ? 
new YAMLMap.YAMLMap(ctx.schema) - : new YAMLSeq.YAMLSeq(ctx.schema); - coll.flow = true; - const atRoot = ctx.atRoot; - if (atRoot) - ctx.atRoot = false; - let offset = fc.offset + fc.start.source.length; - for (let i = 0; i < fc.items.length; ++i) { - const collItem = fc.items[i]; - const { start, key, sep, value } = collItem; - const props = resolveProps.resolveProps(start, { - flow: fcName, - indicator: 'explicit-key-ind', - next: key ?? sep?.[0], - offset, - onError, - startOnNewline: false - }); - if (!props.found) { - if (!props.anchor && !props.tag && !sep && !value) { - if (i === 0 && props.comma) - onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`); - else if (i < fc.items.length - 1) - onError(props.start, 'UNEXPECTED_TOKEN', `Unexpected empty item in ${fcName}`); - if (props.comment) { - if (coll.comment) - coll.comment += '\n' + props.comment; - else - coll.comment = props.comment; - } - offset = props.end; - continue; - } - if (!isMap && ctx.options.strict && utilContainsNewline.containsNewline(key)) - onError(key, // checked by containsNewline() - 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line'); - } - if (i === 0) { - if (props.comma) - onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`); - } - else { - if (!props.comma) - onError(props.start, 'MISSING_CHAR', `Missing , between ${fcName} items`); - if (props.comment) { - let prevItemComment = ''; - loop: for (const st of start) { - switch (st.type) { - case 'comma': - case 'space': - break; - case 'comment': - prevItemComment = st.source.substring(1); - break loop; - default: - break loop; - } - } - if (prevItemComment) { - let prev = coll.items[coll.items.length - 1]; - if (Node.isPair(prev)) - prev = prev.value ?? prev.key; - if (prev.comment) - prev.comment += '\n' + prevItemComment; - else - prev.comment = prevItemComment; - props.comment = props.comment.substring(prevItemComment.length + 1); - } - } - } - if (!isMap && !sep && !props.found) { - // item is a value in a seq - // → key & sep are empty, start does not include ? or : - const valueNode = value - ? composeNode(ctx, value, props, onError) - : composeEmptyNode(ctx, props.end, sep, null, props, onError); - coll.items.push(valueNode); - offset = valueNode.range[2]; - if (isBlock(value)) - onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg); - } - else { - // item is a key+value pair - // key value - const keyStart = props.end; - const keyNode = key - ? composeNode(ctx, key, props, onError) - : composeEmptyNode(ctx, keyStart, start, null, props, onError); - if (isBlock(key)) - onError(keyNode.range, 'BLOCK_IN_FLOW', blockMsg); - // value properties - const valueProps = resolveProps.resolveProps(sep ?? 
[], { - flow: fcName, - indicator: 'map-value-ind', - next: value, - offset: keyNode.range[2], - onError, - startOnNewline: false - }); - if (valueProps.found) { - if (!isMap && !props.found && ctx.options.strict) { - if (sep) - for (const st of sep) { - if (st === valueProps.found) - break; - if (st.type === 'newline') { - onError(st, 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line'); - break; - } - } - if (props.start < valueProps.found.offset - 1024) - onError(valueProps.found, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit flow sequence key'); - } - } - else if (value) { - if ('source' in value && value.source && value.source[0] === ':') - onError(value, 'MISSING_CHAR', `Missing space after : in ${fcName}`); - else - onError(valueProps.start, 'MISSING_CHAR', `Missing , or : between ${fcName} items`); - } - // value value - const valueNode = value - ? composeNode(ctx, value, valueProps, onError) - : valueProps.found - ? composeEmptyNode(ctx, valueProps.end, sep, null, valueProps, onError) - : null; - if (valueNode) { - if (isBlock(value)) - onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg); - } - else if (valueProps.comment) { - if (keyNode.comment) - keyNode.comment += '\n' + valueProps.comment; - else - keyNode.comment = valueProps.comment; - } - const pair = new Pair.Pair(keyNode, valueNode); - if (ctx.options.keepSourceTokens) - pair.srcToken = collItem; - if (isMap) { - const map = coll; - if (utilMapIncludes.mapIncludes(ctx, map.items, keyNode)) - onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique'); - map.items.push(pair); - } - else { - const map = new YAMLMap.YAMLMap(ctx.schema); - map.flow = true; - map.items.push(pair); - coll.items.push(map); - } - offset = valueNode ? valueNode.range[2] : valueProps.end; - } - } - const expectedEnd = isMap ? '}' : ']'; - const [ce, ...ee] = fc.end; - let cePos = offset; - if (ce && ce.source === expectedEnd) - cePos = ce.offset + ce.source.length; - else { - const name = fcName[0].toUpperCase() + fcName.substring(1); - const msg = atRoot - ? `${name} must end with a ${expectedEnd}` - : `${name} in block collection must be sufficiently indented and end with a ${expectedEnd}`; - onError(offset, atRoot ? 
'MISSING_CHAR' : 'BAD_INDENT', msg); - if (ce && ce.source.length !== 1) - ee.unshift(ce); - } - if (ee.length > 0) { - const end = resolveEnd.resolveEnd(ee, cePos, ctx.options.strict, onError); - if (end.comment) { - if (coll.comment) - coll.comment += '\n' + end.comment; - else - coll.comment = end.comment; - } - coll.range = [fc.offset, cePos, end.offset]; - } - else { - coll.range = [fc.offset, cePos, cePos]; - } - return coll; -} - -exports.resolveFlowCollection = resolveFlowCollection; - - -/***/ }), - -/***/ 6390: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Scalar = __nccwpck_require__(8160); -var resolveEnd = __nccwpck_require__(1789); - -function resolveFlowScalar(scalar, strict, onError) { - const { offset, type, source, end } = scalar; - let _type; - let value; - const _onError = (rel, code, msg) => onError(offset + rel, code, msg); - switch (type) { - case 'scalar': - _type = Scalar.Scalar.PLAIN; - value = plainValue(source, _onError); - break; - case 'single-quoted-scalar': - _type = Scalar.Scalar.QUOTE_SINGLE; - value = singleQuotedValue(source, _onError); - break; - case 'double-quoted-scalar': - _type = Scalar.Scalar.QUOTE_DOUBLE; - value = doubleQuotedValue(source, _onError); - break; - /* istanbul ignore next should not happen */ - default: - onError(scalar, 'UNEXPECTED_TOKEN', `Expected a flow scalar value, but found: ${type}`); - return { - value: '', - type: null, - comment: '', - range: [offset, offset + source.length, offset + source.length] - }; - } - const valueEnd = offset + source.length; - const re = resolveEnd.resolveEnd(end, valueEnd, strict, onError); - return { - value, - type: _type, - comment: re.comment, - range: [offset, valueEnd, re.offset] - }; -} -function plainValue(source, onError) { - let badChar = ''; - switch (source[0]) { - /* istanbul ignore next should not happen */ - case '\t': - badChar = 'a tab character'; - break; - case ',': - badChar = 'flow indicator character ,'; - break; - case '%': - badChar = 'directive indicator character %'; - break; - case '|': - case '>': { - badChar = `block scalar indicator ${source[0]}`; - break; - } - case '@': - case '`': { - badChar = `reserved character ${source[0]}`; - break; - } - } - if (badChar) - onError(0, 'BAD_SCALAR_START', `Plain value cannot start with ${badChar}`); - return foldLines(source); -} -function singleQuotedValue(source, onError) { - if (source[source.length - 1] !== "'" || source.length === 1) - onError(source.length, 'MISSING_CHAR', "Missing closing 'quote"); - return foldLines(source.slice(1, -1)).replace(/''/g, "'"); -} -function foldLines(source) { - /** - * The negative lookbehind here and in the `re` RegExp is to - * prevent causing a polynomial search time in certain cases. - * - * The try-catch is for Safari, which doesn't support this yet: - * https://caniuse.com/js-regexp-lookbehind - */ - let first, line; - try { - first = new RegExp('(.*?)(? wsStart ? source.slice(wsStart, i + 1) : ch; - } - else { - res += ch; - } - } - if (source[source.length - 1] !== '"' || source.length === 1) - onError(source.length, 'MISSING_CHAR', 'Missing closing "quote'); - return res; -} -/** - * Fold a single newline into a space, multiple newlines to N - 1 newlines. 
- * Presumes `source[offset] === '\n'` - */ -function foldNewline(source, offset) { - let fold = ''; - let ch = source[offset + 1]; - while (ch === ' ' || ch === '\t' || ch === '\n' || ch === '\r') { - if (ch === '\r' && source[offset + 2] !== '\n') - break; - if (ch === '\n') - fold += '\n'; - offset += 1; - ch = source[offset + 1]; - } - if (!fold) - fold = ' '; - return { fold, offset }; -} -const escapeCodes = { - '0': '\0', - a: '\x07', - b: '\b', - e: '\x1b', - f: '\f', - n: '\n', - r: '\r', - t: '\t', - v: '\v', - N: '\u0085', - _: '\u00a0', - L: '\u2028', - P: '\u2029', - ' ': ' ', - '"': '"', - '/': '/', - '\\': '\\', - '\t': '\t' -}; -function parseCharCode(source, offset, length, onError) { - const cc = source.substr(offset, length); - const ok = cc.length === length && /^[0-9a-fA-F]+$/.test(cc); - const code = ok ? parseInt(cc, 16) : NaN; - if (isNaN(code)) { - const raw = source.substr(offset - 2, length + 2); - onError(offset - 2, 'BAD_DQ_ESCAPE', `Invalid escape sequence ${raw}`); - return raw; - } - return String.fromCodePoint(code); -} - -exports.resolveFlowScalar = resolveFlowScalar; - - -/***/ }), - -/***/ 9663: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -function resolveProps(tokens, { flow, indicator, next, offset, onError, startOnNewline }) { - let spaceBefore = false; - let atNewline = startOnNewline; - let hasSpace = startOnNewline; - let comment = ''; - let commentSep = ''; - let hasNewline = false; - let hasNewlineAfterProp = false; - let reqSpace = false; - let anchor = null; - let tag = null; - let comma = null; - let found = null; - let start = null; - for (const token of tokens) { - if (reqSpace) { - if (token.type !== 'space' && - token.type !== 'newline' && - token.type !== 'comma') - onError(token.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space'); - reqSpace = false; - } - switch (token.type) { - case 'space': - // At the doc level, tabs at line start may be parsed - // as leading white space rather than indentation. - // In a flow collection, only the parser handles indent. 
- if (!flow && - atNewline && - indicator !== 'doc-start' && - token.source[0] === '\t') - onError(token, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation'); - hasSpace = true; - break; - case 'comment': { - if (!hasSpace) - onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters'); - const cb = token.source.substring(1) || ' '; - if (!comment) - comment = cb; - else - comment += commentSep + cb; - commentSep = ''; - atNewline = false; - break; - } - case 'newline': - if (atNewline) { - if (comment) - comment += token.source; - else - spaceBefore = true; - } - else - commentSep += token.source; - atNewline = true; - hasNewline = true; - if (anchor || tag) - hasNewlineAfterProp = true; - hasSpace = true; - break; - case 'anchor': - if (anchor) - onError(token, 'MULTIPLE_ANCHORS', 'A node can have at most one anchor'); - if (token.source.endsWith(':')) - onError(token.offset + token.source.length - 1, 'BAD_ALIAS', 'Anchor ending in : is ambiguous', true); - anchor = token; - if (start === null) - start = token.offset; - atNewline = false; - hasSpace = false; - reqSpace = true; - break; - case 'tag': { - if (tag) - onError(token, 'MULTIPLE_TAGS', 'A node can have at most one tag'); - tag = token; - if (start === null) - start = token.offset; - atNewline = false; - hasSpace = false; - reqSpace = true; - break; - } - case indicator: - // Could here handle preceding comments differently - if (anchor || tag) - onError(token, 'BAD_PROP_ORDER', `Anchors and tags must be after the ${token.source} indicator`); - if (found) - onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.source} in ${flow ?? 'collection'}`); - found = token; - atNewline = false; - hasSpace = false; - break; - case 'comma': - if (flow) { - if (comma) - onError(token, 'UNEXPECTED_TOKEN', `Unexpected , in ${flow}`); - comma = token; - atNewline = false; - hasSpace = false; - break; - } - // else fallthrough - default: - onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.type} token`); - atNewline = false; - hasSpace = false; - } - } - const last = tokens[tokens.length - 1]; - const end = last ? last.offset + last.source.length : offset; - if (reqSpace && - next && - next.type !== 'space' && - next.type !== 'newline' && - next.type !== 'comma' && - (next.type !== 'scalar' || next.source !== '')) - onError(next.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space'); - return { - comma, - found, - spaceBefore, - comment, - hasNewline, - hasNewlineAfterProp, - anchor, - tag, - end, - start: start ?? 
end - }; -} - -exports.resolveProps = resolveProps; - - -/***/ }), - -/***/ 7801: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -function containsNewline(key) { - if (!key) - return null; - switch (key.type) { - case 'alias': - case 'scalar': - case 'double-quoted-scalar': - case 'single-quoted-scalar': - if (key.source.includes('\n')) - return true; - if (key.end) - for (const st of key.end) - if (st.type === 'newline') - return true; - return false; - case 'flow-collection': - for (const it of key.items) { - for (const st of it.start) - if (st.type === 'newline') - return true; - if (it.sep) - for (const st of it.sep) - if (st.type === 'newline') - return true; - if (containsNewline(it.key) || containsNewline(it.value)) - return true; - } - return false; - default: - return true; - } -} - -exports.containsNewline = containsNewline; - - -/***/ }), - -/***/ 8517: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -function emptyScalarPosition(offset, before, pos) { - if (before) { - if (pos === null) - pos = before.length; - for (let i = pos - 1; i >= 0; --i) { - let st = before[i]; - switch (st.type) { - case 'space': - case 'comment': - case 'newline': - offset -= st.source.length; - continue; - } - // Technically, an empty scalar is immediately after the last non-empty - // node, but it's more useful to place it after any whitespace. - st = before[++i]; - while (st?.type === 'space') { - offset += st.source.length; - st = before[++i]; - } - break; - } - } - return offset; -} - -exports.emptyScalarPosition = emptyScalarPosition; - - -/***/ }), - -/***/ 7013: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var utilContainsNewline = __nccwpck_require__(7801); - -function flowIndentCheck(indent, fc, onError) { - if (fc?.type === 'flow-collection') { - const end = fc.end[0]; - if (end.indent === indent && - (end.source === ']' || end.source === '}') && - utilContainsNewline.containsNewline(fc)) { - const msg = 'Flow end indicator should be more indented than parent'; - onError(end, 'BAD_INDENT', msg, true); - } - } -} - -exports.flowIndentCheck = flowIndentCheck; - - -/***/ }), - -/***/ 379: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); - -function mapIncludes(ctx, items, search) { - const { uniqueKeys } = ctx.options; - if (uniqueKeys === false) - return false; - const isEqual = typeof uniqueKeys === 'function' - ? 
uniqueKeys - : (a, b) => a === b || - (Node.isScalar(a) && - Node.isScalar(b) && - a.value === b.value && - !(a.value === '<<' && ctx.schema.merge)); - return items.some(pair => isEqual(pair.key, search)); -} - -exports.mapIncludes = mapIncludes; - - -/***/ }), - -/***/ 1785: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Alias = __nccwpck_require__(9712); -var Collection = __nccwpck_require__(9797); -var Node = __nccwpck_require__(9752); -var Pair = __nccwpck_require__(3497); -var toJS = __nccwpck_require__(8842); -var Schema = __nccwpck_require__(7425); -var stringify = __nccwpck_require__(1922); -var stringifyDocument = __nccwpck_require__(9615); -var anchors = __nccwpck_require__(584); -var applyReviver = __nccwpck_require__(9833); -var createNode = __nccwpck_require__(9807); -var directives = __nccwpck_require__(8729); - -class Document { - constructor(value, replacer, options) { - /** A comment before this Document */ - this.commentBefore = null; - /** A comment immediately after this Document */ - this.comment = null; - /** Errors encountered during parsing. */ - this.errors = []; - /** Warnings encountered during parsing. */ - this.warnings = []; - Object.defineProperty(this, Node.NODE_TYPE, { value: Node.DOC }); - let _replacer = null; - if (typeof replacer === 'function' || Array.isArray(replacer)) { - _replacer = replacer; - } - else if (options === undefined && replacer) { - options = replacer; - replacer = undefined; - } - const opt = Object.assign({ - intAsBigInt: false, - keepSourceTokens: false, - logLevel: 'warn', - prettyErrors: true, - strict: true, - uniqueKeys: true, - version: '1.2' - }, options); - this.options = opt; - let { version } = opt; - if (options?._directives) { - this.directives = options._directives.atDocument(); - if (this.directives.yaml.explicit) - version = this.directives.yaml.version; - } - else - this.directives = new directives.Directives({ version }); - this.setSchema(version, options); - if (value === undefined) - this.contents = null; - else { - this.contents = this.createNode(value, _replacer, options); - } - } - /** - * Create a deep copy of this Document and its contents. - * - * Custom Node values that inherit from `Object` still refer to their original instances. - */ - clone() { - const copy = Object.create(Document.prototype, { - [Node.NODE_TYPE]: { value: Node.DOC } - }); - copy.commentBefore = this.commentBefore; - copy.comment = this.comment; - copy.errors = this.errors.slice(); - copy.warnings = this.warnings.slice(); - copy.options = Object.assign({}, this.options); - if (this.directives) - copy.directives = this.directives.clone(); - copy.schema = this.schema.clone(); - copy.contents = Node.isNode(this.contents) - ? this.contents.clone(copy.schema) - : this.contents; - if (this.range) - copy.range = this.range.slice(); - return copy; - } - /** Adds a value to the document. */ - add(value) { - if (assertCollection(this.contents)) - this.contents.add(value); - } - /** Adds a value to the document. */ - addIn(path, value) { - if (assertCollection(this.contents)) - this.contents.addIn(path, value); - } - /** - * Create a new `Alias` node, ensuring that the target `node` has the required anchor. - * - * If `node` already has an anchor, `name` is ignored. - * Otherwise, the `node.anchor` value will be set to `name`, - * or if an anchor with that name is already present in the document, - * `name` will be used as a prefix for a new unique anchor. 
- * If `name` is undefined, the generated anchor will use 'a' as a prefix. - */ - createAlias(node, name) { - if (!node.anchor) { - const prev = anchors.anchorNames(this); - node.anchor = - // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing - !name || prev.has(name) ? anchors.findNewAnchor(name || 'a', prev) : name; - } - return new Alias.Alias(node.anchor); - } - createNode(value, replacer, options) { - let _replacer = undefined; - if (typeof replacer === 'function') { - value = replacer.call({ '': value }, '', value); - _replacer = replacer; - } - else if (Array.isArray(replacer)) { - const keyToStr = (v) => typeof v === 'number' || v instanceof String || v instanceof Number; - const asStr = replacer.filter(keyToStr).map(String); - if (asStr.length > 0) - replacer = replacer.concat(asStr); - _replacer = replacer; - } - else if (options === undefined && replacer) { - options = replacer; - replacer = undefined; - } - const { aliasDuplicateObjects, anchorPrefix, flow, keepUndefined, onTagObj, tag } = options ?? {}; - const { onAnchor, setAnchors, sourceObjects } = anchors.createNodeAnchors(this, - // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing - anchorPrefix || 'a'); - const ctx = { - aliasDuplicateObjects: aliasDuplicateObjects ?? true, - keepUndefined: keepUndefined ?? false, - onAnchor, - onTagObj, - replacer: _replacer, - schema: this.schema, - sourceObjects - }; - const node = createNode.createNode(value, tag, ctx); - if (flow && Node.isCollection(node)) - node.flow = true; - setAnchors(); - return node; - } - /** - * Convert a key and a value into a `Pair` using the current schema, - * recursively wrapping all values as `Scalar` or `Collection` nodes. - */ - createPair(key, value, options = {}) { - const k = this.createNode(key, null, options); - const v = this.createNode(value, null, options); - return new Pair.Pair(k, v); - } - /** - * Removes a value from the document. - * @returns `true` if the item was found and removed. - */ - delete(key) { - return assertCollection(this.contents) ? this.contents.delete(key) : false; - } - /** - * Removes a value from the document. - * @returns `true` if the item was found and removed. - */ - deleteIn(path) { - if (Collection.isEmptyPath(path)) { - if (this.contents == null) - return false; - this.contents = null; - return true; - } - return assertCollection(this.contents) - ? this.contents.deleteIn(path) - : false; - } - /** - * Returns item at `key`, or `undefined` if not found. By default unwraps - * scalar values from their surrounding node; to disable set `keepScalar` to - * `true` (collections are always returned intact). - */ - get(key, keepScalar) { - return Node.isCollection(this.contents) - ? this.contents.get(key, keepScalar) - : undefined; - } - /** - * Returns item at `path`, or `undefined` if not found. By default unwraps - * scalar values from their surrounding node; to disable set `keepScalar` to - * `true` (collections are always returned intact). - */ - getIn(path, keepScalar) { - if (Collection.isEmptyPath(path)) - return !keepScalar && Node.isScalar(this.contents) - ? this.contents.value - : this.contents; - return Node.isCollection(this.contents) - ? this.contents.getIn(path, keepScalar) - : undefined; - } - /** - * Checks if the document includes a value with the key `key`. - */ - has(key) { - return Node.isCollection(this.contents) ? this.contents.has(key) : false; - } - /** - * Checks if the document includes a value at `path`. 
- */ - hasIn(path) { - if (Collection.isEmptyPath(path)) - return this.contents !== undefined; - return Node.isCollection(this.contents) ? this.contents.hasIn(path) : false; - } - /** - * Sets a value in this document. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. - */ - set(key, value) { - if (this.contents == null) { - this.contents = Collection.collectionFromPath(this.schema, [key], value); - } - else if (assertCollection(this.contents)) { - this.contents.set(key, value); - } - } - /** - * Sets a value in this document. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. - */ - setIn(path, value) { - if (Collection.isEmptyPath(path)) - this.contents = value; - else if (this.contents == null) { - this.contents = Collection.collectionFromPath(this.schema, Array.from(path), value); - } - else if (assertCollection(this.contents)) { - this.contents.setIn(path, value); - } - } - /** - * Change the YAML version and schema used by the document. - * A `null` version disables support for directives, explicit tags, anchors, and aliases. - * It also requires the `schema` option to be given as a `Schema` instance value. - * - * Overrides all previously set schema options. - */ - setSchema(version, options = {}) { - if (typeof version === 'number') - version = String(version); - let opt; - switch (version) { - case '1.1': - if (this.directives) - this.directives.yaml.version = '1.1'; - else - this.directives = new directives.Directives({ version: '1.1' }); - opt = { merge: true, resolveKnownTags: false, schema: 'yaml-1.1' }; - break; - case '1.2': - case 'next': - if (this.directives) - this.directives.yaml.version = version; - else - this.directives = new directives.Directives({ version }); - opt = { merge: false, resolveKnownTags: true, schema: 'core' }; - break; - case null: - if (this.directives) - delete this.directives; - opt = null; - break; - default: { - const sv = JSON.stringify(version); - throw new Error(`Expected '1.1', '1.2' or null as first argument, but found: ${sv}`); - } - } - // Not using `instanceof Schema` to allow for duck typing - if (options.schema instanceof Object) - this.schema = options.schema; - else if (opt) - this.schema = new Schema.Schema(Object.assign(opt, options)); - else - throw new Error(`With a null YAML version, the { schema: Schema } option is required`); - } - // json & jsonArg are only used from toJSON() - toJS({ json, jsonArg, mapAsMap, maxAliasCount, onAnchor, reviver } = {}) { - const ctx = { - anchors: new Map(), - doc: this, - keep: !json, - mapAsMap: mapAsMap === true, - mapKeyWarned: false, - maxAliasCount: typeof maxAliasCount === 'number' ? maxAliasCount : 100, - stringify: stringify.stringify - }; - const res = toJS.toJS(this.contents, jsonArg ?? '', ctx); - if (typeof onAnchor === 'function') - for (const { count, res } of ctx.anchors.values()) - onAnchor(res, count); - return typeof reviver === 'function' - ? applyReviver.applyReviver(reviver, { '': res }, '', res) - : res; - } - /** - * A JSON representation of the document `contents`. - * - * @param jsonArg Used by `JSON.stringify` to indicate the array index or - * property name. - */ - toJSON(jsonArg, onAnchor) { - return this.toJS({ json: true, jsonArg, mapAsMap: false, onAnchor }); - } - /** A YAML representation of the document. 
*/ - toString(options = {}) { - if (this.errors.length > 0) - throw new Error('Document with errors cannot be stringified'); - if ('indent' in options && - (!Number.isInteger(options.indent) || Number(options.indent) <= 0)) { - const s = JSON.stringify(options.indent); - throw new Error(`"indent" option must be a positive integer, not ${s}`); - } - return stringifyDocument.stringifyDocument(this, options); - } -} -function assertCollection(contents) { - if (Node.isCollection(contents)) - return true; - throw new Error('Expected a YAML collection as document contents'); -} - -exports.Document = Document; - - -/***/ }), - -/***/ 584: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); -var visit = __nccwpck_require__(8234); - -/** - * Verify that the input string is a valid anchor. - * - * Will throw on errors. - */ -function anchorIsValid(anchor) { - if (/[\x00-\x19\s,[\]{}]/.test(anchor)) { - const sa = JSON.stringify(anchor); - const msg = `Anchor must not contain whitespace or control characters: ${sa}`; - throw new Error(msg); - } - return true; -} -function anchorNames(root) { - const anchors = new Set(); - visit.visit(root, { - Value(_key, node) { - if (node.anchor) - anchors.add(node.anchor); - } - }); - return anchors; -} -/** Find a new anchor name with the given `prefix` and a one-indexed suffix. */ -function findNewAnchor(prefix, exclude) { - for (let i = 1; true; ++i) { - const name = `${prefix}${i}`; - if (!exclude.has(name)) - return name; - } -} -function createNodeAnchors(doc, prefix) { - const aliasObjects = []; - const sourceObjects = new Map(); - let prevAnchors = null; - return { - onAnchor: (source) => { - aliasObjects.push(source); - if (!prevAnchors) - prevAnchors = anchorNames(doc); - const anchor = findNewAnchor(prefix, prevAnchors); - prevAnchors.add(anchor); - return anchor; - }, - /** - * With circular references, the source node is only resolved after all - * of its child nodes are. This is why anchors are set only after all of - * the nodes have been created. - */ - setAnchors: () => { - for (const source of aliasObjects) { - const ref = sourceObjects.get(source); - if (typeof ref === 'object' && - ref.anchor && - (Node.isScalar(ref.node) || Node.isCollection(ref.node))) { - ref.node.anchor = ref.anchor; - } - else { - const error = new Error('Failed to resolve repeated object (this should not happen)'); - error.source = source; - throw error; - } - } - }, - sourceObjects - }; -} - -exports.anchorIsValid = anchorIsValid; -exports.anchorNames = anchorNames; -exports.createNodeAnchors = createNodeAnchors; -exports.findNewAnchor = findNewAnchor; - - -/***/ }), - -/***/ 9833: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -/** - * Applies the JSON.parse reviver algorithm as defined in the ECMA-262 spec, - * in section 24.5.1.1 "Runtime Semantics: InternalizeJSONProperty" of the - * 2021 edition: https://tc39.es/ecma262/#sec-json.parse - * - * Includes extensions for handling Map and Set objects. 
- */ -function applyReviver(reviver, obj, key, val) { - if (val && typeof val === 'object') { - if (Array.isArray(val)) { - for (let i = 0, len = val.length; i < len; ++i) { - const v0 = val[i]; - const v1 = applyReviver(reviver, val, String(i), v0); - if (v1 === undefined) - delete val[i]; - else if (v1 !== v0) - val[i] = v1; - } - } - else if (val instanceof Map) { - for (const k of Array.from(val.keys())) { - const v0 = val.get(k); - const v1 = applyReviver(reviver, val, k, v0); - if (v1 === undefined) - val.delete(k); - else if (v1 !== v0) - val.set(k, v1); - } - } - else if (val instanceof Set) { - for (const v0 of Array.from(val)) { - const v1 = applyReviver(reviver, val, v0, v0); - if (v1 === undefined) - val.delete(v0); - else if (v1 !== v0) { - val.delete(v0); - val.add(v1); - } - } - } - else { - for (const [k, v0] of Object.entries(val)) { - const v1 = applyReviver(reviver, val, k, v0); - if (v1 === undefined) - delete val[k]; - else if (v1 !== v0) - val[k] = v1; - } - } - } - return reviver.call(obj, key, val); -} - -exports.applyReviver = applyReviver; - - -/***/ }), - -/***/ 9807: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Alias = __nccwpck_require__(9712); -var Node = __nccwpck_require__(9752); -var Scalar = __nccwpck_require__(8160); - -const defaultTagPrefix = 'tag:yaml.org,2002:'; -function findTagObject(value, tagName, tags) { - if (tagName) { - const match = tags.filter(t => t.tag === tagName); - const tagObj = match.find(t => !t.format) ?? match[0]; - if (!tagObj) - throw new Error(`Tag ${tagName} not found`); - return tagObj; - } - return tags.find(t => t.identify?.(value) && !t.format); -} -function createNode(value, tagName, ctx) { - if (Node.isDocument(value)) - value = value.contents; - if (Node.isNode(value)) - return value; - if (Node.isPair(value)) { - const map = ctx.schema[Node.MAP].createNode?.(ctx.schema, null, ctx); - map.items.push(value); - return map; - } - if (value instanceof String || - value instanceof Number || - value instanceof Boolean || - (typeof BigInt === 'function' && value instanceof BigInt) // not supported everywhere - ) { - // https://tc39.es/ecma262/#sec-serializejsonproperty - value = value.valueOf(); - } - const { aliasDuplicateObjects, onAnchor, onTagObj, schema, sourceObjects } = ctx; - // Detect duplicate references to the same object & use Alias nodes for all - // after first. The `ref` wrapper allows for circular references to resolve. - let ref = undefined; - if (aliasDuplicateObjects && value && typeof value === 'object') { - ref = sourceObjects.get(value); - if (ref) { - if (!ref.anchor) - ref.anchor = onAnchor(value); - return new Alias.Alias(ref.anchor); - } - else { - ref = { anchor: null, node: null }; - sourceObjects.set(value, ref); - } - } - if (tagName?.startsWith('!!')) - tagName = defaultTagPrefix + tagName.slice(2); - let tagObj = findTagObject(value, tagName, schema.tags); - if (!tagObj) { - if (value && typeof value.toJSON === 'function') { - // eslint-disable-next-line @typescript-eslint/no-unsafe-call - value = value.toJSON(); - } - if (!value || typeof value !== 'object') { - const node = new Scalar.Scalar(value); - if (ref) - ref.node = node; - return node; - } - tagObj = - value instanceof Map - ? schema[Node.MAP] - : Symbol.iterator in Object(value) - ? schema[Node.SEQ] - : schema[Node.MAP]; - } - if (onTagObj) { - onTagObj(tagObj); - delete ctx.onTagObj; - } - const node = tagObj?.createNode - ? 
tagObj.createNode(ctx.schema, value, ctx) - : new Scalar.Scalar(value); - if (tagName) - node.tag = tagName; - if (ref) - ref.node = node; - return node; -} - -exports.createNode = createNode; - - -/***/ }), - -/***/ 8729: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); -var visit = __nccwpck_require__(8234); - -const escapeChars = { - '!': '%21', - ',': '%2C', - '[': '%5B', - ']': '%5D', - '{': '%7B', - '}': '%7D' -}; -const escapeTagName = (tn) => tn.replace(/[!,[\]{}]/g, ch => escapeChars[ch]); -class Directives { - constructor(yaml, tags) { - /** - * The directives-end/doc-start marker `---`. If `null`, a marker may still be - * included in the document's stringified representation. - */ - this.docStart = null; - /** The doc-end marker `...`. */ - this.docEnd = false; - this.yaml = Object.assign({}, Directives.defaultYaml, yaml); - this.tags = Object.assign({}, Directives.defaultTags, tags); - } - clone() { - const copy = new Directives(this.yaml, this.tags); - copy.docStart = this.docStart; - return copy; - } - /** - * During parsing, get a Directives instance for the current document and - * update the stream state according to the current version's spec. - */ - atDocument() { - const res = new Directives(this.yaml, this.tags); - switch (this.yaml.version) { - case '1.1': - this.atNextDocument = true; - break; - case '1.2': - this.atNextDocument = false; - this.yaml = { - explicit: Directives.defaultYaml.explicit, - version: '1.2' - }; - this.tags = Object.assign({}, Directives.defaultTags); - break; - } - return res; - } - /** - * @param onError - May be called even if the action was successful - * @returns `true` on success - */ - add(line, onError) { - if (this.atNextDocument) { - this.yaml = { explicit: Directives.defaultYaml.explicit, version: '1.1' }; - this.tags = Object.assign({}, Directives.defaultTags); - this.atNextDocument = false; - } - const parts = line.trim().split(/[ \t]+/); - const name = parts.shift(); - switch (name) { - case '%TAG': { - if (parts.length !== 2) { - onError(0, '%TAG directive should contain exactly two parts'); - if (parts.length < 2) - return false; - } - const [handle, prefix] = parts; - this.tags[handle] = prefix; - return true; - } - case '%YAML': { - this.yaml.explicit = true; - if (parts.length !== 1) { - onError(0, '%YAML directive should contain exactly one part'); - return false; - } - const [version] = parts; - if (version === '1.1' || version === '1.2') { - this.yaml.version = version; - return true; - } - else { - const isValid = /^\d+\.\d+$/.test(version); - onError(6, `Unsupported YAML version ${version}`, isValid); - return false; - } - } - default: - onError(0, `Unknown directive ${name}`, true); - return false; - } - } - /** - * Resolves a tag, matching handles to those defined in %TAG directives. - * - * @returns Resolved tag, which may also be the non-specific tag `'!'` or a - * `'!local'` tag, or `null` if unresolvable. - */ - tagName(source, onError) { - if (source === '!') - return '!'; // non-specific tag - if (source[0] !== '!') { - onError(`Not a valid tag: ${source}`); - return null; - } - if (source[1] === '<') { - const verbatim = source.slice(2, -1); - if (verbatim === '!' 
|| verbatim === '!!') { - onError(`Verbatim tags aren't resolved, so ${source} is invalid.`); - return null; - } - if (source[source.length - 1] !== '>') - onError('Verbatim tags must end with a >'); - return verbatim; - } - const [, handle, suffix] = source.match(/^(.*!)([^!]*)$/); - if (!suffix) - onError(`The ${source} tag has no suffix`); - const prefix = this.tags[handle]; - if (prefix) - return prefix + decodeURIComponent(suffix); - if (handle === '!') - return source; // local tag - onError(`Could not resolve tag: ${source}`); - return null; - } - /** - * Given a fully resolved tag, returns its printable string form, - * taking into account current tag prefixes and defaults. - */ - tagString(tag) { - for (const [handle, prefix] of Object.entries(this.tags)) { - if (tag.startsWith(prefix)) - return handle + escapeTagName(tag.substring(prefix.length)); - } - return tag[0] === '!' ? tag : `!<${tag}>`; - } - toString(doc) { - const lines = this.yaml.explicit - ? [`%YAML ${this.yaml.version || '1.2'}`] - : []; - const tagEntries = Object.entries(this.tags); - let tagNames; - if (doc && tagEntries.length > 0 && Node.isNode(doc.contents)) { - const tags = {}; - visit.visit(doc.contents, (_key, node) => { - if (Node.isNode(node) && node.tag) - tags[node.tag] = true; - }); - tagNames = Object.keys(tags); - } - else - tagNames = []; - for (const [handle, prefix] of tagEntries) { - if (handle === '!!' && prefix === 'tag:yaml.org,2002:') - continue; - if (!doc || tagNames.some(tn => tn.startsWith(prefix))) - lines.push(`%TAG ${handle} ${prefix}`); - } - return lines.join('\n'); - } -} -Directives.defaultYaml = { explicit: false, version: '1.2' }; -Directives.defaultTags = { '!!': 'tag:yaml.org,2002:' }; - -exports.Directives = Directives; - - -/***/ }), - -/***/ 3305: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -class YAMLError extends Error { - constructor(name, pos, code, message) { - super(); - this.name = name; - this.code = code; - this.message = message; - this.pos = pos; - } -} -class YAMLParseError extends YAMLError { - constructor(pos, code, message) { - super('YAMLParseError', pos, code, message); - } -} -class YAMLWarning extends YAMLError { - constructor(pos, code, message) { - super('YAMLWarning', pos, code, message); - } -} -const prettifyError = (src, lc) => (error) => { - if (error.pos[0] === -1) - return; - error.linePos = error.pos.map(pos => lc.linePos(pos)); - const { line, col } = error.linePos[0]; - error.message += ` at line ${line}, column ${col}`; - let ci = col - 1; - let lineStr = src - .substring(lc.lineStarts[line - 1], lc.lineStarts[line]) - .replace(/[\n\r]+$/, ''); - // Trim to max 80 chars, keeping col position near the middle - if (ci >= 60 && lineStr.length > 80) { - const trimStart = Math.min(ci - 39, lineStr.length - 79); - lineStr = '…' + lineStr.substring(trimStart); - ci -= trimStart - 1; - } - if (lineStr.length > 80) - lineStr = lineStr.substring(0, 79) + '…'; - // Include previous line in context if pointing at line start - if (line > 1 && /^ *$/.test(lineStr.substring(0, ci))) { - // Regexp won't match if start is trimmed - let prev = src.substring(lc.lineStarts[line - 2], lc.lineStarts[line - 1]); - if (prev.length > 80) - prev = prev.substring(0, 79) + '…\n'; - lineStr = prev + lineStr; - } - if (/[^ ]/.test(lineStr)) { - let count = 1; - const end = error.linePos[1]; - if (end && end.line === line && end.col > col) { - count = Math.min(end.col - col, 80 - ci); - } - const pointer = ' '.repeat(ci) + '^'.repeat(count); - 
error.message += `:\n\n${lineStr}\n${pointer}\n`; - } -}; - -exports.YAMLError = YAMLError; -exports.YAMLParseError = YAMLParseError; -exports.YAMLWarning = YAMLWarning; -exports.prettifyError = prettifyError; - - -/***/ }), - -/***/ 5065: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var composer = __nccwpck_require__(9386); -var Document = __nccwpck_require__(1785); -var Schema = __nccwpck_require__(7425); -var errors = __nccwpck_require__(3305); -var Alias = __nccwpck_require__(9712); -var Node = __nccwpck_require__(9752); -var Pair = __nccwpck_require__(3497); -var Scalar = __nccwpck_require__(8160); -var YAMLMap = __nccwpck_require__(6761); -var YAMLSeq = __nccwpck_require__(3810); -var cst = __nccwpck_require__(9726); -var lexer = __nccwpck_require__(7263); -var lineCounter = __nccwpck_require__(5748); -var parser = __nccwpck_require__(6466); -var publicApi = __nccwpck_require__(9191); -var visit = __nccwpck_require__(8234); - - - -exports.Composer = composer.Composer; -exports.Document = Document.Document; -exports.Schema = Schema.Schema; -exports.YAMLError = errors.YAMLError; -exports.YAMLParseError = errors.YAMLParseError; -exports.YAMLWarning = errors.YAMLWarning; -exports.Alias = Alias.Alias; -exports.isAlias = Node.isAlias; -exports.isCollection = Node.isCollection; -exports.isDocument = Node.isDocument; -exports.isMap = Node.isMap; -exports.isNode = Node.isNode; -exports.isPair = Node.isPair; -exports.isScalar = Node.isScalar; -exports.isSeq = Node.isSeq; -exports.Pair = Pair.Pair; -exports.Scalar = Scalar.Scalar; -exports.YAMLMap = YAMLMap.YAMLMap; -exports.YAMLSeq = YAMLSeq.YAMLSeq; -exports.CST = cst; -exports.Lexer = lexer.Lexer; -exports.LineCounter = lineCounter.LineCounter; -exports.Parser = parser.Parser; -exports.parse = publicApi.parse; -exports.parseAllDocuments = publicApi.parseAllDocuments; -exports.parseDocument = publicApi.parseDocument; -exports.stringify = publicApi.stringify; -exports.visit = visit.visit; -exports.visitAsync = visit.visitAsync; - - -/***/ }), - -/***/ 469: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -function debug(logLevel, ...messages) { - if (logLevel === 'debug') - console.log(...messages); -} -function warn(logLevel, warning) { - if (logLevel === 'debug' || logLevel === 'warn') { - if (typeof process !== 'undefined' && process.emitWarning) - process.emitWarning(warning); - else - console.warn(warning); - } -} - -exports.debug = debug; -exports.warn = warn; - - -/***/ }), - -/***/ 9712: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var anchors = __nccwpck_require__(584); -var visit = __nccwpck_require__(8234); -var Node = __nccwpck_require__(9752); - -class Alias extends Node.NodeBase { - constructor(source) { - super(Node.ALIAS); - this.source = source; - Object.defineProperty(this, 'tag', { - set() { - throw new Error('Alias nodes cannot have tags'); - } - }); - } - /** - * Resolve the value of this alias within `doc`, finding the last - * instance of the `source` anchor before this node. 
- */ - resolve(doc) { - let found = undefined; - visit.visit(doc, { - Node: (_key, node) => { - if (node === this) - return visit.visit.BREAK; - if (node.anchor === this.source) - found = node; - } - }); - return found; - } - toJSON(_arg, ctx) { - if (!ctx) - return { source: this.source }; - const { anchors, doc, maxAliasCount } = ctx; - const source = this.resolve(doc); - if (!source) { - const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`; - throw new ReferenceError(msg); - } - const data = anchors.get(source); - /* istanbul ignore if */ - if (!data || data.res === undefined) { - const msg = 'This should not happen: Alias anchor was not resolved?'; - throw new ReferenceError(msg); - } - if (maxAliasCount >= 0) { - data.count += 1; - if (data.aliasCount === 0) - data.aliasCount = getAliasCount(doc, source, anchors); - if (data.count * data.aliasCount > maxAliasCount) { - const msg = 'Excessive alias count indicates a resource exhaustion attack'; - throw new ReferenceError(msg); - } - } - return data.res; - } - toString(ctx, _onComment, _onChompKeep) { - const src = `*${this.source}`; - if (ctx) { - anchors.anchorIsValid(this.source); - if (ctx.options.verifyAliasOrder && !ctx.anchors.has(this.source)) { - const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`; - throw new Error(msg); - } - if (ctx.implicitKey) - return `${src} `; - } - return src; - } -} -function getAliasCount(doc, node, anchors) { - if (Node.isAlias(node)) { - const source = node.resolve(doc); - const anchor = anchors && source && anchors.get(source); - return anchor ? anchor.count * anchor.aliasCount : 0; - } - else if (Node.isCollection(node)) { - let count = 0; - for (const item of node.items) { - const c = getAliasCount(doc, item, anchors); - if (c > count) - count = c; - } - return count; - } - else if (Node.isPair(node)) { - const kc = getAliasCount(doc, node.key, anchors); - const vc = getAliasCount(doc, node.value, anchors); - return Math.max(kc, vc); - } - return 1; -} - -exports.Alias = Alias; - - -/***/ }), - -/***/ 9797: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var createNode = __nccwpck_require__(9807); -var Node = __nccwpck_require__(9752); - -function collectionFromPath(schema, path, value) { - let v = value; - for (let i = path.length - 1; i >= 0; --i) { - const k = path[i]; - if (typeof k === 'number' && Number.isInteger(k) && k >= 0) { - const a = []; - a[k] = v; - v = a; - } - else { - v = new Map([[k, v]]); - } - } - return createNode.createNode(v, undefined, { - aliasDuplicateObjects: false, - keepUndefined: false, - onAnchor: () => { - throw new Error('This should not happen, please report a bug.'); - }, - schema, - sourceObjects: new Map() - }); -} -// Type guard is intentionally a little wrong so as to be more useful, -// as it does not cover untypable empty non-string iterables (e.g. []). -const isEmptyPath = (path) => path == null || - (typeof path === 'object' && !!path[Symbol.iterator]().next().done); -class Collection extends Node.NodeBase { - constructor(type, schema) { - super(type); - Object.defineProperty(this, 'schema', { - value: schema, - configurable: true, - enumerable: false, - writable: true - }); - } - /** - * Create a copy of this collection. 
- * - * @param schema - If defined, overwrites the original's schema - */ - clone(schema) { - const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this)); - if (schema) - copy.schema = schema; - copy.items = copy.items.map(it => Node.isNode(it) || Node.isPair(it) ? it.clone(schema) : it); - if (this.range) - copy.range = this.range.slice(); - return copy; - } - /** - * Adds a value to the collection. For `!!map` and `!!omap` the value must - * be a Pair instance or a `{ key, value }` object, which may not have a key - * that already exists in the map. - */ - addIn(path, value) { - if (isEmptyPath(path)) - this.add(value); - else { - const [key, ...rest] = path; - const node = this.get(key, true); - if (Node.isCollection(node)) - node.addIn(rest, value); - else if (node === undefined && this.schema) - this.set(key, collectionFromPath(this.schema, rest, value)); - else - throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`); - } - } - /** - * Removes a value from the collection. - * @returns `true` if the item was found and removed. - */ - deleteIn(path) { - const [key, ...rest] = path; - if (rest.length === 0) - return this.delete(key); - const node = this.get(key, true); - if (Node.isCollection(node)) - return node.deleteIn(rest); - else - throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`); - } - /** - * Returns item at `key`, or `undefined` if not found. By default unwraps - * scalar values from their surrounding node; to disable set `keepScalar` to - * `true` (collections are always returned intact). - */ - getIn(path, keepScalar) { - const [key, ...rest] = path; - const node = this.get(key, true); - if (rest.length === 0) - return !keepScalar && Node.isScalar(node) ? node.value : node; - else - return Node.isCollection(node) ? node.getIn(rest, keepScalar) : undefined; - } - hasAllNullValues(allowScalar) { - return this.items.every(node => { - if (!Node.isPair(node)) - return false; - const n = node.value; - return (n == null || - (allowScalar && - Node.isScalar(n) && - n.value == null && - !n.commentBefore && - !n.comment && - !n.tag)); - }); - } - /** - * Checks if the collection includes a value with the key `key`. - */ - hasIn(path) { - const [key, ...rest] = path; - if (rest.length === 0) - return this.has(key); - const node = this.get(key, true); - return Node.isCollection(node) ? node.hasIn(rest) : false; - } - /** - * Sets a value in this collection. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. - */ - setIn(path, value) { - const [key, ...rest] = path; - if (rest.length === 0) { - this.set(key, value); - } - else { - const node = this.get(key, true); - if (Node.isCollection(node)) - node.setIn(rest, value); - else if (node === undefined && this.schema) - this.set(key, collectionFromPath(this.schema, rest, value)); - else - throw new Error(`Expected YAML collection at ${key}. 
Remaining path: ${rest}`); - } - } -} -Collection.maxFlowStringSingleLineLength = 60; - -exports.Collection = Collection; -exports.collectionFromPath = collectionFromPath; -exports.isEmptyPath = isEmptyPath; - - -/***/ }), - -/***/ 9752: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -const ALIAS = Symbol.for('yaml.alias'); -const DOC = Symbol.for('yaml.document'); -const MAP = Symbol.for('yaml.map'); -const PAIR = Symbol.for('yaml.pair'); -const SCALAR = Symbol.for('yaml.scalar'); -const SEQ = Symbol.for('yaml.seq'); -const NODE_TYPE = Symbol.for('yaml.node.type'); -const isAlias = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === ALIAS; -const isDocument = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === DOC; -const isMap = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === MAP; -const isPair = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === PAIR; -const isScalar = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SCALAR; -const isSeq = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SEQ; -function isCollection(node) { - if (node && typeof node === 'object') - switch (node[NODE_TYPE]) { - case MAP: - case SEQ: - return true; - } - return false; -} -function isNode(node) { - if (node && typeof node === 'object') - switch (node[NODE_TYPE]) { - case ALIAS: - case MAP: - case SCALAR: - case SEQ: - return true; - } - return false; -} -const hasAnchor = (node) => (isScalar(node) || isCollection(node)) && !!node.anchor; -class NodeBase { - constructor(type) { - Object.defineProperty(this, NODE_TYPE, { value: type }); - } - /** Create a copy of this node. */ - clone() { - const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this)); - if (this.range) - copy.range = this.range.slice(); - return copy; - } -} - -exports.ALIAS = ALIAS; -exports.DOC = DOC; -exports.MAP = MAP; -exports.NODE_TYPE = NODE_TYPE; -exports.NodeBase = NodeBase; -exports.PAIR = PAIR; -exports.SCALAR = SCALAR; -exports.SEQ = SEQ; -exports.hasAnchor = hasAnchor; -exports.isAlias = isAlias; -exports.isCollection = isCollection; -exports.isDocument = isDocument; -exports.isMap = isMap; -exports.isNode = isNode; -exports.isPair = isPair; -exports.isScalar = isScalar; -exports.isSeq = isSeq; - - -/***/ }), - -/***/ 3497: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var createNode = __nccwpck_require__(9807); -var stringifyPair = __nccwpck_require__(10); -var addPairToJSMap = __nccwpck_require__(5193); -var Node = __nccwpck_require__(9752); - -function createPair(key, value, ctx) { - const k = createNode.createNode(key, undefined, ctx); - const v = createNode.createNode(value, undefined, ctx); - return new Pair(k, v); -} -class Pair { - constructor(key, value = null) { - Object.defineProperty(this, Node.NODE_TYPE, { value: Node.PAIR }); - this.key = key; - this.value = value; - } - clone(schema) { - let { key, value } = this; - if (Node.isNode(key)) - key = key.clone(schema); - if (Node.isNode(value)) - value = value.clone(schema); - return new Pair(key, value); - } - toJSON(_, ctx) { - const pair = ctx?.mapAsMap ? new Map() : {}; - return addPairToJSMap.addPairToJSMap(ctx, pair, this); - } - toString(ctx, onComment, onChompKeep) { - return ctx?.doc - ? 
stringifyPair.stringifyPair(this, ctx, onComment, onChompKeep) - : JSON.stringify(this); - } -} - -exports.Pair = Pair; -exports.createPair = createPair; - - -/***/ }), - -/***/ 8160: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); -var toJS = __nccwpck_require__(8842); - -const isScalarValue = (value) => !value || (typeof value !== 'function' && typeof value !== 'object'); -class Scalar extends Node.NodeBase { - constructor(value) { - super(Node.SCALAR); - this.value = value; - } - toJSON(arg, ctx) { - return ctx?.keep ? this.value : toJS.toJS(this.value, arg, ctx); - } - toString() { - return String(this.value); - } -} -Scalar.BLOCK_FOLDED = 'BLOCK_FOLDED'; -Scalar.BLOCK_LITERAL = 'BLOCK_LITERAL'; -Scalar.PLAIN = 'PLAIN'; -Scalar.QUOTE_DOUBLE = 'QUOTE_DOUBLE'; -Scalar.QUOTE_SINGLE = 'QUOTE_SINGLE'; - -exports.Scalar = Scalar; -exports.isScalarValue = isScalarValue; - - -/***/ }), - -/***/ 6761: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var stringifyCollection = __nccwpck_require__(3541); -var addPairToJSMap = __nccwpck_require__(5193); -var Collection = __nccwpck_require__(9797); -var Node = __nccwpck_require__(9752); -var Pair = __nccwpck_require__(3497); -var Scalar = __nccwpck_require__(8160); - -function findPair(items, key) { - const k = Node.isScalar(key) ? key.value : key; - for (const it of items) { - if (Node.isPair(it)) { - if (it.key === key || it.key === k) - return it; - if (Node.isScalar(it.key) && it.key.value === k) - return it; - } - } - return undefined; -} -class YAMLMap extends Collection.Collection { - constructor(schema) { - super(Node.MAP, schema); - this.items = []; - } - static get tagName() { - return 'tag:yaml.org,2002:map'; - } - /** - * Adds a value to the collection. - * - * @param overwrite - If not set `true`, using a key that is already in the - * collection will throw. Otherwise, overwrites the previous value. - */ - add(pair, overwrite) { - let _pair; - if (Node.isPair(pair)) - _pair = pair; - else if (!pair || typeof pair !== 'object' || !('key' in pair)) { - // In TypeScript, this never happens. - _pair = new Pair.Pair(pair, pair?.value); - } - else - _pair = new Pair.Pair(pair.key, pair.value); - const prev = findPair(this.items, _pair.key); - const sortEntries = this.schema?.sortMapEntries; - if (prev) { - if (!overwrite) - throw new Error(`Key ${_pair.key} already set`); - // For scalars, keep the old node & its comments and anchors - if (Node.isScalar(prev.value) && Scalar.isScalarValue(_pair.value)) - prev.value.value = _pair.value; - else - prev.value = _pair.value; - } - else if (sortEntries) { - const i = this.items.findIndex(item => sortEntries(_pair, item) < 0); - if (i === -1) - this.items.push(_pair); - else - this.items.splice(i, 0, _pair); - } - else { - this.items.push(_pair); - } - } - delete(key) { - const it = findPair(this.items, key); - if (!it) - return false; - const del = this.items.splice(this.items.indexOf(it), 1); - return del.length > 0; - } - get(key, keepScalar) { - const it = findPair(this.items, key); - const node = it?.value; - return (!keepScalar && Node.isScalar(node) ? node.value : node) ?? 
undefined; - } - has(key) { - return !!findPair(this.items, key); - } - set(key, value) { - this.add(new Pair.Pair(key, value), true); - } - /** - * @param ctx - Conversion context, originally set in Document#toJS() - * @param {Class} Type - If set, forces the returned collection type - * @returns Instance of Type, Map, or Object - */ - toJSON(_, ctx, Type) { - const map = Type ? new Type() : ctx?.mapAsMap ? new Map() : {}; - if (ctx?.onCreate) - ctx.onCreate(map); - for (const item of this.items) - addPairToJSMap.addPairToJSMap(ctx, map, item); - return map; - } - toString(ctx, onComment, onChompKeep) { - if (!ctx) - return JSON.stringify(this); - for (const item of this.items) { - if (!Node.isPair(item)) - throw new Error(`Map items must all be pairs; found ${JSON.stringify(item)} instead`); - } - if (!ctx.allNullValues && this.hasAllNullValues(false)) - ctx = Object.assign({}, ctx, { allNullValues: true }); - return stringifyCollection.stringifyCollection(this, ctx, { - blockItemPrefix: '', - flowChars: { start: '{', end: '}' }, - itemIndent: ctx.indent || '', - onChompKeep, - onComment - }); - } -} - -exports.YAMLMap = YAMLMap; -exports.findPair = findPair; - - -/***/ }), - -/***/ 3810: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var stringifyCollection = __nccwpck_require__(3541); -var Collection = __nccwpck_require__(9797); -var Node = __nccwpck_require__(9752); -var Scalar = __nccwpck_require__(8160); -var toJS = __nccwpck_require__(8842); - -class YAMLSeq extends Collection.Collection { - constructor(schema) { - super(Node.SEQ, schema); - this.items = []; - } - static get tagName() { - return 'tag:yaml.org,2002:seq'; - } - add(value) { - this.items.push(value); - } - /** - * Removes a value from the collection. - * - * `key` must contain a representation of an integer for this to succeed. - * It may be wrapped in a `Scalar`. - * - * @returns `true` if the item was found and removed. - */ - delete(key) { - const idx = asItemIndex(key); - if (typeof idx !== 'number') - return false; - const del = this.items.splice(idx, 1); - return del.length > 0; - } - get(key, keepScalar) { - const idx = asItemIndex(key); - if (typeof idx !== 'number') - return undefined; - const it = this.items[idx]; - return !keepScalar && Node.isScalar(it) ? it.value : it; - } - /** - * Checks if the collection includes a value with the key `key`. - * - * `key` must contain a representation of an integer for this to succeed. - * It may be wrapped in a `Scalar`. - */ - has(key) { - const idx = asItemIndex(key); - return typeof idx === 'number' && idx < this.items.length; - } - /** - * Sets a value in this collection. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. - * - * If `key` does not contain a representation of an integer, this will throw. - * It may be wrapped in a `Scalar`. 
- */ - set(key, value) { - const idx = asItemIndex(key); - if (typeof idx !== 'number') - throw new Error(`Expected a valid index, not ${key}.`); - const prev = this.items[idx]; - if (Node.isScalar(prev) && Scalar.isScalarValue(value)) - prev.value = value; - else - this.items[idx] = value; - } - toJSON(_, ctx) { - const seq = []; - if (ctx?.onCreate) - ctx.onCreate(seq); - let i = 0; - for (const item of this.items) - seq.push(toJS.toJS(item, String(i++), ctx)); - return seq; - } - toString(ctx, onComment, onChompKeep) { - if (!ctx) - return JSON.stringify(this); - return stringifyCollection.stringifyCollection(this, ctx, { - blockItemPrefix: '- ', - flowChars: { start: '[', end: ']' }, - itemIndent: (ctx.indent || '') + ' ', - onChompKeep, - onComment - }); - } -} -function asItemIndex(key) { - let idx = Node.isScalar(key) ? key.value : key; - if (idx && typeof idx === 'string') - idx = Number(idx); - return typeof idx === 'number' && Number.isInteger(idx) && idx >= 0 - ? idx - : null; -} - -exports.YAMLSeq = YAMLSeq; - - -/***/ }), - -/***/ 5193: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var log = __nccwpck_require__(469); -var stringify = __nccwpck_require__(1922); -var Node = __nccwpck_require__(9752); -var Scalar = __nccwpck_require__(8160); -var toJS = __nccwpck_require__(8842); - -const MERGE_KEY = '<<'; -function addPairToJSMap(ctx, map, { key, value }) { - if (ctx?.doc.schema.merge && isMergeKey(key)) { - value = Node.isAlias(value) ? value.resolve(ctx.doc) : value; - if (Node.isSeq(value)) - for (const it of value.items) - mergeToJSMap(ctx, map, it); - else if (Array.isArray(value)) - for (const it of value) - mergeToJSMap(ctx, map, it); - else - mergeToJSMap(ctx, map, value); - } - else { - const jsKey = toJS.toJS(key, '', ctx); - if (map instanceof Map) { - map.set(jsKey, toJS.toJS(value, jsKey, ctx)); - } - else if (map instanceof Set) { - map.add(jsKey); - } - else { - const stringKey = stringifyKey(key, jsKey, ctx); - const jsValue = toJS.toJS(value, stringKey, ctx); - if (stringKey in map) - Object.defineProperty(map, stringKey, { - value: jsValue, - writable: true, - enumerable: true, - configurable: true - }); - else - map[stringKey] = jsValue; - } - } - return map; -} -const isMergeKey = (key) => key === MERGE_KEY || - (Node.isScalar(key) && - key.value === MERGE_KEY && - (!key.type || key.type === Scalar.Scalar.PLAIN)); -// If the value associated with a merge key is a single mapping node, each of -// its key/value pairs is inserted into the current mapping, unless the key -// already exists in it. If the value associated with the merge key is a -// sequence, then this sequence is expected to contain mapping nodes and each -// of these nodes is merged in turn according to its order in the sequence. -// Keys in mapping nodes earlier in the sequence override keys specified in -// later mapping nodes. -- http://yaml.org/type/merge.html -function mergeToJSMap(ctx, map, value) { - const source = ctx && Node.isAlias(value) ? 
value.resolve(ctx.doc) : value; - if (!Node.isMap(source)) - throw new Error('Merge sources must be maps or map aliases'); - const srcMap = source.toJSON(null, ctx, Map); - for (const [key, value] of srcMap) { - if (map instanceof Map) { - if (!map.has(key)) - map.set(key, value); - } - else if (map instanceof Set) { - map.add(key); - } - else if (!Object.prototype.hasOwnProperty.call(map, key)) { - Object.defineProperty(map, key, { - value, - writable: true, - enumerable: true, - configurable: true - }); - } - } - return map; -} -function stringifyKey(key, jsKey, ctx) { - if (jsKey === null) - return ''; - if (typeof jsKey !== 'object') - return String(jsKey); - if (Node.isNode(key) && ctx && ctx.doc) { - const strCtx = stringify.createStringifyContext(ctx.doc, {}); - strCtx.anchors = new Set(); - for (const node of ctx.anchors.keys()) - strCtx.anchors.add(node.anchor); - strCtx.inFlow = true; - strCtx.inStringifyKey = true; - const strKey = key.toString(strCtx); - if (!ctx.mapKeyWarned) { - let jsonStr = JSON.stringify(strKey); - if (jsonStr.length > 40) - jsonStr = jsonStr.substring(0, 36) + '..."'; - log.warn(ctx.doc.options.logLevel, `Keys with collection values will be stringified due to JS Object restrictions: ${jsonStr}. Set mapAsMap: true to use object keys.`); - ctx.mapKeyWarned = true; - } - return strKey; - } - return JSON.stringify(jsKey); -} - -exports.addPairToJSMap = addPairToJSMap; - - -/***/ }), - -/***/ 8842: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); - -/** - * Recursively convert any node or its contents to native JavaScript - * - * @param value - The input value - * @param arg - If `value` defines a `toJSON()` method, use this - * as its first argument - * @param ctx - Conversion context, originally set in Document#toJS(). If - * `{ keep: true }` is not set, output should be suitable for JSON - * stringification. - */ -function toJS(value, arg, ctx) { - // eslint-disable-next-line @typescript-eslint/no-unsafe-return - if (Array.isArray(value)) - return value.map((v, i) => toJS(v, String(i), ctx)); - if (value && typeof value.toJSON === 'function') { - // eslint-disable-next-line @typescript-eslint/no-unsafe-call - if (!ctx || !Node.hasAnchor(value)) - return value.toJSON(arg, ctx); - const data = { aliasCount: 0, count: 1, res: undefined }; - ctx.anchors.set(value, data); - ctx.onCreate = res => { - data.res = res; - delete ctx.onCreate; - }; - const res = value.toJSON(arg, ctx); - if (ctx.onCreate) - ctx.onCreate(res); - return res; - } - if (typeof value === 'bigint' && !ctx?.keep) - return Number(value); - return value; -} - -exports.toJS = toJS; - - -/***/ }), - -/***/ 4000: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var resolveBlockScalar = __nccwpck_require__(8172); -var resolveFlowScalar = __nccwpck_require__(6390); -var errors = __nccwpck_require__(3305); -var stringifyString = __nccwpck_require__(6084); - -function resolveAsScalar(token, strict = true, onError) { - if (token) { - const _onError = (pos, code, message) => { - const offset = typeof pos === 'number' ? pos : Array.isArray(pos) ? 
pos[0] : pos.offset; - if (onError) - onError(offset, code, message); - else - throw new errors.YAMLParseError([offset, offset + 1], code, message); - }; - switch (token.type) { - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': - return resolveFlowScalar.resolveFlowScalar(token, strict, _onError); - case 'block-scalar': - return resolveBlockScalar.resolveBlockScalar(token, strict, _onError); - } - } - return null; -} -/** - * Create a new scalar token with `value` - * - * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`, - * as this function does not support any schema operations and won't check for such conflicts. - * - * @param value The string representation of the value, which will have its content properly indented. - * @param context.end Comments and whitespace after the end of the value, or after the block scalar header. If undefined, a newline will be added. - * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value. - * @param context.indent The indent level of the token. - * @param context.inFlow Is this scalar within a flow collection? This may affect the resolved type of the token's value. - * @param context.offset The offset position of the token. - * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`. - */ -function createScalarToken(value, context) { - const { implicitKey = false, indent, inFlow = false, offset = -1, type = 'PLAIN' } = context; - const source = stringifyString.stringifyString({ type, value }, { - implicitKey, - indent: indent > 0 ? ' '.repeat(indent) : '', - inFlow, - options: { blockQuote: true, lineWidth: -1 } - }); - const end = context.end ?? [ - { type: 'newline', offset: -1, indent, source: '\n' } - ]; - switch (source[0]) { - case '|': - case '>': { - const he = source.indexOf('\n'); - const head = source.substring(0, he); - const body = source.substring(he + 1) + '\n'; - const props = [ - { type: 'block-scalar-header', offset, indent, source: head } - ]; - if (!addEndtoBlockProps(props, end)) - props.push({ type: 'newline', offset: -1, indent, source: '\n' }); - return { type: 'block-scalar', offset, indent, props, source: body }; - } - case '"': - return { type: 'double-quoted-scalar', offset, indent, source, end }; - case "'": - return { type: 'single-quoted-scalar', offset, indent, source, end }; - default: - return { type: 'scalar', offset, indent, source, end }; - } -} -/** - * Set the value of `token` to the given string `value`, overwriting any previous contents and type that it may have. - * - * Best efforts are made to retain any comments previously associated with the `token`, - * though all contents within a collection's `items` will be overwritten. - * - * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`, - * as this function does not support any schema operations and won't check for such conflicts. - * - * @param token Any token. If it does not include an `indent` value, the value will be stringified as if it were an implicit key. - * @param value The string representation of the value, which will have its content properly indented. - * @param context.afterKey In most cases, values after a key should have an additional level of indentation. 
- * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value. - * @param context.inFlow Being within a flow collection may affect the resolved type of the token's value. - * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`. - */ -function setScalarValue(token, value, context = {}) { - let { afterKey = false, implicitKey = false, inFlow = false, type } = context; - let indent = 'indent' in token ? token.indent : null; - if (afterKey && typeof indent === 'number') - indent += 2; - if (!type) - switch (token.type) { - case 'single-quoted-scalar': - type = 'QUOTE_SINGLE'; - break; - case 'double-quoted-scalar': - type = 'QUOTE_DOUBLE'; - break; - case 'block-scalar': { - const header = token.props[0]; - if (header.type !== 'block-scalar-header') - throw new Error('Invalid block scalar header'); - type = header.source[0] === '>' ? 'BLOCK_FOLDED' : 'BLOCK_LITERAL'; - break; - } - default: - type = 'PLAIN'; - } - const source = stringifyString.stringifyString({ type, value }, { - implicitKey: implicitKey || indent === null, - indent: indent !== null && indent > 0 ? ' '.repeat(indent) : '', - inFlow, - options: { blockQuote: true, lineWidth: -1 } - }); - switch (source[0]) { - case '|': - case '>': - setBlockScalarValue(token, source); - break; - case '"': - setFlowScalarValue(token, source, 'double-quoted-scalar'); - break; - case "'": - setFlowScalarValue(token, source, 'single-quoted-scalar'); - break; - default: - setFlowScalarValue(token, source, 'scalar'); - } -} -function setBlockScalarValue(token, source) { - const he = source.indexOf('\n'); - const head = source.substring(0, he); - const body = source.substring(he + 1) + '\n'; - if (token.type === 'block-scalar') { - const header = token.props[0]; - if (header.type !== 'block-scalar-header') - throw new Error('Invalid block scalar header'); - header.source = head; - token.source = body; - } - else { - const { offset } = token; - const indent = 'indent' in token ? token.indent : -1; - const props = [ - { type: 'block-scalar-header', offset, indent, source: head } - ]; - if (!addEndtoBlockProps(props, 'end' in token ? 
token.end : undefined)) - props.push({ type: 'newline', offset: -1, indent, source: '\n' }); - for (const key of Object.keys(token)) - if (key !== 'type' && key !== 'offset') - delete token[key]; - Object.assign(token, { type: 'block-scalar', indent, props, source: body }); - } -} -/** @returns `true` if last token is a newline */ -function addEndtoBlockProps(props, end) { - if (end) - for (const st of end) - switch (st.type) { - case 'space': - case 'comment': - props.push(st); - break; - case 'newline': - props.push(st); - return true; - } - return false; -} -function setFlowScalarValue(token, source, type) { - switch (token.type) { - case 'scalar': - case 'double-quoted-scalar': - case 'single-quoted-scalar': - token.type = type; - token.source = source; - break; - case 'block-scalar': { - const end = token.props.slice(1); - let oa = source.length; - if (token.props[0].type === 'block-scalar-header') - oa -= token.props[0].source.length; - for (const tok of end) - tok.offset += oa; - delete token.props; - Object.assign(token, { type, source, end }); - break; - } - case 'block-map': - case 'block-seq': { - const offset = token.offset + source.length; - const nl = { type: 'newline', offset, indent: token.indent, source: '\n' }; - delete token.items; - Object.assign(token, { type, source, end: [nl] }); - break; - } - default: { - const indent = 'indent' in token ? token.indent : -1; - const end = 'end' in token && Array.isArray(token.end) - ? token.end.filter(st => st.type === 'space' || - st.type === 'comment' || - st.type === 'newline') - : []; - for (const key of Object.keys(token)) - if (key !== 'type' && key !== 'offset') - delete token[key]; - Object.assign(token, { type, indent, source, end }); - } - } -} - -exports.createScalarToken = createScalarToken; -exports.resolveAsScalar = resolveAsScalar; -exports.setScalarValue = setScalarValue; - - -/***/ }), - -/***/ 28: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -/** - * Stringify a CST document, token, or collection item - * - * Fair warning: This applies no validation whatsoever, and - * simply concatenates the sources in their logical order. - */ -const stringify = (cst) => 'type' in cst ? 
stringifyToken(cst) : stringifyItem(cst); -function stringifyToken(token) { - switch (token.type) { - case 'block-scalar': { - let res = ''; - for (const tok of token.props) - res += stringifyToken(tok); - return res + token.source; - } - case 'block-map': - case 'block-seq': { - let res = ''; - for (const item of token.items) - res += stringifyItem(item); - return res; - } - case 'flow-collection': { - let res = token.start.source; - for (const item of token.items) - res += stringifyItem(item); - for (const st of token.end) - res += st.source; - return res; - } - case 'document': { - let res = stringifyItem(token); - if (token.end) - for (const st of token.end) - res += st.source; - return res; - } - default: { - let res = token.source; - if ('end' in token && token.end) - for (const st of token.end) - res += st.source; - return res; - } - } -} -function stringifyItem({ start, key, sep, value }) { - let res = ''; - for (const st of start) - res += st.source; - if (key) - res += stringifyToken(key); - if (sep) - for (const st of sep) - res += st.source; - if (value) - res += stringifyToken(value); - return res; -} - -exports.stringify = stringify; - - -/***/ }), - -/***/ 1621: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -const BREAK = Symbol('break visit'); -const SKIP = Symbol('skip children'); -const REMOVE = Symbol('remove item'); -/** - * Apply a visitor to a CST document or item. - * - * Walks through the tree (depth-first) starting from the root, calling a - * `visitor` function with two arguments when entering each item: - * - `item`: The current item, which included the following members: - * - `start: SourceToken[]` – Source tokens before the key or value, - * possibly including its anchor or tag. - * - `key?: Token | null` – Set for pair values. May then be `null`, if - * the key before the `:` separator is empty. - * - `sep?: SourceToken[]` – Source tokens between the key and the value, - * which should include the `:` map value indicator if `value` is set. - * - `value?: Token` – The value of a sequence item, or of a map pair. - * - `path`: The steps from the root to the current node, as an array of - * `['key' | 'value', number]` tuples. - * - * The return value of the visitor may be used to control the traversal: - * - `undefined` (default): Do nothing and continue - * - `visit.SKIP`: Do not visit the children of this token, continue with - * next sibling - * - `visit.BREAK`: Terminate traversal completely - * - `visit.REMOVE`: Remove the current item, then continue with the next one - * - `number`: Set the index of the next step. This is useful especially if - * the index of the current token has changed. - * - `function`: Define the next visitor for this item. After the original - * visitor is called on item entry, next visitors are called after handling - * a non-empty `key` and when exiting the item. - */ -function visit(cst, visitor) { - if ('type' in cst && cst.type === 'document') - cst = { start: cst.start, value: cst.value }; - _visit(Object.freeze([]), cst, visitor); -} -// Without the `as symbol` casts, TS declares these in the `visit` -// namespace using `var`, but then complains about that because -// `unique symbol` must be `const`. 
-/** Terminate visit traversal completely */ -visit.BREAK = BREAK; -/** Do not visit the children of the current item */ -visit.SKIP = SKIP; -/** Remove the current item */ -visit.REMOVE = REMOVE; -/** Find the item at `path` from `cst` as the root */ -visit.itemAtPath = (cst, path) => { - let item = cst; - for (const [field, index] of path) { - const tok = item?.[field]; - if (tok && 'items' in tok) { - item = tok.items[index]; - } - else - return undefined; - } - return item; -}; -/** - * Get the immediate parent collection of the item at `path` from `cst` as the root. - * - * Throws an error if the collection is not found, which should never happen if the item itself exists. - */ -visit.parentCollection = (cst, path) => { - const parent = visit.itemAtPath(cst, path.slice(0, -1)); - const field = path[path.length - 1][0]; - const coll = parent?.[field]; - if (coll && 'items' in coll) - return coll; - throw new Error('Parent collection not found'); -}; -function _visit(path, item, visitor) { - let ctrl = visitor(item, path); - if (typeof ctrl === 'symbol') - return ctrl; - for (const field of ['key', 'value']) { - const token = item[field]; - if (token && 'items' in token) { - for (let i = 0; i < token.items.length; ++i) { - const ci = _visit(Object.freeze(path.concat([[field, i]])), token.items[i], visitor); - if (typeof ci === 'number') - i = ci - 1; - else if (ci === BREAK) - return BREAK; - else if (ci === REMOVE) { - token.items.splice(i, 1); - i -= 1; - } - } - if (typeof ctrl === 'function' && field === 'key') - ctrl = ctrl(item, path); - } - } - return typeof ctrl === 'function' ? ctrl(item, path) : ctrl; -} - -exports.visit = visit; - - -/***/ }), - -/***/ 9726: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var cstScalar = __nccwpck_require__(4000); -var cstStringify = __nccwpck_require__(28); -var cstVisit = __nccwpck_require__(1621); - -/** The byte order mark */ -const BOM = '\u{FEFF}'; -/** Start of doc-mode */ -const DOCUMENT = '\x02'; // C0: Start of Text -/** Unexpected end of flow-mode */ -const FLOW_END = '\x18'; // C0: Cancel -/** Next token is a scalar value */ -const SCALAR = '\x1f'; // C0: Unit Separator -/** @returns `true` if `token` is a flow or block collection */ -const isCollection = (token) => !!token && 'items' in token; -/** @returns `true` if `token` is a flow or block scalar; not an alias */ -const isScalar = (token) => !!token && - (token.type === 'scalar' || - token.type === 'single-quoted-scalar' || - token.type === 'double-quoted-scalar' || - token.type === 'block-scalar'); -/* istanbul ignore next */ -/** Get a printable representation of a lexer token */ -function prettyToken(token) { - switch (token) { - case BOM: - return ''; - case DOCUMENT: - return ''; - case FLOW_END: - return ''; - case SCALAR: - return ''; - default: - return JSON.stringify(token); - } -} -/** Identify the type of a lexer token. May return `null` for unknown tokens. 
*/ -function tokenType(source) { - switch (source) { - case BOM: - return 'byte-order-mark'; - case DOCUMENT: - return 'doc-mode'; - case FLOW_END: - return 'flow-error-end'; - case SCALAR: - return 'scalar'; - case '---': - return 'doc-start'; - case '...': - return 'doc-end'; - case '': - case '\n': - case '\r\n': - return 'newline'; - case '-': - return 'seq-item-ind'; - case '?': - return 'explicit-key-ind'; - case ':': - return 'map-value-ind'; - case '{': - return 'flow-map-start'; - case '}': - return 'flow-map-end'; - case '[': - return 'flow-seq-start'; - case ']': - return 'flow-seq-end'; - case ',': - return 'comma'; - } - switch (source[0]) { - case ' ': - case '\t': - return 'space'; - case '#': - return 'comment'; - case '%': - return 'directive-line'; - case '*': - return 'alias'; - case '&': - return 'anchor'; - case '!': - return 'tag'; - case "'": - return 'single-quoted-scalar'; - case '"': - return 'double-quoted-scalar'; - case '|': - case '>': - return 'block-scalar-header'; - } - return null; -} - -exports.createScalarToken = cstScalar.createScalarToken; -exports.resolveAsScalar = cstScalar.resolveAsScalar; -exports.setScalarValue = cstScalar.setScalarValue; -exports.stringify = cstStringify.stringify; -exports.visit = cstVisit.visit; -exports.BOM = BOM; -exports.DOCUMENT = DOCUMENT; -exports.FLOW_END = FLOW_END; -exports.SCALAR = SCALAR; -exports.isCollection = isCollection; -exports.isScalar = isScalar; -exports.prettyToken = prettyToken; -exports.tokenType = tokenType; - - -/***/ }), - -/***/ 7263: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var cst = __nccwpck_require__(9726); - -/* -START -> stream - -stream - directive -> line-end -> stream - indent + line-end -> stream - [else] -> line-start - -line-end - comment -> line-end - newline -> . - input-end -> END - -line-start - doc-start -> doc - doc-end -> stream - [else] -> indent -> block-start - -block-start - seq-item-start -> block-start - explicit-key-start -> block-start - map-value-start -> block-start - [else] -> doc - -doc - line-end -> line-start - spaces -> doc - anchor -> doc - tag -> doc - flow-start -> flow -> doc - flow-end -> error -> doc - seq-item-start -> error -> doc - explicit-key-start -> error -> doc - map-value-start -> doc - alias -> doc - quote-start -> quoted-scalar -> doc - block-scalar-header -> line-end -> block-scalar(min) -> line-start - [else] -> plain-scalar(false, min) -> doc - -flow - line-end -> flow - spaces -> flow - anchor -> flow - tag -> flow - flow-start -> flow -> flow - flow-end -> . - seq-item-start -> error -> flow - explicit-key-start -> flow - map-value-start -> flow - alias -> flow - quote-start -> quoted-scalar -> flow - comma -> flow - [else] -> plain-scalar(true, 0) -> flow - -quoted-scalar - quote-end -> . - [else] -> quoted-scalar - -block-scalar(min) - newline + peek(indent < min) -> . - [else] -> block-scalar(min) - -plain-scalar(is-flow, min) - scalar-end(is-flow) -> . - peek(newline + (indent < min)) -> . 
- [else] -> plain-scalar(min) -*/ -function isEmpty(ch) { - switch (ch) { - case undefined: - case ' ': - case '\n': - case '\r': - case '\t': - return true; - default: - return false; - } -} -const hexDigits = '0123456789ABCDEFabcdef'.split(''); -const tagChars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-#;/?:@&=+$_.!~*'()".split(''); -const invalidFlowScalarChars = ',[]{}'.split(''); -const invalidAnchorChars = ' ,[]{}\n\r\t'.split(''); -const isNotAnchorChar = (ch) => !ch || invalidAnchorChars.includes(ch); -/** - * Splits an input string into lexical tokens, i.e. smaller strings that are - * easily identifiable by `tokens.tokenType()`. - * - * Lexing starts always in a "stream" context. Incomplete input may be buffered - * until a complete token can be emitted. - * - * In addition to slices of the original input, the following control characters - * may also be emitted: - * - * - `\x02` (Start of Text): A document starts with the next token - * - `\x18` (Cancel): Unexpected end of flow-mode (indicates an error) - * - `\x1f` (Unit Separator): Next token is a scalar value - * - `\u{FEFF}` (Byte order mark): Emitted separately outside documents - */ -class Lexer { - constructor() { - /** - * Flag indicating whether the end of the current buffer marks the end of - * all input - */ - this.atEnd = false; - /** - * Explicit indent set in block scalar header, as an offset from the current - * minimum indent, so e.g. set to 1 from a header `|2+`. Set to -1 if not - * explicitly set. - */ - this.blockScalarIndent = -1; - /** - * Block scalars that include a + (keep) chomping indicator in their header - * include trailing empty lines, which are otherwise excluded from the - * scalar's contents. - */ - this.blockScalarKeep = false; - /** Current input */ - this.buffer = ''; - /** - * Flag noting whether the map value indicator : can immediately follow this - * node within a flow context. - */ - this.flowKey = false; - /** Count of surrounding flow collection levels. */ - this.flowLevel = 0; - /** - * Minimum level of indentation required for next lines to be parsed as a - * part of the current scalar value. - */ - this.indentNext = 0; - /** Indentation level of the current line. */ - this.indentValue = 0; - /** Position of the next \n character. */ - this.lineEndPos = null; - /** Stores the state of the lexer if reaching the end of incpomplete input */ - this.next = null; - /** A pointer to `buffer`; the current position of the lexer. */ - this.pos = 0; - } - /** - * Generate YAML tokens from the `source` string. If `incomplete`, - * a part of the last line may be left as a buffer for the next call. - * - * @returns A generator of lexical tokens - */ - *lex(source, incomplete = false) { - if (source) { - this.buffer = this.buffer ? this.buffer + source : source; - this.lineEndPos = null; - } - this.atEnd = !incomplete; - let next = this.next ?? 
'stream'; - while (next && (incomplete || this.hasChars(1))) - next = yield* this.parseNext(next); - } - atLineEnd() { - let i = this.pos; - let ch = this.buffer[i]; - while (ch === ' ' || ch === '\t') - ch = this.buffer[++i]; - if (!ch || ch === '#' || ch === '\n') - return true; - if (ch === '\r') - return this.buffer[i + 1] === '\n'; - return false; - } - charAt(n) { - return this.buffer[this.pos + n]; - } - continueScalar(offset) { - let ch = this.buffer[offset]; - if (this.indentNext > 0) { - let indent = 0; - while (ch === ' ') - ch = this.buffer[++indent + offset]; - if (ch === '\r') { - const next = this.buffer[indent + offset + 1]; - if (next === '\n' || (!next && !this.atEnd)) - return offset + indent + 1; - } - return ch === '\n' || indent >= this.indentNext || (!ch && !this.atEnd) - ? offset + indent - : -1; - } - if (ch === '-' || ch === '.') { - const dt = this.buffer.substr(offset, 3); - if ((dt === '---' || dt === '...') && isEmpty(this.buffer[offset + 3])) - return -1; - } - return offset; - } - getLine() { - let end = this.lineEndPos; - if (typeof end !== 'number' || (end !== -1 && end < this.pos)) { - end = this.buffer.indexOf('\n', this.pos); - this.lineEndPos = end; - } - if (end === -1) - return this.atEnd ? this.buffer.substring(this.pos) : null; - if (this.buffer[end - 1] === '\r') - end -= 1; - return this.buffer.substring(this.pos, end); - } - hasChars(n) { - return this.pos + n <= this.buffer.length; - } - setNext(state) { - this.buffer = this.buffer.substring(this.pos); - this.pos = 0; - this.lineEndPos = null; - this.next = state; - return null; - } - peek(n) { - return this.buffer.substr(this.pos, n); - } - *parseNext(next) { - switch (next) { - case 'stream': - return yield* this.parseStream(); - case 'line-start': - return yield* this.parseLineStart(); - case 'block-start': - return yield* this.parseBlockStart(); - case 'doc': - return yield* this.parseDocument(); - case 'flow': - return yield* this.parseFlowCollection(); - case 'quoted-scalar': - return yield* this.parseQuotedScalar(); - case 'block-scalar': - return yield* this.parseBlockScalar(); - case 'plain-scalar': - return yield* this.parsePlainScalar(); - } - } - *parseStream() { - let line = this.getLine(); - if (line === null) - return this.setNext('stream'); - if (line[0] === cst.BOM) { - yield* this.pushCount(1); - line = line.substring(1); - } - if (line[0] === '%') { - let dirEnd = line.length; - const cs = line.indexOf('#'); - if (cs !== -1) { - const ch = line[cs - 1]; - if (ch === ' ' || ch === '\t') - dirEnd = cs - 1; - } - while (true) { - const ch = line[dirEnd - 1]; - if (ch === ' ' || ch === '\t') - dirEnd -= 1; - else - break; - } - const n = (yield* this.pushCount(dirEnd)) + (yield* this.pushSpaces(true)); - yield* this.pushCount(line.length - n); // possible comment - this.pushNewline(); - return 'stream'; - } - if (this.atLineEnd()) { - const sp = yield* this.pushSpaces(true); - yield* this.pushCount(line.length - sp); - yield* this.pushNewline(); - return 'stream'; - } - yield cst.DOCUMENT; - return yield* this.parseLineStart(); - } - *parseLineStart() { - const ch = this.charAt(0); - if (!ch && !this.atEnd) - return this.setNext('line-start'); - if (ch === '-' || ch === '.') { - if (!this.atEnd && !this.hasChars(4)) - return this.setNext('line-start'); - const s = this.peek(3); - if (s === '---' && isEmpty(this.charAt(3))) { - yield* this.pushCount(3); - this.indentValue = 0; - this.indentNext = 0; - return 'doc'; - } - else if (s === '...' 
&& isEmpty(this.charAt(3))) { - yield* this.pushCount(3); - return 'stream'; - } - } - this.indentValue = yield* this.pushSpaces(false); - if (this.indentNext > this.indentValue && !isEmpty(this.charAt(1))) - this.indentNext = this.indentValue; - return yield* this.parseBlockStart(); - } - *parseBlockStart() { - const [ch0, ch1] = this.peek(2); - if (!ch1 && !this.atEnd) - return this.setNext('block-start'); - if ((ch0 === '-' || ch0 === '?' || ch0 === ':') && isEmpty(ch1)) { - const n = (yield* this.pushCount(1)) + (yield* this.pushSpaces(true)); - this.indentNext = this.indentValue + 1; - this.indentValue += n; - return yield* this.parseBlockStart(); - } - return 'doc'; - } - *parseDocument() { - yield* this.pushSpaces(true); - const line = this.getLine(); - if (line === null) - return this.setNext('doc'); - let n = yield* this.pushIndicators(); - switch (line[n]) { - case '#': - yield* this.pushCount(line.length - n); - // fallthrough - case undefined: - yield* this.pushNewline(); - return yield* this.parseLineStart(); - case '{': - case '[': - yield* this.pushCount(1); - this.flowKey = false; - this.flowLevel = 1; - return 'flow'; - case '}': - case ']': - // this is an error - yield* this.pushCount(1); - return 'doc'; - case '*': - yield* this.pushUntil(isNotAnchorChar); - return 'doc'; - case '"': - case "'": - return yield* this.parseQuotedScalar(); - case '|': - case '>': - n += yield* this.parseBlockScalarHeader(); - n += yield* this.pushSpaces(true); - yield* this.pushCount(line.length - n); - yield* this.pushNewline(); - return yield* this.parseBlockScalar(); - default: - return yield* this.parsePlainScalar(); - } - } - *parseFlowCollection() { - let nl, sp; - let indent = -1; - do { - nl = yield* this.pushNewline(); - if (nl > 0) { - sp = yield* this.pushSpaces(false); - this.indentValue = indent = sp; - } - else { - sp = 0; - } - sp += yield* this.pushSpaces(true); - } while (nl + sp > 0); - const line = this.getLine(); - if (line === null) - return this.setNext('flow'); - if ((indent !== -1 && indent < this.indentNext && line[0] !== '#') || - (indent === 0 && - (line.startsWith('---') || line.startsWith('...')) && - isEmpty(line[3]))) { - // Allowing for the terminal ] or } at the same (rather than greater) - // indent level as the initial [ or { is technically invalid, but - // failing here would be surprising to users. - const atFlowEndMarker = indent === this.indentNext - 1 && - this.flowLevel === 1 && - (line[0] === ']' || line[0] === '}'); - if (!atFlowEndMarker) { - // this is an error - this.flowLevel = 0; - yield cst.FLOW_END; - return yield* this.parseLineStart(); - } - } - let n = 0; - while (line[n] === ',') { - n += yield* this.pushCount(1); - n += yield* this.pushSpaces(true); - this.flowKey = false; - } - n += yield* this.pushIndicators(); - switch (line[n]) { - case undefined: - return 'flow'; - case '#': - yield* this.pushCount(line.length - n); - return 'flow'; - case '{': - case '[': - yield* this.pushCount(1); - this.flowKey = false; - this.flowLevel += 1; - return 'flow'; - case '}': - case ']': - yield* this.pushCount(1); - this.flowKey = true; - this.flowLevel -= 1; - return this.flowLevel ? 
'flow' : 'doc'; - case '*': - yield* this.pushUntil(isNotAnchorChar); - return 'flow'; - case '"': - case "'": - this.flowKey = true; - return yield* this.parseQuotedScalar(); - case ':': { - const next = this.charAt(1); - if (this.flowKey || isEmpty(next) || next === ',') { - this.flowKey = false; - yield* this.pushCount(1); - yield* this.pushSpaces(true); - return 'flow'; - } - } - // fallthrough - default: - this.flowKey = false; - return yield* this.parsePlainScalar(); - } - } - *parseQuotedScalar() { - const quote = this.charAt(0); - let end = this.buffer.indexOf(quote, this.pos + 1); - if (quote === "'") { - while (end !== -1 && this.buffer[end + 1] === "'") - end = this.buffer.indexOf("'", end + 2); - } - else { - // double-quote - while (end !== -1) { - let n = 0; - while (this.buffer[end - 1 - n] === '\\') - n += 1; - if (n % 2 === 0) - break; - end = this.buffer.indexOf('"', end + 1); - } - } - // Only looking for newlines within the quotes - const qb = this.buffer.substring(0, end); - let nl = qb.indexOf('\n', this.pos); - if (nl !== -1) { - while (nl !== -1) { - const cs = this.continueScalar(nl + 1); - if (cs === -1) - break; - nl = qb.indexOf('\n', cs); - } - if (nl !== -1) { - // this is an error caused by an unexpected unindent - end = nl - (qb[nl - 1] === '\r' ? 2 : 1); - } - } - if (end === -1) { - if (!this.atEnd) - return this.setNext('quoted-scalar'); - end = this.buffer.length; - } - yield* this.pushToIndex(end + 1, false); - return this.flowLevel ? 'flow' : 'doc'; - } - *parseBlockScalarHeader() { - this.blockScalarIndent = -1; - this.blockScalarKeep = false; - let i = this.pos; - while (true) { - const ch = this.buffer[++i]; - if (ch === '+') - this.blockScalarKeep = true; - else if (ch > '0' && ch <= '9') - this.blockScalarIndent = Number(ch) - 1; - else if (ch !== '-') - break; - } - return yield* this.pushUntil(ch => isEmpty(ch) || ch === '#'); - } - *parseBlockScalar() { - let nl = this.pos - 1; // may be -1 if this.pos === 0 - let indent = 0; - let ch; - loop: for (let i = this.pos; (ch = this.buffer[i]); ++i) { - switch (ch) { - case ' ': - indent += 1; - break; - case '\n': - nl = i; - indent = 0; - break; - case '\r': { - const next = this.buffer[i + 1]; - if (!next && !this.atEnd) - return this.setNext('block-scalar'); - if (next === '\n') - break; - } // fallthrough - default: - break loop; - } - } - if (!ch && !this.atEnd) - return this.setNext('block-scalar'); - if (indent >= this.indentNext) { - if (this.blockScalarIndent === -1) - this.indentNext = indent; - else - this.indentNext += this.blockScalarIndent; - do { - const cs = this.continueScalar(nl + 1); - if (cs === -1) - break; - nl = this.buffer.indexOf('\n', cs); - } while (nl !== -1); - if (nl === -1) { - if (!this.atEnd) - return this.setNext('block-scalar'); - nl = this.buffer.length; - } - } - if (!this.blockScalarKeep) { - do { - let i = nl - 1; - let ch = this.buffer[i]; - if (ch === '\r') - ch = this.buffer[--i]; - const lastChar = i; // Drop the line if last char not more indented - while (ch === ' ' || ch === '\t') - ch = this.buffer[--i]; - if (ch === '\n' && i >= this.pos && i + 1 + indent > lastChar) - nl = i; - else - break; - } while (true); - } - yield cst.SCALAR; - yield* this.pushToIndex(nl + 1, true); - return yield* this.parseLineStart(); - } - *parsePlainScalar() { - const inFlow = this.flowLevel > 0; - let end = this.pos - 1; - let i = this.pos - 1; - let ch; - while ((ch = this.buffer[++i])) { - if (ch === ':') { - const next = this.buffer[i + 1]; - if (isEmpty(next) || 
(inFlow && next === ',')) - break; - end = i; - } - else if (isEmpty(ch)) { - let next = this.buffer[i + 1]; - if (ch === '\r') { - if (next === '\n') { - i += 1; - ch = '\n'; - next = this.buffer[i + 1]; - } - else - end = i; - } - if (next === '#' || (inFlow && invalidFlowScalarChars.includes(next))) - break; - if (ch === '\n') { - const cs = this.continueScalar(i + 1); - if (cs === -1) - break; - i = Math.max(i, cs - 2); // to advance, but still account for ' #' - } - } - else { - if (inFlow && invalidFlowScalarChars.includes(ch)) - break; - end = i; - } - } - if (!ch && !this.atEnd) - return this.setNext('plain-scalar'); - yield cst.SCALAR; - yield* this.pushToIndex(end + 1, true); - return inFlow ? 'flow' : 'doc'; - } - *pushCount(n) { - if (n > 0) { - yield this.buffer.substr(this.pos, n); - this.pos += n; - return n; - } - return 0; - } - *pushToIndex(i, allowEmpty) { - const s = this.buffer.slice(this.pos, i); - if (s) { - yield s; - this.pos += s.length; - return s.length; - } - else if (allowEmpty) - yield ''; - return 0; - } - *pushIndicators() { - switch (this.charAt(0)) { - case '!': - return ((yield* this.pushTag()) + - (yield* this.pushSpaces(true)) + - (yield* this.pushIndicators())); - case '&': - return ((yield* this.pushUntil(isNotAnchorChar)) + - (yield* this.pushSpaces(true)) + - (yield* this.pushIndicators())); - case '-': // this is an error - case '?': // this is an error outside flow collections - case ':': { - const inFlow = this.flowLevel > 0; - const ch1 = this.charAt(1); - if (isEmpty(ch1) || (inFlow && invalidFlowScalarChars.includes(ch1))) { - if (!inFlow) - this.indentNext = this.indentValue + 1; - else if (this.flowKey) - this.flowKey = false; - return ((yield* this.pushCount(1)) + - (yield* this.pushSpaces(true)) + - (yield* this.pushIndicators())); - } - } - } - return 0; - } - *pushTag() { - if (this.charAt(1) === '<') { - let i = this.pos + 2; - let ch = this.buffer[i]; - while (!isEmpty(ch) && ch !== '>') - ch = this.buffer[++i]; - return yield* this.pushToIndex(ch === '>' ? i + 1 : i, false); - } - else { - let i = this.pos + 1; - let ch = this.buffer[i]; - while (ch) { - if (tagChars.includes(ch)) - ch = this.buffer[++i]; - else if (ch === '%' && - hexDigits.includes(this.buffer[i + 1]) && - hexDigits.includes(this.buffer[i + 2])) { - ch = this.buffer[(i += 3)]; - } - else - break; - } - return yield* this.pushToIndex(i, false); - } - } - *pushNewline() { - const ch = this.buffer[this.pos]; - if (ch === '\n') - return yield* this.pushCount(1); - else if (ch === '\r' && this.charAt(1) === '\n') - return yield* this.pushCount(2); - else - return 0; - } - *pushSpaces(allowTabs) { - let i = this.pos - 1; - let ch; - do { - ch = this.buffer[++i]; - } while (ch === ' ' || (allowTabs && ch === '\t')); - const n = i - this.pos; - if (n > 0) { - yield this.buffer.substr(this.pos, n); - this.pos = i; - } - return n; - } - *pushUntil(test) { - let i = this.pos; - let ch = this.buffer[i]; - while (!test(ch)) - ch = this.buffer[++i]; - return yield* this.pushToIndex(i, false); - } -} - -exports.Lexer = Lexer; - - -/***/ }), - -/***/ 5748: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -/** - * Tracks newlines during parsing in order to provide an efficient API for - * determining the one-indexed `{ line, col }` position for any offset - * within the input. - */ -class LineCounter { - constructor() { - this.lineStarts = []; - /** - * Should be called in ascending order. 
Otherwise, call - * `lineCounter.lineStarts.sort()` before calling `linePos()`. - */ - this.addNewLine = (offset) => this.lineStarts.push(offset); - /** - * Performs a binary search and returns the 1-indexed { line, col } - * position of `offset`. If `line === 0`, `addNewLine` has never been - * called or `offset` is before the first known newline. - */ - this.linePos = (offset) => { - let low = 0; - let high = this.lineStarts.length; - while (low < high) { - const mid = (low + high) >> 1; // Math.floor((low + high) / 2) - if (this.lineStarts[mid] < offset) - low = mid + 1; - else - high = mid; - } - if (this.lineStarts[low] === offset) - return { line: low + 1, col: 1 }; - if (low === 0) - return { line: 0, col: offset }; - const start = this.lineStarts[low - 1]; - return { line: low, col: offset - start + 1 }; - }; - } -} - -exports.LineCounter = LineCounter; - - -/***/ }), - -/***/ 6466: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var cst = __nccwpck_require__(9726); -var lexer = __nccwpck_require__(7263); - -function includesToken(list, type) { - for (let i = 0; i < list.length; ++i) - if (list[i].type === type) - return true; - return false; -} -function findNonEmptyIndex(list) { - for (let i = 0; i < list.length; ++i) { - switch (list[i].type) { - case 'space': - case 'comment': - case 'newline': - break; - default: - return i; - } - } - return -1; -} -function isFlowToken(token) { - switch (token?.type) { - case 'alias': - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': - case 'flow-collection': - return true; - default: - return false; - } -} -function getPrevProps(parent) { - switch (parent.type) { - case 'document': - return parent.start; - case 'block-map': { - const it = parent.items[parent.items.length - 1]; - return it.sep ?? it.start; - } - case 'block-seq': - return parent.items[parent.items.length - 1].start; - /* istanbul ignore next should not happen */ - default: - return []; - } -} -/** Note: May modify input array */ -function getFirstKeyStartProps(prev) { - if (prev.length === 0) - return []; - let i = prev.length; - loop: while (--i >= 0) { - switch (prev[i].type) { - case 'doc-start': - case 'explicit-key-ind': - case 'map-value-ind': - case 'seq-item-ind': - case 'newline': - break loop; - } - } - while (prev[++i]?.type === 'space') { - /* loop */ - } - return prev.splice(i, prev.length); -} -function fixFlowSeqItems(fc) { - if (fc.start.type === 'flow-seq-start') { - for (const it of fc.items) { - if (it.sep && - !it.value && - !includesToken(it.start, 'explicit-key-ind') && - !includesToken(it.sep, 'map-value-ind')) { - if (it.key) - it.value = it.key; - delete it.key; - if (isFlowToken(it.value)) { - if (it.value.end) - Array.prototype.push.apply(it.value.end, it.sep); - else - it.value.end = it.sep; - } - else - Array.prototype.push.apply(it.start, it.sep); - delete it.sep; - } - } - } -} -/** - * A YAML concrete syntax tree (CST) parser - * - * ```ts - * const src: string = ... - * for (const token of new Parser().parse(src)) { - * // token: Token - * } - * ``` - * - * To use the parser with a user-provided lexer: - * - * ```ts - * function* parse(source: string, lexer: Lexer) { - * const parser = new Parser() - * for (const lexeme of lexer.lex(source)) - * yield* parser.next(lexeme) - * yield* parser.end() - * } - * - * const src: string = ... 
- * const lexer = new Lexer() - * for (const token of parse(src, lexer)) { - * // token: Token - * } - * ``` - */ -class Parser { - /** - * @param onNewLine - If defined, called separately with the start position of - * each new line (in `parse()`, including the start of input). - */ - constructor(onNewLine) { - /** If true, space and sequence indicators count as indentation */ - this.atNewLine = true; - /** If true, next token is a scalar value */ - this.atScalar = false; - /** Current indentation level */ - this.indent = 0; - /** Current offset since the start of parsing */ - this.offset = 0; - /** On the same line with a block map key */ - this.onKeyLine = false; - /** Top indicates the node that's currently being built */ - this.stack = []; - /** The source of the current token, set in parse() */ - this.source = ''; - /** The type of the current token, set in parse() */ - this.type = ''; - // Must be defined after `next()` - this.lexer = new lexer.Lexer(); - this.onNewLine = onNewLine; - } - /** - * Parse `source` as a YAML stream. - * If `incomplete`, a part of the last line may be left as a buffer for the next call. - * - * Errors are not thrown, but yielded as `{ type: 'error', message }` tokens. - * - * @returns A generator of tokens representing each directive, document, and other structure. - */ - *parse(source, incomplete = false) { - if (this.onNewLine && this.offset === 0) - this.onNewLine(0); - for (const lexeme of this.lexer.lex(source, incomplete)) - yield* this.next(lexeme); - if (!incomplete) - yield* this.end(); - } - /** - * Advance the parser by the `source` of one lexical token. - */ - *next(source) { - this.source = source; - if (process.env.LOG_TOKENS) - console.log('|', cst.prettyToken(source)); - if (this.atScalar) { - this.atScalar = false; - yield* this.step(); - this.offset += source.length; - return; - } - const type = cst.tokenType(source); - if (!type) { - const message = `Not a YAML token: ${source}`; - yield* this.pop({ type: 'error', offset: this.offset, message, source }); - this.offset += source.length; - } - else if (type === 'scalar') { - this.atNewLine = false; - this.atScalar = true; - this.type = 'scalar'; - } - else { - this.type = type; - yield* this.step(); - switch (type) { - case 'newline': - this.atNewLine = true; - this.indent = 0; - if (this.onNewLine) - this.onNewLine(this.offset + source.length); - break; - case 'space': - if (this.atNewLine && source[0] === ' ') - this.indent += source.length; - break; - case 'explicit-key-ind': - case 'map-value-ind': - case 'seq-item-ind': - if (this.atNewLine) - this.indent += source.length; - break; - case 'doc-mode': - case 'flow-error-end': - return; - default: - this.atNewLine = false; - } - this.offset += source.length; - } - } - /** Call at end of input to push out any remaining constructions */ - *end() { - while (this.stack.length > 0) - yield* this.pop(); - } - get sourceToken() { - const st = { - type: this.type, - offset: this.offset, - indent: this.indent, - source: this.source - }; - return st; - } - *step() { - const top = this.peek(1); - if (this.type === 'doc-end' && (!top || top.type !== 'doc-end')) { - while (this.stack.length > 0) - yield* this.pop(); - this.stack.push({ - type: 'doc-end', - offset: this.offset, - source: this.source - }); - return; - } - if (!top) - return yield* this.stream(); - switch (top.type) { - case 'document': - return yield* this.document(top); - case 'alias': - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': - return yield* 
this.scalar(top); - case 'block-scalar': - return yield* this.blockScalar(top); - case 'block-map': - return yield* this.blockMap(top); - case 'block-seq': - return yield* this.blockSequence(top); - case 'flow-collection': - return yield* this.flowCollection(top); - case 'doc-end': - return yield* this.documentEnd(top); - } - /* istanbul ignore next should not happen */ - yield* this.pop(); - } - peek(n) { - return this.stack[this.stack.length - n]; - } - *pop(error) { - const token = error ?? this.stack.pop(); - /* istanbul ignore if should not happen */ - if (!token) { - const message = 'Tried to pop an empty stack'; - yield { type: 'error', offset: this.offset, source: '', message }; - } - else if (this.stack.length === 0) { - yield token; - } - else { - const top = this.peek(1); - if (token.type === 'block-scalar') { - // Block scalars use their parent rather than header indent - token.indent = 'indent' in top ? top.indent : 0; - } - else if (token.type === 'flow-collection' && top.type === 'document') { - // Ignore all indent for top-level flow collections - token.indent = 0; - } - if (token.type === 'flow-collection') - fixFlowSeqItems(token); - switch (top.type) { - case 'document': - top.value = token; - break; - case 'block-scalar': - top.props.push(token); // error - break; - case 'block-map': { - const it = top.items[top.items.length - 1]; - if (it.value) { - top.items.push({ start: [], key: token, sep: [] }); - this.onKeyLine = true; - return; - } - else if (it.sep) { - it.value = token; - } - else { - Object.assign(it, { key: token, sep: [] }); - this.onKeyLine = !includesToken(it.start, 'explicit-key-ind'); - return; - } - break; - } - case 'block-seq': { - const it = top.items[top.items.length - 1]; - if (it.value) - top.items.push({ start: [], value: token }); - else - it.value = token; - break; - } - case 'flow-collection': { - const it = top.items[top.items.length - 1]; - if (!it || it.value) - top.items.push({ start: [], key: token, sep: [] }); - else if (it.sep) - it.value = token; - else - Object.assign(it, { key: token, sep: [] }); - return; - } - /* istanbul ignore next should not happen */ - default: - yield* this.pop(); - yield* this.pop(token); - } - if ((top.type === 'document' || - top.type === 'block-map' || - top.type === 'block-seq') && - (token.type === 'block-map' || token.type === 'block-seq')) { - const last = token.items[token.items.length - 1]; - if (last && - !last.sep && - !last.value && - last.start.length > 0 && - findNonEmptyIndex(last.start) === -1 && - (token.indent === 0 || - last.start.every(st => st.type !== 'comment' || st.indent < token.indent))) { - if (top.type === 'document') - top.end = last.start; - else - top.items.push({ start: last.start }); - token.items.splice(-1, 1); - } - } - } - } - *stream() { - switch (this.type) { - case 'directive-line': - yield { type: 'directive', offset: this.offset, source: this.source }; - return; - case 'byte-order-mark': - case 'space': - case 'comment': - case 'newline': - yield this.sourceToken; - return; - case 'doc-mode': - case 'doc-start': { - const doc = { - type: 'document', - offset: this.offset, - start: [] - }; - if (this.type === 'doc-start') - doc.start.push(this.sourceToken); - this.stack.push(doc); - return; - } - } - yield { - type: 'error', - offset: this.offset, - message: `Unexpected ${this.type} token in YAML stream`, - source: this.source - }; - } - *document(doc) { - if (doc.value) - return yield* this.lineEnd(doc); - switch (this.type) { - case 'doc-start': { - if 
(findNonEmptyIndex(doc.start) !== -1) { - yield* this.pop(); - yield* this.step(); - } - else - doc.start.push(this.sourceToken); - return; - } - case 'anchor': - case 'tag': - case 'space': - case 'comment': - case 'newline': - doc.start.push(this.sourceToken); - return; - } - const bv = this.startBlockValue(doc); - if (bv) - this.stack.push(bv); - else { - yield { - type: 'error', - offset: this.offset, - message: `Unexpected ${this.type} token in YAML document`, - source: this.source - }; - } - } - *scalar(scalar) { - if (this.type === 'map-value-ind') { - const prev = getPrevProps(this.peek(2)); - const start = getFirstKeyStartProps(prev); - let sep; - if (scalar.end) { - sep = scalar.end; - sep.push(this.sourceToken); - delete scalar.end; - } - else - sep = [this.sourceToken]; - const map = { - type: 'block-map', - offset: scalar.offset, - indent: scalar.indent, - items: [{ start, key: scalar, sep }] - }; - this.onKeyLine = true; - this.stack[this.stack.length - 1] = map; - } - else - yield* this.lineEnd(scalar); - } - *blockScalar(scalar) { - switch (this.type) { - case 'space': - case 'comment': - case 'newline': - scalar.props.push(this.sourceToken); - return; - case 'scalar': - scalar.source = this.source; - // block-scalar source includes trailing newline - this.atNewLine = true; - this.indent = 0; - if (this.onNewLine) { - let nl = this.source.indexOf('\n') + 1; - while (nl !== 0) { - this.onNewLine(this.offset + nl); - nl = this.source.indexOf('\n', nl) + 1; - } - } - yield* this.pop(); - break; - /* istanbul ignore next should not happen */ - default: - yield* this.pop(); - yield* this.step(); - } - } - *blockMap(map) { - const it = map.items[map.items.length - 1]; - // it.sep is true-ish if pair already has key or : separator - switch (this.type) { - case 'newline': - this.onKeyLine = false; - if (it.value) { - const end = 'end' in it.value ? it.value.end : undefined; - const last = Array.isArray(end) ? 
end[end.length - 1] : undefined; - if (last?.type === 'comment') - end?.push(this.sourceToken); - else - map.items.push({ start: [this.sourceToken] }); - } - else if (it.sep) { - it.sep.push(this.sourceToken); - } - else { - it.start.push(this.sourceToken); - } - return; - case 'space': - case 'comment': - if (it.value) { - map.items.push({ start: [this.sourceToken] }); - } - else if (it.sep) { - it.sep.push(this.sourceToken); - } - else { - if (this.atIndentedComment(it.start, map.indent)) { - const prev = map.items[map.items.length - 2]; - const end = prev?.value?.end; - if (Array.isArray(end)) { - Array.prototype.push.apply(end, it.start); - end.push(this.sourceToken); - map.items.pop(); - return; - } - } - it.start.push(this.sourceToken); - } - return; - } - if (this.indent >= map.indent) { - const atNextItem = !this.onKeyLine && this.indent === map.indent && it.sep; - // For empty nodes, assign newline-separated not indented empty tokens to following node - let start = []; - if (atNextItem && it.sep && !it.value) { - const nl = []; - for (let i = 0; i < it.sep.length; ++i) { - const st = it.sep[i]; - switch (st.type) { - case 'newline': - nl.push(i); - break; - case 'space': - break; - case 'comment': - if (st.indent > map.indent) - nl.length = 0; - break; - default: - nl.length = 0; - } - } - if (nl.length >= 2) - start = it.sep.splice(nl[1]); - } - switch (this.type) { - case 'anchor': - case 'tag': - if (atNextItem || it.value) { - start.push(this.sourceToken); - map.items.push({ start }); - this.onKeyLine = true; - } - else if (it.sep) { - it.sep.push(this.sourceToken); - } - else { - it.start.push(this.sourceToken); - } - return; - case 'explicit-key-ind': - if (!it.sep && !includesToken(it.start, 'explicit-key-ind')) { - it.start.push(this.sourceToken); - } - else if (atNextItem || it.value) { - start.push(this.sourceToken); - map.items.push({ start }); - } - else { - this.stack.push({ - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start: [this.sourceToken] }] - }); - } - this.onKeyLine = true; - return; - case 'map-value-ind': - if (includesToken(it.start, 'explicit-key-ind')) { - if (!it.sep) { - if (includesToken(it.start, 'newline')) { - Object.assign(it, { key: null, sep: [this.sourceToken] }); - } - else { - const start = getFirstKeyStartProps(it.start); - this.stack.push({ - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start, key: null, sep: [this.sourceToken] }] - }); - } - } - else if (it.value) { - map.items.push({ start: [], key: null, sep: [this.sourceToken] }); - } - else if (includesToken(it.sep, 'map-value-ind')) { - this.stack.push({ - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start, key: null, sep: [this.sourceToken] }] - }); - } - else if (isFlowToken(it.key) && - !includesToken(it.sep, 'newline')) { - const start = getFirstKeyStartProps(it.start); - const key = it.key; - const sep = it.sep; - sep.push(this.sourceToken); - // @ts-expect-error type guard is wrong here - delete it.key, delete it.sep; - this.stack.push({ - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start, key, sep }] - }); - } - else if (start.length > 0) { - // Not actually at next item - it.sep = it.sep.concat(start, this.sourceToken); - } - else { - it.sep.push(this.sourceToken); - } - } - else { - if (!it.sep) { - Object.assign(it, { key: null, sep: [this.sourceToken] }); - } - else if (it.value || atNextItem) { - map.items.push({ start, key: null, sep: 
[this.sourceToken] }); - } - else if (includesToken(it.sep, 'map-value-ind')) { - this.stack.push({ - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start: [], key: null, sep: [this.sourceToken] }] - }); - } - else { - it.sep.push(this.sourceToken); - } - } - this.onKeyLine = true; - return; - case 'alias': - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': { - const fs = this.flowScalar(this.type); - if (atNextItem || it.value) { - map.items.push({ start, key: fs, sep: [] }); - this.onKeyLine = true; - } - else if (it.sep) { - this.stack.push(fs); - } - else { - Object.assign(it, { key: fs, sep: [] }); - this.onKeyLine = true; - } - return; - } - default: { - const bv = this.startBlockValue(map); - if (bv) { - if (atNextItem && - bv.type !== 'block-seq' && - includesToken(it.start, 'explicit-key-ind')) { - map.items.push({ start }); - } - this.stack.push(bv); - return; - } - } - } - } - yield* this.pop(); - yield* this.step(); - } - *blockSequence(seq) { - const it = seq.items[seq.items.length - 1]; - switch (this.type) { - case 'newline': - if (it.value) { - const end = 'end' in it.value ? it.value.end : undefined; - const last = Array.isArray(end) ? end[end.length - 1] : undefined; - if (last?.type === 'comment') - end?.push(this.sourceToken); - else - seq.items.push({ start: [this.sourceToken] }); - } - else - it.start.push(this.sourceToken); - return; - case 'space': - case 'comment': - if (it.value) - seq.items.push({ start: [this.sourceToken] }); - else { - if (this.atIndentedComment(it.start, seq.indent)) { - const prev = seq.items[seq.items.length - 2]; - const end = prev?.value?.end; - if (Array.isArray(end)) { - Array.prototype.push.apply(end, it.start); - end.push(this.sourceToken); - seq.items.pop(); - return; - } - } - it.start.push(this.sourceToken); - } - return; - case 'anchor': - case 'tag': - if (it.value || this.indent <= seq.indent) - break; - it.start.push(this.sourceToken); - return; - case 'seq-item-ind': - if (this.indent !== seq.indent) - break; - if (it.value || includesToken(it.start, 'seq-item-ind')) - seq.items.push({ start: [this.sourceToken] }); - else - it.start.push(this.sourceToken); - return; - } - if (this.indent > seq.indent) { - const bv = this.startBlockValue(seq); - if (bv) { - this.stack.push(bv); - return; - } - } - yield* this.pop(); - yield* this.step(); - } - *flowCollection(fc) { - const it = fc.items[fc.items.length - 1]; - if (this.type === 'flow-error-end') { - let top; - do { - yield* this.pop(); - top = this.peek(1); - } while (top && top.type === 'flow-collection'); - } - else if (fc.end.length === 0) { - switch (this.type) { - case 'comma': - case 'explicit-key-ind': - if (!it || it.sep) - fc.items.push({ start: [this.sourceToken] }); - else - it.start.push(this.sourceToken); - return; - case 'map-value-ind': - if (!it || it.value) - fc.items.push({ start: [], key: null, sep: [this.sourceToken] }); - else if (it.sep) - it.sep.push(this.sourceToken); - else - Object.assign(it, { key: null, sep: [this.sourceToken] }); - return; - case 'space': - case 'comment': - case 'newline': - case 'anchor': - case 'tag': - if (!it || it.value) - fc.items.push({ start: [this.sourceToken] }); - else if (it.sep) - it.sep.push(this.sourceToken); - else - it.start.push(this.sourceToken); - return; - case 'alias': - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': { - const fs = this.flowScalar(this.type); - if (!it || it.value) - fc.items.push({ start: [], key: fs, sep: 
[] }); - else if (it.sep) - this.stack.push(fs); - else - Object.assign(it, { key: fs, sep: [] }); - return; - } - case 'flow-map-end': - case 'flow-seq-end': - fc.end.push(this.sourceToken); - return; - } - const bv = this.startBlockValue(fc); - /* istanbul ignore else should not happen */ - if (bv) - this.stack.push(bv); - else { - yield* this.pop(); - yield* this.step(); - } - } - else { - const parent = this.peek(2); - if (parent.type === 'block-map' && - ((this.type === 'map-value-ind' && parent.indent === fc.indent) || - (this.type === 'newline' && - !parent.items[parent.items.length - 1].sep))) { - yield* this.pop(); - yield* this.step(); - } - else if (this.type === 'map-value-ind' && - parent.type !== 'flow-collection') { - const prev = getPrevProps(parent); - const start = getFirstKeyStartProps(prev); - fixFlowSeqItems(fc); - const sep = fc.end.splice(1, fc.end.length); - sep.push(this.sourceToken); - const map = { - type: 'block-map', - offset: fc.offset, - indent: fc.indent, - items: [{ start, key: fc, sep }] - }; - this.onKeyLine = true; - this.stack[this.stack.length - 1] = map; - } - else { - yield* this.lineEnd(fc); - } - } - } - flowScalar(type) { - if (this.onNewLine) { - let nl = this.source.indexOf('\n') + 1; - while (nl !== 0) { - this.onNewLine(this.offset + nl); - nl = this.source.indexOf('\n', nl) + 1; - } - } - return { - type, - offset: this.offset, - indent: this.indent, - source: this.source - }; - } - startBlockValue(parent) { - switch (this.type) { - case 'alias': - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': - return this.flowScalar(this.type); - case 'block-scalar-header': - return { - type: 'block-scalar', - offset: this.offset, - indent: this.indent, - props: [this.sourceToken], - source: '' - }; - case 'flow-map-start': - case 'flow-seq-start': - return { - type: 'flow-collection', - offset: this.offset, - indent: this.indent, - start: this.sourceToken, - items: [], - end: [] - }; - case 'seq-item-ind': - return { - type: 'block-seq', - offset: this.offset, - indent: this.indent, - items: [{ start: [this.sourceToken] }] - }; - case 'explicit-key-ind': { - this.onKeyLine = true; - const prev = getPrevProps(parent); - const start = getFirstKeyStartProps(prev); - start.push(this.sourceToken); - return { - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start }] - }; - } - case 'map-value-ind': { - this.onKeyLine = true; - const prev = getPrevProps(parent); - const start = getFirstKeyStartProps(prev); - return { - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start, key: null, sep: [this.sourceToken] }] - }; - } - } - return null; - } - atIndentedComment(start, indent) { - if (this.type !== 'comment') - return false; - if (this.indent <= indent) - return false; - return start.every(st => st.type === 'newline' || st.type === 'space'); - } - *documentEnd(docEnd) { - if (this.type !== 'doc-mode') { - if (docEnd.end) - docEnd.end.push(this.sourceToken); - else - docEnd.end = [this.sourceToken]; - if (this.type === 'newline') - yield* this.pop(); - } - } - *lineEnd(token) { - switch (this.type) { - case 'comma': - case 'doc-start': - case 'doc-end': - case 'flow-seq-end': - case 'flow-map-end': - case 'map-value-ind': - yield* this.pop(); - yield* this.step(); - break; - case 'newline': - this.onKeyLine = false; - // fallthrough - case 'space': - case 'comment': - default: - // all other values are errors - if (token.end) - token.end.push(this.sourceToken); - else - 
token.end = [this.sourceToken]; - if (this.type === 'newline') - yield* this.pop(); - } - } -} - -exports.Parser = Parser; - - -/***/ }), - -/***/ 9191: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var composer = __nccwpck_require__(9386); -var Document = __nccwpck_require__(1785); -var errors = __nccwpck_require__(3305); -var log = __nccwpck_require__(469); -var lineCounter = __nccwpck_require__(5748); -var parser = __nccwpck_require__(6466); - -function parseOptions(options) { - const prettyErrors = options.prettyErrors !== false; - const lineCounter$1 = options.lineCounter || (prettyErrors && new lineCounter.LineCounter()) || null; - return { lineCounter: lineCounter$1, prettyErrors }; -} -/** - * Parse the input as a stream of YAML documents. - * - * Documents should be separated from each other by `...` or `---` marker lines. - * - * @returns If an empty `docs` array is returned, it will be of type - * EmptyStream and contain additional stream information. In - * TypeScript, you should use `'empty' in docs` as a type guard for it. - */ -function parseAllDocuments(source, options = {}) { - const { lineCounter, prettyErrors } = parseOptions(options); - const parser$1 = new parser.Parser(lineCounter?.addNewLine); - const composer$1 = new composer.Composer(options); - const docs = Array.from(composer$1.compose(parser$1.parse(source))); - if (prettyErrors && lineCounter) - for (const doc of docs) { - doc.errors.forEach(errors.prettifyError(source, lineCounter)); - doc.warnings.forEach(errors.prettifyError(source, lineCounter)); - } - if (docs.length > 0) - return docs; - return Object.assign([], { empty: true }, composer$1.streamInfo()); -} -/** Parse an input string into a single YAML.Document */ -function parseDocument(source, options = {}) { - const { lineCounter, prettyErrors } = parseOptions(options); - const parser$1 = new parser.Parser(lineCounter?.addNewLine); - const composer$1 = new composer.Composer(options); - // `doc` is always set by compose.end(true) at the very latest - let doc = null; - for (const _doc of composer$1.compose(parser$1.parse(source), true, source.length)) { - if (!doc) - doc = _doc; - else if (doc.options.logLevel !== 'silent') { - doc.errors.push(new errors.YAMLParseError(_doc.range.slice(0, 2), 'MULTIPLE_DOCS', 'Source contains multiple documents; please use YAML.parseAllDocuments()')); - break; - } - } - if (prettyErrors && lineCounter) { - doc.errors.forEach(errors.prettifyError(source, lineCounter)); - doc.warnings.forEach(errors.prettifyError(source, lineCounter)); - } - return doc; -} -function parse(src, reviver, options) { - let _reviver = undefined; - if (typeof reviver === 'function') { - _reviver = reviver; - } - else if (options === undefined && reviver && typeof reviver === 'object') { - options = reviver; - } - const doc = parseDocument(src, options); - if (!doc) - return null; - doc.warnings.forEach(warning => log.warn(doc.options.logLevel, warning)); - if (doc.errors.length > 0) { - if (doc.options.logLevel !== 'silent') - throw doc.errors[0]; - else - doc.errors = []; - } - return doc.toJS(Object.assign({ reviver: _reviver }, options)); -} -function stringify(value, replacer, options) { - let _replacer = null; - if (typeof replacer === 'function' || Array.isArray(replacer)) { - _replacer = replacer; - } - else if (options === undefined && replacer) { - options = replacer; - } - if (typeof options === 'string') - options = options.length; - if (typeof options === 'number') { - const indent = 
Math.round(options); - options = indent < 1 ? undefined : indent > 8 ? { indent: 8 } : { indent }; - } - if (value === undefined) { - const { keepUndefined } = options ?? replacer ?? {}; - if (!keepUndefined) - return undefined; - } - return new Document.Document(value, _replacer, options).toString(options); -} - -exports.parse = parse; -exports.parseAllDocuments = parseAllDocuments; -exports.parseDocument = parseDocument; -exports.stringify = stringify; - - -/***/ }), - -/***/ 7425: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); -var map = __nccwpck_require__(1400); -var seq = __nccwpck_require__(6698); -var string = __nccwpck_require__(1576); -var tags = __nccwpck_require__(2102); - -const sortMapEntriesByKey = (a, b) => a.key < b.key ? -1 : a.key > b.key ? 1 : 0; -class Schema { - constructor({ compat, customTags, merge, resolveKnownTags, schema, sortMapEntries, toStringDefaults }) { - this.compat = Array.isArray(compat) - ? tags.getTags(compat, 'compat') - : compat - ? tags.getTags(null, compat) - : null; - this.merge = !!merge; - this.name = (typeof schema === 'string' && schema) || 'core'; - this.knownTags = resolveKnownTags ? tags.coreKnownTags : {}; - this.tags = tags.getTags(customTags, this.name); - this.toStringOptions = toStringDefaults ?? null; - Object.defineProperty(this, Node.MAP, { value: map.map }); - Object.defineProperty(this, Node.SCALAR, { value: string.string }); - Object.defineProperty(this, Node.SEQ, { value: seq.seq }); - // Used by createMap() - this.sortMapEntries = - typeof sortMapEntries === 'function' - ? sortMapEntries - : sortMapEntries === true - ? sortMapEntriesByKey - : null; - } - clone() { - const copy = Object.create(Schema.prototype, Object.getOwnPropertyDescriptors(this)); - copy.tags = this.tags.slice(); - return copy; - } -} - -exports.Schema = Schema; - - -/***/ }), - -/***/ 1400: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); -var Pair = __nccwpck_require__(3497); -var YAMLMap = __nccwpck_require__(6761); - -function createMap(schema, obj, ctx) { - const { keepUndefined, replacer } = ctx; - const map = new YAMLMap.YAMLMap(schema); - const add = (key, value) => { - if (typeof replacer === 'function') - value = replacer.call(obj, key, value); - else if (Array.isArray(replacer) && !replacer.includes(key)) - return; - if (value !== undefined || keepUndefined) - map.items.push(Pair.createPair(key, value, ctx)); - }; - if (obj instanceof Map) { - for (const [key, value] of obj) - add(key, value); - } - else if (obj && typeof obj === 'object') { - for (const key of Object.keys(obj)) - add(key, obj[key]); - } - if (typeof schema.sortMapEntries === 'function') { - map.items.sort(schema.sortMapEntries); - } - return map; -} -const map = { - collection: 'map', - createNode: createMap, - default: true, - nodeClass: YAMLMap.YAMLMap, - tag: 'tag:yaml.org,2002:map', - resolve(map, onError) { - if (!Node.isMap(map)) - onError('Expected a mapping for this tag'); - return map; - } -}; - -exports.map = map; - - -/***/ }), - -/***/ 7556: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Scalar = __nccwpck_require__(8160); - -const nullTag = { - identify: value => value == null, - createNode: () => new Scalar.Scalar(null), - default: true, - tag: 'tag:yaml.org,2002:null', - test: /^(?:~|[Nn]ull|NULL)?$/, - resolve: () => new Scalar.Scalar(null), - stringify: ({ 
source }, ctx) => typeof source === 'string' && nullTag.test.test(source) - ? source - : ctx.options.nullStr -}; - -exports.nullTag = nullTag; - - -/***/ }), - -/***/ 6698: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var createNode = __nccwpck_require__(9807); -var Node = __nccwpck_require__(9752); -var YAMLSeq = __nccwpck_require__(3810); - -function createSeq(schema, obj, ctx) { - const { replacer } = ctx; - const seq = new YAMLSeq.YAMLSeq(schema); - if (obj && Symbol.iterator in Object(obj)) { - let i = 0; - for (let it of obj) { - if (typeof replacer === 'function') { - const key = obj instanceof Set ? it : String(i++); - it = replacer.call(obj, key, it); - } - seq.items.push(createNode.createNode(it, undefined, ctx)); - } - } - return seq; -} -const seq = { - collection: 'seq', - createNode: createSeq, - default: true, - nodeClass: YAMLSeq.YAMLSeq, - tag: 'tag:yaml.org,2002:seq', - resolve(seq, onError) { - if (!Node.isSeq(seq)) - onError('Expected a sequence for this tag'); - return seq; - } -}; - -exports.seq = seq; - - -/***/ }), - -/***/ 1576: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var stringifyString = __nccwpck_require__(6084); - -const string = { - identify: value => typeof value === 'string', - default: true, - tag: 'tag:yaml.org,2002:str', - resolve: str => str, - stringify(item, ctx, onComment, onChompKeep) { - ctx = Object.assign({ actualString: true }, ctx); - return stringifyString.stringifyString(item, ctx, onComment, onChompKeep); - } -}; - -exports.string = string; - - -/***/ }), - -/***/ 7369: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Scalar = __nccwpck_require__(8160); - -const boolTag = { - identify: value => typeof value === 'boolean', - default: true, - tag: 'tag:yaml.org,2002:bool', - test: /^(?:[Tt]rue|TRUE|[Ff]alse|FALSE)$/, - resolve: str => new Scalar.Scalar(str[0] === 't' || str[0] === 'T'), - stringify({ source, value }, ctx) { - if (source && boolTag.test.test(source)) { - const sv = source[0] === 't' || source[0] === 'T'; - if (value === sv) - return source; - } - return value ? ctx.options.trueStr : ctx.options.falseStr; - } -}; - -exports.boolTag = boolTag; - - -/***/ }), - -/***/ 616: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Scalar = __nccwpck_require__(8160); -var stringifyNumber = __nccwpck_require__(780); - -const floatNaN = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - test: /^(?:[-+]?\.(?:inf|Inf|INF|nan|NaN|NAN))$/, - resolve: str => str.slice(-3).toLowerCase() === 'nan' - ? NaN - : str[0] === '-' - ? Number.NEGATIVE_INFINITY - : Number.POSITIVE_INFINITY, - stringify: stringifyNumber.stringifyNumber -}; -const floatExp = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - format: 'EXP', - test: /^[-+]?(?:\.[0-9]+|[0-9]+(?:\.[0-9]*)?)[eE][-+]?[0-9]+$/, - resolve: str => parseFloat(str), - stringify(node) { - const num = Number(node.value); - return isFinite(num) ? 
num.toExponential() : stringifyNumber.stringifyNumber(node); - } -}; -const float = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - test: /^[-+]?(?:\.[0-9]+|[0-9]+\.[0-9]*)$/, - resolve(str) { - const node = new Scalar.Scalar(parseFloat(str)); - const dot = str.indexOf('.'); - if (dot !== -1 && str[str.length - 1] === '0') - node.minFractionDigits = str.length - dot - 1; - return node; - }, - stringify: stringifyNumber.stringifyNumber -}; - -exports.float = float; -exports.floatExp = floatExp; -exports.floatNaN = floatNaN; - - -/***/ }), - -/***/ 2521: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var stringifyNumber = __nccwpck_require__(780); - -const intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value); -const intResolve = (str, offset, radix, { intAsBigInt }) => (intAsBigInt ? BigInt(str) : parseInt(str.substring(offset), radix)); -function intStringify(node, radix, prefix) { - const { value } = node; - if (intIdentify(value) && value >= 0) - return prefix + value.toString(radix); - return stringifyNumber.stringifyNumber(node); -} -const intOct = { - identify: value => intIdentify(value) && value >= 0, - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'OCT', - test: /^0o[0-7]+$/, - resolve: (str, _onError, opt) => intResolve(str, 2, 8, opt), - stringify: node => intStringify(node, 8, '0o') -}; -const int = { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - test: /^[-+]?[0-9]+$/, - resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt), - stringify: stringifyNumber.stringifyNumber -}; -const intHex = { - identify: value => intIdentify(value) && value >= 0, - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'HEX', - test: /^0x[0-9a-fA-F]+$/, - resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt), - stringify: node => intStringify(node, 16, '0x') -}; - -exports.int = int; -exports.intHex = intHex; -exports.intOct = intOct; - - -/***/ }), - -/***/ 7504: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var map = __nccwpck_require__(1400); -var _null = __nccwpck_require__(7556); -var seq = __nccwpck_require__(6698); -var string = __nccwpck_require__(1576); -var bool = __nccwpck_require__(7369); -var float = __nccwpck_require__(616); -var int = __nccwpck_require__(2521); - -const schema = [ - map.map, - seq.seq, - string.string, - _null.nullTag, - bool.boolTag, - int.intOct, - int.int, - int.intHex, - float.floatNaN, - float.floatExp, - float.float -]; - -exports.schema = schema; - - -/***/ }), - -/***/ 8553: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Scalar = __nccwpck_require__(8160); -var map = __nccwpck_require__(1400); -var seq = __nccwpck_require__(6698); - -function intIdentify(value) { - return typeof value === 'bigint' || Number.isInteger(value); -} -const stringifyJSON = ({ value }) => JSON.stringify(value); -const jsonScalars = [ - { - identify: value => typeof value === 'string', - default: true, - tag: 'tag:yaml.org,2002:str', - resolve: str => str, - stringify: stringifyJSON - }, - { - identify: value => value == null, - createNode: () => new Scalar.Scalar(null), - default: true, - tag: 'tag:yaml.org,2002:null', - test: /^null$/, - resolve: () => null, - stringify: stringifyJSON - }, - { - identify: value => typeof value === 'boolean', - default: true, - tag: 'tag:yaml.org,2002:bool', - test: 
/^true|false$/, - resolve: str => str === 'true', - stringify: stringifyJSON - }, - { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - test: /^-?(?:0|[1-9][0-9]*)$/, - resolve: (str, _onError, { intAsBigInt }) => intAsBigInt ? BigInt(str) : parseInt(str, 10), - stringify: ({ value }) => intIdentify(value) ? value.toString() : JSON.stringify(value) - }, - { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - test: /^-?(?:0|[1-9][0-9]*)(?:\.[0-9]*)?(?:[eE][-+]?[0-9]+)?$/, - resolve: str => parseFloat(str), - stringify: stringifyJSON - } -]; -const jsonError = { - default: true, - tag: '', - test: /^/, - resolve(str, onError) { - onError(`Unresolved plain scalar ${JSON.stringify(str)}`); - return str; - } -}; -const schema = [map.map, seq.seq].concat(jsonScalars, jsonError); - -exports.schema = schema; - - -/***/ }), - -/***/ 2102: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var map = __nccwpck_require__(1400); -var _null = __nccwpck_require__(7556); -var seq = __nccwpck_require__(6698); -var string = __nccwpck_require__(1576); -var bool = __nccwpck_require__(7369); -var float = __nccwpck_require__(616); -var int = __nccwpck_require__(2521); -var schema = __nccwpck_require__(7504); -var schema$1 = __nccwpck_require__(8553); -var binary = __nccwpck_require__(5758); -var omap = __nccwpck_require__(4486); -var pairs = __nccwpck_require__(5169); -var schema$2 = __nccwpck_require__(8536); -var set = __nccwpck_require__(9549); -var timestamp = __nccwpck_require__(7717); - -const schemas = new Map([ - ['core', schema.schema], - ['failsafe', [map.map, seq.seq, string.string]], - ['json', schema$1.schema], - ['yaml11', schema$2.schema], - ['yaml-1.1', schema$2.schema] -]); -const tagsByName = { - binary: binary.binary, - bool: bool.boolTag, - float: float.float, - floatExp: float.floatExp, - floatNaN: float.floatNaN, - floatTime: timestamp.floatTime, - int: int.int, - intHex: int.intHex, - intOct: int.intOct, - intTime: timestamp.intTime, - map: map.map, - null: _null.nullTag, - omap: omap.omap, - pairs: pairs.pairs, - seq: seq.seq, - set: set.set, - timestamp: timestamp.timestamp -}; -const coreKnownTags = { - 'tag:yaml.org,2002:binary': binary.binary, - 'tag:yaml.org,2002:omap': omap.omap, - 'tag:yaml.org,2002:pairs': pairs.pairs, - 'tag:yaml.org,2002:set': set.set, - 'tag:yaml.org,2002:timestamp': timestamp.timestamp -}; -function getTags(customTags, schemaName) { - let tags = schemas.get(schemaName); - if (!tags) { - if (Array.isArray(customTags)) - tags = []; - else { - const keys = Array.from(schemas.keys()) - .filter(key => key !== 'yaml11') - .map(key => JSON.stringify(key)) - .join(', '); - throw new Error(`Unknown schema "${schemaName}"; use one of ${keys} or define customTags array`); - } - } - if (Array.isArray(customTags)) { - for (const tag of customTags) - tags = tags.concat(tag); - } - else if (typeof customTags === 'function') { - tags = customTags(tags.slice()); - } - return tags.map(tag => { - if (typeof tag !== 'string') - return tag; - const tagObj = tagsByName[tag]; - if (tagObj) - return tagObj; - const keys = Object.keys(tagsByName) - .map(key => JSON.stringify(key)) - .join(', '); - throw new Error(`Unknown custom tag "${tag}"; use one of ${keys}`); - }); -} - -exports.coreKnownTags = coreKnownTags; -exports.getTags = getTags; - - -/***/ }), - -/***/ 5758: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Scalar 
= __nccwpck_require__(8160); -var stringifyString = __nccwpck_require__(6084); - -const binary = { - identify: value => value instanceof Uint8Array, - default: false, - tag: 'tag:yaml.org,2002:binary', - /** - * Returns a Buffer in node and an Uint8Array in browsers - * - * To use the resulting buffer as an image, you'll want to do something like: - * - * const blob = new Blob([buffer], { type: 'image/jpeg' }) - * document.querySelector('#photo').src = URL.createObjectURL(blob) - */ - resolve(src, onError) { - if (typeof Buffer === 'function') { - return Buffer.from(src, 'base64'); - } - else if (typeof atob === 'function') { - // On IE 11, atob() can't handle newlines - const str = atob(src.replace(/[\n\r]/g, '')); - const buffer = new Uint8Array(str.length); - for (let i = 0; i < str.length; ++i) - buffer[i] = str.charCodeAt(i); - return buffer; - } - else { - onError('This environment does not support reading binary tags; either Buffer or atob is required'); - return src; - } - }, - stringify({ comment, type, value }, ctx, onComment, onChompKeep) { - const buf = value; // checked earlier by binary.identify() - let str; - if (typeof Buffer === 'function') { - str = - buf instanceof Buffer - ? buf.toString('base64') - : Buffer.from(buf.buffer).toString('base64'); - } - else if (typeof btoa === 'function') { - let s = ''; - for (let i = 0; i < buf.length; ++i) - s += String.fromCharCode(buf[i]); - str = btoa(s); - } - else { - throw new Error('This environment does not support writing binary tags; either Buffer or btoa is required'); - } - if (!type) - type = Scalar.Scalar.BLOCK_LITERAL; - if (type !== Scalar.Scalar.QUOTE_DOUBLE) { - const lineWidth = Math.max(ctx.options.lineWidth - ctx.indent.length, ctx.options.minContentWidth); - const n = Math.ceil(str.length / lineWidth); - const lines = new Array(n); - for (let i = 0, o = 0; i < n; ++i, o += lineWidth) { - lines[i] = str.substr(o, lineWidth); - } - str = lines.join(type === Scalar.Scalar.BLOCK_LITERAL ? '\n' : ' '); - } - return stringifyString.stringifyString({ comment, type, value: str }, ctx, onComment, onChompKeep); - } -}; - -exports.binary = binary; - - -/***/ }), - -/***/ 2684: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Scalar = __nccwpck_require__(8160); - -function boolStringify({ value, source }, ctx) { - const boolObj = value ? trueTag : falseTag; - if (source && boolObj.test.test(source)) - return source; - return value ? ctx.options.trueStr : ctx.options.falseStr; -} -const trueTag = { - identify: value => value === true, - default: true, - tag: 'tag:yaml.org,2002:bool', - test: /^(?:Y|y|[Yy]es|YES|[Tt]rue|TRUE|[Oo]n|ON)$/, - resolve: () => new Scalar.Scalar(true), - stringify: boolStringify -}; -const falseTag = { - identify: value => value === false, - default: true, - tag: 'tag:yaml.org,2002:bool', - test: /^(?:N|n|[Nn]o|NO|[Ff]alse|FALSE|[Oo]ff|OFF)$/i, - resolve: () => new Scalar.Scalar(false), - stringify: boolStringify -}; - -exports.falseTag = falseTag; -exports.trueTag = trueTag; - - -/***/ }), - -/***/ 8090: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Scalar = __nccwpck_require__(8160); -var stringifyNumber = __nccwpck_require__(780); - -const floatNaN = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - test: /^[-+]?\.(?:inf|Inf|INF|nan|NaN|NAN)$/, - resolve: (str) => str.slice(-3).toLowerCase() === 'nan' - ? NaN - : str[0] === '-' - ? 
Number.NEGATIVE_INFINITY - : Number.POSITIVE_INFINITY, - stringify: stringifyNumber.stringifyNumber -}; -const floatExp = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - format: 'EXP', - test: /^[-+]?(?:[0-9][0-9_]*)?(?:\.[0-9_]*)?[eE][-+]?[0-9]+$/, - resolve: (str) => parseFloat(str.replace(/_/g, '')), - stringify(node) { - const num = Number(node.value); - return isFinite(num) ? num.toExponential() : stringifyNumber.stringifyNumber(node); - } -}; -const float = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - test: /^[-+]?(?:[0-9][0-9_]*)?\.[0-9_]*$/, - resolve(str) { - const node = new Scalar.Scalar(parseFloat(str.replace(/_/g, ''))); - const dot = str.indexOf('.'); - if (dot !== -1) { - const f = str.substring(dot + 1).replace(/_/g, ''); - if (f[f.length - 1] === '0') - node.minFractionDigits = f.length; - } - return node; - }, - stringify: stringifyNumber.stringifyNumber -}; - -exports.float = float; -exports.floatExp = floatExp; -exports.floatNaN = floatNaN; - - -/***/ }), - -/***/ 7254: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var stringifyNumber = __nccwpck_require__(780); - -const intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value); -function intResolve(str, offset, radix, { intAsBigInt }) { - const sign = str[0]; - if (sign === '-' || sign === '+') - offset += 1; - str = str.substring(offset).replace(/_/g, ''); - if (intAsBigInt) { - switch (radix) { - case 2: - str = `0b${str}`; - break; - case 8: - str = `0o${str}`; - break; - case 16: - str = `0x${str}`; - break; - } - const n = BigInt(str); - return sign === '-' ? BigInt(-1) * n : n; - } - const n = parseInt(str, radix); - return sign === '-' ? -1 * n : n; -} -function intStringify(node, radix, prefix) { - const { value } = node; - if (intIdentify(value)) { - const str = value.toString(radix); - return value < 0 ? 
'-' + prefix + str.substr(1) : prefix + str; - } - return stringifyNumber.stringifyNumber(node); -} -const intBin = { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'BIN', - test: /^[-+]?0b[0-1_]+$/, - resolve: (str, _onError, opt) => intResolve(str, 2, 2, opt), - stringify: node => intStringify(node, 2, '0b') -}; -const intOct = { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'OCT', - test: /^[-+]?0[0-7_]+$/, - resolve: (str, _onError, opt) => intResolve(str, 1, 8, opt), - stringify: node => intStringify(node, 8, '0') -}; -const int = { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - test: /^[-+]?[0-9][0-9_]*$/, - resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt), - stringify: stringifyNumber.stringifyNumber -}; -const intHex = { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'HEX', - test: /^[-+]?0x[0-9a-fA-F_]+$/, - resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt), - stringify: node => intStringify(node, 16, '0x') -}; - -exports.int = int; -exports.intBin = intBin; -exports.intHex = intHex; -exports.intOct = intOct; - - -/***/ }), - -/***/ 4486: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var YAMLSeq = __nccwpck_require__(3810); -var toJS = __nccwpck_require__(8842); -var Node = __nccwpck_require__(9752); -var YAMLMap = __nccwpck_require__(6761); -var pairs = __nccwpck_require__(5169); - -class YAMLOMap extends YAMLSeq.YAMLSeq { - constructor() { - super(); - this.add = YAMLMap.YAMLMap.prototype.add.bind(this); - this.delete = YAMLMap.YAMLMap.prototype.delete.bind(this); - this.get = YAMLMap.YAMLMap.prototype.get.bind(this); - this.has = YAMLMap.YAMLMap.prototype.has.bind(this); - this.set = YAMLMap.YAMLMap.prototype.set.bind(this); - this.tag = YAMLOMap.tag; - } - /** - * If `ctx` is given, the return type is actually `Map`, - * but TypeScript won't allow widening the signature of a child method. 
- */ - toJSON(_, ctx) { - if (!ctx) - return super.toJSON(_); - const map = new Map(); - if (ctx?.onCreate) - ctx.onCreate(map); - for (const pair of this.items) { - let key, value; - if (Node.isPair(pair)) { - key = toJS.toJS(pair.key, '', ctx); - value = toJS.toJS(pair.value, key, ctx); - } - else { - key = toJS.toJS(pair, '', ctx); - } - if (map.has(key)) - throw new Error('Ordered maps must not include duplicate keys'); - map.set(key, value); - } - return map; - } -} -YAMLOMap.tag = 'tag:yaml.org,2002:omap'; -const omap = { - collection: 'seq', - identify: value => value instanceof Map, - nodeClass: YAMLOMap, - default: false, - tag: 'tag:yaml.org,2002:omap', - resolve(seq, onError) { - const pairs$1 = pairs.resolvePairs(seq, onError); - const seenKeys = []; - for (const { key } of pairs$1.items) { - if (Node.isScalar(key)) { - if (seenKeys.includes(key.value)) { - onError(`Ordered maps must not include duplicate keys: ${key.value}`); - } - else { - seenKeys.push(key.value); - } - } - } - return Object.assign(new YAMLOMap(), pairs$1); - }, - createNode(schema, iterable, ctx) { - const pairs$1 = pairs.createPairs(schema, iterable, ctx); - const omap = new YAMLOMap(); - omap.items = pairs$1.items; - return omap; - } -}; - -exports.YAMLOMap = YAMLOMap; -exports.omap = omap; - - -/***/ }), - -/***/ 5169: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); -var Pair = __nccwpck_require__(3497); -var Scalar = __nccwpck_require__(8160); -var YAMLSeq = __nccwpck_require__(3810); - -function resolvePairs(seq, onError) { - if (Node.isSeq(seq)) { - for (let i = 0; i < seq.items.length; ++i) { - let item = seq.items[i]; - if (Node.isPair(item)) - continue; - else if (Node.isMap(item)) { - if (item.items.length > 1) - onError('Each pair must have its own sequence indicator'); - const pair = item.items[0] || new Pair.Pair(new Scalar.Scalar(null)); - if (item.commentBefore) - pair.key.commentBefore = pair.key.commentBefore - ? `${item.commentBefore}\n${pair.key.commentBefore}` - : item.commentBefore; - if (item.comment) { - const cn = pair.value ?? pair.key; - cn.comment = cn.comment - ? `${item.comment}\n${cn.comment}` - : item.comment; - } - item = pair; - } - seq.items[i] = Node.isPair(item) ? 
item : new Pair.Pair(item); - } - } - else - onError('Expected a sequence for this tag'); - return seq; -} -function createPairs(schema, iterable, ctx) { - const { replacer } = ctx; - const pairs = new YAMLSeq.YAMLSeq(schema); - pairs.tag = 'tag:yaml.org,2002:pairs'; - let i = 0; - if (iterable && Symbol.iterator in Object(iterable)) - for (let it of iterable) { - if (typeof replacer === 'function') - it = replacer.call(iterable, String(i++), it); - let key, value; - if (Array.isArray(it)) { - if (it.length === 2) { - key = it[0]; - value = it[1]; - } - else - throw new TypeError(`Expected [key, value] tuple: ${it}`); - } - else if (it && it instanceof Object) { - const keys = Object.keys(it); - if (keys.length === 1) { - key = keys[0]; - value = it[key]; - } - else - throw new TypeError(`Expected { key: value } tuple: ${it}`); - } - else { - key = it; - } - pairs.items.push(Pair.createPair(key, value, ctx)); - } - return pairs; -} -const pairs = { - collection: 'seq', - default: false, - tag: 'tag:yaml.org,2002:pairs', - resolve: resolvePairs, - createNode: createPairs -}; - -exports.createPairs = createPairs; -exports.pairs = pairs; -exports.resolvePairs = resolvePairs; - - -/***/ }), - -/***/ 8536: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var map = __nccwpck_require__(1400); -var _null = __nccwpck_require__(7556); -var seq = __nccwpck_require__(6698); -var string = __nccwpck_require__(1576); -var binary = __nccwpck_require__(5758); -var bool = __nccwpck_require__(2684); -var float = __nccwpck_require__(8090); -var int = __nccwpck_require__(7254); -var omap = __nccwpck_require__(4486); -var pairs = __nccwpck_require__(5169); -var set = __nccwpck_require__(9549); -var timestamp = __nccwpck_require__(7717); - -const schema = [ - map.map, - seq.seq, - string.string, - _null.nullTag, - bool.trueTag, - bool.falseTag, - int.intBin, - int.intOct, - int.int, - int.intHex, - float.floatNaN, - float.floatExp, - float.float, - binary.binary, - omap.omap, - pairs.pairs, - set.set, - timestamp.intTime, - timestamp.floatTime, - timestamp.timestamp -]; - -exports.schema = schema; - - -/***/ }), - -/***/ 9549: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); -var Pair = __nccwpck_require__(3497); -var YAMLMap = __nccwpck_require__(6761); - -class YAMLSet extends YAMLMap.YAMLMap { - constructor(schema) { - super(schema); - this.tag = YAMLSet.tag; - } - add(key) { - let pair; - if (Node.isPair(key)) - pair = key; - else if (typeof key === 'object' && - 'key' in key && - 'value' in key && - key.value === null) - pair = new Pair.Pair(key.key, null); - else - pair = new Pair.Pair(key, null); - const prev = YAMLMap.findPair(this.items, pair.key); - if (!prev) - this.items.push(pair); - } - /** - * If `keepPair` is `true`, returns the Pair matching `key`. - * Otherwise, returns the value of that Pair's key. - */ - get(key, keepPair) { - const pair = YAMLMap.findPair(this.items, key); - return !keepPair && Node.isPair(pair) - ? Node.isScalar(pair.key) - ? 
pair.key.value - : pair.key - : pair; - } - set(key, value) { - if (typeof value !== 'boolean') - throw new Error(`Expected boolean value for set(key, value) in a YAML set, not ${typeof value}`); - const prev = YAMLMap.findPair(this.items, key); - if (prev && !value) { - this.items.splice(this.items.indexOf(prev), 1); - } - else if (!prev && value) { - this.items.push(new Pair.Pair(key)); - } - } - toJSON(_, ctx) { - return super.toJSON(_, ctx, Set); - } - toString(ctx, onComment, onChompKeep) { - if (!ctx) - return JSON.stringify(this); - if (this.hasAllNullValues(true)) - return super.toString(Object.assign({}, ctx, { allNullValues: true }), onComment, onChompKeep); - else - throw new Error('Set items must all have null values'); - } -} -YAMLSet.tag = 'tag:yaml.org,2002:set'; -const set = { - collection: 'map', - identify: value => value instanceof Set, - nodeClass: YAMLSet, - default: false, - tag: 'tag:yaml.org,2002:set', - resolve(map, onError) { - if (Node.isMap(map)) { - if (map.hasAllNullValues(true)) - return Object.assign(new YAMLSet(), map); - else - onError('Set items must all have null values'); - } - else - onError('Expected a mapping for this tag'); - return map; - }, - createNode(schema, iterable, ctx) { - const { replacer } = ctx; - const set = new YAMLSet(schema); - if (iterable && Symbol.iterator in Object(iterable)) - for (let value of iterable) { - if (typeof replacer === 'function') - value = replacer.call(iterable, value, value); - set.items.push(Pair.createPair(value, null, ctx)); - } - return set; - } -}; - -exports.YAMLSet = YAMLSet; -exports.set = set; - - -/***/ }), - -/***/ 7717: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var stringifyNumber = __nccwpck_require__(780); - -/** Internal types handle bigint as number, because TS can't figure it out. */ -function parseSexagesimal(str, asBigInt) { - const sign = str[0]; - const parts = sign === '-' || sign === '+' ? str.substring(1) : str; - const num = (n) => asBigInt ? BigInt(n) : Number(n); - const res = parts - .replace(/_/g, '') - .split(':') - .reduce((res, p) => res * num(60) + num(p), num(0)); - return (sign === '-' ? num(-1) * res : res); -} -/** - * hhhh:mm:ss.sss - * - * Internal types handle bigint as number, because TS can't figure it out. - */ -function stringifySexagesimal(node) { - let { value } = node; - let num = (n) => n; - if (typeof value === 'bigint') - num = n => BigInt(n); - else if (isNaN(value) || !isFinite(value)) - return stringifyNumber.stringifyNumber(node); - let sign = ''; - if (value < 0) { - sign = '-'; - value *= num(-1); - } - const _60 = num(60); - const parts = [value % _60]; // seconds, including ms - if (value < 60) { - parts.unshift(0); // at least one : is required - } - else { - value = (value - parts[0]) / _60; - parts.unshift(value % _60); // minutes - if (value >= 60) { - value = (value - parts[0]) / _60; - parts.unshift(value); // hours - } - } - return (sign + - parts - .map(n => (n < 10 ? 
'0' + String(n) : String(n))) - .join(':') - .replace(/000000\d*$/, '') // % 60 may introduce error - ); -} -const intTime = { - identify: value => typeof value === 'bigint' || Number.isInteger(value), - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'TIME', - test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+$/, - resolve: (str, _onError, { intAsBigInt }) => parseSexagesimal(str, intAsBigInt), - stringify: stringifySexagesimal -}; -const floatTime = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - format: 'TIME', - test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]*$/, - resolve: str => parseSexagesimal(str, false), - stringify: stringifySexagesimal -}; -const timestamp = { - identify: value => value instanceof Date, - default: true, - tag: 'tag:yaml.org,2002:timestamp', - // If the time zone is omitted, the timestamp is assumed to be specified in UTC. The time part - // may be omitted altogether, resulting in a date format. In such a case, the time part is - // assumed to be 00:00:00Z (start of day, UTC). - test: RegExp('^([0-9]{4})-([0-9]{1,2})-([0-9]{1,2})' + // YYYY-Mm-Dd - '(?:' + // time is optional - '(?:t|T|[ \\t]+)' + // t | T | whitespace - '([0-9]{1,2}):([0-9]{1,2}):([0-9]{1,2}(\\.[0-9]+)?)' + // Hh:Mm:Ss(.ss)? - '(?:[ \\t]*(Z|[-+][012]?[0-9](?::[0-9]{2})?))?' + // Z | +5 | -03:30 - ')?$'), - resolve(str) { - const match = str.match(timestamp.test); - if (!match) - throw new Error('!!timestamp expects a date, starting with yyyy-mm-dd'); - const [, year, month, day, hour, minute, second] = match.map(Number); - const millisec = match[7] ? Number((match[7] + '00').substr(1, 3)) : 0; - let date = Date.UTC(year, month - 1, day, hour || 0, minute || 0, second || 0, millisec); - const tz = match[8]; - if (tz && tz !== 'Z') { - let d = parseSexagesimal(tz, false); - if (Math.abs(d) < 30) - d *= 60; - date -= 60000 * d; - } - return new Date(date); - }, - stringify: ({ value }) => value.toISOString().replace(/((T00:00)?:00)?\.000Z$/, '') -}; - -exports.floatTime = floatTime; -exports.intTime = intTime; -exports.timestamp = timestamp; - - -/***/ }), - -/***/ 1883: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -const FOLD_FLOW = 'flow'; -const FOLD_BLOCK = 'block'; -const FOLD_QUOTED = 'quoted'; -/** - * Tries to keep input at up to `lineWidth` characters, splitting only on spaces - * not followed by newlines or spaces unless `mode` is `'quoted'`. Lines are - * terminated with `\n` and started with `indent`. 
- */ -function foldFlowLines(text, indent, mode = 'flow', { indentAtStart, lineWidth = 80, minContentWidth = 20, onFold, onOverflow } = {}) { - if (!lineWidth || lineWidth < 0) - return text; - const endStep = Math.max(1 + minContentWidth, 1 + lineWidth - indent.length); - if (text.length <= endStep) - return text; - const folds = []; - const escapedFolds = {}; - let end = lineWidth - indent.length; - if (typeof indentAtStart === 'number') { - if (indentAtStart > lineWidth - Math.max(2, minContentWidth)) - folds.push(0); - else - end = lineWidth - indentAtStart; - } - let split = undefined; - let prev = undefined; - let overflow = false; - let i = -1; - let escStart = -1; - let escEnd = -1; - if (mode === FOLD_BLOCK) { - i = consumeMoreIndentedLines(text, i); - if (i !== -1) - end = i + endStep; - } - for (let ch; (ch = text[(i += 1)]);) { - if (mode === FOLD_QUOTED && ch === '\\') { - escStart = i; - switch (text[i + 1]) { - case 'x': - i += 3; - break; - case 'u': - i += 5; - break; - case 'U': - i += 9; - break; - default: - i += 1; - } - escEnd = i; - } - if (ch === '\n') { - if (mode === FOLD_BLOCK) - i = consumeMoreIndentedLines(text, i); - end = i + endStep; - split = undefined; - } - else { - if (ch === ' ' && - prev && - prev !== ' ' && - prev !== '\n' && - prev !== '\t') { - // space surrounded by non-space can be replaced with newline + indent - const next = text[i + 1]; - if (next && next !== ' ' && next !== '\n' && next !== '\t') - split = i; - } - if (i >= end) { - if (split) { - folds.push(split); - end = split + endStep; - split = undefined; - } - else if (mode === FOLD_QUOTED) { - // white-space collected at end may stretch past lineWidth - while (prev === ' ' || prev === '\t') { - prev = ch; - ch = text[(i += 1)]; - overflow = true; - } - // Account for newline escape, but don't break preceding escape - const j = i > escEnd + 1 ? 
i - 2 : escStart - 1; - // Bail out if lineWidth & minContentWidth are shorter than an escape string - if (escapedFolds[j]) - return text; - folds.push(j); - escapedFolds[j] = true; - end = j + endStep; - split = undefined; - } - else { - overflow = true; - } - } - } - prev = ch; - } - if (overflow && onOverflow) - onOverflow(); - if (folds.length === 0) - return text; - if (onFold) - onFold(); - let res = text.slice(0, folds[0]); - for (let i = 0; i < folds.length; ++i) { - const fold = folds[i]; - const end = folds[i + 1] || text.length; - if (fold === 0) - res = `\n${indent}${text.slice(0, end)}`; - else { - if (mode === FOLD_QUOTED && escapedFolds[fold]) - res += `${text[fold]}\\`; - res += `\n${indent}${text.slice(fold + 1, end)}`; - } - } - return res; -} -/** - * Presumes `i + 1` is at the start of a line - * @returns index of last newline in more-indented block - */ -function consumeMoreIndentedLines(text, i) { - let ch = text[i + 1]; - while (ch === ' ' || ch === '\t') { - do { - ch = text[(i += 1)]; - } while (ch && ch !== '\n'); - ch = text[i + 1]; - } - return i; -} - -exports.FOLD_BLOCK = FOLD_BLOCK; -exports.FOLD_FLOW = FOLD_FLOW; -exports.FOLD_QUOTED = FOLD_QUOTED; -exports.foldFlowLines = foldFlowLines; - - -/***/ }), - -/***/ 1922: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var anchors = __nccwpck_require__(584); -var Node = __nccwpck_require__(9752); -var stringifyComment = __nccwpck_require__(5577); -var stringifyString = __nccwpck_require__(6084); - -function createStringifyContext(doc, options) { - const opt = Object.assign({ - blockQuote: true, - commentString: stringifyComment.stringifyComment, - defaultKeyType: null, - defaultStringType: 'PLAIN', - directives: null, - doubleQuotedAsJSON: false, - doubleQuotedMinMultiLineLength: 40, - falseStr: 'false', - indentSeq: true, - lineWidth: 80, - minContentWidth: 20, - nullStr: 'null', - simpleKeys: false, - singleQuote: null, - trueStr: 'true', - verifyAliasOrder: true - }, doc.schema.toStringOptions, options); - let inFlow; - switch (opt.collectionStyle) { - case 'block': - inFlow = false; - break; - case 'flow': - inFlow = true; - break; - default: - inFlow = null; - } - return { - anchors: new Set(), - doc, - indent: '', - indentStep: typeof opt.indent === 'number' ? ' '.repeat(opt.indent) : ' ', - inFlow, - options: opt - }; -} -function getTagObject(tags, item) { - if (item.tag) { - const match = tags.filter(t => t.tag === item.tag); - if (match.length > 0) - return match.find(t => t.format === item.format) ?? match[0]; - } - let tagObj = undefined; - let obj; - if (Node.isScalar(item)) { - obj = item.value; - const match = tags.filter(t => t.identify?.(obj)); - tagObj = - match.find(t => t.format === item.format) ?? match.find(t => !t.format); - } - else { - obj = item; - tagObj = tags.find(t => t.nodeClass && obj instanceof t.nodeClass); - } - if (!tagObj) { - const name = obj?.constructor?.name ?? typeof obj; - throw new Error(`Tag not resolved for ${name} value`); - } - return tagObj; -} -// needs to be called before value stringifier to allow for circular anchor refs -function stringifyProps(node, tagObj, { anchors: anchors$1, doc }) { - if (!doc.directives) - return ''; - const props = []; - const anchor = (Node.isScalar(node) || Node.isCollection(node)) && node.anchor; - if (anchor && anchors.anchorIsValid(anchor)) { - anchors$1.add(anchor); - props.push(`&${anchor}`); - } - const tag = node.tag ? node.tag : tagObj.default ? 
null : tagObj.tag; - if (tag) - props.push(doc.directives.tagString(tag)); - return props.join(' '); -} -function stringify(item, ctx, onComment, onChompKeep) { - if (Node.isPair(item)) - return item.toString(ctx, onComment, onChompKeep); - if (Node.isAlias(item)) { - if (ctx.doc.directives) - return item.toString(ctx); - if (ctx.resolvedAliases?.has(item)) { - throw new TypeError(`Cannot stringify circular structure without alias nodes`); - } - else { - if (ctx.resolvedAliases) - ctx.resolvedAliases.add(item); - else - ctx.resolvedAliases = new Set([item]); - item = item.resolve(ctx.doc); - } - } - let tagObj = undefined; - const node = Node.isNode(item) - ? item - : ctx.doc.createNode(item, { onTagObj: o => (tagObj = o) }); - if (!tagObj) - tagObj = getTagObject(ctx.doc.schema.tags, node); - const props = stringifyProps(node, tagObj, ctx); - if (props.length > 0) - ctx.indentAtStart = (ctx.indentAtStart ?? 0) + props.length + 1; - const str = typeof tagObj.stringify === 'function' - ? tagObj.stringify(node, ctx, onComment, onChompKeep) - : Node.isScalar(node) - ? stringifyString.stringifyString(node, ctx, onComment, onChompKeep) - : node.toString(ctx, onComment, onChompKeep); - if (!props) - return str; - return Node.isScalar(node) || str[0] === '{' || str[0] === '[' - ? `${props} ${str}` - : `${props}\n${ctx.indent}${str}`; -} - -exports.createStringifyContext = createStringifyContext; -exports.stringify = stringify; - - -/***/ }), - -/***/ 3541: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Collection = __nccwpck_require__(9797); -var Node = __nccwpck_require__(9752); -var stringify = __nccwpck_require__(1922); -var stringifyComment = __nccwpck_require__(5577); - -function stringifyCollection(collection, ctx, options) { - const flow = ctx.inFlow ?? collection.flow; - const stringify = flow ? stringifyFlowCollection : stringifyBlockCollection; - return stringify(collection, ctx, options); -} -function stringifyBlockCollection({ comment, items }, ctx, { blockItemPrefix, flowChars, itemIndent, onChompKeep, onComment }) { - const { indent, options: { commentString } } = ctx; - const itemCtx = Object.assign({}, ctx, { indent: itemIndent, type: null }); - let chompKeep = false; // flag for the preceding node's status - const lines = []; - for (let i = 0; i < items.length; ++i) { - const item = items[i]; - let comment = null; - if (Node.isNode(item)) { - if (!chompKeep && item.spaceBefore) - lines.push(''); - addCommentBefore(ctx, lines, item.commentBefore, chompKeep); - if (item.comment) - comment = item.comment; - } - else if (Node.isPair(item)) { - const ik = Node.isNode(item.key) ? item.key : null; - if (ik) { - if (!chompKeep && ik.spaceBefore) - lines.push(''); - addCommentBefore(ctx, lines, ik.commentBefore, chompKeep); - } - } - chompKeep = false; - let str = stringify.stringify(item, itemCtx, () => (comment = null), () => (chompKeep = true)); - if (comment) - str += stringifyComment.lineComment(str, itemIndent, commentString(comment)); - if (chompKeep && comment) - chompKeep = false; - lines.push(blockItemPrefix + str); - } - let str; - if (lines.length === 0) { - str = flowChars.start + flowChars.end; - } - else { - str = lines[0]; - for (let i = 1; i < lines.length; ++i) { - const line = lines[i]; - str += line ? 
`\n${indent}${line}` : '\n'; - } - } - if (comment) { - str += '\n' + stringifyComment.indentComment(commentString(comment), indent); - if (onComment) - onComment(); - } - else if (chompKeep && onChompKeep) - onChompKeep(); - return str; -} -function stringifyFlowCollection({ comment, items }, ctx, { flowChars, itemIndent, onComment }) { - const { indent, indentStep, options: { commentString } } = ctx; - itemIndent += indentStep; - const itemCtx = Object.assign({}, ctx, { - indent: itemIndent, - inFlow: true, - type: null - }); - let reqNewline = false; - let linesAtValue = 0; - const lines = []; - for (let i = 0; i < items.length; ++i) { - const item = items[i]; - let comment = null; - if (Node.isNode(item)) { - if (item.spaceBefore) - lines.push(''); - addCommentBefore(ctx, lines, item.commentBefore, false); - if (item.comment) - comment = item.comment; - } - else if (Node.isPair(item)) { - const ik = Node.isNode(item.key) ? item.key : null; - if (ik) { - if (ik.spaceBefore) - lines.push(''); - addCommentBefore(ctx, lines, ik.commentBefore, false); - if (ik.comment) - reqNewline = true; - } - const iv = Node.isNode(item.value) ? item.value : null; - if (iv) { - if (iv.comment) - comment = iv.comment; - if (iv.commentBefore) - reqNewline = true; - } - else if (item.value == null && ik && ik.comment) { - comment = ik.comment; - } - } - if (comment) - reqNewline = true; - let str = stringify.stringify(item, itemCtx, () => (comment = null)); - if (i < items.length - 1) - str += ','; - if (comment) - str += stringifyComment.lineComment(str, itemIndent, commentString(comment)); - if (!reqNewline && (lines.length > linesAtValue || str.includes('\n'))) - reqNewline = true; - lines.push(str); - linesAtValue = lines.length; - } - let str; - const { start, end } = flowChars; - if (lines.length === 0) { - str = start + end; - } - else { - if (!reqNewline) { - const len = lines.reduce((sum, line) => sum + line.length + 2, 2); - reqNewline = len > Collection.Collection.maxFlowStringSingleLineLength; - } - if (reqNewline) { - str = start; - for (const line of lines) - str += line ? `\n${indentStep}${indent}${line}` : '\n'; - str += `\n${indent}${end}`; - } - else { - str = `${start} ${lines.join(' ')} ${end}`; - } - } - if (comment) { - str += stringifyComment.lineComment(str, commentString(comment), indent); - if (onComment) - onComment(); - } - return str; -} -function addCommentBefore({ indent, options: { commentString } }, lines, comment, chompKeep) { - if (comment && chompKeep) - comment = comment.replace(/^\n+/, ''); - if (comment) { - const ic = stringifyComment.indentComment(commentString(comment), indent); - lines.push(ic.trimStart()); // Avoid double indent on first line - } -} - -exports.stringifyCollection = stringifyCollection; - - -/***/ }), - -/***/ 5577: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -/** - * Stringifies a comment. - * - * Empty comment lines are left empty, - * lines consisting of a single space are replaced by `#`, - * and all other lines are prefixed with a `#`. - */ -const stringifyComment = (str) => str.replace(/^(?!$)(?: $)?/gm, '#'); -function indentComment(comment, indent) { - if (/^\n+$/.test(comment)) - return comment.substring(1); - return indent ? comment.replace(/^(?! *$)/gm, indent) : comment; -} -const lineComment = (str, indent, comment) => str.endsWith('\n') - ? indentComment(comment, indent) - : comment.includes('\n') - ? '\n' + indentComment(comment, indent) - : (str.endsWith(' ') ? 
'' : ' ') + comment; - -exports.indentComment = indentComment; -exports.lineComment = lineComment; -exports.stringifyComment = stringifyComment; - - -/***/ }), - -/***/ 9615: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); -var stringify = __nccwpck_require__(1922); -var stringifyComment = __nccwpck_require__(5577); - -function stringifyDocument(doc, options) { - const lines = []; - let hasDirectives = options.directives === true; - if (options.directives !== false && doc.directives) { - const dir = doc.directives.toString(doc); - if (dir) { - lines.push(dir); - hasDirectives = true; - } - else if (doc.directives.docStart) - hasDirectives = true; - } - if (hasDirectives) - lines.push('---'); - const ctx = stringify.createStringifyContext(doc, options); - const { commentString } = ctx.options; - if (doc.commentBefore) { - if (lines.length !== 1) - lines.unshift(''); - const cs = commentString(doc.commentBefore); - lines.unshift(stringifyComment.indentComment(cs, '')); - } - let chompKeep = false; - let contentComment = null; - if (doc.contents) { - if (Node.isNode(doc.contents)) { - if (doc.contents.spaceBefore && hasDirectives) - lines.push(''); - if (doc.contents.commentBefore) { - const cs = commentString(doc.contents.commentBefore); - lines.push(stringifyComment.indentComment(cs, '')); - } - // top-level block scalars need to be indented if followed by a comment - ctx.forceBlockIndent = !!doc.comment; - contentComment = doc.contents.comment; - } - const onChompKeep = contentComment ? undefined : () => (chompKeep = true); - let body = stringify.stringify(doc.contents, ctx, () => (contentComment = null), onChompKeep); - if (contentComment) - body += stringifyComment.lineComment(body, '', commentString(contentComment)); - if ((body[0] === '|' || body[0] === '>') && - lines[lines.length - 1] === '---') { - // Top-level block scalars with a preceding doc marker ought to use the - // same line for their header. - lines[lines.length - 1] = `--- ${body}`; - } - else - lines.push(body); - } - else { - lines.push(stringify.stringify(doc.contents, ctx)); - } - if (doc.directives?.docEnd) { - if (doc.comment) { - const cs = commentString(doc.comment); - if (cs.includes('\n')) { - lines.push('...'); - lines.push(stringifyComment.indentComment(cs, '')); - } - else { - lines.push(`... ${cs}`); - } - } - else { - lines.push('...'); - } - } - else { - let dc = doc.comment; - if (dc && chompKeep) - dc = dc.replace(/^\n+/, ''); - if (dc) { - if ((!chompKeep || contentComment) && lines[lines.length - 1] !== '') - lines.push(''); - lines.push(stringifyComment.indentComment(commentString(dc), '')); - } - } - return lines.join('\n') + '\n'; -} - -exports.stringifyDocument = stringifyDocument; - - -/***/ }), - -/***/ 780: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -function stringifyNumber({ format, minFractionDigits, tag, value }) { - if (typeof value === 'bigint') - return String(value); - const num = typeof value === 'number' ? value : Number(value); - if (!isFinite(num)) - return isNaN(num) ? '.nan' : num < 0 ? 
'-.inf' : '.inf'; - let n = JSON.stringify(value); - if (!format && - minFractionDigits && - (!tag || tag === 'tag:yaml.org,2002:float') && - /^\d/.test(n)) { - let i = n.indexOf('.'); - if (i < 0) { - i = n.length; - n += '.'; - } - let d = minFractionDigits - (n.length - i - 1); - while (d-- > 0) - n += '0'; - } - return n; -} - -exports.stringifyNumber = stringifyNumber; - - -/***/ }), - -/***/ 10: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); -var Scalar = __nccwpck_require__(8160); -var stringify = __nccwpck_require__(1922); -var stringifyComment = __nccwpck_require__(5577); - -function stringifyPair({ key, value }, ctx, onComment, onChompKeep) { - const { allNullValues, doc, indent, indentStep, options: { commentString, indentSeq, simpleKeys } } = ctx; - let keyComment = (Node.isNode(key) && key.comment) || null; - if (simpleKeys) { - if (keyComment) { - throw new Error('With simple keys, key nodes cannot have comments'); - } - if (Node.isCollection(key)) { - const msg = 'With simple keys, collection cannot be used as a key value'; - throw new Error(msg); - } - } - let explicitKey = !simpleKeys && - (!key || - (keyComment && value == null && !ctx.inFlow) || - Node.isCollection(key) || - (Node.isScalar(key) - ? key.type === Scalar.Scalar.BLOCK_FOLDED || key.type === Scalar.Scalar.BLOCK_LITERAL - : typeof key === 'object')); - ctx = Object.assign({}, ctx, { - allNullValues: false, - implicitKey: !explicitKey && (simpleKeys || !allNullValues), - indent: indent + indentStep - }); - let keyCommentDone = false; - let chompKeep = false; - let str = stringify.stringify(key, ctx, () => (keyCommentDone = true), () => (chompKeep = true)); - if (!explicitKey && !ctx.inFlow && str.length > 1024) { - if (simpleKeys) - throw new Error('With simple keys, single line scalar must not span more than 1024 characters'); - explicitKey = true; - } - if (ctx.inFlow) { - if (allNullValues || value == null) { - if (keyCommentDone && onComment) - onComment(); - return str === '' ? '?' : explicitKey ? `? ${str}` : str; - } - } - else if ((allNullValues && !simpleKeys) || (value == null && explicitKey)) { - str = `? ${str}`; - if (keyComment && !keyCommentDone) { - str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment)); - } - else if (chompKeep && onChompKeep) - onChompKeep(); - return str; - } - if (keyCommentDone) - keyComment = null; - if (explicitKey) { - if (keyComment) - str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment)); - str = `? 
${str}\n${indent}:`; - } - else { - str = `${str}:`; - if (keyComment) - str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment)); - } - let vcb = ''; - let valueComment = null; - if (Node.isNode(value)) { - if (value.spaceBefore) - vcb = '\n'; - if (value.commentBefore) { - const cs = commentString(value.commentBefore); - vcb += `\n${stringifyComment.indentComment(cs, ctx.indent)}`; - } - valueComment = value.comment; - } - else if (value && typeof value === 'object') { - value = doc.createNode(value); - } - ctx.implicitKey = false; - if (!explicitKey && !keyComment && Node.isScalar(value)) - ctx.indentAtStart = str.length + 1; - chompKeep = false; - if (!indentSeq && - indentStep.length >= 2 && - !ctx.inFlow && - !explicitKey && - Node.isSeq(value) && - !value.flow && - !value.tag && - !value.anchor) { - // If indentSeq === false, consider '- ' as part of indentation where possible - ctx.indent = ctx.indent.substr(2); - } - let valueCommentDone = false; - const valueStr = stringify.stringify(value, ctx, () => (valueCommentDone = true), () => (chompKeep = true)); - let ws = ' '; - if (vcb || keyComment) { - if (valueStr === '' && !ctx.inFlow) - ws = vcb === '\n' ? '\n\n' : vcb; - else - ws = `${vcb}\n${ctx.indent}`; - } - else if (!explicitKey && Node.isCollection(value)) { - const flow = valueStr[0] === '[' || valueStr[0] === '{'; - if (!flow || valueStr.includes('\n')) - ws = `\n${ctx.indent}`; - } - else if (valueStr === '' || valueStr[0] === '\n') - ws = ''; - str += ws + valueStr; - if (ctx.inFlow) { - if (valueCommentDone && onComment) - onComment(); - } - else if (valueComment && !valueCommentDone) { - str += stringifyComment.lineComment(str, ctx.indent, commentString(valueComment)); - } - else if (chompKeep && onChompKeep) { - onChompKeep(); - } - return str; -} - -exports.stringifyPair = stringifyPair; - - -/***/ }), - -/***/ 6084: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Scalar = __nccwpck_require__(8160); -var foldFlowLines = __nccwpck_require__(1883); - -const getFoldOptions = (ctx) => ({ - indentAtStart: ctx.indentAtStart, - lineWidth: ctx.options.lineWidth, - minContentWidth: ctx.options.minContentWidth -}); -// Also checks for lines starting with %, as parsing the output as YAML 1.1 will -// presume that's starting a new document. -const containsDocumentMarker = (str) => /^(%|---|\.\.\.)/m.test(str); -function lineLengthOverLimit(str, lineWidth, indentLength) { - if (!lineWidth || lineWidth < 0) - return false; - const limit = lineWidth - indentLength; - const strLen = str.length; - if (strLen <= limit) - return false; - for (let i = 0, start = 0; i < strLen; ++i) { - if (str[i] === '\n') { - if (i - start > limit) - return true; - start = i + 1; - if (strLen - start <= limit) - return false; - } - } - return true; -} -function doubleQuotedString(value, ctx) { - const json = JSON.stringify(value); - if (ctx.options.doubleQuotedAsJSON) - return json; - const { implicitKey } = ctx; - const minMultiLineLength = ctx.options.doubleQuotedMinMultiLineLength; - const indent = ctx.indent || (containsDocumentMarker(value) ? 
' ' : ''); - let str = ''; - let start = 0; - for (let i = 0, ch = json[i]; ch; ch = json[++i]) { - if (ch === ' ' && json[i + 1] === '\\' && json[i + 2] === 'n') { - // space before newline needs to be escaped to not be folded - str += json.slice(start, i) + '\\ '; - i += 1; - start = i; - ch = '\\'; - } - if (ch === '\\') - switch (json[i + 1]) { - case 'u': - { - str += json.slice(start, i); - const code = json.substr(i + 2, 4); - switch (code) { - case '0000': - str += '\\0'; - break; - case '0007': - str += '\\a'; - break; - case '000b': - str += '\\v'; - break; - case '001b': - str += '\\e'; - break; - case '0085': - str += '\\N'; - break; - case '00a0': - str += '\\_'; - break; - case '2028': - str += '\\L'; - break; - case '2029': - str += '\\P'; - break; - default: - if (code.substr(0, 2) === '00') - str += '\\x' + code.substr(2); - else - str += json.substr(i, 6); - } - i += 5; - start = i + 1; - } - break; - case 'n': - if (implicitKey || - json[i + 2] === '"' || - json.length < minMultiLineLength) { - i += 1; - } - else { - // folding will eat first newline - str += json.slice(start, i) + '\n\n'; - while (json[i + 2] === '\\' && - json[i + 3] === 'n' && - json[i + 4] !== '"') { - str += '\n'; - i += 2; - } - str += indent; - // space after newline needs to be escaped to not be folded - if (json[i + 2] === ' ') - str += '\\'; - i += 1; - start = i + 1; - } - break; - default: - i += 1; - } - } - str = start ? str + json.slice(start) : json; - return implicitKey - ? str - : foldFlowLines.foldFlowLines(str, indent, foldFlowLines.FOLD_QUOTED, getFoldOptions(ctx)); -} -function singleQuotedString(value, ctx) { - if (ctx.options.singleQuote === false || - (ctx.implicitKey && value.includes('\n')) || - /[ \t]\n|\n[ \t]/.test(value) // single quoted string can't have leading or trailing whitespace around newline - ) - return doubleQuotedString(value, ctx); - const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : ''); - const res = "'" + value.replace(/'/g, "''").replace(/\n+/g, `$&\n${indent}`) + "'"; - return ctx.implicitKey - ? res - : foldFlowLines.foldFlowLines(res, indent, foldFlowLines.FOLD_FLOW, getFoldOptions(ctx)); -} -function quotedString(value, ctx) { - const { singleQuote } = ctx.options; - let qs; - if (singleQuote === false) - qs = doubleQuotedString; - else { - const hasDouble = value.includes('"'); - const hasSingle = value.includes("'"); - if (hasDouble && !hasSingle) - qs = singleQuotedString; - else if (hasSingle && !hasDouble) - qs = doubleQuotedString; - else - qs = singleQuote ? singleQuotedString : doubleQuotedString; - } - return qs(value, ctx); -} -function blockString({ comment, type, value }, ctx, onComment, onChompKeep) { - const { blockQuote, commentString, lineWidth } = ctx.options; - // 1. Block can't end in whitespace unless the last line is non-empty. - // 2. Strings consisting of only whitespace are best rendered explicitly. - if (!blockQuote || /\n[\t ]+$/.test(value) || /^\s*$/.test(value)) { - return quotedString(value, ctx); - } - const indent = ctx.indent || - (ctx.forceBlockIndent || containsDocumentMarker(value) ? ' ' : ''); - const literal = blockQuote === 'literal' - ? true - : blockQuote === 'folded' || type === Scalar.Scalar.BLOCK_FOLDED - ? false - : type === Scalar.Scalar.BLOCK_LITERAL - ? true - : !lineLengthOverLimit(value, lineWidth, indent.length); - if (!value) - return literal ? 
'|\n' : '>\n'; - // determine chomping from whitespace at value end - let chomp; - let endStart; - for (endStart = value.length; endStart > 0; --endStart) { - const ch = value[endStart - 1]; - if (ch !== '\n' && ch !== '\t' && ch !== ' ') - break; - } - let end = value.substring(endStart); - const endNlPos = end.indexOf('\n'); - if (endNlPos === -1) { - chomp = '-'; // strip - } - else if (value === end || endNlPos !== end.length - 1) { - chomp = '+'; // keep - if (onChompKeep) - onChompKeep(); - } - else { - chomp = ''; // clip - } - if (end) { - value = value.slice(0, -end.length); - if (end[end.length - 1] === '\n') - end = end.slice(0, -1); - end = end.replace(/\n+(?!\n|$)/g, `$&${indent}`); - } - // determine indent indicator from whitespace at value start - let startWithSpace = false; - let startEnd; - let startNlPos = -1; - for (startEnd = 0; startEnd < value.length; ++startEnd) { - const ch = value[startEnd]; - if (ch === ' ') - startWithSpace = true; - else if (ch === '\n') - startNlPos = startEnd; - else - break; - } - let start = value.substring(0, startNlPos < startEnd ? startNlPos + 1 : startEnd); - if (start) { - value = value.substring(start.length); - start = start.replace(/\n+/g, `$&${indent}`); - } - const indentSize = indent ? '2' : '1'; // root is at -1 - let header = (literal ? '|' : '>') + (startWithSpace ? indentSize : '') + chomp; - if (comment) { - header += ' ' + commentString(comment.replace(/ ?[\r\n]+/g, ' ')); - if (onComment) - onComment(); - } - if (literal) { - value = value.replace(/\n+/g, `$&${indent}`); - return `${header}\n${indent}${start}${value}${end}`; - } - value = value - .replace(/\n+/g, '\n$&') - .replace(/(?:^|\n)([\t ].*)(?:([\n\t ]*)\n(?![\n\t ]))?/g, '$1$2') // more-indented lines aren't folded - // ^ more-ind. ^ empty ^ capture next empty lines only at end of indent - .replace(/\n+/g, `$&${indent}`); - const body = foldFlowLines.foldFlowLines(`${start}${value}${end}`, indent, foldFlowLines.FOLD_BLOCK, getFoldOptions(ctx)); - return `${header}\n${indent}${body}`; -} -function plainString(item, ctx, onComment, onChompKeep) { - const { type, value } = item; - const { actualString, implicitKey, indent, inFlow } = ctx; - if ((implicitKey && /[\n[\]{},]/.test(value)) || - (inFlow && /[[\]{},]/.test(value))) { - return quotedString(value, ctx); - } - if (!value || - /^[\n\t ,[\]{}#&*!|>'"%@`]|^[?-]$|^[?-][ \t]|[\n:][ \t]|[ \t]\n|[\n\t ]#|[\n\t :]$/.test(value)) { - // not allowed: - // - empty string, '-' or '?' - // - start with an indicator character (except [?:-]) or /[?-] / - // - '\n ', ': ' or ' \n' anywhere - // - '#' not preceded by a non-space char - // - end with ' ' or ':' - return implicitKey || inFlow || !value.includes('\n') - ? quotedString(value, ctx) - : blockString(item, ctx, onComment, onChompKeep); - } - if (!implicitKey && - !inFlow && - type !== Scalar.Scalar.PLAIN && - value.includes('\n')) { - // Where allowed & type not set explicitly, prefer block style for multiline strings - return blockString(item, ctx, onComment, onChompKeep); - } - if (indent === '' && containsDocumentMarker(value)) { - ctx.forceBlockIndent = true; - return blockString(item, ctx, onComment, onChompKeep); - } - const str = value.replace(/\n+/g, `$&\n${indent}`); - // Verify that output will be parsed as a string, as e.g. plain numbers and - // booleans get parsed with those types in v1.2 (e.g. '42', 'true' & '0.9e-3'), - // and others in v1.1. 
- if (actualString) { - const test = (tag) => tag.default && tag.tag !== 'tag:yaml.org,2002:str' && tag.test?.test(str); - const { compat, tags } = ctx.doc.schema; - if (tags.some(test) || compat?.some(test)) - return quotedString(value, ctx); - } - return implicitKey - ? str - : foldFlowLines.foldFlowLines(str, indent, foldFlowLines.FOLD_FLOW, getFoldOptions(ctx)); -} -function stringifyString(item, ctx, onComment, onChompKeep) { - const { implicitKey, inFlow } = ctx; - const ss = typeof item.value === 'string' - ? item - : Object.assign({}, item, { value: String(item.value) }); - let { type } = item; - if (type !== Scalar.Scalar.QUOTE_DOUBLE) { - // force double quotes on control characters & unpaired surrogates - if (/[\x00-\x08\x0b-\x1f\x7f-\x9f\u{D800}-\u{DFFF}]/u.test(ss.value)) - type = Scalar.Scalar.QUOTE_DOUBLE; - } - const _stringify = (_type) => { - switch (_type) { - case Scalar.Scalar.BLOCK_FOLDED: - case Scalar.Scalar.BLOCK_LITERAL: - return implicitKey || inFlow - ? quotedString(ss.value, ctx) // blocks are not valid inside flow containers - : blockString(ss, ctx, onComment, onChompKeep); - case Scalar.Scalar.QUOTE_DOUBLE: - return doubleQuotedString(ss.value, ctx); - case Scalar.Scalar.QUOTE_SINGLE: - return singleQuotedString(ss.value, ctx); - case Scalar.Scalar.PLAIN: - return plainString(ss, ctx, onComment, onChompKeep); - default: - return null; - } - }; - let res = _stringify(type); - if (res === null) { - const { defaultKeyType, defaultStringType } = ctx.options; - const t = (implicitKey && defaultKeyType) || defaultStringType; - res = _stringify(t); - if (res === null) - throw new Error(`Unsupported default string type ${t}`); - } - return res; -} - -exports.stringifyString = stringifyString; - - -/***/ }), - -/***/ 8234: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(9752); - -const BREAK = Symbol('break visit'); -const SKIP = Symbol('skip children'); -const REMOVE = Symbol('remove node'); -/** - * Apply a visitor to an AST node or document. - * - * Walks through the tree (depth-first) starting from `node`, calling a - * `visitor` function with three arguments: - * - `key`: For sequence values and map `Pair`, the node's index in the - * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly. - * `null` for the root node. - * - `node`: The current node. - * - `path`: The ancestry of the current node. - * - * The return value of the visitor may be used to control the traversal: - * - `undefined` (default): Do nothing and continue - * - `visit.SKIP`: Do not visit the children of this node, continue with next - * sibling - * - `visit.BREAK`: Terminate traversal completely - * - `visit.REMOVE`: Remove the current node, then continue with the next one - * - `Node`: Replace the current node, then continue by visiting it - * - `number`: While iterating the items of a sequence or map, set the index - * of the next step. This is useful especially if the index of the current - * node has changed. - * - * If `visitor` is a single function, it will be called with all values - * encountered in the tree, including e.g. `null` values. Alternatively, - * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`, - * `Alias` and `Scalar` node. To define the same visitor function for more than - * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar) - * and `Node` (alias, map, seq & scalar) targets. 
Of all these, only the most - * specific defined one will be used for each node. - */ -function visit(node, visitor) { - const visitor_ = initVisitor(visitor); - if (Node.isDocument(node)) { - const cd = visit_(null, node.contents, visitor_, Object.freeze([node])); - if (cd === REMOVE) - node.contents = null; - } - else - visit_(null, node, visitor_, Object.freeze([])); -} -// Without the `as symbol` casts, TS declares these in the `visit` -// namespace using `var`, but then complains about that because -// `unique symbol` must be `const`. -/** Terminate visit traversal completely */ -visit.BREAK = BREAK; -/** Do not visit the children of the current node */ -visit.SKIP = SKIP; -/** Remove the current node */ -visit.REMOVE = REMOVE; -function visit_(key, node, visitor, path) { - const ctrl = callVisitor(key, node, visitor, path); - if (Node.isNode(ctrl) || Node.isPair(ctrl)) { - replaceNode(key, path, ctrl); - return visit_(key, ctrl, visitor, path); - } - if (typeof ctrl !== 'symbol') { - if (Node.isCollection(node)) { - path = Object.freeze(path.concat(node)); - for (let i = 0; i < node.items.length; ++i) { - const ci = visit_(i, node.items[i], visitor, path); - if (typeof ci === 'number') - i = ci - 1; - else if (ci === BREAK) - return BREAK; - else if (ci === REMOVE) { - node.items.splice(i, 1); - i -= 1; - } - } - } - else if (Node.isPair(node)) { - path = Object.freeze(path.concat(node)); - const ck = visit_('key', node.key, visitor, path); - if (ck === BREAK) - return BREAK; - else if (ck === REMOVE) - node.key = null; - const cv = visit_('value', node.value, visitor, path); - if (cv === BREAK) - return BREAK; - else if (cv === REMOVE) - node.value = null; - } - } - return ctrl; -} -/** - * Apply an async visitor to an AST node or document. - * - * Walks through the tree (depth-first) starting from `node`, calling a - * `visitor` function with three arguments: - * - `key`: For sequence values and map `Pair`, the node's index in the - * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly. - * `null` for the root node. - * - `node`: The current node. - * - `path`: The ancestry of the current node. - * - * The return value of the visitor may be used to control the traversal: - * - `Promise`: Must resolve to one of the following values - * - `undefined` (default): Do nothing and continue - * - `visit.SKIP`: Do not visit the children of this node, continue with next - * sibling - * - `visit.BREAK`: Terminate traversal completely - * - `visit.REMOVE`: Remove the current node, then continue with the next one - * - `Node`: Replace the current node, then continue by visiting it - * - `number`: While iterating the items of a sequence or map, set the index - * of the next step. This is useful especially if the index of the current - * node has changed. - * - * If `visitor` is a single function, it will be called with all values - * encountered in the tree, including e.g. `null` values. Alternatively, - * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`, - * `Alias` and `Scalar` node. To define the same visitor function for more than - * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar) - * and `Node` (alias, map, seq & scalar) targets. Of all these, only the most - * specific defined one will be used for each node. 
- */ -async function visitAsync(node, visitor) { - const visitor_ = initVisitor(visitor); - if (Node.isDocument(node)) { - const cd = await visitAsync_(null, node.contents, visitor_, Object.freeze([node])); - if (cd === REMOVE) - node.contents = null; - } - else - await visitAsync_(null, node, visitor_, Object.freeze([])); -} -// Without the `as symbol` casts, TS declares these in the `visit` -// namespace using `var`, but then complains about that because -// `unique symbol` must be `const`. -/** Terminate visit traversal completely */ -visitAsync.BREAK = BREAK; -/** Do not visit the children of the current node */ -visitAsync.SKIP = SKIP; -/** Remove the current node */ -visitAsync.REMOVE = REMOVE; -async function visitAsync_(key, node, visitor, path) { - const ctrl = await callVisitor(key, node, visitor, path); - if (Node.isNode(ctrl) || Node.isPair(ctrl)) { - replaceNode(key, path, ctrl); - return visitAsync_(key, ctrl, visitor, path); - } - if (typeof ctrl !== 'symbol') { - if (Node.isCollection(node)) { - path = Object.freeze(path.concat(node)); - for (let i = 0; i < node.items.length; ++i) { - const ci = await visitAsync_(i, node.items[i], visitor, path); - if (typeof ci === 'number') - i = ci - 1; - else if (ci === BREAK) - return BREAK; - else if (ci === REMOVE) { - node.items.splice(i, 1); - i -= 1; - } - } - } - else if (Node.isPair(node)) { - path = Object.freeze(path.concat(node)); - const ck = await visitAsync_('key', node.key, visitor, path); - if (ck === BREAK) - return BREAK; - else if (ck === REMOVE) - node.key = null; - const cv = await visitAsync_('value', node.value, visitor, path); - if (cv === BREAK) - return BREAK; - else if (cv === REMOVE) - node.value = null; - } - } - return ctrl; -} -function initVisitor(visitor) { - if (typeof visitor === 'object' && - (visitor.Collection || visitor.Node || visitor.Value)) { - return Object.assign({ - Alias: visitor.Node, - Map: visitor.Node, - Scalar: visitor.Node, - Seq: visitor.Node - }, visitor.Value && { - Map: visitor.Value, - Scalar: visitor.Value, - Seq: visitor.Value - }, visitor.Collection && { - Map: visitor.Collection, - Seq: visitor.Collection - }, visitor); - } - return visitor; -} -function callVisitor(key, node, visitor, path) { - if (typeof visitor === 'function') - return visitor(key, node, path); - if (Node.isMap(node)) - return visitor.Map?.(key, node, path); - if (Node.isSeq(node)) - return visitor.Seq?.(key, node, path); - if (Node.isPair(node)) - return visitor.Pair?.(key, node, path); - if (Node.isScalar(node)) - return visitor.Scalar?.(key, node, path); - if (Node.isAlias(node)) - return visitor.Alias?.(key, node, path); - return undefined; -} -function replaceNode(key, path, node) { - const parent = path[path.length - 1]; - if (Node.isCollection(parent)) { - parent.items[key] = node; - } - else if (Node.isPair(parent)) { - if (key === 'key') - parent.key = node; - else - parent.value = node; - } - else if (Node.isDocument(parent)) { - parent.contents = node; - } - else { - const pt = Node.isAlias(parent) ? 
'alias' : 'scalar'; - throw new Error(`Cannot replace node with ${pt} parent`); - } -} - -exports.visit = visit; -exports.visitAsync = visitAsync; - - -/***/ }), - -/***/ 2877: -/***/ ((module) => { - -module.exports = eval("require")("encoding"); - - -/***/ }), - -/***/ 68: -/***/ ((module) => { - -"use strict"; -module.exports = JSON.parse('[[[0,44],"disallowed_STD3_valid"],[[45,46],"valid"],[[47,47],"disallowed_STD3_valid"],[[48,57],"valid"],[[58,64],"disallowed_STD3_valid"],[[65,65],"mapped",[97]],[[66,66],"mapped",[98]],[[67,67],"mapped",[99]],[[68,68],"mapped",[100]],[[69,69],"mapped",[101]],[[70,70],"mapped",[102]],[[71,71],"mapped",[103]],[[72,72],"mapped",[104]],[[73,73],"mapped",[105]],[[74,74],"mapped",[106]],[[75,75],"mapped",[107]],[[76,76],"mapped",[108]],[[77,77],"mapped",[109]],[[78,78],"mapped",[110]],[[79,79],"mapped",[111]],[[80,80],"mapped",[112]],[[81,81],"mapped",[113]],[[82,82],"mapped",[114]],[[83,83],"mapped",[115]],[[84,84],"mapped",[116]],[[85,85],"mapped",[117]],[[86,86],"mapped",[118]],[[87,87],"mapped",[119]],[[88,88],"mapped",[120]],[[89,89],"mapped",[121]],[[90,90],"mapped",[122]],[[91,96],"disallowed_STD3_valid"],[[97,122],"valid"],[[123,127],"disallowed_STD3_valid"],[[128,159],"disallowed"],[[160,160],"disallowed_STD3_mapped",[32]],[[161,167],"valid",[],"NV8"],[[168,168],"disallowed_STD3_mapped",[32,776]],[[169,169],"valid",[],"NV8"],[[170,170],"mapped",[97]],[[171,172],"valid",[],"NV8"],[[173,173],"ignored"],[[174,174],"valid",[],"NV8"],[[175,175],"disallowed_STD3_mapped",[32,772]],[[176,177],"valid",[],"NV8"],[[178,178],"mapped",[50]],[[179,179],"mapped",[51]],[[180,180],"disallowed_STD3_mapped",[32,769]],[[181,181],"mapped",[956]],[[182,182],"valid",[],"NV8"],[[183,183],"valid"],[[184,184],"disallowed_STD3_mapped",[32,807]],[[185,185],"mapped",[49]],[[186,186],"mapped",[111]],[[187,187],"valid",[],"NV8"],[[188,188],"mapped",[49,8260,52]],[[189,189],"mapped",[49,8260,50]],[[190,190],"mapped",[51,8260,52]],[[191,191],"valid",[],"NV8"],[[192,192],"mapped",[224]],[[193,193],"mapped",[225]],[[194,194],"mapped",[226]],[[195,195],"mapped",[227]],[[196,196],"mapped",[228]],[[197,197],"mapped",[229]],[[198,198],"mapped",[230]],[[199,199],"mapped",[231]],[[200,200],"mapped",[232]],[[201,201],"mapped",[233]],[[202,202],"mapped",[234]],[[203,203],"mapped",[235]],[[204,204],"mapped",[236]],[[205,205],"mapped",[237]],[[206,206],"mapped",[238]],[[207,207],"mapped",[239]],[[208,208],"mapped",[240]],[[209,209],"mapped",[241]],[[210,210],"mapped",[242]],[[211,211],"mapped",[243]],[[212,212],"mapped",[244]],[[213,213],"mapped",[245]],[[214,214],"mapped",[246]],[[215,215],"valid",[],"NV8"],[[216,216],"mapped",[248]],[[217,217],"mapped",[249]],[[218,218],"mapped",[250]],[[219,219],"mapped",[251]],[[220,220],"mapped",[252]],[[221,221],"mapped",[253]],[[222,222],"mapped",[254]],[[223,223],"deviation",[115,115]],[[224,246],"valid"],[[247,247],"valid",[],"NV8"],[[248,255],"valid"],[[256,256],"mapped",[257]],[[257,257],"valid"],[[258,258],"mapped",[259]],[[259,259],"valid"],[[260,260],"mapped",[261]],[[261,261],"valid"],[[262,262],"mapped",[263]],[[263,263],"valid"],[[264,264],"mapped",[265]],[[265,265],"valid"],[[266,266],"mapped",[267]],[[267,267],"valid"],[[268,268],"mapped",[269]],[[269,269],"valid"],[[270,270],"mapped",[271]],[[271,271],"valid"],[[272,272],"mapped",[273]],[[273,273],"valid"],[[274,274],"mapped",[275]],[[275,275],"valid"],[[276,276],"mapped",[277]],[[277,277],"valid"],[[278,278],"mapped",[279]],[[279,279],"valid"],[[280,280],"mapped",[281]],[[281,281],"valid"
],[[282,282],"mapped",[283]],[[283,283],"valid"],[[284,284],"mapped",[285]],[[285,285],"valid"],[[286,286],"mapped",[287]],[[287,287],"valid"],[[288,288],"mapped",[289]],[[289,289],"valid"],[[290,290],"mapped",[291]],[[291,291],"valid"],[[292,292],"mapped",[293]],[[293,293],"valid"],[[294,294],"mapped",[295]],[[295,295],"valid"],[[296,296],"mapped",[297]],[[297,297],"valid"],[[298,298],"mapped",[299]],[[299,299],"valid"],[[300,300],"mapped",[301]],[[301,301],"valid"],[[302,302],"mapped",[303]],[[303,303],"valid"],[[304,304],"mapped",[105,775]],[[305,305],"valid"],[[306,307],"mapped",[105,106]],[[308,308],"mapped",[309]],[[309,309],"valid"],[[310,310],"mapped",[311]],[[311,312],"valid"],[[313,313],"mapped",[314]],[[314,314],"valid"],[[315,315],"mapped",[316]],[[316,316],"valid"],[[317,317],"mapped",[318]],[[318,318],"valid"],[[319,320],"mapped",[108,183]],[[321,321],"mapped",[322]],[[322,322],"valid"],[[323,323],"mapped",[324]],[[324,324],"valid"],[[325,325],"mapped",[326]],[[326,326],"valid"],[[327,327],"mapped",[328]],[[328,328],"valid"],[[329,329],"mapped",[700,110]],[[330,330],"mapped",[331]],[[331,331],"valid"],[[332,332],"mapped",[333]],[[333,333],"valid"],[[334,334],"mapped",[335]],[[335,335],"valid"],[[336,336],"mapped",[337]],[[337,337],"valid"],[[338,338],"mapped",[339]],[[339,339],"valid"],[[340,340],"mapped",[341]],[[341,341],"valid"],[[342,342],"mapped",[343]],[[343,343],"valid"],[[344,344],"mapped",[345]],[[345,345],"valid"],[[346,346],"mapped",[347]],[[347,347],"valid"],[[348,348],"mapped",[349]],[[349,349],"valid"],[[350,350],"mapped",[351]],[[351,351],"valid"],[[352,352],"mapped",[353]],[[353,353],"valid"],[[354,354],"mapped",[355]],[[355,355],"valid"],[[356,356],"mapped",[357]],[[357,357],"valid"],[[358,358],"mapped",[359]],[[359,359],"valid"],[[360,360],"mapped",[361]],[[361,361],"valid"],[[362,362],"mapped",[363]],[[363,363],"valid"],[[364,364],"mapped",[365]],[[365,365],"valid"],[[366,366],"mapped",[367]],[[367,367],"valid"],[[368,368],"mapped",[369]],[[369,369],"valid"],[[370,370],"mapped",[371]],[[371,371],"valid"],[[372,372],"mapped",[373]],[[373,373],"valid"],[[374,374],"mapped",[375]],[[375,375],"valid"],[[376,376],"mapped",[255]],[[377,377],"mapped",[378]],[[378,378],"valid"],[[379,379],"mapped",[380]],[[380,380],"valid"],[[381,381],"mapped",[382]],[[382,382],"valid"],[[383,383],"mapped",[115]],[[384,384],"valid"],[[385,385],"mapped",[595]],[[386,386],"mapped",[387]],[[387,387],"valid"],[[388,388],"mapped",[389]],[[389,389],"valid"],[[390,390],"mapped",[596]],[[391,391],"mapped",[392]],[[392,392],"valid"],[[393,393],"mapped",[598]],[[394,394],"mapped",[599]],[[395,395],"mapped",[396]],[[396,397],"valid"],[[398,398],"mapped",[477]],[[399,399],"mapped",[601]],[[400,400],"mapped",[603]],[[401,401],"mapped",[402]],[[402,402],"valid"],[[403,403],"mapped",[608]],[[404,404],"mapped",[611]],[[405,405],"valid"],[[406,406],"mapped",[617]],[[407,407],"mapped",[616]],[[408,408],"mapped",[409]],[[409,411],"valid"],[[412,412],"mapped",[623]],[[413,413],"mapped",[626]],[[414,414],"valid"],[[415,415],"mapped",[629]],[[416,416],"mapped",[417]],[[417,417],"valid"],[[418,418],"mapped",[419]],[[419,419],"valid"],[[420,420],"mapped",[421]],[[421,421],"valid"],[[422,422],"mapped",[640]],[[423,423],"mapped",[424]],[[424,424],"valid"],[[425,425],"mapped",[643]],[[426,427],"valid"],[[428,428],"mapped",[429]],[[429,429],"valid"],[[430,430],"mapped",[648]],[[431,431],"mapped",[432]],[[432,432],"valid"],[[433,433],"mapped",[650]],[[434,434],"mapped",[651]],[[435,435],"mapped",[436]],[[436,436
],"valid"],[[437,437],"mapped",[438]],[[438,438],"valid"],[[439,439],"mapped",[658]],[[440,440],"mapped",[441]],[[441,443],"valid"],[[444,444],"mapped",[445]],[[445,451],"valid"],[[452,454],"mapped",[100,382]],[[455,457],"mapped",[108,106]],[[458,460],"mapped",[110,106]],[[461,461],"mapped",[462]],[[462,462],"valid"],[[463,463],"mapped",[464]],[[464,464],"valid"],[[465,465],"mapped",[466]],[[466,466],"valid"],[[467,467],"mapped",[468]],[[468,468],"valid"],[[469,469],"mapped",[470]],[[470,470],"valid"],[[471,471],"mapped",[472]],[[472,472],"valid"],[[473,473],"mapped",[474]],[[474,474],"valid"],[[475,475],"mapped",[476]],[[476,477],"valid"],[[478,478],"mapped",[479]],[[479,479],"valid"],[[480,480],"mapped",[481]],[[481,481],"valid"],[[482,482],"mapped",[483]],[[483,483],"valid"],[[484,484],"mapped",[485]],[[485,485],"valid"],[[486,486],"mapped",[487]],[[487,487],"valid"],[[488,488],"mapped",[489]],[[489,489],"valid"],[[490,490],"mapped",[491]],[[491,491],"valid"],[[492,492],"mapped",[493]],[[493,493],"valid"],[[494,494],"mapped",[495]],[[495,496],"valid"],[[497,499],"mapped",[100,122]],[[500,500],"mapped",[501]],[[501,501],"valid"],[[502,502],"mapped",[405]],[[503,503],"mapped",[447]],[[504,504],"mapped",[505]],[[505,505],"valid"],[[506,506],"mapped",[507]],[[507,507],"valid"],[[508,508],"mapped",[509]],[[509,509],"valid"],[[510,510],"mapped",[511]],[[511,511],"valid"],[[512,512],"mapped",[513]],[[513,513],"valid"],[[514,514],"mapped",[515]],[[515,515],"valid"],[[516,516],"mapped",[517]],[[517,517],"valid"],[[518,518],"mapped",[519]],[[519,519],"valid"],[[520,520],"mapped",[521]],[[521,521],"valid"],[[522,522],"mapped",[523]],[[523,523],"valid"],[[524,524],"mapped",[525]],[[525,525],"valid"],[[526,526],"mapped",[527]],[[527,527],"valid"],[[528,528],"mapped",[529]],[[529,529],"valid"],[[530,530],"mapped",[531]],[[531,531],"valid"],[[532,532],"mapped",[533]],[[533,533],"valid"],[[534,534],"mapped",[535]],[[535,535],"valid"],[[536,536],"mapped",[537]],[[537,537],"valid"],[[538,538],"mapped",[539]],[[539,539],"valid"],[[540,540],"mapped",[541]],[[541,541],"valid"],[[542,542],"mapped",[543]],[[543,543],"valid"],[[544,544],"mapped",[414]],[[545,545],"valid"],[[546,546],"mapped",[547]],[[547,547],"valid"],[[548,548],"mapped",[549]],[[549,549],"valid"],[[550,550],"mapped",[551]],[[551,551],"valid"],[[552,552],"mapped",[553]],[[553,553],"valid"],[[554,554],"mapped",[555]],[[555,555],"valid"],[[556,556],"mapped",[557]],[[557,557],"valid"],[[558,558],"mapped",[559]],[[559,559],"valid"],[[560,560],"mapped",[561]],[[561,561],"valid"],[[562,562],"mapped",[563]],[[563,563],"valid"],[[564,566],"valid"],[[567,569],"valid"],[[570,570],"mapped",[11365]],[[571,571],"mapped",[572]],[[572,572],"valid"],[[573,573],"mapped",[410]],[[574,574],"mapped",[11366]],[[575,576],"valid"],[[577,577],"mapped",[578]],[[578,578],"valid"],[[579,579],"mapped",[384]],[[580,580],"mapped",[649]],[[581,581],"mapped",[652]],[[582,582],"mapped",[583]],[[583,583],"valid"],[[584,584],"mapped",[585]],[[585,585],"valid"],[[586,586],"mapped",[587]],[[587,587],"valid"],[[588,588],"mapped",[589]],[[589,589],"valid"],[[590,590],"mapped",[591]],[[591,591],"valid"],[[592,680],"valid"],[[681,685],"valid"],[[686,687],"valid"],[[688,688],"mapped",[104]],[[689,689],"mapped",[614]],[[690,690],"mapped",[106]],[[691,691],"mapped",[114]],[[692,692],"mapped",[633]],[[693,693],"mapped",[635]],[[694,694],"mapped",[641]],[[695,695],"mapped",[119]],[[696,696],"mapped",[121]],[[697,705],"valid"],[[706,709],"valid",[],"NV8"],[[710,721],"valid"],[[722,727],"val
id",[],"NV8"],[[728,728],"disallowed_STD3_mapped",[32,774]],[[729,729],"disallowed_STD3_mapped",[32,775]],[[730,730],"disallowed_STD3_mapped",[32,778]],[[731,731],"disallowed_STD3_mapped",[32,808]],[[732,732],"disallowed_STD3_mapped",[32,771]],[[733,733],"disallowed_STD3_mapped",[32,779]],[[734,734],"valid",[],"NV8"],[[735,735],"valid",[],"NV8"],[[736,736],"mapped",[611]],[[737,737],"mapped",[108]],[[738,738],"mapped",[115]],[[739,739],"mapped",[120]],[[740,740],"mapped",[661]],[[741,745],"valid",[],"NV8"],[[746,747],"valid",[],"NV8"],[[748,748],"valid"],[[749,749],"valid",[],"NV8"],[[750,750],"valid"],[[751,767],"valid",[],"NV8"],[[768,831],"valid"],[[832,832],"mapped",[768]],[[833,833],"mapped",[769]],[[834,834],"valid"],[[835,835],"mapped",[787]],[[836,836],"mapped",[776,769]],[[837,837],"mapped",[953]],[[838,846],"valid"],[[847,847],"ignored"],[[848,855],"valid"],[[856,860],"valid"],[[861,863],"valid"],[[864,865],"valid"],[[866,866],"valid"],[[867,879],"valid"],[[880,880],"mapped",[881]],[[881,881],"valid"],[[882,882],"mapped",[883]],[[883,883],"valid"],[[884,884],"mapped",[697]],[[885,885],"valid"],[[886,886],"mapped",[887]],[[887,887],"valid"],[[888,889],"disallowed"],[[890,890],"disallowed_STD3_mapped",[32,953]],[[891,893],"valid"],[[894,894],"disallowed_STD3_mapped",[59]],[[895,895],"mapped",[1011]],[[896,899],"disallowed"],[[900,900],"disallowed_STD3_mapped",[32,769]],[[901,901],"disallowed_STD3_mapped",[32,776,769]],[[902,902],"mapped",[940]],[[903,903],"mapped",[183]],[[904,904],"mapped",[941]],[[905,905],"mapped",[942]],[[906,906],"mapped",[943]],[[907,907],"disallowed"],[[908,908],"mapped",[972]],[[909,909],"disallowed"],[[910,910],"mapped",[973]],[[911,911],"mapped",[974]],[[912,912],"valid"],[[913,913],"mapped",[945]],[[914,914],"mapped",[946]],[[915,915],"mapped",[947]],[[916,916],"mapped",[948]],[[917,917],"mapped",[949]],[[918,918],"mapped",[950]],[[919,919],"mapped",[951]],[[920,920],"mapped",[952]],[[921,921],"mapped",[953]],[[922,922],"mapped",[954]],[[923,923],"mapped",[955]],[[924,924],"mapped",[956]],[[925,925],"mapped",[957]],[[926,926],"mapped",[958]],[[927,927],"mapped",[959]],[[928,928],"mapped",[960]],[[929,929],"mapped",[961]],[[930,930],"disallowed"],[[931,931],"mapped",[963]],[[932,932],"mapped",[964]],[[933,933],"mapped",[965]],[[934,934],"mapped",[966]],[[935,935],"mapped",[967]],[[936,936],"mapped",[968]],[[937,937],"mapped",[969]],[[938,938],"mapped",[970]],[[939,939],"mapped",[971]],[[940,961],"valid"],[[962,962],"deviation",[963]],[[963,974],"valid"],[[975,975],"mapped",[983]],[[976,976],"mapped",[946]],[[977,977],"mapped",[952]],[[978,978],"mapped",[965]],[[979,979],"mapped",[973]],[[980,980],"mapped",[971]],[[981,981],"mapped",[966]],[[982,982],"mapped",[960]],[[983,983],"valid"],[[984,984],"mapped",[985]],[[985,985],"valid"],[[986,986],"mapped",[987]],[[987,987],"valid"],[[988,988],"mapped",[989]],[[989,989],"valid"],[[990,990],"mapped",[991]],[[991,991],"valid"],[[992,992],"mapped",[993]],[[993,993],"valid"],[[994,994],"mapped",[995]],[[995,995],"valid"],[[996,996],"mapped",[997]],[[997,997],"valid"],[[998,998],"mapped",[999]],[[999,999],"valid"],[[1000,1000],"mapped",[1001]],[[1001,1001],"valid"],[[1002,1002],"mapped",[1003]],[[1003,1003],"valid"],[[1004,1004],"mapped",[1005]],[[1005,1005],"valid"],[[1006,1006],"mapped",[1007]],[[1007,1007],"valid"],[[1008,1008],"mapped",[954]],[[1009,1009],"mapped",[961]],[[1010,1010],"mapped",[963]],[[1011,1011],"valid"],[[1012,1012],"mapped",[952]],[[1013,1013],"mapped",[949]],[[1014,1014],"valid",[],"NV8"],[[10
15,1015],"mapped",[1016]],[[1016,1016],"valid"],[[1017,1017],"mapped",[963]],[[1018,1018],"mapped",[1019]],[[1019,1019],"valid"],[[1020,1020],"valid"],[[1021,1021],"mapped",[891]],[[1022,1022],"mapped",[892]],[[1023,1023],"mapped",[893]],[[1024,1024],"mapped",[1104]],[[1025,1025],"mapped",[1105]],[[1026,1026],"mapped",[1106]],[[1027,1027],"mapped",[1107]],[[1028,1028],"mapped",[1108]],[[1029,1029],"mapped",[1109]],[[1030,1030],"mapped",[1110]],[[1031,1031],"mapped",[1111]],[[1032,1032],"mapped",[1112]],[[1033,1033],"mapped",[1113]],[[1034,1034],"mapped",[1114]],[[1035,1035],"mapped",[1115]],[[1036,1036],"mapped",[1116]],[[1037,1037],"mapped",[1117]],[[1038,1038],"mapped",[1118]],[[1039,1039],"mapped",[1119]],[[1040,1040],"mapped",[1072]],[[1041,1041],"mapped",[1073]],[[1042,1042],"mapped",[1074]],[[1043,1043],"mapped",[1075]],[[1044,1044],"mapped",[1076]],[[1045,1045],"mapped",[1077]],[[1046,1046],"mapped",[1078]],[[1047,1047],"mapped",[1079]],[[1048,1048],"mapped",[1080]],[[1049,1049],"mapped",[1081]],[[1050,1050],"mapped",[1082]],[[1051,1051],"mapped",[1083]],[[1052,1052],"mapped",[1084]],[[1053,1053],"mapped",[1085]],[[1054,1054],"mapped",[1086]],[[1055,1055],"mapped",[1087]],[[1056,1056],"mapped",[1088]],[[1057,1057],"mapped",[1089]],[[1058,1058],"mapped",[1090]],[[1059,1059],"mapped",[1091]],[[1060,1060],"mapped",[1092]],[[1061,1061],"mapped",[1093]],[[1062,1062],"mapped",[1094]],[[1063,1063],"mapped",[1095]],[[1064,1064],"mapped",[1096]],[[1065,1065],"mapped",[1097]],[[1066,1066],"mapped",[1098]],[[1067,1067],"mapped",[1099]],[[1068,1068],"mapped",[1100]],[[1069,1069],"mapped",[1101]],[[1070,1070],"mapped",[1102]],[[1071,1071],"mapped",[1103]],[[1072,1103],"valid"],[[1104,1104],"valid"],[[1105,1116],"valid"],[[1117,1117],"valid"],[[1118,1119],"valid"],[[1120,1120],"mapped",[1121]],[[1121,1121],"valid"],[[1122,1122],"mapped",[1123]],[[1123,1123],"valid"],[[1124,1124],"mapped",[1125]],[[1125,1125],"valid"],[[1126,1126],"mapped",[1127]],[[1127,1127],"valid"],[[1128,1128],"mapped",[1129]],[[1129,1129],"valid"],[[1130,1130],"mapped",[1131]],[[1131,1131],"valid"],[[1132,1132],"mapped",[1133]],[[1133,1133],"valid"],[[1134,1134],"mapped",[1135]],[[1135,1135],"valid"],[[1136,1136],"mapped",[1137]],[[1137,1137],"valid"],[[1138,1138],"mapped",[1139]],[[1139,1139],"valid"],[[1140,1140],"mapped",[1141]],[[1141,1141],"valid"],[[1142,1142],"mapped",[1143]],[[1143,1143],"valid"],[[1144,1144],"mapped",[1145]],[[1145,1145],"valid"],[[1146,1146],"mapped",[1147]],[[1147,1147],"valid"],[[1148,1148],"mapped",[1149]],[[1149,1149],"valid"],[[1150,1150],"mapped",[1151]],[[1151,1151],"valid"],[[1152,1152],"mapped",[1153]],[[1153,1153],"valid"],[[1154,1154],"valid",[],"NV8"],[[1155,1158],"valid"],[[1159,1159],"valid"],[[1160,1161],"valid",[],"NV8"],[[1162,1162],"mapped",[1163]],[[1163,1163],"valid"],[[1164,1164],"mapped",[1165]],[[1165,1165],"valid"],[[1166,1166],"mapped",[1167]],[[1167,1167],"valid"],[[1168,1168],"mapped",[1169]],[[1169,1169],"valid"],[[1170,1170],"mapped",[1171]],[[1171,1171],"valid"],[[1172,1172],"mapped",[1173]],[[1173,1173],"valid"],[[1174,1174],"mapped",[1175]],[[1175,1175],"valid"],[[1176,1176],"mapped",[1177]],[[1177,1177],"valid"],[[1178,1178],"mapped",[1179]],[[1179,1179],"valid"],[[1180,1180],"mapped",[1181]],[[1181,1181],"valid"],[[1182,1182],"mapped",[1183]],[[1183,1183],"valid"],[[1184,1184],"mapped",[1185]],[[1185,1185],"valid"],[[1186,1186],"mapped",[1187]],[[1187,1187],"valid"],[[1188,1188],"mapped",[1189]],[[1189,1189],"valid"],[[1190,1190],"mapped",[1191]],[[1191,1191],"valid
"],[[1192,1192],"mapped",[1193]],[[1193,1193],"valid"],[[1194,1194],"mapped",[1195]],[[1195,1195],"valid"],[[1196,1196],"mapped",[1197]],[[1197,1197],"valid"],[[1198,1198],"mapped",[1199]],[[1199,1199],"valid"],[[1200,1200],"mapped",[1201]],[[1201,1201],"valid"],[[1202,1202],"mapped",[1203]],[[1203,1203],"valid"],[[1204,1204],"mapped",[1205]],[[1205,1205],"valid"],[[1206,1206],"mapped",[1207]],[[1207,1207],"valid"],[[1208,1208],"mapped",[1209]],[[1209,1209],"valid"],[[1210,1210],"mapped",[1211]],[[1211,1211],"valid"],[[1212,1212],"mapped",[1213]],[[1213,1213],"valid"],[[1214,1214],"mapped",[1215]],[[1215,1215],"valid"],[[1216,1216],"disallowed"],[[1217,1217],"mapped",[1218]],[[1218,1218],"valid"],[[1219,1219],"mapped",[1220]],[[1220,1220],"valid"],[[1221,1221],"mapped",[1222]],[[1222,1222],"valid"],[[1223,1223],"mapped",[1224]],[[1224,1224],"valid"],[[1225,1225],"mapped",[1226]],[[1226,1226],"valid"],[[1227,1227],"mapped",[1228]],[[1228,1228],"valid"],[[1229,1229],"mapped",[1230]],[[1230,1230],"valid"],[[1231,1231],"valid"],[[1232,1232],"mapped",[1233]],[[1233,1233],"valid"],[[1234,1234],"mapped",[1235]],[[1235,1235],"valid"],[[1236,1236],"mapped",[1237]],[[1237,1237],"valid"],[[1238,1238],"mapped",[1239]],[[1239,1239],"valid"],[[1240,1240],"mapped",[1241]],[[1241,1241],"valid"],[[1242,1242],"mapped",[1243]],[[1243,1243],"valid"],[[1244,1244],"mapped",[1245]],[[1245,1245],"valid"],[[1246,1246],"mapped",[1247]],[[1247,1247],"valid"],[[1248,1248],"mapped",[1249]],[[1249,1249],"valid"],[[1250,1250],"mapped",[1251]],[[1251,1251],"valid"],[[1252,1252],"mapped",[1253]],[[1253,1253],"valid"],[[1254,1254],"mapped",[1255]],[[1255,1255],"valid"],[[1256,1256],"mapped",[1257]],[[1257,1257],"valid"],[[1258,1258],"mapped",[1259]],[[1259,1259],"valid"],[[1260,1260],"mapped",[1261]],[[1261,1261],"valid"],[[1262,1262],"mapped",[1263]],[[1263,1263],"valid"],[[1264,1264],"mapped",[1265]],[[1265,1265],"valid"],[[1266,1266],"mapped",[1267]],[[1267,1267],"valid"],[[1268,1268],"mapped",[1269]],[[1269,1269],"valid"],[[1270,1270],"mapped",[1271]],[[1271,1271],"valid"],[[1272,1272],"mapped",[1273]],[[1273,1273],"valid"],[[1274,1274],"mapped",[1275]],[[1275,1275],"valid"],[[1276,1276],"mapped",[1277]],[[1277,1277],"valid"],[[1278,1278],"mapped",[1279]],[[1279,1279],"valid"],[[1280,1280],"mapped",[1281]],[[1281,1281],"valid"],[[1282,1282],"mapped",[1283]],[[1283,1283],"valid"],[[1284,1284],"mapped",[1285]],[[1285,1285],"valid"],[[1286,1286],"mapped",[1287]],[[1287,1287],"valid"],[[1288,1288],"mapped",[1289]],[[1289,1289],"valid"],[[1290,1290],"mapped",[1291]],[[1291,1291],"valid"],[[1292,1292],"mapped",[1293]],[[1293,1293],"valid"],[[1294,1294],"mapped",[1295]],[[1295,1295],"valid"],[[1296,1296],"mapped",[1297]],[[1297,1297],"valid"],[[1298,1298],"mapped",[1299]],[[1299,1299],"valid"],[[1300,1300],"mapped",[1301]],[[1301,1301],"valid"],[[1302,1302],"mapped",[1303]],[[1303,1303],"valid"],[[1304,1304],"mapped",[1305]],[[1305,1305],"valid"],[[1306,1306],"mapped",[1307]],[[1307,1307],"valid"],[[1308,1308],"mapped",[1309]],[[1309,1309],"valid"],[[1310,1310],"mapped",[1311]],[[1311,1311],"valid"],[[1312,1312],"mapped",[1313]],[[1313,1313],"valid"],[[1314,1314],"mapped",[1315]],[[1315,1315],"valid"],[[1316,1316],"mapped",[1317]],[[1317,1317],"valid"],[[1318,1318],"mapped",[1319]],[[1319,1319],"valid"],[[1320,1320],"mapped",[1321]],[[1321,1321],"valid"],[[1322,1322],"mapped",[1323]],[[1323,1323],"valid"],[[1324,1324],"mapped",[1325]],[[1325,1325],"valid"],[[1326,1326],"mapped",[1327]],[[1327,1327],"valid"],[[1328,1328],"disal
lowed"],[[1329,1329],"mapped",[1377]],[[1330,1330],"mapped",[1378]],[[1331,1331],"mapped",[1379]],[[1332,1332],"mapped",[1380]],[[1333,1333],"mapped",[1381]],[[1334,1334],"mapped",[1382]],[[1335,1335],"mapped",[1383]],[[1336,1336],"mapped",[1384]],[[1337,1337],"mapped",[1385]],[[1338,1338],"mapped",[1386]],[[1339,1339],"mapped",[1387]],[[1340,1340],"mapped",[1388]],[[1341,1341],"mapped",[1389]],[[1342,1342],"mapped",[1390]],[[1343,1343],"mapped",[1391]],[[1344,1344],"mapped",[1392]],[[1345,1345],"mapped",[1393]],[[1346,1346],"mapped",[1394]],[[1347,1347],"mapped",[1395]],[[1348,1348],"mapped",[1396]],[[1349,1349],"mapped",[1397]],[[1350,1350],"mapped",[1398]],[[1351,1351],"mapped",[1399]],[[1352,1352],"mapped",[1400]],[[1353,1353],"mapped",[1401]],[[1354,1354],"mapped",[1402]],[[1355,1355],"mapped",[1403]],[[1356,1356],"mapped",[1404]],[[1357,1357],"mapped",[1405]],[[1358,1358],"mapped",[1406]],[[1359,1359],"mapped",[1407]],[[1360,1360],"mapped",[1408]],[[1361,1361],"mapped",[1409]],[[1362,1362],"mapped",[1410]],[[1363,1363],"mapped",[1411]],[[1364,1364],"mapped",[1412]],[[1365,1365],"mapped",[1413]],[[1366,1366],"mapped",[1414]],[[1367,1368],"disallowed"],[[1369,1369],"valid"],[[1370,1375],"valid",[],"NV8"],[[1376,1376],"disallowed"],[[1377,1414],"valid"],[[1415,1415],"mapped",[1381,1410]],[[1416,1416],"disallowed"],[[1417,1417],"valid",[],"NV8"],[[1418,1418],"valid",[],"NV8"],[[1419,1420],"disallowed"],[[1421,1422],"valid",[],"NV8"],[[1423,1423],"valid",[],"NV8"],[[1424,1424],"disallowed"],[[1425,1441],"valid"],[[1442,1442],"valid"],[[1443,1455],"valid"],[[1456,1465],"valid"],[[1466,1466],"valid"],[[1467,1469],"valid"],[[1470,1470],"valid",[],"NV8"],[[1471,1471],"valid"],[[1472,1472],"valid",[],"NV8"],[[1473,1474],"valid"],[[1475,1475],"valid",[],"NV8"],[[1476,1476],"valid"],[[1477,1477],"valid"],[[1478,1478],"valid",[],"NV8"],[[1479,1479],"valid"],[[1480,1487],"disallowed"],[[1488,1514],"valid"],[[1515,1519],"disallowed"],[[1520,1524],"valid"],[[1525,1535],"disallowed"],[[1536,1539],"disallowed"],[[1540,1540],"disallowed"],[[1541,1541],"disallowed"],[[1542,1546],"valid",[],"NV8"],[[1547,1547],"valid",[],"NV8"],[[1548,1548],"valid",[],"NV8"],[[1549,1551],"valid",[],"NV8"],[[1552,1557],"valid"],[[1558,1562],"valid"],[[1563,1563],"valid",[],"NV8"],[[1564,1564],"disallowed"],[[1565,1565],"disallowed"],[[1566,1566],"valid",[],"NV8"],[[1567,1567],"valid",[],"NV8"],[[1568,1568],"valid"],[[1569,1594],"valid"],[[1595,1599],"valid"],[[1600,1600],"valid",[],"NV8"],[[1601,1618],"valid"],[[1619,1621],"valid"],[[1622,1624],"valid"],[[1625,1630],"valid"],[[1631,1631],"valid"],[[1632,1641],"valid"],[[1642,1645],"valid",[],"NV8"],[[1646,1647],"valid"],[[1648,1652],"valid"],[[1653,1653],"mapped",[1575,1652]],[[1654,1654],"mapped",[1608,1652]],[[1655,1655],"mapped",[1735,1652]],[[1656,1656],"mapped",[1610,1652]],[[1657,1719],"valid"],[[1720,1721],"valid"],[[1722,1726],"valid"],[[1727,1727],"valid"],[[1728,1742],"valid"],[[1743,1743],"valid"],[[1744,1747],"valid"],[[1748,1748],"valid",[],"NV8"],[[1749,1756],"valid"],[[1757,1757],"disallowed"],[[1758,1758],"valid",[],"NV8"],[[1759,1768],"valid"],[[1769,1769],"valid",[],"NV8"],[[1770,1773],"valid"],[[1774,1775],"valid"],[[1776,1785],"valid"],[[1786,1790],"valid"],[[1791,1791],"valid"],[[1792,1805],"valid",[],"NV8"],[[1806,1806],"disallowed"],[[1807,1807],"disallowed"],[[1808,1836],"valid"],[[1837,1839],"valid"],[[1840,1866],"valid"],[[1867,1868],"disallowed"],[[1869,1871],"valid"],[[1872,1901],"valid"],[[1902,1919],"valid"],[[1920,1968],"valid"],[[1969,1969],
"valid"],[[1970,1983],"disallowed"],[[1984,2037],"valid"],[[2038,2042],"valid",[],"NV8"],[[2043,2047],"disallowed"],[[2048,2093],"valid"],[[2094,2095],"disallowed"],[[2096,2110],"valid",[],"NV8"],[[2111,2111],"disallowed"],[[2112,2139],"valid"],[[2140,2141],"disallowed"],[[2142,2142],"valid",[],"NV8"],[[2143,2207],"disallowed"],[[2208,2208],"valid"],[[2209,2209],"valid"],[[2210,2220],"valid"],[[2221,2226],"valid"],[[2227,2228],"valid"],[[2229,2274],"disallowed"],[[2275,2275],"valid"],[[2276,2302],"valid"],[[2303,2303],"valid"],[[2304,2304],"valid"],[[2305,2307],"valid"],[[2308,2308],"valid"],[[2309,2361],"valid"],[[2362,2363],"valid"],[[2364,2381],"valid"],[[2382,2382],"valid"],[[2383,2383],"valid"],[[2384,2388],"valid"],[[2389,2389],"valid"],[[2390,2391],"valid"],[[2392,2392],"mapped",[2325,2364]],[[2393,2393],"mapped",[2326,2364]],[[2394,2394],"mapped",[2327,2364]],[[2395,2395],"mapped",[2332,2364]],[[2396,2396],"mapped",[2337,2364]],[[2397,2397],"mapped",[2338,2364]],[[2398,2398],"mapped",[2347,2364]],[[2399,2399],"mapped",[2351,2364]],[[2400,2403],"valid"],[[2404,2405],"valid",[],"NV8"],[[2406,2415],"valid"],[[2416,2416],"valid",[],"NV8"],[[2417,2418],"valid"],[[2419,2423],"valid"],[[2424,2424],"valid"],[[2425,2426],"valid"],[[2427,2428],"valid"],[[2429,2429],"valid"],[[2430,2431],"valid"],[[2432,2432],"valid"],[[2433,2435],"valid"],[[2436,2436],"disallowed"],[[2437,2444],"valid"],[[2445,2446],"disallowed"],[[2447,2448],"valid"],[[2449,2450],"disallowed"],[[2451,2472],"valid"],[[2473,2473],"disallowed"],[[2474,2480],"valid"],[[2481,2481],"disallowed"],[[2482,2482],"valid"],[[2483,2485],"disallowed"],[[2486,2489],"valid"],[[2490,2491],"disallowed"],[[2492,2492],"valid"],[[2493,2493],"valid"],[[2494,2500],"valid"],[[2501,2502],"disallowed"],[[2503,2504],"valid"],[[2505,2506],"disallowed"],[[2507,2509],"valid"],[[2510,2510],"valid"],[[2511,2518],"disallowed"],[[2519,2519],"valid"],[[2520,2523],"disallowed"],[[2524,2524],"mapped",[2465,2492]],[[2525,2525],"mapped",[2466,2492]],[[2526,2526],"disallowed"],[[2527,2527],"mapped",[2479,2492]],[[2528,2531],"valid"],[[2532,2533],"disallowed"],[[2534,2545],"valid"],[[2546,2554],"valid",[],"NV8"],[[2555,2555],"valid",[],"NV8"],[[2556,2560],"disallowed"],[[2561,2561],"valid"],[[2562,2562],"valid"],[[2563,2563],"valid"],[[2564,2564],"disallowed"],[[2565,2570],"valid"],[[2571,2574],"disallowed"],[[2575,2576],"valid"],[[2577,2578],"disallowed"],[[2579,2600],"valid"],[[2601,2601],"disallowed"],[[2602,2608],"valid"],[[2609,2609],"disallowed"],[[2610,2610],"valid"],[[2611,2611],"mapped",[2610,2620]],[[2612,2612],"disallowed"],[[2613,2613],"valid"],[[2614,2614],"mapped",[2616,2620]],[[2615,2615],"disallowed"],[[2616,2617],"valid"],[[2618,2619],"disallowed"],[[2620,2620],"valid"],[[2621,2621],"disallowed"],[[2622,2626],"valid"],[[2627,2630],"disallowed"],[[2631,2632],"valid"],[[2633,2634],"disallowed"],[[2635,2637],"valid"],[[2638,2640],"disallowed"],[[2641,2641],"valid"],[[2642,2648],"disallowed"],[[2649,2649],"mapped",[2582,2620]],[[2650,2650],"mapped",[2583,2620]],[[2651,2651],"mapped",[2588,2620]],[[2652,2652],"valid"],[[2653,2653],"disallowed"],[[2654,2654],"mapped",[2603,2620]],[[2655,2661],"disallowed"],[[2662,2676],"valid"],[[2677,2677],"valid"],[[2678,2688],"disallowed"],[[2689,2691],"valid"],[[2692,2692],"disallowed"],[[2693,2699],"valid"],[[2700,2700],"valid"],[[2701,2701],"valid"],[[2702,2702],"disallowed"],[[2703,2705],"valid"],[[2706,2706],"disallowed"],[[2707,2728],"valid"],[[2729,2729],"disallowed"],[[2730,2736],"valid"],[[2737,2737],"disallo
wed"],[[2738,2739],"valid"],[[2740,2740],"disallowed"],[[2741,2745],"valid"],[[2746,2747],"disallowed"],[[2748,2757],"valid"],[[2758,2758],"disallowed"],[[2759,2761],"valid"],[[2762,2762],"disallowed"],[[2763,2765],"valid"],[[2766,2767],"disallowed"],[[2768,2768],"valid"],[[2769,2783],"disallowed"],[[2784,2784],"valid"],[[2785,2787],"valid"],[[2788,2789],"disallowed"],[[2790,2799],"valid"],[[2800,2800],"valid",[],"NV8"],[[2801,2801],"valid",[],"NV8"],[[2802,2808],"disallowed"],[[2809,2809],"valid"],[[2810,2816],"disallowed"],[[2817,2819],"valid"],[[2820,2820],"disallowed"],[[2821,2828],"valid"],[[2829,2830],"disallowed"],[[2831,2832],"valid"],[[2833,2834],"disallowed"],[[2835,2856],"valid"],[[2857,2857],"disallowed"],[[2858,2864],"valid"],[[2865,2865],"disallowed"],[[2866,2867],"valid"],[[2868,2868],"disallowed"],[[2869,2869],"valid"],[[2870,2873],"valid"],[[2874,2875],"disallowed"],[[2876,2883],"valid"],[[2884,2884],"valid"],[[2885,2886],"disallowed"],[[2887,2888],"valid"],[[2889,2890],"disallowed"],[[2891,2893],"valid"],[[2894,2901],"disallowed"],[[2902,2903],"valid"],[[2904,2907],"disallowed"],[[2908,2908],"mapped",[2849,2876]],[[2909,2909],"mapped",[2850,2876]],[[2910,2910],"disallowed"],[[2911,2913],"valid"],[[2914,2915],"valid"],[[2916,2917],"disallowed"],[[2918,2927],"valid"],[[2928,2928],"valid",[],"NV8"],[[2929,2929],"valid"],[[2930,2935],"valid",[],"NV8"],[[2936,2945],"disallowed"],[[2946,2947],"valid"],[[2948,2948],"disallowed"],[[2949,2954],"valid"],[[2955,2957],"disallowed"],[[2958,2960],"valid"],[[2961,2961],"disallowed"],[[2962,2965],"valid"],[[2966,2968],"disallowed"],[[2969,2970],"valid"],[[2971,2971],"disallowed"],[[2972,2972],"valid"],[[2973,2973],"disallowed"],[[2974,2975],"valid"],[[2976,2978],"disallowed"],[[2979,2980],"valid"],[[2981,2983],"disallowed"],[[2984,2986],"valid"],[[2987,2989],"disallowed"],[[2990,2997],"valid"],[[2998,2998],"valid"],[[2999,3001],"valid"],[[3002,3005],"disallowed"],[[3006,3010],"valid"],[[3011,3013],"disallowed"],[[3014,3016],"valid"],[[3017,3017],"disallowed"],[[3018,3021],"valid"],[[3022,3023],"disallowed"],[[3024,3024],"valid"],[[3025,3030],"disallowed"],[[3031,3031],"valid"],[[3032,3045],"disallowed"],[[3046,3046],"valid"],[[3047,3055],"valid"],[[3056,3058],"valid",[],"NV8"],[[3059,3066],"valid",[],"NV8"],[[3067,3071],"disallowed"],[[3072,3072],"valid"],[[3073,3075],"valid"],[[3076,3076],"disallowed"],[[3077,3084],"valid"],[[3085,3085],"disallowed"],[[3086,3088],"valid"],[[3089,3089],"disallowed"],[[3090,3112],"valid"],[[3113,3113],"disallowed"],[[3114,3123],"valid"],[[3124,3124],"valid"],[[3125,3129],"valid"],[[3130,3132],"disallowed"],[[3133,3133],"valid"],[[3134,3140],"valid"],[[3141,3141],"disallowed"],[[3142,3144],"valid"],[[3145,3145],"disallowed"],[[3146,3149],"valid"],[[3150,3156],"disallowed"],[[3157,3158],"valid"],[[3159,3159],"disallowed"],[[3160,3161],"valid"],[[3162,3162],"valid"],[[3163,3167],"disallowed"],[[3168,3169],"valid"],[[3170,3171],"valid"],[[3172,3173],"disallowed"],[[3174,3183],"valid"],[[3184,3191],"disallowed"],[[3192,3199],"valid",[],"NV8"],[[3200,3200],"disallowed"],[[3201,3201],"valid"],[[3202,3203],"valid"],[[3204,3204],"disallowed"],[[3205,3212],"valid"],[[3213,3213],"disallowed"],[[3214,3216],"valid"],[[3217,3217],"disallowed"],[[3218,3240],"valid"],[[3241,3241],"disallowed"],[[3242,3251],"valid"],[[3252,3252],"disallowed"],[[3253,3257],"valid"],[[3258,3259],"disallowed"],[[3260,3261],"valid"],[[3262,3268],"valid"],[[3269,3269],"disallowed"],[[3270,3272],"valid"],[[3273,3273],"disallowed"],[[3274,3277],"
valid"],[[3278,3284],"disallowed"],[[3285,3286],"valid"],[[3287,3293],"disallowed"],[[3294,3294],"valid"],[[3295,3295],"disallowed"],[[3296,3297],"valid"],[[3298,3299],"valid"],[[3300,3301],"disallowed"],[[3302,3311],"valid"],[[3312,3312],"disallowed"],[[3313,3314],"valid"],[[3315,3328],"disallowed"],[[3329,3329],"valid"],[[3330,3331],"valid"],[[3332,3332],"disallowed"],[[3333,3340],"valid"],[[3341,3341],"disallowed"],[[3342,3344],"valid"],[[3345,3345],"disallowed"],[[3346,3368],"valid"],[[3369,3369],"valid"],[[3370,3385],"valid"],[[3386,3386],"valid"],[[3387,3388],"disallowed"],[[3389,3389],"valid"],[[3390,3395],"valid"],[[3396,3396],"valid"],[[3397,3397],"disallowed"],[[3398,3400],"valid"],[[3401,3401],"disallowed"],[[3402,3405],"valid"],[[3406,3406],"valid"],[[3407,3414],"disallowed"],[[3415,3415],"valid"],[[3416,3422],"disallowed"],[[3423,3423],"valid"],[[3424,3425],"valid"],[[3426,3427],"valid"],[[3428,3429],"disallowed"],[[3430,3439],"valid"],[[3440,3445],"valid",[],"NV8"],[[3446,3448],"disallowed"],[[3449,3449],"valid",[],"NV8"],[[3450,3455],"valid"],[[3456,3457],"disallowed"],[[3458,3459],"valid"],[[3460,3460],"disallowed"],[[3461,3478],"valid"],[[3479,3481],"disallowed"],[[3482,3505],"valid"],[[3506,3506],"disallowed"],[[3507,3515],"valid"],[[3516,3516],"disallowed"],[[3517,3517],"valid"],[[3518,3519],"disallowed"],[[3520,3526],"valid"],[[3527,3529],"disallowed"],[[3530,3530],"valid"],[[3531,3534],"disallowed"],[[3535,3540],"valid"],[[3541,3541],"disallowed"],[[3542,3542],"valid"],[[3543,3543],"disallowed"],[[3544,3551],"valid"],[[3552,3557],"disallowed"],[[3558,3567],"valid"],[[3568,3569],"disallowed"],[[3570,3571],"valid"],[[3572,3572],"valid",[],"NV8"],[[3573,3584],"disallowed"],[[3585,3634],"valid"],[[3635,3635],"mapped",[3661,3634]],[[3636,3642],"valid"],[[3643,3646],"disallowed"],[[3647,3647],"valid",[],"NV8"],[[3648,3662],"valid"],[[3663,3663],"valid",[],"NV8"],[[3664,3673],"valid"],[[3674,3675],"valid",[],"NV8"],[[3676,3712],"disallowed"],[[3713,3714],"valid"],[[3715,3715],"disallowed"],[[3716,3716],"valid"],[[3717,3718],"disallowed"],[[3719,3720],"valid"],[[3721,3721],"disallowed"],[[3722,3722],"valid"],[[3723,3724],"disallowed"],[[3725,3725],"valid"],[[3726,3731],"disallowed"],[[3732,3735],"valid"],[[3736,3736],"disallowed"],[[3737,3743],"valid"],[[3744,3744],"disallowed"],[[3745,3747],"valid"],[[3748,3748],"disallowed"],[[3749,3749],"valid"],[[3750,3750],"disallowed"],[[3751,3751],"valid"],[[3752,3753],"disallowed"],[[3754,3755],"valid"],[[3756,3756],"disallowed"],[[3757,3762],"valid"],[[3763,3763],"mapped",[3789,3762]],[[3764,3769],"valid"],[[3770,3770],"disallowed"],[[3771,3773],"valid"],[[3774,3775],"disallowed"],[[3776,3780],"valid"],[[3781,3781],"disallowed"],[[3782,3782],"valid"],[[3783,3783],"disallowed"],[[3784,3789],"valid"],[[3790,3791],"disallowed"],[[3792,3801],"valid"],[[3802,3803],"disallowed"],[[3804,3804],"mapped",[3755,3737]],[[3805,3805],"mapped",[3755,3745]],[[3806,3807],"valid"],[[3808,3839],"disallowed"],[[3840,3840],"valid"],[[3841,3850],"valid",[],"NV8"],[[3851,3851],"valid"],[[3852,3852],"mapped",[3851]],[[3853,3863],"valid",[],"NV8"],[[3864,3865],"valid"],[[3866,3871],"valid",[],"NV8"],[[3872,3881],"valid"],[[3882,3892],"valid",[],"NV8"],[[3893,3893],"valid"],[[3894,3894],"valid",[],"NV8"],[[3895,3895],"valid"],[[3896,3896],"valid",[],"NV8"],[[3897,3897],"valid"],[[3898,3901],"valid",[],"NV8"],[[3902,3906],"valid"],[[3907,3907],"mapped",[3906,4023]],[[3908,3911],"valid"],[[3912,3912],"disallowed"],[[3913,3916],"valid"],[[3917,3917],"mapped",[3916
,4023]],[[3918,3921],"valid"],[[3922,3922],"mapped",[3921,4023]],[[3923,3926],"valid"],[[3927,3927],"mapped",[3926,4023]],[[3928,3931],"valid"],[[3932,3932],"mapped",[3931,4023]],[[3933,3944],"valid"],[[3945,3945],"mapped",[3904,4021]],[[3946,3946],"valid"],[[3947,3948],"valid"],[[3949,3952],"disallowed"],[[3953,3954],"valid"],[[3955,3955],"mapped",[3953,3954]],[[3956,3956],"valid"],[[3957,3957],"mapped",[3953,3956]],[[3958,3958],"mapped",[4018,3968]],[[3959,3959],"mapped",[4018,3953,3968]],[[3960,3960],"mapped",[4019,3968]],[[3961,3961],"mapped",[4019,3953,3968]],[[3962,3968],"valid"],[[3969,3969],"mapped",[3953,3968]],[[3970,3972],"valid"],[[3973,3973],"valid",[],"NV8"],[[3974,3979],"valid"],[[3980,3983],"valid"],[[3984,3986],"valid"],[[3987,3987],"mapped",[3986,4023]],[[3988,3989],"valid"],[[3990,3990],"valid"],[[3991,3991],"valid"],[[3992,3992],"disallowed"],[[3993,3996],"valid"],[[3997,3997],"mapped",[3996,4023]],[[3998,4001],"valid"],[[4002,4002],"mapped",[4001,4023]],[[4003,4006],"valid"],[[4007,4007],"mapped",[4006,4023]],[[4008,4011],"valid"],[[4012,4012],"mapped",[4011,4023]],[[4013,4013],"valid"],[[4014,4016],"valid"],[[4017,4023],"valid"],[[4024,4024],"valid"],[[4025,4025],"mapped",[3984,4021]],[[4026,4028],"valid"],[[4029,4029],"disallowed"],[[4030,4037],"valid",[],"NV8"],[[4038,4038],"valid"],[[4039,4044],"valid",[],"NV8"],[[4045,4045],"disallowed"],[[4046,4046],"valid",[],"NV8"],[[4047,4047],"valid",[],"NV8"],[[4048,4049],"valid",[],"NV8"],[[4050,4052],"valid",[],"NV8"],[[4053,4056],"valid",[],"NV8"],[[4057,4058],"valid",[],"NV8"],[[4059,4095],"disallowed"],[[4096,4129],"valid"],[[4130,4130],"valid"],[[4131,4135],"valid"],[[4136,4136],"valid"],[[4137,4138],"valid"],[[4139,4139],"valid"],[[4140,4146],"valid"],[[4147,4149],"valid"],[[4150,4153],"valid"],[[4154,4159],"valid"],[[4160,4169],"valid"],[[4170,4175],"valid",[],"NV8"],[[4176,4185],"valid"],[[4186,4249],"valid"],[[4250,4253],"valid"],[[4254,4255],"valid",[],"NV8"],[[4256,4293],"disallowed"],[[4294,4294],"disallowed"],[[4295,4295],"mapped",[11559]],[[4296,4300],"disallowed"],[[4301,4301],"mapped",[11565]],[[4302,4303],"disallowed"],[[4304,4342],"valid"],[[4343,4344],"valid"],[[4345,4346],"valid"],[[4347,4347],"valid",[],"NV8"],[[4348,4348],"mapped",[4316]],[[4349,4351],"valid"],[[4352,4441],"valid",[],"NV8"],[[4442,4446],"valid",[],"NV8"],[[4447,4448],"disallowed"],[[4449,4514],"valid",[],"NV8"],[[4515,4519],"valid",[],"NV8"],[[4520,4601],"valid",[],"NV8"],[[4602,4607],"valid",[],"NV8"],[[4608,4614],"valid"],[[4615,4615],"valid"],[[4616,4678],"valid"],[[4679,4679],"valid"],[[4680,4680],"valid"],[[4681,4681],"disallowed"],[[4682,4685],"valid"],[[4686,4687],"disallowed"],[[4688,4694],"valid"],[[4695,4695],"disallowed"],[[4696,4696],"valid"],[[4697,4697],"disallowed"],[[4698,4701],"valid"],[[4702,4703],"disallowed"],[[4704,4742],"valid"],[[4743,4743],"valid"],[[4744,4744],"valid"],[[4745,4745],"disallowed"],[[4746,4749],"valid"],[[4750,4751],"disallowed"],[[4752,4782],"valid"],[[4783,4783],"valid"],[[4784,4784],"valid"],[[4785,4785],"disallowed"],[[4786,4789],"valid"],[[4790,4791],"disallowed"],[[4792,4798],"valid"],[[4799,4799],"disallowed"],[[4800,4800],"valid"],[[4801,4801],"disallowed"],[[4802,4805],"valid"],[[4806,4807],"disallowed"],[[4808,4814],"valid"],[[4815,4815],"valid"],[[4816,4822],"valid"],[[4823,4823],"disallowed"],[[4824,4846],"valid"],[[4847,4847],"valid"],[[4848,4878],"valid"],[[4879,4879],"valid"],[[4880,4880],"valid"],[[4881,4881],"disallowed"],[[4882,4885],"valid"],[[4886,4887],"disallowed"],[[4888,4894
],"valid"],[[4895,4895],"valid"],[[4896,4934],"valid"],[[4935,4935],"valid"],[[4936,4954],"valid"],[[4955,4956],"disallowed"],[[4957,4958],"valid"],[[4959,4959],"valid"],[[4960,4960],"valid",[],"NV8"],[[4961,4988],"valid",[],"NV8"],[[4989,4991],"disallowed"],[[4992,5007],"valid"],[[5008,5017],"valid",[],"NV8"],[[5018,5023],"disallowed"],[[5024,5108],"valid"],[[5109,5109],"valid"],[[5110,5111],"disallowed"],[[5112,5112],"mapped",[5104]],[[5113,5113],"mapped",[5105]],[[5114,5114],"mapped",[5106]],[[5115,5115],"mapped",[5107]],[[5116,5116],"mapped",[5108]],[[5117,5117],"mapped",[5109]],[[5118,5119],"disallowed"],[[5120,5120],"valid",[],"NV8"],[[5121,5740],"valid"],[[5741,5742],"valid",[],"NV8"],[[5743,5750],"valid"],[[5751,5759],"valid"],[[5760,5760],"disallowed"],[[5761,5786],"valid"],[[5787,5788],"valid",[],"NV8"],[[5789,5791],"disallowed"],[[5792,5866],"valid"],[[5867,5872],"valid",[],"NV8"],[[5873,5880],"valid"],[[5881,5887],"disallowed"],[[5888,5900],"valid"],[[5901,5901],"disallowed"],[[5902,5908],"valid"],[[5909,5919],"disallowed"],[[5920,5940],"valid"],[[5941,5942],"valid",[],"NV8"],[[5943,5951],"disallowed"],[[5952,5971],"valid"],[[5972,5983],"disallowed"],[[5984,5996],"valid"],[[5997,5997],"disallowed"],[[5998,6000],"valid"],[[6001,6001],"disallowed"],[[6002,6003],"valid"],[[6004,6015],"disallowed"],[[6016,6067],"valid"],[[6068,6069],"disallowed"],[[6070,6099],"valid"],[[6100,6102],"valid",[],"NV8"],[[6103,6103],"valid"],[[6104,6107],"valid",[],"NV8"],[[6108,6108],"valid"],[[6109,6109],"valid"],[[6110,6111],"disallowed"],[[6112,6121],"valid"],[[6122,6127],"disallowed"],[[6128,6137],"valid",[],"NV8"],[[6138,6143],"disallowed"],[[6144,6149],"valid",[],"NV8"],[[6150,6150],"disallowed"],[[6151,6154],"valid",[],"NV8"],[[6155,6157],"ignored"],[[6158,6158],"disallowed"],[[6159,6159],"disallowed"],[[6160,6169],"valid"],[[6170,6175],"disallowed"],[[6176,6263],"valid"],[[6264,6271],"disallowed"],[[6272,6313],"valid"],[[6314,6314],"valid"],[[6315,6319],"disallowed"],[[6320,6389],"valid"],[[6390,6399],"disallowed"],[[6400,6428],"valid"],[[6429,6430],"valid"],[[6431,6431],"disallowed"],[[6432,6443],"valid"],[[6444,6447],"disallowed"],[[6448,6459],"valid"],[[6460,6463],"disallowed"],[[6464,6464],"valid",[],"NV8"],[[6465,6467],"disallowed"],[[6468,6469],"valid",[],"NV8"],[[6470,6509],"valid"],[[6510,6511],"disallowed"],[[6512,6516],"valid"],[[6517,6527],"disallowed"],[[6528,6569],"valid"],[[6570,6571],"valid"],[[6572,6575],"disallowed"],[[6576,6601],"valid"],[[6602,6607],"disallowed"],[[6608,6617],"valid"],[[6618,6618],"valid",[],"XV8"],[[6619,6621],"disallowed"],[[6622,6623],"valid",[],"NV8"],[[6624,6655],"valid",[],"NV8"],[[6656,6683],"valid"],[[6684,6685],"disallowed"],[[6686,6687],"valid",[],"NV8"],[[6688,6750],"valid"],[[6751,6751],"disallowed"],[[6752,6780],"valid"],[[6781,6782],"disallowed"],[[6783,6793],"valid"],[[6794,6799],"disallowed"],[[6800,6809],"valid"],[[6810,6815],"disallowed"],[[6816,6822],"valid",[],"NV8"],[[6823,6823],"valid"],[[6824,6829],"valid",[],"NV8"],[[6830,6831],"disallowed"],[[6832,6845],"valid"],[[6846,6846],"valid",[],"NV8"],[[6847,6911],"disallowed"],[[6912,6987],"valid"],[[6988,6991],"disallowed"],[[6992,7001],"valid"],[[7002,7018],"valid",[],"NV8"],[[7019,7027],"valid"],[[7028,7036],"valid",[],"NV8"],[[7037,7039],"disallowed"],[[7040,7082],"valid"],[[7083,7085],"valid"],[[7086,7097],"valid"],[[7098,7103],"valid"],[[7104,7155],"valid"],[[7156,7163],"disallowed"],[[7164,7167],"valid",[],"NV8"],[[7168,7223],"valid"],[[7224,7226],"disallowed"],[[7227,7231],"valid",[],"
NV8"],[[7232,7241],"valid"],[[7242,7244],"disallowed"],[[7245,7293],"valid"],[[7294,7295],"valid",[],"NV8"],[[7296,7359],"disallowed"],[[7360,7367],"valid",[],"NV8"],[[7368,7375],"disallowed"],[[7376,7378],"valid"],[[7379,7379],"valid",[],"NV8"],[[7380,7410],"valid"],[[7411,7414],"valid"],[[7415,7415],"disallowed"],[[7416,7417],"valid"],[[7418,7423],"disallowed"],[[7424,7467],"valid"],[[7468,7468],"mapped",[97]],[[7469,7469],"mapped",[230]],[[7470,7470],"mapped",[98]],[[7471,7471],"valid"],[[7472,7472],"mapped",[100]],[[7473,7473],"mapped",[101]],[[7474,7474],"mapped",[477]],[[7475,7475],"mapped",[103]],[[7476,7476],"mapped",[104]],[[7477,7477],"mapped",[105]],[[7478,7478],"mapped",[106]],[[7479,7479],"mapped",[107]],[[7480,7480],"mapped",[108]],[[7481,7481],"mapped",[109]],[[7482,7482],"mapped",[110]],[[7483,7483],"valid"],[[7484,7484],"mapped",[111]],[[7485,7485],"mapped",[547]],[[7486,7486],"mapped",[112]],[[7487,7487],"mapped",[114]],[[7488,7488],"mapped",[116]],[[7489,7489],"mapped",[117]],[[7490,7490],"mapped",[119]],[[7491,7491],"mapped",[97]],[[7492,7492],"mapped",[592]],[[7493,7493],"mapped",[593]],[[7494,7494],"mapped",[7426]],[[7495,7495],"mapped",[98]],[[7496,7496],"mapped",[100]],[[7497,7497],"mapped",[101]],[[7498,7498],"mapped",[601]],[[7499,7499],"mapped",[603]],[[7500,7500],"mapped",[604]],[[7501,7501],"mapped",[103]],[[7502,7502],"valid"],[[7503,7503],"mapped",[107]],[[7504,7504],"mapped",[109]],[[7505,7505],"mapped",[331]],[[7506,7506],"mapped",[111]],[[7507,7507],"mapped",[596]],[[7508,7508],"mapped",[7446]],[[7509,7509],"mapped",[7447]],[[7510,7510],"mapped",[112]],[[7511,7511],"mapped",[116]],[[7512,7512],"mapped",[117]],[[7513,7513],"mapped",[7453]],[[7514,7514],"mapped",[623]],[[7515,7515],"mapped",[118]],[[7516,7516],"mapped",[7461]],[[7517,7517],"mapped",[946]],[[7518,7518],"mapped",[947]],[[7519,7519],"mapped",[948]],[[7520,7520],"mapped",[966]],[[7521,7521],"mapped",[967]],[[7522,7522],"mapped",[105]],[[7523,7523],"mapped",[114]],[[7524,7524],"mapped",[117]],[[7525,7525],"mapped",[118]],[[7526,7526],"mapped",[946]],[[7527,7527],"mapped",[947]],[[7528,7528],"mapped",[961]],[[7529,7529],"mapped",[966]],[[7530,7530],"mapped",[967]],[[7531,7531],"valid"],[[7532,7543],"valid"],[[7544,7544],"mapped",[1085]],[[7545,7578],"valid"],[[7579,7579],"mapped",[594]],[[7580,7580],"mapped",[99]],[[7581,7581],"mapped",[597]],[[7582,7582],"mapped",[240]],[[7583,7583],"mapped",[604]],[[7584,7584],"mapped",[102]],[[7585,7585],"mapped",[607]],[[7586,7586],"mapped",[609]],[[7587,7587],"mapped",[613]],[[7588,7588],"mapped",[616]],[[7589,7589],"mapped",[617]],[[7590,7590],"mapped",[618]],[[7591,7591],"mapped",[7547]],[[7592,7592],"mapped",[669]],[[7593,7593],"mapped",[621]],[[7594,7594],"mapped",[7557]],[[7595,7595],"mapped",[671]],[[7596,7596],"mapped",[625]],[[7597,7597],"mapped",[624]],[[7598,7598],"mapped",[626]],[[7599,7599],"mapped",[627]],[[7600,7600],"mapped",[628]],[[7601,7601],"mapped",[629]],[[7602,7602],"mapped",[632]],[[7603,7603],"mapped",[642]],[[7604,7604],"mapped",[643]],[[7605,7605],"mapped",[427]],[[7606,7606],"mapped",[649]],[[7607,7607],"mapped",[650]],[[7608,7608],"mapped",[7452]],[[7609,7609],"mapped",[651]],[[7610,7610],"mapped",[652]],[[7611,7611],"mapped",[122]],[[7612,7612],"mapped",[656]],[[7613,7613],"mapped",[657]],[[7614,7614],"mapped",[658]],[[7615,7615],"mapped",[952]],[[7616,7619],"valid"],[[7620,7626],"valid"],[[7627,7654],"valid"],[[7655,7669],"valid"],[[7670,7675],"disallowed"],[[7676,7676],"valid"],[[7677,7677],"valid"],[[7678,7679],"valid"],[[7680,7
680],"mapped",[7681]],[[7681,7681],"valid"],[[7682,7682],"mapped",[7683]],[[7683,7683],"valid"],[[7684,7684],"mapped",[7685]],[[7685,7685],"valid"],[[7686,7686],"mapped",[7687]],[[7687,7687],"valid"],[[7688,7688],"mapped",[7689]],[[7689,7689],"valid"],[[7690,7690],"mapped",[7691]],[[7691,7691],"valid"],[[7692,7692],"mapped",[7693]],[[7693,7693],"valid"],[[7694,7694],"mapped",[7695]],[[7695,7695],"valid"],[[7696,7696],"mapped",[7697]],[[7697,7697],"valid"],[[7698,7698],"mapped",[7699]],[[7699,7699],"valid"],[[7700,7700],"mapped",[7701]],[[7701,7701],"valid"],[[7702,7702],"mapped",[7703]],[[7703,7703],"valid"],[[7704,7704],"mapped",[7705]],[[7705,7705],"valid"],[[7706,7706],"mapped",[7707]],[[7707,7707],"valid"],[[7708,7708],"mapped",[7709]],[[7709,7709],"valid"],[[7710,7710],"mapped",[7711]],[[7711,7711],"valid"],[[7712,7712],"mapped",[7713]],[[7713,7713],"valid"],[[7714,7714],"mapped",[7715]],[[7715,7715],"valid"],[[7716,7716],"mapped",[7717]],[[7717,7717],"valid"],[[7718,7718],"mapped",[7719]],[[7719,7719],"valid"],[[7720,7720],"mapped",[7721]],[[7721,7721],"valid"],[[7722,7722],"mapped",[7723]],[[7723,7723],"valid"],[[7724,7724],"mapped",[7725]],[[7725,7725],"valid"],[[7726,7726],"mapped",[7727]],[[7727,7727],"valid"],[[7728,7728],"mapped",[7729]],[[7729,7729],"valid"],[[7730,7730],"mapped",[7731]],[[7731,7731],"valid"],[[7732,7732],"mapped",[7733]],[[7733,7733],"valid"],[[7734,7734],"mapped",[7735]],[[7735,7735],"valid"],[[7736,7736],"mapped",[7737]],[[7737,7737],"valid"],[[7738,7738],"mapped",[7739]],[[7739,7739],"valid"],[[7740,7740],"mapped",[7741]],[[7741,7741],"valid"],[[7742,7742],"mapped",[7743]],[[7743,7743],"valid"],[[7744,7744],"mapped",[7745]],[[7745,7745],"valid"],[[7746,7746],"mapped",[7747]],[[7747,7747],"valid"],[[7748,7748],"mapped",[7749]],[[7749,7749],"valid"],[[7750,7750],"mapped",[7751]],[[7751,7751],"valid"],[[7752,7752],"mapped",[7753]],[[7753,7753],"valid"],[[7754,7754],"mapped",[7755]],[[7755,7755],"valid"],[[7756,7756],"mapped",[7757]],[[7757,7757],"valid"],[[7758,7758],"mapped",[7759]],[[7759,7759],"valid"],[[7760,7760],"mapped",[7761]],[[7761,7761],"valid"],[[7762,7762],"mapped",[7763]],[[7763,7763],"valid"],[[7764,7764],"mapped",[7765]],[[7765,7765],"valid"],[[7766,7766],"mapped",[7767]],[[7767,7767],"valid"],[[7768,7768],"mapped",[7769]],[[7769,7769],"valid"],[[7770,7770],"mapped",[7771]],[[7771,7771],"valid"],[[7772,7772],"mapped",[7773]],[[7773,7773],"valid"],[[7774,7774],"mapped",[7775]],[[7775,7775],"valid"],[[7776,7776],"mapped",[7777]],[[7777,7777],"valid"],[[7778,7778],"mapped",[7779]],[[7779,7779],"valid"],[[7780,7780],"mapped",[7781]],[[7781,7781],"valid"],[[7782,7782],"mapped",[7783]],[[7783,7783],"valid"],[[7784,7784],"mapped",[7785]],[[7785,7785],"valid"],[[7786,7786],"mapped",[7787]],[[7787,7787],"valid"],[[7788,7788],"mapped",[7789]],[[7789,7789],"valid"],[[7790,7790],"mapped",[7791]],[[7791,7791],"valid"],[[7792,7792],"mapped",[7793]],[[7793,7793],"valid"],[[7794,7794],"mapped",[7795]],[[7795,7795],"valid"],[[7796,7796],"mapped",[7797]],[[7797,7797],"valid"],[[7798,7798],"mapped",[7799]],[[7799,7799],"valid"],[[7800,7800],"mapped",[7801]],[[7801,7801],"valid"],[[7802,7802],"mapped",[7803]],[[7803,7803],"valid"],[[7804,7804],"mapped",[7805]],[[7805,7805],"valid"],[[7806,7806],"mapped",[7807]],[[7807,7807],"valid"],[[7808,7808],"mapped",[7809]],[[7809,7809],"valid"],[[7810,7810],"mapped",[7811]],[[7811,7811],"valid"],[[7812,7812],"mapped",[7813]],[[7813,7813],"valid"],[[7814,7814],"mapped",[7815]],[[7815,7815],"valid"],[[7816,7816],"mapped",[7817
]],[[7817,7817],"valid"],[[7818,7818],"mapped",[7819]],[[7819,7819],"valid"],[[7820,7820],"mapped",[7821]],[[7821,7821],"valid"],[[7822,7822],"mapped",[7823]],[[7823,7823],"valid"],[[7824,7824],"mapped",[7825]],[[7825,7825],"valid"],[[7826,7826],"mapped",[7827]],[[7827,7827],"valid"],[[7828,7828],"mapped",[7829]],[[7829,7833],"valid"],[[7834,7834],"mapped",[97,702]],[[7835,7835],"mapped",[7777]],[[7836,7837],"valid"],[[7838,7838],"mapped",[115,115]],[[7839,7839],"valid"],[[7840,7840],"mapped",[7841]],[[7841,7841],"valid"],[[7842,7842],"mapped",[7843]],[[7843,7843],"valid"],[[7844,7844],"mapped",[7845]],[[7845,7845],"valid"],[[7846,7846],"mapped",[7847]],[[7847,7847],"valid"],[[7848,7848],"mapped",[7849]],[[7849,7849],"valid"],[[7850,7850],"mapped",[7851]],[[7851,7851],"valid"],[[7852,7852],"mapped",[7853]],[[7853,7853],"valid"],[[7854,7854],"mapped",[7855]],[[7855,7855],"valid"],[[7856,7856],"mapped",[7857]],[[7857,7857],"valid"],[[7858,7858],"mapped",[7859]],[[7859,7859],"valid"],[[7860,7860],"mapped",[7861]],[[7861,7861],"valid"],[[7862,7862],"mapped",[7863]],[[7863,7863],"valid"],[[7864,7864],"mapped",[7865]],[[7865,7865],"valid"],[[7866,7866],"mapped",[7867]],[[7867,7867],"valid"],[[7868,7868],"mapped",[7869]],[[7869,7869],"valid"],[[7870,7870],"mapped",[7871]],[[7871,7871],"valid"],[[7872,7872],"mapped",[7873]],[[7873,7873],"valid"],[[7874,7874],"mapped",[7875]],[[7875,7875],"valid"],[[7876,7876],"mapped",[7877]],[[7877,7877],"valid"],[[7878,7878],"mapped",[7879]],[[7879,7879],"valid"],[[7880,7880],"mapped",[7881]],[[7881,7881],"valid"],[[7882,7882],"mapped",[7883]],[[7883,7883],"valid"],[[7884,7884],"mapped",[7885]],[[7885,7885],"valid"],[[7886,7886],"mapped",[7887]],[[7887,7887],"valid"],[[7888,7888],"mapped",[7889]],[[7889,7889],"valid"],[[7890,7890],"mapped",[7891]],[[7891,7891],"valid"],[[7892,7892],"mapped",[7893]],[[7893,7893],"valid"],[[7894,7894],"mapped",[7895]],[[7895,7895],"valid"],[[7896,7896],"mapped",[7897]],[[7897,7897],"valid"],[[7898,7898],"mapped",[7899]],[[7899,7899],"valid"],[[7900,7900],"mapped",[7901]],[[7901,7901],"valid"],[[7902,7902],"mapped",[7903]],[[7903,7903],"valid"],[[7904,7904],"mapped",[7905]],[[7905,7905],"valid"],[[7906,7906],"mapped",[7907]],[[7907,7907],"valid"],[[7908,7908],"mapped",[7909]],[[7909,7909],"valid"],[[7910,7910],"mapped",[7911]],[[7911,7911],"valid"],[[7912,7912],"mapped",[7913]],[[7913,7913],"valid"],[[7914,7914],"mapped",[7915]],[[7915,7915],"valid"],[[7916,7916],"mapped",[7917]],[[7917,7917],"valid"],[[7918,7918],"mapped",[7919]],[[7919,7919],"valid"],[[7920,7920],"mapped",[7921]],[[7921,7921],"valid"],[[7922,7922],"mapped",[7923]],[[7923,7923],"valid"],[[7924,7924],"mapped",[7925]],[[7925,7925],"valid"],[[7926,7926],"mapped",[7927]],[[7927,7927],"valid"],[[7928,7928],"mapped",[7929]],[[7929,7929],"valid"],[[7930,7930],"mapped",[7931]],[[7931,7931],"valid"],[[7932,7932],"mapped",[7933]],[[7933,7933],"valid"],[[7934,7934],"mapped",[7935]],[[7935,7935],"valid"],[[7936,7943],"valid"],[[7944,7944],"mapped",[7936]],[[7945,7945],"mapped",[7937]],[[7946,7946],"mapped",[7938]],[[7947,7947],"mapped",[7939]],[[7948,7948],"mapped",[7940]],[[7949,7949],"mapped",[7941]],[[7950,7950],"mapped",[7942]],[[7951,7951],"mapped",[7943]],[[7952,7957],"valid"],[[7958,7959],"disallowed"],[[7960,7960],"mapped",[7952]],[[7961,7961],"mapped",[7953]],[[7962,7962],"mapped",[7954]],[[7963,7963],"mapped",[7955]],[[7964,7964],"mapped",[7956]],[[7965,7965],"mapped",[7957]],[[7966,7967],"disallowed"],[[7968,7975],"valid"],[[7976,7976],"mapped",[7968]],[[7977,7977],
"mapped",[7969]],[[7978,7978],"mapped",[7970]],[[7979,7979],"mapped",[7971]],[[7980,7980],"mapped",[7972]],[[7981,7981],"mapped",[7973]],[[7982,7982],"mapped",[7974]],[[7983,7983],"mapped",[7975]],[[7984,7991],"valid"],[[7992,7992],"mapped",[7984]],[[7993,7993],"mapped",[7985]],[[7994,7994],"mapped",[7986]],[[7995,7995],"mapped",[7987]],[[7996,7996],"mapped",[7988]],[[7997,7997],"mapped",[7989]],[[7998,7998],"mapped",[7990]],[[7999,7999],"mapped",[7991]],[[8000,8005],"valid"],[[8006,8007],"disallowed"],[[8008,8008],"mapped",[8000]],[[8009,8009],"mapped",[8001]],[[8010,8010],"mapped",[8002]],[[8011,8011],"mapped",[8003]],[[8012,8012],"mapped",[8004]],[[8013,8013],"mapped",[8005]],[[8014,8015],"disallowed"],[[8016,8023],"valid"],[[8024,8024],"disallowed"],[[8025,8025],"mapped",[8017]],[[8026,8026],"disallowed"],[[8027,8027],"mapped",[8019]],[[8028,8028],"disallowed"],[[8029,8029],"mapped",[8021]],[[8030,8030],"disallowed"],[[8031,8031],"mapped",[8023]],[[8032,8039],"valid"],[[8040,8040],"mapped",[8032]],[[8041,8041],"mapped",[8033]],[[8042,8042],"mapped",[8034]],[[8043,8043],"mapped",[8035]],[[8044,8044],"mapped",[8036]],[[8045,8045],"mapped",[8037]],[[8046,8046],"mapped",[8038]],[[8047,8047],"mapped",[8039]],[[8048,8048],"valid"],[[8049,8049],"mapped",[940]],[[8050,8050],"valid"],[[8051,8051],"mapped",[941]],[[8052,8052],"valid"],[[8053,8053],"mapped",[942]],[[8054,8054],"valid"],[[8055,8055],"mapped",[943]],[[8056,8056],"valid"],[[8057,8057],"mapped",[972]],[[8058,8058],"valid"],[[8059,8059],"mapped",[973]],[[8060,8060],"valid"],[[8061,8061],"mapped",[974]],[[8062,8063],"disallowed"],[[8064,8064],"mapped",[7936,953]],[[8065,8065],"mapped",[7937,953]],[[8066,8066],"mapped",[7938,953]],[[8067,8067],"mapped",[7939,953]],[[8068,8068],"mapped",[7940,953]],[[8069,8069],"mapped",[7941,953]],[[8070,8070],"mapped",[7942,953]],[[8071,8071],"mapped",[7943,953]],[[8072,8072],"mapped",[7936,953]],[[8073,8073],"mapped",[7937,953]],[[8074,8074],"mapped",[7938,953]],[[8075,8075],"mapped",[7939,953]],[[8076,8076],"mapped",[7940,953]],[[8077,8077],"mapped",[7941,953]],[[8078,8078],"mapped",[7942,953]],[[8079,8079],"mapped",[7943,953]],[[8080,8080],"mapped",[7968,953]],[[8081,8081],"mapped",[7969,953]],[[8082,8082],"mapped",[7970,953]],[[8083,8083],"mapped",[7971,953]],[[8084,8084],"mapped",[7972,953]],[[8085,8085],"mapped",[7973,953]],[[8086,8086],"mapped",[7974,953]],[[8087,8087],"mapped",[7975,953]],[[8088,8088],"mapped",[7968,953]],[[8089,8089],"mapped",[7969,953]],[[8090,8090],"mapped",[7970,953]],[[8091,8091],"mapped",[7971,953]],[[8092,8092],"mapped",[7972,953]],[[8093,8093],"mapped",[7973,953]],[[8094,8094],"mapped",[7974,953]],[[8095,8095],"mapped",[7975,953]],[[8096,8096],"mapped",[8032,953]],[[8097,8097],"mapped",[8033,953]],[[8098,8098],"mapped",[8034,953]],[[8099,8099],"mapped",[8035,953]],[[8100,8100],"mapped",[8036,953]],[[8101,8101],"mapped",[8037,953]],[[8102,8102],"mapped",[8038,953]],[[8103,8103],"mapped",[8039,953]],[[8104,8104],"mapped",[8032,953]],[[8105,8105],"mapped",[8033,953]],[[8106,8106],"mapped",[8034,953]],[[8107,8107],"mapped",[8035,953]],[[8108,8108],"mapped",[8036,953]],[[8109,8109],"mapped",[8037,953]],[[8110,8110],"mapped",[8038,953]],[[8111,8111],"mapped",[8039,953]],[[8112,8113],"valid"],[[8114,8114],"mapped",[8048,953]],[[8115,8115],"mapped",[945,953]],[[8116,8116],"mapped",[940,953]],[[8117,8117],"disallowed"],[[8118,8118],"valid"],[[8119,8119],"mapped",[8118,953]],[[8120,8120],"mapped",[8112]],[[8121,8121],"mapped",[8113]],[[8122,8122],"mapped",[8048]],[[8123,8123],"map
ped",[940]],[[8124,8124],"mapped",[945,953]],[[8125,8125],"disallowed_STD3_mapped",[32,787]],[[8126,8126],"mapped",[953]],[[8127,8127],"disallowed_STD3_mapped",[32,787]],[[8128,8128],"disallowed_STD3_mapped",[32,834]],[[8129,8129],"disallowed_STD3_mapped",[32,776,834]],[[8130,8130],"mapped",[8052,953]],[[8131,8131],"mapped",[951,953]],[[8132,8132],"mapped",[942,953]],[[8133,8133],"disallowed"],[[8134,8134],"valid"],[[8135,8135],"mapped",[8134,953]],[[8136,8136],"mapped",[8050]],[[8137,8137],"mapped",[941]],[[8138,8138],"mapped",[8052]],[[8139,8139],"mapped",[942]],[[8140,8140],"mapped",[951,953]],[[8141,8141],"disallowed_STD3_mapped",[32,787,768]],[[8142,8142],"disallowed_STD3_mapped",[32,787,769]],[[8143,8143],"disallowed_STD3_mapped",[32,787,834]],[[8144,8146],"valid"],[[8147,8147],"mapped",[912]],[[8148,8149],"disallowed"],[[8150,8151],"valid"],[[8152,8152],"mapped",[8144]],[[8153,8153],"mapped",[8145]],[[8154,8154],"mapped",[8054]],[[8155,8155],"mapped",[943]],[[8156,8156],"disallowed"],[[8157,8157],"disallowed_STD3_mapped",[32,788,768]],[[8158,8158],"disallowed_STD3_mapped",[32,788,769]],[[8159,8159],"disallowed_STD3_mapped",[32,788,834]],[[8160,8162],"valid"],[[8163,8163],"mapped",[944]],[[8164,8167],"valid"],[[8168,8168],"mapped",[8160]],[[8169,8169],"mapped",[8161]],[[8170,8170],"mapped",[8058]],[[8171,8171],"mapped",[973]],[[8172,8172],"mapped",[8165]],[[8173,8173],"disallowed_STD3_mapped",[32,776,768]],[[8174,8174],"disallowed_STD3_mapped",[32,776,769]],[[8175,8175],"disallowed_STD3_mapped",[96]],[[8176,8177],"disallowed"],[[8178,8178],"mapped",[8060,953]],[[8179,8179],"mapped",[969,953]],[[8180,8180],"mapped",[974,953]],[[8181,8181],"disallowed"],[[8182,8182],"valid"],[[8183,8183],"mapped",[8182,953]],[[8184,8184],"mapped",[8056]],[[8185,8185],"mapped",[972]],[[8186,8186],"mapped",[8060]],[[8187,8187],"mapped",[974]],[[8188,8188],"mapped",[969,953]],[[8189,8189],"disallowed_STD3_mapped",[32,769]],[[8190,8190],"disallowed_STD3_mapped",[32,788]],[[8191,8191],"disallowed"],[[8192,8202],"disallowed_STD3_mapped",[32]],[[8203,8203],"ignored"],[[8204,8205],"deviation",[]],[[8206,8207],"disallowed"],[[8208,8208],"valid",[],"NV8"],[[8209,8209],"mapped",[8208]],[[8210,8214],"valid",[],"NV8"],[[8215,8215],"disallowed_STD3_mapped",[32,819]],[[8216,8227],"valid",[],"NV8"],[[8228,8230],"disallowed"],[[8231,8231],"valid",[],"NV8"],[[8232,8238],"disallowed"],[[8239,8239],"disallowed_STD3_mapped",[32]],[[8240,8242],"valid",[],"NV8"],[[8243,8243],"mapped",[8242,8242]],[[8244,8244],"mapped",[8242,8242,8242]],[[8245,8245],"valid",[],"NV8"],[[8246,8246],"mapped",[8245,8245]],[[8247,8247],"mapped",[8245,8245,8245]],[[8248,8251],"valid",[],"NV8"],[[8252,8252],"disallowed_STD3_mapped",[33,33]],[[8253,8253],"valid",[],"NV8"],[[8254,8254],"disallowed_STD3_mapped",[32,773]],[[8255,8262],"valid",[],"NV8"],[[8263,8263],"disallowed_STD3_mapped",[63,63]],[[8264,8264],"disallowed_STD3_mapped",[63,33]],[[8265,8265],"disallowed_STD3_mapped",[33,63]],[[8266,8269],"valid",[],"NV8"],[[8270,8274],"valid",[],"NV8"],[[8275,8276],"valid",[],"NV8"],[[8277,8278],"valid",[],"NV8"],[[8279,8279],"mapped",[8242,8242,8242,8242]],[[8280,8286],"valid",[],"NV8"],[[8287,8287],"disallowed_STD3_mapped",[32]],[[8288,8288],"ignored"],[[8289,8291],"disallowed"],[[8292,8292],"ignored"],[[8293,8293],"disallowed"],[[8294,8297],"disallowed"],[[8298,8303],"disallowed"],[[8304,8304],"mapped",[48]],[[8305,8305],"mapped",[105]],[[8306,8307],"disallowed"],[[8308,8308],"mapped",[52]],[[8309,8309],"mapped",[53]],[[8310,8310],"mapped",[54]],[[8311
,8311],"mapped",[55]],[[8312,8312],"mapped",[56]],[[8313,8313],"mapped",[57]],[[8314,8314],"disallowed_STD3_mapped",[43]],[[8315,8315],"mapped",[8722]],[[8316,8316],"disallowed_STD3_mapped",[61]],[[8317,8317],"disallowed_STD3_mapped",[40]],[[8318,8318],"disallowed_STD3_mapped",[41]],[[8319,8319],"mapped",[110]],[[8320,8320],"mapped",[48]],[[8321,8321],"mapped",[49]],[[8322,8322],"mapped",[50]],[[8323,8323],"mapped",[51]],[[8324,8324],"mapped",[52]],[[8325,8325],"mapped",[53]],[[8326,8326],"mapped",[54]],[[8327,8327],"mapped",[55]],[[8328,8328],"mapped",[56]],[[8329,8329],"mapped",[57]],[[8330,8330],"disallowed_STD3_mapped",[43]],[[8331,8331],"mapped",[8722]],[[8332,8332],"disallowed_STD3_mapped",[61]],[[8333,8333],"disallowed_STD3_mapped",[40]],[[8334,8334],"disallowed_STD3_mapped",[41]],[[8335,8335],"disallowed"],[[8336,8336],"mapped",[97]],[[8337,8337],"mapped",[101]],[[8338,8338],"mapped",[111]],[[8339,8339],"mapped",[120]],[[8340,8340],"mapped",[601]],[[8341,8341],"mapped",[104]],[[8342,8342],"mapped",[107]],[[8343,8343],"mapped",[108]],[[8344,8344],"mapped",[109]],[[8345,8345],"mapped",[110]],[[8346,8346],"mapped",[112]],[[8347,8347],"mapped",[115]],[[8348,8348],"mapped",[116]],[[8349,8351],"disallowed"],[[8352,8359],"valid",[],"NV8"],[[8360,8360],"mapped",[114,115]],[[8361,8362],"valid",[],"NV8"],[[8363,8363],"valid",[],"NV8"],[[8364,8364],"valid",[],"NV8"],[[8365,8367],"valid",[],"NV8"],[[8368,8369],"valid",[],"NV8"],[[8370,8373],"valid",[],"NV8"],[[8374,8376],"valid",[],"NV8"],[[8377,8377],"valid",[],"NV8"],[[8378,8378],"valid",[],"NV8"],[[8379,8381],"valid",[],"NV8"],[[8382,8382],"valid",[],"NV8"],[[8383,8399],"disallowed"],[[8400,8417],"valid",[],"NV8"],[[8418,8419],"valid",[],"NV8"],[[8420,8426],"valid",[],"NV8"],[[8427,8427],"valid",[],"NV8"],[[8428,8431],"valid",[],"NV8"],[[8432,8432],"valid",[],"NV8"],[[8433,8447],"disallowed"],[[8448,8448],"disallowed_STD3_mapped",[97,47,99]],[[8449,8449],"disallowed_STD3_mapped",[97,47,115]],[[8450,8450],"mapped",[99]],[[8451,8451],"mapped",[176,99]],[[8452,8452],"valid",[],"NV8"],[[8453,8453],"disallowed_STD3_mapped",[99,47,111]],[[8454,8454],"disallowed_STD3_mapped",[99,47,117]],[[8455,8455],"mapped",[603]],[[8456,8456],"valid",[],"NV8"],[[8457,8457],"mapped",[176,102]],[[8458,8458],"mapped",[103]],[[8459,8462],"mapped",[104]],[[8463,8463],"mapped",[295]],[[8464,8465],"mapped",[105]],[[8466,8467],"mapped",[108]],[[8468,8468],"valid",[],"NV8"],[[8469,8469],"mapped",[110]],[[8470,8470],"mapped",[110,111]],[[8471,8472],"valid",[],"NV8"],[[8473,8473],"mapped",[112]],[[8474,8474],"mapped",[113]],[[8475,8477],"mapped",[114]],[[8478,8479],"valid",[],"NV8"],[[8480,8480],"mapped",[115,109]],[[8481,8481],"mapped",[116,101,108]],[[8482,8482],"mapped",[116,109]],[[8483,8483],"valid",[],"NV8"],[[8484,8484],"mapped",[122]],[[8485,8485],"valid",[],"NV8"],[[8486,8486],"mapped",[969]],[[8487,8487],"valid",[],"NV8"],[[8488,8488],"mapped",[122]],[[8489,8489],"valid",[],"NV8"],[[8490,8490],"mapped",[107]],[[8491,8491],"mapped",[229]],[[8492,8492],"mapped",[98]],[[8493,8493],"mapped",[99]],[[8494,8494],"valid",[],"NV8"],[[8495,8496],"mapped",[101]],[[8497,8497],"mapped",[102]],[[8498,8498],"disallowed"],[[8499,8499],"mapped",[109]],[[8500,8500],"mapped",[111]],[[8501,8501],"mapped",[1488]],[[8502,8502],"mapped",[1489]],[[8503,8503],"mapped",[1490]],[[8504,8504],"mapped",[1491]],[[8505,8505],"mapped",[105]],[[8506,8506],"valid",[],"NV8"],[[8507,8507],"mapped",[102,97,120]],[[8508,8508],"mapped",[960]],[[8509,8510],"mapped",[947]],[[8511,8511],"mapped",[960]],[[
8512,8512],"mapped",[8721]],[[8513,8516],"valid",[],"NV8"],[[8517,8518],"mapped",[100]],[[8519,8519],"mapped",[101]],[[8520,8520],"mapped",[105]],[[8521,8521],"mapped",[106]],[[8522,8523],"valid",[],"NV8"],[[8524,8524],"valid",[],"NV8"],[[8525,8525],"valid",[],"NV8"],[[8526,8526],"valid"],[[8527,8527],"valid",[],"NV8"],[[8528,8528],"mapped",[49,8260,55]],[[8529,8529],"mapped",[49,8260,57]],[[8530,8530],"mapped",[49,8260,49,48]],[[8531,8531],"mapped",[49,8260,51]],[[8532,8532],"mapped",[50,8260,51]],[[8533,8533],"mapped",[49,8260,53]],[[8534,8534],"mapped",[50,8260,53]],[[8535,8535],"mapped",[51,8260,53]],[[8536,8536],"mapped",[52,8260,53]],[[8537,8537],"mapped",[49,8260,54]],[[8538,8538],"mapped",[53,8260,54]],[[8539,8539],"mapped",[49,8260,56]],[[8540,8540],"mapped",[51,8260,56]],[[8541,8541],"mapped",[53,8260,56]],[[8542,8542],"mapped",[55,8260,56]],[[8543,8543],"mapped",[49,8260]],[[8544,8544],"mapped",[105]],[[8545,8545],"mapped",[105,105]],[[8546,8546],"mapped",[105,105,105]],[[8547,8547],"mapped",[105,118]],[[8548,8548],"mapped",[118]],[[8549,8549],"mapped",[118,105]],[[8550,8550],"mapped",[118,105,105]],[[8551,8551],"mapped",[118,105,105,105]],[[8552,8552],"mapped",[105,120]],[[8553,8553],"mapped",[120]],[[8554,8554],"mapped",[120,105]],[[8555,8555],"mapped",[120,105,105]],[[8556,8556],"mapped",[108]],[[8557,8557],"mapped",[99]],[[8558,8558],"mapped",[100]],[[8559,8559],"mapped",[109]],[[8560,8560],"mapped",[105]],[[8561,8561],"mapped",[105,105]],[[8562,8562],"mapped",[105,105,105]],[[8563,8563],"mapped",[105,118]],[[8564,8564],"mapped",[118]],[[8565,8565],"mapped",[118,105]],[[8566,8566],"mapped",[118,105,105]],[[8567,8567],"mapped",[118,105,105,105]],[[8568,8568],"mapped",[105,120]],[[8569,8569],"mapped",[120]],[[8570,8570],"mapped",[120,105]],[[8571,8571],"mapped",[120,105,105]],[[8572,8572],"mapped",[108]],[[8573,8573],"mapped",[99]],[[8574,8574],"mapped",[100]],[[8575,8575],"mapped",[109]],[[8576,8578],"valid",[],"NV8"],[[8579,8579],"disallowed"],[[8580,8580],"valid"],[[8581,8584],"valid",[],"NV8"],[[8585,8585],"mapped",[48,8260,51]],[[8586,8587],"valid",[],"NV8"],[[8588,8591],"disallowed"],[[8592,8682],"valid",[],"NV8"],[[8683,8691],"valid",[],"NV8"],[[8692,8703],"valid",[],"NV8"],[[8704,8747],"valid",[],"NV8"],[[8748,8748],"mapped",[8747,8747]],[[8749,8749],"mapped",[8747,8747,8747]],[[8750,8750],"valid",[],"NV8"],[[8751,8751],"mapped",[8750,8750]],[[8752,8752],"mapped",[8750,8750,8750]],[[8753,8799],"valid",[],"NV8"],[[8800,8800],"disallowed_STD3_valid"],[[8801,8813],"valid",[],"NV8"],[[8814,8815],"disallowed_STD3_valid"],[[8816,8945],"valid",[],"NV8"],[[8946,8959],"valid",[],"NV8"],[[8960,8960],"valid",[],"NV8"],[[8961,8961],"valid",[],"NV8"],[[8962,9000],"valid",[],"NV8"],[[9001,9001],"mapped",[12296]],[[9002,9002],"mapped",[12297]],[[9003,9082],"valid",[],"NV8"],[[9083,9083],"valid",[],"NV8"],[[9084,9084],"valid",[],"NV8"],[[9085,9114],"valid",[],"NV8"],[[9115,9166],"valid",[],"NV8"],[[9167,9168],"valid",[],"NV8"],[[9169,9179],"valid",[],"NV8"],[[9180,9191],"valid",[],"NV8"],[[9192,9192],"valid",[],"NV8"],[[9193,9203],"valid",[],"NV8"],[[9204,9210],"valid",[],"NV8"],[[9211,9215],"disallowed"],[[9216,9252],"valid",[],"NV8"],[[9253,9254],"valid",[],"NV8"],[[9255,9279],"disallowed"],[[9280,9290],"valid",[],"NV8"],[[9291,9311],"disallowed"],[[9312,9312],"mapped",[49]],[[9313,9313],"mapped",[50]],[[9314,9314],"mapped",[51]],[[9315,9315],"mapped",[52]],[[9316,9316],"mapped",[53]],[[9317,9317],"mapped",[54]],[[9318,9318],"mapped",[55]],[[9319,9319],"mapped",[56]],[[9320,9320],"ma
pped",[57]],[[9321,9321],"mapped",[49,48]],[[9322,9322],"mapped",[49,49]],[[9323,9323],"mapped",[49,50]],[[9324,9324],"mapped",[49,51]],[[9325,9325],"mapped",[49,52]],[[9326,9326],"mapped",[49,53]],[[9327,9327],"mapped",[49,54]],[[9328,9328],"mapped",[49,55]],[[9329,9329],"mapped",[49,56]],[[9330,9330],"mapped",[49,57]],[[9331,9331],"mapped",[50,48]],[[9332,9332],"disallowed_STD3_mapped",[40,49,41]],[[9333,9333],"disallowed_STD3_mapped",[40,50,41]],[[9334,9334],"disallowed_STD3_mapped",[40,51,41]],[[9335,9335],"disallowed_STD3_mapped",[40,52,41]],[[9336,9336],"disallowed_STD3_mapped",[40,53,41]],[[9337,9337],"disallowed_STD3_mapped",[40,54,41]],[[9338,9338],"disallowed_STD3_mapped",[40,55,41]],[[9339,9339],"disallowed_STD3_mapped",[40,56,41]],[[9340,9340],"disallowed_STD3_mapped",[40,57,41]],[[9341,9341],"disallowed_STD3_mapped",[40,49,48,41]],[[9342,9342],"disallowed_STD3_mapped",[40,49,49,41]],[[9343,9343],"disallowed_STD3_mapped",[40,49,50,41]],[[9344,9344],"disallowed_STD3_mapped",[40,49,51,41]],[[9345,9345],"disallowed_STD3_mapped",[40,49,52,41]],[[9346,9346],"disallowed_STD3_mapped",[40,49,53,41]],[[9347,9347],"disallowed_STD3_mapped",[40,49,54,41]],[[9348,9348],"disallowed_STD3_mapped",[40,49,55,41]],[[9349,9349],"disallowed_STD3_mapped",[40,49,56,41]],[[9350,9350],"disallowed_STD3_mapped",[40,49,57,41]],[[9351,9351],"disallowed_STD3_mapped",[40,50,48,41]],[[9352,9371],"disallowed"],[[9372,9372],"disallowed_STD3_mapped",[40,97,41]],[[9373,9373],"disallowed_STD3_mapped",[40,98,41]],[[9374,9374],"disallowed_STD3_mapped",[40,99,41]],[[9375,9375],"disallowed_STD3_mapped",[40,100,41]],[[9376,9376],"disallowed_STD3_mapped",[40,101,41]],[[9377,9377],"disallowed_STD3_mapped",[40,102,41]],[[9378,9378],"disallowed_STD3_mapped",[40,103,41]],[[9379,9379],"disallowed_STD3_mapped",[40,104,41]],[[9380,9380],"disallowed_STD3_mapped",[40,105,41]],[[9381,9381],"disallowed_STD3_mapped",[40,106,41]],[[9382,9382],"disallowed_STD3_mapped",[40,107,41]],[[9383,9383],"disallowed_STD3_mapped",[40,108,41]],[[9384,9384],"disallowed_STD3_mapped",[40,109,41]],[[9385,9385],"disallowed_STD3_mapped",[40,110,41]],[[9386,9386],"disallowed_STD3_mapped",[40,111,41]],[[9387,9387],"disallowed_STD3_mapped",[40,112,41]],[[9388,9388],"disallowed_STD3_mapped",[40,113,41]],[[9389,9389],"disallowed_STD3_mapped",[40,114,41]],[[9390,9390],"disallowed_STD3_mapped",[40,115,41]],[[9391,9391],"disallowed_STD3_mapped",[40,116,41]],[[9392,9392],"disallowed_STD3_mapped",[40,117,41]],[[9393,9393],"disallowed_STD3_mapped",[40,118,41]],[[9394,9394],"disallowed_STD3_mapped",[40,119,41]],[[9395,9395],"disallowed_STD3_mapped",[40,120,41]],[[9396,9396],"disallowed_STD3_mapped",[40,121,41]],[[9397,9397],"disallowed_STD3_mapped",[40,122,41]],[[9398,9398],"mapped",[97]],[[9399,9399],"mapped",[98]],[[9400,9400],"mapped",[99]],[[9401,9401],"mapped",[100]],[[9402,9402],"mapped",[101]],[[9403,9403],"mapped",[102]],[[9404,9404],"mapped",[103]],[[9405,9405],"mapped",[104]],[[9406,9406],"mapped",[105]],[[9407,9407],"mapped",[106]],[[9408,9408],"mapped",[107]],[[9409,9409],"mapped",[108]],[[9410,9410],"mapped",[109]],[[9411,9411],"mapped",[110]],[[9412,9412],"mapped",[111]],[[9413,9413],"mapped",[112]],[[9414,9414],"mapped",[113]],[[9415,9415],"mapped",[114]],[[9416,9416],"mapped",[115]],[[9417,9417],"mapped",[116]],[[9418,9418],"mapped",[117]],[[9419,9419],"mapped",[118]],[[9420,9420],"mapped",[119]],[[9421,9421],"mapped",[120]],[[9422,9422],"mapped",[121]],[[9423,9423],"mapped",[122]],[[9424,9424],"mapped",[97]],[[9425,9425],"mapped",[98]],[[9426,9426]
,"mapped",[99]],[[9427,9427],"mapped",[100]],[[9428,9428],"mapped",[101]],[[9429,9429],"mapped",[102]],[[9430,9430],"mapped",[103]],[[9431,9431],"mapped",[104]],[[9432,9432],"mapped",[105]],[[9433,9433],"mapped",[106]],[[9434,9434],"mapped",[107]],[[9435,9435],"mapped",[108]],[[9436,9436],"mapped",[109]],[[9437,9437],"mapped",[110]],[[9438,9438],"mapped",[111]],[[9439,9439],"mapped",[112]],[[9440,9440],"mapped",[113]],[[9441,9441],"mapped",[114]],[[9442,9442],"mapped",[115]],[[9443,9443],"mapped",[116]],[[9444,9444],"mapped",[117]],[[9445,9445],"mapped",[118]],[[9446,9446],"mapped",[119]],[[9447,9447],"mapped",[120]],[[9448,9448],"mapped",[121]],[[9449,9449],"mapped",[122]],[[9450,9450],"mapped",[48]],[[9451,9470],"valid",[],"NV8"],[[9471,9471],"valid",[],"NV8"],[[9472,9621],"valid",[],"NV8"],[[9622,9631],"valid",[],"NV8"],[[9632,9711],"valid",[],"NV8"],[[9712,9719],"valid",[],"NV8"],[[9720,9727],"valid",[],"NV8"],[[9728,9747],"valid",[],"NV8"],[[9748,9749],"valid",[],"NV8"],[[9750,9751],"valid",[],"NV8"],[[9752,9752],"valid",[],"NV8"],[[9753,9753],"valid",[],"NV8"],[[9754,9839],"valid",[],"NV8"],[[9840,9841],"valid",[],"NV8"],[[9842,9853],"valid",[],"NV8"],[[9854,9855],"valid",[],"NV8"],[[9856,9865],"valid",[],"NV8"],[[9866,9873],"valid",[],"NV8"],[[9874,9884],"valid",[],"NV8"],[[9885,9885],"valid",[],"NV8"],[[9886,9887],"valid",[],"NV8"],[[9888,9889],"valid",[],"NV8"],[[9890,9905],"valid",[],"NV8"],[[9906,9906],"valid",[],"NV8"],[[9907,9916],"valid",[],"NV8"],[[9917,9919],"valid",[],"NV8"],[[9920,9923],"valid",[],"NV8"],[[9924,9933],"valid",[],"NV8"],[[9934,9934],"valid",[],"NV8"],[[9935,9953],"valid",[],"NV8"],[[9954,9954],"valid",[],"NV8"],[[9955,9955],"valid",[],"NV8"],[[9956,9959],"valid",[],"NV8"],[[9960,9983],"valid",[],"NV8"],[[9984,9984],"valid",[],"NV8"],[[9985,9988],"valid",[],"NV8"],[[9989,9989],"valid",[],"NV8"],[[9990,9993],"valid",[],"NV8"],[[9994,9995],"valid",[],"NV8"],[[9996,10023],"valid",[],"NV8"],[[10024,10024],"valid",[],"NV8"],[[10025,10059],"valid",[],"NV8"],[[10060,10060],"valid",[],"NV8"],[[10061,10061],"valid",[],"NV8"],[[10062,10062],"valid",[],"NV8"],[[10063,10066],"valid",[],"NV8"],[[10067,10069],"valid",[],"NV8"],[[10070,10070],"valid",[],"NV8"],[[10071,10071],"valid",[],"NV8"],[[10072,10078],"valid",[],"NV8"],[[10079,10080],"valid",[],"NV8"],[[10081,10087],"valid",[],"NV8"],[[10088,10101],"valid",[],"NV8"],[[10102,10132],"valid",[],"NV8"],[[10133,10135],"valid",[],"NV8"],[[10136,10159],"valid",[],"NV8"],[[10160,10160],"valid",[],"NV8"],[[10161,10174],"valid",[],"NV8"],[[10175,10175],"valid",[],"NV8"],[[10176,10182],"valid",[],"NV8"],[[10183,10186],"valid",[],"NV8"],[[10187,10187],"valid",[],"NV8"],[[10188,10188],"valid",[],"NV8"],[[10189,10189],"valid",[],"NV8"],[[10190,10191],"valid",[],"NV8"],[[10192,10219],"valid",[],"NV8"],[[10220,10223],"valid",[],"NV8"],[[10224,10239],"valid",[],"NV8"],[[10240,10495],"valid",[],"NV8"],[[10496,10763],"valid",[],"NV8"],[[10764,10764],"mapped",[8747,8747,8747,8747]],[[10765,10867],"valid",[],"NV8"],[[10868,10868],"disallowed_STD3_mapped",[58,58,61]],[[10869,10869],"disallowed_STD3_mapped",[61,61]],[[10870,10870],"disallowed_STD3_mapped",[61,61,61]],[[10871,10971],"valid",[],"NV8"],[[10972,10972],"mapped",[10973,824]],[[10973,11007],"valid",[],"NV8"],[[11008,11021],"valid",[],"NV8"],[[11022,11027],"valid",[],"NV8"],[[11028,11034],"valid",[],"NV8"],[[11035,11039],"valid",[],"NV8"],[[11040,11043],"valid",[],"NV8"],[[11044,11084],"valid",[],"NV8"],[[11085,11087],"valid",[],"NV8"],[[11088,11092],"valid",[],"NV8"],[[11093,11097
],"valid",[],"NV8"],[[11098,11123],"valid",[],"NV8"],[[11124,11125],"disallowed"],[[11126,11157],"valid",[],"NV8"],[[11158,11159],"disallowed"],[[11160,11193],"valid",[],"NV8"],[[11194,11196],"disallowed"],[[11197,11208],"valid",[],"NV8"],[[11209,11209],"disallowed"],[[11210,11217],"valid",[],"NV8"],[[11218,11243],"disallowed"],[[11244,11247],"valid",[],"NV8"],[[11248,11263],"disallowed"],[[11264,11264],"mapped",[11312]],[[11265,11265],"mapped",[11313]],[[11266,11266],"mapped",[11314]],[[11267,11267],"mapped",[11315]],[[11268,11268],"mapped",[11316]],[[11269,11269],"mapped",[11317]],[[11270,11270],"mapped",[11318]],[[11271,11271],"mapped",[11319]],[[11272,11272],"mapped",[11320]],[[11273,11273],"mapped",[11321]],[[11274,11274],"mapped",[11322]],[[11275,11275],"mapped",[11323]],[[11276,11276],"mapped",[11324]],[[11277,11277],"mapped",[11325]],[[11278,11278],"mapped",[11326]],[[11279,11279],"mapped",[11327]],[[11280,11280],"mapped",[11328]],[[11281,11281],"mapped",[11329]],[[11282,11282],"mapped",[11330]],[[11283,11283],"mapped",[11331]],[[11284,11284],"mapped",[11332]],[[11285,11285],"mapped",[11333]],[[11286,11286],"mapped",[11334]],[[11287,11287],"mapped",[11335]],[[11288,11288],"mapped",[11336]],[[11289,11289],"mapped",[11337]],[[11290,11290],"mapped",[11338]],[[11291,11291],"mapped",[11339]],[[11292,11292],"mapped",[11340]],[[11293,11293],"mapped",[11341]],[[11294,11294],"mapped",[11342]],[[11295,11295],"mapped",[11343]],[[11296,11296],"mapped",[11344]],[[11297,11297],"mapped",[11345]],[[11298,11298],"mapped",[11346]],[[11299,11299],"mapped",[11347]],[[11300,11300],"mapped",[11348]],[[11301,11301],"mapped",[11349]],[[11302,11302],"mapped",[11350]],[[11303,11303],"mapped",[11351]],[[11304,11304],"mapped",[11352]],[[11305,11305],"mapped",[11353]],[[11306,11306],"mapped",[11354]],[[11307,11307],"mapped",[11355]],[[11308,11308],"mapped",[11356]],[[11309,11309],"mapped",[11357]],[[11310,11310],"mapped",[11358]],[[11311,11311],"disallowed"],[[11312,11358],"valid"],[[11359,11359],"disallowed"],[[11360,11360],"mapped",[11361]],[[11361,11361],"valid"],[[11362,11362],"mapped",[619]],[[11363,11363],"mapped",[7549]],[[11364,11364],"mapped",[637]],[[11365,11366],"valid"],[[11367,11367],"mapped",[11368]],[[11368,11368],"valid"],[[11369,11369],"mapped",[11370]],[[11370,11370],"valid"],[[11371,11371],"mapped",[11372]],[[11372,11372],"valid"],[[11373,11373],"mapped",[593]],[[11374,11374],"mapped",[625]],[[11375,11375],"mapped",[592]],[[11376,11376],"mapped",[594]],[[11377,11377],"valid"],[[11378,11378],"mapped",[11379]],[[11379,11379],"valid"],[[11380,11380],"valid"],[[11381,11381],"mapped",[11382]],[[11382,11383],"valid"],[[11384,11387],"valid"],[[11388,11388],"mapped",[106]],[[11389,11389],"mapped",[118]],[[11390,11390],"mapped",[575]],[[11391,11391],"mapped",[576]],[[11392,11392],"mapped",[11393]],[[11393,11393],"valid"],[[11394,11394],"mapped",[11395]],[[11395,11395],"valid"],[[11396,11396],"mapped",[11397]],[[11397,11397],"valid"],[[11398,11398],"mapped",[11399]],[[11399,11399],"valid"],[[11400,11400],"mapped",[11401]],[[11401,11401],"valid"],[[11402,11402],"mapped",[11403]],[[11403,11403],"valid"],[[11404,11404],"mapped",[11405]],[[11405,11405],"valid"],[[11406,11406],"mapped",[11407]],[[11407,11407],"valid"],[[11408,11408],"mapped",[11409]],[[11409,11409],"valid"],[[11410,11410],"mapped",[11411]],[[11411,11411],"valid"],[[11412,11412],"mapped",[11413]],[[11413,11413],"valid"],[[11414,11414],"mapped",[11415]],[[11415,11415],"valid"],[[11416,11416],"mapped",[11417]],[[11417,11417],"valid"],[[11418,1
1418],"mapped",[11419]],[[11419,11419],"valid"],[[11420,11420],"mapped",[11421]],[[11421,11421],"valid"],[[11422,11422],"mapped",[11423]],[[11423,11423],"valid"],[[11424,11424],"mapped",[11425]],[[11425,11425],"valid"],[[11426,11426],"mapped",[11427]],[[11427,11427],"valid"],[[11428,11428],"mapped",[11429]],[[11429,11429],"valid"],[[11430,11430],"mapped",[11431]],[[11431,11431],"valid"],[[11432,11432],"mapped",[11433]],[[11433,11433],"valid"],[[11434,11434],"mapped",[11435]],[[11435,11435],"valid"],[[11436,11436],"mapped",[11437]],[[11437,11437],"valid"],[[11438,11438],"mapped",[11439]],[[11439,11439],"valid"],[[11440,11440],"mapped",[11441]],[[11441,11441],"valid"],[[11442,11442],"mapped",[11443]],[[11443,11443],"valid"],[[11444,11444],"mapped",[11445]],[[11445,11445],"valid"],[[11446,11446],"mapped",[11447]],[[11447,11447],"valid"],[[11448,11448],"mapped",[11449]],[[11449,11449],"valid"],[[11450,11450],"mapped",[11451]],[[11451,11451],"valid"],[[11452,11452],"mapped",[11453]],[[11453,11453],"valid"],[[11454,11454],"mapped",[11455]],[[11455,11455],"valid"],[[11456,11456],"mapped",[11457]],[[11457,11457],"valid"],[[11458,11458],"mapped",[11459]],[[11459,11459],"valid"],[[11460,11460],"mapped",[11461]],[[11461,11461],"valid"],[[11462,11462],"mapped",[11463]],[[11463,11463],"valid"],[[11464,11464],"mapped",[11465]],[[11465,11465],"valid"],[[11466,11466],"mapped",[11467]],[[11467,11467],"valid"],[[11468,11468],"mapped",[11469]],[[11469,11469],"valid"],[[11470,11470],"mapped",[11471]],[[11471,11471],"valid"],[[11472,11472],"mapped",[11473]],[[11473,11473],"valid"],[[11474,11474],"mapped",[11475]],[[11475,11475],"valid"],[[11476,11476],"mapped",[11477]],[[11477,11477],"valid"],[[11478,11478],"mapped",[11479]],[[11479,11479],"valid"],[[11480,11480],"mapped",[11481]],[[11481,11481],"valid"],[[11482,11482],"mapped",[11483]],[[11483,11483],"valid"],[[11484,11484],"mapped",[11485]],[[11485,11485],"valid"],[[11486,11486],"mapped",[11487]],[[11487,11487],"valid"],[[11488,11488],"mapped",[11489]],[[11489,11489],"valid"],[[11490,11490],"mapped",[11491]],[[11491,11492],"valid"],[[11493,11498],"valid",[],"NV8"],[[11499,11499],"mapped",[11500]],[[11500,11500],"valid"],[[11501,11501],"mapped",[11502]],[[11502,11505],"valid"],[[11506,11506],"mapped",[11507]],[[11507,11507],"valid"],[[11508,11512],"disallowed"],[[11513,11519],"valid",[],"NV8"],[[11520,11557],"valid"],[[11558,11558],"disallowed"],[[11559,11559],"valid"],[[11560,11564],"disallowed"],[[11565,11565],"valid"],[[11566,11567],"disallowed"],[[11568,11621],"valid"],[[11622,11623],"valid"],[[11624,11630],"disallowed"],[[11631,11631],"mapped",[11617]],[[11632,11632],"valid",[],"NV8"],[[11633,11646],"disallowed"],[[11647,11647],"valid"],[[11648,11670],"valid"],[[11671,11679],"disallowed"],[[11680,11686],"valid"],[[11687,11687],"disallowed"],[[11688,11694],"valid"],[[11695,11695],"disallowed"],[[11696,11702],"valid"],[[11703,11703],"disallowed"],[[11704,11710],"valid"],[[11711,11711],"disallowed"],[[11712,11718],"valid"],[[11719,11719],"disallowed"],[[11720,11726],"valid"],[[11727,11727],"disallowed"],[[11728,11734],"valid"],[[11735,11735],"disallowed"],[[11736,11742],"valid"],[[11743,11743],"disallowed"],[[11744,11775],"valid"],[[11776,11799],"valid",[],"NV8"],[[11800,11803],"valid",[],"NV8"],[[11804,11805],"valid",[],"NV8"],[[11806,11822],"valid",[],"NV8"],[[11823,11823],"valid"],[[11824,11824],"valid",[],"NV8"],[[11825,11825],"valid",[],"NV8"],[[11826,11835],"valid",[],"NV8"],[[11836,11842],"valid",[],"NV8"],[[11843,11903],"disallowed"],[[11904,11929],"
valid",[],"NV8"],[[11930,11930],"disallowed"],[[11931,11934],"valid",[],"NV8"],[[11935,11935],"mapped",[27597]],[[11936,12018],"valid",[],"NV8"],[[12019,12019],"mapped",[40863]],[[12020,12031],"disallowed"],[[12032,12032],"mapped",[19968]],[[12033,12033],"mapped",[20008]],[[12034,12034],"mapped",[20022]],[[12035,12035],"mapped",[20031]],[[12036,12036],"mapped",[20057]],[[12037,12037],"mapped",[20101]],[[12038,12038],"mapped",[20108]],[[12039,12039],"mapped",[20128]],[[12040,12040],"mapped",[20154]],[[12041,12041],"mapped",[20799]],[[12042,12042],"mapped",[20837]],[[12043,12043],"mapped",[20843]],[[12044,12044],"mapped",[20866]],[[12045,12045],"mapped",[20886]],[[12046,12046],"mapped",[20907]],[[12047,12047],"mapped",[20960]],[[12048,12048],"mapped",[20981]],[[12049,12049],"mapped",[20992]],[[12050,12050],"mapped",[21147]],[[12051,12051],"mapped",[21241]],[[12052,12052],"mapped",[21269]],[[12053,12053],"mapped",[21274]],[[12054,12054],"mapped",[21304]],[[12055,12055],"mapped",[21313]],[[12056,12056],"mapped",[21340]],[[12057,12057],"mapped",[21353]],[[12058,12058],"mapped",[21378]],[[12059,12059],"mapped",[21430]],[[12060,12060],"mapped",[21448]],[[12061,12061],"mapped",[21475]],[[12062,12062],"mapped",[22231]],[[12063,12063],"mapped",[22303]],[[12064,12064],"mapped",[22763]],[[12065,12065],"mapped",[22786]],[[12066,12066],"mapped",[22794]],[[12067,12067],"mapped",[22805]],[[12068,12068],"mapped",[22823]],[[12069,12069],"mapped",[22899]],[[12070,12070],"mapped",[23376]],[[12071,12071],"mapped",[23424]],[[12072,12072],"mapped",[23544]],[[12073,12073],"mapped",[23567]],[[12074,12074],"mapped",[23586]],[[12075,12075],"mapped",[23608]],[[12076,12076],"mapped",[23662]],[[12077,12077],"mapped",[23665]],[[12078,12078],"mapped",[24027]],[[12079,12079],"mapped",[24037]],[[12080,12080],"mapped",[24049]],[[12081,12081],"mapped",[24062]],[[12082,12082],"mapped",[24178]],[[12083,12083],"mapped",[24186]],[[12084,12084],"mapped",[24191]],[[12085,12085],"mapped",[24308]],[[12086,12086],"mapped",[24318]],[[12087,12087],"mapped",[24331]],[[12088,12088],"mapped",[24339]],[[12089,12089],"mapped",[24400]],[[12090,12090],"mapped",[24417]],[[12091,12091],"mapped",[24435]],[[12092,12092],"mapped",[24515]],[[12093,12093],"mapped",[25096]],[[12094,12094],"mapped",[25142]],[[12095,12095],"mapped",[25163]],[[12096,12096],"mapped",[25903]],[[12097,12097],"mapped",[25908]],[[12098,12098],"mapped",[25991]],[[12099,12099],"mapped",[26007]],[[12100,12100],"mapped",[26020]],[[12101,12101],"mapped",[26041]],[[12102,12102],"mapped",[26080]],[[12103,12103],"mapped",[26085]],[[12104,12104],"mapped",[26352]],[[12105,12105],"mapped",[26376]],[[12106,12106],"mapped",[26408]],[[12107,12107],"mapped",[27424]],[[12108,12108],"mapped",[27490]],[[12109,12109],"mapped",[27513]],[[12110,12110],"mapped",[27571]],[[12111,12111],"mapped",[27595]],[[12112,12112],"mapped",[27604]],[[12113,12113],"mapped",[27611]],[[12114,12114],"mapped",[27663]],[[12115,12115],"mapped",[27668]],[[12116,12116],"mapped",[27700]],[[12117,12117],"mapped",[28779]],[[12118,12118],"mapped",[29226]],[[12119,12119],"mapped",[29238]],[[12120,12120],"mapped",[29243]],[[12121,12121],"mapped",[29247]],[[12122,12122],"mapped",[29255]],[[12123,12123],"mapped",[29273]],[[12124,12124],"mapped",[29275]],[[12125,12125],"mapped",[29356]],[[12126,12126],"mapped",[29572]],[[12127,12127],"mapped",[29577]],[[12128,12128],"mapped",[29916]],[[12129,12129],"mapped",[29926]],[[12130,12130],"mapped",[29976]],[[12131,12131],"mapped",[29983]],[[12132,12132],"mapped",[29992]],[[12133,12133],
"mapped",[30000]],[[12134,12134],"mapped",[30091]],[[12135,12135],"mapped",[30098]],[[12136,12136],"mapped",[30326]],[[12137,12137],"mapped",[30333]],[[12138,12138],"mapped",[30382]],[[12139,12139],"mapped",[30399]],[[12140,12140],"mapped",[30446]],[[12141,12141],"mapped",[30683]],[[12142,12142],"mapped",[30690]],[[12143,12143],"mapped",[30707]],[[12144,12144],"mapped",[31034]],[[12145,12145],"mapped",[31160]],[[12146,12146],"mapped",[31166]],[[12147,12147],"mapped",[31348]],[[12148,12148],"mapped",[31435]],[[12149,12149],"mapped",[31481]],[[12150,12150],"mapped",[31859]],[[12151,12151],"mapped",[31992]],[[12152,12152],"mapped",[32566]],[[12153,12153],"mapped",[32593]],[[12154,12154],"mapped",[32650]],[[12155,12155],"mapped",[32701]],[[12156,12156],"mapped",[32769]],[[12157,12157],"mapped",[32780]],[[12158,12158],"mapped",[32786]],[[12159,12159],"mapped",[32819]],[[12160,12160],"mapped",[32895]],[[12161,12161],"mapped",[32905]],[[12162,12162],"mapped",[33251]],[[12163,12163],"mapped",[33258]],[[12164,12164],"mapped",[33267]],[[12165,12165],"mapped",[33276]],[[12166,12166],"mapped",[33292]],[[12167,12167],"mapped",[33307]],[[12168,12168],"mapped",[33311]],[[12169,12169],"mapped",[33390]],[[12170,12170],"mapped",[33394]],[[12171,12171],"mapped",[33400]],[[12172,12172],"mapped",[34381]],[[12173,12173],"mapped",[34411]],[[12174,12174],"mapped",[34880]],[[12175,12175],"mapped",[34892]],[[12176,12176],"mapped",[34915]],[[12177,12177],"mapped",[35198]],[[12178,12178],"mapped",[35211]],[[12179,12179],"mapped",[35282]],[[12180,12180],"mapped",[35328]],[[12181,12181],"mapped",[35895]],[[12182,12182],"mapped",[35910]],[[12183,12183],"mapped",[35925]],[[12184,12184],"mapped",[35960]],[[12185,12185],"mapped",[35997]],[[12186,12186],"mapped",[36196]],[[12187,12187],"mapped",[36208]],[[12188,12188],"mapped",[36275]],[[12189,12189],"mapped",[36523]],[[12190,12190],"mapped",[36554]],[[12191,12191],"mapped",[36763]],[[12192,12192],"mapped",[36784]],[[12193,12193],"mapped",[36789]],[[12194,12194],"mapped",[37009]],[[12195,12195],"mapped",[37193]],[[12196,12196],"mapped",[37318]],[[12197,12197],"mapped",[37324]],[[12198,12198],"mapped",[37329]],[[12199,12199],"mapped",[38263]],[[12200,12200],"mapped",[38272]],[[12201,12201],"mapped",[38428]],[[12202,12202],"mapped",[38582]],[[12203,12203],"mapped",[38585]],[[12204,12204],"mapped",[38632]],[[12205,12205],"mapped",[38737]],[[12206,12206],"mapped",[38750]],[[12207,12207],"mapped",[38754]],[[12208,12208],"mapped",[38761]],[[12209,12209],"mapped",[38859]],[[12210,12210],"mapped",[38893]],[[12211,12211],"mapped",[38899]],[[12212,12212],"mapped",[38913]],[[12213,12213],"mapped",[39080]],[[12214,12214],"mapped",[39131]],[[12215,12215],"mapped",[39135]],[[12216,12216],"mapped",[39318]],[[12217,12217],"mapped",[39321]],[[12218,12218],"mapped",[39340]],[[12219,12219],"mapped",[39592]],[[12220,12220],"mapped",[39640]],[[12221,12221],"mapped",[39647]],[[12222,12222],"mapped",[39717]],[[12223,12223],"mapped",[39727]],[[12224,12224],"mapped",[39730]],[[12225,12225],"mapped",[39740]],[[12226,12226],"mapped",[39770]],[[12227,12227],"mapped",[40165]],[[12228,12228],"mapped",[40565]],[[12229,12229],"mapped",[40575]],[[12230,12230],"mapped",[40613]],[[12231,12231],"mapped",[40635]],[[12232,12232],"mapped",[40643]],[[12233,12233],"mapped",[40653]],[[12234,12234],"mapped",[40657]],[[12235,12235],"mapped",[40697]],[[12236,12236],"mapped",[40701]],[[12237,12237],"mapped",[40718]],[[12238,12238],"mapped",[40723]],[[12239,12239],"mapped",[40736]],[[12240,12240],"mapped",[40763]],[[1224
1,12241],"mapped",[40778]],[[12242,12242],"mapped",[40786]],[[12243,12243],"mapped",[40845]],[[12244,12244],"mapped",[40860]],[[12245,12245],"mapped",[40864]],[[12246,12271],"disallowed"],[[12272,12283],"disallowed"],[[12284,12287],"disallowed"],[[12288,12288],"disallowed_STD3_mapped",[32]],[[12289,12289],"valid",[],"NV8"],[[12290,12290],"mapped",[46]],[[12291,12292],"valid",[],"NV8"],[[12293,12295],"valid"],[[12296,12329],"valid",[],"NV8"],[[12330,12333],"valid"],[[12334,12341],"valid",[],"NV8"],[[12342,12342],"mapped",[12306]],[[12343,12343],"valid",[],"NV8"],[[12344,12344],"mapped",[21313]],[[12345,12345],"mapped",[21316]],[[12346,12346],"mapped",[21317]],[[12347,12347],"valid",[],"NV8"],[[12348,12348],"valid"],[[12349,12349],"valid",[],"NV8"],[[12350,12350],"valid",[],"NV8"],[[12351,12351],"valid",[],"NV8"],[[12352,12352],"disallowed"],[[12353,12436],"valid"],[[12437,12438],"valid"],[[12439,12440],"disallowed"],[[12441,12442],"valid"],[[12443,12443],"disallowed_STD3_mapped",[32,12441]],[[12444,12444],"disallowed_STD3_mapped",[32,12442]],[[12445,12446],"valid"],[[12447,12447],"mapped",[12424,12426]],[[12448,12448],"valid",[],"NV8"],[[12449,12542],"valid"],[[12543,12543],"mapped",[12467,12488]],[[12544,12548],"disallowed"],[[12549,12588],"valid"],[[12589,12589],"valid"],[[12590,12592],"disallowed"],[[12593,12593],"mapped",[4352]],[[12594,12594],"mapped",[4353]],[[12595,12595],"mapped",[4522]],[[12596,12596],"mapped",[4354]],[[12597,12597],"mapped",[4524]],[[12598,12598],"mapped",[4525]],[[12599,12599],"mapped",[4355]],[[12600,12600],"mapped",[4356]],[[12601,12601],"mapped",[4357]],[[12602,12602],"mapped",[4528]],[[12603,12603],"mapped",[4529]],[[12604,12604],"mapped",[4530]],[[12605,12605],"mapped",[4531]],[[12606,12606],"mapped",[4532]],[[12607,12607],"mapped",[4533]],[[12608,12608],"mapped",[4378]],[[12609,12609],"mapped",[4358]],[[12610,12610],"mapped",[4359]],[[12611,12611],"mapped",[4360]],[[12612,12612],"mapped",[4385]],[[12613,12613],"mapped",[4361]],[[12614,12614],"mapped",[4362]],[[12615,12615],"mapped",[4363]],[[12616,12616],"mapped",[4364]],[[12617,12617],"mapped",[4365]],[[12618,12618],"mapped",[4366]],[[12619,12619],"mapped",[4367]],[[12620,12620],"mapped",[4368]],[[12621,12621],"mapped",[4369]],[[12622,12622],"mapped",[4370]],[[12623,12623],"mapped",[4449]],[[12624,12624],"mapped",[4450]],[[12625,12625],"mapped",[4451]],[[12626,12626],"mapped",[4452]],[[12627,12627],"mapped",[4453]],[[12628,12628],"mapped",[4454]],[[12629,12629],"mapped",[4455]],[[12630,12630],"mapped",[4456]],[[12631,12631],"mapped",[4457]],[[12632,12632],"mapped",[4458]],[[12633,12633],"mapped",[4459]],[[12634,12634],"mapped",[4460]],[[12635,12635],"mapped",[4461]],[[12636,12636],"mapped",[4462]],[[12637,12637],"mapped",[4463]],[[12638,12638],"mapped",[4464]],[[12639,12639],"mapped",[4465]],[[12640,12640],"mapped",[4466]],[[12641,12641],"mapped",[4467]],[[12642,12642],"mapped",[4468]],[[12643,12643],"mapped",[4469]],[[12644,12644],"disallowed"],[[12645,12645],"mapped",[4372]],[[12646,12646],"mapped",[4373]],[[12647,12647],"mapped",[4551]],[[12648,12648],"mapped",[4552]],[[12649,12649],"mapped",[4556]],[[12650,12650],"mapped",[4558]],[[12651,12651],"mapped",[4563]],[[12652,12652],"mapped",[4567]],[[12653,12653],"mapped",[4569]],[[12654,12654],"mapped",[4380]],[[12655,12655],"mapped",[4573]],[[12656,12656],"mapped",[4575]],[[12657,12657],"mapped",[4381]],[[12658,12658],"mapped",[4382]],[[12659,12659],"mapped",[4384]],[[12660,12660],"mapped",[4386]],[[12661,12661],"mapped",[4387]],[[12662,12662],"mapped",[439
1]],[[12663,12663],"mapped",[4393]],[[12664,12664],"mapped",[4395]],[[12665,12665],"mapped",[4396]],[[12666,12666],"mapped",[4397]],[[12667,12667],"mapped",[4398]],[[12668,12668],"mapped",[4399]],[[12669,12669],"mapped",[4402]],[[12670,12670],"mapped",[4406]],[[12671,12671],"mapped",[4416]],[[12672,12672],"mapped",[4423]],[[12673,12673],"mapped",[4428]],[[12674,12674],"mapped",[4593]],[[12675,12675],"mapped",[4594]],[[12676,12676],"mapped",[4439]],[[12677,12677],"mapped",[4440]],[[12678,12678],"mapped",[4441]],[[12679,12679],"mapped",[4484]],[[12680,12680],"mapped",[4485]],[[12681,12681],"mapped",[4488]],[[12682,12682],"mapped",[4497]],[[12683,12683],"mapped",[4498]],[[12684,12684],"mapped",[4500]],[[12685,12685],"mapped",[4510]],[[12686,12686],"mapped",[4513]],[[12687,12687],"disallowed"],[[12688,12689],"valid",[],"NV8"],[[12690,12690],"mapped",[19968]],[[12691,12691],"mapped",[20108]],[[12692,12692],"mapped",[19977]],[[12693,12693],"mapped",[22235]],[[12694,12694],"mapped",[19978]],[[12695,12695],"mapped",[20013]],[[12696,12696],"mapped",[19979]],[[12697,12697],"mapped",[30002]],[[12698,12698],"mapped",[20057]],[[12699,12699],"mapped",[19993]],[[12700,12700],"mapped",[19969]],[[12701,12701],"mapped",[22825]],[[12702,12702],"mapped",[22320]],[[12703,12703],"mapped",[20154]],[[12704,12727],"valid"],[[12728,12730],"valid"],[[12731,12735],"disallowed"],[[12736,12751],"valid",[],"NV8"],[[12752,12771],"valid",[],"NV8"],[[12772,12783],"disallowed"],[[12784,12799],"valid"],[[12800,12800],"disallowed_STD3_mapped",[40,4352,41]],[[12801,12801],"disallowed_STD3_mapped",[40,4354,41]],[[12802,12802],"disallowed_STD3_mapped",[40,4355,41]],[[12803,12803],"disallowed_STD3_mapped",[40,4357,41]],[[12804,12804],"disallowed_STD3_mapped",[40,4358,41]],[[12805,12805],"disallowed_STD3_mapped",[40,4359,41]],[[12806,12806],"disallowed_STD3_mapped",[40,4361,41]],[[12807,12807],"disallowed_STD3_mapped",[40,4363,41]],[[12808,12808],"disallowed_STD3_mapped",[40,4364,41]],[[12809,12809],"disallowed_STD3_mapped",[40,4366,41]],[[12810,12810],"disallowed_STD3_mapped",[40,4367,41]],[[12811,12811],"disallowed_STD3_mapped",[40,4368,41]],[[12812,12812],"disallowed_STD3_mapped",[40,4369,41]],[[12813,12813],"disallowed_STD3_mapped",[40,4370,41]],[[12814,12814],"disallowed_STD3_mapped",[40,44032,41]],[[12815,12815],"disallowed_STD3_mapped",[40,45208,41]],[[12816,12816],"disallowed_STD3_mapped",[40,45796,41]],[[12817,12817],"disallowed_STD3_mapped",[40,46972,41]],[[12818,12818],"disallowed_STD3_mapped",[40,47560,41]],[[12819,12819],"disallowed_STD3_mapped",[40,48148,41]],[[12820,12820],"disallowed_STD3_mapped",[40,49324,41]],[[12821,12821],"disallowed_STD3_mapped",[40,50500,41]],[[12822,12822],"disallowed_STD3_mapped",[40,51088,41]],[[12823,12823],"disallowed_STD3_mapped",[40,52264,41]],[[12824,12824],"disallowed_STD3_mapped",[40,52852,41]],[[12825,12825],"disallowed_STD3_mapped",[40,53440,41]],[[12826,12826],"disallowed_STD3_mapped",[40,54028,41]],[[12827,12827],"disallowed_STD3_mapped",[40,54616,41]],[[12828,12828],"disallowed_STD3_mapped",[40,51452,41]],[[12829,12829],"disallowed_STD3_mapped",[40,50724,51204,41]],[[12830,12830],"disallowed_STD3_mapped",[40,50724,54980,41]],[[12831,12831],"disallowed"],[[12832,12832],"disallowed_STD3_mapped",[40,19968,41]],[[12833,12833],"disallowed_STD3_mapped",[40,20108,41]],[[12834,12834],"disallowed_STD3_mapped",[40,19977,41]],[[12835,12835],"disallowed_STD3_mapped",[40,22235,41]],[[12836,12836],"disallowed_STD3_mapped",[40,20116,41]],[[12837,12837],"disallowed_STD3_mapped",[40,20845,41]],[
[12838,12838],"disallowed_STD3_mapped",[40,19971,41]],[[12839,12839],"disallowed_STD3_mapped",[40,20843,41]],[[12840,12840],"disallowed_STD3_mapped",[40,20061,41]],[[12841,12841],"disallowed_STD3_mapped",[40,21313,41]],[[12842,12842],"disallowed_STD3_mapped",[40,26376,41]],[[12843,12843],"disallowed_STD3_mapped",[40,28779,41]],[[12844,12844],"disallowed_STD3_mapped",[40,27700,41]],[[12845,12845],"disallowed_STD3_mapped",[40,26408,41]],[[12846,12846],"disallowed_STD3_mapped",[40,37329,41]],[[12847,12847],"disallowed_STD3_mapped",[40,22303,41]],[[12848,12848],"disallowed_STD3_mapped",[40,26085,41]],[[12849,12849],"disallowed_STD3_mapped",[40,26666,41]],[[12850,12850],"disallowed_STD3_mapped",[40,26377,41]],[[12851,12851],"disallowed_STD3_mapped",[40,31038,41]],[[12852,12852],"disallowed_STD3_mapped",[40,21517,41]],[[12853,12853],"disallowed_STD3_mapped",[40,29305,41]],[[12854,12854],"disallowed_STD3_mapped",[40,36001,41]],[[12855,12855],"disallowed_STD3_mapped",[40,31069,41]],[[12856,12856],"disallowed_STD3_mapped",[40,21172,41]],[[12857,12857],"disallowed_STD3_mapped",[40,20195,41]],[[12858,12858],"disallowed_STD3_mapped",[40,21628,41]],[[12859,12859],"disallowed_STD3_mapped",[40,23398,41]],[[12860,12860],"disallowed_STD3_mapped",[40,30435,41]],[[12861,12861],"disallowed_STD3_mapped",[40,20225,41]],[[12862,12862],"disallowed_STD3_mapped",[40,36039,41]],[[12863,12863],"disallowed_STD3_mapped",[40,21332,41]],[[12864,12864],"disallowed_STD3_mapped",[40,31085,41]],[[12865,12865],"disallowed_STD3_mapped",[40,20241,41]],[[12866,12866],"disallowed_STD3_mapped",[40,33258,41]],[[12867,12867],"disallowed_STD3_mapped",[40,33267,41]],[[12868,12868],"mapped",[21839]],[[12869,12869],"mapped",[24188]],[[12870,12870],"mapped",[25991]],[[12871,12871],"mapped",[31631]],[[12872,12879],"valid",[],"NV8"],[[12880,12880],"mapped",[112,116,101]],[[12881,12881],"mapped",[50,49]],[[12882,12882],"mapped",[50,50]],[[12883,12883],"mapped",[50,51]],[[12884,12884],"mapped",[50,52]],[[12885,12885],"mapped",[50,53]],[[12886,12886],"mapped",[50,54]],[[12887,12887],"mapped",[50,55]],[[12888,12888],"mapped",[50,56]],[[12889,12889],"mapped",[50,57]],[[12890,12890],"mapped",[51,48]],[[12891,12891],"mapped",[51,49]],[[12892,12892],"mapped",[51,50]],[[12893,12893],"mapped",[51,51]],[[12894,12894],"mapped",[51,52]],[[12895,12895],"mapped",[51,53]],[[12896,12896],"mapped",[4352]],[[12897,12897],"mapped",[4354]],[[12898,12898],"mapped",[4355]],[[12899,12899],"mapped",[4357]],[[12900,12900],"mapped",[4358]],[[12901,12901],"mapped",[4359]],[[12902,12902],"mapped",[4361]],[[12903,12903],"mapped",[4363]],[[12904,12904],"mapped",[4364]],[[12905,12905],"mapped",[4366]],[[12906,12906],"mapped",[4367]],[[12907,12907],"mapped",[4368]],[[12908,12908],"mapped",[4369]],[[12909,12909],"mapped",[4370]],[[12910,12910],"mapped",[44032]],[[12911,12911],"mapped",[45208]],[[12912,12912],"mapped",[45796]],[[12913,12913],"mapped",[46972]],[[12914,12914],"mapped",[47560]],[[12915,12915],"mapped",[48148]],[[12916,12916],"mapped",[49324]],[[12917,12917],"mapped",[50500]],[[12918,12918],"mapped",[51088]],[[12919,12919],"mapped",[52264]],[[12920,12920],"mapped",[52852]],[[12921,12921],"mapped",[53440]],[[12922,12922],"mapped",[54028]],[[12923,12923],"mapped",[54616]],[[12924,12924],"mapped",[52280,44256]],[[12925,12925],"mapped",[51452,51032]],[[12926,12926],"mapped",[50864]],[[12927,12927],"valid",[],"NV8"],[[12928,12928],"mapped",[19968]],[[12929,12929],"mapped",[20108]],[[12930,12930],"mapped",[19977]],[[12931,12931],"mapped",[22235]],[[12932,12932],"mappe
d",[20116]],[[12933,12933],"mapped",[20845]],[[12934,12934],"mapped",[19971]],[[12935,12935],"mapped",[20843]],[[12936,12936],"mapped",[20061]],[[12937,12937],"mapped",[21313]],[[12938,12938],"mapped",[26376]],[[12939,12939],"mapped",[28779]],[[12940,12940],"mapped",[27700]],[[12941,12941],"mapped",[26408]],[[12942,12942],"mapped",[37329]],[[12943,12943],"mapped",[22303]],[[12944,12944],"mapped",[26085]],[[12945,12945],"mapped",[26666]],[[12946,12946],"mapped",[26377]],[[12947,12947],"mapped",[31038]],[[12948,12948],"mapped",[21517]],[[12949,12949],"mapped",[29305]],[[12950,12950],"mapped",[36001]],[[12951,12951],"mapped",[31069]],[[12952,12952],"mapped",[21172]],[[12953,12953],"mapped",[31192]],[[12954,12954],"mapped",[30007]],[[12955,12955],"mapped",[22899]],[[12956,12956],"mapped",[36969]],[[12957,12957],"mapped",[20778]],[[12958,12958],"mapped",[21360]],[[12959,12959],"mapped",[27880]],[[12960,12960],"mapped",[38917]],[[12961,12961],"mapped",[20241]],[[12962,12962],"mapped",[20889]],[[12963,12963],"mapped",[27491]],[[12964,12964],"mapped",[19978]],[[12965,12965],"mapped",[20013]],[[12966,12966],"mapped",[19979]],[[12967,12967],"mapped",[24038]],[[12968,12968],"mapped",[21491]],[[12969,12969],"mapped",[21307]],[[12970,12970],"mapped",[23447]],[[12971,12971],"mapped",[23398]],[[12972,12972],"mapped",[30435]],[[12973,12973],"mapped",[20225]],[[12974,12974],"mapped",[36039]],[[12975,12975],"mapped",[21332]],[[12976,12976],"mapped",[22812]],[[12977,12977],"mapped",[51,54]],[[12978,12978],"mapped",[51,55]],[[12979,12979],"mapped",[51,56]],[[12980,12980],"mapped",[51,57]],[[12981,12981],"mapped",[52,48]],[[12982,12982],"mapped",[52,49]],[[12983,12983],"mapped",[52,50]],[[12984,12984],"mapped",[52,51]],[[12985,12985],"mapped",[52,52]],[[12986,12986],"mapped",[52,53]],[[12987,12987],"mapped",[52,54]],[[12988,12988],"mapped",[52,55]],[[12989,12989],"mapped",[52,56]],[[12990,12990],"mapped",[52,57]],[[12991,12991],"mapped",[53,48]],[[12992,12992],"mapped",[49,26376]],[[12993,12993],"mapped",[50,26376]],[[12994,12994],"mapped",[51,26376]],[[12995,12995],"mapped",[52,26376]],[[12996,12996],"mapped",[53,26376]],[[12997,12997],"mapped",[54,26376]],[[12998,12998],"mapped",[55,26376]],[[12999,12999],"mapped",[56,26376]],[[13000,13000],"mapped",[57,26376]],[[13001,13001],"mapped",[49,48,26376]],[[13002,13002],"mapped",[49,49,26376]],[[13003,13003],"mapped",[49,50,26376]],[[13004,13004],"mapped",[104,103]],[[13005,13005],"mapped",[101,114,103]],[[13006,13006],"mapped",[101,118]],[[13007,13007],"mapped",[108,116,100]],[[13008,13008],"mapped",[12450]],[[13009,13009],"mapped",[12452]],[[13010,13010],"mapped",[12454]],[[13011,13011],"mapped",[12456]],[[13012,13012],"mapped",[12458]],[[13013,13013],"mapped",[12459]],[[13014,13014],"mapped",[12461]],[[13015,13015],"mapped",[12463]],[[13016,13016],"mapped",[12465]],[[13017,13017],"mapped",[12467]],[[13018,13018],"mapped",[12469]],[[13019,13019],"mapped",[12471]],[[13020,13020],"mapped",[12473]],[[13021,13021],"mapped",[12475]],[[13022,13022],"mapped",[12477]],[[13023,13023],"mapped",[12479]],[[13024,13024],"mapped",[12481]],[[13025,13025],"mapped",[12484]],[[13026,13026],"mapped",[12486]],[[13027,13027],"mapped",[12488]],[[13028,13028],"mapped",[12490]],[[13029,13029],"mapped",[12491]],[[13030,13030],"mapped",[12492]],[[13031,13031],"mapped",[12493]],[[13032,13032],"mapped",[12494]],[[13033,13033],"mapped",[12495]],[[13034,13034],"mapped",[12498]],[[13035,13035],"mapped",[12501]],[[13036,13036],"mapped",[12504]],[[13037,13037],"mapped",[12507]],[[13038,13038],"m
apped",[12510]],[[13039,13039],"mapped",[12511]],[[13040,13040],"mapped",[12512]],[[13041,13041],"mapped",[12513]],[[13042,13042],"mapped",[12514]],[[13043,13043],"mapped",[12516]],[[13044,13044],"mapped",[12518]],[[13045,13045],"mapped",[12520]],[[13046,13046],"mapped",[12521]],[[13047,13047],"mapped",[12522]],[[13048,13048],"mapped",[12523]],[[13049,13049],"mapped",[12524]],[[13050,13050],"mapped",[12525]],[[13051,13051],"mapped",[12527]],[[13052,13052],"mapped",[12528]],[[13053,13053],"mapped",[12529]],[[13054,13054],"mapped",[12530]],[[13055,13055],"disallowed"],[[13056,13056],"mapped",[12450,12497,12540,12488]],[[13057,13057],"mapped",[12450,12523,12501,12449]],[[13058,13058],"mapped",[12450,12531,12506,12450]],[[13059,13059],"mapped",[12450,12540,12523]],[[13060,13060],"mapped",[12452,12491,12531,12464]],[[13061,13061],"mapped",[12452,12531,12481]],[[13062,13062],"mapped",[12454,12457,12531]],[[13063,13063],"mapped",[12456,12473,12463,12540,12489]],[[13064,13064],"mapped",[12456,12540,12459,12540]],[[13065,13065],"mapped",[12458,12531,12473]],[[13066,13066],"mapped",[12458,12540,12512]],[[13067,13067],"mapped",[12459,12452,12522]],[[13068,13068],"mapped",[12459,12521,12483,12488]],[[13069,13069],"mapped",[12459,12525,12522,12540]],[[13070,13070],"mapped",[12460,12525,12531]],[[13071,13071],"mapped",[12460,12531,12510]],[[13072,13072],"mapped",[12462,12460]],[[13073,13073],"mapped",[12462,12491,12540]],[[13074,13074],"mapped",[12461,12517,12522,12540]],[[13075,13075],"mapped",[12462,12523,12480,12540]],[[13076,13076],"mapped",[12461,12525]],[[13077,13077],"mapped",[12461,12525,12464,12521,12512]],[[13078,13078],"mapped",[12461,12525,12513,12540,12488,12523]],[[13079,13079],"mapped",[12461,12525,12527,12483,12488]],[[13080,13080],"mapped",[12464,12521,12512]],[[13081,13081],"mapped",[12464,12521,12512,12488,12531]],[[13082,13082],"mapped",[12463,12523,12476,12452,12525]],[[13083,13083],"mapped",[12463,12525,12540,12493]],[[13084,13084],"mapped",[12465,12540,12473]],[[13085,13085],"mapped",[12467,12523,12490]],[[13086,13086],"mapped",[12467,12540,12509]],[[13087,13087],"mapped",[12469,12452,12463,12523]],[[13088,13088],"mapped",[12469,12531,12481,12540,12512]],[[13089,13089],"mapped",[12471,12522,12531,12464]],[[13090,13090],"mapped",[12475,12531,12481]],[[13091,13091],"mapped",[12475,12531,12488]],[[13092,13092],"mapped",[12480,12540,12473]],[[13093,13093],"mapped",[12487,12471]],[[13094,13094],"mapped",[12489,12523]],[[13095,13095],"mapped",[12488,12531]],[[13096,13096],"mapped",[12490,12494]],[[13097,13097],"mapped",[12494,12483,12488]],[[13098,13098],"mapped",[12495,12452,12484]],[[13099,13099],"mapped",[12497,12540,12475,12531,12488]],[[13100,13100],"mapped",[12497,12540,12484]],[[13101,13101],"mapped",[12496,12540,12524,12523]],[[13102,13102],"mapped",[12500,12450,12473,12488,12523]],[[13103,13103],"mapped",[12500,12463,12523]],[[13104,13104],"mapped",[12500,12467]],[[13105,13105],"mapped",[12499,12523]],[[13106,13106],"mapped",[12501,12449,12521,12483,12489]],[[13107,13107],"mapped",[12501,12451,12540,12488]],[[13108,13108],"mapped",[12502,12483,12471,12455,12523]],[[13109,13109],"mapped",[12501,12521,12531]],[[13110,13110],"mapped",[12504,12463,12479,12540,12523]],[[13111,13111],"mapped",[12506,12477]],[[13112,13112],"mapped",[12506,12491,12498]],[[13113,13113],"mapped",[12504,12523,12484]],[[13114,13114],"mapped",[12506,12531,12473]],[[13115,13115],"mapped",[12506,12540,12472]],[[13116,13116],"mapped",[12505,12540,12479]],[[13117,13117],"mapped",[12509,12452,12531,12488]],[[1311
8,13118],"mapped",[12508,12523,12488]],[[13119,13119],"mapped",[12507,12531]],[[13120,13120],"mapped",[12509,12531,12489]],[[13121,13121],"mapped",[12507,12540,12523]],[[13122,13122],"mapped",[12507,12540,12531]],[[13123,13123],"mapped",[12510,12452,12463,12525]],[[13124,13124],"mapped",[12510,12452,12523]],[[13125,13125],"mapped",[12510,12483,12495]],[[13126,13126],"mapped",[12510,12523,12463]],[[13127,13127],"mapped",[12510,12531,12471,12519,12531]],[[13128,13128],"mapped",[12511,12463,12525,12531]],[[13129,13129],"mapped",[12511,12522]],[[13130,13130],"mapped",[12511,12522,12496,12540,12523]],[[13131,13131],"mapped",[12513,12460]],[[13132,13132],"mapped",[12513,12460,12488,12531]],[[13133,13133],"mapped",[12513,12540,12488,12523]],[[13134,13134],"mapped",[12516,12540,12489]],[[13135,13135],"mapped",[12516,12540,12523]],[[13136,13136],"mapped",[12518,12450,12531]],[[13137,13137],"mapped",[12522,12483,12488,12523]],[[13138,13138],"mapped",[12522,12521]],[[13139,13139],"mapped",[12523,12500,12540]],[[13140,13140],"mapped",[12523,12540,12502,12523]],[[13141,13141],"mapped",[12524,12512]],[[13142,13142],"mapped",[12524,12531,12488,12466,12531]],[[13143,13143],"mapped",[12527,12483,12488]],[[13144,13144],"mapped",[48,28857]],[[13145,13145],"mapped",[49,28857]],[[13146,13146],"mapped",[50,28857]],[[13147,13147],"mapped",[51,28857]],[[13148,13148],"mapped",[52,28857]],[[13149,13149],"mapped",[53,28857]],[[13150,13150],"mapped",[54,28857]],[[13151,13151],"mapped",[55,28857]],[[13152,13152],"mapped",[56,28857]],[[13153,13153],"mapped",[57,28857]],[[13154,13154],"mapped",[49,48,28857]],[[13155,13155],"mapped",[49,49,28857]],[[13156,13156],"mapped",[49,50,28857]],[[13157,13157],"mapped",[49,51,28857]],[[13158,13158],"mapped",[49,52,28857]],[[13159,13159],"mapped",[49,53,28857]],[[13160,13160],"mapped",[49,54,28857]],[[13161,13161],"mapped",[49,55,28857]],[[13162,13162],"mapped",[49,56,28857]],[[13163,13163],"mapped",[49,57,28857]],[[13164,13164],"mapped",[50,48,28857]],[[13165,13165],"mapped",[50,49,28857]],[[13166,13166],"mapped",[50,50,28857]],[[13167,13167],"mapped",[50,51,28857]],[[13168,13168],"mapped",[50,52,28857]],[[13169,13169],"mapped",[104,112,97]],[[13170,13170],"mapped",[100,97]],[[13171,13171],"mapped",[97,117]],[[13172,13172],"mapped",[98,97,114]],[[13173,13173],"mapped",[111,118]],[[13174,13174],"mapped",[112,99]],[[13175,13175],"mapped",[100,109]],[[13176,13176],"mapped",[100,109,50]],[[13177,13177],"mapped",[100,109,51]],[[13178,13178],"mapped",[105,117]],[[13179,13179],"mapped",[24179,25104]],[[13180,13180],"mapped",[26157,21644]],[[13181,13181],"mapped",[22823,27491]],[[13182,13182],"mapped",[26126,27835]],[[13183,13183],"mapped",[26666,24335,20250,31038]],[[13184,13184],"mapped",[112,97]],[[13185,13185],"mapped",[110,97]],[[13186,13186],"mapped",[956,97]],[[13187,13187],"mapped",[109,97]],[[13188,13188],"mapped",[107,97]],[[13189,13189],"mapped",[107,98]],[[13190,13190],"mapped",[109,98]],[[13191,13191],"mapped",[103,98]],[[13192,13192],"mapped",[99,97,108]],[[13193,13193],"mapped",[107,99,97,108]],[[13194,13194],"mapped",[112,102]],[[13195,13195],"mapped",[110,102]],[[13196,13196],"mapped",[956,102]],[[13197,13197],"mapped",[956,103]],[[13198,13198],"mapped",[109,103]],[[13199,13199],"mapped",[107,103]],[[13200,13200],"mapped",[104,122]],[[13201,13201],"mapped",[107,104,122]],[[13202,13202],"mapped",[109,104,122]],[[13203,13203],"mapped",[103,104,122]],[[13204,13204],"mapped",[116,104,122]],[[13205,13205],"mapped",[956,108]],[[13206,13206],"mapped",[109,108]],[[13207,13207],"ma
pped",[100,108]],[[13208,13208],"mapped",[107,108]],[[13209,13209],"mapped",[102,109]],[[13210,13210],"mapped",[110,109]],[[13211,13211],"mapped",[956,109]],[[13212,13212],"mapped",[109,109]],[[13213,13213],"mapped",[99,109]],[[13214,13214],"mapped",[107,109]],[[13215,13215],"mapped",[109,109,50]],[[13216,13216],"mapped",[99,109,50]],[[13217,13217],"mapped",[109,50]],[[13218,13218],"mapped",[107,109,50]],[[13219,13219],"mapped",[109,109,51]],[[13220,13220],"mapped",[99,109,51]],[[13221,13221],"mapped",[109,51]],[[13222,13222],"mapped",[107,109,51]],[[13223,13223],"mapped",[109,8725,115]],[[13224,13224],"mapped",[109,8725,115,50]],[[13225,13225],"mapped",[112,97]],[[13226,13226],"mapped",[107,112,97]],[[13227,13227],"mapped",[109,112,97]],[[13228,13228],"mapped",[103,112,97]],[[13229,13229],"mapped",[114,97,100]],[[13230,13230],"mapped",[114,97,100,8725,115]],[[13231,13231],"mapped",[114,97,100,8725,115,50]],[[13232,13232],"mapped",[112,115]],[[13233,13233],"mapped",[110,115]],[[13234,13234],"mapped",[956,115]],[[13235,13235],"mapped",[109,115]],[[13236,13236],"mapped",[112,118]],[[13237,13237],"mapped",[110,118]],[[13238,13238],"mapped",[956,118]],[[13239,13239],"mapped",[109,118]],[[13240,13240],"mapped",[107,118]],[[13241,13241],"mapped",[109,118]],[[13242,13242],"mapped",[112,119]],[[13243,13243],"mapped",[110,119]],[[13244,13244],"mapped",[956,119]],[[13245,13245],"mapped",[109,119]],[[13246,13246],"mapped",[107,119]],[[13247,13247],"mapped",[109,119]],[[13248,13248],"mapped",[107,969]],[[13249,13249],"mapped",[109,969]],[[13250,13250],"disallowed"],[[13251,13251],"mapped",[98,113]],[[13252,13252],"mapped",[99,99]],[[13253,13253],"mapped",[99,100]],[[13254,13254],"mapped",[99,8725,107,103]],[[13255,13255],"disallowed"],[[13256,13256],"mapped",[100,98]],[[13257,13257],"mapped",[103,121]],[[13258,13258],"mapped",[104,97]],[[13259,13259],"mapped",[104,112]],[[13260,13260],"mapped",[105,110]],[[13261,13261],"mapped",[107,107]],[[13262,13262],"mapped",[107,109]],[[13263,13263],"mapped",[107,116]],[[13264,13264],"mapped",[108,109]],[[13265,13265],"mapped",[108,110]],[[13266,13266],"mapped",[108,111,103]],[[13267,13267],"mapped",[108,120]],[[13268,13268],"mapped",[109,98]],[[13269,13269],"mapped",[109,105,108]],[[13270,13270],"mapped",[109,111,108]],[[13271,13271],"mapped",[112,104]],[[13272,13272],"disallowed"],[[13273,13273],"mapped",[112,112,109]],[[13274,13274],"mapped",[112,114]],[[13275,13275],"mapped",[115,114]],[[13276,13276],"mapped",[115,118]],[[13277,13277],"mapped",[119,98]],[[13278,13278],"mapped",[118,8725,109]],[[13279,13279],"mapped",[97,8725,109]],[[13280,13280],"mapped",[49,26085]],[[13281,13281],"mapped",[50,26085]],[[13282,13282],"mapped",[51,26085]],[[13283,13283],"mapped",[52,26085]],[[13284,13284],"mapped",[53,26085]],[[13285,13285],"mapped",[54,26085]],[[13286,13286],"mapped",[55,26085]],[[13287,13287],"mapped",[56,26085]],[[13288,13288],"mapped",[57,26085]],[[13289,13289],"mapped",[49,48,26085]],[[13290,13290],"mapped",[49,49,26085]],[[13291,13291],"mapped",[49,50,26085]],[[13292,13292],"mapped",[49,51,26085]],[[13293,13293],"mapped",[49,52,26085]],[[13294,13294],"mapped",[49,53,26085]],[[13295,13295],"mapped",[49,54,26085]],[[13296,13296],"mapped",[49,55,26085]],[[13297,13297],"mapped",[49,56,26085]],[[13298,13298],"mapped",[49,57,26085]],[[13299,13299],"mapped",[50,48,26085]],[[13300,13300],"mapped",[50,49,26085]],[[13301,13301],"mapped",[50,50,26085]],[[13302,13302],"mapped",[50,51,26085]],[[13303,13303],"mapped",[50,52,26085]],[[13304,13304],"mapped",[50,53,26085]]
,[[13305,13305],"mapped",[50,54,26085]],[[13306,13306],"mapped",[50,55,26085]],[[13307,13307],"mapped",[50,56,26085]],[[13308,13308],"mapped",[50,57,26085]],[[13309,13309],"mapped",[51,48,26085]],[[13310,13310],"mapped",[51,49,26085]],[[13311,13311],"mapped",[103,97,108]],[[13312,19893],"valid"],[[19894,19903],"disallowed"],[[19904,19967],"valid",[],"NV8"],[[19968,40869],"valid"],[[40870,40891],"valid"],[[40892,40899],"valid"],[[40900,40907],"valid"],[[40908,40908],"valid"],[[40909,40917],"valid"],[[40918,40959],"disallowed"],[[40960,42124],"valid"],[[42125,42127],"disallowed"],[[42128,42145],"valid",[],"NV8"],[[42146,42147],"valid",[],"NV8"],[[42148,42163],"valid",[],"NV8"],[[42164,42164],"valid",[],"NV8"],[[42165,42176],"valid",[],"NV8"],[[42177,42177],"valid",[],"NV8"],[[42178,42180],"valid",[],"NV8"],[[42181,42181],"valid",[],"NV8"],[[42182,42182],"valid",[],"NV8"],[[42183,42191],"disallowed"],[[42192,42237],"valid"],[[42238,42239],"valid",[],"NV8"],[[42240,42508],"valid"],[[42509,42511],"valid",[],"NV8"],[[42512,42539],"valid"],[[42540,42559],"disallowed"],[[42560,42560],"mapped",[42561]],[[42561,42561],"valid"],[[42562,42562],"mapped",[42563]],[[42563,42563],"valid"],[[42564,42564],"mapped",[42565]],[[42565,42565],"valid"],[[42566,42566],"mapped",[42567]],[[42567,42567],"valid"],[[42568,42568],"mapped",[42569]],[[42569,42569],"valid"],[[42570,42570],"mapped",[42571]],[[42571,42571],"valid"],[[42572,42572],"mapped",[42573]],[[42573,42573],"valid"],[[42574,42574],"mapped",[42575]],[[42575,42575],"valid"],[[42576,42576],"mapped",[42577]],[[42577,42577],"valid"],[[42578,42578],"mapped",[42579]],[[42579,42579],"valid"],[[42580,42580],"mapped",[42581]],[[42581,42581],"valid"],[[42582,42582],"mapped",[42583]],[[42583,42583],"valid"],[[42584,42584],"mapped",[42585]],[[42585,42585],"valid"],[[42586,42586],"mapped",[42587]],[[42587,42587],"valid"],[[42588,42588],"mapped",[42589]],[[42589,42589],"valid"],[[42590,42590],"mapped",[42591]],[[42591,42591],"valid"],[[42592,42592],"mapped",[42593]],[[42593,42593],"valid"],[[42594,42594],"mapped",[42595]],[[42595,42595],"valid"],[[42596,42596],"mapped",[42597]],[[42597,42597],"valid"],[[42598,42598],"mapped",[42599]],[[42599,42599],"valid"],[[42600,42600],"mapped",[42601]],[[42601,42601],"valid"],[[42602,42602],"mapped",[42603]],[[42603,42603],"valid"],[[42604,42604],"mapped",[42605]],[[42605,42607],"valid"],[[42608,42611],"valid",[],"NV8"],[[42612,42619],"valid"],[[42620,42621],"valid"],[[42622,42622],"valid",[],"NV8"],[[42623,42623],"valid"],[[42624,42624],"mapped",[42625]],[[42625,42625],"valid"],[[42626,42626],"mapped",[42627]],[[42627,42627],"valid"],[[42628,42628],"mapped",[42629]],[[42629,42629],"valid"],[[42630,42630],"mapped",[42631]],[[42631,42631],"valid"],[[42632,42632],"mapped",[42633]],[[42633,42633],"valid"],[[42634,42634],"mapped",[42635]],[[42635,42635],"valid"],[[42636,42636],"mapped",[42637]],[[42637,42637],"valid"],[[42638,42638],"mapped",[42639]],[[42639,42639],"valid"],[[42640,42640],"mapped",[42641]],[[42641,42641],"valid"],[[42642,42642],"mapped",[42643]],[[42643,42643],"valid"],[[42644,42644],"mapped",[42645]],[[42645,42645],"valid"],[[42646,42646],"mapped",[42647]],[[42647,42647],"valid"],[[42648,42648],"mapped",[42649]],[[42649,42649],"valid"],[[42650,42650],"mapped",[42651]],[[42651,42651],"valid"],[[42652,42652],"mapped",[1098]],[[42653,42653],"mapped",[1100]],[[42654,42654],"valid"],[[42655,42655],"valid"],[[42656,42725],"valid"],[[42726,42735],"valid",[],"NV8"],[[42736,42737],"valid"],[[42738,42743],"valid",[],"NV8"],[[42
744,42751],"disallowed"],[[42752,42774],"valid",[],"NV8"],[[42775,42778],"valid"],[[42779,42783],"valid"],[[42784,42785],"valid",[],"NV8"],[[42786,42786],"mapped",[42787]],[[42787,42787],"valid"],[[42788,42788],"mapped",[42789]],[[42789,42789],"valid"],[[42790,42790],"mapped",[42791]],[[42791,42791],"valid"],[[42792,42792],"mapped",[42793]],[[42793,42793],"valid"],[[42794,42794],"mapped",[42795]],[[42795,42795],"valid"],[[42796,42796],"mapped",[42797]],[[42797,42797],"valid"],[[42798,42798],"mapped",[42799]],[[42799,42801],"valid"],[[42802,42802],"mapped",[42803]],[[42803,42803],"valid"],[[42804,42804],"mapped",[42805]],[[42805,42805],"valid"],[[42806,42806],"mapped",[42807]],[[42807,42807],"valid"],[[42808,42808],"mapped",[42809]],[[42809,42809],"valid"],[[42810,42810],"mapped",[42811]],[[42811,42811],"valid"],[[42812,42812],"mapped",[42813]],[[42813,42813],"valid"],[[42814,42814],"mapped",[42815]],[[42815,42815],"valid"],[[42816,42816],"mapped",[42817]],[[42817,42817],"valid"],[[42818,42818],"mapped",[42819]],[[42819,42819],"valid"],[[42820,42820],"mapped",[42821]],[[42821,42821],"valid"],[[42822,42822],"mapped",[42823]],[[42823,42823],"valid"],[[42824,42824],"mapped",[42825]],[[42825,42825],"valid"],[[42826,42826],"mapped",[42827]],[[42827,42827],"valid"],[[42828,42828],"mapped",[42829]],[[42829,42829],"valid"],[[42830,42830],"mapped",[42831]],[[42831,42831],"valid"],[[42832,42832],"mapped",[42833]],[[42833,42833],"valid"],[[42834,42834],"mapped",[42835]],[[42835,42835],"valid"],[[42836,42836],"mapped",[42837]],[[42837,42837],"valid"],[[42838,42838],"mapped",[42839]],[[42839,42839],"valid"],[[42840,42840],"mapped",[42841]],[[42841,42841],"valid"],[[42842,42842],"mapped",[42843]],[[42843,42843],"valid"],[[42844,42844],"mapped",[42845]],[[42845,42845],"valid"],[[42846,42846],"mapped",[42847]],[[42847,42847],"valid"],[[42848,42848],"mapped",[42849]],[[42849,42849],"valid"],[[42850,42850],"mapped",[42851]],[[42851,42851],"valid"],[[42852,42852],"mapped",[42853]],[[42853,42853],"valid"],[[42854,42854],"mapped",[42855]],[[42855,42855],"valid"],[[42856,42856],"mapped",[42857]],[[42857,42857],"valid"],[[42858,42858],"mapped",[42859]],[[42859,42859],"valid"],[[42860,42860],"mapped",[42861]],[[42861,42861],"valid"],[[42862,42862],"mapped",[42863]],[[42863,42863],"valid"],[[42864,42864],"mapped",[42863]],[[42865,42872],"valid"],[[42873,42873],"mapped",[42874]],[[42874,42874],"valid"],[[42875,42875],"mapped",[42876]],[[42876,42876],"valid"],[[42877,42877],"mapped",[7545]],[[42878,42878],"mapped",[42879]],[[42879,42879],"valid"],[[42880,42880],"mapped",[42881]],[[42881,42881],"valid"],[[42882,42882],"mapped",[42883]],[[42883,42883],"valid"],[[42884,42884],"mapped",[42885]],[[42885,42885],"valid"],[[42886,42886],"mapped",[42887]],[[42887,42888],"valid"],[[42889,42890],"valid",[],"NV8"],[[42891,42891],"mapped",[42892]],[[42892,42892],"valid"],[[42893,42893],"mapped",[613]],[[42894,42894],"valid"],[[42895,42895],"valid"],[[42896,42896],"mapped",[42897]],[[42897,42897],"valid"],[[42898,42898],"mapped",[42899]],[[42899,42899],"valid"],[[42900,42901],"valid"],[[42902,42902],"mapped",[42903]],[[42903,42903],"valid"],[[42904,42904],"mapped",[42905]],[[42905,42905],"valid"],[[42906,42906],"mapped",[42907]],[[42907,42907],"valid"],[[42908,42908],"mapped",[42909]],[[42909,42909],"valid"],[[42910,42910],"mapped",[42911]],[[42911,42911],"valid"],[[42912,42912],"mapped",[42913]],[[42913,42913],"valid"],[[42914,42914],"mapped",[42915]],[[42915,42915],"valid"],[[42916,42916],"mapped",[42917]],[[42917,42917],"valid"]
,[[42918,42918],"mapped",[42919]],[[42919,42919],"valid"],[[42920,42920],"mapped",[42921]],[[42921,42921],"valid"],[[42922,42922],"mapped",[614]],[[42923,42923],"mapped",[604]],[[42924,42924],"mapped",[609]],[[42925,42925],"mapped",[620]],[[42926,42927],"disallowed"],[[42928,42928],"mapped",[670]],[[42929,42929],"mapped",[647]],[[42930,42930],"mapped",[669]],[[42931,42931],"mapped",[43859]],[[42932,42932],"mapped",[42933]],[[42933,42933],"valid"],[[42934,42934],"mapped",[42935]],[[42935,42935],"valid"],[[42936,42998],"disallowed"],[[42999,42999],"valid"],[[43000,43000],"mapped",[295]],[[43001,43001],"mapped",[339]],[[43002,43002],"valid"],[[43003,43007],"valid"],[[43008,43047],"valid"],[[43048,43051],"valid",[],"NV8"],[[43052,43055],"disallowed"],[[43056,43065],"valid",[],"NV8"],[[43066,43071],"disallowed"],[[43072,43123],"valid"],[[43124,43127],"valid",[],"NV8"],[[43128,43135],"disallowed"],[[43136,43204],"valid"],[[43205,43213],"disallowed"],[[43214,43215],"valid",[],"NV8"],[[43216,43225],"valid"],[[43226,43231],"disallowed"],[[43232,43255],"valid"],[[43256,43258],"valid",[],"NV8"],[[43259,43259],"valid"],[[43260,43260],"valid",[],"NV8"],[[43261,43261],"valid"],[[43262,43263],"disallowed"],[[43264,43309],"valid"],[[43310,43311],"valid",[],"NV8"],[[43312,43347],"valid"],[[43348,43358],"disallowed"],[[43359,43359],"valid",[],"NV8"],[[43360,43388],"valid",[],"NV8"],[[43389,43391],"disallowed"],[[43392,43456],"valid"],[[43457,43469],"valid",[],"NV8"],[[43470,43470],"disallowed"],[[43471,43481],"valid"],[[43482,43485],"disallowed"],[[43486,43487],"valid",[],"NV8"],[[43488,43518],"valid"],[[43519,43519],"disallowed"],[[43520,43574],"valid"],[[43575,43583],"disallowed"],[[43584,43597],"valid"],[[43598,43599],"disallowed"],[[43600,43609],"valid"],[[43610,43611],"disallowed"],[[43612,43615],"valid",[],"NV8"],[[43616,43638],"valid"],[[43639,43641],"valid",[],"NV8"],[[43642,43643],"valid"],[[43644,43647],"valid"],[[43648,43714],"valid"],[[43715,43738],"disallowed"],[[43739,43741],"valid"],[[43742,43743],"valid",[],"NV8"],[[43744,43759],"valid"],[[43760,43761],"valid",[],"NV8"],[[43762,43766],"valid"],[[43767,43776],"disallowed"],[[43777,43782],"valid"],[[43783,43784],"disallowed"],[[43785,43790],"valid"],[[43791,43792],"disallowed"],[[43793,43798],"valid"],[[43799,43807],"disallowed"],[[43808,43814],"valid"],[[43815,43815],"disallowed"],[[43816,43822],"valid"],[[43823,43823],"disallowed"],[[43824,43866],"valid"],[[43867,43867],"valid",[],"NV8"],[[43868,43868],"mapped",[42791]],[[43869,43869],"mapped",[43831]],[[43870,43870],"mapped",[619]],[[43871,43871],"mapped",[43858]],[[43872,43875],"valid"],[[43876,43877],"valid"],[[43878,43887],"disallowed"],[[43888,43888],"mapped",[5024]],[[43889,43889],"mapped",[5025]],[[43890,43890],"mapped",[5026]],[[43891,43891],"mapped",[5027]],[[43892,43892],"mapped",[5028]],[[43893,43893],"mapped",[5029]],[[43894,43894],"mapped",[5030]],[[43895,43895],"mapped",[5031]],[[43896,43896],"mapped",[5032]],[[43897,43897],"mapped",[5033]],[[43898,43898],"mapped",[5034]],[[43899,43899],"mapped",[5035]],[[43900,43900],"mapped",[5036]],[[43901,43901],"mapped",[5037]],[[43902,43902],"mapped",[5038]],[[43903,43903],"mapped",[5039]],[[43904,43904],"mapped",[5040]],[[43905,43905],"mapped",[5041]],[[43906,43906],"mapped",[5042]],[[43907,43907],"mapped",[5043]],[[43908,43908],"mapped",[5044]],[[43909,43909],"mapped",[5045]],[[43910,43910],"mapped",[5046]],[[43911,43911],"mapped",[5047]],[[43912,43912],"mapped",[5048]],[[43913,43913],"mapped",[5049]],[[43914,43914],"mapped",[5050]],[[43
915,43915],"mapped",[5051]],[[43916,43916],"mapped",[5052]],[[43917,43917],"mapped",[5053]],[[43918,43918],"mapped",[5054]],[[43919,43919],"mapped",[5055]],[[43920,43920],"mapped",[5056]],[[43921,43921],"mapped",[5057]],[[43922,43922],"mapped",[5058]],[[43923,43923],"mapped",[5059]],[[43924,43924],"mapped",[5060]],[[43925,43925],"mapped",[5061]],[[43926,43926],"mapped",[5062]],[[43927,43927],"mapped",[5063]],[[43928,43928],"mapped",[5064]],[[43929,43929],"mapped",[5065]],[[43930,43930],"mapped",[5066]],[[43931,43931],"mapped",[5067]],[[43932,43932],"mapped",[5068]],[[43933,43933],"mapped",[5069]],[[43934,43934],"mapped",[5070]],[[43935,43935],"mapped",[5071]],[[43936,43936],"mapped",[5072]],[[43937,43937],"mapped",[5073]],[[43938,43938],"mapped",[5074]],[[43939,43939],"mapped",[5075]],[[43940,43940],"mapped",[5076]],[[43941,43941],"mapped",[5077]],[[43942,43942],"mapped",[5078]],[[43943,43943],"mapped",[5079]],[[43944,43944],"mapped",[5080]],[[43945,43945],"mapped",[5081]],[[43946,43946],"mapped",[5082]],[[43947,43947],"mapped",[5083]],[[43948,43948],"mapped",[5084]],[[43949,43949],"mapped",[5085]],[[43950,43950],"mapped",[5086]],[[43951,43951],"mapped",[5087]],[[43952,43952],"mapped",[5088]],[[43953,43953],"mapped",[5089]],[[43954,43954],"mapped",[5090]],[[43955,43955],"mapped",[5091]],[[43956,43956],"mapped",[5092]],[[43957,43957],"mapped",[5093]],[[43958,43958],"mapped",[5094]],[[43959,43959],"mapped",[5095]],[[43960,43960],"mapped",[5096]],[[43961,43961],"mapped",[5097]],[[43962,43962],"mapped",[5098]],[[43963,43963],"mapped",[5099]],[[43964,43964],"mapped",[5100]],[[43965,43965],"mapped",[5101]],[[43966,43966],"mapped",[5102]],[[43967,43967],"mapped",[5103]],[[43968,44010],"valid"],[[44011,44011],"valid",[],"NV8"],[[44012,44013],"valid"],[[44014,44015],"disallowed"],[[44016,44025],"valid"],[[44026,44031],"disallowed"],[[44032,55203],"valid"],[[55204,55215],"disallowed"],[[55216,55238],"valid",[],"NV8"],[[55239,55242],"disallowed"],[[55243,55291],"valid",[],"NV8"],[[55292,55295],"disallowed"],[[55296,57343],"disallowed"],[[57344,63743],"disallowed"],[[63744,63744],"mapped",[35912]],[[63745,63745],"mapped",[26356]],[[63746,63746],"mapped",[36554]],[[63747,63747],"mapped",[36040]],[[63748,63748],"mapped",[28369]],[[63749,63749],"mapped",[20018]],[[63750,63750],"mapped",[21477]],[[63751,63752],"mapped",[40860]],[[63753,63753],"mapped",[22865]],[[63754,63754],"mapped",[37329]],[[63755,63755],"mapped",[21895]],[[63756,63756],"mapped",[22856]],[[63757,63757],"mapped",[25078]],[[63758,63758],"mapped",[30313]],[[63759,63759],"mapped",[32645]],[[63760,63760],"mapped",[34367]],[[63761,63761],"mapped",[34746]],[[63762,63762],"mapped",[35064]],[[63763,63763],"mapped",[37007]],[[63764,63764],"mapped",[27138]],[[63765,63765],"mapped",[27931]],[[63766,63766],"mapped",[28889]],[[63767,63767],"mapped",[29662]],[[63768,63768],"mapped",[33853]],[[63769,63769],"mapped",[37226]],[[63770,63770],"mapped",[39409]],[[63771,63771],"mapped",[20098]],[[63772,63772],"mapped",[21365]],[[63773,63773],"mapped",[27396]],[[63774,63774],"mapped",[29211]],[[63775,63775],"mapped",[34349]],[[63776,63776],"mapped",[40478]],[[63777,63777],"mapped",[23888]],[[63778,63778],"mapped",[28651]],[[63779,63779],"mapped",[34253]],[[63780,63780],"mapped",[35172]],[[63781,63781],"mapped",[25289]],[[63782,63782],"mapped",[33240]],[[63783,63783],"mapped",[34847]],[[63784,63784],"mapped",[24266]],[[63785,63785],"mapped",[26391]],[[63786,63786],"mapped",[28010]],[[63787,63787],"mapped",[29436]],[[63788,63788],"mapped",[37070]],[[63789,63789
],"mapped",[20358]],[[63790,63790],"mapped",[20919]],[[63791,63791],"mapped",[21214]],[[63792,63792],"mapped",[25796]],[[63793,63793],"mapped",[27347]],[[63794,63794],"mapped",[29200]],[[63795,63795],"mapped",[30439]],[[63796,63796],"mapped",[32769]],[[63797,63797],"mapped",[34310]],[[63798,63798],"mapped",[34396]],[[63799,63799],"mapped",[36335]],[[63800,63800],"mapped",[38706]],[[63801,63801],"mapped",[39791]],[[63802,63802],"mapped",[40442]],[[63803,63803],"mapped",[30860]],[[63804,63804],"mapped",[31103]],[[63805,63805],"mapped",[32160]],[[63806,63806],"mapped",[33737]],[[63807,63807],"mapped",[37636]],[[63808,63808],"mapped",[40575]],[[63809,63809],"mapped",[35542]],[[63810,63810],"mapped",[22751]],[[63811,63811],"mapped",[24324]],[[63812,63812],"mapped",[31840]],[[63813,63813],"mapped",[32894]],[[63814,63814],"mapped",[29282]],[[63815,63815],"mapped",[30922]],[[63816,63816],"mapped",[36034]],[[63817,63817],"mapped",[38647]],[[63818,63818],"mapped",[22744]],[[63819,63819],"mapped",[23650]],[[63820,63820],"mapped",[27155]],[[63821,63821],"mapped",[28122]],[[63822,63822],"mapped",[28431]],[[63823,63823],"mapped",[32047]],[[63824,63824],"mapped",[32311]],[[63825,63825],"mapped",[38475]],[[63826,63826],"mapped",[21202]],[[63827,63827],"mapped",[32907]],[[63828,63828],"mapped",[20956]],[[63829,63829],"mapped",[20940]],[[63830,63830],"mapped",[31260]],[[63831,63831],"mapped",[32190]],[[63832,63832],"mapped",[33777]],[[63833,63833],"mapped",[38517]],[[63834,63834],"mapped",[35712]],[[63835,63835],"mapped",[25295]],[[63836,63836],"mapped",[27138]],[[63837,63837],"mapped",[35582]],[[63838,63838],"mapped",[20025]],[[63839,63839],"mapped",[23527]],[[63840,63840],"mapped",[24594]],[[63841,63841],"mapped",[29575]],[[63842,63842],"mapped",[30064]],[[63843,63843],"mapped",[21271]],[[63844,63844],"mapped",[30971]],[[63845,63845],"mapped",[20415]],[[63846,63846],"mapped",[24489]],[[63847,63847],"mapped",[19981]],[[63848,63848],"mapped",[27852]],[[63849,63849],"mapped",[25976]],[[63850,63850],"mapped",[32034]],[[63851,63851],"mapped",[21443]],[[63852,63852],"mapped",[22622]],[[63853,63853],"mapped",[30465]],[[63854,63854],"mapped",[33865]],[[63855,63855],"mapped",[35498]],[[63856,63856],"mapped",[27578]],[[63857,63857],"mapped",[36784]],[[63858,63858],"mapped",[27784]],[[63859,63859],"mapped",[25342]],[[63860,63860],"mapped",[33509]],[[63861,63861],"mapped",[25504]],[[63862,63862],"mapped",[30053]],[[63863,63863],"mapped",[20142]],[[63864,63864],"mapped",[20841]],[[63865,63865],"mapped",[20937]],[[63866,63866],"mapped",[26753]],[[63867,63867],"mapped",[31975]],[[63868,63868],"mapped",[33391]],[[63869,63869],"mapped",[35538]],[[63870,63870],"mapped",[37327]],[[63871,63871],"mapped",[21237]],[[63872,63872],"mapped",[21570]],[[63873,63873],"mapped",[22899]],[[63874,63874],"mapped",[24300]],[[63875,63875],"mapped",[26053]],[[63876,63876],"mapped",[28670]],[[63877,63877],"mapped",[31018]],[[63878,63878],"mapped",[38317]],[[63879,63879],"mapped",[39530]],[[63880,63880],"mapped",[40599]],[[63881,63881],"mapped",[40654]],[[63882,63882],"mapped",[21147]],[[63883,63883],"mapped",[26310]],[[63884,63884],"mapped",[27511]],[[63885,63885],"mapped",[36706]],[[63886,63886],"mapped",[24180]],[[63887,63887],"mapped",[24976]],[[63888,63888],"mapped",[25088]],[[63889,63889],"mapped",[25754]],[[63890,63890],"mapped",[28451]],[[63891,63891],"mapped",[29001]],[[63892,63892],"mapped",[29833]],[[63893,63893],"mapped",[31178]],[[63894,63894],"mapped",[32244]],[[63895,63895],"mapped",[32879]],[[63896,63896],"mapped",[36646]],[[63
897,63897],"mapped",[34030]],[[63898,63898],"mapped",[36899]],[[63899,63899],"mapped",[37706]],[[63900,63900],"mapped",[21015]],[[63901,63901],"mapped",[21155]],[[63902,63902],"mapped",[21693]],[[63903,63903],"mapped",[28872]],[[63904,63904],"mapped",[35010]],[[63905,63905],"mapped",[35498]],[[63906,63906],"mapped",[24265]],[[63907,63907],"mapped",[24565]],[[63908,63908],"mapped",[25467]],[[63909,63909],"mapped",[27566]],[[63910,63910],"mapped",[31806]],[[63911,63911],"mapped",[29557]],[[63912,63912],"mapped",[20196]],[[63913,63913],"mapped",[22265]],[[63914,63914],"mapped",[23527]],[[63915,63915],"mapped",[23994]],[[63916,63916],"mapped",[24604]],[[63917,63917],"mapped",[29618]],[[63918,63918],"mapped",[29801]],[[63919,63919],"mapped",[32666]],[[63920,63920],"mapped",[32838]],[[63921,63921],"mapped",[37428]],[[63922,63922],"mapped",[38646]],[[63923,63923],"mapped",[38728]],[[63924,63924],"mapped",[38936]],[[63925,63925],"mapped",[20363]],[[63926,63926],"mapped",[31150]],[[63927,63927],"mapped",[37300]],[[63928,63928],"mapped",[38584]],[[63929,63929],"mapped",[24801]],[[63930,63930],"mapped",[20102]],[[63931,63931],"mapped",[20698]],[[63932,63932],"mapped",[23534]],[[63933,63933],"mapped",[23615]],[[63934,63934],"mapped",[26009]],[[63935,63935],"mapped",[27138]],[[63936,63936],"mapped",[29134]],[[63937,63937],"mapped",[30274]],[[63938,63938],"mapped",[34044]],[[63939,63939],"mapped",[36988]],[[63940,63940],"mapped",[40845]],[[63941,63941],"mapped",[26248]],[[63942,63942],"mapped",[38446]],[[63943,63943],"mapped",[21129]],[[63944,63944],"mapped",[26491]],[[63945,63945],"mapped",[26611]],[[63946,63946],"mapped",[27969]],[[63947,63947],"mapped",[28316]],[[63948,63948],"mapped",[29705]],[[63949,63949],"mapped",[30041]],[[63950,63950],"mapped",[30827]],[[63951,63951],"mapped",[32016]],[[63952,63952],"mapped",[39006]],[[63953,63953],"mapped",[20845]],[[63954,63954],"mapped",[25134]],[[63955,63955],"mapped",[38520]],[[63956,63956],"mapped",[20523]],[[63957,63957],"mapped",[23833]],[[63958,63958],"mapped",[28138]],[[63959,63959],"mapped",[36650]],[[63960,63960],"mapped",[24459]],[[63961,63961],"mapped",[24900]],[[63962,63962],"mapped",[26647]],[[63963,63963],"mapped",[29575]],[[63964,63964],"mapped",[38534]],[[63965,63965],"mapped",[21033]],[[63966,63966],"mapped",[21519]],[[63967,63967],"mapped",[23653]],[[63968,63968],"mapped",[26131]],[[63969,63969],"mapped",[26446]],[[63970,63970],"mapped",[26792]],[[63971,63971],"mapped",[27877]],[[63972,63972],"mapped",[29702]],[[63973,63973],"mapped",[30178]],[[63974,63974],"mapped",[32633]],[[63975,63975],"mapped",[35023]],[[63976,63976],"mapped",[35041]],[[63977,63977],"mapped",[37324]],[[63978,63978],"mapped",[38626]],[[63979,63979],"mapped",[21311]],[[63980,63980],"mapped",[28346]],[[63981,63981],"mapped",[21533]],[[63982,63982],"mapped",[29136]],[[63983,63983],"mapped",[29848]],[[63984,63984],"mapped",[34298]],[[63985,63985],"mapped",[38563]],[[63986,63986],"mapped",[40023]],[[63987,63987],"mapped",[40607]],[[63988,63988],"mapped",[26519]],[[63989,63989],"mapped",[28107]],[[63990,63990],"mapped",[33256]],[[63991,63991],"mapped",[31435]],[[63992,63992],"mapped",[31520]],[[63993,63993],"mapped",[31890]],[[63994,63994],"mapped",[29376]],[[63995,63995],"mapped",[28825]],[[63996,63996],"mapped",[35672]],[[63997,63997],"mapped",[20160]],[[63998,63998],"mapped",[33590]],[[63999,63999],"mapped",[21050]],[[64000,64000],"mapped",[20999]],[[64001,64001],"mapped",[24230]],[[64002,64002],"mapped",[25299]],[[64003,64003],"mapped",[31958]],[[64004,64004],"mapped",[234
29]],[[64005,64005],"mapped",[27934]],[[64006,64006],"mapped",[26292]],[[64007,64007],"mapped",[36667]],[[64008,64008],"mapped",[34892]],[[64009,64009],"mapped",[38477]],[[64010,64010],"mapped",[35211]],[[64011,64011],"mapped",[24275]],[[64012,64012],"mapped",[20800]],[[64013,64013],"mapped",[21952]],[[64014,64015],"valid"],[[64016,64016],"mapped",[22618]],[[64017,64017],"valid"],[[64018,64018],"mapped",[26228]],[[64019,64020],"valid"],[[64021,64021],"mapped",[20958]],[[64022,64022],"mapped",[29482]],[[64023,64023],"mapped",[30410]],[[64024,64024],"mapped",[31036]],[[64025,64025],"mapped",[31070]],[[64026,64026],"mapped",[31077]],[[64027,64027],"mapped",[31119]],[[64028,64028],"mapped",[38742]],[[64029,64029],"mapped",[31934]],[[64030,64030],"mapped",[32701]],[[64031,64031],"valid"],[[64032,64032],"mapped",[34322]],[[64033,64033],"valid"],[[64034,64034],"mapped",[35576]],[[64035,64036],"valid"],[[64037,64037],"mapped",[36920]],[[64038,64038],"mapped",[37117]],[[64039,64041],"valid"],[[64042,64042],"mapped",[39151]],[[64043,64043],"mapped",[39164]],[[64044,64044],"mapped",[39208]],[[64045,64045],"mapped",[40372]],[[64046,64046],"mapped",[37086]],[[64047,64047],"mapped",[38583]],[[64048,64048],"mapped",[20398]],[[64049,64049],"mapped",[20711]],[[64050,64050],"mapped",[20813]],[[64051,64051],"mapped",[21193]],[[64052,64052],"mapped",[21220]],[[64053,64053],"mapped",[21329]],[[64054,64054],"mapped",[21917]],[[64055,64055],"mapped",[22022]],[[64056,64056],"mapped",[22120]],[[64057,64057],"mapped",[22592]],[[64058,64058],"mapped",[22696]],[[64059,64059],"mapped",[23652]],[[64060,64060],"mapped",[23662]],[[64061,64061],"mapped",[24724]],[[64062,64062],"mapped",[24936]],[[64063,64063],"mapped",[24974]],[[64064,64064],"mapped",[25074]],[[64065,64065],"mapped",[25935]],[[64066,64066],"mapped",[26082]],[[64067,64067],"mapped",[26257]],[[64068,64068],"mapped",[26757]],[[64069,64069],"mapped",[28023]],[[64070,64070],"mapped",[28186]],[[64071,64071],"mapped",[28450]],[[64072,64072],"mapped",[29038]],[[64073,64073],"mapped",[29227]],[[64074,64074],"mapped",[29730]],[[64075,64075],"mapped",[30865]],[[64076,64076],"mapped",[31038]],[[64077,64077],"mapped",[31049]],[[64078,64078],"mapped",[31048]],[[64079,64079],"mapped",[31056]],[[64080,64080],"mapped",[31062]],[[64081,64081],"mapped",[31069]],[[64082,64082],"mapped",[31117]],[[64083,64083],"mapped",[31118]],[[64084,64084],"mapped",[31296]],[[64085,64085],"mapped",[31361]],[[64086,64086],"mapped",[31680]],[[64087,64087],"mapped",[32244]],[[64088,64088],"mapped",[32265]],[[64089,64089],"mapped",[32321]],[[64090,64090],"mapped",[32626]],[[64091,64091],"mapped",[32773]],[[64092,64092],"mapped",[33261]],[[64093,64094],"mapped",[33401]],[[64095,64095],"mapped",[33879]],[[64096,64096],"mapped",[35088]],[[64097,64097],"mapped",[35222]],[[64098,64098],"mapped",[35585]],[[64099,64099],"mapped",[35641]],[[64100,64100],"mapped",[36051]],[[64101,64101],"mapped",[36104]],[[64102,64102],"mapped",[36790]],[[64103,64103],"mapped",[36920]],[[64104,64104],"mapped",[38627]],[[64105,64105],"mapped",[38911]],[[64106,64106],"mapped",[38971]],[[64107,64107],"mapped",[24693]],[[64108,64108],"mapped",[148206]],[[64109,64109],"mapped",[33304]],[[64110,64111],"disallowed"],[[64112,64112],"mapped",[20006]],[[64113,64113],"mapped",[20917]],[[64114,64114],"mapped",[20840]],[[64115,64115],"mapped",[20352]],[[64116,64116],"mapped",[20805]],[[64117,64117],"mapped",[20864]],[[64118,64118],"mapped",[21191]],[[64119,64119],"mapped",[21242]],[[64120,64120],"mapped",[21917]],[[64121,64121],"map
ped",[21845]],[[64122,64122],"mapped",[21913]],[[64123,64123],"mapped",[21986]],[[64124,64124],"mapped",[22618]],[[64125,64125],"mapped",[22707]],[[64126,64126],"mapped",[22852]],[[64127,64127],"mapped",[22868]],[[64128,64128],"mapped",[23138]],[[64129,64129],"mapped",[23336]],[[64130,64130],"mapped",[24274]],[[64131,64131],"mapped",[24281]],[[64132,64132],"mapped",[24425]],[[64133,64133],"mapped",[24493]],[[64134,64134],"mapped",[24792]],[[64135,64135],"mapped",[24910]],[[64136,64136],"mapped",[24840]],[[64137,64137],"mapped",[24974]],[[64138,64138],"mapped",[24928]],[[64139,64139],"mapped",[25074]],[[64140,64140],"mapped",[25140]],[[64141,64141],"mapped",[25540]],[[64142,64142],"mapped",[25628]],[[64143,64143],"mapped",[25682]],[[64144,64144],"mapped",[25942]],[[64145,64145],"mapped",[26228]],[[64146,64146],"mapped",[26391]],[[64147,64147],"mapped",[26395]],[[64148,64148],"mapped",[26454]],[[64149,64149],"mapped",[27513]],[[64150,64150],"mapped",[27578]],[[64151,64151],"mapped",[27969]],[[64152,64152],"mapped",[28379]],[[64153,64153],"mapped",[28363]],[[64154,64154],"mapped",[28450]],[[64155,64155],"mapped",[28702]],[[64156,64156],"mapped",[29038]],[[64157,64157],"mapped",[30631]],[[64158,64158],"mapped",[29237]],[[64159,64159],"mapped",[29359]],[[64160,64160],"mapped",[29482]],[[64161,64161],"mapped",[29809]],[[64162,64162],"mapped",[29958]],[[64163,64163],"mapped",[30011]],[[64164,64164],"mapped",[30237]],[[64165,64165],"mapped",[30239]],[[64166,64166],"mapped",[30410]],[[64167,64167],"mapped",[30427]],[[64168,64168],"mapped",[30452]],[[64169,64169],"mapped",[30538]],[[64170,64170],"mapped",[30528]],[[64171,64171],"mapped",[30924]],[[64172,64172],"mapped",[31409]],[[64173,64173],"mapped",[31680]],[[64174,64174],"mapped",[31867]],[[64175,64175],"mapped",[32091]],[[64176,64176],"mapped",[32244]],[[64177,64177],"mapped",[32574]],[[64178,64178],"mapped",[32773]],[[64179,64179],"mapped",[33618]],[[64180,64180],"mapped",[33775]],[[64181,64181],"mapped",[34681]],[[64182,64182],"mapped",[35137]],[[64183,64183],"mapped",[35206]],[[64184,64184],"mapped",[35222]],[[64185,64185],"mapped",[35519]],[[64186,64186],"mapped",[35576]],[[64187,64187],"mapped",[35531]],[[64188,64188],"mapped",[35585]],[[64189,64189],"mapped",[35582]],[[64190,64190],"mapped",[35565]],[[64191,64191],"mapped",[35641]],[[64192,64192],"mapped",[35722]],[[64193,64193],"mapped",[36104]],[[64194,64194],"mapped",[36664]],[[64195,64195],"mapped",[36978]],[[64196,64196],"mapped",[37273]],[[64197,64197],"mapped",[37494]],[[64198,64198],"mapped",[38524]],[[64199,64199],"mapped",[38627]],[[64200,64200],"mapped",[38742]],[[64201,64201],"mapped",[38875]],[[64202,64202],"mapped",[38911]],[[64203,64203],"mapped",[38923]],[[64204,64204],"mapped",[38971]],[[64205,64205],"mapped",[39698]],[[64206,64206],"mapped",[40860]],[[64207,64207],"mapped",[141386]],[[64208,64208],"mapped",[141380]],[[64209,64209],"mapped",[144341]],[[64210,64210],"mapped",[15261]],[[64211,64211],"mapped",[16408]],[[64212,64212],"mapped",[16441]],[[64213,64213],"mapped",[152137]],[[64214,64214],"mapped",[154832]],[[64215,64215],"mapped",[163539]],[[64216,64216],"mapped",[40771]],[[64217,64217],"mapped",[40846]],[[64218,64255],"disallowed"],[[64256,64256],"mapped",[102,102]],[[64257,64257],"mapped",[102,105]],[[64258,64258],"mapped",[102,108]],[[64259,64259],"mapped",[102,102,105]],[[64260,64260],"mapped",[102,102,108]],[[64261,64262],"mapped",[115,116]],[[64263,64274],"disallowed"],[[64275,64275],"mapped",[1396,1398]],[[64276,64276],"mapped",[1396,1381]],[[64277,64277],"m
apped",[1396,1387]],[[64278,64278],"mapped",[1406,1398]],[[64279,64279],"mapped",[1396,1389]],[[64280,64284],"disallowed"],[[64285,64285],"mapped",[1497,1460]],[[64286,64286],"valid"],[[64287,64287],"mapped",[1522,1463]],[[64288,64288],"mapped",[1506]],[[64289,64289],"mapped",[1488]],[[64290,64290],"mapped",[1491]],[[64291,64291],"mapped",[1492]],[[64292,64292],"mapped",[1499]],[[64293,64293],"mapped",[1500]],[[64294,64294],"mapped",[1501]],[[64295,64295],"mapped",[1512]],[[64296,64296],"mapped",[1514]],[[64297,64297],"disallowed_STD3_mapped",[43]],[[64298,64298],"mapped",[1513,1473]],[[64299,64299],"mapped",[1513,1474]],[[64300,64300],"mapped",[1513,1468,1473]],[[64301,64301],"mapped",[1513,1468,1474]],[[64302,64302],"mapped",[1488,1463]],[[64303,64303],"mapped",[1488,1464]],[[64304,64304],"mapped",[1488,1468]],[[64305,64305],"mapped",[1489,1468]],[[64306,64306],"mapped",[1490,1468]],[[64307,64307],"mapped",[1491,1468]],[[64308,64308],"mapped",[1492,1468]],[[64309,64309],"mapped",[1493,1468]],[[64310,64310],"mapped",[1494,1468]],[[64311,64311],"disallowed"],[[64312,64312],"mapped",[1496,1468]],[[64313,64313],"mapped",[1497,1468]],[[64314,64314],"mapped",[1498,1468]],[[64315,64315],"mapped",[1499,1468]],[[64316,64316],"mapped",[1500,1468]],[[64317,64317],"disallowed"],[[64318,64318],"mapped",[1502,1468]],[[64319,64319],"disallowed"],[[64320,64320],"mapped",[1504,1468]],[[64321,64321],"mapped",[1505,1468]],[[64322,64322],"disallowed"],[[64323,64323],"mapped",[1507,1468]],[[64324,64324],"mapped",[1508,1468]],[[64325,64325],"disallowed"],[[64326,64326],"mapped",[1510,1468]],[[64327,64327],"mapped",[1511,1468]],[[64328,64328],"mapped",[1512,1468]],[[64329,64329],"mapped",[1513,1468]],[[64330,64330],"mapped",[1514,1468]],[[64331,64331],"mapped",[1493,1465]],[[64332,64332],"mapped",[1489,1471]],[[64333,64333],"mapped",[1499,1471]],[[64334,64334],"mapped",[1508,1471]],[[64335,64335],"mapped",[1488,1500]],[[64336,64337],"mapped",[1649]],[[64338,64341],"mapped",[1659]],[[64342,64345],"mapped",[1662]],[[64346,64349],"mapped",[1664]],[[64350,64353],"mapped",[1658]],[[64354,64357],"mapped",[1663]],[[64358,64361],"mapped",[1657]],[[64362,64365],"mapped",[1700]],[[64366,64369],"mapped",[1702]],[[64370,64373],"mapped",[1668]],[[64374,64377],"mapped",[1667]],[[64378,64381],"mapped",[1670]],[[64382,64385],"mapped",[1671]],[[64386,64387],"mapped",[1677]],[[64388,64389],"mapped",[1676]],[[64390,64391],"mapped",[1678]],[[64392,64393],"mapped",[1672]],[[64394,64395],"mapped",[1688]],[[64396,64397],"mapped",[1681]],[[64398,64401],"mapped",[1705]],[[64402,64405],"mapped",[1711]],[[64406,64409],"mapped",[1715]],[[64410,64413],"mapped",[1713]],[[64414,64415],"mapped",[1722]],[[64416,64419],"mapped",[1723]],[[64420,64421],"mapped",[1728]],[[64422,64425],"mapped",[1729]],[[64426,64429],"mapped",[1726]],[[64430,64431],"mapped",[1746]],[[64432,64433],"mapped",[1747]],[[64434,64449],"valid",[],"NV8"],[[64450,64466],"disallowed"],[[64467,64470],"mapped",[1709]],[[64471,64472],"mapped",[1735]],[[64473,64474],"mapped",[1734]],[[64475,64476],"mapped",[1736]],[[64477,64477],"mapped",[1735,1652]],[[64478,64479],"mapped",[1739]],[[64480,64481],"mapped",[1733]],[[64482,64483],"mapped",[1737]],[[64484,64487],"mapped",[1744]],[[64488,64489],"mapped",[1609]],[[64490,64491],"mapped",[1574,1575]],[[64492,64493],"mapped",[1574,1749]],[[64494,64495],"mapped",[1574,1608]],[[64496,64497],"mapped",[1574,1735]],[[64498,64499],"mapped",[1574,1734]],[[64500,64501],"mapped",[1574,1736]],[[64502,64504],"mapped",[1574,1744]],[[64505,64507],"ma
pped",[1574,1609]],[[64508,64511],"mapped",[1740]],[[64512,64512],"mapped",[1574,1580]],[[64513,64513],"mapped",[1574,1581]],[[64514,64514],"mapped",[1574,1605]],[[64515,64515],"mapped",[1574,1609]],[[64516,64516],"mapped",[1574,1610]],[[64517,64517],"mapped",[1576,1580]],[[64518,64518],"mapped",[1576,1581]],[[64519,64519],"mapped",[1576,1582]],[[64520,64520],"mapped",[1576,1605]],[[64521,64521],"mapped",[1576,1609]],[[64522,64522],"mapped",[1576,1610]],[[64523,64523],"mapped",[1578,1580]],[[64524,64524],"mapped",[1578,1581]],[[64525,64525],"mapped",[1578,1582]],[[64526,64526],"mapped",[1578,1605]],[[64527,64527],"mapped",[1578,1609]],[[64528,64528],"mapped",[1578,1610]],[[64529,64529],"mapped",[1579,1580]],[[64530,64530],"mapped",[1579,1605]],[[64531,64531],"mapped",[1579,1609]],[[64532,64532],"mapped",[1579,1610]],[[64533,64533],"mapped",[1580,1581]],[[64534,64534],"mapped",[1580,1605]],[[64535,64535],"mapped",[1581,1580]],[[64536,64536],"mapped",[1581,1605]],[[64537,64537],"mapped",[1582,1580]],[[64538,64538],"mapped",[1582,1581]],[[64539,64539],"mapped",[1582,1605]],[[64540,64540],"mapped",[1587,1580]],[[64541,64541],"mapped",[1587,1581]],[[64542,64542],"mapped",[1587,1582]],[[64543,64543],"mapped",[1587,1605]],[[64544,64544],"mapped",[1589,1581]],[[64545,64545],"mapped",[1589,1605]],[[64546,64546],"mapped",[1590,1580]],[[64547,64547],"mapped",[1590,1581]],[[64548,64548],"mapped",[1590,1582]],[[64549,64549],"mapped",[1590,1605]],[[64550,64550],"mapped",[1591,1581]],[[64551,64551],"mapped",[1591,1605]],[[64552,64552],"mapped",[1592,1605]],[[64553,64553],"mapped",[1593,1580]],[[64554,64554],"mapped",[1593,1605]],[[64555,64555],"mapped",[1594,1580]],[[64556,64556],"mapped",[1594,1605]],[[64557,64557],"mapped",[1601,1580]],[[64558,64558],"mapped",[1601,1581]],[[64559,64559],"mapped",[1601,1582]],[[64560,64560],"mapped",[1601,1605]],[[64561,64561],"mapped",[1601,1609]],[[64562,64562],"mapped",[1601,1610]],[[64563,64563],"mapped",[1602,1581]],[[64564,64564],"mapped",[1602,1605]],[[64565,64565],"mapped",[1602,1609]],[[64566,64566],"mapped",[1602,1610]],[[64567,64567],"mapped",[1603,1575]],[[64568,64568],"mapped",[1603,1580]],[[64569,64569],"mapped",[1603,1581]],[[64570,64570],"mapped",[1603,1582]],[[64571,64571],"mapped",[1603,1604]],[[64572,64572],"mapped",[1603,1605]],[[64573,64573],"mapped",[1603,1609]],[[64574,64574],"mapped",[1603,1610]],[[64575,64575],"mapped",[1604,1580]],[[64576,64576],"mapped",[1604,1581]],[[64577,64577],"mapped",[1604,1582]],[[64578,64578],"mapped",[1604,1605]],[[64579,64579],"mapped",[1604,1609]],[[64580,64580],"mapped",[1604,1610]],[[64581,64581],"mapped",[1605,1580]],[[64582,64582],"mapped",[1605,1581]],[[64583,64583],"mapped",[1605,1582]],[[64584,64584],"mapped",[1605,1605]],[[64585,64585],"mapped",[1605,1609]],[[64586,64586],"mapped",[1605,1610]],[[64587,64587],"mapped",[1606,1580]],[[64588,64588],"mapped",[1606,1581]],[[64589,64589],"mapped",[1606,1582]],[[64590,64590],"mapped",[1606,1605]],[[64591,64591],"mapped",[1606,1609]],[[64592,64592],"mapped",[1606,1610]],[[64593,64593],"mapped",[1607,1580]],[[64594,64594],"mapped",[1607,1605]],[[64595,64595],"mapped",[1607,1609]],[[64596,64596],"mapped",[1607,1610]],[[64597,64597],"mapped",[1610,1580]],[[64598,64598],"mapped",[1610,1581]],[[64599,64599],"mapped",[1610,1582]],[[64600,64600],"mapped",[1610,1605]],[[64601,64601],"mapped",[1610,1609]],[[64602,64602],"mapped",[1610,1610]],[[64603,64603],"mapped",[1584,1648]],[[64604,64604],"mapped",[1585,1648]],[[64605,64605],"mapped",[1609,1648]],[[64606,64606],"disallowed
_STD3_mapped",[32,1612,1617]],[[64607,64607],"disallowed_STD3_mapped",[32,1613,1617]],[[64608,64608],"disallowed_STD3_mapped",[32,1614,1617]],[[64609,64609],"disallowed_STD3_mapped",[32,1615,1617]],[[64610,64610],"disallowed_STD3_mapped",[32,1616,1617]],[[64611,64611],"disallowed_STD3_mapped",[32,1617,1648]],[[64612,64612],"mapped",[1574,1585]],[[64613,64613],"mapped",[1574,1586]],[[64614,64614],"mapped",[1574,1605]],[[64615,64615],"mapped",[1574,1606]],[[64616,64616],"mapped",[1574,1609]],[[64617,64617],"mapped",[1574,1610]],[[64618,64618],"mapped",[1576,1585]],[[64619,64619],"mapped",[1576,1586]],[[64620,64620],"mapped",[1576,1605]],[[64621,64621],"mapped",[1576,1606]],[[64622,64622],"mapped",[1576,1609]],[[64623,64623],"mapped",[1576,1610]],[[64624,64624],"mapped",[1578,1585]],[[64625,64625],"mapped",[1578,1586]],[[64626,64626],"mapped",[1578,1605]],[[64627,64627],"mapped",[1578,1606]],[[64628,64628],"mapped",[1578,1609]],[[64629,64629],"mapped",[1578,1610]],[[64630,64630],"mapped",[1579,1585]],[[64631,64631],"mapped",[1579,1586]],[[64632,64632],"mapped",[1579,1605]],[[64633,64633],"mapped",[1579,1606]],[[64634,64634],"mapped",[1579,1609]],[[64635,64635],"mapped",[1579,1610]],[[64636,64636],"mapped",[1601,1609]],[[64637,64637],"mapped",[1601,1610]],[[64638,64638],"mapped",[1602,1609]],[[64639,64639],"mapped",[1602,1610]],[[64640,64640],"mapped",[1603,1575]],[[64641,64641],"mapped",[1603,1604]],[[64642,64642],"mapped",[1603,1605]],[[64643,64643],"mapped",[1603,1609]],[[64644,64644],"mapped",[1603,1610]],[[64645,64645],"mapped",[1604,1605]],[[64646,64646],"mapped",[1604,1609]],[[64647,64647],"mapped",[1604,1610]],[[64648,64648],"mapped",[1605,1575]],[[64649,64649],"mapped",[1605,1605]],[[64650,64650],"mapped",[1606,1585]],[[64651,64651],"mapped",[1606,1586]],[[64652,64652],"mapped",[1606,1605]],[[64653,64653],"mapped",[1606,1606]],[[64654,64654],"mapped",[1606,1609]],[[64655,64655],"mapped",[1606,1610]],[[64656,64656],"mapped",[1609,1648]],[[64657,64657],"mapped",[1610,1585]],[[64658,64658],"mapped",[1610,1586]],[[64659,64659],"mapped",[1610,1605]],[[64660,64660],"mapped",[1610,1606]],[[64661,64661],"mapped",[1610,1609]],[[64662,64662],"mapped",[1610,1610]],[[64663,64663],"mapped",[1574,1580]],[[64664,64664],"mapped",[1574,1581]],[[64665,64665],"mapped",[1574,1582]],[[64666,64666],"mapped",[1574,1605]],[[64667,64667],"mapped",[1574,1607]],[[64668,64668],"mapped",[1576,1580]],[[64669,64669],"mapped",[1576,1581]],[[64670,64670],"mapped",[1576,1582]],[[64671,64671],"mapped",[1576,1605]],[[64672,64672],"mapped",[1576,1607]],[[64673,64673],"mapped",[1578,1580]],[[64674,64674],"mapped",[1578,1581]],[[64675,64675],"mapped",[1578,1582]],[[64676,64676],"mapped",[1578,1605]],[[64677,64677],"mapped",[1578,1607]],[[64678,64678],"mapped",[1579,1605]],[[64679,64679],"mapped",[1580,1581]],[[64680,64680],"mapped",[1580,1605]],[[64681,64681],"mapped",[1581,1580]],[[64682,64682],"mapped",[1581,1605]],[[64683,64683],"mapped",[1582,1580]],[[64684,64684],"mapped",[1582,1605]],[[64685,64685],"mapped",[1587,1580]],[[64686,64686],"mapped",[1587,1581]],[[64687,64687],"mapped",[1587,1582]],[[64688,64688],"mapped",[1587,1605]],[[64689,64689],"mapped",[1589,1581]],[[64690,64690],"mapped",[1589,1582]],[[64691,64691],"mapped",[1589,1605]],[[64692,64692],"mapped",[1590,1580]],[[64693,64693],"mapped",[1590,1581]],[[64694,64694],"mapped",[1590,1582]],[[64695,64695],"mapped",[1590,1605]],[[64696,64696],"mapped",[1591,1581]],[[64697,64697],"mapped",[1592,1605]],[[64698,64698],"mapped",[1593,1580]],[[64699,64699],"mapped",[1
593,1605]],[[64700,64700],"mapped",[1594,1580]],[[64701,64701],"mapped",[1594,1605]],[[64702,64702],"mapped",[1601,1580]],[[64703,64703],"mapped",[1601,1581]],[[64704,64704],"mapped",[1601,1582]],[[64705,64705],"mapped",[1601,1605]],[[64706,64706],"mapped",[1602,1581]],[[64707,64707],"mapped",[1602,1605]],[[64708,64708],"mapped",[1603,1580]],[[64709,64709],"mapped",[1603,1581]],[[64710,64710],"mapped",[1603,1582]],[[64711,64711],"mapped",[1603,1604]],[[64712,64712],"mapped",[1603,1605]],[[64713,64713],"mapped",[1604,1580]],[[64714,64714],"mapped",[1604,1581]],[[64715,64715],"mapped",[1604,1582]],[[64716,64716],"mapped",[1604,1605]],[[64717,64717],"mapped",[1604,1607]],[[64718,64718],"mapped",[1605,1580]],[[64719,64719],"mapped",[1605,1581]],[[64720,64720],"mapped",[1605,1582]],[[64721,64721],"mapped",[1605,1605]],[[64722,64722],"mapped",[1606,1580]],[[64723,64723],"mapped",[1606,1581]],[[64724,64724],"mapped",[1606,1582]],[[64725,64725],"mapped",[1606,1605]],[[64726,64726],"mapped",[1606,1607]],[[64727,64727],"mapped",[1607,1580]],[[64728,64728],"mapped",[1607,1605]],[[64729,64729],"mapped",[1607,1648]],[[64730,64730],"mapped",[1610,1580]],[[64731,64731],"mapped",[1610,1581]],[[64732,64732],"mapped",[1610,1582]],[[64733,64733],"mapped",[1610,1605]],[[64734,64734],"mapped",[1610,1607]],[[64735,64735],"mapped",[1574,1605]],[[64736,64736],"mapped",[1574,1607]],[[64737,64737],"mapped",[1576,1605]],[[64738,64738],"mapped",[1576,1607]],[[64739,64739],"mapped",[1578,1605]],[[64740,64740],"mapped",[1578,1607]],[[64741,64741],"mapped",[1579,1605]],[[64742,64742],"mapped",[1579,1607]],[[64743,64743],"mapped",[1587,1605]],[[64744,64744],"mapped",[1587,1607]],[[64745,64745],"mapped",[1588,1605]],[[64746,64746],"mapped",[1588,1607]],[[64747,64747],"mapped",[1603,1604]],[[64748,64748],"mapped",[1603,1605]],[[64749,64749],"mapped",[1604,1605]],[[64750,64750],"mapped",[1606,1605]],[[64751,64751],"mapped",[1606,1607]],[[64752,64752],"mapped",[1610,1605]],[[64753,64753],"mapped",[1610,1607]],[[64754,64754],"mapped",[1600,1614,1617]],[[64755,64755],"mapped",[1600,1615,1617]],[[64756,64756],"mapped",[1600,1616,1617]],[[64757,64757],"mapped",[1591,1609]],[[64758,64758],"mapped",[1591,1610]],[[64759,64759],"mapped",[1593,1609]],[[64760,64760],"mapped",[1593,1610]],[[64761,64761],"mapped",[1594,1609]],[[64762,64762],"mapped",[1594,1610]],[[64763,64763],"mapped",[1587,1609]],[[64764,64764],"mapped",[1587,1610]],[[64765,64765],"mapped",[1588,1609]],[[64766,64766],"mapped",[1588,1610]],[[64767,64767],"mapped",[1581,1609]],[[64768,64768],"mapped",[1581,1610]],[[64769,64769],"mapped",[1580,1609]],[[64770,64770],"mapped",[1580,1610]],[[64771,64771],"mapped",[1582,1609]],[[64772,64772],"mapped",[1582,1610]],[[64773,64773],"mapped",[1589,1609]],[[64774,64774],"mapped",[1589,1610]],[[64775,64775],"mapped",[1590,1609]],[[64776,64776],"mapped",[1590,1610]],[[64777,64777],"mapped",[1588,1580]],[[64778,64778],"mapped",[1588,1581]],[[64779,64779],"mapped",[1588,1582]],[[64780,64780],"mapped",[1588,1605]],[[64781,64781],"mapped",[1588,1585]],[[64782,64782],"mapped",[1587,1585]],[[64783,64783],"mapped",[1589,1585]],[[64784,64784],"mapped",[1590,1585]],[[64785,64785],"mapped",[1591,1609]],[[64786,64786],"mapped",[1591,1610]],[[64787,64787],"mapped",[1593,1609]],[[64788,64788],"mapped",[1593,1610]],[[64789,64789],"mapped",[1594,1609]],[[64790,64790],"mapped",[1594,1610]],[[64791,64791],"mapped",[1587,1609]],[[64792,64792],"mapped",[1587,1610]],[[64793,64793],"mapped",[1588,1609]],[[64794,64794],"mapped",[1588,1610]],[[64795,64795]
,"mapped",[1581,1609]],[[64796,64796],"mapped",[1581,1610]],[[64797,64797],"mapped",[1580,1609]],[[64798,64798],"mapped",[1580,1610]],[[64799,64799],"mapped",[1582,1609]],[[64800,64800],"mapped",[1582,1610]],[[64801,64801],"mapped",[1589,1609]],[[64802,64802],"mapped",[1589,1610]],[[64803,64803],"mapped",[1590,1609]],[[64804,64804],"mapped",[1590,1610]],[[64805,64805],"mapped",[1588,1580]],[[64806,64806],"mapped",[1588,1581]],[[64807,64807],"mapped",[1588,1582]],[[64808,64808],"mapped",[1588,1605]],[[64809,64809],"mapped",[1588,1585]],[[64810,64810],"mapped",[1587,1585]],[[64811,64811],"mapped",[1589,1585]],[[64812,64812],"mapped",[1590,1585]],[[64813,64813],"mapped",[1588,1580]],[[64814,64814],"mapped",[1588,1581]],[[64815,64815],"mapped",[1588,1582]],[[64816,64816],"mapped",[1588,1605]],[[64817,64817],"mapped",[1587,1607]],[[64818,64818],"mapped",[1588,1607]],[[64819,64819],"mapped",[1591,1605]],[[64820,64820],"mapped",[1587,1580]],[[64821,64821],"mapped",[1587,1581]],[[64822,64822],"mapped",[1587,1582]],[[64823,64823],"mapped",[1588,1580]],[[64824,64824],"mapped",[1588,1581]],[[64825,64825],"mapped",[1588,1582]],[[64826,64826],"mapped",[1591,1605]],[[64827,64827],"mapped",[1592,1605]],[[64828,64829],"mapped",[1575,1611]],[[64830,64831],"valid",[],"NV8"],[[64832,64847],"disallowed"],[[64848,64848],"mapped",[1578,1580,1605]],[[64849,64850],"mapped",[1578,1581,1580]],[[64851,64851],"mapped",[1578,1581,1605]],[[64852,64852],"mapped",[1578,1582,1605]],[[64853,64853],"mapped",[1578,1605,1580]],[[64854,64854],"mapped",[1578,1605,1581]],[[64855,64855],"mapped",[1578,1605,1582]],[[64856,64857],"mapped",[1580,1605,1581]],[[64858,64858],"mapped",[1581,1605,1610]],[[64859,64859],"mapped",[1581,1605,1609]],[[64860,64860],"mapped",[1587,1581,1580]],[[64861,64861],"mapped",[1587,1580,1581]],[[64862,64862],"mapped",[1587,1580,1609]],[[64863,64864],"mapped",[1587,1605,1581]],[[64865,64865],"mapped",[1587,1605,1580]],[[64866,64867],"mapped",[1587,1605,1605]],[[64868,64869],"mapped",[1589,1581,1581]],[[64870,64870],"mapped",[1589,1605,1605]],[[64871,64872],"mapped",[1588,1581,1605]],[[64873,64873],"mapped",[1588,1580,1610]],[[64874,64875],"mapped",[1588,1605,1582]],[[64876,64877],"mapped",[1588,1605,1605]],[[64878,64878],"mapped",[1590,1581,1609]],[[64879,64880],"mapped",[1590,1582,1605]],[[64881,64882],"mapped",[1591,1605,1581]],[[64883,64883],"mapped",[1591,1605,1605]],[[64884,64884],"mapped",[1591,1605,1610]],[[64885,64885],"mapped",[1593,1580,1605]],[[64886,64887],"mapped",[1593,1605,1605]],[[64888,64888],"mapped",[1593,1605,1609]],[[64889,64889],"mapped",[1594,1605,1605]],[[64890,64890],"mapped",[1594,1605,1610]],[[64891,64891],"mapped",[1594,1605,1609]],[[64892,64893],"mapped",[1601,1582,1605]],[[64894,64894],"mapped",[1602,1605,1581]],[[64895,64895],"mapped",[1602,1605,1605]],[[64896,64896],"mapped",[1604,1581,1605]],[[64897,64897],"mapped",[1604,1581,1610]],[[64898,64898],"mapped",[1604,1581,1609]],[[64899,64900],"mapped",[1604,1580,1580]],[[64901,64902],"mapped",[1604,1582,1605]],[[64903,64904],"mapped",[1604,1605,1581]],[[64905,64905],"mapped",[1605,1581,1580]],[[64906,64906],"mapped",[1605,1581,1605]],[[64907,64907],"mapped",[1605,1581,1610]],[[64908,64908],"mapped",[1605,1580,1581]],[[64909,64909],"mapped",[1605,1580,1605]],[[64910,64910],"mapped",[1605,1582,1580]],[[64911,64911],"mapped",[1605,1582,1605]],[[64912,64913],"disallowed"],[[64914,64914],"mapped",[1605,1580,1582]],[[64915,64915],"mapped",[1607,1605,1580]],[[64916,64916],"mapped",[1607,1605,1605]],[[64917,64917],"mapped",[1606,1581,1
605]],[[64918,64918],"mapped",[1606,1581,1609]],[[64919,64920],"mapped",[1606,1580,1605]],[[64921,64921],"mapped",[1606,1580,1609]],[[64922,64922],"mapped",[1606,1605,1610]],[[64923,64923],"mapped",[1606,1605,1609]],[[64924,64925],"mapped",[1610,1605,1605]],[[64926,64926],"mapped",[1576,1582,1610]],[[64927,64927],"mapped",[1578,1580,1610]],[[64928,64928],"mapped",[1578,1580,1609]],[[64929,64929],"mapped",[1578,1582,1610]],[[64930,64930],"mapped",[1578,1582,1609]],[[64931,64931],"mapped",[1578,1605,1610]],[[64932,64932],"mapped",[1578,1605,1609]],[[64933,64933],"mapped",[1580,1605,1610]],[[64934,64934],"mapped",[1580,1581,1609]],[[64935,64935],"mapped",[1580,1605,1609]],[[64936,64936],"mapped",[1587,1582,1609]],[[64937,64937],"mapped",[1589,1581,1610]],[[64938,64938],"mapped",[1588,1581,1610]],[[64939,64939],"mapped",[1590,1581,1610]],[[64940,64940],"mapped",[1604,1580,1610]],[[64941,64941],"mapped",[1604,1605,1610]],[[64942,64942],"mapped",[1610,1581,1610]],[[64943,64943],"mapped",[1610,1580,1610]],[[64944,64944],"mapped",[1610,1605,1610]],[[64945,64945],"mapped",[1605,1605,1610]],[[64946,64946],"mapped",[1602,1605,1610]],[[64947,64947],"mapped",[1606,1581,1610]],[[64948,64948],"mapped",[1602,1605,1581]],[[64949,64949],"mapped",[1604,1581,1605]],[[64950,64950],"mapped",[1593,1605,1610]],[[64951,64951],"mapped",[1603,1605,1610]],[[64952,64952],"mapped",[1606,1580,1581]],[[64953,64953],"mapped",[1605,1582,1610]],[[64954,64954],"mapped",[1604,1580,1605]],[[64955,64955],"mapped",[1603,1605,1605]],[[64956,64956],"mapped",[1604,1580,1605]],[[64957,64957],"mapped",[1606,1580,1581]],[[64958,64958],"mapped",[1580,1581,1610]],[[64959,64959],"mapped",[1581,1580,1610]],[[64960,64960],"mapped",[1605,1580,1610]],[[64961,64961],"mapped",[1601,1605,1610]],[[64962,64962],"mapped",[1576,1581,1610]],[[64963,64963],"mapped",[1603,1605,1605]],[[64964,64964],"mapped",[1593,1580,1605]],[[64965,64965],"mapped",[1589,1605,1605]],[[64966,64966],"mapped",[1587,1582,1610]],[[64967,64967],"mapped",[1606,1580,1610]],[[64968,64975],"disallowed"],[[64976,65007],"disallowed"],[[65008,65008],"mapped",[1589,1604,1746]],[[65009,65009],"mapped",[1602,1604,1746]],[[65010,65010],"mapped",[1575,1604,1604,1607]],[[65011,65011],"mapped",[1575,1603,1576,1585]],[[65012,65012],"mapped",[1605,1581,1605,1583]],[[65013,65013],"mapped",[1589,1604,1593,1605]],[[65014,65014],"mapped",[1585,1587,1608,1604]],[[65015,65015],"mapped",[1593,1604,1610,1607]],[[65016,65016],"mapped",[1608,1587,1604,1605]],[[65017,65017],"mapped",[1589,1604,1609]],[[65018,65018],"disallowed_STD3_mapped",[1589,1604,1609,32,1575,1604,1604,1607,32,1593,1604,1610,1607,32,1608,1587,1604,1605]],[[65019,65019],"disallowed_STD3_mapped",[1580,1604,32,1580,1604,1575,1604,1607]],[[65020,65020],"mapped",[1585,1740,1575,1604]],[[65021,65021],"valid",[],"NV8"],[[65022,65023],"disallowed"],[[65024,65039],"ignored"],[[65040,65040],"disallowed_STD3_mapped",[44]],[[65041,65041],"mapped",[12289]],[[65042,65042],"disallowed"],[[65043,65043],"disallowed_STD3_mapped",[58]],[[65044,65044],"disallowed_STD3_mapped",[59]],[[65045,65045],"disallowed_STD3_mapped",[33]],[[65046,65046],"disallowed_STD3_mapped",[63]],[[65047,65047],"mapped",[12310]],[[65048,65048],"mapped",[12311]],[[65049,65049],"disallowed"],[[65050,65055],"disallowed"],[[65056,65059],"valid"],[[65060,65062],"valid"],[[65063,65069],"valid"],[[65070,65071],"valid"],[[65072,65072],"disallowed"],[[65073,65073],"mapped",[8212]],[[65074,65074],"mapped",[8211]],[[65075,65076],"disallowed_STD3_mapped",[95]],[[65077,65077],"disallowed
_STD3_mapped",[40]],[[65078,65078],"disallowed_STD3_mapped",[41]],[[65079,65079],"disallowed_STD3_mapped",[123]],[[65080,65080],"disallowed_STD3_mapped",[125]],[[65081,65081],"mapped",[12308]],[[65082,65082],"mapped",[12309]],[[65083,65083],"mapped",[12304]],[[65084,65084],"mapped",[12305]],[[65085,65085],"mapped",[12298]],[[65086,65086],"mapped",[12299]],[[65087,65087],"mapped",[12296]],[[65088,65088],"mapped",[12297]],[[65089,65089],"mapped",[12300]],[[65090,65090],"mapped",[12301]],[[65091,65091],"mapped",[12302]],[[65092,65092],"mapped",[12303]],[[65093,65094],"valid",[],"NV8"],[[65095,65095],"disallowed_STD3_mapped",[91]],[[65096,65096],"disallowed_STD3_mapped",[93]],[[65097,65100],"disallowed_STD3_mapped",[32,773]],[[65101,65103],"disallowed_STD3_mapped",[95]],[[65104,65104],"disallowed_STD3_mapped",[44]],[[65105,65105],"mapped",[12289]],[[65106,65106],"disallowed"],[[65107,65107],"disallowed"],[[65108,65108],"disallowed_STD3_mapped",[59]],[[65109,65109],"disallowed_STD3_mapped",[58]],[[65110,65110],"disallowed_STD3_mapped",[63]],[[65111,65111],"disallowed_STD3_mapped",[33]],[[65112,65112],"mapped",[8212]],[[65113,65113],"disallowed_STD3_mapped",[40]],[[65114,65114],"disallowed_STD3_mapped",[41]],[[65115,65115],"disallowed_STD3_mapped",[123]],[[65116,65116],"disallowed_STD3_mapped",[125]],[[65117,65117],"mapped",[12308]],[[65118,65118],"mapped",[12309]],[[65119,65119],"disallowed_STD3_mapped",[35]],[[65120,65120],"disallowed_STD3_mapped",[38]],[[65121,65121],"disallowed_STD3_mapped",[42]],[[65122,65122],"disallowed_STD3_mapped",[43]],[[65123,65123],"mapped",[45]],[[65124,65124],"disallowed_STD3_mapped",[60]],[[65125,65125],"disallowed_STD3_mapped",[62]],[[65126,65126],"disallowed_STD3_mapped",[61]],[[65127,65127],"disallowed"],[[65128,65128],"disallowed_STD3_mapped",[92]],[[65129,65129],"disallowed_STD3_mapped",[36]],[[65130,65130],"disallowed_STD3_mapped",[37]],[[65131,65131],"disallowed_STD3_mapped",[64]],[[65132,65135],"disallowed"],[[65136,65136],"disallowed_STD3_mapped",[32,1611]],[[65137,65137],"mapped",[1600,1611]],[[65138,65138],"disallowed_STD3_mapped",[32,1612]],[[65139,65139],"valid"],[[65140,65140],"disallowed_STD3_mapped",[32,1613]],[[65141,65141],"disallowed"],[[65142,65142],"disallowed_STD3_mapped",[32,1614]],[[65143,65143],"mapped",[1600,1614]],[[65144,65144],"disallowed_STD3_mapped",[32,1615]],[[65145,65145],"mapped",[1600,1615]],[[65146,65146],"disallowed_STD3_mapped",[32,1616]],[[65147,65147],"mapped",[1600,1616]],[[65148,65148],"disallowed_STD3_mapped",[32,1617]],[[65149,65149],"mapped",[1600,1617]],[[65150,65150],"disallowed_STD3_mapped",[32,1618]],[[65151,65151],"mapped",[1600,1618]],[[65152,65152],"mapped",[1569]],[[65153,65154],"mapped",[1570]],[[65155,65156],"mapped",[1571]],[[65157,65158],"mapped",[1572]],[[65159,65160],"mapped",[1573]],[[65161,65164],"mapped",[1574]],[[65165,65166],"mapped",[1575]],[[65167,65170],"mapped",[1576]],[[65171,65172],"mapped",[1577]],[[65173,65176],"mapped",[1578]],[[65177,65180],"mapped",[1579]],[[65181,65184],"mapped",[1580]],[[65185,65188],"mapped",[1581]],[[65189,65192],"mapped",[1582]],[[65193,65194],"mapped",[1583]],[[65195,65196],"mapped",[1584]],[[65197,65198],"mapped",[1585]],[[65199,65200],"mapped",[1586]],[[65201,65204],"mapped",[1587]],[[65205,65208],"mapped",[1588]],[[65209,65212],"mapped",[1589]],[[65213,65216],"mapped",[1590]],[[65217,65220],"mapped",[1591]],[[65221,65224],"mapped",[1592]],[[65225,65228],"mapped",[1593]],[[65229,65232],"mapped",[1594]],[[65233,65236],"mapped",[1601]],[[65237,65240],"mapped",[1602]],
[[65241,65244],"mapped",[1603]],[[65245,65248],"mapped",[1604]],[[65249,65252],"mapped",[1605]],[[65253,65256],"mapped",[1606]],[[65257,65260],"mapped",[1607]],[[65261,65262],"mapped",[1608]],[[65263,65264],"mapped",[1609]],[[65265,65268],"mapped",[1610]],[[65269,65270],"mapped",[1604,1570]],[[65271,65272],"mapped",[1604,1571]],[[65273,65274],"mapped",[1604,1573]],[[65275,65276],"mapped",[1604,1575]],[[65277,65278],"disallowed"],[[65279,65279],"ignored"],[[65280,65280],"disallowed"],[[65281,65281],"disallowed_STD3_mapped",[33]],[[65282,65282],"disallowed_STD3_mapped",[34]],[[65283,65283],"disallowed_STD3_mapped",[35]],[[65284,65284],"disallowed_STD3_mapped",[36]],[[65285,65285],"disallowed_STD3_mapped",[37]],[[65286,65286],"disallowed_STD3_mapped",[38]],[[65287,65287],"disallowed_STD3_mapped",[39]],[[65288,65288],"disallowed_STD3_mapped",[40]],[[65289,65289],"disallowed_STD3_mapped",[41]],[[65290,65290],"disallowed_STD3_mapped",[42]],[[65291,65291],"disallowed_STD3_mapped",[43]],[[65292,65292],"disallowed_STD3_mapped",[44]],[[65293,65293],"mapped",[45]],[[65294,65294],"mapped",[46]],[[65295,65295],"disallowed_STD3_mapped",[47]],[[65296,65296],"mapped",[48]],[[65297,65297],"mapped",[49]],[[65298,65298],"mapped",[50]],[[65299,65299],"mapped",[51]],[[65300,65300],"mapped",[52]],[[65301,65301],"mapped",[53]],[[65302,65302],"mapped",[54]],[[65303,65303],"mapped",[55]],[[65304,65304],"mapped",[56]],[[65305,65305],"mapped",[57]],[[65306,65306],"disallowed_STD3_mapped",[58]],[[65307,65307],"disallowed_STD3_mapped",[59]],[[65308,65308],"disallowed_STD3_mapped",[60]],[[65309,65309],"disallowed_STD3_mapped",[61]],[[65310,65310],"disallowed_STD3_mapped",[62]],[[65311,65311],"disallowed_STD3_mapped",[63]],[[65312,65312],"disallowed_STD3_mapped",[64]],[[65313,65313],"mapped",[97]],[[65314,65314],"mapped",[98]],[[65315,65315],"mapped",[99]],[[65316,65316],"mapped",[100]],[[65317,65317],"mapped",[101]],[[65318,65318],"mapped",[102]],[[65319,65319],"mapped",[103]],[[65320,65320],"mapped",[104]],[[65321,65321],"mapped",[105]],[[65322,65322],"mapped",[106]],[[65323,65323],"mapped",[107]],[[65324,65324],"mapped",[108]],[[65325,65325],"mapped",[109]],[[65326,65326],"mapped",[110]],[[65327,65327],"mapped",[111]],[[65328,65328],"mapped",[112]],[[65329,65329],"mapped",[113]],[[65330,65330],"mapped",[114]],[[65331,65331],"mapped",[115]],[[65332,65332],"mapped",[116]],[[65333,65333],"mapped",[117]],[[65334,65334],"mapped",[118]],[[65335,65335],"mapped",[119]],[[65336,65336],"mapped",[120]],[[65337,65337],"mapped",[121]],[[65338,65338],"mapped",[122]],[[65339,65339],"disallowed_STD3_mapped",[91]],[[65340,65340],"disallowed_STD3_mapped",[92]],[[65341,65341],"disallowed_STD3_mapped",[93]],[[65342,65342],"disallowed_STD3_mapped",[94]],[[65343,65343],"disallowed_STD3_mapped",[95]],[[65344,65344],"disallowed_STD3_mapped",[96]],[[65345,65345],"mapped",[97]],[[65346,65346],"mapped",[98]],[[65347,65347],"mapped",[99]],[[65348,65348],"mapped",[100]],[[65349,65349],"mapped",[101]],[[65350,65350],"mapped",[102]],[[65351,65351],"mapped",[103]],[[65352,65352],"mapped",[104]],[[65353,65353],"mapped",[105]],[[65354,65354],"mapped",[106]],[[65355,65355],"mapped",[107]],[[65356,65356],"mapped",[108]],[[65357,65357],"mapped",[109]],[[65358,65358],"mapped",[110]],[[65359,65359],"mapped",[111]],[[65360,65360],"mapped",[112]],[[65361,65361],"mapped",[113]],[[65362,65362],"mapped",[114]],[[65363,65363],"mapped",[115]],[[65364,65364],"mapped",[116]],[[65365,65365],"mapped",[117]],[[65366,65366],"mapped",[118]],[[65367,65367],"mapped",[119]
],[[65368,65368],"mapped",[120]],[[65369,65369],"mapped",[121]],[[65370,65370],"mapped",[122]],[[65371,65371],"disallowed_STD3_mapped",[123]],[[65372,65372],"disallowed_STD3_mapped",[124]],[[65373,65373],"disallowed_STD3_mapped",[125]],[[65374,65374],"disallowed_STD3_mapped",[126]],[[65375,65375],"mapped",[10629]],[[65376,65376],"mapped",[10630]],[[65377,65377],"mapped",[46]],[[65378,65378],"mapped",[12300]],[[65379,65379],"mapped",[12301]],[[65380,65380],"mapped",[12289]],[[65381,65381],"mapped",[12539]],[[65382,65382],"mapped",[12530]],[[65383,65383],"mapped",[12449]],[[65384,65384],"mapped",[12451]],[[65385,65385],"mapped",[12453]],[[65386,65386],"mapped",[12455]],[[65387,65387],"mapped",[12457]],[[65388,65388],"mapped",[12515]],[[65389,65389],"mapped",[12517]],[[65390,65390],"mapped",[12519]],[[65391,65391],"mapped",[12483]],[[65392,65392],"mapped",[12540]],[[65393,65393],"mapped",[12450]],[[65394,65394],"mapped",[12452]],[[65395,65395],"mapped",[12454]],[[65396,65396],"mapped",[12456]],[[65397,65397],"mapped",[12458]],[[65398,65398],"mapped",[12459]],[[65399,65399],"mapped",[12461]],[[65400,65400],"mapped",[12463]],[[65401,65401],"mapped",[12465]],[[65402,65402],"mapped",[12467]],[[65403,65403],"mapped",[12469]],[[65404,65404],"mapped",[12471]],[[65405,65405],"mapped",[12473]],[[65406,65406],"mapped",[12475]],[[65407,65407],"mapped",[12477]],[[65408,65408],"mapped",[12479]],[[65409,65409],"mapped",[12481]],[[65410,65410],"mapped",[12484]],[[65411,65411],"mapped",[12486]],[[65412,65412],"mapped",[12488]],[[65413,65413],"mapped",[12490]],[[65414,65414],"mapped",[12491]],[[65415,65415],"mapped",[12492]],[[65416,65416],"mapped",[12493]],[[65417,65417],"mapped",[12494]],[[65418,65418],"mapped",[12495]],[[65419,65419],"mapped",[12498]],[[65420,65420],"mapped",[12501]],[[65421,65421],"mapped",[12504]],[[65422,65422],"mapped",[12507]],[[65423,65423],"mapped",[12510]],[[65424,65424],"mapped",[12511]],[[65425,65425],"mapped",[12512]],[[65426,65426],"mapped",[12513]],[[65427,65427],"mapped",[12514]],[[65428,65428],"mapped",[12516]],[[65429,65429],"mapped",[12518]],[[65430,65430],"mapped",[12520]],[[65431,65431],"mapped",[12521]],[[65432,65432],"mapped",[12522]],[[65433,65433],"mapped",[12523]],[[65434,65434],"mapped",[12524]],[[65435,65435],"mapped",[12525]],[[65436,65436],"mapped",[12527]],[[65437,65437],"mapped",[12531]],[[65438,65438],"mapped",[12441]],[[65439,65439],"mapped",[12442]],[[65440,65440],"disallowed"],[[65441,65441],"mapped",[4352]],[[65442,65442],"mapped",[4353]],[[65443,65443],"mapped",[4522]],[[65444,65444],"mapped",[4354]],[[65445,65445],"mapped",[4524]],[[65446,65446],"mapped",[4525]],[[65447,65447],"mapped",[4355]],[[65448,65448],"mapped",[4356]],[[65449,65449],"mapped",[4357]],[[65450,65450],"mapped",[4528]],[[65451,65451],"mapped",[4529]],[[65452,65452],"mapped",[4530]],[[65453,65453],"mapped",[4531]],[[65454,65454],"mapped",[4532]],[[65455,65455],"mapped",[4533]],[[65456,65456],"mapped",[4378]],[[65457,65457],"mapped",[4358]],[[65458,65458],"mapped",[4359]],[[65459,65459],"mapped",[4360]],[[65460,65460],"mapped",[4385]],[[65461,65461],"mapped",[4361]],[[65462,65462],"mapped",[4362]],[[65463,65463],"mapped",[4363]],[[65464,65464],"mapped",[4364]],[[65465,65465],"mapped",[4365]],[[65466,65466],"mapped",[4366]],[[65467,65467],"mapped",[4367]],[[65468,65468],"mapped",[4368]],[[65469,65469],"mapped",[4369]],[[65470,65470],"mapped",[4370]],[[65471,65473],"disallowed"],[[65474,65474],"mapped",[4449]],[[65475,65475],"mapped",[4450]],[[65476,65476],"mapped",[4451]],[[65477,65477],"
mapped",[4452]],[[65478,65478],"mapped",[4453]],[[65479,65479],"mapped",[4454]],[[65480,65481],"disallowed"],[[65482,65482],"mapped",[4455]],[[65483,65483],"mapped",[4456]],[[65484,65484],"mapped",[4457]],[[65485,65485],"mapped",[4458]],[[65486,65486],"mapped",[4459]],[[65487,65487],"mapped",[4460]],[[65488,65489],"disallowed"],[[65490,65490],"mapped",[4461]],[[65491,65491],"mapped",[4462]],[[65492,65492],"mapped",[4463]],[[65493,65493],"mapped",[4464]],[[65494,65494],"mapped",[4465]],[[65495,65495],"mapped",[4466]],[[65496,65497],"disallowed"],[[65498,65498],"mapped",[4467]],[[65499,65499],"mapped",[4468]],[[65500,65500],"mapped",[4469]],[[65501,65503],"disallowed"],[[65504,65504],"mapped",[162]],[[65505,65505],"mapped",[163]],[[65506,65506],"mapped",[172]],[[65507,65507],"disallowed_STD3_mapped",[32,772]],[[65508,65508],"mapped",[166]],[[65509,65509],"mapped",[165]],[[65510,65510],"mapped",[8361]],[[65511,65511],"disallowed"],[[65512,65512],"mapped",[9474]],[[65513,65513],"mapped",[8592]],[[65514,65514],"mapped",[8593]],[[65515,65515],"mapped",[8594]],[[65516,65516],"mapped",[8595]],[[65517,65517],"mapped",[9632]],[[65518,65518],"mapped",[9675]],[[65519,65528],"disallowed"],[[65529,65531],"disallowed"],[[65532,65532],"disallowed"],[[65533,65533],"disallowed"],[[65534,65535],"disallowed"],[[65536,65547],"valid"],[[65548,65548],"disallowed"],[[65549,65574],"valid"],[[65575,65575],"disallowed"],[[65576,65594],"valid"],[[65595,65595],"disallowed"],[[65596,65597],"valid"],[[65598,65598],"disallowed"],[[65599,65613],"valid"],[[65614,65615],"disallowed"],[[65616,65629],"valid"],[[65630,65663],"disallowed"],[[65664,65786],"valid"],[[65787,65791],"disallowed"],[[65792,65794],"valid",[],"NV8"],[[65795,65798],"disallowed"],[[65799,65843],"valid",[],"NV8"],[[65844,65846],"disallowed"],[[65847,65855],"valid",[],"NV8"],[[65856,65930],"valid",[],"NV8"],[[65931,65932],"valid",[],"NV8"],[[65933,65935],"disallowed"],[[65936,65947],"valid",[],"NV8"],[[65948,65951],"disallowed"],[[65952,65952],"valid",[],"NV8"],[[65953,65999],"disallowed"],[[66000,66044],"valid",[],"NV8"],[[66045,66045],"valid"],[[66046,66175],"disallowed"],[[66176,66204],"valid"],[[66205,66207],"disallowed"],[[66208,66256],"valid"],[[66257,66271],"disallowed"],[[66272,66272],"valid"],[[66273,66299],"valid",[],"NV8"],[[66300,66303],"disallowed"],[[66304,66334],"valid"],[[66335,66335],"valid"],[[66336,66339],"valid",[],"NV8"],[[66340,66351],"disallowed"],[[66352,66368],"valid"],[[66369,66369],"valid",[],"NV8"],[[66370,66377],"valid"],[[66378,66378],"valid",[],"NV8"],[[66379,66383],"disallowed"],[[66384,66426],"valid"],[[66427,66431],"disallowed"],[[66432,66461],"valid"],[[66462,66462],"disallowed"],[[66463,66463],"valid",[],"NV8"],[[66464,66499],"valid"],[[66500,66503],"disallowed"],[[66504,66511],"valid"],[[66512,66517],"valid",[],"NV8"],[[66518,66559],"disallowed"],[[66560,66560],"mapped",[66600]],[[66561,66561],"mapped",[66601]],[[66562,66562],"mapped",[66602]],[[66563,66563],"mapped",[66603]],[[66564,66564],"mapped",[66604]],[[66565,66565],"mapped",[66605]],[[66566,66566],"mapped",[66606]],[[66567,66567],"mapped",[66607]],[[66568,66568],"mapped",[66608]],[[66569,66569],"mapped",[66609]],[[66570,66570],"mapped",[66610]],[[66571,66571],"mapped",[66611]],[[66572,66572],"mapped",[66612]],[[66573,66573],"mapped",[66613]],[[66574,66574],"mapped",[66614]],[[66575,66575],"mapped",[66615]],[[66576,66576],"mapped",[66616]],[[66577,66577],"mapped",[66617]],[[66578,66578],"mapped",[66618]],[[66579,66579],"mapped",[66619]],[[66580,66580],"mapped",[666
20]],[[66581,66581],"mapped",[66621]],[[66582,66582],"mapped",[66622]],[[66583,66583],"mapped",[66623]],[[66584,66584],"mapped",[66624]],[[66585,66585],"mapped",[66625]],[[66586,66586],"mapped",[66626]],[[66587,66587],"mapped",[66627]],[[66588,66588],"mapped",[66628]],[[66589,66589],"mapped",[66629]],[[66590,66590],"mapped",[66630]],[[66591,66591],"mapped",[66631]],[[66592,66592],"mapped",[66632]],[[66593,66593],"mapped",[66633]],[[66594,66594],"mapped",[66634]],[[66595,66595],"mapped",[66635]],[[66596,66596],"mapped",[66636]],[[66597,66597],"mapped",[66637]],[[66598,66598],"mapped",[66638]],[[66599,66599],"mapped",[66639]],[[66600,66637],"valid"],[[66638,66717],"valid"],[[66718,66719],"disallowed"],[[66720,66729],"valid"],[[66730,66815],"disallowed"],[[66816,66855],"valid"],[[66856,66863],"disallowed"],[[66864,66915],"valid"],[[66916,66926],"disallowed"],[[66927,66927],"valid",[],"NV8"],[[66928,67071],"disallowed"],[[67072,67382],"valid"],[[67383,67391],"disallowed"],[[67392,67413],"valid"],[[67414,67423],"disallowed"],[[67424,67431],"valid"],[[67432,67583],"disallowed"],[[67584,67589],"valid"],[[67590,67591],"disallowed"],[[67592,67592],"valid"],[[67593,67593],"disallowed"],[[67594,67637],"valid"],[[67638,67638],"disallowed"],[[67639,67640],"valid"],[[67641,67643],"disallowed"],[[67644,67644],"valid"],[[67645,67646],"disallowed"],[[67647,67647],"valid"],[[67648,67669],"valid"],[[67670,67670],"disallowed"],[[67671,67679],"valid",[],"NV8"],[[67680,67702],"valid"],[[67703,67711],"valid",[],"NV8"],[[67712,67742],"valid"],[[67743,67750],"disallowed"],[[67751,67759],"valid",[],"NV8"],[[67760,67807],"disallowed"],[[67808,67826],"valid"],[[67827,67827],"disallowed"],[[67828,67829],"valid"],[[67830,67834],"disallowed"],[[67835,67839],"valid",[],"NV8"],[[67840,67861],"valid"],[[67862,67865],"valid",[],"NV8"],[[67866,67867],"valid",[],"NV8"],[[67868,67870],"disallowed"],[[67871,67871],"valid",[],"NV8"],[[67872,67897],"valid"],[[67898,67902],"disallowed"],[[67903,67903],"valid",[],"NV8"],[[67904,67967],"disallowed"],[[67968,68023],"valid"],[[68024,68027],"disallowed"],[[68028,68029],"valid",[],"NV8"],[[68030,68031],"valid"],[[68032,68047],"valid",[],"NV8"],[[68048,68049],"disallowed"],[[68050,68095],"valid",[],"NV8"],[[68096,68099],"valid"],[[68100,68100],"disallowed"],[[68101,68102],"valid"],[[68103,68107],"disallowed"],[[68108,68115],"valid"],[[68116,68116],"disallowed"],[[68117,68119],"valid"],[[68120,68120],"disallowed"],[[68121,68147],"valid"],[[68148,68151],"disallowed"],[[68152,68154],"valid"],[[68155,68158],"disallowed"],[[68159,68159],"valid"],[[68160,68167],"valid",[],"NV8"],[[68168,68175],"disallowed"],[[68176,68184],"valid",[],"NV8"],[[68185,68191],"disallowed"],[[68192,68220],"valid"],[[68221,68223],"valid",[],"NV8"],[[68224,68252],"valid"],[[68253,68255],"valid",[],"NV8"],[[68256,68287],"disallowed"],[[68288,68295],"valid"],[[68296,68296],"valid",[],"NV8"],[[68297,68326],"valid"],[[68327,68330],"disallowed"],[[68331,68342],"valid",[],"NV8"],[[68343,68351],"disallowed"],[[68352,68405],"valid"],[[68406,68408],"disallowed"],[[68409,68415],"valid",[],"NV8"],[[68416,68437],"valid"],[[68438,68439],"disallowed"],[[68440,68447],"valid",[],"NV8"],[[68448,68466],"valid"],[[68467,68471],"disallowed"],[[68472,68479],"valid",[],"NV8"],[[68480,68497],"valid"],[[68498,68504],"disallowed"],[[68505,68508],"valid",[],"NV8"],[[68509,68520],"disallowed"],[[68521,68527],"valid",[],"NV8"],[[68528,68607],"disallowed"],[[68608,68680],"valid"],[[68681,68735],"disallowed"],[[68736,68736],"mapped",[68800]],[[6873
7,68737],"mapped",[68801]],[[68738,68738],"mapped",[68802]],[[68739,68739],"mapped",[68803]],[[68740,68740],"mapped",[68804]],[[68741,68741],"mapped",[68805]],[[68742,68742],"mapped",[68806]],[[68743,68743],"mapped",[68807]],[[68744,68744],"mapped",[68808]],[[68745,68745],"mapped",[68809]],[[68746,68746],"mapped",[68810]],[[68747,68747],"mapped",[68811]],[[68748,68748],"mapped",[68812]],[[68749,68749],"mapped",[68813]],[[68750,68750],"mapped",[68814]],[[68751,68751],"mapped",[68815]],[[68752,68752],"mapped",[68816]],[[68753,68753],"mapped",[68817]],[[68754,68754],"mapped",[68818]],[[68755,68755],"mapped",[68819]],[[68756,68756],"mapped",[68820]],[[68757,68757],"mapped",[68821]],[[68758,68758],"mapped",[68822]],[[68759,68759],"mapped",[68823]],[[68760,68760],"mapped",[68824]],[[68761,68761],"mapped",[68825]],[[68762,68762],"mapped",[68826]],[[68763,68763],"mapped",[68827]],[[68764,68764],"mapped",[68828]],[[68765,68765],"mapped",[68829]],[[68766,68766],"mapped",[68830]],[[68767,68767],"mapped",[68831]],[[68768,68768],"mapped",[68832]],[[68769,68769],"mapped",[68833]],[[68770,68770],"mapped",[68834]],[[68771,68771],"mapped",[68835]],[[68772,68772],"mapped",[68836]],[[68773,68773],"mapped",[68837]],[[68774,68774],"mapped",[68838]],[[68775,68775],"mapped",[68839]],[[68776,68776],"mapped",[68840]],[[68777,68777],"mapped",[68841]],[[68778,68778],"mapped",[68842]],[[68779,68779],"mapped",[68843]],[[68780,68780],"mapped",[68844]],[[68781,68781],"mapped",[68845]],[[68782,68782],"mapped",[68846]],[[68783,68783],"mapped",[68847]],[[68784,68784],"mapped",[68848]],[[68785,68785],"mapped",[68849]],[[68786,68786],"mapped",[68850]],[[68787,68799],"disallowed"],[[68800,68850],"valid"],[[68851,68857],"disallowed"],[[68858,68863],"valid",[],"NV8"],[[68864,69215],"disallowed"],[[69216,69246],"valid",[],"NV8"],[[69247,69631],"disallowed"],[[69632,69702],"valid"],[[69703,69709],"valid",[],"NV8"],[[69710,69713],"disallowed"],[[69714,69733],"valid",[],"NV8"],[[69734,69743],"valid"],[[69744,69758],"disallowed"],[[69759,69759],"valid"],[[69760,69818],"valid"],[[69819,69820],"valid",[],"NV8"],[[69821,69821],"disallowed"],[[69822,69825],"valid",[],"NV8"],[[69826,69839],"disallowed"],[[69840,69864],"valid"],[[69865,69871],"disallowed"],[[69872,69881],"valid"],[[69882,69887],"disallowed"],[[69888,69940],"valid"],[[69941,69941],"disallowed"],[[69942,69951],"valid"],[[69952,69955],"valid",[],"NV8"],[[69956,69967],"disallowed"],[[69968,70003],"valid"],[[70004,70005],"valid",[],"NV8"],[[70006,70006],"valid"],[[70007,70015],"disallowed"],[[70016,70084],"valid"],[[70085,70088],"valid",[],"NV8"],[[70089,70089],"valid",[],"NV8"],[[70090,70092],"valid"],[[70093,70093],"valid",[],"NV8"],[[70094,70095],"disallowed"],[[70096,70105],"valid"],[[70106,70106],"valid"],[[70107,70107],"valid",[],"NV8"],[[70108,70108],"valid"],[[70109,70111],"valid",[],"NV8"],[[70112,70112],"disallowed"],[[70113,70132],"valid",[],"NV8"],[[70133,70143],"disallowed"],[[70144,70161],"valid"],[[70162,70162],"disallowed"],[[70163,70199],"valid"],[[70200,70205],"valid",[],"NV8"],[[70206,70271],"disallowed"],[[70272,70278],"valid"],[[70279,70279],"disallowed"],[[70280,70280],"valid"],[[70281,70281],"disallowed"],[[70282,70285],"valid"],[[70286,70286],"disallowed"],[[70287,70301],"valid"],[[70302,70302],"disallowed"],[[70303,70312],"valid"],[[70313,70313],"valid",[],"NV8"],[[70314,70319],"disallowed"],[[70320,70378],"valid"],[[70379,70383],"disallowed"],[[70384,70393],"valid"],[[70394,70399],"disallowed"],[[70400,70400],"valid"],[[70401,70403],"valid"],[[70404,70
404],"disallowed"],[[70405,70412],"valid"],[[70413,70414],"disallowed"],[[70415,70416],"valid"],[[70417,70418],"disallowed"],[[70419,70440],"valid"],[[70441,70441],"disallowed"],[[70442,70448],"valid"],[[70449,70449],"disallowed"],[[70450,70451],"valid"],[[70452,70452],"disallowed"],[[70453,70457],"valid"],[[70458,70459],"disallowed"],[[70460,70468],"valid"],[[70469,70470],"disallowed"],[[70471,70472],"valid"],[[70473,70474],"disallowed"],[[70475,70477],"valid"],[[70478,70479],"disallowed"],[[70480,70480],"valid"],[[70481,70486],"disallowed"],[[70487,70487],"valid"],[[70488,70492],"disallowed"],[[70493,70499],"valid"],[[70500,70501],"disallowed"],[[70502,70508],"valid"],[[70509,70511],"disallowed"],[[70512,70516],"valid"],[[70517,70783],"disallowed"],[[70784,70853],"valid"],[[70854,70854],"valid",[],"NV8"],[[70855,70855],"valid"],[[70856,70863],"disallowed"],[[70864,70873],"valid"],[[70874,71039],"disallowed"],[[71040,71093],"valid"],[[71094,71095],"disallowed"],[[71096,71104],"valid"],[[71105,71113],"valid",[],"NV8"],[[71114,71127],"valid",[],"NV8"],[[71128,71133],"valid"],[[71134,71167],"disallowed"],[[71168,71232],"valid"],[[71233,71235],"valid",[],"NV8"],[[71236,71236],"valid"],[[71237,71247],"disallowed"],[[71248,71257],"valid"],[[71258,71295],"disallowed"],[[71296,71351],"valid"],[[71352,71359],"disallowed"],[[71360,71369],"valid"],[[71370,71423],"disallowed"],[[71424,71449],"valid"],[[71450,71452],"disallowed"],[[71453,71467],"valid"],[[71468,71471],"disallowed"],[[71472,71481],"valid"],[[71482,71487],"valid",[],"NV8"],[[71488,71839],"disallowed"],[[71840,71840],"mapped",[71872]],[[71841,71841],"mapped",[71873]],[[71842,71842],"mapped",[71874]],[[71843,71843],"mapped",[71875]],[[71844,71844],"mapped",[71876]],[[71845,71845],"mapped",[71877]],[[71846,71846],"mapped",[71878]],[[71847,71847],"mapped",[71879]],[[71848,71848],"mapped",[71880]],[[71849,71849],"mapped",[71881]],[[71850,71850],"mapped",[71882]],[[71851,71851],"mapped",[71883]],[[71852,71852],"mapped",[71884]],[[71853,71853],"mapped",[71885]],[[71854,71854],"mapped",[71886]],[[71855,71855],"mapped",[71887]],[[71856,71856],"mapped",[71888]],[[71857,71857],"mapped",[71889]],[[71858,71858],"mapped",[71890]],[[71859,71859],"mapped",[71891]],[[71860,71860],"mapped",[71892]],[[71861,71861],"mapped",[71893]],[[71862,71862],"mapped",[71894]],[[71863,71863],"mapped",[71895]],[[71864,71864],"mapped",[71896]],[[71865,71865],"mapped",[71897]],[[71866,71866],"mapped",[71898]],[[71867,71867],"mapped",[71899]],[[71868,71868],"mapped",[71900]],[[71869,71869],"mapped",[71901]],[[71870,71870],"mapped",[71902]],[[71871,71871],"mapped",[71903]],[[71872,71913],"valid"],[[71914,71922],"valid",[],"NV8"],[[71923,71934],"disallowed"],[[71935,71935],"valid"],[[71936,72383],"disallowed"],[[72384,72440],"valid"],[[72441,73727],"disallowed"],[[73728,74606],"valid"],[[74607,74648],"valid"],[[74649,74649],"valid"],[[74650,74751],"disallowed"],[[74752,74850],"valid",[],"NV8"],[[74851,74862],"valid",[],"NV8"],[[74863,74863],"disallowed"],[[74864,74867],"valid",[],"NV8"],[[74868,74868],"valid",[],"NV8"],[[74869,74879],"disallowed"],[[74880,75075],"valid"],[[75076,77823],"disallowed"],[[77824,78894],"valid"],[[78895,82943],"disallowed"],[[82944,83526],"valid"],[[83527,92159],"disallowed"],[[92160,92728],"valid"],[[92729,92735],"disallowed"],[[92736,92766],"valid"],[[92767,92767],"disallowed"],[[92768,92777],"valid"],[[92778,92781],"disallowed"],[[92782,92783],"valid",[],"NV8"],[[92784,92879],"disallowed"],[[92880,92909],"valid"],[[92910,92911],"disallowed"],[[
92912,92916],"valid"],[[92917,92917],"valid",[],"NV8"],[[92918,92927],"disallowed"],[[92928,92982],"valid"],[[92983,92991],"valid",[],"NV8"],[[92992,92995],"valid"],[[92996,92997],"valid",[],"NV8"],[[92998,93007],"disallowed"],[[93008,93017],"valid"],[[93018,93018],"disallowed"],[[93019,93025],"valid",[],"NV8"],[[93026,93026],"disallowed"],[[93027,93047],"valid"],[[93048,93052],"disallowed"],[[93053,93071],"valid"],[[93072,93951],"disallowed"],[[93952,94020],"valid"],[[94021,94031],"disallowed"],[[94032,94078],"valid"],[[94079,94094],"disallowed"],[[94095,94111],"valid"],[[94112,110591],"disallowed"],[[110592,110593],"valid"],[[110594,113663],"disallowed"],[[113664,113770],"valid"],[[113771,113775],"disallowed"],[[113776,113788],"valid"],[[113789,113791],"disallowed"],[[113792,113800],"valid"],[[113801,113807],"disallowed"],[[113808,113817],"valid"],[[113818,113819],"disallowed"],[[113820,113820],"valid",[],"NV8"],[[113821,113822],"valid"],[[113823,113823],"valid",[],"NV8"],[[113824,113827],"ignored"],[[113828,118783],"disallowed"],[[118784,119029],"valid",[],"NV8"],[[119030,119039],"disallowed"],[[119040,119078],"valid",[],"NV8"],[[119079,119080],"disallowed"],[[119081,119081],"valid",[],"NV8"],[[119082,119133],"valid",[],"NV8"],[[119134,119134],"mapped",[119127,119141]],[[119135,119135],"mapped",[119128,119141]],[[119136,119136],"mapped",[119128,119141,119150]],[[119137,119137],"mapped",[119128,119141,119151]],[[119138,119138],"mapped",[119128,119141,119152]],[[119139,119139],"mapped",[119128,119141,119153]],[[119140,119140],"mapped",[119128,119141,119154]],[[119141,119154],"valid",[],"NV8"],[[119155,119162],"disallowed"],[[119163,119226],"valid",[],"NV8"],[[119227,119227],"mapped",[119225,119141]],[[119228,119228],"mapped",[119226,119141]],[[119229,119229],"mapped",[119225,119141,119150]],[[119230,119230],"mapped",[119226,119141,119150]],[[119231,119231],"mapped",[119225,119141,119151]],[[119232,119232],"mapped",[119226,119141,119151]],[[119233,119261],"valid",[],"NV8"],[[119262,119272],"valid",[],"NV8"],[[119273,119295],"disallowed"],[[119296,119365],"valid",[],"NV8"],[[119366,119551],"disallowed"],[[119552,119638],"valid",[],"NV8"],[[119639,119647],"disallowed"],[[119648,119665],"valid",[],"NV8"],[[119666,119807],"disallowed"],[[119808,119808],"mapped",[97]],[[119809,119809],"mapped",[98]],[[119810,119810],"mapped",[99]],[[119811,119811],"mapped",[100]],[[119812,119812],"mapped",[101]],[[119813,119813],"mapped",[102]],[[119814,119814],"mapped",[103]],[[119815,119815],"mapped",[104]],[[119816,119816],"mapped",[105]],[[119817,119817],"mapped",[106]],[[119818,119818],"mapped",[107]],[[119819,119819],"mapped",[108]],[[119820,119820],"mapped",[109]],[[119821,119821],"mapped",[110]],[[119822,119822],"mapped",[111]],[[119823,119823],"mapped",[112]],[[119824,119824],"mapped",[113]],[[119825,119825],"mapped",[114]],[[119826,119826],"mapped",[115]],[[119827,119827],"mapped",[116]],[[119828,119828],"mapped",[117]],[[119829,119829],"mapped",[118]],[[119830,119830],"mapped",[119]],[[119831,119831],"mapped",[120]],[[119832,119832],"mapped",[121]],[[119833,119833],"mapped",[122]],[[119834,119834],"mapped",[97]],[[119835,119835],"mapped",[98]],[[119836,119836],"mapped",[99]],[[119837,119837],"mapped",[100]],[[119838,119838],"mapped",[101]],[[119839,119839],"mapped",[102]],[[119840,119840],"mapped",[103]],[[119841,119841],"mapped",[104]],[[119842,119842],"mapped",[105]],[[119843,119843],"mapped",[106]],[[119844,119844],"mapped",[107]],[[119845,119845],"mapped",[108]],[[119846,119846],"mapped",[109]],[[
119847,119847],"mapped",[110]],[[119848,119848],"mapped",[111]],[[119849,119849],"mapped",[112]],[[119850,119850],"mapped",[113]],[[119851,119851],"mapped",[114]],[[119852,119852],"mapped",[115]],[[119853,119853],"mapped",[116]],[[119854,119854],"mapped",[117]],[[119855,119855],"mapped",[118]],[[119856,119856],"mapped",[119]],[[119857,119857],"mapped",[120]],[[119858,119858],"mapped",[121]],[[119859,119859],"mapped",[122]],[[119860,119860],"mapped",[97]],[[119861,119861],"mapped",[98]],[[119862,119862],"mapped",[99]],[[119863,119863],"mapped",[100]],[[119864,119864],"mapped",[101]],[[119865,119865],"mapped",[102]],[[119866,119866],"mapped",[103]],[[119867,119867],"mapped",[104]],[[119868,119868],"mapped",[105]],[[119869,119869],"mapped",[106]],[[119870,119870],"mapped",[107]],[[119871,119871],"mapped",[108]],[[119872,119872],"mapped",[109]],[[119873,119873],"mapped",[110]],[[119874,119874],"mapped",[111]],[[119875,119875],"mapped",[112]],[[119876,119876],"mapped",[113]],[[119877,119877],"mapped",[114]],[[119878,119878],"mapped",[115]],[[119879,119879],"mapped",[116]],[[119880,119880],"mapped",[117]],[[119881,119881],"mapped",[118]],[[119882,119882],"mapped",[119]],[[119883,119883],"mapped",[120]],[[119884,119884],"mapped",[121]],[[119885,119885],"mapped",[122]],[[119886,119886],"mapped",[97]],[[119887,119887],"mapped",[98]],[[119888,119888],"mapped",[99]],[[119889,119889],"mapped",[100]],[[119890,119890],"mapped",[101]],[[119891,119891],"mapped",[102]],[[119892,119892],"mapped",[103]],[[119893,119893],"disallowed"],[[119894,119894],"mapped",[105]],[[119895,119895],"mapped",[106]],[[119896,119896],"mapped",[107]],[[119897,119897],"mapped",[108]],[[119898,119898],"mapped",[109]],[[119899,119899],"mapped",[110]],[[119900,119900],"mapped",[111]],[[119901,119901],"mapped",[112]],[[119902,119902],"mapped",[113]],[[119903,119903],"mapped",[114]],[[119904,119904],"mapped",[115]],[[119905,119905],"mapped",[116]],[[119906,119906],"mapped",[117]],[[119907,119907],"mapped",[118]],[[119908,119908],"mapped",[119]],[[119909,119909],"mapped",[120]],[[119910,119910],"mapped",[121]],[[119911,119911],"mapped",[122]],[[119912,119912],"mapped",[97]],[[119913,119913],"mapped",[98]],[[119914,119914],"mapped",[99]],[[119915,119915],"mapped",[100]],[[119916,119916],"mapped",[101]],[[119917,119917],"mapped",[102]],[[119918,119918],"mapped",[103]],[[119919,119919],"mapped",[104]],[[119920,119920],"mapped",[105]],[[119921,119921],"mapped",[106]],[[119922,119922],"mapped",[107]],[[119923,119923],"mapped",[108]],[[119924,119924],"mapped",[109]],[[119925,119925],"mapped",[110]],[[119926,119926],"mapped",[111]],[[119927,119927],"mapped",[112]],[[119928,119928],"mapped",[113]],[[119929,119929],"mapped",[114]],[[119930,119930],"mapped",[115]],[[119931,119931],"mapped",[116]],[[119932,119932],"mapped",[117]],[[119933,119933],"mapped",[118]],[[119934,119934],"mapped",[119]],[[119935,119935],"mapped",[120]],[[119936,119936],"mapped",[121]],[[119937,119937],"mapped",[122]],[[119938,119938],"mapped",[97]],[[119939,119939],"mapped",[98]],[[119940,119940],"mapped",[99]],[[119941,119941],"mapped",[100]],[[119942,119942],"mapped",[101]],[[119943,119943],"mapped",[102]],[[119944,119944],"mapped",[103]],[[119945,119945],"mapped",[104]],[[119946,119946],"mapped",[105]],[[119947,119947],"mapped",[106]],[[119948,119948],"mapped",[107]],[[119949,119949],"mapped",[108]],[[119950,119950],"mapped",[109]],[[119951,119951],"mapped",[110]],[[119952,119952],"mapped",[111]],[[119953,119953],"mapped",[112]],[[119954,119954],"mapped",[113]],[[11995
5,119955],"mapped",[114]],[[119956,119956],"mapped",[115]],[[119957,119957],"mapped",[116]],[[119958,119958],"mapped",[117]],[[119959,119959],"mapped",[118]],[[119960,119960],"mapped",[119]],[[119961,119961],"mapped",[120]],[[119962,119962],"mapped",[121]],[[119963,119963],"mapped",[122]],[[119964,119964],"mapped",[97]],[[119965,119965],"disallowed"],[[119966,119966],"mapped",[99]],[[119967,119967],"mapped",[100]],[[119968,119969],"disallowed"],[[119970,119970],"mapped",[103]],[[119971,119972],"disallowed"],[[119973,119973],"mapped",[106]],[[119974,119974],"mapped",[107]],[[119975,119976],"disallowed"],[[119977,119977],"mapped",[110]],[[119978,119978],"mapped",[111]],[[119979,119979],"mapped",[112]],[[119980,119980],"mapped",[113]],[[119981,119981],"disallowed"],[[119982,119982],"mapped",[115]],[[119983,119983],"mapped",[116]],[[119984,119984],"mapped",[117]],[[119985,119985],"mapped",[118]],[[119986,119986],"mapped",[119]],[[119987,119987],"mapped",[120]],[[119988,119988],"mapped",[121]],[[119989,119989],"mapped",[122]],[[119990,119990],"mapped",[97]],[[119991,119991],"mapped",[98]],[[119992,119992],"mapped",[99]],[[119993,119993],"mapped",[100]],[[119994,119994],"disallowed"],[[119995,119995],"mapped",[102]],[[119996,119996],"disallowed"],[[119997,119997],"mapped",[104]],[[119998,119998],"mapped",[105]],[[119999,119999],"mapped",[106]],[[120000,120000],"mapped",[107]],[[120001,120001],"mapped",[108]],[[120002,120002],"mapped",[109]],[[120003,120003],"mapped",[110]],[[120004,120004],"disallowed"],[[120005,120005],"mapped",[112]],[[120006,120006],"mapped",[113]],[[120007,120007],"mapped",[114]],[[120008,120008],"mapped",[115]],[[120009,120009],"mapped",[116]],[[120010,120010],"mapped",[117]],[[120011,120011],"mapped",[118]],[[120012,120012],"mapped",[119]],[[120013,120013],"mapped",[120]],[[120014,120014],"mapped",[121]],[[120015,120015],"mapped",[122]],[[120016,120016],"mapped",[97]],[[120017,120017],"mapped",[98]],[[120018,120018],"mapped",[99]],[[120019,120019],"mapped",[100]],[[120020,120020],"mapped",[101]],[[120021,120021],"mapped",[102]],[[120022,120022],"mapped",[103]],[[120023,120023],"mapped",[104]],[[120024,120024],"mapped",[105]],[[120025,120025],"mapped",[106]],[[120026,120026],"mapped",[107]],[[120027,120027],"mapped",[108]],[[120028,120028],"mapped",[109]],[[120029,120029],"mapped",[110]],[[120030,120030],"mapped",[111]],[[120031,120031],"mapped",[112]],[[120032,120032],"mapped",[113]],[[120033,120033],"mapped",[114]],[[120034,120034],"mapped",[115]],[[120035,120035],"mapped",[116]],[[120036,120036],"mapped",[117]],[[120037,120037],"mapped",[118]],[[120038,120038],"mapped",[119]],[[120039,120039],"mapped",[120]],[[120040,120040],"mapped",[121]],[[120041,120041],"mapped",[122]],[[120042,120042],"mapped",[97]],[[120043,120043],"mapped",[98]],[[120044,120044],"mapped",[99]],[[120045,120045],"mapped",[100]],[[120046,120046],"mapped",[101]],[[120047,120047],"mapped",[102]],[[120048,120048],"mapped",[103]],[[120049,120049],"mapped",[104]],[[120050,120050],"mapped",[105]],[[120051,120051],"mapped",[106]],[[120052,120052],"mapped",[107]],[[120053,120053],"mapped",[108]],[[120054,120054],"mapped",[109]],[[120055,120055],"mapped",[110]],[[120056,120056],"mapped",[111]],[[120057,120057],"mapped",[112]],[[120058,120058],"mapped",[113]],[[120059,120059],"mapped",[114]],[[120060,120060],"mapped",[115]],[[120061,120061],"mapped",[116]],[[120062,120062],"mapped",[117]],[[120063,120063],"mapped",[118]],[[120064,120064],"mapped",[119]],[[120065,120065],"mapped",[120]],[[120066,120066],"mapped"
,[121]],[[120067,120067],"mapped",[122]],[[120068,120068],"mapped",[97]],[[120069,120069],"mapped",[98]],[[120070,120070],"disallowed"],[[120071,120071],"mapped",[100]],[[120072,120072],"mapped",[101]],[[120073,120073],"mapped",[102]],[[120074,120074],"mapped",[103]],[[120075,120076],"disallowed"],[[120077,120077],"mapped",[106]],[[120078,120078],"mapped",[107]],[[120079,120079],"mapped",[108]],[[120080,120080],"mapped",[109]],[[120081,120081],"mapped",[110]],[[120082,120082],"mapped",[111]],[[120083,120083],"mapped",[112]],[[120084,120084],"mapped",[113]],[[120085,120085],"disallowed"],[[120086,120086],"mapped",[115]],[[120087,120087],"mapped",[116]],[[120088,120088],"mapped",[117]],[[120089,120089],"mapped",[118]],[[120090,120090],"mapped",[119]],[[120091,120091],"mapped",[120]],[[120092,120092],"mapped",[121]],[[120093,120093],"disallowed"],[[120094,120094],"mapped",[97]],[[120095,120095],"mapped",[98]],[[120096,120096],"mapped",[99]],[[120097,120097],"mapped",[100]],[[120098,120098],"mapped",[101]],[[120099,120099],"mapped",[102]],[[120100,120100],"mapped",[103]],[[120101,120101],"mapped",[104]],[[120102,120102],"mapped",[105]],[[120103,120103],"mapped",[106]],[[120104,120104],"mapped",[107]],[[120105,120105],"mapped",[108]],[[120106,120106],"mapped",[109]],[[120107,120107],"mapped",[110]],[[120108,120108],"mapped",[111]],[[120109,120109],"mapped",[112]],[[120110,120110],"mapped",[113]],[[120111,120111],"mapped",[114]],[[120112,120112],"mapped",[115]],[[120113,120113],"mapped",[116]],[[120114,120114],"mapped",[117]],[[120115,120115],"mapped",[118]],[[120116,120116],"mapped",[119]],[[120117,120117],"mapped",[120]],[[120118,120118],"mapped",[121]],[[120119,120119],"mapped",[122]],[[120120,120120],"mapped",[97]],[[120121,120121],"mapped",[98]],[[120122,120122],"disallowed"],[[120123,120123],"mapped",[100]],[[120124,120124],"mapped",[101]],[[120125,120125],"mapped",[102]],[[120126,120126],"mapped",[103]],[[120127,120127],"disallowed"],[[120128,120128],"mapped",[105]],[[120129,120129],"mapped",[106]],[[120130,120130],"mapped",[107]],[[120131,120131],"mapped",[108]],[[120132,120132],"mapped",[109]],[[120133,120133],"disallowed"],[[120134,120134],"mapped",[111]],[[120135,120137],"disallowed"],[[120138,120138],"mapped",[115]],[[120139,120139],"mapped",[116]],[[120140,120140],"mapped",[117]],[[120141,120141],"mapped",[118]],[[120142,120142],"mapped",[119]],[[120143,120143],"mapped",[120]],[[120144,120144],"mapped",[121]],[[120145,120145],"disallowed"],[[120146,120146],"mapped",[97]],[[120147,120147],"mapped",[98]],[[120148,120148],"mapped",[99]],[[120149,120149],"mapped",[100]],[[120150,120150],"mapped",[101]],[[120151,120151],"mapped",[102]],[[120152,120152],"mapped",[103]],[[120153,120153],"mapped",[104]],[[120154,120154],"mapped",[105]],[[120155,120155],"mapped",[106]],[[120156,120156],"mapped",[107]],[[120157,120157],"mapped",[108]],[[120158,120158],"mapped",[109]],[[120159,120159],"mapped",[110]],[[120160,120160],"mapped",[111]],[[120161,120161],"mapped",[112]],[[120162,120162],"mapped",[113]],[[120163,120163],"mapped",[114]],[[120164,120164],"mapped",[115]],[[120165,120165],"mapped",[116]],[[120166,120166],"mapped",[117]],[[120167,120167],"mapped",[118]],[[120168,120168],"mapped",[119]],[[120169,120169],"mapped",[120]],[[120170,120170],"mapped",[121]],[[120171,120171],"mapped",[122]],[[120172,120172],"mapped",[97]],[[120173,120173],"mapped",[98]],[[120174,120174],"mapped",[99]],[[120175,120175],"mapped",[100]],[[120176,120176],"mapped",[101]],[[120177,120177],"mapped",[102]],[[120178,12017
8],"mapped",[103]],[[120179,120179],"mapped",[104]],[[120180,120180],"mapped",[105]],[[120181,120181],"mapped",[106]],[[120182,120182],"mapped",[107]],[[120183,120183],"mapped",[108]],[[120184,120184],"mapped",[109]],[[120185,120185],"mapped",[110]],[[120186,120186],"mapped",[111]],[[120187,120187],"mapped",[112]],[[120188,120188],"mapped",[113]],[[120189,120189],"mapped",[114]],[[120190,120190],"mapped",[115]],[[120191,120191],"mapped",[116]],[[120192,120192],"mapped",[117]],[[120193,120193],"mapped",[118]],[[120194,120194],"mapped",[119]],[[120195,120195],"mapped",[120]],[[120196,120196],"mapped",[121]],[[120197,120197],"mapped",[122]],[[120198,120198],"mapped",[97]],[[120199,120199],"mapped",[98]],[[120200,120200],"mapped",[99]],[[120201,120201],"mapped",[100]],[[120202,120202],"mapped",[101]],[[120203,120203],"mapped",[102]],[[120204,120204],"mapped",[103]],[[120205,120205],"mapped",[104]],[[120206,120206],"mapped",[105]],[[120207,120207],"mapped",[106]],[[120208,120208],"mapped",[107]],[[120209,120209],"mapped",[108]],[[120210,120210],"mapped",[109]],[[120211,120211],"mapped",[110]],[[120212,120212],"mapped",[111]],[[120213,120213],"mapped",[112]],[[120214,120214],"mapped",[113]],[[120215,120215],"mapped",[114]],[[120216,120216],"mapped",[115]],[[120217,120217],"mapped",[116]],[[120218,120218],"mapped",[117]],[[120219,120219],"mapped",[118]],[[120220,120220],"mapped",[119]],[[120221,120221],"mapped",[120]],[[120222,120222],"mapped",[121]],[[120223,120223],"mapped",[122]],[[120224,120224],"mapped",[97]],[[120225,120225],"mapped",[98]],[[120226,120226],"mapped",[99]],[[120227,120227],"mapped",[100]],[[120228,120228],"mapped",[101]],[[120229,120229],"mapped",[102]],[[120230,120230],"mapped",[103]],[[120231,120231],"mapped",[104]],[[120232,120232],"mapped",[105]],[[120233,120233],"mapped",[106]],[[120234,120234],"mapped",[107]],[[120235,120235],"mapped",[108]],[[120236,120236],"mapped",[109]],[[120237,120237],"mapped",[110]],[[120238,120238],"mapped",[111]],[[120239,120239],"mapped",[112]],[[120240,120240],"mapped",[113]],[[120241,120241],"mapped",[114]],[[120242,120242],"mapped",[115]],[[120243,120243],"mapped",[116]],[[120244,120244],"mapped",[117]],[[120245,120245],"mapped",[118]],[[120246,120246],"mapped",[119]],[[120247,120247],"mapped",[120]],[[120248,120248],"mapped",[121]],[[120249,120249],"mapped",[122]],[[120250,120250],"mapped",[97]],[[120251,120251],"mapped",[98]],[[120252,120252],"mapped",[99]],[[120253,120253],"mapped",[100]],[[120254,120254],"mapped",[101]],[[120255,120255],"mapped",[102]],[[120256,120256],"mapped",[103]],[[120257,120257],"mapped",[104]],[[120258,120258],"mapped",[105]],[[120259,120259],"mapped",[106]],[[120260,120260],"mapped",[107]],[[120261,120261],"mapped",[108]],[[120262,120262],"mapped",[109]],[[120263,120263],"mapped",[110]],[[120264,120264],"mapped",[111]],[[120265,120265],"mapped",[112]],[[120266,120266],"mapped",[113]],[[120267,120267],"mapped",[114]],[[120268,120268],"mapped",[115]],[[120269,120269],"mapped",[116]],[[120270,120270],"mapped",[117]],[[120271,120271],"mapped",[118]],[[120272,120272],"mapped",[119]],[[120273,120273],"mapped",[120]],[[120274,120274],"mapped",[121]],[[120275,120275],"mapped",[122]],[[120276,120276],"mapped",[97]],[[120277,120277],"mapped",[98]],[[120278,120278],"mapped",[99]],[[120279,120279],"mapped",[100]],[[120280,120280],"mapped",[101]],[[120281,120281],"mapped",[102]],[[120282,120282],"mapped",[103]],[[120283,120283],"mapped",[104]],[[120284,120284],"mapped",[105]],[[120285,120285],"mapped",[106]],[[120286,120286],
"mapped",[107]],[[120287,120287],"mapped",[108]],[[120288,120288],"mapped",[109]],[[120289,120289],"mapped",[110]],[[120290,120290],"mapped",[111]],[[120291,120291],"mapped",[112]],[[120292,120292],"mapped",[113]],[[120293,120293],"mapped",[114]],[[120294,120294],"mapped",[115]],[[120295,120295],"mapped",[116]],[[120296,120296],"mapped",[117]],[[120297,120297],"mapped",[118]],[[120298,120298],"mapped",[119]],[[120299,120299],"mapped",[120]],[[120300,120300],"mapped",[121]],[[120301,120301],"mapped",[122]],[[120302,120302],"mapped",[97]],[[120303,120303],"mapped",[98]],[[120304,120304],"mapped",[99]],[[120305,120305],"mapped",[100]],[[120306,120306],"mapped",[101]],[[120307,120307],"mapped",[102]],[[120308,120308],"mapped",[103]],[[120309,120309],"mapped",[104]],[[120310,120310],"mapped",[105]],[[120311,120311],"mapped",[106]],[[120312,120312],"mapped",[107]],[[120313,120313],"mapped",[108]],[[120314,120314],"mapped",[109]],[[120315,120315],"mapped",[110]],[[120316,120316],"mapped",[111]],[[120317,120317],"mapped",[112]],[[120318,120318],"mapped",[113]],[[120319,120319],"mapped",[114]],[[120320,120320],"mapped",[115]],[[120321,120321],"mapped",[116]],[[120322,120322],"mapped",[117]],[[120323,120323],"mapped",[118]],[[120324,120324],"mapped",[119]],[[120325,120325],"mapped",[120]],[[120326,120326],"mapped",[121]],[[120327,120327],"mapped",[122]],[[120328,120328],"mapped",[97]],[[120329,120329],"mapped",[98]],[[120330,120330],"mapped",[99]],[[120331,120331],"mapped",[100]],[[120332,120332],"mapped",[101]],[[120333,120333],"mapped",[102]],[[120334,120334],"mapped",[103]],[[120335,120335],"mapped",[104]],[[120336,120336],"mapped",[105]],[[120337,120337],"mapped",[106]],[[120338,120338],"mapped",[107]],[[120339,120339],"mapped",[108]],[[120340,120340],"mapped",[109]],[[120341,120341],"mapped",[110]],[[120342,120342],"mapped",[111]],[[120343,120343],"mapped",[112]],[[120344,120344],"mapped",[113]],[[120345,120345],"mapped",[114]],[[120346,120346],"mapped",[115]],[[120347,120347],"mapped",[116]],[[120348,120348],"mapped",[117]],[[120349,120349],"mapped",[118]],[[120350,120350],"mapped",[119]],[[120351,120351],"mapped",[120]],[[120352,120352],"mapped",[121]],[[120353,120353],"mapped",[122]],[[120354,120354],"mapped",[97]],[[120355,120355],"mapped",[98]],[[120356,120356],"mapped",[99]],[[120357,120357],"mapped",[100]],[[120358,120358],"mapped",[101]],[[120359,120359],"mapped",[102]],[[120360,120360],"mapped",[103]],[[120361,120361],"mapped",[104]],[[120362,120362],"mapped",[105]],[[120363,120363],"mapped",[106]],[[120364,120364],"mapped",[107]],[[120365,120365],"mapped",[108]],[[120366,120366],"mapped",[109]],[[120367,120367],"mapped",[110]],[[120368,120368],"mapped",[111]],[[120369,120369],"mapped",[112]],[[120370,120370],"mapped",[113]],[[120371,120371],"mapped",[114]],[[120372,120372],"mapped",[115]],[[120373,120373],"mapped",[116]],[[120374,120374],"mapped",[117]],[[120375,120375],"mapped",[118]],[[120376,120376],"mapped",[119]],[[120377,120377],"mapped",[120]],[[120378,120378],"mapped",[121]],[[120379,120379],"mapped",[122]],[[120380,120380],"mapped",[97]],[[120381,120381],"mapped",[98]],[[120382,120382],"mapped",[99]],[[120383,120383],"mapped",[100]],[[120384,120384],"mapped",[101]],[[120385,120385],"mapped",[102]],[[120386,120386],"mapped",[103]],[[120387,120387],"mapped",[104]],[[120388,120388],"mapped",[105]],[[120389,120389],"mapped",[106]],[[120390,120390],"mapped",[107]],[[120391,120391],"mapped",[108]],[[120392,120392],"mapped",[109]],[[120393,120393],"mapped",[110]],[[120394,120394],"ma
pped",[111]],[[120395,120395],"mapped",[112]],[[120396,120396],"mapped",[113]],[[120397,120397],"mapped",[114]],[[120398,120398],"mapped",[115]],[[120399,120399],"mapped",[116]],[[120400,120400],"mapped",[117]],[[120401,120401],"mapped",[118]],[[120402,120402],"mapped",[119]],[[120403,120403],"mapped",[120]],[[120404,120404],"mapped",[121]],[[120405,120405],"mapped",[122]],[[120406,120406],"mapped",[97]],[[120407,120407],"mapped",[98]],[[120408,120408],"mapped",[99]],[[120409,120409],"mapped",[100]],[[120410,120410],"mapped",[101]],[[120411,120411],"mapped",[102]],[[120412,120412],"mapped",[103]],[[120413,120413],"mapped",[104]],[[120414,120414],"mapped",[105]],[[120415,120415],"mapped",[106]],[[120416,120416],"mapped",[107]],[[120417,120417],"mapped",[108]],[[120418,120418],"mapped",[109]],[[120419,120419],"mapped",[110]],[[120420,120420],"mapped",[111]],[[120421,120421],"mapped",[112]],[[120422,120422],"mapped",[113]],[[120423,120423],"mapped",[114]],[[120424,120424],"mapped",[115]],[[120425,120425],"mapped",[116]],[[120426,120426],"mapped",[117]],[[120427,120427],"mapped",[118]],[[120428,120428],"mapped",[119]],[[120429,120429],"mapped",[120]],[[120430,120430],"mapped",[121]],[[120431,120431],"mapped",[122]],[[120432,120432],"mapped",[97]],[[120433,120433],"mapped",[98]],[[120434,120434],"mapped",[99]],[[120435,120435],"mapped",[100]],[[120436,120436],"mapped",[101]],[[120437,120437],"mapped",[102]],[[120438,120438],"mapped",[103]],[[120439,120439],"mapped",[104]],[[120440,120440],"mapped",[105]],[[120441,120441],"mapped",[106]],[[120442,120442],"mapped",[107]],[[120443,120443],"mapped",[108]],[[120444,120444],"mapped",[109]],[[120445,120445],"mapped",[110]],[[120446,120446],"mapped",[111]],[[120447,120447],"mapped",[112]],[[120448,120448],"mapped",[113]],[[120449,120449],"mapped",[114]],[[120450,120450],"mapped",[115]],[[120451,120451],"mapped",[116]],[[120452,120452],"mapped",[117]],[[120453,120453],"mapped",[118]],[[120454,120454],"mapped",[119]],[[120455,120455],"mapped",[120]],[[120456,120456],"mapped",[121]],[[120457,120457],"mapped",[122]],[[120458,120458],"mapped",[97]],[[120459,120459],"mapped",[98]],[[120460,120460],"mapped",[99]],[[120461,120461],"mapped",[100]],[[120462,120462],"mapped",[101]],[[120463,120463],"mapped",[102]],[[120464,120464],"mapped",[103]],[[120465,120465],"mapped",[104]],[[120466,120466],"mapped",[105]],[[120467,120467],"mapped",[106]],[[120468,120468],"mapped",[107]],[[120469,120469],"mapped",[108]],[[120470,120470],"mapped",[109]],[[120471,120471],"mapped",[110]],[[120472,120472],"mapped",[111]],[[120473,120473],"mapped",[112]],[[120474,120474],"mapped",[113]],[[120475,120475],"mapped",[114]],[[120476,120476],"mapped",[115]],[[120477,120477],"mapped",[116]],[[120478,120478],"mapped",[117]],[[120479,120479],"mapped",[118]],[[120480,120480],"mapped",[119]],[[120481,120481],"mapped",[120]],[[120482,120482],"mapped",[121]],[[120483,120483],"mapped",[122]],[[120484,120484],"mapped",[305]],[[120485,120485],"mapped",[567]],[[120486,120487],"disallowed"],[[120488,120488],"mapped",[945]],[[120489,120489],"mapped",[946]],[[120490,120490],"mapped",[947]],[[120491,120491],"mapped",[948]],[[120492,120492],"mapped",[949]],[[120493,120493],"mapped",[950]],[[120494,120494],"mapped",[951]],[[120495,120495],"mapped",[952]],[[120496,120496],"mapped",[953]],[[120497,120497],"mapped",[954]],[[120498,120498],"mapped",[955]],[[120499,120499],"mapped",[956]],[[120500,120500],"mapped",[957]],[[120501,120501],"mapped",[958]],[[120502,120502],"mapped",[959]],[[120503,120503],"mapp
ed",[960]],[[120504,120504],"mapped",[961]],[[120505,120505],"mapped",[952]],[[120506,120506],"mapped",[963]],[[120507,120507],"mapped",[964]],[[120508,120508],"mapped",[965]],[[120509,120509],"mapped",[966]],[[120510,120510],"mapped",[967]],[[120511,120511],"mapped",[968]],[[120512,120512],"mapped",[969]],[[120513,120513],"mapped",[8711]],[[120514,120514],"mapped",[945]],[[120515,120515],"mapped",[946]],[[120516,120516],"mapped",[947]],[[120517,120517],"mapped",[948]],[[120518,120518],"mapped",[949]],[[120519,120519],"mapped",[950]],[[120520,120520],"mapped",[951]],[[120521,120521],"mapped",[952]],[[120522,120522],"mapped",[953]],[[120523,120523],"mapped",[954]],[[120524,120524],"mapped",[955]],[[120525,120525],"mapped",[956]],[[120526,120526],"mapped",[957]],[[120527,120527],"mapped",[958]],[[120528,120528],"mapped",[959]],[[120529,120529],"mapped",[960]],[[120530,120530],"mapped",[961]],[[120531,120532],"mapped",[963]],[[120533,120533],"mapped",[964]],[[120534,120534],"mapped",[965]],[[120535,120535],"mapped",[966]],[[120536,120536],"mapped",[967]],[[120537,120537],"mapped",[968]],[[120538,120538],"mapped",[969]],[[120539,120539],"mapped",[8706]],[[120540,120540],"mapped",[949]],[[120541,120541],"mapped",[952]],[[120542,120542],"mapped",[954]],[[120543,120543],"mapped",[966]],[[120544,120544],"mapped",[961]],[[120545,120545],"mapped",[960]],[[120546,120546],"mapped",[945]],[[120547,120547],"mapped",[946]],[[120548,120548],"mapped",[947]],[[120549,120549],"mapped",[948]],[[120550,120550],"mapped",[949]],[[120551,120551],"mapped",[950]],[[120552,120552],"mapped",[951]],[[120553,120553],"mapped",[952]],[[120554,120554],"mapped",[953]],[[120555,120555],"mapped",[954]],[[120556,120556],"mapped",[955]],[[120557,120557],"mapped",[956]],[[120558,120558],"mapped",[957]],[[120559,120559],"mapped",[958]],[[120560,120560],"mapped",[959]],[[120561,120561],"mapped",[960]],[[120562,120562],"mapped",[961]],[[120563,120563],"mapped",[952]],[[120564,120564],"mapped",[963]],[[120565,120565],"mapped",[964]],[[120566,120566],"mapped",[965]],[[120567,120567],"mapped",[966]],[[120568,120568],"mapped",[967]],[[120569,120569],"mapped",[968]],[[120570,120570],"mapped",[969]],[[120571,120571],"mapped",[8711]],[[120572,120572],"mapped",[945]],[[120573,120573],"mapped",[946]],[[120574,120574],"mapped",[947]],[[120575,120575],"mapped",[948]],[[120576,120576],"mapped",[949]],[[120577,120577],"mapped",[950]],[[120578,120578],"mapped",[951]],[[120579,120579],"mapped",[952]],[[120580,120580],"mapped",[953]],[[120581,120581],"mapped",[954]],[[120582,120582],"mapped",[955]],[[120583,120583],"mapped",[956]],[[120584,120584],"mapped",[957]],[[120585,120585],"mapped",[958]],[[120586,120586],"mapped",[959]],[[120587,120587],"mapped",[960]],[[120588,120588],"mapped",[961]],[[120589,120590],"mapped",[963]],[[120591,120591],"mapped",[964]],[[120592,120592],"mapped",[965]],[[120593,120593],"mapped",[966]],[[120594,120594],"mapped",[967]],[[120595,120595],"mapped",[968]],[[120596,120596],"mapped",[969]],[[120597,120597],"mapped",[8706]],[[120598,120598],"mapped",[949]],[[120599,120599],"mapped",[952]],[[120600,120600],"mapped",[954]],[[120601,120601],"mapped",[966]],[[120602,120602],"mapped",[961]],[[120603,120603],"mapped",[960]],[[120604,120604],"mapped",[945]],[[120605,120605],"mapped",[946]],[[120606,120606],"mapped",[947]],[[120607,120607],"mapped",[948]],[[120608,120608],"mapped",[949]],[[120609,120609],"mapped",[950]],[[120610,120610],"mapped",[951]],[[120611,120611],"mapped",[952]],[[120612,120612],"mapped",[953]],[[120613,
120613],"mapped",[954]],[[120614,120614],"mapped",[955]],[[120615,120615],"mapped",[956]],[[120616,120616],"mapped",[957]],[[120617,120617],"mapped",[958]],[[120618,120618],"mapped",[959]],[[120619,120619],"mapped",[960]],[[120620,120620],"mapped",[961]],[[120621,120621],"mapped",[952]],[[120622,120622],"mapped",[963]],[[120623,120623],"mapped",[964]],[[120624,120624],"mapped",[965]],[[120625,120625],"mapped",[966]],[[120626,120626],"mapped",[967]],[[120627,120627],"mapped",[968]],[[120628,120628],"mapped",[969]],[[120629,120629],"mapped",[8711]],[[120630,120630],"mapped",[945]],[[120631,120631],"mapped",[946]],[[120632,120632],"mapped",[947]],[[120633,120633],"mapped",[948]],[[120634,120634],"mapped",[949]],[[120635,120635],"mapped",[950]],[[120636,120636],"mapped",[951]],[[120637,120637],"mapped",[952]],[[120638,120638],"mapped",[953]],[[120639,120639],"mapped",[954]],[[120640,120640],"mapped",[955]],[[120641,120641],"mapped",[956]],[[120642,120642],"mapped",[957]],[[120643,120643],"mapped",[958]],[[120644,120644],"mapped",[959]],[[120645,120645],"mapped",[960]],[[120646,120646],"mapped",[961]],[[120647,120648],"mapped",[963]],[[120649,120649],"mapped",[964]],[[120650,120650],"mapped",[965]],[[120651,120651],"mapped",[966]],[[120652,120652],"mapped",[967]],[[120653,120653],"mapped",[968]],[[120654,120654],"mapped",[969]],[[120655,120655],"mapped",[8706]],[[120656,120656],"mapped",[949]],[[120657,120657],"mapped",[952]],[[120658,120658],"mapped",[954]],[[120659,120659],"mapped",[966]],[[120660,120660],"mapped",[961]],[[120661,120661],"mapped",[960]],[[120662,120662],"mapped",[945]],[[120663,120663],"mapped",[946]],[[120664,120664],"mapped",[947]],[[120665,120665],"mapped",[948]],[[120666,120666],"mapped",[949]],[[120667,120667],"mapped",[950]],[[120668,120668],"mapped",[951]],[[120669,120669],"mapped",[952]],[[120670,120670],"mapped",[953]],[[120671,120671],"mapped",[954]],[[120672,120672],"mapped",[955]],[[120673,120673],"mapped",[956]],[[120674,120674],"mapped",[957]],[[120675,120675],"mapped",[958]],[[120676,120676],"mapped",[959]],[[120677,120677],"mapped",[960]],[[120678,120678],"mapped",[961]],[[120679,120679],"mapped",[952]],[[120680,120680],"mapped",[963]],[[120681,120681],"mapped",[964]],[[120682,120682],"mapped",[965]],[[120683,120683],"mapped",[966]],[[120684,120684],"mapped",[967]],[[120685,120685],"mapped",[968]],[[120686,120686],"mapped",[969]],[[120687,120687],"mapped",[8711]],[[120688,120688],"mapped",[945]],[[120689,120689],"mapped",[946]],[[120690,120690],"mapped",[947]],[[120691,120691],"mapped",[948]],[[120692,120692],"mapped",[949]],[[120693,120693],"mapped",[950]],[[120694,120694],"mapped",[951]],[[120695,120695],"mapped",[952]],[[120696,120696],"mapped",[953]],[[120697,120697],"mapped",[954]],[[120698,120698],"mapped",[955]],[[120699,120699],"mapped",[956]],[[120700,120700],"mapped",[957]],[[120701,120701],"mapped",[958]],[[120702,120702],"mapped",[959]],[[120703,120703],"mapped",[960]],[[120704,120704],"mapped",[961]],[[120705,120706],"mapped",[963]],[[120707,120707],"mapped",[964]],[[120708,120708],"mapped",[965]],[[120709,120709],"mapped",[966]],[[120710,120710],"mapped",[967]],[[120711,120711],"mapped",[968]],[[120712,120712],"mapped",[969]],[[120713,120713],"mapped",[8706]],[[120714,120714],"mapped",[949]],[[120715,120715],"mapped",[952]],[[120716,120716],"mapped",[954]],[[120717,120717],"mapped",[966]],[[120718,120718],"mapped",[961]],[[120719,120719],"mapped",[960]],[[120720,120720],"mapped",[945]],[[120721,120721],"mapped",[946]],[[120722,120722],"mapped",[94
7]],[[120723,120723],"mapped",[948]],[[120724,120724],"mapped",[949]],[[120725,120725],"mapped",[950]],[[120726,120726],"mapped",[951]],[[120727,120727],"mapped",[952]],[[120728,120728],"mapped",[953]],[[120729,120729],"mapped",[954]],[[120730,120730],"mapped",[955]],[[120731,120731],"mapped",[956]],[[120732,120732],"mapped",[957]],[[120733,120733],"mapped",[958]],[[120734,120734],"mapped",[959]],[[120735,120735],"mapped",[960]],[[120736,120736],"mapped",[961]],[[120737,120737],"mapped",[952]],[[120738,120738],"mapped",[963]],[[120739,120739],"mapped",[964]],[[120740,120740],"mapped",[965]],[[120741,120741],"mapped",[966]],[[120742,120742],"mapped",[967]],[[120743,120743],"mapped",[968]],[[120744,120744],"mapped",[969]],[[120745,120745],"mapped",[8711]],[[120746,120746],"mapped",[945]],[[120747,120747],"mapped",[946]],[[120748,120748],"mapped",[947]],[[120749,120749],"mapped",[948]],[[120750,120750],"mapped",[949]],[[120751,120751],"mapped",[950]],[[120752,120752],"mapped",[951]],[[120753,120753],"mapped",[952]],[[120754,120754],"mapped",[953]],[[120755,120755],"mapped",[954]],[[120756,120756],"mapped",[955]],[[120757,120757],"mapped",[956]],[[120758,120758],"mapped",[957]],[[120759,120759],"mapped",[958]],[[120760,120760],"mapped",[959]],[[120761,120761],"mapped",[960]],[[120762,120762],"mapped",[961]],[[120763,120764],"mapped",[963]],[[120765,120765],"mapped",[964]],[[120766,120766],"mapped",[965]],[[120767,120767],"mapped",[966]],[[120768,120768],"mapped",[967]],[[120769,120769],"mapped",[968]],[[120770,120770],"mapped",[969]],[[120771,120771],"mapped",[8706]],[[120772,120772],"mapped",[949]],[[120773,120773],"mapped",[952]],[[120774,120774],"mapped",[954]],[[120775,120775],"mapped",[966]],[[120776,120776],"mapped",[961]],[[120777,120777],"mapped",[960]],[[120778,120779],"mapped",[989]],[[120780,120781],"disallowed"],[[120782,120782],"mapped",[48]],[[120783,120783],"mapped",[49]],[[120784,120784],"mapped",[50]],[[120785,120785],"mapped",[51]],[[120786,120786],"mapped",[52]],[[120787,120787],"mapped",[53]],[[120788,120788],"mapped",[54]],[[120789,120789],"mapped",[55]],[[120790,120790],"mapped",[56]],[[120791,120791],"mapped",[57]],[[120792,120792],"mapped",[48]],[[120793,120793],"mapped",[49]],[[120794,120794],"mapped",[50]],[[120795,120795],"mapped",[51]],[[120796,120796],"mapped",[52]],[[120797,120797],"mapped",[53]],[[120798,120798],"mapped",[54]],[[120799,120799],"mapped",[55]],[[120800,120800],"mapped",[56]],[[120801,120801],"mapped",[57]],[[120802,120802],"mapped",[48]],[[120803,120803],"mapped",[49]],[[120804,120804],"mapped",[50]],[[120805,120805],"mapped",[51]],[[120806,120806],"mapped",[52]],[[120807,120807],"mapped",[53]],[[120808,120808],"mapped",[54]],[[120809,120809],"mapped",[55]],[[120810,120810],"mapped",[56]],[[120811,120811],"mapped",[57]],[[120812,120812],"mapped",[48]],[[120813,120813],"mapped",[49]],[[120814,120814],"mapped",[50]],[[120815,120815],"mapped",[51]],[[120816,120816],"mapped",[52]],[[120817,120817],"mapped",[53]],[[120818,120818],"mapped",[54]],[[120819,120819],"mapped",[55]],[[120820,120820],"mapped",[56]],[[120821,120821],"mapped",[57]],[[120822,120822],"mapped",[48]],[[120823,120823],"mapped",[49]],[[120824,120824],"mapped",[50]],[[120825,120825],"mapped",[51]],[[120826,120826],"mapped",[52]],[[120827,120827],"mapped",[53]],[[120828,120828],"mapped",[54]],[[120829,120829],"mapped",[55]],[[120830,120830],"mapped",[56]],[[120831,120831],"mapped",[57]],[[120832,121343],"valid",[],"NV8"],[[121344,121398],"valid"],[[121399,121402],"valid",[],"NV8"],[[12140
3,121452],"valid"],[[121453,121460],"valid",[],"NV8"],[[121461,121461],"valid"],[[121462,121475],"valid",[],"NV8"],[[121476,121476],"valid"],[[121477,121483],"valid",[],"NV8"],[[121484,121498],"disallowed"],[[121499,121503],"valid"],[[121504,121504],"disallowed"],[[121505,121519],"valid"],[[121520,124927],"disallowed"],[[124928,125124],"valid"],[[125125,125126],"disallowed"],[[125127,125135],"valid",[],"NV8"],[[125136,125142],"valid"],[[125143,126463],"disallowed"],[[126464,126464],"mapped",[1575]],[[126465,126465],"mapped",[1576]],[[126466,126466],"mapped",[1580]],[[126467,126467],"mapped",[1583]],[[126468,126468],"disallowed"],[[126469,126469],"mapped",[1608]],[[126470,126470],"mapped",[1586]],[[126471,126471],"mapped",[1581]],[[126472,126472],"mapped",[1591]],[[126473,126473],"mapped",[1610]],[[126474,126474],"mapped",[1603]],[[126475,126475],"mapped",[1604]],[[126476,126476],"mapped",[1605]],[[126477,126477],"mapped",[1606]],[[126478,126478],"mapped",[1587]],[[126479,126479],"mapped",[1593]],[[126480,126480],"mapped",[1601]],[[126481,126481],"mapped",[1589]],[[126482,126482],"mapped",[1602]],[[126483,126483],"mapped",[1585]],[[126484,126484],"mapped",[1588]],[[126485,126485],"mapped",[1578]],[[126486,126486],"mapped",[1579]],[[126487,126487],"mapped",[1582]],[[126488,126488],"mapped",[1584]],[[126489,126489],"mapped",[1590]],[[126490,126490],"mapped",[1592]],[[126491,126491],"mapped",[1594]],[[126492,126492],"mapped",[1646]],[[126493,126493],"mapped",[1722]],[[126494,126494],"mapped",[1697]],[[126495,126495],"mapped",[1647]],[[126496,126496],"disallowed"],[[126497,126497],"mapped",[1576]],[[126498,126498],"mapped",[1580]],[[126499,126499],"disallowed"],[[126500,126500],"mapped",[1607]],[[126501,126502],"disallowed"],[[126503,126503],"mapped",[1581]],[[126504,126504],"disallowed"],[[126505,126505],"mapped",[1610]],[[126506,126506],"mapped",[1603]],[[126507,126507],"mapped",[1604]],[[126508,126508],"mapped",[1605]],[[126509,126509],"mapped",[1606]],[[126510,126510],"mapped",[1587]],[[126511,126511],"mapped",[1593]],[[126512,126512],"mapped",[1601]],[[126513,126513],"mapped",[1589]],[[126514,126514],"mapped",[1602]],[[126515,126515],"disallowed"],[[126516,126516],"mapped",[1588]],[[126517,126517],"mapped",[1578]],[[126518,126518],"mapped",[1579]],[[126519,126519],"mapped",[1582]],[[126520,126520],"disallowed"],[[126521,126521],"mapped",[1590]],[[126522,126522],"disallowed"],[[126523,126523],"mapped",[1594]],[[126524,126529],"disallowed"],[[126530,126530],"mapped",[1580]],[[126531,126534],"disallowed"],[[126535,126535],"mapped",[1581]],[[126536,126536],"disallowed"],[[126537,126537],"mapped",[1610]],[[126538,126538],"disallowed"],[[126539,126539],"mapped",[1604]],[[126540,126540],"disallowed"],[[126541,126541],"mapped",[1606]],[[126542,126542],"mapped",[1587]],[[126543,126543],"mapped",[1593]],[[126544,126544],"disallowed"],[[126545,126545],"mapped",[1589]],[[126546,126546],"mapped",[1602]],[[126547,126547],"disallowed"],[[126548,126548],"mapped",[1588]],[[126549,126550],"disallowed"],[[126551,126551],"mapped",[1582]],[[126552,126552],"disallowed"],[[126553,126553],"mapped",[1590]],[[126554,126554],"disallowed"],[[126555,126555],"mapped",[1594]],[[126556,126556],"disallowed"],[[126557,126557],"mapped",[1722]],[[126558,126558],"disallowed"],[[126559,126559],"mapped",[1647]],[[126560,126560],"disallowed"],[[126561,126561],"mapped",[1576]],[[126562,126562],"mapped",[1580]],[[126563,126563],"disallowed"],[[126564,126564],"mapped",[1607]],[[126565,126566],"disallowed"],[[126567,126567],"mapped",
[1581]],[[126568,126568],"mapped",[1591]],[[126569,126569],"mapped",[1610]],[[126570,126570],"mapped",[1603]],[[126571,126571],"disallowed"],[[126572,126572],"mapped",[1605]],[[126573,126573],"mapped",[1606]],[[126574,126574],"mapped",[1587]],[[126575,126575],"mapped",[1593]],[[126576,126576],"mapped",[1601]],[[126577,126577],"mapped",[1589]],[[126578,126578],"mapped",[1602]],[[126579,126579],"disallowed"],[[126580,126580],"mapped",[1588]],[[126581,126581],"mapped",[1578]],[[126582,126582],"mapped",[1579]],[[126583,126583],"mapped",[1582]],[[126584,126584],"disallowed"],[[126585,126585],"mapped",[1590]],[[126586,126586],"mapped",[1592]],[[126587,126587],"mapped",[1594]],[[126588,126588],"mapped",[1646]],[[126589,126589],"disallowed"],[[126590,126590],"mapped",[1697]],[[126591,126591],"disallowed"],[[126592,126592],"mapped",[1575]],[[126593,126593],"mapped",[1576]],[[126594,126594],"mapped",[1580]],[[126595,126595],"mapped",[1583]],[[126596,126596],"mapped",[1607]],[[126597,126597],"mapped",[1608]],[[126598,126598],"mapped",[1586]],[[126599,126599],"mapped",[1581]],[[126600,126600],"mapped",[1591]],[[126601,126601],"mapped",[1610]],[[126602,126602],"disallowed"],[[126603,126603],"mapped",[1604]],[[126604,126604],"mapped",[1605]],[[126605,126605],"mapped",[1606]],[[126606,126606],"mapped",[1587]],[[126607,126607],"mapped",[1593]],[[126608,126608],"mapped",[1601]],[[126609,126609],"mapped",[1589]],[[126610,126610],"mapped",[1602]],[[126611,126611],"mapped",[1585]],[[126612,126612],"mapped",[1588]],[[126613,126613],"mapped",[1578]],[[126614,126614],"mapped",[1579]],[[126615,126615],"mapped",[1582]],[[126616,126616],"mapped",[1584]],[[126617,126617],"mapped",[1590]],[[126618,126618],"mapped",[1592]],[[126619,126619],"mapped",[1594]],[[126620,126624],"disallowed"],[[126625,126625],"mapped",[1576]],[[126626,126626],"mapped",[1580]],[[126627,126627],"mapped",[1583]],[[126628,126628],"disallowed"],[[126629,126629],"mapped",[1608]],[[126630,126630],"mapped",[1586]],[[126631,126631],"mapped",[1581]],[[126632,126632],"mapped",[1591]],[[126633,126633],"mapped",[1610]],[[126634,126634],"disallowed"],[[126635,126635],"mapped",[1604]],[[126636,126636],"mapped",[1605]],[[126637,126637],"mapped",[1606]],[[126638,126638],"mapped",[1587]],[[126639,126639],"mapped",[1593]],[[126640,126640],"mapped",[1601]],[[126641,126641],"mapped",[1589]],[[126642,126642],"mapped",[1602]],[[126643,126643],"mapped",[1585]],[[126644,126644],"mapped",[1588]],[[126645,126645],"mapped",[1578]],[[126646,126646],"mapped",[1579]],[[126647,126647],"mapped",[1582]],[[126648,126648],"mapped",[1584]],[[126649,126649],"mapped",[1590]],[[126650,126650],"mapped",[1592]],[[126651,126651],"mapped",[1594]],[[126652,126703],"disallowed"],[[126704,126705],"valid",[],"NV8"],[[126706,126975],"disallowed"],[[126976,127019],"valid",[],"NV8"],[[127020,127023],"disallowed"],[[127024,127123],"valid",[],"NV8"],[[127124,127135],"disallowed"],[[127136,127150],"valid",[],"NV8"],[[127151,127152],"disallowed"],[[127153,127166],"valid",[],"NV8"],[[127167,127167],"valid",[],"NV8"],[[127168,127168],"disallowed"],[[127169,127183],"valid",[],"NV8"],[[127184,127184],"disallowed"],[[127185,127199],"valid",[],"NV8"],[[127200,127221],"valid",[],"NV8"],[[127222,127231],"disallowed"],[[127232,127232],"disallowed"],[[127233,127233],"disallowed_STD3_mapped",[48,44]],[[127234,127234],"disallowed_STD3_mapped",[49,44]],[[127235,127235],"disallowed_STD3_mapped",[50,44]],[[127236,127236],"disallowed_STD3_mapped",[51,44]],[[127237,127237],"disallowed_STD3_mapped",[52,44]],[[127
238,127238],"disallowed_STD3_mapped",[53,44]],[[127239,127239],"disallowed_STD3_mapped",[54,44]],[[127240,127240],"disallowed_STD3_mapped",[55,44]],[[127241,127241],"disallowed_STD3_mapped",[56,44]],[[127242,127242],"disallowed_STD3_mapped",[57,44]],[[127243,127244],"valid",[],"NV8"],[[127245,127247],"disallowed"],[[127248,127248],"disallowed_STD3_mapped",[40,97,41]],[[127249,127249],"disallowed_STD3_mapped",[40,98,41]],[[127250,127250],"disallowed_STD3_mapped",[40,99,41]],[[127251,127251],"disallowed_STD3_mapped",[40,100,41]],[[127252,127252],"disallowed_STD3_mapped",[40,101,41]],[[127253,127253],"disallowed_STD3_mapped",[40,102,41]],[[127254,127254],"disallowed_STD3_mapped",[40,103,41]],[[127255,127255],"disallowed_STD3_mapped",[40,104,41]],[[127256,127256],"disallowed_STD3_mapped",[40,105,41]],[[127257,127257],"disallowed_STD3_mapped",[40,106,41]],[[127258,127258],"disallowed_STD3_mapped",[40,107,41]],[[127259,127259],"disallowed_STD3_mapped",[40,108,41]],[[127260,127260],"disallowed_STD3_mapped",[40,109,41]],[[127261,127261],"disallowed_STD3_mapped",[40,110,41]],[[127262,127262],"disallowed_STD3_mapped",[40,111,41]],[[127263,127263],"disallowed_STD3_mapped",[40,112,41]],[[127264,127264],"disallowed_STD3_mapped",[40,113,41]],[[127265,127265],"disallowed_STD3_mapped",[40,114,41]],[[127266,127266],"disallowed_STD3_mapped",[40,115,41]],[[127267,127267],"disallowed_STD3_mapped",[40,116,41]],[[127268,127268],"disallowed_STD3_mapped",[40,117,41]],[[127269,127269],"disallowed_STD3_mapped",[40,118,41]],[[127270,127270],"disallowed_STD3_mapped",[40,119,41]],[[127271,127271],"disallowed_STD3_mapped",[40,120,41]],[[127272,127272],"disallowed_STD3_mapped",[40,121,41]],[[127273,127273],"disallowed_STD3_mapped",[40,122,41]],[[127274,127274],"mapped",[12308,115,12309]],[[127275,127275],"mapped",[99]],[[127276,127276],"mapped",[114]],[[127277,127277],"mapped",[99,100]],[[127278,127278],"mapped",[119,122]],[[127279,127279],"disallowed"],[[127280,127280],"mapped",[97]],[[127281,127281],"mapped",[98]],[[127282,127282],"mapped",[99]],[[127283,127283],"mapped",[100]],[[127284,127284],"mapped",[101]],[[127285,127285],"mapped",[102]],[[127286,127286],"mapped",[103]],[[127287,127287],"mapped",[104]],[[127288,127288],"mapped",[105]],[[127289,127289],"mapped",[106]],[[127290,127290],"mapped",[107]],[[127291,127291],"mapped",[108]],[[127292,127292],"mapped",[109]],[[127293,127293],"mapped",[110]],[[127294,127294],"mapped",[111]],[[127295,127295],"mapped",[112]],[[127296,127296],"mapped",[113]],[[127297,127297],"mapped",[114]],[[127298,127298],"mapped",[115]],[[127299,127299],"mapped",[116]],[[127300,127300],"mapped",[117]],[[127301,127301],"mapped",[118]],[[127302,127302],"mapped",[119]],[[127303,127303],"mapped",[120]],[[127304,127304],"mapped",[121]],[[127305,127305],"mapped",[122]],[[127306,127306],"mapped",[104,118]],[[127307,127307],"mapped",[109,118]],[[127308,127308],"mapped",[115,100]],[[127309,127309],"mapped",[115,115]],[[127310,127310],"mapped",[112,112,118]],[[127311,127311],"mapped",[119,99]],[[127312,127318],"valid",[],"NV8"],[[127319,127319],"valid",[],"NV8"],[[127320,127326],"valid",[],"NV8"],[[127327,127327],"valid",[],"NV8"],[[127328,127337],"valid",[],"NV8"],[[127338,127338],"mapped",[109,99]],[[127339,127339],"mapped",[109,100]],[[127340,127343],"disallowed"],[[127344,127352],"valid",[],"NV8"],[[127353,127353],"valid",[],"NV8"],[[127354,127354],"valid",[],"NV8"],[[127355,127356],"valid",[],"NV8"],[[127357,127358],"valid",[],"NV8"],[[127359,127359],"valid",[],"NV8"],[[127360,127369],"valid",[],"
NV8"],[[127370,127373],"valid",[],"NV8"],[[127374,127375],"valid",[],"NV8"],[[127376,127376],"mapped",[100,106]],[[127377,127386],"valid",[],"NV8"],[[127387,127461],"disallowed"],[[127462,127487],"valid",[],"NV8"],[[127488,127488],"mapped",[12411,12363]],[[127489,127489],"mapped",[12467,12467]],[[127490,127490],"mapped",[12469]],[[127491,127503],"disallowed"],[[127504,127504],"mapped",[25163]],[[127505,127505],"mapped",[23383]],[[127506,127506],"mapped",[21452]],[[127507,127507],"mapped",[12487]],[[127508,127508],"mapped",[20108]],[[127509,127509],"mapped",[22810]],[[127510,127510],"mapped",[35299]],[[127511,127511],"mapped",[22825]],[[127512,127512],"mapped",[20132]],[[127513,127513],"mapped",[26144]],[[127514,127514],"mapped",[28961]],[[127515,127515],"mapped",[26009]],[[127516,127516],"mapped",[21069]],[[127517,127517],"mapped",[24460]],[[127518,127518],"mapped",[20877]],[[127519,127519],"mapped",[26032]],[[127520,127520],"mapped",[21021]],[[127521,127521],"mapped",[32066]],[[127522,127522],"mapped",[29983]],[[127523,127523],"mapped",[36009]],[[127524,127524],"mapped",[22768]],[[127525,127525],"mapped",[21561]],[[127526,127526],"mapped",[28436]],[[127527,127527],"mapped",[25237]],[[127528,127528],"mapped",[25429]],[[127529,127529],"mapped",[19968]],[[127530,127530],"mapped",[19977]],[[127531,127531],"mapped",[36938]],[[127532,127532],"mapped",[24038]],[[127533,127533],"mapped",[20013]],[[127534,127534],"mapped",[21491]],[[127535,127535],"mapped",[25351]],[[127536,127536],"mapped",[36208]],[[127537,127537],"mapped",[25171]],[[127538,127538],"mapped",[31105]],[[127539,127539],"mapped",[31354]],[[127540,127540],"mapped",[21512]],[[127541,127541],"mapped",[28288]],[[127542,127542],"mapped",[26377]],[[127543,127543],"mapped",[26376]],[[127544,127544],"mapped",[30003]],[[127545,127545],"mapped",[21106]],[[127546,127546],"mapped",[21942]],[[127547,127551],"disallowed"],[[127552,127552],"mapped",[12308,26412,12309]],[[127553,127553],"mapped",[12308,19977,12309]],[[127554,127554],"mapped",[12308,20108,12309]],[[127555,127555],"mapped",[12308,23433,12309]],[[127556,127556],"mapped",[12308,28857,12309]],[[127557,127557],"mapped",[12308,25171,12309]],[[127558,127558],"mapped",[12308,30423,12309]],[[127559,127559],"mapped",[12308,21213,12309]],[[127560,127560],"mapped",[12308,25943,12309]],[[127561,127567],"disallowed"],[[127568,127568],"mapped",[24471]],[[127569,127569],"mapped",[21487]],[[127570,127743],"disallowed"],[[127744,127776],"valid",[],"NV8"],[[127777,127788],"valid",[],"NV8"],[[127789,127791],"valid",[],"NV8"],[[127792,127797],"valid",[],"NV8"],[[127798,127798],"valid",[],"NV8"],[[127799,127868],"valid",[],"NV8"],[[127869,127869],"valid",[],"NV8"],[[127870,127871],"valid",[],"NV8"],[[127872,127891],"valid",[],"NV8"],[[127892,127903],"valid",[],"NV8"],[[127904,127940],"valid",[],"NV8"],[[127941,127941],"valid",[],"NV8"],[[127942,127946],"valid",[],"NV8"],[[127947,127950],"valid",[],"NV8"],[[127951,127955],"valid",[],"NV8"],[[127956,127967],"valid",[],"NV8"],[[127968,127984],"valid",[],"NV8"],[[127985,127991],"valid",[],"NV8"],[[127992,127999],"valid",[],"NV8"],[[128000,128062],"valid",[],"NV8"],[[128063,128063],"valid",[],"NV8"],[[128064,128064],"valid",[],"NV8"],[[128065,128065],"valid",[],"NV8"],[[128066,128247],"valid",[],"NV8"],[[128248,128248],"valid",[],"NV8"],[[128249,128252],"valid",[],"NV8"],[[128253,128254],"valid",[],"NV8"],[[128255,128255],"valid",[],"NV8"],[[128256,128317],"valid",[],"NV8"],[[128318,128319],"valid",[],"NV8"],[[128320,128323],"valid",[],"NV8"],[[128324,128330],
"valid",[],"NV8"],[[128331,128335],"valid",[],"NV8"],[[128336,128359],"valid",[],"NV8"],[[128360,128377],"valid",[],"NV8"],[[128378,128378],"disallowed"],[[128379,128419],"valid",[],"NV8"],[[128420,128420],"disallowed"],[[128421,128506],"valid",[],"NV8"],[[128507,128511],"valid",[],"NV8"],[[128512,128512],"valid",[],"NV8"],[[128513,128528],"valid",[],"NV8"],[[128529,128529],"valid",[],"NV8"],[[128530,128532],"valid",[],"NV8"],[[128533,128533],"valid",[],"NV8"],[[128534,128534],"valid",[],"NV8"],[[128535,128535],"valid",[],"NV8"],[[128536,128536],"valid",[],"NV8"],[[128537,128537],"valid",[],"NV8"],[[128538,128538],"valid",[],"NV8"],[[128539,128539],"valid",[],"NV8"],[[128540,128542],"valid",[],"NV8"],[[128543,128543],"valid",[],"NV8"],[[128544,128549],"valid",[],"NV8"],[[128550,128551],"valid",[],"NV8"],[[128552,128555],"valid",[],"NV8"],[[128556,128556],"valid",[],"NV8"],[[128557,128557],"valid",[],"NV8"],[[128558,128559],"valid",[],"NV8"],[[128560,128563],"valid",[],"NV8"],[[128564,128564],"valid",[],"NV8"],[[128565,128576],"valid",[],"NV8"],[[128577,128578],"valid",[],"NV8"],[[128579,128580],"valid",[],"NV8"],[[128581,128591],"valid",[],"NV8"],[[128592,128639],"valid",[],"NV8"],[[128640,128709],"valid",[],"NV8"],[[128710,128719],"valid",[],"NV8"],[[128720,128720],"valid",[],"NV8"],[[128721,128735],"disallowed"],[[128736,128748],"valid",[],"NV8"],[[128749,128751],"disallowed"],[[128752,128755],"valid",[],"NV8"],[[128756,128767],"disallowed"],[[128768,128883],"valid",[],"NV8"],[[128884,128895],"disallowed"],[[128896,128980],"valid",[],"NV8"],[[128981,129023],"disallowed"],[[129024,129035],"valid",[],"NV8"],[[129036,129039],"disallowed"],[[129040,129095],"valid",[],"NV8"],[[129096,129103],"disallowed"],[[129104,129113],"valid",[],"NV8"],[[129114,129119],"disallowed"],[[129120,129159],"valid",[],"NV8"],[[129160,129167],"disallowed"],[[129168,129197],"valid",[],"NV8"],[[129198,129295],"disallowed"],[[129296,129304],"valid",[],"NV8"],[[129305,129407],"disallowed"],[[129408,129412],"valid",[],"NV8"],[[129413,129471],"disallowed"],[[129472,129472],"valid",[],"NV8"],[[129473,131069],"disallowed"],[[131070,131071],"disallowed"],[[131072,173782],"valid"],[[173783,173823],"disallowed"],[[173824,177972],"valid"],[[177973,177983],"disallowed"],[[177984,178205],"valid"],[[178206,178207],"disallowed"],[[178208,183969],"valid"],[[183970,194559],"disallowed"],[[194560,194560],"mapped",[20029]],[[194561,194561],"mapped",[20024]],[[194562,194562],"mapped",[20033]],[[194563,194563],"mapped",[131362]],[[194564,194564],"mapped",[20320]],[[194565,194565],"mapped",[20398]],[[194566,194566],"mapped",[20411]],[[194567,194567],"mapped",[20482]],[[194568,194568],"mapped",[20602]],[[194569,194569],"mapped",[20633]],[[194570,194570],"mapped",[20711]],[[194571,194571],"mapped",[20687]],[[194572,194572],"mapped",[13470]],[[194573,194573],"mapped",[132666]],[[194574,194574],"mapped",[20813]],[[194575,194575],"mapped",[20820]],[[194576,194576],"mapped",[20836]],[[194577,194577],"mapped",[20855]],[[194578,194578],"mapped",[132380]],[[194579,194579],"mapped",[13497]],[[194580,194580],"mapped",[20839]],[[194581,194581],"mapped",[20877]],[[194582,194582],"mapped",[132427]],[[194583,194583],"mapped",[20887]],[[194584,194584],"mapped",[20900]],[[194585,194585],"mapped",[20172]],[[194586,194586],"mapped",[20908]],[[194587,194587],"mapped",[20917]],[[194588,194588],"mapped",[168415]],[[194589,194589],"mapped",[20981]],[[194590,194590],"mapped",[20995]],[[194591,194591],"mapped",[13535]],[[194592,194592],"mapped",[21051]],[[194593
,194593],"mapped",[21062]],[[194594,194594],"mapped",[21106]],[[194595,194595],"mapped",[21111]],[[194596,194596],"mapped",[13589]],[[194597,194597],"mapped",[21191]],[[194598,194598],"mapped",[21193]],[[194599,194599],"mapped",[21220]],[[194600,194600],"mapped",[21242]],[[194601,194601],"mapped",[21253]],[[194602,194602],"mapped",[21254]],[[194603,194603],"mapped",[21271]],[[194604,194604],"mapped",[21321]],[[194605,194605],"mapped",[21329]],[[194606,194606],"mapped",[21338]],[[194607,194607],"mapped",[21363]],[[194608,194608],"mapped",[21373]],[[194609,194611],"mapped",[21375]],[[194612,194612],"mapped",[133676]],[[194613,194613],"mapped",[28784]],[[194614,194614],"mapped",[21450]],[[194615,194615],"mapped",[21471]],[[194616,194616],"mapped",[133987]],[[194617,194617],"mapped",[21483]],[[194618,194618],"mapped",[21489]],[[194619,194619],"mapped",[21510]],[[194620,194620],"mapped",[21662]],[[194621,194621],"mapped",[21560]],[[194622,194622],"mapped",[21576]],[[194623,194623],"mapped",[21608]],[[194624,194624],"mapped",[21666]],[[194625,194625],"mapped",[21750]],[[194626,194626],"mapped",[21776]],[[194627,194627],"mapped",[21843]],[[194628,194628],"mapped",[21859]],[[194629,194630],"mapped",[21892]],[[194631,194631],"mapped",[21913]],[[194632,194632],"mapped",[21931]],[[194633,194633],"mapped",[21939]],[[194634,194634],"mapped",[21954]],[[194635,194635],"mapped",[22294]],[[194636,194636],"mapped",[22022]],[[194637,194637],"mapped",[22295]],[[194638,194638],"mapped",[22097]],[[194639,194639],"mapped",[22132]],[[194640,194640],"mapped",[20999]],[[194641,194641],"mapped",[22766]],[[194642,194642],"mapped",[22478]],[[194643,194643],"mapped",[22516]],[[194644,194644],"mapped",[22541]],[[194645,194645],"mapped",[22411]],[[194646,194646],"mapped",[22578]],[[194647,194647],"mapped",[22577]],[[194648,194648],"mapped",[22700]],[[194649,194649],"mapped",[136420]],[[194650,194650],"mapped",[22770]],[[194651,194651],"mapped",[22775]],[[194652,194652],"mapped",[22790]],[[194653,194653],"mapped",[22810]],[[194654,194654],"mapped",[22818]],[[194655,194655],"mapped",[22882]],[[194656,194656],"mapped",[136872]],[[194657,194657],"mapped",[136938]],[[194658,194658],"mapped",[23020]],[[194659,194659],"mapped",[23067]],[[194660,194660],"mapped",[23079]],[[194661,194661],"mapped",[23000]],[[194662,194662],"mapped",[23142]],[[194663,194663],"mapped",[14062]],[[194664,194664],"disallowed"],[[194665,194665],"mapped",[23304]],[[194666,194667],"mapped",[23358]],[[194668,194668],"mapped",[137672]],[[194669,194669],"mapped",[23491]],[[194670,194670],"mapped",[23512]],[[194671,194671],"mapped",[23527]],[[194672,194672],"mapped",[23539]],[[194673,194673],"mapped",[138008]],[[194674,194674],"mapped",[23551]],[[194675,194675],"mapped",[23558]],[[194676,194676],"disallowed"],[[194677,194677],"mapped",[23586]],[[194678,194678],"mapped",[14209]],[[194679,194679],"mapped",[23648]],[[194680,194680],"mapped",[23662]],[[194681,194681],"mapped",[23744]],[[194682,194682],"mapped",[23693]],[[194683,194683],"mapped",[138724]],[[194684,194684],"mapped",[23875]],[[194685,194685],"mapped",[138726]],[[194686,194686],"mapped",[23918]],[[194687,194687],"mapped",[23915]],[[194688,194688],"mapped",[23932]],[[194689,194689],"mapped",[24033]],[[194690,194690],"mapped",[24034]],[[194691,194691],"mapped",[14383]],[[194692,194692],"mapped",[24061]],[[194693,194693],"mapped",[24104]],[[194694,194694],"mapped",[24125]],[[194695,194695],"mapped",[24169]],[[194696,194696],"mapped",[14434]],[[194697,194697],"mapped",[139651]],[[194698,194698],"mapped",
[14460]],[[194699,194699],"mapped",[24240]],[[194700,194700],"mapped",[24243]],[[194701,194701],"mapped",[24246]],[[194702,194702],"mapped",[24266]],[[194703,194703],"mapped",[172946]],[[194704,194704],"mapped",[24318]],[[194705,194706],"mapped",[140081]],[[194707,194707],"mapped",[33281]],[[194708,194709],"mapped",[24354]],[[194710,194710],"mapped",[14535]],[[194711,194711],"mapped",[144056]],[[194712,194712],"mapped",[156122]],[[194713,194713],"mapped",[24418]],[[194714,194714],"mapped",[24427]],[[194715,194715],"mapped",[14563]],[[194716,194716],"mapped",[24474]],[[194717,194717],"mapped",[24525]],[[194718,194718],"mapped",[24535]],[[194719,194719],"mapped",[24569]],[[194720,194720],"mapped",[24705]],[[194721,194721],"mapped",[14650]],[[194722,194722],"mapped",[14620]],[[194723,194723],"mapped",[24724]],[[194724,194724],"mapped",[141012]],[[194725,194725],"mapped",[24775]],[[194726,194726],"mapped",[24904]],[[194727,194727],"mapped",[24908]],[[194728,194728],"mapped",[24910]],[[194729,194729],"mapped",[24908]],[[194730,194730],"mapped",[24954]],[[194731,194731],"mapped",[24974]],[[194732,194732],"mapped",[25010]],[[194733,194733],"mapped",[24996]],[[194734,194734],"mapped",[25007]],[[194735,194735],"mapped",[25054]],[[194736,194736],"mapped",[25074]],[[194737,194737],"mapped",[25078]],[[194738,194738],"mapped",[25104]],[[194739,194739],"mapped",[25115]],[[194740,194740],"mapped",[25181]],[[194741,194741],"mapped",[25265]],[[194742,194742],"mapped",[25300]],[[194743,194743],"mapped",[25424]],[[194744,194744],"mapped",[142092]],[[194745,194745],"mapped",[25405]],[[194746,194746],"mapped",[25340]],[[194747,194747],"mapped",[25448]],[[194748,194748],"mapped",[25475]],[[194749,194749],"mapped",[25572]],[[194750,194750],"mapped",[142321]],[[194751,194751],"mapped",[25634]],[[194752,194752],"mapped",[25541]],[[194753,194753],"mapped",[25513]],[[194754,194754],"mapped",[14894]],[[194755,194755],"mapped",[25705]],[[194756,194756],"mapped",[25726]],[[194757,194757],"mapped",[25757]],[[194758,194758],"mapped",[25719]],[[194759,194759],"mapped",[14956]],[[194760,194760],"mapped",[25935]],[[194761,194761],"mapped",[25964]],[[194762,194762],"mapped",[143370]],[[194763,194763],"mapped",[26083]],[[194764,194764],"mapped",[26360]],[[194765,194765],"mapped",[26185]],[[194766,194766],"mapped",[15129]],[[194767,194767],"mapped",[26257]],[[194768,194768],"mapped",[15112]],[[194769,194769],"mapped",[15076]],[[194770,194770],"mapped",[20882]],[[194771,194771],"mapped",[20885]],[[194772,194772],"mapped",[26368]],[[194773,194773],"mapped",[26268]],[[194774,194774],"mapped",[32941]],[[194775,194775],"mapped",[17369]],[[194776,194776],"mapped",[26391]],[[194777,194777],"mapped",[26395]],[[194778,194778],"mapped",[26401]],[[194779,194779],"mapped",[26462]],[[194780,194780],"mapped",[26451]],[[194781,194781],"mapped",[144323]],[[194782,194782],"mapped",[15177]],[[194783,194783],"mapped",[26618]],[[194784,194784],"mapped",[26501]],[[194785,194785],"mapped",[26706]],[[194786,194786],"mapped",[26757]],[[194787,194787],"mapped",[144493]],[[194788,194788],"mapped",[26766]],[[194789,194789],"mapped",[26655]],[[194790,194790],"mapped",[26900]],[[194791,194791],"mapped",[15261]],[[194792,194792],"mapped",[26946]],[[194793,194793],"mapped",[27043]],[[194794,194794],"mapped",[27114]],[[194795,194795],"mapped",[27304]],[[194796,194796],"mapped",[145059]],[[194797,194797],"mapped",[27355]],[[194798,194798],"mapped",[15384]],[[194799,194799],"mapped",[27425]],[[194800,194800],"mapped",[145575]],[[194801,194801],"mapped",[27476]]
,[[194802,194802],"mapped",[15438]],[[194803,194803],"mapped",[27506]],[[194804,194804],"mapped",[27551]],[[194805,194805],"mapped",[27578]],[[194806,194806],"mapped",[27579]],[[194807,194807],"mapped",[146061]],[[194808,194808],"mapped",[138507]],[[194809,194809],"mapped",[146170]],[[194810,194810],"mapped",[27726]],[[194811,194811],"mapped",[146620]],[[194812,194812],"mapped",[27839]],[[194813,194813],"mapped",[27853]],[[194814,194814],"mapped",[27751]],[[194815,194815],"mapped",[27926]],[[194816,194816],"mapped",[27966]],[[194817,194817],"mapped",[28023]],[[194818,194818],"mapped",[27969]],[[194819,194819],"mapped",[28009]],[[194820,194820],"mapped",[28024]],[[194821,194821],"mapped",[28037]],[[194822,194822],"mapped",[146718]],[[194823,194823],"mapped",[27956]],[[194824,194824],"mapped",[28207]],[[194825,194825],"mapped",[28270]],[[194826,194826],"mapped",[15667]],[[194827,194827],"mapped",[28363]],[[194828,194828],"mapped",[28359]],[[194829,194829],"mapped",[147153]],[[194830,194830],"mapped",[28153]],[[194831,194831],"mapped",[28526]],[[194832,194832],"mapped",[147294]],[[194833,194833],"mapped",[147342]],[[194834,194834],"mapped",[28614]],[[194835,194835],"mapped",[28729]],[[194836,194836],"mapped",[28702]],[[194837,194837],"mapped",[28699]],[[194838,194838],"mapped",[15766]],[[194839,194839],"mapped",[28746]],[[194840,194840],"mapped",[28797]],[[194841,194841],"mapped",[28791]],[[194842,194842],"mapped",[28845]],[[194843,194843],"mapped",[132389]],[[194844,194844],"mapped",[28997]],[[194845,194845],"mapped",[148067]],[[194846,194846],"mapped",[29084]],[[194847,194847],"disallowed"],[[194848,194848],"mapped",[29224]],[[194849,194849],"mapped",[29237]],[[194850,194850],"mapped",[29264]],[[194851,194851],"mapped",[149000]],[[194852,194852],"mapped",[29312]],[[194853,194853],"mapped",[29333]],[[194854,194854],"mapped",[149301]],[[194855,194855],"mapped",[149524]],[[194856,194856],"mapped",[29562]],[[194857,194857],"mapped",[29579]],[[194858,194858],"mapped",[16044]],[[194859,194859],"mapped",[29605]],[[194860,194861],"mapped",[16056]],[[194862,194862],"mapped",[29767]],[[194863,194863],"mapped",[29788]],[[194864,194864],"mapped",[29809]],[[194865,194865],"mapped",[29829]],[[194866,194866],"mapped",[29898]],[[194867,194867],"mapped",[16155]],[[194868,194868],"mapped",[29988]],[[194869,194869],"mapped",[150582]],[[194870,194870],"mapped",[30014]],[[194871,194871],"mapped",[150674]],[[194872,194872],"mapped",[30064]],[[194873,194873],"mapped",[139679]],[[194874,194874],"mapped",[30224]],[[194875,194875],"mapped",[151457]],[[194876,194876],"mapped",[151480]],[[194877,194877],"mapped",[151620]],[[194878,194878],"mapped",[16380]],[[194879,194879],"mapped",[16392]],[[194880,194880],"mapped",[30452]],[[194881,194881],"mapped",[151795]],[[194882,194882],"mapped",[151794]],[[194883,194883],"mapped",[151833]],[[194884,194884],"mapped",[151859]],[[194885,194885],"mapped",[30494]],[[194886,194887],"mapped",[30495]],[[194888,194888],"mapped",[30538]],[[194889,194889],"mapped",[16441]],[[194890,194890],"mapped",[30603]],[[194891,194891],"mapped",[16454]],[[194892,194892],"mapped",[16534]],[[194893,194893],"mapped",[152605]],[[194894,194894],"mapped",[30798]],[[194895,194895],"mapped",[30860]],[[194896,194896],"mapped",[30924]],[[194897,194897],"mapped",[16611]],[[194898,194898],"mapped",[153126]],[[194899,194899],"mapped",[31062]],[[194900,194900],"mapped",[153242]],[[194901,194901],"mapped",[153285]],[[194902,194902],"mapped",[31119]],[[194903,194903],"mapped",[31211]],[[194904,194904],"mapped",[1668
7]],[[194905,194905],"mapped",[31296]],[[194906,194906],"mapped",[31306]],[[194907,194907],"mapped",[31311]],[[194908,194908],"mapped",[153980]],[[194909,194910],"mapped",[154279]],[[194911,194911],"disallowed"],[[194912,194912],"mapped",[16898]],[[194913,194913],"mapped",[154539]],[[194914,194914],"mapped",[31686]],[[194915,194915],"mapped",[31689]],[[194916,194916],"mapped",[16935]],[[194917,194917],"mapped",[154752]],[[194918,194918],"mapped",[31954]],[[194919,194919],"mapped",[17056]],[[194920,194920],"mapped",[31976]],[[194921,194921],"mapped",[31971]],[[194922,194922],"mapped",[32000]],[[194923,194923],"mapped",[155526]],[[194924,194924],"mapped",[32099]],[[194925,194925],"mapped",[17153]],[[194926,194926],"mapped",[32199]],[[194927,194927],"mapped",[32258]],[[194928,194928],"mapped",[32325]],[[194929,194929],"mapped",[17204]],[[194930,194930],"mapped",[156200]],[[194931,194931],"mapped",[156231]],[[194932,194932],"mapped",[17241]],[[194933,194933],"mapped",[156377]],[[194934,194934],"mapped",[32634]],[[194935,194935],"mapped",[156478]],[[194936,194936],"mapped",[32661]],[[194937,194937],"mapped",[32762]],[[194938,194938],"mapped",[32773]],[[194939,194939],"mapped",[156890]],[[194940,194940],"mapped",[156963]],[[194941,194941],"mapped",[32864]],[[194942,194942],"mapped",[157096]],[[194943,194943],"mapped",[32880]],[[194944,194944],"mapped",[144223]],[[194945,194945],"mapped",[17365]],[[194946,194946],"mapped",[32946]],[[194947,194947],"mapped",[33027]],[[194948,194948],"mapped",[17419]],[[194949,194949],"mapped",[33086]],[[194950,194950],"mapped",[23221]],[[194951,194951],"mapped",[157607]],[[194952,194952],"mapped",[157621]],[[194953,194953],"mapped",[144275]],[[194954,194954],"mapped",[144284]],[[194955,194955],"mapped",[33281]],[[194956,194956],"mapped",[33284]],[[194957,194957],"mapped",[36766]],[[194958,194958],"mapped",[17515]],[[194959,194959],"mapped",[33425]],[[194960,194960],"mapped",[33419]],[[194961,194961],"mapped",[33437]],[[194962,194962],"mapped",[21171]],[[194963,194963],"mapped",[33457]],[[194964,194964],"mapped",[33459]],[[194965,194965],"mapped",[33469]],[[194966,194966],"mapped",[33510]],[[194967,194967],"mapped",[158524]],[[194968,194968],"mapped",[33509]],[[194969,194969],"mapped",[33565]],[[194970,194970],"mapped",[33635]],[[194971,194971],"mapped",[33709]],[[194972,194972],"mapped",[33571]],[[194973,194973],"mapped",[33725]],[[194974,194974],"mapped",[33767]],[[194975,194975],"mapped",[33879]],[[194976,194976],"mapped",[33619]],[[194977,194977],"mapped",[33738]],[[194978,194978],"mapped",[33740]],[[194979,194979],"mapped",[33756]],[[194980,194980],"mapped",[158774]],[[194981,194981],"mapped",[159083]],[[194982,194982],"mapped",[158933]],[[194983,194983],"mapped",[17707]],[[194984,194984],"mapped",[34033]],[[194985,194985],"mapped",[34035]],[[194986,194986],"mapped",[34070]],[[194987,194987],"mapped",[160714]],[[194988,194988],"mapped",[34148]],[[194989,194989],"mapped",[159532]],[[194990,194990],"mapped",[17757]],[[194991,194991],"mapped",[17761]],[[194992,194992],"mapped",[159665]],[[194993,194993],"mapped",[159954]],[[194994,194994],"mapped",[17771]],[[194995,194995],"mapped",[34384]],[[194996,194996],"mapped",[34396]],[[194997,194997],"mapped",[34407]],[[194998,194998],"mapped",[34409]],[[194999,194999],"mapped",[34473]],[[195000,195000],"mapped",[34440]],[[195001,195001],"mapped",[34574]],[[195002,195002],"mapped",[34530]],[[195003,195003],"mapped",[34681]],[[195004,195004],"mapped",[34600]],[[195005,195005],"mapped",[34667]],[[195006,195006],"mapped",[346
94]],[[195007,195007],"disallowed"],[[195008,195008],"mapped",[34785]],[[195009,195009],"mapped",[34817]],[[195010,195010],"mapped",[17913]],[[195011,195011],"mapped",[34912]],[[195012,195012],"mapped",[34915]],[[195013,195013],"mapped",[161383]],[[195014,195014],"mapped",[35031]],[[195015,195015],"mapped",[35038]],[[195016,195016],"mapped",[17973]],[[195017,195017],"mapped",[35066]],[[195018,195018],"mapped",[13499]],[[195019,195019],"mapped",[161966]],[[195020,195020],"mapped",[162150]],[[195021,195021],"mapped",[18110]],[[195022,195022],"mapped",[18119]],[[195023,195023],"mapped",[35488]],[[195024,195024],"mapped",[35565]],[[195025,195025],"mapped",[35722]],[[195026,195026],"mapped",[35925]],[[195027,195027],"mapped",[162984]],[[195028,195028],"mapped",[36011]],[[195029,195029],"mapped",[36033]],[[195030,195030],"mapped",[36123]],[[195031,195031],"mapped",[36215]],[[195032,195032],"mapped",[163631]],[[195033,195033],"mapped",[133124]],[[195034,195034],"mapped",[36299]],[[195035,195035],"mapped",[36284]],[[195036,195036],"mapped",[36336]],[[195037,195037],"mapped",[133342]],[[195038,195038],"mapped",[36564]],[[195039,195039],"mapped",[36664]],[[195040,195040],"mapped",[165330]],[[195041,195041],"mapped",[165357]],[[195042,195042],"mapped",[37012]],[[195043,195043],"mapped",[37105]],[[195044,195044],"mapped",[37137]],[[195045,195045],"mapped",[165678]],[[195046,195046],"mapped",[37147]],[[195047,195047],"mapped",[37432]],[[195048,195048],"mapped",[37591]],[[195049,195049],"mapped",[37592]],[[195050,195050],"mapped",[37500]],[[195051,195051],"mapped",[37881]],[[195052,195052],"mapped",[37909]],[[195053,195053],"mapped",[166906]],[[195054,195054],"mapped",[38283]],[[195055,195055],"mapped",[18837]],[[195056,195056],"mapped",[38327]],[[195057,195057],"mapped",[167287]],[[195058,195058],"mapped",[18918]],[[195059,195059],"mapped",[38595]],[[195060,195060],"mapped",[23986]],[[195061,195061],"mapped",[38691]],[[195062,195062],"mapped",[168261]],[[195063,195063],"mapped",[168474]],[[195064,195064],"mapped",[19054]],[[195065,195065],"mapped",[19062]],[[195066,195066],"mapped",[38880]],[[195067,195067],"mapped",[168970]],[[195068,195068],"mapped",[19122]],[[195069,195069],"mapped",[169110]],[[195070,195071],"mapped",[38923]],[[195072,195072],"mapped",[38953]],[[195073,195073],"mapped",[169398]],[[195074,195074],"mapped",[39138]],[[195075,195075],"mapped",[19251]],[[195076,195076],"mapped",[39209]],[[195077,195077],"mapped",[39335]],[[195078,195078],"mapped",[39362]],[[195079,195079],"mapped",[39422]],[[195080,195080],"mapped",[19406]],[[195081,195081],"mapped",[170800]],[[195082,195082],"mapped",[39698]],[[195083,195083],"mapped",[40000]],[[195084,195084],"mapped",[40189]],[[195085,195085],"mapped",[19662]],[[195086,195086],"mapped",[19693]],[[195087,195087],"mapped",[40295]],[[195088,195088],"mapped",[172238]],[[195089,195089],"mapped",[19704]],[[195090,195090],"mapped",[172293]],[[195091,195091],"mapped",[172558]],[[195092,195092],"mapped",[172689]],[[195093,195093],"mapped",[40635]],[[195094,195094],"mapped",[19798]],[[195095,195095],"mapped",[40697]],[[195096,195096],"mapped",[40702]],[[195097,195097],"mapped",[40709]],[[195098,195098],"mapped",[40719]],[[195099,195099],"mapped",[40726]],[[195100,195100],"mapped",[40763]],[[195101,195101],"mapped",[173568]],[[195102,196605],"disallowed"],[[196606,196607],"disallowed"],[[196608,262141],"disallowed"],[[262142,262143],"disallowed"],[[262144,327677],"disallowed"],[[327678,327679],"disallowed"],[[327680,393213],"disallowed"],[[393214,393215],"disall
owed"],[[393216,458749],"disallowed"],[[458750,458751],"disallowed"],[[458752,524285],"disallowed"],[[524286,524287],"disallowed"],[[524288,589821],"disallowed"],[[589822,589823],"disallowed"],[[589824,655357],"disallowed"],[[655358,655359],"disallowed"],[[655360,720893],"disallowed"],[[720894,720895],"disallowed"],[[720896,786429],"disallowed"],[[786430,786431],"disallowed"],[[786432,851965],"disallowed"],[[851966,851967],"disallowed"],[[851968,917501],"disallowed"],[[917502,917503],"disallowed"],[[917504,917504],"disallowed"],[[917505,917505],"disallowed"],[[917506,917535],"disallowed"],[[917536,917631],"disallowed"],[[917632,917759],"disallowed"],[[917760,917999],"ignored"],[[918000,983037],"disallowed"],[[983038,983039],"disallowed"],[[983040,1048573],"disallowed"],[[1048574,1048575],"disallowed"],[[1048576,1114109],"disallowed"],[[1114110,1114111],"disallowed"]]'); - -/***/ }), - -/***/ 2357: -/***/ ((module) => { - -"use strict"; -module.exports = require("assert"); - -/***/ }), - -/***/ 7619: -/***/ ((module) => { - -"use strict"; -module.exports = require("constants"); - -/***/ }), - -/***/ 8614: -/***/ ((module) => { - -"use strict"; -module.exports = require("events"); - -/***/ }), - -/***/ 5747: -/***/ ((module) => { - -"use strict"; -module.exports = require("fs"); - -/***/ }), - -/***/ 8605: -/***/ ((module) => { - -"use strict"; -module.exports = require("http"); - -/***/ }), - -/***/ 7211: -/***/ ((module) => { - -"use strict"; -module.exports = require("https"); - -/***/ }), - -/***/ 1631: -/***/ ((module) => { - -"use strict"; -module.exports = require("net"); - -/***/ }), - -/***/ 2087: -/***/ ((module) => { - -"use strict"; -module.exports = require("os"); - -/***/ }), - -/***/ 5622: -/***/ ((module) => { - -"use strict"; -module.exports = require("path"); - -/***/ }), - -/***/ 4213: -/***/ ((module) => { - -"use strict"; -module.exports = require("punycode"); - -/***/ }), - -/***/ 2413: -/***/ ((module) => { - -"use strict"; -module.exports = require("stream"); - -/***/ }), - -/***/ 4016: -/***/ ((module) => { - -"use strict"; -module.exports = require("tls"); - -/***/ }), - -/***/ 8835: -/***/ ((module) => { - -"use strict"; -module.exports = require("url"); - -/***/ }), - -/***/ 1669: -/***/ ((module) => { - -"use strict"; -module.exports = require("util"); - -/***/ }), - -/***/ 8761: -/***/ ((module) => { - -"use strict"; -module.exports = require("zlib"); - -/***/ }) - -/******/ }); -/************************************************************************/ -/******/ // The module cache -/******/ var __webpack_module_cache__ = {}; -/******/ -/******/ // The require function -/******/ function __nccwpck_require__(moduleId) { -/******/ // Check if module is in cache -/******/ var cachedModule = __webpack_module_cache__[moduleId]; -/******/ if (cachedModule !== undefined) { -/******/ return cachedModule.exports; -/******/ } -/******/ // Create a new module (and put it into the cache) -/******/ var module = __webpack_module_cache__[moduleId] = { -/******/ // no module.id needed -/******/ // no module.loaded needed -/******/ exports: {} -/******/ }; -/******/ -/******/ // Execute the module function -/******/ var threw = true; -/******/ try { -/******/ __webpack_modules__[moduleId].call(module.exports, module, module.exports, __nccwpck_require__); -/******/ threw = false; -/******/ } finally { -/******/ if(threw) delete __webpack_module_cache__[moduleId]; -/******/ } -/******/ -/******/ // Return the exports of the module -/******/ return module.exports; -/******/ } 
-/******/ -/************************************************************************/ -/******/ /* webpack/runtime/compat */ -/******/ -/******/ if (typeof __nccwpck_require__ !== 'undefined') __nccwpck_require__.ab = __dirname + "/"; -/******/ -/************************************************************************/ -/******/ -/******/ // startup -/******/ // Load entry module and return exports -/******/ // This entry module is referenced by other modules so it can't be inlined -/******/ var __webpack_exports__ = __nccwpck_require__(3109); -/******/ module.exports = __webpack_exports__; -/******/ -/******/ })() -; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/.github/actions/verify-chart-version/dist/index.js.map b/.github/actions/verify-chart-version/dist/index.js.map deleted file mode 100644 index 1f69fee4..00000000 --- a/.github/actions/verify-chart-version/dist/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","sources":["../webpack://verify-chart-version-bump/./lib/main.js","../webpack://verify-chart-version-bump/./node_modules/@actions/core/lib/command.js","../webpack://verify-chart-version-bump/./node_modules/@actions/core/lib/core.js","../webpack://verify-chart-version-bump/./node_modules/@actions/core/lib/file-command.js","../webpack://verify-chart-version-bump/./node_modules/@actions/core/lib/oidc-utils.js","../webpack://verify-chart-version-bump/./node_modules/@actions/core/lib/path-utils.js","../webpack://verify-chart-version-bump/./node_modules/@actions/core/lib/summary.js","../webpack://verify-chart-version-bump/./node_modules/@actions/core/lib/utils.js","../webpack://verify-chart-version-bump/./node_modules/@actions/github/lib/context.js","../webpack://verify-chart-version-bump/./node_modules/@actions/github/lib/github.js","../webpack://verify-chart-version-bump/./node_modules/@actions/github/lib/internal/utils.js","../webpack://verify-chart-version-bump/./node_modules/@actions/github/lib/utils.js","../webpack://verify-chart-version-bump/./node_modules/@actions/http-client/lib/auth.js","../webpack://verify-chart-version-bump/./node_modules/@actions/http-client/lib/index.js","../webpack://verify-chart-version-bump/./node_modules/@actions/http-client/lib/proxy.js","../webpack://verify-chart-version-bump/./node_modules/@octokit/auth-token/dist-node/index.js","../webpack://verify-chart-version-bump/./node_modules/@octokit/core/dist-node/index.js","../webpack://verify-chart-version-bump/./node_modules/@octokit/endpoint/dist-node/index.js","../webpack://verify-chart-version-bump/./node_modules/@octokit/graphql/dist-node/index.js","../webpack://verify-chart-version-bump/./node_modules/@octokit/plugin-paginate-rest/dist-node/index.js","../webpack://verify-chart-version-bump/./node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js","../webpack://verify-chart-version-bump/./node_modules/@octokit/request-error/dist-node/index.js","../webpack://verify-chart-version-bump/./node_modules/@octokit/request/dist-node/index.js","../webpack://verify-chart-version-bump/./node_modules/before-after-hook/index.js","../webpack://verify-chart-version-bump/./node_modules/before-after-hook/lib/add.js","../webpack://verify-chart-version-bump/./node_modules/before-after-hook/lib/register.js","../webpack://verify-chart-version-bump/./node_modules/before-after-hook/lib/remove.js","../webpack://verify-chart-version-bump/./node_modules/deprecation/dist-node/index.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/copy
/copy-sync.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/copy/copy.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/copy/index.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/empty/index.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/ensure/file.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/ensure/index.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/ensure/link.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/ensure/symlink-paths.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/ensure/symlink-type.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/ensure/symlink.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/fs/index.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/index.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/json/index.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/json/jsonfile.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/json/output-json-sync.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/json/output-json.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/mkdirs/index.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/mkdirs/make-dir.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/mkdirs/utils.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/move/index.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/move/move-sync.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/move/move.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/output-file/index.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/path-exists/index.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/remove/index.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/remove/rimraf.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/util/stat.js","../webpack://verify-chart-version-bump/./node_modules/fs-extra/lib/util/utimes.js","../webpack://verify-chart-version-bump/./node_modules/graceful-fs/clone.js","../webpack://verify-chart-version-bump/./node_modules/graceful-fs/graceful-fs.js","../webpack://verify-chart-version-bump/./node_modules/graceful-fs/legacy-streams.js","../webpack://verify-chart-version-bump/./node_modules/graceful-fs/polyfills.js","../webpack://verify-chart-version-bump/./node_modules/is-plain-object/dist/is-plain-object.js","../webpack://verify-chart-version-bump/./node_modules/jsonfile/index.js","../webpack://verify-chart-version-bump/./node_modules/jsonfile/utils.js","../webpack://verify-chart-version-bump/./node_modules/lru-cache/index.js","../webpack://verify-chart-version-bump/./node_modules/node-fetch/lib/index.js","../webpack://verify-chart-version-bump/./node_modules/once/once.js","../webpack://verify-chart-version-bump/./node_modules/semver/classes/comparator.js","../webpack://verify-chart-version-bump/./node_modules/semver/classes/range.js","../webpack://verify-chart-version-bump/./node_modules/semver/classes/semver.js","../webpack://verify-chart-version-bump/./node_modules/semver/functions/clean.js","../webpack://verify-chart-version-bump/./node_modules/semver/functions/cmp.js","../webpack://verify-char
t-version-bump/./node_modules/semver/functions/coerce.js","../webpack://verify-chart-version-bump/./node_modules/semver/functions/compare-build.js","../webpack://verify-chart-version-bump/./node_modules/semver/functions/compare-loose.js","../webpack://verify-chart-version-bump/./node_modules/semver/functions/compare.js","../webpack://verify-chart-version-bump/./node_modules/semver/functions/diff.js","../webpack://verify-chart-version-bump/./node_modules/semver/functions/eq.js","../webpack://verify-chart-version-bump/./node_modules/semver/functions/gt.js","../webpack://verify-chart-version-bump/./node_modules/semver/functions/gte.js","../webpack://verify-chart-version-bump/./node_modules/semver/functions/inc.js","../webpack://verify-chart-version-bump/./node_modules/semver/functions/lt.js","../webpack://verify-chart-version-bump/./node_modules/semver/functions/lte.js","../webpack://verify-chart-version-bump/./node_modules/semver/functions/major.js","../webpack://verify-chart-version-bump/./node_modules/semver/functions/minor.js","../webpack://verify-chart-version-bump/./node_modules/semver/functions/neq.js","../webpack://verify-chart-version-bump/./node_modules/semver/functions/parse.js","../webpack://verify-chart-version-bump/./node_modules/semver/functions/patch.js","../webpack://verify-chart-version-bump/./node_modules/semver/functions/prerelease.js","../webpack://verify-chart-version-bump/./node_modules/semver/functions/rcompare.js","../webpack://verify-chart-version-bump/./node_modules/semver/functions/rsort.js","../webpack://verify-chart-version-bump/./node_modules/semver/functions/satisfies.js","../webpack://verify-chart-version-bump/./node_modules/semver/functions/sort.js","../webpack://verify-chart-version-bump/./node_modules/semver/functions/valid.js","../webpack://verify-chart-version-bump/./node_modules/semver/index.js","../webpack://verify-chart-version-bump/./node_modules/semver/internal/constants.js","../webpack://verify-chart-version-bump/./node_modules/semver/internal/debug.js","../webpack://verify-chart-version-bump/./node_modules/semver/internal/identifiers.js","../webpack://verify-chart-version-bump/./node_modules/semver/internal/parse-options.js","../webpack://verify-chart-version-bump/./node_modules/semver/internal/re.js","../webpack://verify-chart-version-bump/./node_modules/semver/ranges/gtr.js","../webpack://verify-chart-version-bump/./node_modules/semver/ranges/intersects.js","../webpack://verify-chart-version-bump/./node_modules/semver/ranges/ltr.js","../webpack://verify-chart-version-bump/./node_modules/semver/ranges/max-satisfying.js","../webpack://verify-chart-version-bump/./node_modules/semver/ranges/min-satisfying.js","../webpack://verify-chart-version-bump/./node_modules/semver/ranges/min-version.js","../webpack://verify-chart-version-bump/./node_modules/semver/ranges/outside.js","../webpack://verify-chart-version-bump/./node_modules/semver/ranges/simplify.js","../webpack://verify-chart-version-bump/./node_modules/semver/ranges/subset.js","../webpack://verify-chart-version-bump/./node_modules/semver/ranges/to-comparators.js","../webpack://verify-chart-version-bump/./node_modules/semver/ranges/valid.js","../webpack://verify-chart-version-bump/./node_modules/tr46/index.js","../webpack://verify-chart-version-bump/./node_modules/tunnel/index.js","../webpack://verify-chart-version-bump/./node_modules/tunnel/lib/tunnel.js","../webpack://verify-chart-version-bump/./node_modules/universal-user-agent/dist-node/index.js","../webpack://verify-chart-version-bump/./node_m
odules/universalify/index.js","../webpack://verify-chart-version-bump/./node_modules/webidl-conversions/lib/index.js","../webpack://verify-chart-version-bump/./node_modules/whatwg-url/lib/URL-impl.js","../webpack://verify-chart-version-bump/./node_modules/whatwg-url/lib/URL.js","../webpack://verify-chart-version-bump/./node_modules/whatwg-url/lib/public-api.js","../webpack://verify-chart-version-bump/./node_modules/whatwg-url/lib/url-state-machine.js","../webpack://verify-chart-version-bump/./node_modules/whatwg-url/lib/utils.js","../webpack://verify-chart-version-bump/./node_modules/wrappy/wrappy.js","../webpack://verify-chart-version-bump/./node_modules/yallist/iterator.js","../webpack://verify-chart-version-bump/./node_modules/yallist/yallist.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/compose-collection.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/compose-doc.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/compose-node.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/compose-scalar.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/composer.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/resolve-block-map.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/resolve-block-scalar.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/resolve-block-seq.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/resolve-end.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/resolve-flow-collection.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/resolve-flow-scalar.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/resolve-props.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/util-contains-newline.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/util-empty-scalar-position.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/util-flow-indent-check.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/compose/util-map-includes.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/doc/Document.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/doc/anchors.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/doc/applyReviver.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/doc/createNode.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/doc/directives.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/errors.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/index.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/log.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/nodes/Alias.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/nodes/Collection.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/nodes/Node.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/nodes/Pair.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/nodes/Scalar.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/nodes/YAMLMap.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/nodes/YAMLSeq.js","../webpack://verify-chart-version-bump/./node_modules/yaml/di
st/nodes/addPairToJSMap.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/nodes/toJS.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/parse/cst-scalar.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/parse/cst-stringify.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/parse/cst-visit.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/parse/cst.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/parse/lexer.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/parse/line-counter.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/parse/parser.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/public-api.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/Schema.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/common/map.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/common/null.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/common/seq.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/common/string.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/core/bool.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/core/float.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/core/int.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/core/schema.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/json/schema.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/tags.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/yaml-1.1/binary.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/yaml-1.1/bool.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/yaml-1.1/float.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/yaml-1.1/int.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/yaml-1.1/omap.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/yaml-1.1/pairs.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/yaml-1.1/schema.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/yaml-1.1/set.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/schema/yaml-1.1/timestamp.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/stringify/foldFlowLines.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/stringify/stringify.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/stringify/stringifyCollection.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/stringify/stringifyComment.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/stringify/stringifyDocument.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/stringify/stringifyNumber.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/stringify/stringifyPair.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/stringify/stringifyString.js","../webpack://verify-chart-version-bump/./node_modules/yaml/dist/visit.js","../webpack://verify-chart-version-bump/./node_modules/@vercel/ncc/dist/ncc/@@notfound.js","../webpack://verify-chart-version-bump/external 
\"assert\"","../webpack://verify-chart-version-bump/external \"constants\"","../webpack://verify-chart-version-bump/external \"events\"","../webpack://verify-chart-version-bump/external \"fs\"","../webpack://verify-chart-version-bump/external \"http\"","../webpack://verify-chart-version-bump/external \"https\"","../webpack://verify-chart-version-bump/external \"net\"","../webpack://verify-chart-version-bump/external \"os\"","../webpack://verify-chart-version-bump/external \"path\"","../webpack://verify-chart-version-bump/external \"punycode\"","../webpack://verify-chart-version-bump/external \"stream\"","../webpack://verify-chart-version-bump/external \"tls\"","../webpack://verify-chart-version-bump/external \"url\"","../webpack://verify-chart-version-bump/external \"util\"","../webpack://verify-chart-version-bump/external \"zlib\"","../webpack://verify-chart-version-bump/webpack/bootstrap","../webpack://verify-chart-version-bump/webpack/runtime/compat","../webpack://verify-chart-version-bump/webpack/startup"],"sourcesContent":["\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst core = __importStar(require(\"@actions/core\"));\nconst github = __importStar(require(\"@actions/github\"));\nconst YAML = __importStar(require(\"yaml\"));\nconst semver = __importStar(require(\"semver\"));\nconst fs = __importStar(require(\"fs-extra\"));\nfunction getErrorMessage(error) {\n if (error instanceof Error)\n return error.message;\n return String(error);\n}\nfunction run() {\n var _a;\n return __awaiter(this, void 0, void 0, function* () {\n try {\n if (github.context.eventName !== \"pull_request\") {\n core.setFailed(\"This action can only run on pull requests!\");\n return;\n }\n const githubToken = core.getInput(\"token\");\n const chart = core.getInput(\"chart\", { required: true });\n const base = core.getInput(\"base\", { required: false });\n const chartYamlPath = `${chart}/Chart.yaml`;\n const defaultBranch = (_a = github.context.payload.repository) === null || _a === void 0 ? void 0 : _a.default_branch;\n const octokit = github.getOctokit(githubToken);\n if (!(yield fs.pathExists(chartYamlPath))) {\n core.setFailed(`${chart} is not a valid Helm chart folder!`);\n return;\n }\n if (base) {\n try {\n yield octokit.rest.git.getRef({\n owner: github.context.repo.owner,\n repo: github.context.repo.repo,\n ref: base,\n });\n }\n catch (error) {\n core.setFailed(`Ref ${base} was not found for this repository!`);\n return;\n }\n }\n var originalChartYamlFile;\n var originalChartVersion;\n try {\n originalChartYamlFile = yield octokit.rest.repos.getContent({\n owner: github.context.repo.owner,\n repo: github.context.repo.repo,\n path: `${chartYamlPath}`,\n ref: base || `heads/${defaultBranch}`,\n });\n }\n catch (error) {\n core.warning(`Could not find original Chart.yaml for ${chart}, assuming this is a new chart.`);\n }\n if (originalChartYamlFile && \"content\" in originalChartYamlFile.data) {\n const originalChartYamlContent = Buffer.from(originalChartYamlFile.data.content, \"base64\").toString(\"utf-8\");\n const originalChartYaml = yield YAML.parse(originalChartYamlContent);\n originalChartVersion = originalChartYaml.version;\n }\n const updatedChartYamlContent = yield fs.readFile(chartYamlPath, \"utf8\");\n const updatedChartYaml = yield YAML.parse(updatedChartYamlContent);\n if (!updatedChartYaml.version) {\n core.setFailed(`${chartYamlPath} does not contain a version!`);\n return;\n }\n const updatedChartVersion = updatedChartYaml.version;\n if (!semver.valid(updatedChartVersion)) {\n core.setFailed(`${updatedChartVersion} is not a valid SemVer version!`);\n return;\n }\n if (originalChartVersion) {\n if (updatedChartVersion == originalChartVersion) {\n core.setFailed(`Chart version has not been updated!`);\n return;\n }\n if (!semver.gt(updatedChartVersion, originalChartVersion)) {\n core.setFailed(`Updated chart version ${updatedChartVersion} is < ${originalChartVersion}!`);\n return;\n }\n core.info(`Old chart version: ${originalChartVersion}`);\n }\n core.info(`New chart version: ${updatedChartVersion}`);\n core.info(`New chart version verified succesfully.`);\n }\n catch (error) {\n core.setFailed(getErrorMessage(error));\n }\n });\n}\nrun();\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.issue = exports.issueCommand = void 0;\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\n/**\n * Commands\n *\n * Command Format:\n * ::name key=value,key=value::message\n *\n * Examples:\n * ::warning::This is the message\n * ::set-env name=MY_VAR::some value\n */\nfunction issueCommand(command, properties, message) {\n const cmd = new Command(command, properties, message);\n process.stdout.write(cmd.toString() + os.EOL);\n}\nexports.issueCommand = issueCommand;\nfunction issue(name, message = '') {\n issueCommand(name, {}, message);\n}\nexports.issue = issue;\nconst CMD_STRING = '::';\nclass Command {\n constructor(command, properties, message) {\n if (!command) {\n command = 'missing.command';\n }\n this.command = command;\n this.properties = properties;\n this.message = message;\n }\n toString() {\n let cmdStr = CMD_STRING + this.command;\n if (this.properties && Object.keys(this.properties).length > 0) {\n cmdStr += ' ';\n let first = true;\n for (const key in this.properties) {\n if (this.properties.hasOwnProperty(key)) {\n const val = this.properties[key];\n if (val) {\n if (first) {\n first = false;\n }\n else {\n cmdStr += ',';\n }\n cmdStr += `${key}=${escapeProperty(val)}`;\n }\n }\n }\n }\n cmdStr += `${CMD_STRING}${escapeData(this.message)}`;\n return cmdStr;\n }\n}\nfunction escapeData(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A');\n}\nfunction escapeProperty(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A')\n .replace(/:/g, '%3A')\n .replace(/,/g, '%2C');\n}\n//# sourceMappingURL=command.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;\nconst command_1 = require(\"./command\");\nconst file_command_1 = require(\"./file-command\");\nconst utils_1 = require(\"./utils\");\nconst os = __importStar(require(\"os\"));\nconst path = __importStar(require(\"path\"));\nconst oidc_utils_1 = require(\"./oidc-utils\");\n/**\n * The code to exit an action\n */\nvar ExitCode;\n(function (ExitCode) {\n /**\n * A code indicating that the action was successful\n */\n ExitCode[ExitCode[\"Success\"] = 0] = \"Success\";\n /**\n * A code indicating that the action was a failure\n */\n ExitCode[ExitCode[\"Failure\"] = 1] = \"Failure\";\n})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));\n//-----------------------------------------------------------------------\n// Variables\n//-----------------------------------------------------------------------\n/**\n * Sets env variable for this action and future actions in the job\n * @param name the name of the variable to set\n * @param val the value of the variable. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction exportVariable(name, val) {\n const convertedVal = utils_1.toCommandValue(val);\n process.env[name] = convertedVal;\n const filePath = process.env['GITHUB_ENV'] || '';\n if (filePath) {\n const delimiter = '_GitHubActionsFileCommandDelimeter_';\n const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`;\n file_command_1.issueCommand('ENV', commandValue);\n }\n else {\n command_1.issueCommand('set-env', { name }, convertedVal);\n }\n}\nexports.exportVariable = exportVariable;\n/**\n * Registers a secret which will get masked from logs\n * @param secret value of the secret\n */\nfunction setSecret(secret) {\n command_1.issueCommand('add-mask', {}, secret);\n}\nexports.setSecret = setSecret;\n/**\n * Prepends inputPath to the PATH (for this action and future actions)\n * @param inputPath\n */\nfunction addPath(inputPath) {\n const filePath = process.env['GITHUB_PATH'] || '';\n if (filePath) {\n file_command_1.issueCommand('PATH', inputPath);\n }\n else {\n command_1.issueCommand('add-path', {}, inputPath);\n }\n process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;\n}\nexports.addPath = addPath;\n/**\n * Gets the value of an input.\n * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed.\n * Returns an empty string if the value is not defined.\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns string\n */\nfunction getInput(name, options) {\n const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';\n if (options && options.required && !val) {\n throw new Error(`Input required and not supplied: ${name}`);\n }\n if (options && options.trimWhitespace === false) {\n return val;\n }\n return val.trim();\n}\nexports.getInput = getInput;\n/**\n * Gets the values of an multiline input. Each value is also trimmed.\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns string[]\n *\n */\nfunction getMultilineInput(name, options) {\n const inputs = getInput(name, options)\n .split('\\n')\n .filter(x => x !== '');\n return inputs;\n}\nexports.getMultilineInput = getMultilineInput;\n/**\n * Gets the input value of the boolean type in the YAML 1.2 \"core schema\" specification.\n * Support boolean input list: `true | True | TRUE | false | False | FALSE` .\n * The return value is also in boolean type.\n * ref: https://yaml.org/spec/1.2/spec.html#id2804923\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns boolean\n */\nfunction getBooleanInput(name, options) {\n const trueValue = ['true', 'True', 'TRUE'];\n const falseValue = ['false', 'False', 'FALSE'];\n const val = getInput(name, options);\n if (trueValue.includes(val))\n return true;\n if (falseValue.includes(val))\n return false;\n throw new TypeError(`Input does not meet YAML 1.2 \"Core Schema\" specification: ${name}\\n` +\n `Support boolean input list: \\`true | True | TRUE | false | False | FALSE\\``);\n}\nexports.getBooleanInput = getBooleanInput;\n/**\n * Sets the value of an output.\n *\n * @param name name of the output to set\n * @param value value to store. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction setOutput(name, value) {\n process.stdout.write(os.EOL);\n command_1.issueCommand('set-output', { name }, value);\n}\nexports.setOutput = setOutput;\n/**\n * Enables or disables the echoing of commands into stdout for the rest of the step.\n * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.\n *\n */\nfunction setCommandEcho(enabled) {\n command_1.issue('echo', enabled ? 'on' : 'off');\n}\nexports.setCommandEcho = setCommandEcho;\n//-----------------------------------------------------------------------\n// Results\n//-----------------------------------------------------------------------\n/**\n * Sets the action status to failed.\n * When the action exits it will be with an exit code of 1\n * @param message add error issue message\n */\nfunction setFailed(message) {\n process.exitCode = ExitCode.Failure;\n error(message);\n}\nexports.setFailed = setFailed;\n//-----------------------------------------------------------------------\n// Logging Commands\n//-----------------------------------------------------------------------\n/**\n * Gets whether Actions Step Debug is on or not\n */\nfunction isDebug() {\n return process.env['RUNNER_DEBUG'] === '1';\n}\nexports.isDebug = isDebug;\n/**\n * Writes debug message to user log\n * @param message debug message\n */\nfunction debug(message) {\n command_1.issueCommand('debug', {}, message);\n}\nexports.debug = debug;\n/**\n * Adds an error issue\n * @param message error issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction error(message, properties = {}) {\n command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);\n}\nexports.error = error;\n/**\n * Adds a warning issue\n * @param message warning issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction warning(message, properties = {}) {\n command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);\n}\nexports.warning = warning;\n/**\n * Adds a notice issue\n * @param message notice issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction notice(message, properties = {}) {\n command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? 
message.toString() : message);\n}\nexports.notice = notice;\n/**\n * Writes info to log with console.log.\n * @param message info message\n */\nfunction info(message) {\n process.stdout.write(message + os.EOL);\n}\nexports.info = info;\n/**\n * Begin an output group.\n *\n * Output until the next `groupEnd` will be foldable in this group\n *\n * @param name The name of the output group\n */\nfunction startGroup(name) {\n command_1.issue('group', name);\n}\nexports.startGroup = startGroup;\n/**\n * End an output group.\n */\nfunction endGroup() {\n command_1.issue('endgroup');\n}\nexports.endGroup = endGroup;\n/**\n * Wrap an asynchronous function call in a group.\n *\n * Returns the same type as the function itself.\n *\n * @param name The name of the group\n * @param fn The function to wrap in the group\n */\nfunction group(name, fn) {\n return __awaiter(this, void 0, void 0, function* () {\n startGroup(name);\n let result;\n try {\n result = yield fn();\n }\n finally {\n endGroup();\n }\n return result;\n });\n}\nexports.group = group;\n//-----------------------------------------------------------------------\n// Wrapper action state\n//-----------------------------------------------------------------------\n/**\n * Saves state for current action, the state can only be retrieved by this action's post job execution.\n *\n * @param name name of the state to store\n * @param value value to store. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction saveState(name, value) {\n command_1.issueCommand('save-state', { name }, value);\n}\nexports.saveState = saveState;\n/**\n * Gets the value of an state set by this action's main execution.\n *\n * @param name name of the state to get\n * @returns string\n */\nfunction getState(name) {\n return process.env[`STATE_${name}`] || '';\n}\nexports.getState = getState;\nfunction getIDToken(aud) {\n return __awaiter(this, void 0, void 0, function* () {\n return yield oidc_utils_1.OidcClient.getIDToken(aud);\n });\n}\nexports.getIDToken = getIDToken;\n/**\n * Summary exports\n */\nvar summary_1 = require(\"./summary\");\nObject.defineProperty(exports, \"summary\", { enumerable: true, get: function () { return summary_1.summary; } });\n/**\n * @deprecated use core.summary\n */\nvar summary_2 = require(\"./summary\");\nObject.defineProperty(exports, \"markdownSummary\", { enumerable: true, get: function () { return summary_2.markdownSummary; } });\n/**\n * Path exports\n */\nvar path_utils_1 = require(\"./path-utils\");\nObject.defineProperty(exports, \"toPosixPath\", { enumerable: true, get: function () { return path_utils_1.toPosixPath; } });\nObject.defineProperty(exports, \"toWin32Path\", { enumerable: true, get: function () { return path_utils_1.toWin32Path; } });\nObject.defineProperty(exports, \"toPlatformPath\", { enumerable: true, get: function () { return path_utils_1.toPlatformPath; } });\n//# sourceMappingURL=core.js.map","\"use strict\";\n// For internal use, subject to change.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.issueCommand = void 0;\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nconst fs = __importStar(require(\"fs\"));\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\nfunction issueCommand(command, message) {\n const filePath = process.env[`GITHUB_${command}`];\n if (!filePath) {\n throw new Error(`Unable to find environment variable for file command ${command}`);\n }\n if (!fs.existsSync(filePath)) {\n throw new Error(`Missing file at path: ${filePath}`);\n }\n fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {\n encoding: 'utf8'\n });\n}\nexports.issueCommand = issueCommand;\n//# sourceMappingURL=file-command.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.OidcClient = void 0;\nconst http_client_1 = require(\"@actions/http-client\");\nconst auth_1 = require(\"@actions/http-client/lib/auth\");\nconst core_1 = require(\"./core\");\nclass OidcClient {\n static createHttpClient(allowRetry = true, maxRetry = 10) {\n const requestOptions = {\n allowRetries: allowRetry,\n maxRetries: maxRetry\n };\n return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions);\n }\n static getRequestToken() {\n const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN'];\n if (!token) {\n throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable');\n }\n return token;\n }\n static getIDTokenUrl() {\n const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL'];\n if (!runtimeUrl) {\n throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable');\n }\n return runtimeUrl;\n }\n static getCall(id_token_url) {\n var _a;\n return __awaiter(this, void 0, void 0, function* () {\n const httpclient = OidcClient.createHttpClient();\n const res = yield httpclient\n .getJson(id_token_url)\n .catch(error => {\n throw new Error(`Failed to get ID Token. \\n \n Error Code : ${error.statusCode}\\n \n Error Message: ${error.result.message}`);\n });\n const id_token = (_a = res.result) === null || _a === void 0 ? 
void 0 : _a.value;\n if (!id_token) {\n throw new Error('Response json body do not have ID Token field');\n }\n return id_token;\n });\n }\n static getIDToken(audience) {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n // New ID Token is requested from action service\n let id_token_url = OidcClient.getIDTokenUrl();\n if (audience) {\n const encodedAudience = encodeURIComponent(audience);\n id_token_url = `${id_token_url}&audience=${encodedAudience}`;\n }\n core_1.debug(`ID token url is ${id_token_url}`);\n const id_token = yield OidcClient.getCall(id_token_url);\n core_1.setSecret(id_token);\n return id_token;\n }\n catch (error) {\n throw new Error(`Error message: ${error.message}`);\n }\n });\n }\n}\nexports.OidcClient = OidcClient;\n//# sourceMappingURL=oidc-utils.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0;\nconst path = __importStar(require(\"path\"));\n/**\n * toPosixPath converts the given path to the posix form. On Windows, \\\\ will be\n * replaced with /.\n *\n * @param pth. Path to transform.\n * @return string Posix path.\n */\nfunction toPosixPath(pth) {\n return pth.replace(/[\\\\]/g, '/');\n}\nexports.toPosixPath = toPosixPath;\n/**\n * toWin32Path converts the given path to the win32 form. On Linux, / will be\n * replaced with \\\\.\n *\n * @param pth. Path to transform.\n * @return string Win32 path.\n */\nfunction toWin32Path(pth) {\n return pth.replace(/[/]/g, '\\\\');\n}\nexports.toWin32Path = toWin32Path;\n/**\n * toPlatformPath converts the given path to a platform-specific path. It does\n * this by replacing instances of / and \\ with the platform-specific path\n * separator.\n *\n * @param pth The path to platformize.\n * @return string The platform-specific path.\n */\nfunction toPlatformPath(pth) {\n return pth.replace(/[/\\\\]/g, path.sep);\n}\nexports.toPlatformPath = toPlatformPath;\n//# sourceMappingURL=path-utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0;\nconst os_1 = require(\"os\");\nconst fs_1 = require(\"fs\");\nconst { access, appendFile, writeFile } = fs_1.promises;\nexports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY';\nexports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary';\nclass Summary {\n constructor() {\n this._buffer = '';\n }\n /**\n * Finds the summary file path from the environment, rejects if env var is not found or file does not exist\n * Also checks r/w permissions.\n *\n * @returns step summary file path\n */\n filePath() {\n return __awaiter(this, void 0, void 0, function* () {\n if (this._filePath) {\n return this._filePath;\n }\n const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR];\n if (!pathFromEnv) {\n throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);\n }\n try {\n yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);\n }\n catch (_a) {\n throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);\n }\n this._filePath = pathFromEnv;\n return this._filePath;\n });\n }\n /**\n * Wraps content in an HTML tag, adding any HTML attributes\n *\n * @param {string} tag HTML tag to wrap\n * @param {string | null} content content within the tag\n * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add\n *\n * @returns {string} content wrapped in HTML element\n */\n wrap(tag, content, attrs = {}) {\n const htmlAttrs = Object.entries(attrs)\n .map(([key, value]) => ` ${key}=\"${value}\"`)\n .join('');\n if (!content) {\n return `<${tag}${htmlAttrs}>`;\n }\n return `<${tag}${htmlAttrs}>${content}`;\n }\n /**\n * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default.\n *\n * @param {SummaryWriteOptions} [options] (optional) options for write operation\n *\n * @returns {Promise} summary instance\n */\n write(options) {\n return __awaiter(this, void 0, void 0, function* () {\n const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite);\n const filePath = yield this.filePath();\n const writeFunc = overwrite ? 
writeFile : appendFile;\n yield writeFunc(filePath, this._buffer, { encoding: 'utf8' });\n return this.emptyBuffer();\n });\n }\n /**\n * Clears the summary buffer and wipes the summary file\n *\n * @returns {Summary} summary instance\n */\n clear() {\n return __awaiter(this, void 0, void 0, function* () {\n return this.emptyBuffer().write({ overwrite: true });\n });\n }\n /**\n * Returns the current summary buffer as a string\n *\n * @returns {string} string of summary buffer\n */\n stringify() {\n return this._buffer;\n }\n /**\n * If the summary buffer is empty\n *\n * @returns {boolen} true if the buffer is empty\n */\n isEmptyBuffer() {\n return this._buffer.length === 0;\n }\n /**\n * Resets the summary buffer without writing to summary file\n *\n * @returns {Summary} summary instance\n */\n emptyBuffer() {\n this._buffer = '';\n return this;\n }\n /**\n * Adds raw text to the summary buffer\n *\n * @param {string} text content to add\n * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false)\n *\n * @returns {Summary} summary instance\n */\n addRaw(text, addEOL = false) {\n this._buffer += text;\n return addEOL ? this.addEOL() : this;\n }\n /**\n * Adds the operating system-specific end-of-line marker to the buffer\n *\n * @returns {Summary} summary instance\n */\n addEOL() {\n return this.addRaw(os_1.EOL);\n }\n /**\n * Adds an HTML codeblock to the summary buffer\n *\n * @param {string} code content to render within fenced code block\n * @param {string} lang (optional) language to syntax highlight code\n *\n * @returns {Summary} summary instance\n */\n addCodeBlock(code, lang) {\n const attrs = Object.assign({}, (lang && { lang }));\n const element = this.wrap('pre', this.wrap('code', code), attrs);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML list to the summary buffer\n *\n * @param {string[]} items list of items to render\n * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false)\n *\n * @returns {Summary} summary instance\n */\n addList(items, ordered = false) {\n const tag = ordered ? 'ol' : 'ul';\n const listItems = items.map(item => this.wrap('li', item)).join('');\n const element = this.wrap(tag, listItems);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML table to the summary buffer\n *\n * @param {SummaryTableCell[]} rows table rows\n *\n * @returns {Summary} summary instance\n */\n addTable(rows) {\n const tableBody = rows\n .map(row => {\n const cells = row\n .map(cell => {\n if (typeof cell === 'string') {\n return this.wrap('td', cell);\n }\n const { header, data, colspan, rowspan } = cell;\n const tag = header ? 
'th' : 'td';\n const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan }));\n return this.wrap(tag, data, attrs);\n })\n .join('');\n return this.wrap('tr', cells);\n })\n .join('');\n const element = this.wrap('table', tableBody);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds a collapsable HTML details element to the summary buffer\n *\n * @param {string} label text for the closed state\n * @param {string} content collapsable content\n *\n * @returns {Summary} summary instance\n */\n addDetails(label, content) {\n const element = this.wrap('details', this.wrap('summary', label) + content);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML image tag to the summary buffer\n *\n * @param {string} src path to the image you to embed\n * @param {string} alt text description of the image\n * @param {SummaryImageOptions} options (optional) addition image attributes\n *\n * @returns {Summary} summary instance\n */\n addImage(src, alt, options) {\n const { width, height } = options || {};\n const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height }));\n const element = this.wrap('img', null, Object.assign({ src, alt }, attrs));\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML section heading element\n *\n * @param {string} text heading text\n * @param {number | string} [level=1] (optional) the heading level, default: 1\n *\n * @returns {Summary} summary instance\n */\n addHeading(text, level) {\n const tag = `h${level}`;\n const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag)\n ? tag\n : 'h1';\n const element = this.wrap(allowedTag, text);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML thematic break (
<hr>) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addSeparator() {\n const element = this.wrap('hr', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML line break (<br>
) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addBreak() {\n const element = this.wrap('br', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML blockquote to the summary buffer\n *\n * @param {string} text quote text\n * @param {string} cite (optional) citation url\n *\n * @returns {Summary} summary instance\n */\n addQuote(text, cite) {\n const attrs = Object.assign({}, (cite && { cite }));\n const element = this.wrap('blockquote', text, attrs);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML anchor tag to the summary buffer\n *\n * @param {string} text link text/content\n * @param {string} href hyperlink\n *\n * @returns {Summary} summary instance\n */\n addLink(text, href) {\n const element = this.wrap('a', text, { href });\n return this.addRaw(element).addEOL();\n }\n}\nconst _summary = new Summary();\n/**\n * @deprecated use `core.summary`\n */\nexports.markdownSummary = _summary;\nexports.summary = _summary;\n//# sourceMappingURL=summary.js.map","\"use strict\";\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toCommandProperties = exports.toCommandValue = void 0;\n/**\n * Sanitizes an input into a string so it can be passed into issueCommand safely\n * @param input input to sanitize into a string\n */\nfunction toCommandValue(input) {\n if (input === null || input === undefined) {\n return '';\n }\n else if (typeof input === 'string' || input instanceof String) {\n return input;\n }\n return JSON.stringify(input);\n}\nexports.toCommandValue = toCommandValue;\n/**\n *\n * @param annotationProperties\n * @returns The command properties to send with the actual annotation command\n * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646\n */\nfunction toCommandProperties(annotationProperties) {\n if (!Object.keys(annotationProperties).length) {\n return {};\n }\n return {\n title: annotationProperties.title,\n file: annotationProperties.file,\n line: annotationProperties.startLine,\n endLine: annotationProperties.endLine,\n col: annotationProperties.startColumn,\n endColumn: annotationProperties.endColumn\n };\n}\nexports.toCommandProperties = toCommandProperties;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Context = void 0;\nconst fs_1 = require(\"fs\");\nconst os_1 = require(\"os\");\nclass Context {\n /**\n * Hydrate the context from the environment\n */\n constructor() {\n var _a, _b, _c;\n this.payload = {};\n if (process.env.GITHUB_EVENT_PATH) {\n if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) {\n this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));\n }\n else {\n const path = process.env.GITHUB_EVENT_PATH;\n process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`);\n }\n }\n this.eventName = process.env.GITHUB_EVENT_NAME;\n this.sha = process.env.GITHUB_SHA;\n this.ref = process.env.GITHUB_REF;\n this.workflow = process.env.GITHUB_WORKFLOW;\n this.action = process.env.GITHUB_ACTION;\n this.actor = process.env.GITHUB_ACTOR;\n this.job = process.env.GITHUB_JOB;\n this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10);\n this.runId = parseInt(process.env.GITHUB_RUN_ID, 10);\n this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? 
_a : `https://api.github.com`;\n this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`;\n this.graphqlUrl = (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`;\n }\n get issue() {\n const payload = this.payload;\n return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number });\n }\n get repo() {\n if (process.env.GITHUB_REPOSITORY) {\n const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');\n return { owner, repo };\n }\n if (this.payload.repository) {\n return {\n owner: this.payload.repository.owner.login,\n repo: this.payload.repository.name\n };\n }\n throw new Error(\"context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'\");\n }\n}\nexports.Context = Context;\n//# sourceMappingURL=context.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getOctokit = exports.context = void 0;\nconst Context = __importStar(require(\"./context\"));\nconst utils_1 = require(\"./utils\");\nexports.context = new Context.Context();\n/**\n * Returns a hydrated octokit ready to use for GitHub Actions\n *\n * @param token the repo PAT or GITHUB_TOKEN\n * @param options other options to set\n */\nfunction getOctokit(token, options) {\n return new utils_1.GitHub(utils_1.getOctokitOptions(token, options));\n}\nexports.getOctokit = getOctokit;\n//# sourceMappingURL=github.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0;\nconst httpClient = __importStar(require(\"@actions/http-client\"));\nfunction getAuthString(token, options) {\n if (!token && !options.auth) {\n throw new Error('Parameter token or opts.auth is required');\n }\n else if (token && options.auth) {\n throw new Error('Parameters token and opts.auth may not both be specified');\n }\n return typeof options.auth === 'string' ? options.auth : `token ${token}`;\n}\nexports.getAuthString = getAuthString;\nfunction getProxyAgent(destinationUrl) {\n const hc = new httpClient.HttpClient();\n return hc.getAgent(destinationUrl);\n}\nexports.getProxyAgent = getProxyAgent;\nfunction getApiBaseUrl() {\n return process.env['GITHUB_API_URL'] || 'https://api.github.com';\n}\nexports.getApiBaseUrl = getApiBaseUrl;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getOctokitOptions = exports.GitHub = exports.context = void 0;\nconst Context = __importStar(require(\"./context\"));\nconst Utils = __importStar(require(\"./internal/utils\"));\n// octokit + plugins\nconst core_1 = require(\"@octokit/core\");\nconst plugin_rest_endpoint_methods_1 = require(\"@octokit/plugin-rest-endpoint-methods\");\nconst plugin_paginate_rest_1 = require(\"@octokit/plugin-paginate-rest\");\nexports.context = new Context.Context();\nconst baseUrl = Utils.getApiBaseUrl();\nconst defaults = {\n baseUrl,\n request: {\n agent: Utils.getProxyAgent(baseUrl)\n }\n};\nexports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(defaults);\n/**\n * Convience function to correctly format Octokit Options to pass into the constructor.\n *\n * @param token the repo PAT or GITHUB_TOKEN\n * @param options other options to set\n */\nfunction getOctokitOptions(token, options) {\n const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller\n // Auth\n const auth = Utils.getAuthString(token, opts);\n if (auth) {\n opts.auth = auth;\n }\n return opts;\n}\nexports.getOctokitOptions = getOctokitOptions;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0;\nclass BasicCredentialHandler {\n constructor(username, password) {\n this.username = username;\n this.password = password;\n }\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.BasicCredentialHandler = BasicCredentialHandler;\nclass BearerCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Bearer ${this.token}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.BearerCredentialHandler = BearerCredentialHandler;\nclass PersonalAccessTokenCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;\n//# sourceMappingURL=auth.js.map","\"use strict\";\n/* eslint-disable @typescript-eslint/no-explicit-any */\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0;\nconst http = __importStar(require(\"http\"));\nconst https = __importStar(require(\"https\"));\nconst pm = __importStar(require(\"./proxy\"));\nconst tunnel = __importStar(require(\"tunnel\"));\nvar HttpCodes;\n(function (HttpCodes) {\n HttpCodes[HttpCodes[\"OK\"] = 200] = \"OK\";\n HttpCodes[HttpCodes[\"MultipleChoices\"] = 300] = \"MultipleChoices\";\n HttpCodes[HttpCodes[\"MovedPermanently\"] = 301] = \"MovedPermanently\";\n HttpCodes[HttpCodes[\"ResourceMoved\"] = 302] = \"ResourceMoved\";\n HttpCodes[HttpCodes[\"SeeOther\"] = 303] = \"SeeOther\";\n HttpCodes[HttpCodes[\"NotModified\"] = 304] = \"NotModified\";\n HttpCodes[HttpCodes[\"UseProxy\"] = 305] = \"UseProxy\";\n HttpCodes[HttpCodes[\"SwitchProxy\"] = 306] = \"SwitchProxy\";\n HttpCodes[HttpCodes[\"TemporaryRedirect\"] = 307] = \"TemporaryRedirect\";\n HttpCodes[HttpCodes[\"PermanentRedirect\"] = 308] = \"PermanentRedirect\";\n HttpCodes[HttpCodes[\"BadRequest\"] = 400] = \"BadRequest\";\n HttpCodes[HttpCodes[\"Unauthorized\"] = 401] = \"Unauthorized\";\n HttpCodes[HttpCodes[\"PaymentRequired\"] = 402] = \"PaymentRequired\";\n HttpCodes[HttpCodes[\"Forbidden\"] = 403] = \"Forbidden\";\n HttpCodes[HttpCodes[\"NotFound\"] = 404] = \"NotFound\";\n HttpCodes[HttpCodes[\"MethodNotAllowed\"] = 405] = \"MethodNotAllowed\";\n HttpCodes[HttpCodes[\"NotAcceptable\"] = 406] = \"NotAcceptable\";\n HttpCodes[HttpCodes[\"ProxyAuthenticationRequired\"] = 407] = \"ProxyAuthenticationRequired\";\n HttpCodes[HttpCodes[\"RequestTimeout\"] = 408] = \"RequestTimeout\";\n HttpCodes[HttpCodes[\"Conflict\"] = 409] = \"Conflict\";\n HttpCodes[HttpCodes[\"Gone\"] = 410] = \"Gone\";\n HttpCodes[HttpCodes[\"TooManyRequests\"] = 429] = \"TooManyRequests\";\n HttpCodes[HttpCodes[\"InternalServerError\"] = 500] = \"InternalServerError\";\n HttpCodes[HttpCodes[\"NotImplemented\"] = 501] = \"NotImplemented\";\n HttpCodes[HttpCodes[\"BadGateway\"] = 502] = \"BadGateway\";\n HttpCodes[HttpCodes[\"ServiceUnavailable\"] = 503] = \"ServiceUnavailable\";\n HttpCodes[HttpCodes[\"GatewayTimeout\"] = 504] = \"GatewayTimeout\";\n})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));\nvar Headers;\n(function (Headers) {\n Headers[\"Accept\"] = \"accept\";\n Headers[\"ContentType\"] = \"content-type\";\n})(Headers = exports.Headers || (exports.Headers = {}));\nvar MediaTypes;\n(function (MediaTypes) {\n MediaTypes[\"ApplicationJson\"] = \"application/json\";\n})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));\n/**\n * Returns the proxy URL, depending upon the supplied url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. 
For example, https://api.github.com\n */\nfunction getProxyUrl(serverUrl) {\n const proxyUrl = pm.getProxyUrl(new URL(serverUrl));\n return proxyUrl ? proxyUrl.href : '';\n}\nexports.getProxyUrl = getProxyUrl;\nconst HttpRedirectCodes = [\n HttpCodes.MovedPermanently,\n HttpCodes.ResourceMoved,\n HttpCodes.SeeOther,\n HttpCodes.TemporaryRedirect,\n HttpCodes.PermanentRedirect\n];\nconst HttpResponseRetryCodes = [\n HttpCodes.BadGateway,\n HttpCodes.ServiceUnavailable,\n HttpCodes.GatewayTimeout\n];\nconst RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];\nconst ExponentialBackoffCeiling = 10;\nconst ExponentialBackoffTimeSlice = 5;\nclass HttpClientError extends Error {\n constructor(message, statusCode) {\n super(message);\n this.name = 'HttpClientError';\n this.statusCode = statusCode;\n Object.setPrototypeOf(this, HttpClientError.prototype);\n }\n}\nexports.HttpClientError = HttpClientError;\nclass HttpClientResponse {\n constructor(message) {\n this.message = message;\n }\n readBody() {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {\n let output = Buffer.alloc(0);\n this.message.on('data', (chunk) => {\n output = Buffer.concat([output, chunk]);\n });\n this.message.on('end', () => {\n resolve(output.toString());\n });\n }));\n });\n }\n}\nexports.HttpClientResponse = HttpClientResponse;\nfunction isHttps(requestUrl) {\n const parsedUrl = new URL(requestUrl);\n return parsedUrl.protocol === 'https:';\n}\nexports.isHttps = isHttps;\nclass HttpClient {\n constructor(userAgent, handlers, requestOptions) {\n this._ignoreSslError = false;\n this._allowRedirects = true;\n this._allowRedirectDowngrade = false;\n this._maxRedirects = 50;\n this._allowRetries = false;\n this._maxRetries = 1;\n this._keepAlive = false;\n this._disposed = false;\n this.userAgent = userAgent;\n this.handlers = handlers || [];\n this.requestOptions = requestOptions;\n if (requestOptions) {\n if (requestOptions.ignoreSslError != null) {\n this._ignoreSslError = requestOptions.ignoreSslError;\n }\n this._socketTimeout = requestOptions.socketTimeout;\n if (requestOptions.allowRedirects != null) {\n this._allowRedirects = requestOptions.allowRedirects;\n }\n if (requestOptions.allowRedirectDowngrade != null) {\n this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;\n }\n if (requestOptions.maxRedirects != null) {\n this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);\n }\n if (requestOptions.keepAlive != null) {\n this._keepAlive = requestOptions.keepAlive;\n }\n if (requestOptions.allowRetries != null) {\n this._allowRetries = requestOptions.allowRetries;\n }\n if (requestOptions.maxRetries != null) {\n this._maxRetries = requestOptions.maxRetries;\n }\n }\n }\n options(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});\n });\n }\n get(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('GET', requestUrl, null, additionalHeaders || {});\n });\n }\n del(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('DELETE', requestUrl, null, additionalHeaders || {});\n });\n }\n post(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('POST', requestUrl, data, additionalHeaders || {});\n });\n }\n patch(requestUrl, 
data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('PATCH', requestUrl, data, additionalHeaders || {});\n });\n }\n put(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('PUT', requestUrl, data, additionalHeaders || {});\n });\n }\n head(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('HEAD', requestUrl, null, additionalHeaders || {});\n });\n }\n sendStream(verb, requestUrl, stream, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request(verb, requestUrl, stream, additionalHeaders);\n });\n }\n /**\n * Gets a typed object from an endpoint\n * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise\n */\n getJson(requestUrl, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n const res = yield this.get(requestUrl, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n postJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.post(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n putJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.put(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n patchJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.patch(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n /**\n * Makes a raw http request.\n * All other methods such as get, post, patch, and request ultimately call this.\n * Prefer get, del, post and patch\n */\n request(verb, requestUrl, data, headers) {\n return __awaiter(this, void 0, void 0, function* () {\n if (this._disposed) {\n throw new Error('Client has already been disposed.');\n }\n const parsedUrl = new URL(requestUrl);\n let info = this._prepareRequest(verb, parsedUrl, headers);\n // Only perform retries on reads since writes may not be idempotent.\n const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb)\n ? 
this._maxRetries + 1\n : 1;\n let numTries = 0;\n let response;\n do {\n response = yield this.requestRaw(info, data);\n // Check if it's an authentication challenge\n if (response &&\n response.message &&\n response.message.statusCode === HttpCodes.Unauthorized) {\n let authenticationHandler;\n for (const handler of this.handlers) {\n if (handler.canHandleAuthentication(response)) {\n authenticationHandler = handler;\n break;\n }\n }\n if (authenticationHandler) {\n return authenticationHandler.handleAuthentication(this, info, data);\n }\n else {\n // We have received an unauthorized response but have no handlers to handle it.\n // Let the response return to the caller.\n return response;\n }\n }\n let redirectsRemaining = this._maxRedirects;\n while (response.message.statusCode &&\n HttpRedirectCodes.includes(response.message.statusCode) &&\n this._allowRedirects &&\n redirectsRemaining > 0) {\n const redirectUrl = response.message.headers['location'];\n if (!redirectUrl) {\n // if there's no location to redirect to, we won't\n break;\n }\n const parsedRedirectUrl = new URL(redirectUrl);\n if (parsedUrl.protocol === 'https:' &&\n parsedUrl.protocol !== parsedRedirectUrl.protocol &&\n !this._allowRedirectDowngrade) {\n throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');\n }\n // we need to finish reading the response before reassigning response\n // which will leak the open socket.\n yield response.readBody();\n // strip authorization header if redirected to a different hostname\n if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {\n for (const header in headers) {\n // header names are case insensitive\n if (header.toLowerCase() === 'authorization') {\n delete headers[header];\n }\n }\n }\n // let's make the request with the new redirectUrl\n info = this._prepareRequest(verb, parsedRedirectUrl, headers);\n response = yield this.requestRaw(info, data);\n redirectsRemaining--;\n }\n if (!response.message.statusCode ||\n !HttpResponseRetryCodes.includes(response.message.statusCode)) {\n // If not a retry code, return immediately instead of retrying\n return response;\n }\n numTries += 1;\n if (numTries < maxTries) {\n yield response.readBody();\n yield this._performExponentialBackoff(numTries);\n }\n } while (numTries < maxTries);\n return response;\n });\n }\n /**\n * Needs to be called if keepAlive is set to true in request options.\n */\n dispose() {\n if (this._agent) {\n this._agent.destroy();\n }\n this._disposed = true;\n }\n /**\n * Raw request.\n * @param info\n * @param data\n */\n requestRaw(info, data) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve, reject) => {\n function callbackForResult(err, res) {\n if (err) {\n reject(err);\n }\n else if (!res) {\n // If `err` is not passed, then `res` must be passed.\n reject(new Error('Unknown error'));\n }\n else {\n resolve(res);\n }\n }\n this.requestRawWithCallback(info, data, callbackForResult);\n });\n });\n }\n /**\n * Raw request with callback.\n * @param info\n * @param data\n * @param onResult\n */\n requestRawWithCallback(info, data, onResult) {\n if (typeof data === 'string') {\n if (!info.options.headers) {\n info.options.headers = {};\n }\n info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');\n }\n let callbackCalled = false;\n function handleResult(err, res) {\n if (!callbackCalled) {\n callbackCalled = true;\n 
onResult(err, res);\n }\n }\n const req = info.httpModule.request(info.options, (msg) => {\n const res = new HttpClientResponse(msg);\n handleResult(undefined, res);\n });\n let socket;\n req.on('socket', sock => {\n socket = sock;\n });\n // If we ever get disconnected, we want the socket to timeout eventually\n req.setTimeout(this._socketTimeout || 3 * 60000, () => {\n if (socket) {\n socket.end();\n }\n handleResult(new Error(`Request timeout: ${info.options.path}`));\n });\n req.on('error', function (err) {\n // err has statusCode property\n // res should have headers\n handleResult(err);\n });\n if (data && typeof data === 'string') {\n req.write(data, 'utf8');\n }\n if (data && typeof data !== 'string') {\n data.on('close', function () {\n req.end();\n });\n data.pipe(req);\n }\n else {\n req.end();\n }\n }\n /**\n * Gets an http agent. This function is useful when you need an http agent that handles\n * routing through a proxy server - depending upon the url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com\n */\n getAgent(serverUrl) {\n const parsedUrl = new URL(serverUrl);\n return this._getAgent(parsedUrl);\n }\n _prepareRequest(method, requestUrl, headers) {\n const info = {};\n info.parsedUrl = requestUrl;\n const usingSsl = info.parsedUrl.protocol === 'https:';\n info.httpModule = usingSsl ? https : http;\n const defaultPort = usingSsl ? 443 : 80;\n info.options = {};\n info.options.host = info.parsedUrl.hostname;\n info.options.port = info.parsedUrl.port\n ? parseInt(info.parsedUrl.port)\n : defaultPort;\n info.options.path =\n (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');\n info.options.method = method;\n info.options.headers = this._mergeHeaders(headers);\n if (this.userAgent != null) {\n info.options.headers['user-agent'] = this.userAgent;\n }\n info.options.agent = this._getAgent(info.parsedUrl);\n // gives handlers an opportunity to participate\n if (this.handlers) {\n for (const handler of this.handlers) {\n handler.prepareRequest(info.options);\n }\n }\n return info;\n }\n _mergeHeaders(headers) {\n if (this.requestOptions && this.requestOptions.headers) {\n return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {}));\n }\n return lowercaseKeys(headers || {});\n }\n _getExistingOrDefaultHeader(additionalHeaders, header, _default) {\n let clientHeader;\n if (this.requestOptions && this.requestOptions.headers) {\n clientHeader = lowercaseKeys(this.requestOptions.headers)[header];\n }\n return additionalHeaders[header] || clientHeader || _default;\n }\n _getAgent(parsedUrl) {\n let agent;\n const proxyUrl = pm.getProxyUrl(parsedUrl);\n const useProxy = proxyUrl && proxyUrl.hostname;\n if (this._keepAlive && useProxy) {\n agent = this._proxyAgent;\n }\n if (this._keepAlive && !useProxy) {\n agent = this._agent;\n }\n // if agent is already assigned use that agent.\n if (agent) {\n return agent;\n }\n const usingSsl = parsedUrl.protocol === 'https:';\n let maxSockets = 100;\n if (this.requestOptions) {\n maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;\n }\n // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis.\n if (proxyUrl && proxyUrl.hostname) {\n const agentOptions = {\n maxSockets,\n keepAlive: this._keepAlive,\n proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && {\n proxyAuth: 
`${proxyUrl.username}:${proxyUrl.password}`\n })), { host: proxyUrl.hostname, port: proxyUrl.port })\n };\n let tunnelAgent;\n const overHttps = proxyUrl.protocol === 'https:';\n if (usingSsl) {\n tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;\n }\n else {\n tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;\n }\n agent = tunnelAgent(agentOptions);\n this._proxyAgent = agent;\n }\n // if reusing agent across request and tunneling agent isn't assigned create a new agent\n if (this._keepAlive && !agent) {\n const options = { keepAlive: this._keepAlive, maxSockets };\n agent = usingSsl ? new https.Agent(options) : new http.Agent(options);\n this._agent = agent;\n }\n // if not using private agent and tunnel agent isn't setup then use global agent\n if (!agent) {\n agent = usingSsl ? https.globalAgent : http.globalAgent;\n }\n if (usingSsl && this._ignoreSslError) {\n // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process\n // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options\n // we have to cast it to any and change it directly\n agent.options = Object.assign(agent.options || {}, {\n rejectUnauthorized: false\n });\n }\n return agent;\n }\n _performExponentialBackoff(retryNumber) {\n return __awaiter(this, void 0, void 0, function* () {\n retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);\n const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);\n return new Promise(resolve => setTimeout(() => resolve(), ms));\n });\n }\n _processResponse(res, options) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {\n const statusCode = res.message.statusCode || 0;\n const response = {\n statusCode,\n result: null,\n headers: {}\n };\n // not found leads to null obj returned\n if (statusCode === HttpCodes.NotFound) {\n resolve(response);\n }\n // get the result from the body\n function dateTimeDeserializer(key, value) {\n if (typeof value === 'string') {\n const a = new Date(value);\n if (!isNaN(a.valueOf())) {\n return a;\n }\n }\n return value;\n }\n let obj;\n let contents;\n try {\n contents = yield res.readBody();\n if (contents && contents.length > 0) {\n if (options && options.deserializeDates) {\n obj = JSON.parse(contents, dateTimeDeserializer);\n }\n else {\n obj = JSON.parse(contents);\n }\n response.result = obj;\n }\n response.headers = res.message.headers;\n }\n catch (err) {\n // Invalid resource (contents not json); leaving result obj null\n }\n // note that 3xx redirects are handled by the http layer.\n if (statusCode > 299) {\n let msg;\n // if exception/error in body, attempt to get better error\n if (obj && obj.message) {\n msg = obj.message;\n }\n else if (contents && contents.length > 0) {\n // it may be the case that the exception is in the body message as string\n msg = contents;\n }\n else {\n msg = `Failed request: (${statusCode})`;\n }\n const err = new HttpClientError(msg, statusCode);\n err.result = response.result;\n reject(err);\n }\n else {\n resolve(response);\n }\n }));\n });\n }\n}\nexports.HttpClient = HttpClient;\nconst lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.checkBypass = exports.getProxyUrl = void 0;\nfunction getProxyUrl(reqUrl) {\n const 
usingSsl = reqUrl.protocol === 'https:';\n if (checkBypass(reqUrl)) {\n return undefined;\n }\n const proxyVar = (() => {\n if (usingSsl) {\n return process.env['https_proxy'] || process.env['HTTPS_PROXY'];\n }\n else {\n return process.env['http_proxy'] || process.env['HTTP_PROXY'];\n }\n })();\n if (proxyVar) {\n return new URL(proxyVar);\n }\n else {\n return undefined;\n }\n}\nexports.getProxyUrl = getProxyUrl;\nfunction checkBypass(reqUrl) {\n if (!reqUrl.hostname) {\n return false;\n }\n const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';\n if (!noProxy) {\n return false;\n }\n // Determine the request port\n let reqPort;\n if (reqUrl.port) {\n reqPort = Number(reqUrl.port);\n }\n else if (reqUrl.protocol === 'http:') {\n reqPort = 80;\n }\n else if (reqUrl.protocol === 'https:') {\n reqPort = 443;\n }\n // Format the request hostname and hostname with port\n const upperReqHosts = [reqUrl.hostname.toUpperCase()];\n if (typeof reqPort === 'number') {\n upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);\n }\n // Compare request host against noproxy\n for (const upperNoProxyItem of noProxy\n .split(',')\n .map(x => x.trim().toUpperCase())\n .filter(x => x)) {\n if (upperReqHosts.some(x => x === upperNoProxyItem)) {\n return true;\n }\n }\n return false;\n}\nexports.checkBypass = checkBypass;\n//# sourceMappingURL=proxy.js.map","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nconst REGEX_IS_INSTALLATION_LEGACY = /^v1\\./;\nconst REGEX_IS_INSTALLATION = /^ghs_/;\nconst REGEX_IS_USER_TO_SERVER = /^ghu_/;\nasync function auth(token) {\n const isApp = token.split(/\\./).length === 3;\n const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token);\n const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token);\n const tokenType = isApp ? \"app\" : isInstallation ? \"installation\" : isUserToServer ? 
\"user-to-server\" : \"oauth\";\n return {\n type: \"token\",\n token: token,\n tokenType\n };\n}\n\n/**\n * Prefix token for usage in the Authorization header\n *\n * @param token OAuth token or JSON Web Token\n */\nfunction withAuthorizationPrefix(token) {\n if (token.split(/\\./).length === 3) {\n return `bearer ${token}`;\n }\n\n return `token ${token}`;\n}\n\nasync function hook(token, request, route, parameters) {\n const endpoint = request.endpoint.merge(route, parameters);\n endpoint.headers.authorization = withAuthorizationPrefix(token);\n return request(endpoint);\n}\n\nconst createTokenAuth = function createTokenAuth(token) {\n if (!token) {\n throw new Error(\"[@octokit/auth-token] No token passed to createTokenAuth\");\n }\n\n if (typeof token !== \"string\") {\n throw new Error(\"[@octokit/auth-token] Token passed to createTokenAuth is not a string\");\n }\n\n token = token.replace(/^(token|bearer) +/i, \"\");\n return Object.assign(auth.bind(null, token), {\n hook: hook.bind(null, token)\n });\n};\n\nexports.createTokenAuth = createTokenAuth;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar universalUserAgent = require('universal-user-agent');\nvar beforeAfterHook = require('before-after-hook');\nvar request = require('@octokit/request');\nvar graphql = require('@octokit/graphql');\nvar authToken = require('@octokit/auth-token');\n\nfunction _objectWithoutPropertiesLoose(source, excluded) {\n if (source == null) return {};\n var target = {};\n var sourceKeys = Object.keys(source);\n var key, i;\n\n for (i = 0; i < sourceKeys.length; i++) {\n key = sourceKeys[i];\n if (excluded.indexOf(key) >= 0) continue;\n target[key] = source[key];\n }\n\n return target;\n}\n\nfunction _objectWithoutProperties(source, excluded) {\n if (source == null) return {};\n\n var target = _objectWithoutPropertiesLoose(source, excluded);\n\n var key, i;\n\n if (Object.getOwnPropertySymbols) {\n var sourceSymbolKeys = Object.getOwnPropertySymbols(source);\n\n for (i = 0; i < sourceSymbolKeys.length; i++) {\n key = sourceSymbolKeys[i];\n if (excluded.indexOf(key) >= 0) continue;\n if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue;\n target[key] = source[key];\n }\n }\n\n return target;\n}\n\nconst VERSION = \"3.6.0\";\n\nconst _excluded = [\"authStrategy\"];\nclass Octokit {\n constructor(options = {}) {\n const hook = new beforeAfterHook.Collection();\n const requestDefaults = {\n baseUrl: request.request.endpoint.DEFAULTS.baseUrl,\n headers: {},\n request: Object.assign({}, options.request, {\n // @ts-ignore internal usage only, no need to type\n hook: hook.bind(null, \"request\")\n }),\n mediaType: {\n previews: [],\n format: \"\"\n }\n }; // prepend default user agent with `options.userAgent` if set\n\n requestDefaults.headers[\"user-agent\"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(\" \");\n\n if (options.baseUrl) {\n requestDefaults.baseUrl = options.baseUrl;\n }\n\n if (options.previews) {\n requestDefaults.mediaType.previews = options.previews;\n }\n\n if (options.timeZone) {\n requestDefaults.headers[\"time-zone\"] = options.timeZone;\n }\n\n this.request = request.request.defaults(requestDefaults);\n this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults);\n this.log = Object.assign({\n debug: () => {},\n info: () => {},\n warn: console.warn.bind(console),\n error: console.error.bind(console)\n }, 
options.log);\n this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance\n // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered.\n // (2) If only `options.auth` is set, use the default token authentication strategy.\n // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance.\n // TODO: type `options.auth` based on `options.authStrategy`.\n\n if (!options.authStrategy) {\n if (!options.auth) {\n // (1)\n this.auth = async () => ({\n type: \"unauthenticated\"\n });\n } else {\n // (2)\n const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\\_(ツ)_/¯\n\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n }\n } else {\n const {\n authStrategy\n } = options,\n otherOptions = _objectWithoutProperties(options, _excluded);\n\n const auth = authStrategy(Object.assign({\n request: this.request,\n log: this.log,\n // we pass the current octokit instance as well as its constructor options\n // to allow for authentication strategies that return a new octokit instance\n // that shares the same internal state as the current one. The original\n // requirement for this was the \"event-octokit\" authentication strategy\n // of https://github.com/probot/octokit-auth-probot.\n octokit: this,\n octokitOptions: otherOptions\n }, options.auth)); // @ts-ignore ¯\\_(ツ)_/¯\n\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n } // apply plugins\n // https://stackoverflow.com/a/16345172\n\n\n const classConstructor = this.constructor;\n classConstructor.plugins.forEach(plugin => {\n Object.assign(this, plugin(this, options));\n });\n }\n\n static defaults(defaults) {\n const OctokitWithDefaults = class extends this {\n constructor(...args) {\n const options = args[0] || {};\n\n if (typeof defaults === \"function\") {\n super(defaults(options));\n return;\n }\n\n super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? 
{\n userAgent: `${options.userAgent} ${defaults.userAgent}`\n } : null));\n }\n\n };\n return OctokitWithDefaults;\n }\n /**\n * Attach a plugin (or many) to your Octokit instance.\n *\n * @example\n * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)\n */\n\n\n static plugin(...newPlugins) {\n var _a;\n\n const currentPlugins = this.plugins;\n const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a);\n return NewOctokit;\n }\n\n}\nOctokit.VERSION = VERSION;\nOctokit.plugins = [];\n\nexports.Octokit = Octokit;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar isPlainObject = require('is-plain-object');\nvar universalUserAgent = require('universal-user-agent');\n\nfunction lowercaseKeys(object) {\n if (!object) {\n return {};\n }\n\n return Object.keys(object).reduce((newObj, key) => {\n newObj[key.toLowerCase()] = object[key];\n return newObj;\n }, {});\n}\n\nfunction mergeDeep(defaults, options) {\n const result = Object.assign({}, defaults);\n Object.keys(options).forEach(key => {\n if (isPlainObject.isPlainObject(options[key])) {\n if (!(key in defaults)) Object.assign(result, {\n [key]: options[key]\n });else result[key] = mergeDeep(defaults[key], options[key]);\n } else {\n Object.assign(result, {\n [key]: options[key]\n });\n }\n });\n return result;\n}\n\nfunction removeUndefinedProperties(obj) {\n for (const key in obj) {\n if (obj[key] === undefined) {\n delete obj[key];\n }\n }\n\n return obj;\n}\n\nfunction merge(defaults, route, options) {\n if (typeof route === \"string\") {\n let [method, url] = route.split(\" \");\n options = Object.assign(url ? {\n method,\n url\n } : {\n url: method\n }, options);\n } else {\n options = Object.assign({}, route);\n } // lowercase header names before merging with defaults to avoid duplicates\n\n\n options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging\n\n removeUndefinedProperties(options);\n removeUndefinedProperties(options.headers);\n const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten\n\n if (defaults && defaults.mediaType.previews.length) {\n mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews);\n }\n\n mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, \"\"));\n return mergedOptions;\n}\n\nfunction addQueryParameters(url, parameters) {\n const separator = /\\?/.test(url) ? 
\"&\" : \"?\";\n const names = Object.keys(parameters);\n\n if (names.length === 0) {\n return url;\n }\n\n return url + separator + names.map(name => {\n if (name === \"q\") {\n return \"q=\" + parameters.q.split(\"+\").map(encodeURIComponent).join(\"+\");\n }\n\n return `${name}=${encodeURIComponent(parameters[name])}`;\n }).join(\"&\");\n}\n\nconst urlVariableRegex = /\\{[^}]+\\}/g;\n\nfunction removeNonChars(variableName) {\n return variableName.replace(/^\\W+|\\W+$/g, \"\").split(/,/);\n}\n\nfunction extractUrlVariableNames(url) {\n const matches = url.match(urlVariableRegex);\n\n if (!matches) {\n return [];\n }\n\n return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []);\n}\n\nfunction omit(object, keysToOmit) {\n return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => {\n obj[key] = object[key];\n return obj;\n }, {});\n}\n\n// Based on https://github.com/bramstein/url-template, licensed under BSD\n// TODO: create separate package.\n//\n// Copyright (c) 2012-2014, Bram Stein\n// All rights reserved.\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions\n// are met:\n// 1. Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n// 2. Redistributions in binary form must reproduce the above copyright\n// notice, this list of conditions and the following disclaimer in the\n// documentation and/or other materials provided with the distribution.\n// 3. The name of the author may not be used to endorse or promote products\n// derived from this software without specific prior written permission.\n// THIS SOFTWARE IS PROVIDED BY THE AUTHOR \"AS IS\" AND ANY EXPRESS OR IMPLIED\n// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF\n// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO\n// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,\n// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,\n// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY\n// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\n// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,\n// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n/* istanbul ignore file */\nfunction encodeReserved(str) {\n return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) {\n if (!/%[0-9A-Fa-f]/.test(part)) {\n part = encodeURI(part).replace(/%5B/g, \"[\").replace(/%5D/g, \"]\");\n }\n\n return part;\n }).join(\"\");\n}\n\nfunction encodeUnreserved(str) {\n return encodeURIComponent(str).replace(/[!'()*]/g, function (c) {\n return \"%\" + c.charCodeAt(0).toString(16).toUpperCase();\n });\n}\n\nfunction encodeValue(operator, value, key) {\n value = operator === \"+\" || operator === \"#\" ? 
encodeReserved(value) : encodeUnreserved(value);\n\n if (key) {\n return encodeUnreserved(key) + \"=\" + value;\n } else {\n return value;\n }\n}\n\nfunction isDefined(value) {\n return value !== undefined && value !== null;\n}\n\nfunction isKeyOperator(operator) {\n return operator === \";\" || operator === \"&\" || operator === \"?\";\n}\n\nfunction getValues(context, operator, key, modifier) {\n var value = context[key],\n result = [];\n\n if (isDefined(value) && value !== \"\") {\n if (typeof value === \"string\" || typeof value === \"number\" || typeof value === \"boolean\") {\n value = value.toString();\n\n if (modifier && modifier !== \"*\") {\n value = value.substring(0, parseInt(modifier, 10));\n }\n\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : \"\"));\n } else {\n if (modifier === \"*\") {\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : \"\"));\n });\n } else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n result.push(encodeValue(operator, value[k], k));\n }\n });\n }\n } else {\n const tmp = [];\n\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n tmp.push(encodeValue(operator, value));\n });\n } else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n tmp.push(encodeUnreserved(k));\n tmp.push(encodeValue(operator, value[k].toString()));\n }\n });\n }\n\n if (isKeyOperator(operator)) {\n result.push(encodeUnreserved(key) + \"=\" + tmp.join(\",\"));\n } else if (tmp.length !== 0) {\n result.push(tmp.join(\",\"));\n }\n }\n }\n } else {\n if (operator === \";\") {\n if (isDefined(value)) {\n result.push(encodeUnreserved(key));\n }\n } else if (value === \"\" && (operator === \"&\" || operator === \"?\")) {\n result.push(encodeUnreserved(key) + \"=\");\n } else if (value === \"\") {\n result.push(\"\");\n }\n }\n\n return result;\n}\n\nfunction parseUrl(template) {\n return {\n expand: expand.bind(null, template)\n };\n}\n\nfunction expand(template, context) {\n var operators = [\"+\", \"#\", \".\", \"/\", \";\", \"?\", \"&\"];\n return template.replace(/\\{([^\\{\\}]+)\\}|([^\\{\\}]+)/g, function (_, expression, literal) {\n if (expression) {\n let operator = \"\";\n const values = [];\n\n if (operators.indexOf(expression.charAt(0)) !== -1) {\n operator = expression.charAt(0);\n expression = expression.substr(1);\n }\n\n expression.split(/,/g).forEach(function (variable) {\n var tmp = /([^:\\*]*)(?::(\\d+)|(\\*))?/.exec(variable);\n values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));\n });\n\n if (operator && operator !== \"+\") {\n var separator = \",\";\n\n if (operator === \"?\") {\n separator = \"&\";\n } else if (operator !== \"#\") {\n separator = operator;\n }\n\n return (values.length !== 0 ? 
operator : \"\") + values.join(separator);\n } else {\n return values.join(\",\");\n }\n } else {\n return encodeReserved(literal);\n }\n });\n}\n\nfunction parse(options) {\n // https://fetch.spec.whatwg.org/#methods\n let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible\n\n let url = (options.url || \"/\").replace(/:([a-z]\\w+)/g, \"{$1}\");\n let headers = Object.assign({}, options.headers);\n let body;\n let parameters = omit(options, [\"method\", \"baseUrl\", \"url\", \"headers\", \"request\", \"mediaType\"]); // extract variable names from URL to calculate remaining variables later\n\n const urlVariableNames = extractUrlVariableNames(url);\n url = parseUrl(url).expand(parameters);\n\n if (!/^http/.test(url)) {\n url = options.baseUrl + url;\n }\n\n const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat(\"baseUrl\");\n const remainingParameters = omit(parameters, omittedParameters);\n const isBinaryRequest = /application\\/octet-stream/i.test(headers.accept);\n\n if (!isBinaryRequest) {\n if (options.mediaType.format) {\n // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw\n headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\\/vnd(\\.\\w+)(\\.v3)?(\\.\\w+)?(\\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(\",\");\n }\n\n if (options.mediaType.previews.length) {\n const previewsFromAcceptHeader = headers.accept.match(/[\\w-]+(?=-preview)/g) || [];\n headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => {\n const format = options.mediaType.format ? `.${options.mediaType.format}` : \"+json\";\n return `application/vnd.github.${preview}-preview${format}`;\n }).join(\",\");\n }\n } // for GET/HEAD requests, set URL query parameters from remaining parameters\n // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters\n\n\n if ([\"GET\", \"HEAD\"].includes(method)) {\n url = addQueryParameters(url, remainingParameters);\n } else {\n if (\"data\" in remainingParameters) {\n body = remainingParameters.data;\n } else {\n if (Object.keys(remainingParameters).length) {\n body = remainingParameters;\n } else {\n headers[\"content-length\"] = 0;\n }\n }\n } // default content-type for JSON if body is set\n\n\n if (!headers[\"content-type\"] && typeof body !== \"undefined\") {\n headers[\"content-type\"] = \"application/json; charset=utf-8\";\n } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.\n // fetch does not allow to set `content-length` header, but we can set body to an empty string\n\n\n if ([\"PATCH\", \"PUT\"].includes(method) && typeof body === \"undefined\") {\n body = \"\";\n } // Only return body/request keys if present\n\n\n return Object.assign({\n method,\n url,\n headers\n }, typeof body !== \"undefined\" ? {\n body\n } : null, options.request ? 
{\n request: options.request\n } : null);\n}\n\nfunction endpointWithDefaults(defaults, route, options) {\n return parse(merge(defaults, route, options));\n}\n\nfunction withDefaults(oldDefaults, newDefaults) {\n const DEFAULTS = merge(oldDefaults, newDefaults);\n const endpoint = endpointWithDefaults.bind(null, DEFAULTS);\n return Object.assign(endpoint, {\n DEFAULTS,\n defaults: withDefaults.bind(null, DEFAULTS),\n merge: merge.bind(null, DEFAULTS),\n parse\n });\n}\n\nconst VERSION = \"6.0.12\";\n\nconst userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url.\n// So we use RequestParameters and add method as additional required property.\n\nconst DEFAULTS = {\n method: \"GET\",\n baseUrl: \"https://api.github.com\",\n headers: {\n accept: \"application/vnd.github.v3+json\",\n \"user-agent\": userAgent\n },\n mediaType: {\n format: \"\",\n previews: []\n }\n};\n\nconst endpoint = withDefaults(null, DEFAULTS);\n\nexports.endpoint = endpoint;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar request = require('@octokit/request');\nvar universalUserAgent = require('universal-user-agent');\n\nconst VERSION = \"4.8.0\";\n\nfunction _buildMessageForResponseErrors(data) {\n return `Request failed due to following response errors:\\n` + data.errors.map(e => ` - ${e.message}`).join(\"\\n\");\n}\n\nclass GraphqlResponseError extends Error {\n constructor(request, headers, response) {\n super(_buildMessageForResponseErrors(response));\n this.request = request;\n this.headers = headers;\n this.response = response;\n this.name = \"GraphqlResponseError\"; // Expose the errors and response data in their shorthand properties.\n\n this.errors = response.errors;\n this.data = response.data; // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n }\n\n}\n\nconst NON_VARIABLE_OPTIONS = [\"method\", \"baseUrl\", \"url\", \"headers\", \"request\", \"query\", \"mediaType\"];\nconst FORBIDDEN_VARIABLE_OPTIONS = [\"query\", \"method\", \"url\"];\nconst GHES_V3_SUFFIX_REGEX = /\\/api\\/v3\\/?$/;\nfunction graphql(request, query, options) {\n if (options) {\n if (typeof query === \"string\" && \"query\" in options) {\n return Promise.reject(new Error(`[@octokit/graphql] \"query\" cannot be used as variable name`));\n }\n\n for (const key in options) {\n if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) continue;\n return Promise.reject(new Error(`[@octokit/graphql] \"${key}\" cannot be used as variable name`));\n }\n }\n\n const parsedOptions = typeof query === \"string\" ? 
Object.assign({\n query\n }, options) : query;\n const requestOptions = Object.keys(parsedOptions).reduce((result, key) => {\n if (NON_VARIABLE_OPTIONS.includes(key)) {\n result[key] = parsedOptions[key];\n return result;\n }\n\n if (!result.variables) {\n result.variables = {};\n }\n\n result.variables[key] = parsedOptions[key];\n return result;\n }, {}); // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix\n // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451\n\n const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl;\n\n if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {\n requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, \"/api/graphql\");\n }\n\n return request(requestOptions).then(response => {\n if (response.data.errors) {\n const headers = {};\n\n for (const key of Object.keys(response.headers)) {\n headers[key] = response.headers[key];\n }\n\n throw new GraphqlResponseError(requestOptions, headers, response.data);\n }\n\n return response.data.data;\n });\n}\n\nfunction withDefaults(request$1, newDefaults) {\n const newRequest = request$1.defaults(newDefaults);\n\n const newApi = (query, options) => {\n return graphql(newRequest, query, options);\n };\n\n return Object.assign(newApi, {\n defaults: withDefaults.bind(null, newRequest),\n endpoint: request.request.endpoint\n });\n}\n\nconst graphql$1 = withDefaults(request.request, {\n headers: {\n \"user-agent\": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}`\n },\n method: \"POST\",\n url: \"/graphql\"\n});\nfunction withCustomRequest(customRequest) {\n return withDefaults(customRequest, {\n method: \"POST\",\n url: \"/graphql\"\n });\n}\n\nexports.GraphqlResponseError = GraphqlResponseError;\nexports.graphql = graphql$1;\nexports.withCustomRequest = withCustomRequest;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nconst VERSION = \"2.21.3\";\n\nfunction ownKeys(object, enumerableOnly) {\n var keys = Object.keys(object);\n\n if (Object.getOwnPropertySymbols) {\n var symbols = Object.getOwnPropertySymbols(object);\n enumerableOnly && (symbols = symbols.filter(function (sym) {\n return Object.getOwnPropertyDescriptor(object, sym).enumerable;\n })), keys.push.apply(keys, symbols);\n }\n\n return keys;\n}\n\nfunction _objectSpread2(target) {\n for (var i = 1; i < arguments.length; i++) {\n var source = null != arguments[i] ? arguments[i] : {};\n i % 2 ? ownKeys(Object(source), !0).forEach(function (key) {\n _defineProperty(target, key, source[key]);\n }) : Object.getOwnPropertyDescriptors ? 
Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) {\n Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));\n });\n }\n\n return target;\n}\n\nfunction _defineProperty(obj, key, value) {\n if (key in obj) {\n Object.defineProperty(obj, key, {\n value: value,\n enumerable: true,\n configurable: true,\n writable: true\n });\n } else {\n obj[key] = value;\n }\n\n return obj;\n}\n\n/**\n * Some “list” response that can be paginated have a different response structure\n *\n * They have a `total_count` key in the response (search also has `incomplete_results`,\n * /installation/repositories also has `repository_selection`), as well as a key with\n * the list of the items which name varies from endpoint to endpoint.\n *\n * Octokit normalizes these responses so that paginated results are always returned following\n * the same structure. One challenge is that if the list response has only one page, no Link\n * header is provided, so this header alone is not sufficient to check wether a response is\n * paginated or not.\n *\n * We check if a \"total_count\" key is present in the response data, but also make sure that\n * a \"url\" property is not, as the \"Get the combined status for a specific ref\" endpoint would\n * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref\n */\nfunction normalizePaginatedListResponse(response) {\n // endpoints can respond with 204 if repository is empty\n if (!response.data) {\n return _objectSpread2(_objectSpread2({}, response), {}, {\n data: []\n });\n }\n\n const responseNeedsNormalization = \"total_count\" in response.data && !(\"url\" in response.data);\n if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way\n // to retrieve the same information.\n\n const incompleteResults = response.data.incomplete_results;\n const repositorySelection = response.data.repository_selection;\n const totalCount = response.data.total_count;\n delete response.data.incomplete_results;\n delete response.data.repository_selection;\n delete response.data.total_count;\n const namespaceKey = Object.keys(response.data)[0];\n const data = response.data[namespaceKey];\n response.data = data;\n\n if (typeof incompleteResults !== \"undefined\") {\n response.data.incomplete_results = incompleteResults;\n }\n\n if (typeof repositorySelection !== \"undefined\") {\n response.data.repository_selection = repositorySelection;\n }\n\n response.data.total_count = totalCount;\n return response;\n}\n\nfunction iterator(octokit, route, parameters) {\n const options = typeof route === \"function\" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);\n const requestMethod = typeof route === \"function\" ? 
route : octokit.request;\n const method = options.method;\n const headers = options.headers;\n let url = options.url;\n return {\n [Symbol.asyncIterator]: () => ({\n async next() {\n if (!url) return {\n done: true\n };\n\n try {\n const response = await requestMethod({\n method,\n url,\n headers\n });\n const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format:\n // '; rel=\"next\", ; rel=\"last\"'\n // sets `url` to undefined if \"next\" URL is not present or `link` header is not set\n\n url = ((normalizedResponse.headers.link || \"\").match(/<([^>]+)>;\\s*rel=\"next\"/) || [])[1];\n return {\n value: normalizedResponse\n };\n } catch (error) {\n if (error.status !== 409) throw error;\n url = \"\";\n return {\n value: {\n status: 200,\n headers: {},\n data: []\n }\n };\n }\n }\n\n })\n };\n}\n\nfunction paginate(octokit, route, parameters, mapFn) {\n if (typeof parameters === \"function\") {\n mapFn = parameters;\n parameters = undefined;\n }\n\n return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn);\n}\n\nfunction gather(octokit, results, iterator, mapFn) {\n return iterator.next().then(result => {\n if (result.done) {\n return results;\n }\n\n let earlyExit = false;\n\n function done() {\n earlyExit = true;\n }\n\n results = results.concat(mapFn ? mapFn(result.value, done) : result.value.data);\n\n if (earlyExit) {\n return results;\n }\n\n return gather(octokit, results, iterator, mapFn);\n });\n}\n\nconst composePaginateRest = Object.assign(paginate, {\n iterator\n});\n\nconst paginatingEndpoints = [\"GET /app/hook/deliveries\", \"GET /app/installations\", \"GET /applications/grants\", \"GET /authorizations\", \"GET /enterprises/{enterprise}/actions/permissions/organizations\", \"GET /enterprises/{enterprise}/actions/runner-groups\", \"GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations\", \"GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners\", \"GET /enterprises/{enterprise}/actions/runners\", \"GET /enterprises/{enterprise}/audit-log\", \"GET /enterprises/{enterprise}/secret-scanning/alerts\", \"GET /enterprises/{enterprise}/settings/billing/advanced-security\", \"GET /events\", \"GET /gists\", \"GET /gists/public\", \"GET /gists/starred\", \"GET /gists/{gist_id}/comments\", \"GET /gists/{gist_id}/commits\", \"GET /gists/{gist_id}/forks\", \"GET /installation/repositories\", \"GET /issues\", \"GET /licenses\", \"GET /marketplace_listing/plans\", \"GET /marketplace_listing/plans/{plan_id}/accounts\", \"GET /marketplace_listing/stubbed/plans\", \"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\", \"GET /networks/{owner}/{repo}/events\", \"GET /notifications\", \"GET /organizations\", \"GET /orgs/{org}/actions/cache/usage-by-repository\", \"GET /orgs/{org}/actions/permissions/repositories\", \"GET /orgs/{org}/actions/runner-groups\", \"GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories\", \"GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners\", \"GET /orgs/{org}/actions/runners\", \"GET /orgs/{org}/actions/secrets\", \"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\", \"GET /orgs/{org}/audit-log\", \"GET /orgs/{org}/blocks\", \"GET /orgs/{org}/code-scanning/alerts\", \"GET /orgs/{org}/codespaces\", \"GET /orgs/{org}/credential-authorizations\", \"GET /orgs/{org}/dependabot/secrets\", \"GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories\", \"GET /orgs/{org}/events\", \"GET 
/orgs/{org}/external-groups\", \"GET /orgs/{org}/failed_invitations\", \"GET /orgs/{org}/hooks\", \"GET /orgs/{org}/hooks/{hook_id}/deliveries\", \"GET /orgs/{org}/installations\", \"GET /orgs/{org}/invitations\", \"GET /orgs/{org}/invitations/{invitation_id}/teams\", \"GET /orgs/{org}/issues\", \"GET /orgs/{org}/members\", \"GET /orgs/{org}/migrations\", \"GET /orgs/{org}/migrations/{migration_id}/repositories\", \"GET /orgs/{org}/outside_collaborators\", \"GET /orgs/{org}/packages\", \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\", \"GET /orgs/{org}/projects\", \"GET /orgs/{org}/public_members\", \"GET /orgs/{org}/repos\", \"GET /orgs/{org}/secret-scanning/alerts\", \"GET /orgs/{org}/settings/billing/advanced-security\", \"GET /orgs/{org}/team-sync/groups\", \"GET /orgs/{org}/teams\", \"GET /orgs/{org}/teams/{team_slug}/discussions\", \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\", \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\", \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\", \"GET /orgs/{org}/teams/{team_slug}/invitations\", \"GET /orgs/{org}/teams/{team_slug}/members\", \"GET /orgs/{org}/teams/{team_slug}/projects\", \"GET /orgs/{org}/teams/{team_slug}/repos\", \"GET /orgs/{org}/teams/{team_slug}/teams\", \"GET /projects/columns/{column_id}/cards\", \"GET /projects/{project_id}/collaborators\", \"GET /projects/{project_id}/columns\", \"GET /repos/{owner}/{repo}/actions/artifacts\", \"GET /repos/{owner}/{repo}/actions/caches\", \"GET /repos/{owner}/{repo}/actions/runners\", \"GET /repos/{owner}/{repo}/actions/runs\", \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\", \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs\", \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\", \"GET /repos/{owner}/{repo}/actions/secrets\", \"GET /repos/{owner}/{repo}/actions/workflows\", \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\", \"GET /repos/{owner}/{repo}/assignees\", \"GET /repos/{owner}/{repo}/branches\", \"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\", \"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\", \"GET /repos/{owner}/{repo}/code-scanning/alerts\", \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\", \"GET /repos/{owner}/{repo}/code-scanning/analyses\", \"GET /repos/{owner}/{repo}/codespaces\", \"GET /repos/{owner}/{repo}/codespaces/devcontainers\", \"GET /repos/{owner}/{repo}/codespaces/secrets\", \"GET /repos/{owner}/{repo}/collaborators\", \"GET /repos/{owner}/{repo}/comments\", \"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\", \"GET /repos/{owner}/{repo}/commits\", \"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\", \"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\", \"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\", \"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\", \"GET /repos/{owner}/{repo}/commits/{ref}/status\", \"GET /repos/{owner}/{repo}/commits/{ref}/statuses\", \"GET /repos/{owner}/{repo}/contributors\", \"GET /repos/{owner}/{repo}/dependabot/secrets\", \"GET /repos/{owner}/{repo}/deployments\", \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\", \"GET /repos/{owner}/{repo}/environments\", \"GET /repos/{owner}/{repo}/events\", \"GET /repos/{owner}/{repo}/forks\", \"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\", \"GET /repos/{owner}/{repo}/hooks\", 
\"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries\", \"GET /repos/{owner}/{repo}/invitations\", \"GET /repos/{owner}/{repo}/issues\", \"GET /repos/{owner}/{repo}/issues/comments\", \"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\", \"GET /repos/{owner}/{repo}/issues/events\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/events\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\", \"GET /repos/{owner}/{repo}/keys\", \"GET /repos/{owner}/{repo}/labels\", \"GET /repos/{owner}/{repo}/milestones\", \"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\", \"GET /repos/{owner}/{repo}/notifications\", \"GET /repos/{owner}/{repo}/pages/builds\", \"GET /repos/{owner}/{repo}/projects\", \"GET /repos/{owner}/{repo}/pulls\", \"GET /repos/{owner}/{repo}/pulls/comments\", \"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\", \"GET /repos/{owner}/{repo}/releases\", \"GET /repos/{owner}/{repo}/releases/{release_id}/assets\", \"GET /repos/{owner}/{repo}/releases/{release_id}/reactions\", \"GET /repos/{owner}/{repo}/secret-scanning/alerts\", \"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations\", \"GET /repos/{owner}/{repo}/stargazers\", \"GET /repos/{owner}/{repo}/subscribers\", \"GET /repos/{owner}/{repo}/tags\", \"GET /repos/{owner}/{repo}/teams\", \"GET /repos/{owner}/{repo}/topics\", \"GET /repositories\", \"GET /repositories/{repository_id}/environments/{environment_name}/secrets\", \"GET /search/code\", \"GET /search/commits\", \"GET /search/issues\", \"GET /search/labels\", \"GET /search/repositories\", \"GET /search/topics\", \"GET /search/users\", \"GET /teams/{team_id}/discussions\", \"GET /teams/{team_id}/discussions/{discussion_number}/comments\", \"GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions\", \"GET /teams/{team_id}/discussions/{discussion_number}/reactions\", \"GET /teams/{team_id}/invitations\", \"GET /teams/{team_id}/members\", \"GET /teams/{team_id}/projects\", \"GET /teams/{team_id}/repos\", \"GET /teams/{team_id}/teams\", \"GET /user/blocks\", \"GET /user/codespaces\", \"GET /user/codespaces/secrets\", \"GET /user/emails\", \"GET /user/followers\", \"GET /user/following\", \"GET /user/gpg_keys\", \"GET /user/installations\", \"GET /user/installations/{installation_id}/repositories\", \"GET /user/issues\", \"GET /user/keys\", \"GET /user/marketplace_purchases\", \"GET /user/marketplace_purchases/stubbed\", \"GET /user/memberships/orgs\", \"GET /user/migrations\", \"GET /user/migrations/{migration_id}/repositories\", \"GET /user/orgs\", \"GET /user/packages\", \"GET /user/packages/{package_type}/{package_name}/versions\", \"GET /user/public_emails\", \"GET /user/repos\", \"GET /user/repository_invitations\", \"GET /user/starred\", \"GET /user/subscriptions\", \"GET /user/teams\", \"GET /users\", \"GET /users/{username}/events\", \"GET /users/{username}/events/orgs/{org}\", \"GET 
/users/{username}/events/public\", \"GET /users/{username}/followers\", \"GET /users/{username}/following\", \"GET /users/{username}/gists\", \"GET /users/{username}/gpg_keys\", \"GET /users/{username}/keys\", \"GET /users/{username}/orgs\", \"GET /users/{username}/packages\", \"GET /users/{username}/projects\", \"GET /users/{username}/received_events\", \"GET /users/{username}/received_events/public\", \"GET /users/{username}/repos\", \"GET /users/{username}/starred\", \"GET /users/{username}/subscriptions\"];\n\nfunction isPaginatingEndpoint(arg) {\n if (typeof arg === \"string\") {\n return paginatingEndpoints.includes(arg);\n } else {\n return false;\n }\n}\n\n/**\n * @param octokit Octokit instance\n * @param options Options passed to Octokit constructor\n */\n\nfunction paginateRest(octokit) {\n return {\n paginate: Object.assign(paginate.bind(null, octokit), {\n iterator: iterator.bind(null, octokit)\n })\n };\n}\npaginateRest.VERSION = VERSION;\n\nexports.composePaginateRest = composePaginateRest;\nexports.isPaginatingEndpoint = isPaginatingEndpoint;\nexports.paginateRest = paginateRest;\nexports.paginatingEndpoints = paginatingEndpoints;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction ownKeys(object, enumerableOnly) {\n var keys = Object.keys(object);\n\n if (Object.getOwnPropertySymbols) {\n var symbols = Object.getOwnPropertySymbols(object);\n\n if (enumerableOnly) {\n symbols = symbols.filter(function (sym) {\n return Object.getOwnPropertyDescriptor(object, sym).enumerable;\n });\n }\n\n keys.push.apply(keys, symbols);\n }\n\n return keys;\n}\n\nfunction _objectSpread2(target) {\n for (var i = 1; i < arguments.length; i++) {\n var source = arguments[i] != null ? 
arguments[i] : {};\n\n if (i % 2) {\n ownKeys(Object(source), true).forEach(function (key) {\n _defineProperty(target, key, source[key]);\n });\n } else if (Object.getOwnPropertyDescriptors) {\n Object.defineProperties(target, Object.getOwnPropertyDescriptors(source));\n } else {\n ownKeys(Object(source)).forEach(function (key) {\n Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));\n });\n }\n }\n\n return target;\n}\n\nfunction _defineProperty(obj, key, value) {\n if (key in obj) {\n Object.defineProperty(obj, key, {\n value: value,\n enumerable: true,\n configurable: true,\n writable: true\n });\n } else {\n obj[key] = value;\n }\n\n return obj;\n}\n\nconst Endpoints = {\n actions: {\n addCustomLabelsToSelfHostedRunnerForOrg: [\"POST /orgs/{org}/actions/runners/{runner_id}/labels\"],\n addCustomLabelsToSelfHostedRunnerForRepo: [\"POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"],\n addSelectedRepoToOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"],\n approveWorkflowRun: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve\"],\n cancelWorkflowRun: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel\"],\n createOrUpdateEnvironmentSecret: [\"PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"],\n createOrUpdateOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}\"],\n createOrUpdateRepoSecret: [\"PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n createRegistrationTokenForOrg: [\"POST /orgs/{org}/actions/runners/registration-token\"],\n createRegistrationTokenForRepo: [\"POST /repos/{owner}/{repo}/actions/runners/registration-token\"],\n createRemoveTokenForOrg: [\"POST /orgs/{org}/actions/runners/remove-token\"],\n createRemoveTokenForRepo: [\"POST /repos/{owner}/{repo}/actions/runners/remove-token\"],\n createWorkflowDispatch: [\"POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches\"],\n deleteActionsCacheById: [\"DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}\"],\n deleteActionsCacheByKey: [\"DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}\"],\n deleteArtifact: [\"DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n deleteEnvironmentSecret: [\"DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"],\n deleteOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}\"],\n deleteRepoSecret: [\"DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n deleteSelfHostedRunnerFromOrg: [\"DELETE /orgs/{org}/actions/runners/{runner_id}\"],\n deleteSelfHostedRunnerFromRepo: [\"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}\"],\n deleteWorkflowRun: [\"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n deleteWorkflowRunLogs: [\"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs\"],\n disableSelectedRepositoryGithubActionsOrganization: [\"DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}\"],\n disableWorkflow: [\"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable\"],\n downloadArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}\"],\n downloadJobLogsForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs\"],\n downloadWorkflowRunAttemptLogs: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs\"],\n downloadWorkflowRunLogs: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs\"],\n 
enableSelectedRepositoryGithubActionsOrganization: [\"PUT /orgs/{org}/actions/permissions/repositories/{repository_id}\"],\n enableWorkflow: [\"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable\"],\n getActionsCacheList: [\"GET /repos/{owner}/{repo}/actions/caches\"],\n getActionsCacheUsage: [\"GET /repos/{owner}/{repo}/actions/cache/usage\"],\n getActionsCacheUsageByRepoForOrg: [\"GET /orgs/{org}/actions/cache/usage-by-repository\"],\n getActionsCacheUsageForEnterprise: [\"GET /enterprises/{enterprise}/actions/cache/usage\"],\n getActionsCacheUsageForOrg: [\"GET /orgs/{org}/actions/cache/usage\"],\n getAllowedActionsOrganization: [\"GET /orgs/{org}/actions/permissions/selected-actions\"],\n getAllowedActionsRepository: [\"GET /repos/{owner}/{repo}/actions/permissions/selected-actions\"],\n getArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n getEnvironmentPublicKey: [\"GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key\"],\n getEnvironmentSecret: [\"GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"],\n getGithubActionsDefaultWorkflowPermissionsEnterprise: [\"GET /enterprises/{enterprise}/actions/permissions/workflow\"],\n getGithubActionsDefaultWorkflowPermissionsOrganization: [\"GET /orgs/{org}/actions/permissions/workflow\"],\n getGithubActionsDefaultWorkflowPermissionsRepository: [\"GET /repos/{owner}/{repo}/actions/permissions/workflow\"],\n getGithubActionsPermissionsOrganization: [\"GET /orgs/{org}/actions/permissions\"],\n getGithubActionsPermissionsRepository: [\"GET /repos/{owner}/{repo}/actions/permissions\"],\n getJobForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/actions/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}\"],\n getPendingDeploymentsForRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\"],\n getRepoPermissions: [\"GET /repos/{owner}/{repo}/actions/permissions\", {}, {\n renamed: [\"actions\", \"getGithubActionsPermissionsRepository\"]\n }],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/actions/secrets/public-key\"],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n getReviewsForRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals\"],\n getSelfHostedRunnerForOrg: [\"GET /orgs/{org}/actions/runners/{runner_id}\"],\n getSelfHostedRunnerForRepo: [\"GET /repos/{owner}/{repo}/actions/runners/{runner_id}\"],\n getWorkflow: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}\"],\n getWorkflowAccessToRepository: [\"GET /repos/{owner}/{repo}/actions/permissions/access\"],\n getWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n getWorkflowRunAttempt: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}\"],\n getWorkflowRunUsage: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing\"],\n getWorkflowUsage: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing\"],\n listArtifactsForRepo: [\"GET /repos/{owner}/{repo}/actions/artifacts\"],\n listEnvironmentSecrets: [\"GET /repositories/{repository_id}/environments/{environment_name}/secrets\"],\n listJobsForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\"],\n listJobsForWorkflowRunAttempt: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs\"],\n listLabelsForSelfHostedRunnerForOrg: [\"GET 
/orgs/{org}/actions/runners/{runner_id}/labels\"],\n listLabelsForSelfHostedRunnerForRepo: [\"GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"],\n listOrgSecrets: [\"GET /orgs/{org}/actions/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/actions/secrets\"],\n listRepoWorkflows: [\"GET /repos/{owner}/{repo}/actions/workflows\"],\n listRunnerApplicationsForOrg: [\"GET /orgs/{org}/actions/runners/downloads\"],\n listRunnerApplicationsForRepo: [\"GET /repos/{owner}/{repo}/actions/runners/downloads\"],\n listSelectedReposForOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\"],\n listSelectedRepositoriesEnabledGithubActionsOrganization: [\"GET /orgs/{org}/actions/permissions/repositories\"],\n listSelfHostedRunnersForOrg: [\"GET /orgs/{org}/actions/runners\"],\n listSelfHostedRunnersForRepo: [\"GET /repos/{owner}/{repo}/actions/runners\"],\n listWorkflowRunArtifacts: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\"],\n listWorkflowRuns: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\"],\n listWorkflowRunsForRepo: [\"GET /repos/{owner}/{repo}/actions/runs\"],\n reRunJobForWorkflowRun: [\"POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun\"],\n reRunWorkflow: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun\"],\n reRunWorkflowFailedJobs: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs\"],\n removeAllCustomLabelsFromSelfHostedRunnerForOrg: [\"DELETE /orgs/{org}/actions/runners/{runner_id}/labels\"],\n removeAllCustomLabelsFromSelfHostedRunnerForRepo: [\"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"],\n removeCustomLabelFromSelfHostedRunnerForOrg: [\"DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}\"],\n removeCustomLabelFromSelfHostedRunnerForRepo: [\"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}\"],\n removeSelectedRepoFromOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"],\n reviewPendingDeploymentsForRun: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\"],\n setAllowedActionsOrganization: [\"PUT /orgs/{org}/actions/permissions/selected-actions\"],\n setAllowedActionsRepository: [\"PUT /repos/{owner}/{repo}/actions/permissions/selected-actions\"],\n setCustomLabelsForSelfHostedRunnerForOrg: [\"PUT /orgs/{org}/actions/runners/{runner_id}/labels\"],\n setCustomLabelsForSelfHostedRunnerForRepo: [\"PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"],\n setGithubActionsDefaultWorkflowPermissionsEnterprise: [\"PUT /enterprises/{enterprise}/actions/permissions/workflow\"],\n setGithubActionsDefaultWorkflowPermissionsOrganization: [\"PUT /orgs/{org}/actions/permissions/workflow\"],\n setGithubActionsDefaultWorkflowPermissionsRepository: [\"PUT /repos/{owner}/{repo}/actions/permissions/workflow\"],\n setGithubActionsPermissionsOrganization: [\"PUT /orgs/{org}/actions/permissions\"],\n setGithubActionsPermissionsRepository: [\"PUT /repos/{owner}/{repo}/actions/permissions\"],\n setSelectedReposForOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories\"],\n setSelectedRepositoriesEnabledGithubActionsOrganization: [\"PUT /orgs/{org}/actions/permissions/repositories\"],\n setWorkflowAccessToRepository: [\"PUT /repos/{owner}/{repo}/actions/permissions/access\"]\n },\n activity: {\n checkRepoIsStarredByAuthenticatedUser: [\"GET /user/starred/{owner}/{repo}\"],\n deleteRepoSubscription: [\"DELETE /repos/{owner}/{repo}/subscription\"],\n 
deleteThreadSubscription: [\"DELETE /notifications/threads/{thread_id}/subscription\"],\n getFeeds: [\"GET /feeds\"],\n getRepoSubscription: [\"GET /repos/{owner}/{repo}/subscription\"],\n getThread: [\"GET /notifications/threads/{thread_id}\"],\n getThreadSubscriptionForAuthenticatedUser: [\"GET /notifications/threads/{thread_id}/subscription\"],\n listEventsForAuthenticatedUser: [\"GET /users/{username}/events\"],\n listNotificationsForAuthenticatedUser: [\"GET /notifications\"],\n listOrgEventsForAuthenticatedUser: [\"GET /users/{username}/events/orgs/{org}\"],\n listPublicEvents: [\"GET /events\"],\n listPublicEventsForRepoNetwork: [\"GET /networks/{owner}/{repo}/events\"],\n listPublicEventsForUser: [\"GET /users/{username}/events/public\"],\n listPublicOrgEvents: [\"GET /orgs/{org}/events\"],\n listReceivedEventsForUser: [\"GET /users/{username}/received_events\"],\n listReceivedPublicEventsForUser: [\"GET /users/{username}/received_events/public\"],\n listRepoEvents: [\"GET /repos/{owner}/{repo}/events\"],\n listRepoNotificationsForAuthenticatedUser: [\"GET /repos/{owner}/{repo}/notifications\"],\n listReposStarredByAuthenticatedUser: [\"GET /user/starred\"],\n listReposStarredByUser: [\"GET /users/{username}/starred\"],\n listReposWatchedByUser: [\"GET /users/{username}/subscriptions\"],\n listStargazersForRepo: [\"GET /repos/{owner}/{repo}/stargazers\"],\n listWatchedReposForAuthenticatedUser: [\"GET /user/subscriptions\"],\n listWatchersForRepo: [\"GET /repos/{owner}/{repo}/subscribers\"],\n markNotificationsAsRead: [\"PUT /notifications\"],\n markRepoNotificationsAsRead: [\"PUT /repos/{owner}/{repo}/notifications\"],\n markThreadAsRead: [\"PATCH /notifications/threads/{thread_id}\"],\n setRepoSubscription: [\"PUT /repos/{owner}/{repo}/subscription\"],\n setThreadSubscription: [\"PUT /notifications/threads/{thread_id}/subscription\"],\n starRepoForAuthenticatedUser: [\"PUT /user/starred/{owner}/{repo}\"],\n unstarRepoForAuthenticatedUser: [\"DELETE /user/starred/{owner}/{repo}\"]\n },\n apps: {\n addRepoToInstallation: [\"PUT /user/installations/{installation_id}/repositories/{repository_id}\", {}, {\n renamed: [\"apps\", \"addRepoToInstallationForAuthenticatedUser\"]\n }],\n addRepoToInstallationForAuthenticatedUser: [\"PUT /user/installations/{installation_id}/repositories/{repository_id}\"],\n checkToken: [\"POST /applications/{client_id}/token\"],\n createFromManifest: [\"POST /app-manifests/{code}/conversions\"],\n createInstallationAccessToken: [\"POST /app/installations/{installation_id}/access_tokens\"],\n deleteAuthorization: [\"DELETE /applications/{client_id}/grant\"],\n deleteInstallation: [\"DELETE /app/installations/{installation_id}\"],\n deleteToken: [\"DELETE /applications/{client_id}/token\"],\n getAuthenticated: [\"GET /app\"],\n getBySlug: [\"GET /apps/{app_slug}\"],\n getInstallation: [\"GET /app/installations/{installation_id}\"],\n getOrgInstallation: [\"GET /orgs/{org}/installation\"],\n getRepoInstallation: [\"GET /repos/{owner}/{repo}/installation\"],\n getSubscriptionPlanForAccount: [\"GET /marketplace_listing/accounts/{account_id}\"],\n getSubscriptionPlanForAccountStubbed: [\"GET /marketplace_listing/stubbed/accounts/{account_id}\"],\n getUserInstallation: [\"GET /users/{username}/installation\"],\n getWebhookConfigForApp: [\"GET /app/hook/config\"],\n getWebhookDelivery: [\"GET /app/hook/deliveries/{delivery_id}\"],\n listAccountsForPlan: [\"GET /marketplace_listing/plans/{plan_id}/accounts\"],\n listAccountsForPlanStubbed: [\"GET 
/marketplace_listing/stubbed/plans/{plan_id}/accounts\"],\n listInstallationReposForAuthenticatedUser: [\"GET /user/installations/{installation_id}/repositories\"],\n listInstallations: [\"GET /app/installations\"],\n listInstallationsForAuthenticatedUser: [\"GET /user/installations\"],\n listPlans: [\"GET /marketplace_listing/plans\"],\n listPlansStubbed: [\"GET /marketplace_listing/stubbed/plans\"],\n listReposAccessibleToInstallation: [\"GET /installation/repositories\"],\n listSubscriptionsForAuthenticatedUser: [\"GET /user/marketplace_purchases\"],\n listSubscriptionsForAuthenticatedUserStubbed: [\"GET /user/marketplace_purchases/stubbed\"],\n listWebhookDeliveries: [\"GET /app/hook/deliveries\"],\n redeliverWebhookDelivery: [\"POST /app/hook/deliveries/{delivery_id}/attempts\"],\n removeRepoFromInstallation: [\"DELETE /user/installations/{installation_id}/repositories/{repository_id}\", {}, {\n renamed: [\"apps\", \"removeRepoFromInstallationForAuthenticatedUser\"]\n }],\n removeRepoFromInstallationForAuthenticatedUser: [\"DELETE /user/installations/{installation_id}/repositories/{repository_id}\"],\n resetToken: [\"PATCH /applications/{client_id}/token\"],\n revokeInstallationAccessToken: [\"DELETE /installation/token\"],\n scopeToken: [\"POST /applications/{client_id}/token/scoped\"],\n suspendInstallation: [\"PUT /app/installations/{installation_id}/suspended\"],\n unsuspendInstallation: [\"DELETE /app/installations/{installation_id}/suspended\"],\n updateWebhookConfigForApp: [\"PATCH /app/hook/config\"]\n },\n billing: {\n getGithubActionsBillingOrg: [\"GET /orgs/{org}/settings/billing/actions\"],\n getGithubActionsBillingUser: [\"GET /users/{username}/settings/billing/actions\"],\n getGithubAdvancedSecurityBillingGhe: [\"GET /enterprises/{enterprise}/settings/billing/advanced-security\"],\n getGithubAdvancedSecurityBillingOrg: [\"GET /orgs/{org}/settings/billing/advanced-security\"],\n getGithubPackagesBillingOrg: [\"GET /orgs/{org}/settings/billing/packages\"],\n getGithubPackagesBillingUser: [\"GET /users/{username}/settings/billing/packages\"],\n getSharedStorageBillingOrg: [\"GET /orgs/{org}/settings/billing/shared-storage\"],\n getSharedStorageBillingUser: [\"GET /users/{username}/settings/billing/shared-storage\"]\n },\n checks: {\n create: [\"POST /repos/{owner}/{repo}/check-runs\"],\n createSuite: [\"POST /repos/{owner}/{repo}/check-suites\"],\n get: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}\"],\n getSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}\"],\n listAnnotations: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\"],\n listForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\"],\n listForSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\"],\n listSuitesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\"],\n rerequestRun: [\"POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest\"],\n rerequestSuite: [\"POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest\"],\n setSuitesPreferences: [\"PATCH /repos/{owner}/{repo}/check-suites/preferences\"],\n update: [\"PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}\"]\n },\n codeScanning: {\n deleteAnalysis: [\"DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}\"],\n getAlert: [\"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\", {}, {\n renamedParameters: {\n alert_id: \"alert_number\"\n }\n }],\n getAnalysis: [\"GET 
/repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}\"],\n getSarif: [\"GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}\"],\n listAlertInstances: [\"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\"],\n listAlertsForOrg: [\"GET /orgs/{org}/code-scanning/alerts\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/code-scanning/alerts\"],\n listAlertsInstances: [\"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\", {}, {\n renamed: [\"codeScanning\", \"listAlertInstances\"]\n }],\n listRecentAnalyses: [\"GET /repos/{owner}/{repo}/code-scanning/analyses\"],\n updateAlert: [\"PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\"],\n uploadSarif: [\"POST /repos/{owner}/{repo}/code-scanning/sarifs\"]\n },\n codesOfConduct: {\n getAllCodesOfConduct: [\"GET /codes_of_conduct\"],\n getConductCode: [\"GET /codes_of_conduct/{key}\"]\n },\n codespaces: {\n addRepositoryForSecretForAuthenticatedUser: [\"PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}\"],\n codespaceMachinesForAuthenticatedUser: [\"GET /user/codespaces/{codespace_name}/machines\"],\n createForAuthenticatedUser: [\"POST /user/codespaces\"],\n createOrUpdateRepoSecret: [\"PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}\"],\n createOrUpdateSecretForAuthenticatedUser: [\"PUT /user/codespaces/secrets/{secret_name}\"],\n createWithPrForAuthenticatedUser: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces\"],\n createWithRepoForAuthenticatedUser: [\"POST /repos/{owner}/{repo}/codespaces\"],\n deleteForAuthenticatedUser: [\"DELETE /user/codespaces/{codespace_name}\"],\n deleteFromOrganization: [\"DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}\"],\n deleteRepoSecret: [\"DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}\"],\n deleteSecretForAuthenticatedUser: [\"DELETE /user/codespaces/secrets/{secret_name}\"],\n exportForAuthenticatedUser: [\"POST /user/codespaces/{codespace_name}/exports\"],\n getExportDetailsForAuthenticatedUser: [\"GET /user/codespaces/{codespace_name}/exports/{export_id}\"],\n getForAuthenticatedUser: [\"GET /user/codespaces/{codespace_name}\"],\n getPublicKeyForAuthenticatedUser: [\"GET /user/codespaces/secrets/public-key\"],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/codespaces/secrets/public-key\"],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}\"],\n getSecretForAuthenticatedUser: [\"GET /user/codespaces/secrets/{secret_name}\"],\n listDevcontainersInRepositoryForAuthenticatedUser: [\"GET /repos/{owner}/{repo}/codespaces/devcontainers\"],\n listForAuthenticatedUser: [\"GET /user/codespaces\"],\n listInOrganization: [\"GET /orgs/{org}/codespaces\", {}, {\n renamedParameters: {\n org_id: \"org\"\n }\n }],\n listInRepositoryForAuthenticatedUser: [\"GET /repos/{owner}/{repo}/codespaces\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/codespaces/secrets\"],\n listRepositoriesForSecretForAuthenticatedUser: [\"GET /user/codespaces/secrets/{secret_name}/repositories\"],\n listSecretsForAuthenticatedUser: [\"GET /user/codespaces/secrets\"],\n removeRepositoryForSecretForAuthenticatedUser: [\"DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}\"],\n repoMachinesForAuthenticatedUser: [\"GET /repos/{owner}/{repo}/codespaces/machines\"],\n setRepositoriesForSecretForAuthenticatedUser: [\"PUT /user/codespaces/secrets/{secret_name}/repositories\"],\n startForAuthenticatedUser: [\"POST 
/user/codespaces/{codespace_name}/start\"],\n stopForAuthenticatedUser: [\"POST /user/codespaces/{codespace_name}/stop\"],\n stopInOrganization: [\"POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop\"],\n updateForAuthenticatedUser: [\"PATCH /user/codespaces/{codespace_name}\"]\n },\n dependabot: {\n addSelectedRepoToOrgSecret: [\"PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}\"],\n createOrUpdateOrgSecret: [\"PUT /orgs/{org}/dependabot/secrets/{secret_name}\"],\n createOrUpdateRepoSecret: [\"PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\"],\n deleteOrgSecret: [\"DELETE /orgs/{org}/dependabot/secrets/{secret_name}\"],\n deleteRepoSecret: [\"DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/dependabot/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/dependabot/secrets/{secret_name}\"],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/dependabot/secrets/public-key\"],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\"],\n listOrgSecrets: [\"GET /orgs/{org}/dependabot/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/dependabot/secrets\"],\n listSelectedReposForOrgSecret: [\"GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories\"],\n removeSelectedRepoFromOrgSecret: [\"DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}\"],\n setSelectedReposForOrgSecret: [\"PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories\"]\n },\n dependencyGraph: {\n createRepositorySnapshot: [\"POST /repos/{owner}/{repo}/dependency-graph/snapshots\"],\n diffRange: [\"GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}\"]\n },\n emojis: {\n get: [\"GET /emojis\"]\n },\n enterpriseAdmin: {\n addCustomLabelsToSelfHostedRunnerForEnterprise: [\"POST /enterprises/{enterprise}/actions/runners/{runner_id}/labels\"],\n disableSelectedOrganizationGithubActionsEnterprise: [\"DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}\"],\n enableSelectedOrganizationGithubActionsEnterprise: [\"PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}\"],\n getAllowedActionsEnterprise: [\"GET /enterprises/{enterprise}/actions/permissions/selected-actions\"],\n getGithubActionsPermissionsEnterprise: [\"GET /enterprises/{enterprise}/actions/permissions\"],\n getServerStatistics: [\"GET /enterprise-installation/{enterprise_or_org}/server-statistics\"],\n listLabelsForSelfHostedRunnerForEnterprise: [\"GET /enterprises/{enterprise}/actions/runners/{runner_id}/labels\"],\n listSelectedOrganizationsEnabledGithubActionsEnterprise: [\"GET /enterprises/{enterprise}/actions/permissions/organizations\"],\n removeAllCustomLabelsFromSelfHostedRunnerForEnterprise: [\"DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels\"],\n removeCustomLabelFromSelfHostedRunnerForEnterprise: [\"DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels/{name}\"],\n setAllowedActionsEnterprise: [\"PUT /enterprises/{enterprise}/actions/permissions/selected-actions\"],\n setCustomLabelsForSelfHostedRunnerForEnterprise: [\"PUT /enterprises/{enterprise}/actions/runners/{runner_id}/labels\"],\n setGithubActionsPermissionsEnterprise: [\"PUT /enterprises/{enterprise}/actions/permissions\"],\n setSelectedOrganizationsEnabledGithubActionsEnterprise: [\"PUT /enterprises/{enterprise}/actions/permissions/organizations\"]\n },\n gists: {\n checkIsStarred: [\"GET /gists/{gist_id}/star\"],\n create: 
[\"POST /gists\"],\n createComment: [\"POST /gists/{gist_id}/comments\"],\n delete: [\"DELETE /gists/{gist_id}\"],\n deleteComment: [\"DELETE /gists/{gist_id}/comments/{comment_id}\"],\n fork: [\"POST /gists/{gist_id}/forks\"],\n get: [\"GET /gists/{gist_id}\"],\n getComment: [\"GET /gists/{gist_id}/comments/{comment_id}\"],\n getRevision: [\"GET /gists/{gist_id}/{sha}\"],\n list: [\"GET /gists\"],\n listComments: [\"GET /gists/{gist_id}/comments\"],\n listCommits: [\"GET /gists/{gist_id}/commits\"],\n listForUser: [\"GET /users/{username}/gists\"],\n listForks: [\"GET /gists/{gist_id}/forks\"],\n listPublic: [\"GET /gists/public\"],\n listStarred: [\"GET /gists/starred\"],\n star: [\"PUT /gists/{gist_id}/star\"],\n unstar: [\"DELETE /gists/{gist_id}/star\"],\n update: [\"PATCH /gists/{gist_id}\"],\n updateComment: [\"PATCH /gists/{gist_id}/comments/{comment_id}\"]\n },\n git: {\n createBlob: [\"POST /repos/{owner}/{repo}/git/blobs\"],\n createCommit: [\"POST /repos/{owner}/{repo}/git/commits\"],\n createRef: [\"POST /repos/{owner}/{repo}/git/refs\"],\n createTag: [\"POST /repos/{owner}/{repo}/git/tags\"],\n createTree: [\"POST /repos/{owner}/{repo}/git/trees\"],\n deleteRef: [\"DELETE /repos/{owner}/{repo}/git/refs/{ref}\"],\n getBlob: [\"GET /repos/{owner}/{repo}/git/blobs/{file_sha}\"],\n getCommit: [\"GET /repos/{owner}/{repo}/git/commits/{commit_sha}\"],\n getRef: [\"GET /repos/{owner}/{repo}/git/ref/{ref}\"],\n getTag: [\"GET /repos/{owner}/{repo}/git/tags/{tag_sha}\"],\n getTree: [\"GET /repos/{owner}/{repo}/git/trees/{tree_sha}\"],\n listMatchingRefs: [\"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\"],\n updateRef: [\"PATCH /repos/{owner}/{repo}/git/refs/{ref}\"]\n },\n gitignore: {\n getAllTemplates: [\"GET /gitignore/templates\"],\n getTemplate: [\"GET /gitignore/templates/{name}\"]\n },\n interactions: {\n getRestrictionsForAuthenticatedUser: [\"GET /user/interaction-limits\"],\n getRestrictionsForOrg: [\"GET /orgs/{org}/interaction-limits\"],\n getRestrictionsForRepo: [\"GET /repos/{owner}/{repo}/interaction-limits\"],\n getRestrictionsForYourPublicRepos: [\"GET /user/interaction-limits\", {}, {\n renamed: [\"interactions\", \"getRestrictionsForAuthenticatedUser\"]\n }],\n removeRestrictionsForAuthenticatedUser: [\"DELETE /user/interaction-limits\"],\n removeRestrictionsForOrg: [\"DELETE /orgs/{org}/interaction-limits\"],\n removeRestrictionsForRepo: [\"DELETE /repos/{owner}/{repo}/interaction-limits\"],\n removeRestrictionsForYourPublicRepos: [\"DELETE /user/interaction-limits\", {}, {\n renamed: [\"interactions\", \"removeRestrictionsForAuthenticatedUser\"]\n }],\n setRestrictionsForAuthenticatedUser: [\"PUT /user/interaction-limits\"],\n setRestrictionsForOrg: [\"PUT /orgs/{org}/interaction-limits\"],\n setRestrictionsForRepo: [\"PUT /repos/{owner}/{repo}/interaction-limits\"],\n setRestrictionsForYourPublicRepos: [\"PUT /user/interaction-limits\", {}, {\n renamed: [\"interactions\", \"setRestrictionsForAuthenticatedUser\"]\n }]\n },\n issues: {\n addAssignees: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/assignees\"],\n addLabels: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n checkUserCanBeAssigned: [\"GET /repos/{owner}/{repo}/assignees/{assignee}\"],\n create: [\"POST /repos/{owner}/{repo}/issues\"],\n createComment: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n createLabel: [\"POST /repos/{owner}/{repo}/labels\"],\n createMilestone: [\"POST /repos/{owner}/{repo}/milestones\"],\n deleteComment: [\"DELETE 
/repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n deleteLabel: [\"DELETE /repos/{owner}/{repo}/labels/{name}\"],\n deleteMilestone: [\"DELETE /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n get: [\"GET /repos/{owner}/{repo}/issues/{issue_number}\"],\n getComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n getEvent: [\"GET /repos/{owner}/{repo}/issues/events/{event_id}\"],\n getLabel: [\"GET /repos/{owner}/{repo}/labels/{name}\"],\n getMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n list: [\"GET /issues\"],\n listAssignees: [\"GET /repos/{owner}/{repo}/assignees\"],\n listComments: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n listCommentsForRepo: [\"GET /repos/{owner}/{repo}/issues/comments\"],\n listEvents: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/events\"],\n listEventsForRepo: [\"GET /repos/{owner}/{repo}/issues/events\"],\n listEventsForTimeline: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\"],\n listForAuthenticatedUser: [\"GET /user/issues\"],\n listForOrg: [\"GET /orgs/{org}/issues\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/issues\"],\n listLabelsForMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\"],\n listLabelsForRepo: [\"GET /repos/{owner}/{repo}/labels\"],\n listLabelsOnIssue: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n listMilestones: [\"GET /repos/{owner}/{repo}/milestones\"],\n lock: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n removeAllLabels: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n removeAssignees: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees\"],\n removeLabel: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}\"],\n setLabels: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n unlock: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n update: [\"PATCH /repos/{owner}/{repo}/issues/{issue_number}\"],\n updateComment: [\"PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n updateLabel: [\"PATCH /repos/{owner}/{repo}/labels/{name}\"],\n updateMilestone: [\"PATCH /repos/{owner}/{repo}/milestones/{milestone_number}\"]\n },\n licenses: {\n get: [\"GET /licenses/{license}\"],\n getAllCommonlyUsed: [\"GET /licenses\"],\n getForRepo: [\"GET /repos/{owner}/{repo}/license\"]\n },\n markdown: {\n render: [\"POST /markdown\"],\n renderRaw: [\"POST /markdown/raw\", {\n headers: {\n \"content-type\": \"text/plain; charset=utf-8\"\n }\n }]\n },\n meta: {\n get: [\"GET /meta\"],\n getOctocat: [\"GET /octocat\"],\n getZen: [\"GET /zen\"],\n root: [\"GET /\"]\n },\n migrations: {\n cancelImport: [\"DELETE /repos/{owner}/{repo}/import\"],\n deleteArchiveForAuthenticatedUser: [\"DELETE /user/migrations/{migration_id}/archive\"],\n deleteArchiveForOrg: [\"DELETE /orgs/{org}/migrations/{migration_id}/archive\"],\n downloadArchiveForOrg: [\"GET /orgs/{org}/migrations/{migration_id}/archive\"],\n getArchiveForAuthenticatedUser: [\"GET /user/migrations/{migration_id}/archive\"],\n getCommitAuthors: [\"GET /repos/{owner}/{repo}/import/authors\"],\n getImportStatus: [\"GET /repos/{owner}/{repo}/import\"],\n getLargeFiles: [\"GET /repos/{owner}/{repo}/import/large_files\"],\n getStatusForAuthenticatedUser: [\"GET /user/migrations/{migration_id}\"],\n getStatusForOrg: [\"GET /orgs/{org}/migrations/{migration_id}\"],\n listForAuthenticatedUser: [\"GET /user/migrations\"],\n listForOrg: [\"GET /orgs/{org}/migrations\"],\n 
listReposForAuthenticatedUser: [\"GET /user/migrations/{migration_id}/repositories\"],\n listReposForOrg: [\"GET /orgs/{org}/migrations/{migration_id}/repositories\"],\n listReposForUser: [\"GET /user/migrations/{migration_id}/repositories\", {}, {\n renamed: [\"migrations\", \"listReposForAuthenticatedUser\"]\n }],\n mapCommitAuthor: [\"PATCH /repos/{owner}/{repo}/import/authors/{author_id}\"],\n setLfsPreference: [\"PATCH /repos/{owner}/{repo}/import/lfs\"],\n startForAuthenticatedUser: [\"POST /user/migrations\"],\n startForOrg: [\"POST /orgs/{org}/migrations\"],\n startImport: [\"PUT /repos/{owner}/{repo}/import\"],\n unlockRepoForAuthenticatedUser: [\"DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock\"],\n unlockRepoForOrg: [\"DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock\"],\n updateImport: [\"PATCH /repos/{owner}/{repo}/import\"]\n },\n orgs: {\n blockUser: [\"PUT /orgs/{org}/blocks/{username}\"],\n cancelInvitation: [\"DELETE /orgs/{org}/invitations/{invitation_id}\"],\n checkBlockedUser: [\"GET /orgs/{org}/blocks/{username}\"],\n checkMembershipForUser: [\"GET /orgs/{org}/members/{username}\"],\n checkPublicMembershipForUser: [\"GET /orgs/{org}/public_members/{username}\"],\n convertMemberToOutsideCollaborator: [\"PUT /orgs/{org}/outside_collaborators/{username}\"],\n createInvitation: [\"POST /orgs/{org}/invitations\"],\n createWebhook: [\"POST /orgs/{org}/hooks\"],\n deleteWebhook: [\"DELETE /orgs/{org}/hooks/{hook_id}\"],\n get: [\"GET /orgs/{org}\"],\n getMembershipForAuthenticatedUser: [\"GET /user/memberships/orgs/{org}\"],\n getMembershipForUser: [\"GET /orgs/{org}/memberships/{username}\"],\n getWebhook: [\"GET /orgs/{org}/hooks/{hook_id}\"],\n getWebhookConfigForOrg: [\"GET /orgs/{org}/hooks/{hook_id}/config\"],\n getWebhookDelivery: [\"GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}\"],\n list: [\"GET /organizations\"],\n listAppInstallations: [\"GET /orgs/{org}/installations\"],\n listBlockedUsers: [\"GET /orgs/{org}/blocks\"],\n listCustomRoles: [\"GET /organizations/{organization_id}/custom_roles\"],\n listFailedInvitations: [\"GET /orgs/{org}/failed_invitations\"],\n listForAuthenticatedUser: [\"GET /user/orgs\"],\n listForUser: [\"GET /users/{username}/orgs\"],\n listInvitationTeams: [\"GET /orgs/{org}/invitations/{invitation_id}/teams\"],\n listMembers: [\"GET /orgs/{org}/members\"],\n listMembershipsForAuthenticatedUser: [\"GET /user/memberships/orgs\"],\n listOutsideCollaborators: [\"GET /orgs/{org}/outside_collaborators\"],\n listPendingInvitations: [\"GET /orgs/{org}/invitations\"],\n listPublicMembers: [\"GET /orgs/{org}/public_members\"],\n listWebhookDeliveries: [\"GET /orgs/{org}/hooks/{hook_id}/deliveries\"],\n listWebhooks: [\"GET /orgs/{org}/hooks\"],\n pingWebhook: [\"POST /orgs/{org}/hooks/{hook_id}/pings\"],\n redeliverWebhookDelivery: [\"POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts\"],\n removeMember: [\"DELETE /orgs/{org}/members/{username}\"],\n removeMembershipForUser: [\"DELETE /orgs/{org}/memberships/{username}\"],\n removeOutsideCollaborator: [\"DELETE /orgs/{org}/outside_collaborators/{username}\"],\n removePublicMembershipForAuthenticatedUser: [\"DELETE /orgs/{org}/public_members/{username}\"],\n setMembershipForUser: [\"PUT /orgs/{org}/memberships/{username}\"],\n setPublicMembershipForAuthenticatedUser: [\"PUT /orgs/{org}/public_members/{username}\"],\n unblockUser: [\"DELETE /orgs/{org}/blocks/{username}\"],\n update: [\"PATCH /orgs/{org}\"],\n 
updateMembershipForAuthenticatedUser: [\"PATCH /user/memberships/orgs/{org}\"],\n updateWebhook: [\"PATCH /orgs/{org}/hooks/{hook_id}\"],\n updateWebhookConfigForOrg: [\"PATCH /orgs/{org}/hooks/{hook_id}/config\"]\n },\n packages: {\n deletePackageForAuthenticatedUser: [\"DELETE /user/packages/{package_type}/{package_name}\"],\n deletePackageForOrg: [\"DELETE /orgs/{org}/packages/{package_type}/{package_name}\"],\n deletePackageForUser: [\"DELETE /users/{username}/packages/{package_type}/{package_name}\"],\n deletePackageVersionForAuthenticatedUser: [\"DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n deletePackageVersionForOrg: [\"DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n deletePackageVersionForUser: [\"DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n getAllPackageVersionsForAPackageOwnedByAnOrg: [\"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\", {}, {\n renamed: [\"packages\", \"getAllPackageVersionsForPackageOwnedByOrg\"]\n }],\n getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [\"GET /user/packages/{package_type}/{package_name}/versions\", {}, {\n renamed: [\"packages\", \"getAllPackageVersionsForPackageOwnedByAuthenticatedUser\"]\n }],\n getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [\"GET /user/packages/{package_type}/{package_name}/versions\"],\n getAllPackageVersionsForPackageOwnedByOrg: [\"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\"],\n getAllPackageVersionsForPackageOwnedByUser: [\"GET /users/{username}/packages/{package_type}/{package_name}/versions\"],\n getPackageForAuthenticatedUser: [\"GET /user/packages/{package_type}/{package_name}\"],\n getPackageForOrganization: [\"GET /orgs/{org}/packages/{package_type}/{package_name}\"],\n getPackageForUser: [\"GET /users/{username}/packages/{package_type}/{package_name}\"],\n getPackageVersionForAuthenticatedUser: [\"GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n getPackageVersionForOrganization: [\"GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n getPackageVersionForUser: [\"GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n listPackagesForAuthenticatedUser: [\"GET /user/packages\"],\n listPackagesForOrganization: [\"GET /orgs/{org}/packages\"],\n listPackagesForUser: [\"GET /users/{username}/packages\"],\n restorePackageForAuthenticatedUser: [\"POST /user/packages/{package_type}/{package_name}/restore{?token}\"],\n restorePackageForOrg: [\"POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}\"],\n restorePackageForUser: [\"POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}\"],\n restorePackageVersionForAuthenticatedUser: [\"POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"],\n restorePackageVersionForOrg: [\"POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"],\n restorePackageVersionForUser: [\"POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"]\n },\n projects: {\n addCollaborator: [\"PUT /projects/{project_id}/collaborators/{username}\"],\n createCard: [\"POST /projects/columns/{column_id}/cards\"],\n createColumn: [\"POST /projects/{project_id}/columns\"],\n createForAuthenticatedUser: [\"POST /user/projects\"],\n 
createForOrg: [\"POST /orgs/{org}/projects\"],\n createForRepo: [\"POST /repos/{owner}/{repo}/projects\"],\n delete: [\"DELETE /projects/{project_id}\"],\n deleteCard: [\"DELETE /projects/columns/cards/{card_id}\"],\n deleteColumn: [\"DELETE /projects/columns/{column_id}\"],\n get: [\"GET /projects/{project_id}\"],\n getCard: [\"GET /projects/columns/cards/{card_id}\"],\n getColumn: [\"GET /projects/columns/{column_id}\"],\n getPermissionForUser: [\"GET /projects/{project_id}/collaborators/{username}/permission\"],\n listCards: [\"GET /projects/columns/{column_id}/cards\"],\n listCollaborators: [\"GET /projects/{project_id}/collaborators\"],\n listColumns: [\"GET /projects/{project_id}/columns\"],\n listForOrg: [\"GET /orgs/{org}/projects\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/projects\"],\n listForUser: [\"GET /users/{username}/projects\"],\n moveCard: [\"POST /projects/columns/cards/{card_id}/moves\"],\n moveColumn: [\"POST /projects/columns/{column_id}/moves\"],\n removeCollaborator: [\"DELETE /projects/{project_id}/collaborators/{username}\"],\n update: [\"PATCH /projects/{project_id}\"],\n updateCard: [\"PATCH /projects/columns/cards/{card_id}\"],\n updateColumn: [\"PATCH /projects/columns/{column_id}\"]\n },\n pulls: {\n checkIfMerged: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n create: [\"POST /repos/{owner}/{repo}/pulls\"],\n createReplyForReviewComment: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies\"],\n createReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n createReviewComment: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments\"],\n deletePendingReview: [\"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n deleteReviewComment: [\"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n dismissReview: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals\"],\n get: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}\"],\n getReview: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n getReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n list: [\"GET /repos/{owner}/{repo}/pulls\"],\n listCommentsForReview: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\"],\n listCommits: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\"],\n listFiles: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\"],\n listRequestedReviewers: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n listReviewComments: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\"],\n listReviewCommentsForRepo: [\"GET /repos/{owner}/{repo}/pulls/comments\"],\n listReviews: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n merge: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n removeRequestedReviewers: [\"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n requestReviewers: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n submitReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events\"],\n update: [\"PATCH /repos/{owner}/{repo}/pulls/{pull_number}\"],\n updateBranch: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch\"],\n updateReview: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n updateReviewComment: [\"PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}\"]\n },\n rateLimit: {\n get: [\"GET /rate_limit\"]\n },\n 
reactions: {\n createForCommitComment: [\"POST /repos/{owner}/{repo}/comments/{comment_id}/reactions\"],\n createForIssue: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/reactions\"],\n createForIssueComment: [\"POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\"],\n createForPullRequestReviewComment: [\"POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\"],\n createForRelease: [\"POST /repos/{owner}/{repo}/releases/{release_id}/reactions\"],\n createForTeamDiscussionCommentInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\"],\n createForTeamDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\"],\n deleteForCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}\"],\n deleteForIssue: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}\"],\n deleteForIssueComment: [\"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}\"],\n deleteForPullRequestComment: [\"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}\"],\n deleteForRelease: [\"DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}\"],\n deleteForTeamDiscussion: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}\"],\n deleteForTeamDiscussionComment: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}\"],\n listForCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\"],\n listForIssue: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\"],\n listForIssueComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\"],\n listForPullRequestReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\"],\n listForRelease: [\"GET /repos/{owner}/{repo}/releases/{release_id}/reactions\"],\n listForTeamDiscussionCommentInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\"],\n listForTeamDiscussionInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\"]\n },\n repos: {\n acceptInvitation: [\"PATCH /user/repository_invitations/{invitation_id}\", {}, {\n renamed: [\"repos\", \"acceptInvitationForAuthenticatedUser\"]\n }],\n acceptInvitationForAuthenticatedUser: [\"PATCH /user/repository_invitations/{invitation_id}\"],\n addAppAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n addCollaborator: [\"PUT /repos/{owner}/{repo}/collaborators/{username}\"],\n addStatusCheckContexts: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n addTeamAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n addUserAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n checkCollaborator: [\"GET /repos/{owner}/{repo}/collaborators/{username}\"],\n checkVulnerabilityAlerts: [\"GET /repos/{owner}/{repo}/vulnerability-alerts\"],\n codeownersErrors: [\"GET /repos/{owner}/{repo}/codeowners/errors\"],\n compareCommits: [\"GET /repos/{owner}/{repo}/compare/{base}...{head}\"],\n 
compareCommitsWithBasehead: [\"GET /repos/{owner}/{repo}/compare/{basehead}\"],\n createAutolink: [\"POST /repos/{owner}/{repo}/autolinks\"],\n createCommitComment: [\"POST /repos/{owner}/{repo}/commits/{commit_sha}/comments\"],\n createCommitSignatureProtection: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"],\n createCommitStatus: [\"POST /repos/{owner}/{repo}/statuses/{sha}\"],\n createDeployKey: [\"POST /repos/{owner}/{repo}/keys\"],\n createDeployment: [\"POST /repos/{owner}/{repo}/deployments\"],\n createDeploymentStatus: [\"POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"],\n createDispatchEvent: [\"POST /repos/{owner}/{repo}/dispatches\"],\n createForAuthenticatedUser: [\"POST /user/repos\"],\n createFork: [\"POST /repos/{owner}/{repo}/forks\"],\n createInOrg: [\"POST /orgs/{org}/repos\"],\n createOrUpdateEnvironment: [\"PUT /repos/{owner}/{repo}/environments/{environment_name}\"],\n createOrUpdateFileContents: [\"PUT /repos/{owner}/{repo}/contents/{path}\"],\n createPagesSite: [\"POST /repos/{owner}/{repo}/pages\"],\n createRelease: [\"POST /repos/{owner}/{repo}/releases\"],\n createTagProtection: [\"POST /repos/{owner}/{repo}/tags/protection\"],\n createUsingTemplate: [\"POST /repos/{template_owner}/{template_repo}/generate\"],\n createWebhook: [\"POST /repos/{owner}/{repo}/hooks\"],\n declineInvitation: [\"DELETE /user/repository_invitations/{invitation_id}\", {}, {\n renamed: [\"repos\", \"declineInvitationForAuthenticatedUser\"]\n }],\n declineInvitationForAuthenticatedUser: [\"DELETE /user/repository_invitations/{invitation_id}\"],\n delete: [\"DELETE /repos/{owner}/{repo}\"],\n deleteAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"],\n deleteAdminBranchProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n deleteAnEnvironment: [\"DELETE /repos/{owner}/{repo}/environments/{environment_name}\"],\n deleteAutolink: [\"DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}\"],\n deleteBranchProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection\"],\n deleteCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}\"],\n deleteCommitSignatureProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"],\n deleteDeployKey: [\"DELETE /repos/{owner}/{repo}/keys/{key_id}\"],\n deleteDeployment: [\"DELETE /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n deleteFile: [\"DELETE /repos/{owner}/{repo}/contents/{path}\"],\n deleteInvitation: [\"DELETE /repos/{owner}/{repo}/invitations/{invitation_id}\"],\n deletePagesSite: [\"DELETE /repos/{owner}/{repo}/pages\"],\n deletePullRequestReviewProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n deleteRelease: [\"DELETE /repos/{owner}/{repo}/releases/{release_id}\"],\n deleteReleaseAsset: [\"DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n deleteTagProtection: [\"DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}\"],\n deleteWebhook: [\"DELETE /repos/{owner}/{repo}/hooks/{hook_id}\"],\n disableAutomatedSecurityFixes: [\"DELETE /repos/{owner}/{repo}/automated-security-fixes\"],\n disableLfsForRepo: [\"DELETE /repos/{owner}/{repo}/lfs\"],\n disableVulnerabilityAlerts: [\"DELETE /repos/{owner}/{repo}/vulnerability-alerts\"],\n downloadArchive: [\"GET /repos/{owner}/{repo}/zipball/{ref}\", {}, {\n renamed: [\"repos\", \"downloadZipballArchive\"]\n }],\n 
downloadTarballArchive: [\"GET /repos/{owner}/{repo}/tarball/{ref}\"],\n downloadZipballArchive: [\"GET /repos/{owner}/{repo}/zipball/{ref}\"],\n enableAutomatedSecurityFixes: [\"PUT /repos/{owner}/{repo}/automated-security-fixes\"],\n enableLfsForRepo: [\"PUT /repos/{owner}/{repo}/lfs\"],\n enableVulnerabilityAlerts: [\"PUT /repos/{owner}/{repo}/vulnerability-alerts\"],\n generateReleaseNotes: [\"POST /repos/{owner}/{repo}/releases/generate-notes\"],\n get: [\"GET /repos/{owner}/{repo}\"],\n getAccessRestrictions: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"],\n getAdminBranchProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n getAllEnvironments: [\"GET /repos/{owner}/{repo}/environments\"],\n getAllStatusCheckContexts: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\"],\n getAllTopics: [\"GET /repos/{owner}/{repo}/topics\"],\n getAppsWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\"],\n getAutolink: [\"GET /repos/{owner}/{repo}/autolinks/{autolink_id}\"],\n getBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}\"],\n getBranchProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection\"],\n getClones: [\"GET /repos/{owner}/{repo}/traffic/clones\"],\n getCodeFrequencyStats: [\"GET /repos/{owner}/{repo}/stats/code_frequency\"],\n getCollaboratorPermissionLevel: [\"GET /repos/{owner}/{repo}/collaborators/{username}/permission\"],\n getCombinedStatusForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/status\"],\n getCommit: [\"GET /repos/{owner}/{repo}/commits/{ref}\"],\n getCommitActivityStats: [\"GET /repos/{owner}/{repo}/stats/commit_activity\"],\n getCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}\"],\n getCommitSignatureProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"],\n getCommunityProfileMetrics: [\"GET /repos/{owner}/{repo}/community/profile\"],\n getContent: [\"GET /repos/{owner}/{repo}/contents/{path}\"],\n getContributorsStats: [\"GET /repos/{owner}/{repo}/stats/contributors\"],\n getDeployKey: [\"GET /repos/{owner}/{repo}/keys/{key_id}\"],\n getDeployment: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n getDeploymentStatus: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}\"],\n getEnvironment: [\"GET /repos/{owner}/{repo}/environments/{environment_name}\"],\n getLatestPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/latest\"],\n getLatestRelease: [\"GET /repos/{owner}/{repo}/releases/latest\"],\n getPages: [\"GET /repos/{owner}/{repo}/pages\"],\n getPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/{build_id}\"],\n getPagesHealthCheck: [\"GET /repos/{owner}/{repo}/pages/health\"],\n getParticipationStats: [\"GET /repos/{owner}/{repo}/stats/participation\"],\n getPullRequestReviewProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n getPunchCardStats: [\"GET /repos/{owner}/{repo}/stats/punch_card\"],\n getReadme: [\"GET /repos/{owner}/{repo}/readme\"],\n getReadmeInDirectory: [\"GET /repos/{owner}/{repo}/readme/{dir}\"],\n getRelease: [\"GET /repos/{owner}/{repo}/releases/{release_id}\"],\n getReleaseAsset: [\"GET /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n getReleaseByTag: [\"GET /repos/{owner}/{repo}/releases/tags/{tag}\"],\n getStatusChecksProtection: [\"GET 
/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n getTeamsWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\"],\n getTopPaths: [\"GET /repos/{owner}/{repo}/traffic/popular/paths\"],\n getTopReferrers: [\"GET /repos/{owner}/{repo}/traffic/popular/referrers\"],\n getUsersWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\"],\n getViews: [\"GET /repos/{owner}/{repo}/traffic/views\"],\n getWebhook: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}\"],\n getWebhookConfigForRepo: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}/config\"],\n getWebhookDelivery: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}\"],\n listAutolinks: [\"GET /repos/{owner}/{repo}/autolinks\"],\n listBranches: [\"GET /repos/{owner}/{repo}/branches\"],\n listBranchesForHeadCommit: [\"GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head\"],\n listCollaborators: [\"GET /repos/{owner}/{repo}/collaborators\"],\n listCommentsForCommit: [\"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\"],\n listCommitCommentsForRepo: [\"GET /repos/{owner}/{repo}/comments\"],\n listCommitStatusesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/statuses\"],\n listCommits: [\"GET /repos/{owner}/{repo}/commits\"],\n listContributors: [\"GET /repos/{owner}/{repo}/contributors\"],\n listDeployKeys: [\"GET /repos/{owner}/{repo}/keys\"],\n listDeploymentStatuses: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"],\n listDeployments: [\"GET /repos/{owner}/{repo}/deployments\"],\n listForAuthenticatedUser: [\"GET /user/repos\"],\n listForOrg: [\"GET /orgs/{org}/repos\"],\n listForUser: [\"GET /users/{username}/repos\"],\n listForks: [\"GET /repos/{owner}/{repo}/forks\"],\n listInvitations: [\"GET /repos/{owner}/{repo}/invitations\"],\n listInvitationsForAuthenticatedUser: [\"GET /user/repository_invitations\"],\n listLanguages: [\"GET /repos/{owner}/{repo}/languages\"],\n listPagesBuilds: [\"GET /repos/{owner}/{repo}/pages/builds\"],\n listPublic: [\"GET /repositories\"],\n listPullRequestsAssociatedWithCommit: [\"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\"],\n listReleaseAssets: [\"GET /repos/{owner}/{repo}/releases/{release_id}/assets\"],\n listReleases: [\"GET /repos/{owner}/{repo}/releases\"],\n listTagProtection: [\"GET /repos/{owner}/{repo}/tags/protection\"],\n listTags: [\"GET /repos/{owner}/{repo}/tags\"],\n listTeams: [\"GET /repos/{owner}/{repo}/teams\"],\n listWebhookDeliveries: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries\"],\n listWebhooks: [\"GET /repos/{owner}/{repo}/hooks\"],\n merge: [\"POST /repos/{owner}/{repo}/merges\"],\n mergeUpstream: [\"POST /repos/{owner}/{repo}/merge-upstream\"],\n pingWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/pings\"],\n redeliverWebhookDelivery: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts\"],\n removeAppAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n removeCollaborator: [\"DELETE /repos/{owner}/{repo}/collaborators/{username}\"],\n removeStatusCheckContexts: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n removeStatusCheckProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n removeTeamAccessRestrictions: [\"DELETE 
/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n removeUserAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n renameBranch: [\"POST /repos/{owner}/{repo}/branches/{branch}/rename\"],\n replaceAllTopics: [\"PUT /repos/{owner}/{repo}/topics\"],\n requestPagesBuild: [\"POST /repos/{owner}/{repo}/pages/builds\"],\n setAdminBranchProtection: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n setAppAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n setStatusCheckContexts: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n setTeamAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n setUserAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n testPushWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/tests\"],\n transfer: [\"POST /repos/{owner}/{repo}/transfer\"],\n update: [\"PATCH /repos/{owner}/{repo}\"],\n updateBranchProtection: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection\"],\n updateCommitComment: [\"PATCH /repos/{owner}/{repo}/comments/{comment_id}\"],\n updateInformationAboutPagesSite: [\"PUT /repos/{owner}/{repo}/pages\"],\n updateInvitation: [\"PATCH /repos/{owner}/{repo}/invitations/{invitation_id}\"],\n updatePullRequestReviewProtection: [\"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n updateRelease: [\"PATCH /repos/{owner}/{repo}/releases/{release_id}\"],\n updateReleaseAsset: [\"PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n updateStatusCheckPotection: [\"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\", {}, {\n renamed: [\"repos\", \"updateStatusCheckProtection\"]\n }],\n updateStatusCheckProtection: [\"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n updateWebhook: [\"PATCH /repos/{owner}/{repo}/hooks/{hook_id}\"],\n updateWebhookConfigForRepo: [\"PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config\"],\n uploadReleaseAsset: [\"POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}\", {\n baseUrl: \"https://uploads.github.com\"\n }]\n },\n search: {\n code: [\"GET /search/code\"],\n commits: [\"GET /search/commits\"],\n issuesAndPullRequests: [\"GET /search/issues\"],\n labels: [\"GET /search/labels\"],\n repos: [\"GET /search/repositories\"],\n topics: [\"GET /search/topics\"],\n users: [\"GET /search/users\"]\n },\n secretScanning: {\n getAlert: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\"],\n listAlertsForEnterprise: [\"GET /enterprises/{enterprise}/secret-scanning/alerts\"],\n listAlertsForOrg: [\"GET /orgs/{org}/secret-scanning/alerts\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts\"],\n listLocationsForAlert: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations\"],\n updateAlert: [\"PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\"]\n },\n teams: {\n addOrUpdateMembershipForUserInOrg: [\"PUT /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n addOrUpdateProjectPermissionsInOrg: [\"PUT 
/orgs/{org}/teams/{team_slug}/projects/{project_id}\"],\n addOrUpdateRepoPermissionsInOrg: [\"PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n checkPermissionsForProjectInOrg: [\"GET /orgs/{org}/teams/{team_slug}/projects/{project_id}\"],\n checkPermissionsForRepoInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n create: [\"POST /orgs/{org}/teams\"],\n createDiscussionCommentInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"],\n createDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions\"],\n deleteDiscussionCommentInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n deleteDiscussionInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n deleteInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}\"],\n getByName: [\"GET /orgs/{org}/teams/{team_slug}\"],\n getDiscussionCommentInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n getDiscussionInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n getMembershipForUserInOrg: [\"GET /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n list: [\"GET /orgs/{org}/teams\"],\n listChildInOrg: [\"GET /orgs/{org}/teams/{team_slug}/teams\"],\n listDiscussionCommentsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"],\n listDiscussionsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions\"],\n listForAuthenticatedUser: [\"GET /user/teams\"],\n listMembersInOrg: [\"GET /orgs/{org}/teams/{team_slug}/members\"],\n listPendingInvitationsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/invitations\"],\n listProjectsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/projects\"],\n listReposInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos\"],\n removeMembershipForUserInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n removeProjectInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}\"],\n removeRepoInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n updateDiscussionCommentInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n updateDiscussionInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n updateInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}\"]\n },\n users: {\n addEmailForAuthenticated: [\"POST /user/emails\", {}, {\n renamed: [\"users\", \"addEmailForAuthenticatedUser\"]\n }],\n addEmailForAuthenticatedUser: [\"POST /user/emails\"],\n block: [\"PUT /user/blocks/{username}\"],\n checkBlocked: [\"GET /user/blocks/{username}\"],\n checkFollowingForUser: [\"GET /users/{username}/following/{target_user}\"],\n checkPersonIsFollowedByAuthenticated: [\"GET /user/following/{username}\"],\n createGpgKeyForAuthenticated: [\"POST /user/gpg_keys\", {}, {\n renamed: [\"users\", \"createGpgKeyForAuthenticatedUser\"]\n }],\n createGpgKeyForAuthenticatedUser: [\"POST /user/gpg_keys\"],\n createPublicSshKeyForAuthenticated: [\"POST /user/keys\", {}, {\n renamed: [\"users\", \"createPublicSshKeyForAuthenticatedUser\"]\n }],\n createPublicSshKeyForAuthenticatedUser: [\"POST /user/keys\"],\n deleteEmailForAuthenticated: [\"DELETE /user/emails\", {}, {\n renamed: [\"users\", \"deleteEmailForAuthenticatedUser\"]\n }],\n deleteEmailForAuthenticatedUser: [\"DELETE /user/emails\"],\n deleteGpgKeyForAuthenticated: [\"DELETE /user/gpg_keys/{gpg_key_id}\", {}, {\n 
renamed: [\"users\", \"deleteGpgKeyForAuthenticatedUser\"]\n }],\n deleteGpgKeyForAuthenticatedUser: [\"DELETE /user/gpg_keys/{gpg_key_id}\"],\n deletePublicSshKeyForAuthenticated: [\"DELETE /user/keys/{key_id}\", {}, {\n renamed: [\"users\", \"deletePublicSshKeyForAuthenticatedUser\"]\n }],\n deletePublicSshKeyForAuthenticatedUser: [\"DELETE /user/keys/{key_id}\"],\n follow: [\"PUT /user/following/{username}\"],\n getAuthenticated: [\"GET /user\"],\n getByUsername: [\"GET /users/{username}\"],\n getContextForUser: [\"GET /users/{username}/hovercard\"],\n getGpgKeyForAuthenticated: [\"GET /user/gpg_keys/{gpg_key_id}\", {}, {\n renamed: [\"users\", \"getGpgKeyForAuthenticatedUser\"]\n }],\n getGpgKeyForAuthenticatedUser: [\"GET /user/gpg_keys/{gpg_key_id}\"],\n getPublicSshKeyForAuthenticated: [\"GET /user/keys/{key_id}\", {}, {\n renamed: [\"users\", \"getPublicSshKeyForAuthenticatedUser\"]\n }],\n getPublicSshKeyForAuthenticatedUser: [\"GET /user/keys/{key_id}\"],\n list: [\"GET /users\"],\n listBlockedByAuthenticated: [\"GET /user/blocks\", {}, {\n renamed: [\"users\", \"listBlockedByAuthenticatedUser\"]\n }],\n listBlockedByAuthenticatedUser: [\"GET /user/blocks\"],\n listEmailsForAuthenticated: [\"GET /user/emails\", {}, {\n renamed: [\"users\", \"listEmailsForAuthenticatedUser\"]\n }],\n listEmailsForAuthenticatedUser: [\"GET /user/emails\"],\n listFollowedByAuthenticated: [\"GET /user/following\", {}, {\n renamed: [\"users\", \"listFollowedByAuthenticatedUser\"]\n }],\n listFollowedByAuthenticatedUser: [\"GET /user/following\"],\n listFollowersForAuthenticatedUser: [\"GET /user/followers\"],\n listFollowersForUser: [\"GET /users/{username}/followers\"],\n listFollowingForUser: [\"GET /users/{username}/following\"],\n listGpgKeysForAuthenticated: [\"GET /user/gpg_keys\", {}, {\n renamed: [\"users\", \"listGpgKeysForAuthenticatedUser\"]\n }],\n listGpgKeysForAuthenticatedUser: [\"GET /user/gpg_keys\"],\n listGpgKeysForUser: [\"GET /users/{username}/gpg_keys\"],\n listPublicEmailsForAuthenticated: [\"GET /user/public_emails\", {}, {\n renamed: [\"users\", \"listPublicEmailsForAuthenticatedUser\"]\n }],\n listPublicEmailsForAuthenticatedUser: [\"GET /user/public_emails\"],\n listPublicKeysForUser: [\"GET /users/{username}/keys\"],\n listPublicSshKeysForAuthenticated: [\"GET /user/keys\", {}, {\n renamed: [\"users\", \"listPublicSshKeysForAuthenticatedUser\"]\n }],\n listPublicSshKeysForAuthenticatedUser: [\"GET /user/keys\"],\n setPrimaryEmailVisibilityForAuthenticated: [\"PATCH /user/email/visibility\", {}, {\n renamed: [\"users\", \"setPrimaryEmailVisibilityForAuthenticatedUser\"]\n }],\n setPrimaryEmailVisibilityForAuthenticatedUser: [\"PATCH /user/email/visibility\"],\n unblock: [\"DELETE /user/blocks/{username}\"],\n unfollow: [\"DELETE /user/following/{username}\"],\n updateAuthenticated: [\"PATCH /user\"]\n }\n};\n\nconst VERSION = \"5.16.2\";\n\nfunction endpointsToMethods(octokit, endpointsMap) {\n const newMethods = {};\n\n for (const [scope, endpoints] of Object.entries(endpointsMap)) {\n for (const [methodName, endpoint] of Object.entries(endpoints)) {\n const [route, defaults, decorations] = endpoint;\n const [method, url] = route.split(/ /);\n const endpointDefaults = Object.assign({\n method,\n url\n }, defaults);\n\n if (!newMethods[scope]) {\n newMethods[scope] = {};\n }\n\n const scopeMethods = newMethods[scope];\n\n if (decorations) {\n scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations);\n continue;\n }\n\n 
scopeMethods[methodName] = octokit.request.defaults(endpointDefaults);\n }\n }\n\n return newMethods;\n}\n\nfunction decorate(octokit, scope, methodName, defaults, decorations) {\n const requestWithDefaults = octokit.request.defaults(defaults);\n /* istanbul ignore next */\n\n function withDecorations(...args) {\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData`\n\n if (decorations.mapToData) {\n options = Object.assign({}, options, {\n data: options[decorations.mapToData],\n [decorations.mapToData]: undefined\n });\n return requestWithDefaults(options);\n }\n\n if (decorations.renamed) {\n const [newScope, newMethodName] = decorations.renamed;\n octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`);\n }\n\n if (decorations.deprecated) {\n octokit.log.warn(decorations.deprecated);\n }\n\n if (decorations.renamedParameters) {\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n const options = requestWithDefaults.endpoint.merge(...args);\n\n for (const [name, alias] of Object.entries(decorations.renamedParameters)) {\n if (name in options) {\n octokit.log.warn(`\"${name}\" parameter is deprecated for \"octokit.${scope}.${methodName}()\". Use \"${alias}\" instead`);\n\n if (!(alias in options)) {\n options[alias] = options[name];\n }\n\n delete options[name];\n }\n }\n\n return requestWithDefaults(options);\n } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n\n\n return requestWithDefaults(...args);\n }\n\n return Object.assign(withDecorations, requestWithDefaults);\n}\n\nfunction restEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit, Endpoints);\n return {\n rest: api\n };\n}\nrestEndpointMethods.VERSION = VERSION;\nfunction legacyRestEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit, Endpoints);\n return _objectSpread2(_objectSpread2({}, api), {}, {\n rest: api\n });\n}\nlegacyRestEndpointMethods.VERSION = VERSION;\n\nexports.legacyRestEndpointMethods = legacyRestEndpointMethods;\nexports.restEndpointMethods = restEndpointMethods;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; }\n\nvar deprecation = require('deprecation');\nvar once = _interopDefault(require('once'));\n\nconst logOnceCode = once(deprecation => console.warn(deprecation));\nconst logOnceHeaders = once(deprecation => console.warn(deprecation));\n/**\n * Error with extra properties to help with debugging\n */\n\nclass RequestError extends Error {\n constructor(message, statusCode, options) {\n super(message); // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n\n this.name = \"HttpError\";\n this.status = statusCode;\n let headers;\n\n if (\"headers\" in options && typeof options.headers !== \"undefined\") {\n headers = options.headers;\n }\n\n if (\"response\" in options) {\n this.response = options.response;\n headers = options.response.headers;\n } // redact request credentials without mutating original request options\n\n\n const requestCopy = Object.assign({}, options.request);\n\n if (options.request.headers.authorization) {\n requestCopy.headers = Object.assign({}, options.request.headers, {\n authorization: options.request.headers.authorization.replace(/ .*$/, \" [REDACTED]\")\n });\n }\n\n requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit\n // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications\n .replace(/\\bclient_secret=\\w+/g, \"client_secret=[REDACTED]\") // OAuth tokens can be passed as URL query parameters, although it is not recommended\n // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header\n .replace(/\\baccess_token=\\w+/g, \"access_token=[REDACTED]\");\n this.request = requestCopy; // deprecations\n\n Object.defineProperty(this, \"code\", {\n get() {\n logOnceCode(new deprecation.Deprecation(\"[@octokit/request-error] `error.code` is deprecated, use `error.status`.\"));\n return statusCode;\n }\n\n });\n Object.defineProperty(this, \"headers\", {\n get() {\n logOnceHeaders(new deprecation.Deprecation(\"[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.\"));\n return headers || {};\n }\n\n });\n }\n\n}\n\nexports.RequestError = RequestError;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }\n\nvar endpoint = require('@octokit/endpoint');\nvar universalUserAgent = require('universal-user-agent');\nvar isPlainObject = require('is-plain-object');\nvar nodeFetch = _interopDefault(require('node-fetch'));\nvar requestError = require('@octokit/request-error');\n\nconst VERSION = \"5.6.3\";\n\nfunction getBufferResponse(response) {\n return response.arrayBuffer();\n}\n\nfunction fetchWrapper(requestOptions) {\n const log = requestOptions.request && requestOptions.request.log ? 
requestOptions.request.log : console;\n\n if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {\n requestOptions.body = JSON.stringify(requestOptions.body);\n }\n\n let headers = {};\n let status;\n let url;\n const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch;\n return fetch(requestOptions.url, Object.assign({\n method: requestOptions.method,\n body: requestOptions.body,\n headers: requestOptions.headers,\n redirect: requestOptions.redirect\n }, // `requestOptions.request.agent` type is incompatible\n // see https://github.com/octokit/types.ts/pull/264\n requestOptions.request)).then(async response => {\n url = response.url;\n status = response.status;\n\n for (const keyAndValue of response.headers) {\n headers[keyAndValue[0]] = keyAndValue[1];\n }\n\n if (\"deprecation\" in headers) {\n const matches = headers.link && headers.link.match(/<([^>]+)>; rel=\"deprecation\"/);\n const deprecationLink = matches && matches.pop();\n log.warn(`[@octokit/request] \"${requestOptions.method} ${requestOptions.url}\" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : \"\"}`);\n }\n\n if (status === 204 || status === 205) {\n return;\n } // GitHub API returns 200 for HEAD requests\n\n\n if (requestOptions.method === \"HEAD\") {\n if (status < 400) {\n return;\n }\n\n throw new requestError.RequestError(response.statusText, status, {\n response: {\n url,\n status,\n headers,\n data: undefined\n },\n request: requestOptions\n });\n }\n\n if (status === 304) {\n throw new requestError.RequestError(\"Not modified\", status, {\n response: {\n url,\n status,\n headers,\n data: await getResponseData(response)\n },\n request: requestOptions\n });\n }\n\n if (status >= 400) {\n const data = await getResponseData(response);\n const error = new requestError.RequestError(toErrorMessage(data), status, {\n response: {\n url,\n status,\n headers,\n data\n },\n request: requestOptions\n });\n throw error;\n }\n\n return getResponseData(response);\n }).then(data => {\n return {\n status,\n url,\n headers,\n data\n };\n }).catch(error => {\n if (error instanceof requestError.RequestError) throw error;\n throw new requestError.RequestError(error.message, 500, {\n request: requestOptions\n });\n });\n}\n\nasync function getResponseData(response) {\n const contentType = response.headers.get(\"content-type\");\n\n if (/application\\/json/.test(contentType)) {\n return response.json();\n }\n\n if (!contentType || /^text\\/|charset=utf-8$/.test(contentType)) {\n return response.text();\n }\n\n return getBufferResponse(response);\n}\n\nfunction toErrorMessage(data) {\n if (typeof data === \"string\") return data; // istanbul ignore else - just in case\n\n if (\"message\" in data) {\n if (Array.isArray(data.errors)) {\n return `${data.message}: ${data.errors.map(JSON.stringify).join(\", \")}`;\n }\n\n return data.message;\n } // istanbul ignore next - just in case\n\n\n return `Unknown error: ${JSON.stringify(data)}`;\n}\n\nfunction withDefaults(oldEndpoint, newDefaults) {\n const endpoint = oldEndpoint.defaults(newDefaults);\n\n const newApi = function (route, parameters) {\n const endpointOptions = endpoint.merge(route, parameters);\n\n if (!endpointOptions.request || !endpointOptions.request.hook) {\n return fetchWrapper(endpoint.parse(endpointOptions));\n }\n\n const request = (route, parameters) => {\n return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters)));\n };\n\n 
Object.assign(request, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint)\n });\n return endpointOptions.request.hook(request, endpointOptions);\n };\n\n return Object.assign(newApi, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint)\n });\n}\n\nconst request = withDefaults(endpoint.endpoint, {\n headers: {\n \"user-agent\": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}`\n }\n});\n\nexports.request = request;\n//# sourceMappingURL=index.js.map\n","var register = require('./lib/register')\nvar addHook = require('./lib/add')\nvar removeHook = require('./lib/remove')\n\n// bind with array of arguments: https://stackoverflow.com/a/21792913\nvar bind = Function.bind\nvar bindable = bind.bind(bind)\n\nfunction bindApi (hook, state, name) {\n var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state])\n hook.api = { remove: removeHookRef }\n hook.remove = removeHookRef\n\n ;['before', 'error', 'after', 'wrap'].forEach(function (kind) {\n var args = name ? [state, kind, name] : [state, kind]\n hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args)\n })\n}\n\nfunction HookSingular () {\n var singularHookName = 'h'\n var singularHookState = {\n registry: {}\n }\n var singularHook = register.bind(null, singularHookState, singularHookName)\n bindApi(singularHook, singularHookState, singularHookName)\n return singularHook\n}\n\nfunction HookCollection () {\n var state = {\n registry: {}\n }\n\n var hook = register.bind(null, state)\n bindApi(hook, state)\n\n return hook\n}\n\nvar collectionHookDeprecationMessageDisplayed = false\nfunction Hook () {\n if (!collectionHookDeprecationMessageDisplayed) {\n console.warn('[before-after-hook]: \"Hook()\" repurposing warning, use \"Hook.Collection()\". 
Read more: https://git.io/upgrade-before-after-hook-to-1.4')\n collectionHookDeprecationMessageDisplayed = true\n }\n return HookCollection()\n}\n\nHook.Singular = HookSingular.bind()\nHook.Collection = HookCollection.bind()\n\nmodule.exports = Hook\n// expose constructors as a named property for TypeScript\nmodule.exports.Hook = Hook\nmodule.exports.Singular = Hook.Singular\nmodule.exports.Collection = Hook.Collection\n","module.exports = addHook;\n\nfunction addHook(state, kind, name, hook) {\n var orig = hook;\n if (!state.registry[name]) {\n state.registry[name] = [];\n }\n\n if (kind === \"before\") {\n hook = function (method, options) {\n return Promise.resolve()\n .then(orig.bind(null, options))\n .then(method.bind(null, options));\n };\n }\n\n if (kind === \"after\") {\n hook = function (method, options) {\n var result;\n return Promise.resolve()\n .then(method.bind(null, options))\n .then(function (result_) {\n result = result_;\n return orig(result, options);\n })\n .then(function () {\n return result;\n });\n };\n }\n\n if (kind === \"error\") {\n hook = function (method, options) {\n return Promise.resolve()\n .then(method.bind(null, options))\n .catch(function (error) {\n return orig(error, options);\n });\n };\n }\n\n state.registry[name].push({\n hook: hook,\n orig: orig,\n });\n}\n","module.exports = register;\n\nfunction register(state, name, method, options) {\n if (typeof method !== \"function\") {\n throw new Error(\"method for before hook must be a function\");\n }\n\n if (!options) {\n options = {};\n }\n\n if (Array.isArray(name)) {\n return name.reverse().reduce(function (callback, name) {\n return register.bind(null, state, name, callback, options);\n }, method)();\n }\n\n return Promise.resolve().then(function () {\n if (!state.registry[name]) {\n return method(options);\n }\n\n return state.registry[name].reduce(function (method, registered) {\n return registered.hook.bind(null, method, options);\n }, method)();\n });\n}\n","module.exports = removeHook;\n\nfunction removeHook(state, name, method) {\n if (!state.registry[name]) {\n return;\n }\n\n var index = state.registry[name]\n .map(function (registered) {\n return registered.orig;\n })\n .indexOf(method);\n\n if (index === -1) {\n return;\n }\n\n state.registry[name].splice(index, 1);\n}\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nclass Deprecation extends Error {\n constructor(message) {\n super(message); // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n\n this.name = 'Deprecation';\n }\n\n}\n\nexports.Deprecation = Deprecation;\n","'use strict'\n\nconst fs = require('graceful-fs')\nconst path = require('path')\nconst mkdirsSync = require('../mkdirs').mkdirsSync\nconst utimesMillisSync = require('../util/utimes').utimesMillisSync\nconst stat = require('../util/stat')\n\nfunction copySync (src, dest, opts) {\n if (typeof opts === 'function') {\n opts = { filter: opts }\n }\n\n opts = opts || {}\n opts.clobber = 'clobber' in opts ? !!opts.clobber : true // default to true for now\n opts.overwrite = 'overwrite' in opts ? 
!!opts.overwrite : opts.clobber // overwrite falls back to clobber\n\n // Warn about using preserveTimestamps on 32-bit node\n if (opts.preserveTimestamps && process.arch === 'ia32') {\n process.emitWarning(\n 'Using the preserveTimestamps option in 32-bit node is not recommended;\\n\\n' +\n '\\tsee https://github.com/jprichardson/node-fs-extra/issues/269',\n 'Warning', 'fs-extra-WARN0002'\n )\n }\n\n const { srcStat, destStat } = stat.checkPathsSync(src, dest, 'copy', opts)\n stat.checkParentPathsSync(src, srcStat, dest, 'copy')\n return handleFilterAndCopy(destStat, src, dest, opts)\n}\n\nfunction handleFilterAndCopy (destStat, src, dest, opts) {\n if (opts.filter && !opts.filter(src, dest)) return\n const destParent = path.dirname(dest)\n if (!fs.existsSync(destParent)) mkdirsSync(destParent)\n return getStats(destStat, src, dest, opts)\n}\n\nfunction startCopy (destStat, src, dest, opts) {\n if (opts.filter && !opts.filter(src, dest)) return\n return getStats(destStat, src, dest, opts)\n}\n\nfunction getStats (destStat, src, dest, opts) {\n const statSync = opts.dereference ? fs.statSync : fs.lstatSync\n const srcStat = statSync(src)\n\n if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts)\n else if (srcStat.isFile() ||\n srcStat.isCharacterDevice() ||\n srcStat.isBlockDevice()) return onFile(srcStat, destStat, src, dest, opts)\n else if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts)\n else if (srcStat.isSocket()) throw new Error(`Cannot copy a socket file: ${src}`)\n else if (srcStat.isFIFO()) throw new Error(`Cannot copy a FIFO pipe: ${src}`)\n throw new Error(`Unknown file: ${src}`)\n}\n\nfunction onFile (srcStat, destStat, src, dest, opts) {\n if (!destStat) return copyFile(srcStat, src, dest, opts)\n return mayCopyFile(srcStat, src, dest, opts)\n}\n\nfunction mayCopyFile (srcStat, src, dest, opts) {\n if (opts.overwrite) {\n fs.unlinkSync(dest)\n return copyFile(srcStat, src, dest, opts)\n } else if (opts.errorOnExist) {\n throw new Error(`'${dest}' already exists`)\n }\n}\n\nfunction copyFile (srcStat, src, dest, opts) {\n fs.copyFileSync(src, dest)\n if (opts.preserveTimestamps) handleTimestamps(srcStat.mode, src, dest)\n return setDestMode(dest, srcStat.mode)\n}\n\nfunction handleTimestamps (srcMode, src, dest) {\n // Make sure the file is writable before setting the timestamp\n // otherwise open fails with EPERM when invoked with 'r+'\n // (through utimes call)\n if (fileIsNotWritable(srcMode)) makeFileWritable(dest, srcMode)\n return setDestTimestamps(src, dest)\n}\n\nfunction fileIsNotWritable (srcMode) {\n return (srcMode & 0o200) === 0\n}\n\nfunction makeFileWritable (dest, srcMode) {\n return setDestMode(dest, srcMode | 0o200)\n}\n\nfunction setDestMode (dest, srcMode) {\n return fs.chmodSync(dest, srcMode)\n}\n\nfunction setDestTimestamps (src, dest) {\n // The initial srcStat.atime cannot be trusted\n // because it is modified by the read(2) system call\n // (See https://nodejs.org/api/fs.html#fs_stat_time_values)\n const updatedSrcStat = fs.statSync(src)\n return utimesMillisSync(dest, updatedSrcStat.atime, updatedSrcStat.mtime)\n}\n\nfunction onDir (srcStat, destStat, src, dest, opts) {\n if (!destStat) return mkDirAndCopy(srcStat.mode, src, dest, opts)\n return copyDir(src, dest, opts)\n}\n\nfunction mkDirAndCopy (srcMode, src, dest, opts) {\n fs.mkdirSync(dest)\n copyDir(src, dest, opts)\n return setDestMode(dest, srcMode)\n}\n\nfunction copyDir (src, dest, opts) {\n fs.readdirSync(src).forEach(item => copyDirItem(item, 
src, dest, opts))\n}\n\nfunction copyDirItem (item, src, dest, opts) {\n const srcItem = path.join(src, item)\n const destItem = path.join(dest, item)\n const { destStat } = stat.checkPathsSync(srcItem, destItem, 'copy', opts)\n return startCopy(destStat, srcItem, destItem, opts)\n}\n\nfunction onLink (destStat, src, dest, opts) {\n let resolvedSrc = fs.readlinkSync(src)\n if (opts.dereference) {\n resolvedSrc = path.resolve(process.cwd(), resolvedSrc)\n }\n\n if (!destStat) {\n return fs.symlinkSync(resolvedSrc, dest)\n } else {\n let resolvedDest\n try {\n resolvedDest = fs.readlinkSync(dest)\n } catch (err) {\n // dest exists and is a regular file or directory,\n // Windows may throw UNKNOWN error. If dest already exists,\n // fs throws error anyway, so no need to guard against it here.\n if (err.code === 'EINVAL' || err.code === 'UNKNOWN') return fs.symlinkSync(resolvedSrc, dest)\n throw err\n }\n if (opts.dereference) {\n resolvedDest = path.resolve(process.cwd(), resolvedDest)\n }\n if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) {\n throw new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`)\n }\n\n // prevent copy if src is a subdir of dest since unlinking\n // dest in this case would result in removing src contents\n // and therefore a broken symlink would be created.\n if (fs.statSync(dest).isDirectory() && stat.isSrcSubdir(resolvedDest, resolvedSrc)) {\n throw new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`)\n }\n return copyLink(resolvedSrc, dest)\n }\n}\n\nfunction copyLink (resolvedSrc, dest) {\n fs.unlinkSync(dest)\n return fs.symlinkSync(resolvedSrc, dest)\n}\n\nmodule.exports = copySync\n","'use strict'\n\nconst fs = require('graceful-fs')\nconst path = require('path')\nconst mkdirs = require('../mkdirs').mkdirs\nconst pathExists = require('../path-exists').pathExists\nconst utimesMillis = require('../util/utimes').utimesMillis\nconst stat = require('../util/stat')\n\nfunction copy (src, dest, opts, cb) {\n if (typeof opts === 'function' && !cb) {\n cb = opts\n opts = {}\n } else if (typeof opts === 'function') {\n opts = { filter: opts }\n }\n\n cb = cb || function () {}\n opts = opts || {}\n\n opts.clobber = 'clobber' in opts ? !!opts.clobber : true // default to true for now\n opts.overwrite = 'overwrite' in opts ? 
!!opts.overwrite : opts.clobber // overwrite falls back to clobber\n\n // Warn about using preserveTimestamps on 32-bit node\n if (opts.preserveTimestamps && process.arch === 'ia32') {\n process.emitWarning(\n 'Using the preserveTimestamps option in 32-bit node is not recommended;\\n\\n' +\n '\\tsee https://github.com/jprichardson/node-fs-extra/issues/269',\n 'Warning', 'fs-extra-WARN0001'\n )\n }\n\n stat.checkPaths(src, dest, 'copy', opts, (err, stats) => {\n if (err) return cb(err)\n const { srcStat, destStat } = stats\n stat.checkParentPaths(src, srcStat, dest, 'copy', err => {\n if (err) return cb(err)\n if (opts.filter) return handleFilter(checkParentDir, destStat, src, dest, opts, cb)\n return checkParentDir(destStat, src, dest, opts, cb)\n })\n })\n}\n\nfunction checkParentDir (destStat, src, dest, opts, cb) {\n const destParent = path.dirname(dest)\n pathExists(destParent, (err, dirExists) => {\n if (err) return cb(err)\n if (dirExists) return getStats(destStat, src, dest, opts, cb)\n mkdirs(destParent, err => {\n if (err) return cb(err)\n return getStats(destStat, src, dest, opts, cb)\n })\n })\n}\n\nfunction handleFilter (onInclude, destStat, src, dest, opts, cb) {\n Promise.resolve(opts.filter(src, dest)).then(include => {\n if (include) return onInclude(destStat, src, dest, opts, cb)\n return cb()\n }, error => cb(error))\n}\n\nfunction startCopy (destStat, src, dest, opts, cb) {\n if (opts.filter) return handleFilter(getStats, destStat, src, dest, opts, cb)\n return getStats(destStat, src, dest, opts, cb)\n}\n\nfunction getStats (destStat, src, dest, opts, cb) {\n const stat = opts.dereference ? fs.stat : fs.lstat\n stat(src, (err, srcStat) => {\n if (err) return cb(err)\n\n if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts, cb)\n else if (srcStat.isFile() ||\n srcStat.isCharacterDevice() ||\n srcStat.isBlockDevice()) return onFile(srcStat, destStat, src, dest, opts, cb)\n else if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts, cb)\n else if (srcStat.isSocket()) return cb(new Error(`Cannot copy a socket file: ${src}`))\n else if (srcStat.isFIFO()) return cb(new Error(`Cannot copy a FIFO pipe: ${src}`))\n return cb(new Error(`Unknown file: ${src}`))\n })\n}\n\nfunction onFile (srcStat, destStat, src, dest, opts, cb) {\n if (!destStat) return copyFile(srcStat, src, dest, opts, cb)\n return mayCopyFile(srcStat, src, dest, opts, cb)\n}\n\nfunction mayCopyFile (srcStat, src, dest, opts, cb) {\n if (opts.overwrite) {\n fs.unlink(dest, err => {\n if (err) return cb(err)\n return copyFile(srcStat, src, dest, opts, cb)\n })\n } else if (opts.errorOnExist) {\n return cb(new Error(`'${dest}' already exists`))\n } else return cb()\n}\n\nfunction copyFile (srcStat, src, dest, opts, cb) {\n fs.copyFile(src, dest, err => {\n if (err) return cb(err)\n if (opts.preserveTimestamps) return handleTimestampsAndMode(srcStat.mode, src, dest, cb)\n return setDestMode(dest, srcStat.mode, cb)\n })\n}\n\nfunction handleTimestampsAndMode (srcMode, src, dest, cb) {\n // Make sure the file is writable before setting the timestamp\n // otherwise open fails with EPERM when invoked with 'r+'\n // (through utimes call)\n if (fileIsNotWritable(srcMode)) {\n return makeFileWritable(dest, srcMode, err => {\n if (err) return cb(err)\n return setDestTimestampsAndMode(srcMode, src, dest, cb)\n })\n }\n return setDestTimestampsAndMode(srcMode, src, dest, cb)\n}\n\nfunction fileIsNotWritable (srcMode) {\n return (srcMode & 0o200) === 0\n}\n\nfunction makeFileWritable 
(dest, srcMode, cb) {\n return setDestMode(dest, srcMode | 0o200, cb)\n}\n\nfunction setDestTimestampsAndMode (srcMode, src, dest, cb) {\n setDestTimestamps(src, dest, err => {\n if (err) return cb(err)\n return setDestMode(dest, srcMode, cb)\n })\n}\n\nfunction setDestMode (dest, srcMode, cb) {\n return fs.chmod(dest, srcMode, cb)\n}\n\nfunction setDestTimestamps (src, dest, cb) {\n // The initial srcStat.atime cannot be trusted\n // because it is modified by the read(2) system call\n // (See https://nodejs.org/api/fs.html#fs_stat_time_values)\n fs.stat(src, (err, updatedSrcStat) => {\n if (err) return cb(err)\n return utimesMillis(dest, updatedSrcStat.atime, updatedSrcStat.mtime, cb)\n })\n}\n\nfunction onDir (srcStat, destStat, src, dest, opts, cb) {\n if (!destStat) return mkDirAndCopy(srcStat.mode, src, dest, opts, cb)\n return copyDir(src, dest, opts, cb)\n}\n\nfunction mkDirAndCopy (srcMode, src, dest, opts, cb) {\n fs.mkdir(dest, err => {\n if (err) return cb(err)\n copyDir(src, dest, opts, err => {\n if (err) return cb(err)\n return setDestMode(dest, srcMode, cb)\n })\n })\n}\n\nfunction copyDir (src, dest, opts, cb) {\n fs.readdir(src, (err, items) => {\n if (err) return cb(err)\n return copyDirItems(items, src, dest, opts, cb)\n })\n}\n\nfunction copyDirItems (items, src, dest, opts, cb) {\n const item = items.pop()\n if (!item) return cb()\n return copyDirItem(items, item, src, dest, opts, cb)\n}\n\nfunction copyDirItem (items, item, src, dest, opts, cb) {\n const srcItem = path.join(src, item)\n const destItem = path.join(dest, item)\n stat.checkPaths(srcItem, destItem, 'copy', opts, (err, stats) => {\n if (err) return cb(err)\n const { destStat } = stats\n startCopy(destStat, srcItem, destItem, opts, err => {\n if (err) return cb(err)\n return copyDirItems(items, src, dest, opts, cb)\n })\n })\n}\n\nfunction onLink (destStat, src, dest, opts, cb) {\n fs.readlink(src, (err, resolvedSrc) => {\n if (err) return cb(err)\n if (opts.dereference) {\n resolvedSrc = path.resolve(process.cwd(), resolvedSrc)\n }\n\n if (!destStat) {\n return fs.symlink(resolvedSrc, dest, cb)\n } else {\n fs.readlink(dest, (err, resolvedDest) => {\n if (err) {\n // dest exists and is a regular file or directory,\n // Windows may throw UNKNOWN error. 
If dest already exists,\n // fs throws error anyway, so no need to guard against it here.\n if (err.code === 'EINVAL' || err.code === 'UNKNOWN') return fs.symlink(resolvedSrc, dest, cb)\n return cb(err)\n }\n if (opts.dereference) {\n resolvedDest = path.resolve(process.cwd(), resolvedDest)\n }\n if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) {\n return cb(new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`))\n }\n\n // do not copy if src is a subdir of dest since unlinking\n // dest in this case would result in removing src contents\n // and therefore a broken symlink would be created.\n if (destStat.isDirectory() && stat.isSrcSubdir(resolvedDest, resolvedSrc)) {\n return cb(new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`))\n }\n return copyLink(resolvedSrc, dest, cb)\n })\n }\n })\n}\n\nfunction copyLink (resolvedSrc, dest, cb) {\n fs.unlink(dest, err => {\n if (err) return cb(err)\n return fs.symlink(resolvedSrc, dest, cb)\n })\n}\n\nmodule.exports = copy\n","'use strict'\n\nconst u = require('universalify').fromCallback\nmodule.exports = {\n copy: u(require('./copy')),\n copySync: require('./copy-sync')\n}\n","'use strict'\n\nconst u = require('universalify').fromPromise\nconst fs = require('../fs')\nconst path = require('path')\nconst mkdir = require('../mkdirs')\nconst remove = require('../remove')\n\nconst emptyDir = u(async function emptyDir (dir) {\n let items\n try {\n items = await fs.readdir(dir)\n } catch {\n return mkdir.mkdirs(dir)\n }\n\n return Promise.all(items.map(item => remove.remove(path.join(dir, item))))\n})\n\nfunction emptyDirSync (dir) {\n let items\n try {\n items = fs.readdirSync(dir)\n } catch {\n return mkdir.mkdirsSync(dir)\n }\n\n items.forEach(item => {\n item = path.join(dir, item)\n remove.removeSync(item)\n })\n}\n\nmodule.exports = {\n emptyDirSync,\n emptydirSync: emptyDirSync,\n emptyDir,\n emptydir: emptyDir\n}\n","'use strict'\n\nconst u = require('universalify').fromCallback\nconst path = require('path')\nconst fs = require('graceful-fs')\nconst mkdir = require('../mkdirs')\n\nfunction createFile (file, callback) {\n function makeFile () {\n fs.writeFile(file, '', err => {\n if (err) return callback(err)\n callback()\n })\n }\n\n fs.stat(file, (err, stats) => { // eslint-disable-line handle-callback-err\n if (!err && stats.isFile()) return callback()\n const dir = path.dirname(file)\n fs.stat(dir, (err, stats) => {\n if (err) {\n // if the directory doesn't exist, make it\n if (err.code === 'ENOENT') {\n return mkdir.mkdirs(dir, err => {\n if (err) return callback(err)\n makeFile()\n })\n }\n return callback(err)\n }\n\n if (stats.isDirectory()) makeFile()\n else {\n // parent is not a directory\n // This is just to cause an internal ENOTDIR error to be thrown\n fs.readdir(dir, err => {\n if (err) return callback(err)\n })\n }\n })\n })\n}\n\nfunction createFileSync (file) {\n let stats\n try {\n stats = fs.statSync(file)\n } catch {}\n if (stats && stats.isFile()) return\n\n const dir = path.dirname(file)\n try {\n if (!fs.statSync(dir).isDirectory()) {\n // parent is not a directory\n // This is just to cause an internal ENOTDIR error to be thrown\n fs.readdirSync(dir)\n }\n } catch (err) {\n // If the stat call above failed because the directory doesn't exist, create it\n if (err && err.code === 'ENOENT') mkdir.mkdirsSync(dir)\n else throw err\n }\n\n fs.writeFileSync(file, '')\n}\n\nmodule.exports = {\n createFile: u(createFile),\n createFileSync\n}\n","'use strict'\n\nconst { 
createFile, createFileSync } = require('./file')\nconst { createLink, createLinkSync } = require('./link')\nconst { createSymlink, createSymlinkSync } = require('./symlink')\n\nmodule.exports = {\n // file\n createFile,\n createFileSync,\n ensureFile: createFile,\n ensureFileSync: createFileSync,\n // link\n createLink,\n createLinkSync,\n ensureLink: createLink,\n ensureLinkSync: createLinkSync,\n // symlink\n createSymlink,\n createSymlinkSync,\n ensureSymlink: createSymlink,\n ensureSymlinkSync: createSymlinkSync\n}\n","'use strict'\n\nconst u = require('universalify').fromCallback\nconst path = require('path')\nconst fs = require('graceful-fs')\nconst mkdir = require('../mkdirs')\nconst pathExists = require('../path-exists').pathExists\nconst { areIdentical } = require('../util/stat')\n\nfunction createLink (srcpath, dstpath, callback) {\n function makeLink (srcpath, dstpath) {\n fs.link(srcpath, dstpath, err => {\n if (err) return callback(err)\n callback(null)\n })\n }\n\n fs.lstat(dstpath, (_, dstStat) => {\n fs.lstat(srcpath, (err, srcStat) => {\n if (err) {\n err.message = err.message.replace('lstat', 'ensureLink')\n return callback(err)\n }\n if (dstStat && areIdentical(srcStat, dstStat)) return callback(null)\n\n const dir = path.dirname(dstpath)\n pathExists(dir, (err, dirExists) => {\n if (err) return callback(err)\n if (dirExists) return makeLink(srcpath, dstpath)\n mkdir.mkdirs(dir, err => {\n if (err) return callback(err)\n makeLink(srcpath, dstpath)\n })\n })\n })\n })\n}\n\nfunction createLinkSync (srcpath, dstpath) {\n let dstStat\n try {\n dstStat = fs.lstatSync(dstpath)\n } catch {}\n\n try {\n const srcStat = fs.lstatSync(srcpath)\n if (dstStat && areIdentical(srcStat, dstStat)) return\n } catch (err) {\n err.message = err.message.replace('lstat', 'ensureLink')\n throw err\n }\n\n const dir = path.dirname(dstpath)\n const dirExists = fs.existsSync(dir)\n if (dirExists) return fs.linkSync(srcpath, dstpath)\n mkdir.mkdirsSync(dir)\n\n return fs.linkSync(srcpath, dstpath)\n}\n\nmodule.exports = {\n createLink: u(createLink),\n createLinkSync\n}\n","'use strict'\n\nconst path = require('path')\nconst fs = require('graceful-fs')\nconst pathExists = require('../path-exists').pathExists\n\n/**\n * Function that returns two types of paths, one relative to symlink, and one\n * relative to the current working directory. Checks if path is absolute or\n * relative. If the path is relative, this function checks if the path is\n * relative to symlink or relative to current working directory. This is an\n * initiative to find a smarter `srcpath` to supply when building symlinks.\n * This allows you to determine which path to use out of one of three possible\n * types of source paths. The first is an absolute path. This is detected by\n * `path.isAbsolute()`. When an absolute path is provided, it is checked to\n * see if it exists. If it does it's used, if not an error is returned\n * (callback)/ thrown (sync). The other two options for `srcpath` are a\n * relative url. By default Node's `fs.symlink` works by creating a symlink\n * using `dstpath` and expects the `srcpath` to be relative to the newly\n * created symlink. If you provide a `srcpath` that does not exist on the file\n * system it results in a broken symlink. To minimize this, the function\n * checks to see if the 'relative to symlink' source file exists, and if it\n * does it will use it. 
If it does not, it checks if there's a file that\n * exists that is relative to the current working directory, if does its used.\n * This preserves the expectations of the original fs.symlink spec and adds\n * the ability to pass in `relative to current working direcotry` paths.\n */\n\nfunction symlinkPaths (srcpath, dstpath, callback) {\n if (path.isAbsolute(srcpath)) {\n return fs.lstat(srcpath, (err) => {\n if (err) {\n err.message = err.message.replace('lstat', 'ensureSymlink')\n return callback(err)\n }\n return callback(null, {\n toCwd: srcpath,\n toDst: srcpath\n })\n })\n } else {\n const dstdir = path.dirname(dstpath)\n const relativeToDst = path.join(dstdir, srcpath)\n return pathExists(relativeToDst, (err, exists) => {\n if (err) return callback(err)\n if (exists) {\n return callback(null, {\n toCwd: relativeToDst,\n toDst: srcpath\n })\n } else {\n return fs.lstat(srcpath, (err) => {\n if (err) {\n err.message = err.message.replace('lstat', 'ensureSymlink')\n return callback(err)\n }\n return callback(null, {\n toCwd: srcpath,\n toDst: path.relative(dstdir, srcpath)\n })\n })\n }\n })\n }\n}\n\nfunction symlinkPathsSync (srcpath, dstpath) {\n let exists\n if (path.isAbsolute(srcpath)) {\n exists = fs.existsSync(srcpath)\n if (!exists) throw new Error('absolute srcpath does not exist')\n return {\n toCwd: srcpath,\n toDst: srcpath\n }\n } else {\n const dstdir = path.dirname(dstpath)\n const relativeToDst = path.join(dstdir, srcpath)\n exists = fs.existsSync(relativeToDst)\n if (exists) {\n return {\n toCwd: relativeToDst,\n toDst: srcpath\n }\n } else {\n exists = fs.existsSync(srcpath)\n if (!exists) throw new Error('relative srcpath does not exist')\n return {\n toCwd: srcpath,\n toDst: path.relative(dstdir, srcpath)\n }\n }\n }\n}\n\nmodule.exports = {\n symlinkPaths,\n symlinkPathsSync\n}\n","'use strict'\n\nconst fs = require('graceful-fs')\n\nfunction symlinkType (srcpath, type, callback) {\n callback = (typeof type === 'function') ? type : callback\n type = (typeof type === 'function') ? false : type\n if (type) return callback(null, type)\n fs.lstat(srcpath, (err, stats) => {\n if (err) return callback(null, 'file')\n type = (stats && stats.isDirectory()) ? 'dir' : 'file'\n callback(null, type)\n })\n}\n\nfunction symlinkTypeSync (srcpath, type) {\n let stats\n\n if (type) return type\n try {\n stats = fs.lstatSync(srcpath)\n } catch {\n return 'file'\n }\n return (stats && stats.isDirectory()) ? 'dir' : 'file'\n}\n\nmodule.exports = {\n symlinkType,\n symlinkTypeSync\n}\n","'use strict'\n\nconst u = require('universalify').fromCallback\nconst path = require('path')\nconst fs = require('../fs')\nconst _mkdirs = require('../mkdirs')\nconst mkdirs = _mkdirs.mkdirs\nconst mkdirsSync = _mkdirs.mkdirsSync\n\nconst _symlinkPaths = require('./symlink-paths')\nconst symlinkPaths = _symlinkPaths.symlinkPaths\nconst symlinkPathsSync = _symlinkPaths.symlinkPathsSync\n\nconst _symlinkType = require('./symlink-type')\nconst symlinkType = _symlinkType.symlinkType\nconst symlinkTypeSync = _symlinkType.symlinkTypeSync\n\nconst pathExists = require('../path-exists').pathExists\n\nconst { areIdentical } = require('../util/stat')\n\nfunction createSymlink (srcpath, dstpath, type, callback) {\n callback = (typeof type === 'function') ? type : callback\n type = (typeof type === 'function') ? 
false : type\n\n fs.lstat(dstpath, (err, stats) => {\n if (!err && stats.isSymbolicLink()) {\n Promise.all([\n fs.stat(srcpath),\n fs.stat(dstpath)\n ]).then(([srcStat, dstStat]) => {\n if (areIdentical(srcStat, dstStat)) return callback(null)\n _createSymlink(srcpath, dstpath, type, callback)\n })\n } else _createSymlink(srcpath, dstpath, type, callback)\n })\n}\n\nfunction _createSymlink (srcpath, dstpath, type, callback) {\n symlinkPaths(srcpath, dstpath, (err, relative) => {\n if (err) return callback(err)\n srcpath = relative.toDst\n symlinkType(relative.toCwd, type, (err, type) => {\n if (err) return callback(err)\n const dir = path.dirname(dstpath)\n pathExists(dir, (err, dirExists) => {\n if (err) return callback(err)\n if (dirExists) return fs.symlink(srcpath, dstpath, type, callback)\n mkdirs(dir, err => {\n if (err) return callback(err)\n fs.symlink(srcpath, dstpath, type, callback)\n })\n })\n })\n })\n}\n\nfunction createSymlinkSync (srcpath, dstpath, type) {\n let stats\n try {\n stats = fs.lstatSync(dstpath)\n } catch {}\n if (stats && stats.isSymbolicLink()) {\n const srcStat = fs.statSync(srcpath)\n const dstStat = fs.statSync(dstpath)\n if (areIdentical(srcStat, dstStat)) return\n }\n\n const relative = symlinkPathsSync(srcpath, dstpath)\n srcpath = relative.toDst\n type = symlinkTypeSync(relative.toCwd, type)\n const dir = path.dirname(dstpath)\n const exists = fs.existsSync(dir)\n if (exists) return fs.symlinkSync(srcpath, dstpath, type)\n mkdirsSync(dir)\n return fs.symlinkSync(srcpath, dstpath, type)\n}\n\nmodule.exports = {\n createSymlink: u(createSymlink),\n createSymlinkSync\n}\n","'use strict'\n// This is adapted from https://github.com/normalize/mz\n// Copyright (c) 2014-2016 Jonathan Ong me@jongleberry.com and Contributors\nconst u = require('universalify').fromCallback\nconst fs = require('graceful-fs')\n\nconst api = [\n 'access',\n 'appendFile',\n 'chmod',\n 'chown',\n 'close',\n 'copyFile',\n 'fchmod',\n 'fchown',\n 'fdatasync',\n 'fstat',\n 'fsync',\n 'ftruncate',\n 'futimes',\n 'lchmod',\n 'lchown',\n 'link',\n 'lstat',\n 'mkdir',\n 'mkdtemp',\n 'open',\n 'opendir',\n 'readdir',\n 'readFile',\n 'readlink',\n 'realpath',\n 'rename',\n 'rm',\n 'rmdir',\n 'stat',\n 'symlink',\n 'truncate',\n 'unlink',\n 'utimes',\n 'writeFile'\n].filter(key => {\n // Some commands are not available on some systems. 
Ex:\n // fs.opendir was added in Node.js v12.12.0\n // fs.rm was added in Node.js v14.14.0\n // fs.lchown is not available on at least some Linux\n return typeof fs[key] === 'function'\n})\n\n// Export cloned fs:\nObject.assign(exports, fs)\n\n// Universalify async methods:\napi.forEach(method => {\n exports[method] = u(fs[method])\n})\n\n// We differ from mz/fs in that we still ship the old, broken, fs.exists()\n// since we are a drop-in replacement for the native module\nexports.exists = function (filename, callback) {\n if (typeof callback === 'function') {\n return fs.exists(filename, callback)\n }\n return new Promise(resolve => {\n return fs.exists(filename, resolve)\n })\n}\n\n// fs.read(), fs.write(), & fs.writev() need special treatment due to multiple callback args\n\nexports.read = function (fd, buffer, offset, length, position, callback) {\n if (typeof callback === 'function') {\n return fs.read(fd, buffer, offset, length, position, callback)\n }\n return new Promise((resolve, reject) => {\n fs.read(fd, buffer, offset, length, position, (err, bytesRead, buffer) => {\n if (err) return reject(err)\n resolve({ bytesRead, buffer })\n })\n })\n}\n\n// Function signature can be\n// fs.write(fd, buffer[, offset[, length[, position]]], callback)\n// OR\n// fs.write(fd, string[, position[, encoding]], callback)\n// We need to handle both cases, so we use ...args\nexports.write = function (fd, buffer, ...args) {\n if (typeof args[args.length - 1] === 'function') {\n return fs.write(fd, buffer, ...args)\n }\n\n return new Promise((resolve, reject) => {\n fs.write(fd, buffer, ...args, (err, bytesWritten, buffer) => {\n if (err) return reject(err)\n resolve({ bytesWritten, buffer })\n })\n })\n}\n\n// fs.writev only available in Node v12.9.0+\nif (typeof fs.writev === 'function') {\n // Function signature is\n // s.writev(fd, buffers[, position], callback)\n // We need to handle the optional arg, so we use ...args\n exports.writev = function (fd, buffers, ...args) {\n if (typeof args[args.length - 1] === 'function') {\n return fs.writev(fd, buffers, ...args)\n }\n\n return new Promise((resolve, reject) => {\n fs.writev(fd, buffers, ...args, (err, bytesWritten, buffers) => {\n if (err) return reject(err)\n resolve({ bytesWritten, buffers })\n })\n })\n }\n}\n\n// fs.realpath.native sometimes not available if fs is monkey-patched\nif (typeof fs.realpath.native === 'function') {\n exports.realpath.native = u(fs.realpath.native)\n} else {\n process.emitWarning(\n 'fs.realpath.native is not a function. 
Is fs being monkey-patched?',\n 'Warning', 'fs-extra-WARN0003'\n )\n}\n","'use strict'\n\nmodule.exports = {\n // Export promiseified graceful-fs:\n ...require('./fs'),\n // Export extra methods:\n ...require('./copy'),\n ...require('./empty'),\n ...require('./ensure'),\n ...require('./json'),\n ...require('./mkdirs'),\n ...require('./move'),\n ...require('./output-file'),\n ...require('./path-exists'),\n ...require('./remove')\n}\n","'use strict'\n\nconst u = require('universalify').fromPromise\nconst jsonFile = require('./jsonfile')\n\njsonFile.outputJson = u(require('./output-json'))\njsonFile.outputJsonSync = require('./output-json-sync')\n// aliases\njsonFile.outputJSON = jsonFile.outputJson\njsonFile.outputJSONSync = jsonFile.outputJsonSync\njsonFile.writeJSON = jsonFile.writeJson\njsonFile.writeJSONSync = jsonFile.writeJsonSync\njsonFile.readJSON = jsonFile.readJson\njsonFile.readJSONSync = jsonFile.readJsonSync\n\nmodule.exports = jsonFile\n","'use strict'\n\nconst jsonFile = require('jsonfile')\n\nmodule.exports = {\n // jsonfile exports\n readJson: jsonFile.readFile,\n readJsonSync: jsonFile.readFileSync,\n writeJson: jsonFile.writeFile,\n writeJsonSync: jsonFile.writeFileSync\n}\n","'use strict'\n\nconst { stringify } = require('jsonfile/utils')\nconst { outputFileSync } = require('../output-file')\n\nfunction outputJsonSync (file, data, options) {\n const str = stringify(data, options)\n\n outputFileSync(file, str, options)\n}\n\nmodule.exports = outputJsonSync\n","'use strict'\n\nconst { stringify } = require('jsonfile/utils')\nconst { outputFile } = require('../output-file')\n\nasync function outputJson (file, data, options = {}) {\n const str = stringify(data, options)\n\n await outputFile(file, str, options)\n}\n\nmodule.exports = outputJson\n","'use strict'\nconst u = require('universalify').fromPromise\nconst { makeDir: _makeDir, makeDirSync } = require('./make-dir')\nconst makeDir = u(_makeDir)\n\nmodule.exports = {\n mkdirs: makeDir,\n mkdirsSync: makeDirSync,\n // alias\n mkdirp: makeDir,\n mkdirpSync: makeDirSync,\n ensureDir: makeDir,\n ensureDirSync: makeDirSync\n}\n","'use strict'\nconst fs = require('../fs')\nconst { checkPath } = require('./utils')\n\nconst getMode = options => {\n const defaults = { mode: 0o777 }\n if (typeof options === 'number') return options\n return ({ ...defaults, ...options }).mode\n}\n\nmodule.exports.makeDir = async (dir, options) => {\n checkPath(dir)\n\n return fs.mkdir(dir, {\n mode: getMode(options),\n recursive: true\n })\n}\n\nmodule.exports.makeDirSync = (dir, options) => {\n checkPath(dir)\n\n return fs.mkdirSync(dir, {\n mode: getMode(options),\n recursive: true\n })\n}\n","// Adapted from https://github.com/sindresorhus/make-dir\n// Copyright (c) Sindre Sorhus (sindresorhus.com)\n// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the \"Software\"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:\n// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n'use strict'\nconst path = require('path')\n\n// https://github.com/nodejs/node/issues/8987\n// https://github.com/libuv/libuv/pull/1088\nmodule.exports.checkPath = function checkPath (pth) {\n if (process.platform === 'win32') {\n const pathHasInvalidWinCharacters = /[<>:\"|?*]/.test(pth.replace(path.parse(pth).root, ''))\n\n if (pathHasInvalidWinCharacters) {\n const error = new Error(`Path contains invalid characters: ${pth}`)\n error.code = 'EINVAL'\n throw error\n }\n }\n}\n","'use strict'\n\nconst u = require('universalify').fromCallback\nmodule.exports = {\n move: u(require('./move')),\n moveSync: require('./move-sync')\n}\n","'use strict'\n\nconst fs = require('graceful-fs')\nconst path = require('path')\nconst copySync = require('../copy').copySync\nconst removeSync = require('../remove').removeSync\nconst mkdirpSync = require('../mkdirs').mkdirpSync\nconst stat = require('../util/stat')\n\nfunction moveSync (src, dest, opts) {\n opts = opts || {}\n const overwrite = opts.overwrite || opts.clobber || false\n\n const { srcStat, isChangingCase = false } = stat.checkPathsSync(src, dest, 'move', opts)\n stat.checkParentPathsSync(src, srcStat, dest, 'move')\n if (!isParentRoot(dest)) mkdirpSync(path.dirname(dest))\n return doRename(src, dest, overwrite, isChangingCase)\n}\n\nfunction isParentRoot (dest) {\n const parent = path.dirname(dest)\n const parsedPath = path.parse(parent)\n return parsedPath.root === parent\n}\n\nfunction doRename (src, dest, overwrite, isChangingCase) {\n if (isChangingCase) return rename(src, dest, overwrite)\n if (overwrite) {\n removeSync(dest)\n return rename(src, dest, overwrite)\n }\n if (fs.existsSync(dest)) throw new Error('dest already exists.')\n return rename(src, dest, overwrite)\n}\n\nfunction rename (src, dest, overwrite) {\n try {\n fs.renameSync(src, dest)\n } catch (err) {\n if (err.code !== 'EXDEV') throw err\n return moveAcrossDevice(src, dest, overwrite)\n }\n}\n\nfunction moveAcrossDevice (src, dest, overwrite) {\n const opts = {\n overwrite,\n errorOnExist: true\n }\n copySync(src, dest, opts)\n return removeSync(src)\n}\n\nmodule.exports = moveSync\n","'use strict'\n\nconst fs = require('graceful-fs')\nconst path = require('path')\nconst copy = require('../copy').copy\nconst remove = require('../remove').remove\nconst mkdirp = require('../mkdirs').mkdirp\nconst pathExists = require('../path-exists').pathExists\nconst stat = require('../util/stat')\n\nfunction move (src, dest, opts, cb) {\n if (typeof opts === 'function') {\n cb = opts\n opts = {}\n }\n\n opts = opts || {}\n\n const overwrite = opts.overwrite || opts.clobber || false\n\n stat.checkPaths(src, dest, 'move', opts, (err, stats) => {\n if (err) return cb(err)\n const { srcStat, isChangingCase = false } = stats\n stat.checkParentPaths(src, srcStat, dest, 'move', err => {\n if (err) return cb(err)\n if (isParentRoot(dest)) return doRename(src, dest, overwrite, isChangingCase, cb)\n mkdirp(path.dirname(dest), err => {\n if (err) return cb(err)\n return doRename(src, dest, overwrite, isChangingCase, cb)\n })\n })\n })\n}\n\nfunction isParentRoot (dest) {\n const parent = path.dirname(dest)\n const parsedPath = path.parse(parent)\n return parsedPath.root === parent\n}\n\nfunction doRename (src, dest, 
overwrite, isChangingCase, cb) {\n if (isChangingCase) return rename(src, dest, overwrite, cb)\n if (overwrite) {\n return remove(dest, err => {\n if (err) return cb(err)\n return rename(src, dest, overwrite, cb)\n })\n }\n pathExists(dest, (err, destExists) => {\n if (err) return cb(err)\n if (destExists) return cb(new Error('dest already exists.'))\n return rename(src, dest, overwrite, cb)\n })\n}\n\nfunction rename (src, dest, overwrite, cb) {\n fs.rename(src, dest, err => {\n if (!err) return cb()\n if (err.code !== 'EXDEV') return cb(err)\n return moveAcrossDevice(src, dest, overwrite, cb)\n })\n}\n\nfunction moveAcrossDevice (src, dest, overwrite, cb) {\n const opts = {\n overwrite,\n errorOnExist: true\n }\n copy(src, dest, opts, err => {\n if (err) return cb(err)\n return remove(src, cb)\n })\n}\n\nmodule.exports = move\n","'use strict'\n\nconst u = require('universalify').fromCallback\nconst fs = require('graceful-fs')\nconst path = require('path')\nconst mkdir = require('../mkdirs')\nconst pathExists = require('../path-exists').pathExists\n\nfunction outputFile (file, data, encoding, callback) {\n if (typeof encoding === 'function') {\n callback = encoding\n encoding = 'utf8'\n }\n\n const dir = path.dirname(file)\n pathExists(dir, (err, itDoes) => {\n if (err) return callback(err)\n if (itDoes) return fs.writeFile(file, data, encoding, callback)\n\n mkdir.mkdirs(dir, err => {\n if (err) return callback(err)\n\n fs.writeFile(file, data, encoding, callback)\n })\n })\n}\n\nfunction outputFileSync (file, ...args) {\n const dir = path.dirname(file)\n if (fs.existsSync(dir)) {\n return fs.writeFileSync(file, ...args)\n }\n mkdir.mkdirsSync(dir)\n fs.writeFileSync(file, ...args)\n}\n\nmodule.exports = {\n outputFile: u(outputFile),\n outputFileSync\n}\n","'use strict'\nconst u = require('universalify').fromPromise\nconst fs = require('../fs')\n\nfunction pathExists (path) {\n return fs.access(path).then(() => true).catch(() => false)\n}\n\nmodule.exports = {\n pathExists: u(pathExists),\n pathExistsSync: fs.existsSync\n}\n","'use strict'\n\nconst fs = require('graceful-fs')\nconst u = require('universalify').fromCallback\nconst rimraf = require('./rimraf')\n\nfunction remove (path, callback) {\n // Node 14.14.0+\n if (fs.rm) return fs.rm(path, { recursive: true, force: true }, callback)\n rimraf(path, callback)\n}\n\nfunction removeSync (path) {\n // Node 14.14.0+\n if (fs.rmSync) return fs.rmSync(path, { recursive: true, force: true })\n rimraf.sync(path)\n}\n\nmodule.exports = {\n remove: u(remove),\n removeSync\n}\n","'use strict'\n\nconst fs = require('graceful-fs')\nconst path = require('path')\nconst assert = require('assert')\n\nconst isWindows = (process.platform === 'win32')\n\nfunction defaults (options) {\n const methods = [\n 'unlink',\n 'chmod',\n 'stat',\n 'lstat',\n 'rmdir',\n 'readdir'\n ]\n methods.forEach(m => {\n options[m] = options[m] || fs[m]\n m = m + 'Sync'\n options[m] = options[m] || fs[m]\n })\n\n options.maxBusyTries = options.maxBusyTries || 3\n}\n\nfunction rimraf (p, options, cb) {\n let busyTries = 0\n\n if (typeof options === 'function') {\n cb = options\n options = {}\n }\n\n assert(p, 'rimraf: missing path')\n assert.strictEqual(typeof p, 'string', 'rimraf: path should be a string')\n assert.strictEqual(typeof cb, 'function', 'rimraf: callback function required')\n assert(options, 'rimraf: invalid options argument provided')\n assert.strictEqual(typeof options, 'object', 'rimraf: options should be object')\n\n defaults(options)\n\n rimraf_(p, 
options, function CB (er) {\n if (er) {\n if ((er.code === 'EBUSY' || er.code === 'ENOTEMPTY' || er.code === 'EPERM') &&\n busyTries < options.maxBusyTries) {\n busyTries++\n const time = busyTries * 100\n // try again, with the same exact callback as this one.\n return setTimeout(() => rimraf_(p, options, CB), time)\n }\n\n // already gone\n if (er.code === 'ENOENT') er = null\n }\n\n cb(er)\n })\n}\n\n// Two possible strategies.\n// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR\n// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR\n//\n// Both result in an extra syscall when you guess wrong. However, there\n// are likely far more normal files in the world than directories. This\n// is based on the assumption that a the average number of files per\n// directory is >= 1.\n//\n// If anyone ever complains about this, then I guess the strategy could\n// be made configurable somehow. But until then, YAGNI.\nfunction rimraf_ (p, options, cb) {\n assert(p)\n assert(options)\n assert(typeof cb === 'function')\n\n // sunos lets the root user unlink directories, which is... weird.\n // so we have to lstat here and make sure it's not a dir.\n options.lstat(p, (er, st) => {\n if (er && er.code === 'ENOENT') {\n return cb(null)\n }\n\n // Windows can EPERM on stat. Life is suffering.\n if (er && er.code === 'EPERM' && isWindows) {\n return fixWinEPERM(p, options, er, cb)\n }\n\n if (st && st.isDirectory()) {\n return rmdir(p, options, er, cb)\n }\n\n options.unlink(p, er => {\n if (er) {\n if (er.code === 'ENOENT') {\n return cb(null)\n }\n if (er.code === 'EPERM') {\n return (isWindows)\n ? fixWinEPERM(p, options, er, cb)\n : rmdir(p, options, er, cb)\n }\n if (er.code === 'EISDIR') {\n return rmdir(p, options, er, cb)\n }\n }\n return cb(er)\n })\n })\n}\n\nfunction fixWinEPERM (p, options, er, cb) {\n assert(p)\n assert(options)\n assert(typeof cb === 'function')\n\n options.chmod(p, 0o666, er2 => {\n if (er2) {\n cb(er2.code === 'ENOENT' ? null : er)\n } else {\n options.stat(p, (er3, stats) => {\n if (er3) {\n cb(er3.code === 'ENOENT' ? 
null : er)\n } else if (stats.isDirectory()) {\n rmdir(p, options, er, cb)\n } else {\n options.unlink(p, cb)\n }\n })\n }\n })\n}\n\nfunction fixWinEPERMSync (p, options, er) {\n let stats\n\n assert(p)\n assert(options)\n\n try {\n options.chmodSync(p, 0o666)\n } catch (er2) {\n if (er2.code === 'ENOENT') {\n return\n } else {\n throw er\n }\n }\n\n try {\n stats = options.statSync(p)\n } catch (er3) {\n if (er3.code === 'ENOENT') {\n return\n } else {\n throw er\n }\n }\n\n if (stats.isDirectory()) {\n rmdirSync(p, options, er)\n } else {\n options.unlinkSync(p)\n }\n}\n\nfunction rmdir (p, options, originalEr, cb) {\n assert(p)\n assert(options)\n assert(typeof cb === 'function')\n\n // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS)\n // if we guessed wrong, and it's not a directory, then\n // raise the original error.\n options.rmdir(p, er => {\n if (er && (er.code === 'ENOTEMPTY' || er.code === 'EEXIST' || er.code === 'EPERM')) {\n rmkids(p, options, cb)\n } else if (er && er.code === 'ENOTDIR') {\n cb(originalEr)\n } else {\n cb(er)\n }\n })\n}\n\nfunction rmkids (p, options, cb) {\n assert(p)\n assert(options)\n assert(typeof cb === 'function')\n\n options.readdir(p, (er, files) => {\n if (er) return cb(er)\n\n let n = files.length\n let errState\n\n if (n === 0) return options.rmdir(p, cb)\n\n files.forEach(f => {\n rimraf(path.join(p, f), options, er => {\n if (errState) {\n return\n }\n if (er) return cb(errState = er)\n if (--n === 0) {\n options.rmdir(p, cb)\n }\n })\n })\n })\n}\n\n// this looks simpler, and is strictly *faster*, but will\n// tie up the JavaScript thread and fail on excessively\n// deep directory trees.\nfunction rimrafSync (p, options) {\n let st\n\n options = options || {}\n defaults(options)\n\n assert(p, 'rimraf: missing path')\n assert.strictEqual(typeof p, 'string', 'rimraf: path should be a string')\n assert(options, 'rimraf: missing options')\n assert.strictEqual(typeof options, 'object', 'rimraf: options should be object')\n\n try {\n st = options.lstatSync(p)\n } catch (er) {\n if (er.code === 'ENOENT') {\n return\n }\n\n // Windows can EPERM on stat. Life is suffering.\n if (er.code === 'EPERM' && isWindows) {\n fixWinEPERMSync(p, options, er)\n }\n }\n\n try {\n // sunos lets the root user unlink directories, which is... weird.\n if (st && st.isDirectory()) {\n rmdirSync(p, options, null)\n } else {\n options.unlinkSync(p)\n }\n } catch (er) {\n if (er.code === 'ENOENT') {\n return\n } else if (er.code === 'EPERM') {\n return isWindows ? 
fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er)\n } else if (er.code !== 'EISDIR') {\n throw er\n }\n rmdirSync(p, options, er)\n }\n}\n\nfunction rmdirSync (p, options, originalEr) {\n assert(p)\n assert(options)\n\n try {\n options.rmdirSync(p)\n } catch (er) {\n if (er.code === 'ENOTDIR') {\n throw originalEr\n } else if (er.code === 'ENOTEMPTY' || er.code === 'EEXIST' || er.code === 'EPERM') {\n rmkidsSync(p, options)\n } else if (er.code !== 'ENOENT') {\n throw er\n }\n }\n}\n\nfunction rmkidsSync (p, options) {\n assert(p)\n assert(options)\n options.readdirSync(p).forEach(f => rimrafSync(path.join(p, f), options))\n\n if (isWindows) {\n // We only end up here once we got ENOTEMPTY at least once, and\n // at this point, we are guaranteed to have removed all the kids.\n // So, we know that it won't be ENOENT or ENOTDIR or anything else.\n // try really hard to delete stuff on windows, because it has a\n // PROFOUNDLY annoying habit of not closing handles promptly when\n // files are deleted, resulting in spurious ENOTEMPTY errors.\n const startTime = Date.now()\n do {\n try {\n const ret = options.rmdirSync(p, options)\n return ret\n } catch {}\n } while (Date.now() - startTime < 500) // give up after 500ms\n } else {\n const ret = options.rmdirSync(p, options)\n return ret\n }\n}\n\nmodule.exports = rimraf\nrimraf.sync = rimrafSync\n","'use strict'\n\nconst fs = require('../fs')\nconst path = require('path')\nconst util = require('util')\n\nfunction getStats (src, dest, opts) {\n const statFunc = opts.dereference\n ? (file) => fs.stat(file, { bigint: true })\n : (file) => fs.lstat(file, { bigint: true })\n return Promise.all([\n statFunc(src),\n statFunc(dest).catch(err => {\n if (err.code === 'ENOENT') return null\n throw err\n })\n ]).then(([srcStat, destStat]) => ({ srcStat, destStat }))\n}\n\nfunction getStatsSync (src, dest, opts) {\n let destStat\n const statFunc = opts.dereference\n ? 
(file) => fs.statSync(file, { bigint: true })\n : (file) => fs.lstatSync(file, { bigint: true })\n const srcStat = statFunc(src)\n try {\n destStat = statFunc(dest)\n } catch (err) {\n if (err.code === 'ENOENT') return { srcStat, destStat: null }\n throw err\n }\n return { srcStat, destStat }\n}\n\nfunction checkPaths (src, dest, funcName, opts, cb) {\n util.callbackify(getStats)(src, dest, opts, (err, stats) => {\n if (err) return cb(err)\n const { srcStat, destStat } = stats\n\n if (destStat) {\n if (areIdentical(srcStat, destStat)) {\n const srcBaseName = path.basename(src)\n const destBaseName = path.basename(dest)\n if (funcName === 'move' &&\n srcBaseName !== destBaseName &&\n srcBaseName.toLowerCase() === destBaseName.toLowerCase()) {\n return cb(null, { srcStat, destStat, isChangingCase: true })\n }\n return cb(new Error('Source and destination must not be the same.'))\n }\n if (srcStat.isDirectory() && !destStat.isDirectory()) {\n return cb(new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`))\n }\n if (!srcStat.isDirectory() && destStat.isDirectory()) {\n return cb(new Error(`Cannot overwrite directory '${dest}' with non-directory '${src}'.`))\n }\n }\n\n if (srcStat.isDirectory() && isSrcSubdir(src, dest)) {\n return cb(new Error(errMsg(src, dest, funcName)))\n }\n return cb(null, { srcStat, destStat })\n })\n}\n\nfunction checkPathsSync (src, dest, funcName, opts) {\n const { srcStat, destStat } = getStatsSync(src, dest, opts)\n\n if (destStat) {\n if (areIdentical(srcStat, destStat)) {\n const srcBaseName = path.basename(src)\n const destBaseName = path.basename(dest)\n if (funcName === 'move' &&\n srcBaseName !== destBaseName &&\n srcBaseName.toLowerCase() === destBaseName.toLowerCase()) {\n return { srcStat, destStat, isChangingCase: true }\n }\n throw new Error('Source and destination must not be the same.')\n }\n if (srcStat.isDirectory() && !destStat.isDirectory()) {\n throw new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`)\n }\n if (!srcStat.isDirectory() && destStat.isDirectory()) {\n throw new Error(`Cannot overwrite directory '${dest}' with non-directory '${src}'.`)\n }\n }\n\n if (srcStat.isDirectory() && isSrcSubdir(src, dest)) {\n throw new Error(errMsg(src, dest, funcName))\n }\n return { srcStat, destStat }\n}\n\n// recursively check if dest parent is a subdirectory of src.\n// It works for all file types including symlinks since it\n// checks the src and dest inodes. 
It starts from the deepest\n// parent and stops once it reaches the src parent or the root path.\nfunction checkParentPaths (src, srcStat, dest, funcName, cb) {\n const srcParent = path.resolve(path.dirname(src))\n const destParent = path.resolve(path.dirname(dest))\n if (destParent === srcParent || destParent === path.parse(destParent).root) return cb()\n fs.stat(destParent, { bigint: true }, (err, destStat) => {\n if (err) {\n if (err.code === 'ENOENT') return cb()\n return cb(err)\n }\n if (areIdentical(srcStat, destStat)) {\n return cb(new Error(errMsg(src, dest, funcName)))\n }\n return checkParentPaths(src, srcStat, destParent, funcName, cb)\n })\n}\n\nfunction checkParentPathsSync (src, srcStat, dest, funcName) {\n const srcParent = path.resolve(path.dirname(src))\n const destParent = path.resolve(path.dirname(dest))\n if (destParent === srcParent || destParent === path.parse(destParent).root) return\n let destStat\n try {\n destStat = fs.statSync(destParent, { bigint: true })\n } catch (err) {\n if (err.code === 'ENOENT') return\n throw err\n }\n if (areIdentical(srcStat, destStat)) {\n throw new Error(errMsg(src, dest, funcName))\n }\n return checkParentPathsSync(src, srcStat, destParent, funcName)\n}\n\nfunction areIdentical (srcStat, destStat) {\n return destStat.ino && destStat.dev && destStat.ino === srcStat.ino && destStat.dev === srcStat.dev\n}\n\n// return true if dest is a subdir of src, otherwise false.\n// It only checks the path strings.\nfunction isSrcSubdir (src, dest) {\n const srcArr = path.resolve(src).split(path.sep).filter(i => i)\n const destArr = path.resolve(dest).split(path.sep).filter(i => i)\n return srcArr.reduce((acc, cur, i) => acc && destArr[i] === cur, true)\n}\n\nfunction errMsg (src, dest, funcName) {\n return `Cannot ${funcName} '${src}' to a subdirectory of itself, '${dest}'.`\n}\n\nmodule.exports = {\n checkPaths,\n checkPathsSync,\n checkParentPaths,\n checkParentPathsSync,\n isSrcSubdir,\n areIdentical\n}\n","'use strict'\n\nconst fs = require('graceful-fs')\n\nfunction utimesMillis (path, atime, mtime, callback) {\n // if (!HAS_MILLIS_RES) return fs.utimes(path, atime, mtime, callback)\n fs.open(path, 'r+', (err, fd) => {\n if (err) return callback(err)\n fs.futimes(fd, atime, mtime, futimesErr => {\n fs.close(fd, closeErr => {\n if (callback) callback(futimesErr || closeErr)\n })\n })\n })\n}\n\nfunction utimesMillisSync (path, atime, mtime) {\n const fd = fs.openSync(path, 'r+')\n fs.futimesSync(fd, atime, mtime)\n return fs.closeSync(fd)\n}\n\nmodule.exports = {\n utimesMillis,\n utimesMillisSync\n}\n","'use strict'\n\nmodule.exports = clone\n\nvar getPrototypeOf = Object.getPrototypeOf || function (obj) {\n return obj.__proto__\n}\n\nfunction clone (obj) {\n if (obj === null || typeof obj !== 'object')\n return obj\n\n if (obj instanceof Object)\n var copy = { __proto__: getPrototypeOf(obj) }\n else\n var copy = Object.create(null)\n\n Object.getOwnPropertyNames(obj).forEach(function (key) {\n Object.defineProperty(copy, key, Object.getOwnPropertyDescriptor(obj, key))\n })\n\n return copy\n}\n","var fs = require('fs')\nvar polyfills = require('./polyfills.js')\nvar legacy = require('./legacy-streams.js')\nvar clone = require('./clone.js')\n\nvar util = require('util')\n\n/* istanbul ignore next - node 0.x polyfill */\nvar gracefulQueue\nvar previousSymbol\n\n/* istanbul ignore else - node 0.x polyfill */\nif (typeof Symbol === 'function' && typeof Symbol.for === 'function') {\n gracefulQueue = Symbol.for('graceful-fs.queue')\n // This is 
used in testing by future versions\n previousSymbol = Symbol.for('graceful-fs.previous')\n} else {\n gracefulQueue = '___graceful-fs.queue'\n previousSymbol = '___graceful-fs.previous'\n}\n\nfunction noop () {}\n\nfunction publishQueue(context, queue) {\n Object.defineProperty(context, gracefulQueue, {\n get: function() {\n return queue\n }\n })\n}\n\nvar debug = noop\nif (util.debuglog)\n debug = util.debuglog('gfs4')\nelse if (/\\bgfs4\\b/i.test(process.env.NODE_DEBUG || ''))\n debug = function() {\n var m = util.format.apply(util, arguments)\n m = 'GFS4: ' + m.split(/\\n/).join('\\nGFS4: ')\n console.error(m)\n }\n\n// Once time initialization\nif (!fs[gracefulQueue]) {\n // This queue can be shared by multiple loaded instances\n var queue = global[gracefulQueue] || []\n publishQueue(fs, queue)\n\n // Patch fs.close/closeSync to shared queue version, because we need\n // to retry() whenever a close happens *anywhere* in the program.\n // This is essential when multiple graceful-fs instances are\n // in play at the same time.\n fs.close = (function (fs$close) {\n function close (fd, cb) {\n return fs$close.call(fs, fd, function (err) {\n // This function uses the graceful-fs shared queue\n if (!err) {\n resetQueue()\n }\n\n if (typeof cb === 'function')\n cb.apply(this, arguments)\n })\n }\n\n Object.defineProperty(close, previousSymbol, {\n value: fs$close\n })\n return close\n })(fs.close)\n\n fs.closeSync = (function (fs$closeSync) {\n function closeSync (fd) {\n // This function uses the graceful-fs shared queue\n fs$closeSync.apply(fs, arguments)\n resetQueue()\n }\n\n Object.defineProperty(closeSync, previousSymbol, {\n value: fs$closeSync\n })\n return closeSync\n })(fs.closeSync)\n\n if (/\\bgfs4\\b/i.test(process.env.NODE_DEBUG || '')) {\n process.on('exit', function() {\n debug(fs[gracefulQueue])\n require('assert').equal(fs[gracefulQueue].length, 0)\n })\n }\n}\n\nif (!global[gracefulQueue]) {\n publishQueue(global, fs[gracefulQueue]);\n}\n\nmodule.exports = patch(clone(fs))\nif (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !fs.__patched) {\n module.exports = patch(fs)\n fs.__patched = true;\n}\n\nfunction patch (fs) {\n // Everything that references the open() function needs to be in here\n polyfills(fs)\n fs.gracefulify = patch\n\n fs.createReadStream = createReadStream\n fs.createWriteStream = createWriteStream\n var fs$readFile = fs.readFile\n fs.readFile = readFile\n function readFile (path, options, cb) {\n if (typeof options === 'function')\n cb = options, options = null\n\n return go$readFile(path, options, cb)\n\n function go$readFile (path, options, cb, startTime) {\n return fs$readFile(path, options, function (err) {\n if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))\n enqueue([go$readFile, [path, options, cb], err, startTime || Date.now(), Date.now()])\n else {\n if (typeof cb === 'function')\n cb.apply(this, arguments)\n }\n })\n }\n }\n\n var fs$writeFile = fs.writeFile\n fs.writeFile = writeFile\n function writeFile (path, data, options, cb) {\n if (typeof options === 'function')\n cb = options, options = null\n\n return go$writeFile(path, data, options, cb)\n\n function go$writeFile (path, data, options, cb, startTime) {\n return fs$writeFile(path, data, options, function (err) {\n if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))\n enqueue([go$writeFile, [path, data, options, cb], err, startTime || Date.now(), Date.now()])\n else {\n if (typeof cb === 'function')\n cb.apply(this, arguments)\n }\n })\n }\n }\n\n var fs$appendFile = 
fs.appendFile\n if (fs$appendFile)\n fs.appendFile = appendFile\n function appendFile (path, data, options, cb) {\n if (typeof options === 'function')\n cb = options, options = null\n\n return go$appendFile(path, data, options, cb)\n\n function go$appendFile (path, data, options, cb, startTime) {\n return fs$appendFile(path, data, options, function (err) {\n if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))\n enqueue([go$appendFile, [path, data, options, cb], err, startTime || Date.now(), Date.now()])\n else {\n if (typeof cb === 'function')\n cb.apply(this, arguments)\n }\n })\n }\n }\n\n var fs$copyFile = fs.copyFile\n if (fs$copyFile)\n fs.copyFile = copyFile\n function copyFile (src, dest, flags, cb) {\n if (typeof flags === 'function') {\n cb = flags\n flags = 0\n }\n return go$copyFile(src, dest, flags, cb)\n\n function go$copyFile (src, dest, flags, cb, startTime) {\n return fs$copyFile(src, dest, flags, function (err) {\n if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))\n enqueue([go$copyFile, [src, dest, flags, cb], err, startTime || Date.now(), Date.now()])\n else {\n if (typeof cb === 'function')\n cb.apply(this, arguments)\n }\n })\n }\n }\n\n var fs$readdir = fs.readdir\n fs.readdir = readdir\n var noReaddirOptionVersions = /^v[0-5]\\./\n function readdir (path, options, cb) {\n if (typeof options === 'function')\n cb = options, options = null\n\n var go$readdir = noReaddirOptionVersions.test(process.version)\n ? function go$readdir (path, options, cb, startTime) {\n return fs$readdir(path, fs$readdirCallback(\n path, options, cb, startTime\n ))\n }\n : function go$readdir (path, options, cb, startTime) {\n return fs$readdir(path, options, fs$readdirCallback(\n path, options, cb, startTime\n ))\n }\n\n return go$readdir(path, options, cb)\n\n function fs$readdirCallback (path, options, cb, startTime) {\n return function (err, files) {\n if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))\n enqueue([\n go$readdir,\n [path, options, cb],\n err,\n startTime || Date.now(),\n Date.now()\n ])\n else {\n if (files && files.sort)\n files.sort()\n\n if (typeof cb === 'function')\n cb.call(this, err, files)\n }\n }\n }\n }\n\n if (process.version.substr(0, 4) === 'v0.8') {\n var legStreams = legacy(fs)\n ReadStream = legStreams.ReadStream\n WriteStream = legStreams.WriteStream\n }\n\n var fs$ReadStream = fs.ReadStream\n if (fs$ReadStream) {\n ReadStream.prototype = Object.create(fs$ReadStream.prototype)\n ReadStream.prototype.open = ReadStream$open\n }\n\n var fs$WriteStream = fs.WriteStream\n if (fs$WriteStream) {\n WriteStream.prototype = Object.create(fs$WriteStream.prototype)\n WriteStream.prototype.open = WriteStream$open\n }\n\n Object.defineProperty(fs, 'ReadStream', {\n get: function () {\n return ReadStream\n },\n set: function (val) {\n ReadStream = val\n },\n enumerable: true,\n configurable: true\n })\n Object.defineProperty(fs, 'WriteStream', {\n get: function () {\n return WriteStream\n },\n set: function (val) {\n WriteStream = val\n },\n enumerable: true,\n configurable: true\n })\n\n // legacy names\n var FileReadStream = ReadStream\n Object.defineProperty(fs, 'FileReadStream', {\n get: function () {\n return FileReadStream\n },\n set: function (val) {\n FileReadStream = val\n },\n enumerable: true,\n configurable: true\n })\n var FileWriteStream = WriteStream\n Object.defineProperty(fs, 'FileWriteStream', {\n get: function () {\n return FileWriteStream\n },\n set: function (val) {\n FileWriteStream = val\n },\n enumerable: true,\n 
configurable: true\n })\n\n function ReadStream (path, options) {\n if (this instanceof ReadStream)\n return fs$ReadStream.apply(this, arguments), this\n else\n return ReadStream.apply(Object.create(ReadStream.prototype), arguments)\n }\n\n function ReadStream$open () {\n var that = this\n open(that.path, that.flags, that.mode, function (err, fd) {\n if (err) {\n if (that.autoClose)\n that.destroy()\n\n that.emit('error', err)\n } else {\n that.fd = fd\n that.emit('open', fd)\n that.read()\n }\n })\n }\n\n function WriteStream (path, options) {\n if (this instanceof WriteStream)\n return fs$WriteStream.apply(this, arguments), this\n else\n return WriteStream.apply(Object.create(WriteStream.prototype), arguments)\n }\n\n function WriteStream$open () {\n var that = this\n open(that.path, that.flags, that.mode, function (err, fd) {\n if (err) {\n that.destroy()\n that.emit('error', err)\n } else {\n that.fd = fd\n that.emit('open', fd)\n }\n })\n }\n\n function createReadStream (path, options) {\n return new fs.ReadStream(path, options)\n }\n\n function createWriteStream (path, options) {\n return new fs.WriteStream(path, options)\n }\n\n var fs$open = fs.open\n fs.open = open\n function open (path, flags, mode, cb) {\n if (typeof mode === 'function')\n cb = mode, mode = null\n\n return go$open(path, flags, mode, cb)\n\n function go$open (path, flags, mode, cb, startTime) {\n return fs$open(path, flags, mode, function (err, fd) {\n if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))\n enqueue([go$open, [path, flags, mode, cb], err, startTime || Date.now(), Date.now()])\n else {\n if (typeof cb === 'function')\n cb.apply(this, arguments)\n }\n })\n }\n }\n\n return fs\n}\n\nfunction enqueue (elem) {\n debug('ENQUEUE', elem[0].name, elem[1])\n fs[gracefulQueue].push(elem)\n retry()\n}\n\n// keep track of the timeout between retry() calls\nvar retryTimer\n\n// reset the startTime and lastTime to now\n// this resets the start of the 60 second overall timeout as well as the\n// delay between attempts so that we'll retry these jobs sooner\nfunction resetQueue () {\n var now = Date.now()\n for (var i = 0; i < fs[gracefulQueue].length; ++i) {\n // entries that are only a length of 2 are from an older version, don't\n // bother modifying those since they'll be retried anyway.\n if (fs[gracefulQueue][i].length > 2) {\n fs[gracefulQueue][i][3] = now // startTime\n fs[gracefulQueue][i][4] = now // lastTime\n }\n }\n // call retry to make sure we're actively processing the queue\n retry()\n}\n\nfunction retry () {\n // clear the timer and remove it to help prevent unintended concurrency\n clearTimeout(retryTimer)\n retryTimer = undefined\n\n if (fs[gracefulQueue].length === 0)\n return\n\n var elem = fs[gracefulQueue].shift()\n var fn = elem[0]\n var args = elem[1]\n // these items may be unset if they were added by an older graceful-fs\n var err = elem[2]\n var startTime = elem[3]\n var lastTime = elem[4]\n\n // if we don't have a startTime we have no way of knowing if we've waited\n // long enough, so go ahead and retry this item now\n if (startTime === undefined) {\n debug('RETRY', fn.name, args)\n fn.apply(null, args)\n } else if (Date.now() - startTime >= 60000) {\n // it's been more than 60 seconds total, bail now\n debug('TIMEOUT', fn.name, args)\n var cb = args.pop()\n if (typeof cb === 'function')\n cb.call(null, err)\n } else {\n // the amount of time between the last attempt and right now\n var sinceAttempt = Date.now() - lastTime\n // the amount of time between when we first tried, 
and when we last tried\n // rounded up to at least 1\n var sinceStart = Math.max(lastTime - startTime, 1)\n // backoff. wait longer than the total time we've been retrying, but only\n // up to a maximum of 100ms\n var desiredDelay = Math.min(sinceStart * 1.2, 100)\n // it's been long enough since the last retry, do it again\n if (sinceAttempt >= desiredDelay) {\n debug('RETRY', fn.name, args)\n fn.apply(null, args.concat([startTime]))\n } else {\n // if we can't do this job yet, push it to the end of the queue\n // and let the next iteration check again\n fs[gracefulQueue].push(elem)\n }\n }\n\n // schedule our next run if one isn't already scheduled\n if (retryTimer === undefined) {\n retryTimer = setTimeout(retry, 0)\n }\n}\n","var Stream = require('stream').Stream\n\nmodule.exports = legacy\n\nfunction legacy (fs) {\n return {\n ReadStream: ReadStream,\n WriteStream: WriteStream\n }\n\n function ReadStream (path, options) {\n if (!(this instanceof ReadStream)) return new ReadStream(path, options);\n\n Stream.call(this);\n\n var self = this;\n\n this.path = path;\n this.fd = null;\n this.readable = true;\n this.paused = false;\n\n this.flags = 'r';\n this.mode = 438; /*=0666*/\n this.bufferSize = 64 * 1024;\n\n options = options || {};\n\n // Mixin options into this\n var keys = Object.keys(options);\n for (var index = 0, length = keys.length; index < length; index++) {\n var key = keys[index];\n this[key] = options[key];\n }\n\n if (this.encoding) this.setEncoding(this.encoding);\n\n if (this.start !== undefined) {\n if ('number' !== typeof this.start) {\n throw TypeError('start must be a Number');\n }\n if (this.end === undefined) {\n this.end = Infinity;\n } else if ('number' !== typeof this.end) {\n throw TypeError('end must be a Number');\n }\n\n if (this.start > this.end) {\n throw new Error('start must be <= end');\n }\n\n this.pos = this.start;\n }\n\n if (this.fd !== null) {\n process.nextTick(function() {\n self._read();\n });\n return;\n }\n\n fs.open(this.path, this.flags, this.mode, function (err, fd) {\n if (err) {\n self.emit('error', err);\n self.readable = false;\n return;\n }\n\n self.fd = fd;\n self.emit('open', fd);\n self._read();\n })\n }\n\n function WriteStream (path, options) {\n if (!(this instanceof WriteStream)) return new WriteStream(path, options);\n\n Stream.call(this);\n\n this.path = path;\n this.fd = null;\n this.writable = true;\n\n this.flags = 'w';\n this.encoding = 'binary';\n this.mode = 438; /*=0666*/\n this.bytesWritten = 0;\n\n options = options || {};\n\n // Mixin options into this\n var keys = Object.keys(options);\n for (var index = 0, length = keys.length; index < length; index++) {\n var key = keys[index];\n this[key] = options[key];\n }\n\n if (this.start !== undefined) {\n if ('number' !== typeof this.start) {\n throw TypeError('start must be a Number');\n }\n if (this.start < 0) {\n throw new Error('start must be >= zero');\n }\n\n this.pos = this.start;\n }\n\n this.busy = false;\n this._queue = [];\n\n if (this.fd === null) {\n this._open = fs.open;\n this._queue.push([this._open, this.path, this.flags, this.mode, undefined]);\n this.flush();\n }\n }\n}\n","var constants = require('constants')\n\nvar origCwd = process.cwd\nvar cwd = null\n\nvar platform = process.env.GRACEFUL_FS_PLATFORM || process.platform\n\nprocess.cwd = function() {\n if (!cwd)\n cwd = origCwd.call(process)\n return cwd\n}\ntry {\n process.cwd()\n} catch (er) {}\n\n// This check is needed until node.js 12 is required\nif (typeof process.chdir === 'function') {\n var 
chdir = process.chdir\n process.chdir = function (d) {\n cwd = null\n chdir.call(process, d)\n }\n if (Object.setPrototypeOf) Object.setPrototypeOf(process.chdir, chdir)\n}\n\nmodule.exports = patch\n\nfunction patch (fs) {\n // (re-)implement some things that are known busted or missing.\n\n // lchmod, broken prior to 0.6.2\n // back-port the fix here.\n if (constants.hasOwnProperty('O_SYMLINK') &&\n process.version.match(/^v0\\.6\\.[0-2]|^v0\\.5\\./)) {\n patchLchmod(fs)\n }\n\n // lutimes implementation, or no-op\n if (!fs.lutimes) {\n patchLutimes(fs)\n }\n\n // https://github.com/isaacs/node-graceful-fs/issues/4\n // Chown should not fail on einval or eperm if non-root.\n // It should not fail on enosys ever, as this just indicates\n // that a fs doesn't support the intended operation.\n\n fs.chown = chownFix(fs.chown)\n fs.fchown = chownFix(fs.fchown)\n fs.lchown = chownFix(fs.lchown)\n\n fs.chmod = chmodFix(fs.chmod)\n fs.fchmod = chmodFix(fs.fchmod)\n fs.lchmod = chmodFix(fs.lchmod)\n\n fs.chownSync = chownFixSync(fs.chownSync)\n fs.fchownSync = chownFixSync(fs.fchownSync)\n fs.lchownSync = chownFixSync(fs.lchownSync)\n\n fs.chmodSync = chmodFixSync(fs.chmodSync)\n fs.fchmodSync = chmodFixSync(fs.fchmodSync)\n fs.lchmodSync = chmodFixSync(fs.lchmodSync)\n\n fs.stat = statFix(fs.stat)\n fs.fstat = statFix(fs.fstat)\n fs.lstat = statFix(fs.lstat)\n\n fs.statSync = statFixSync(fs.statSync)\n fs.fstatSync = statFixSync(fs.fstatSync)\n fs.lstatSync = statFixSync(fs.lstatSync)\n\n // if lchmod/lchown do not exist, then make them no-ops\n if (fs.chmod && !fs.lchmod) {\n fs.lchmod = function (path, mode, cb) {\n if (cb) process.nextTick(cb)\n }\n fs.lchmodSync = function () {}\n }\n if (fs.chown && !fs.lchown) {\n fs.lchown = function (path, uid, gid, cb) {\n if (cb) process.nextTick(cb)\n }\n fs.lchownSync = function () {}\n }\n\n // on Windows, A/V software can lock the directory, causing this\n // to fail with an EACCES or EPERM if the directory contains newly\n // created files. Try again on failure, for up to 60 seconds.\n\n // Set the timeout this long because some Windows Anti-Virus, such as Parity\n // bit9, may lock files for up to a minute, causing npm package install\n // failures. Also, take care to yield the scheduler. Windows scheduling gives\n // CPU to a busy looping process, which can cause the program causing the lock\n // contention to be starved of CPU by node, so the contention doesn't resolve.\n if (platform === \"win32\") {\n fs.rename = typeof fs.rename !== 'function' ? fs.rename\n : (function (fs$rename) {\n function rename (from, to, cb) {\n var start = Date.now()\n var backoff = 0;\n fs$rename(from, to, function CB (er) {\n if (er\n && (er.code === \"EACCES\" || er.code === \"EPERM\")\n && Date.now() - start < 60000) {\n setTimeout(function() {\n fs.stat(to, function (stater, st) {\n if (stater && stater.code === \"ENOENT\")\n fs$rename(from, to, CB);\n else\n cb(er)\n })\n }, backoff)\n if (backoff < 100)\n backoff += 10;\n return;\n }\n if (cb) cb(er)\n })\n }\n if (Object.setPrototypeOf) Object.setPrototypeOf(rename, fs$rename)\n return rename\n })(fs.rename)\n }\n\n // if read() returns EAGAIN, then just try it again.\n fs.read = typeof fs.read !== 'function' ? 
fs.read\n : (function (fs$read) {\n function read (fd, buffer, offset, length, position, callback_) {\n var callback\n if (callback_ && typeof callback_ === 'function') {\n var eagCounter = 0\n callback = function (er, _, __) {\n if (er && er.code === 'EAGAIN' && eagCounter < 10) {\n eagCounter ++\n return fs$read.call(fs, fd, buffer, offset, length, position, callback)\n }\n callback_.apply(this, arguments)\n }\n }\n return fs$read.call(fs, fd, buffer, offset, length, position, callback)\n }\n\n // This ensures `util.promisify` works as it does for native `fs.read`.\n if (Object.setPrototypeOf) Object.setPrototypeOf(read, fs$read)\n return read\n })(fs.read)\n\n fs.readSync = typeof fs.readSync !== 'function' ? fs.readSync\n : (function (fs$readSync) { return function (fd, buffer, offset, length, position) {\n var eagCounter = 0\n while (true) {\n try {\n return fs$readSync.call(fs, fd, buffer, offset, length, position)\n } catch (er) {\n if (er.code === 'EAGAIN' && eagCounter < 10) {\n eagCounter ++\n continue\n }\n throw er\n }\n }\n }})(fs.readSync)\n\n function patchLchmod (fs) {\n fs.lchmod = function (path, mode, callback) {\n fs.open( path\n , constants.O_WRONLY | constants.O_SYMLINK\n , mode\n , function (err, fd) {\n if (err) {\n if (callback) callback(err)\n return\n }\n // prefer to return the chmod error, if one occurs,\n // but still try to close, and report closing errors if they occur.\n fs.fchmod(fd, mode, function (err) {\n fs.close(fd, function(err2) {\n if (callback) callback(err || err2)\n })\n })\n })\n }\n\n fs.lchmodSync = function (path, mode) {\n var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode)\n\n // prefer to return the chmod error, if one occurs,\n // but still try to close, and report closing errors if they occur.\n var threw = true\n var ret\n try {\n ret = fs.fchmodSync(fd, mode)\n threw = false\n } finally {\n if (threw) {\n try {\n fs.closeSync(fd)\n } catch (er) {}\n } else {\n fs.closeSync(fd)\n }\n }\n return ret\n }\n }\n\n function patchLutimes (fs) {\n if (constants.hasOwnProperty(\"O_SYMLINK\") && fs.futimes) {\n fs.lutimes = function (path, at, mt, cb) {\n fs.open(path, constants.O_SYMLINK, function (er, fd) {\n if (er) {\n if (cb) cb(er)\n return\n }\n fs.futimes(fd, at, mt, function (er) {\n fs.close(fd, function (er2) {\n if (cb) cb(er || er2)\n })\n })\n })\n }\n\n fs.lutimesSync = function (path, at, mt) {\n var fd = fs.openSync(path, constants.O_SYMLINK)\n var ret\n var threw = true\n try {\n ret = fs.futimesSync(fd, at, mt)\n threw = false\n } finally {\n if (threw) {\n try {\n fs.closeSync(fd)\n } catch (er) {}\n } else {\n fs.closeSync(fd)\n }\n }\n return ret\n }\n\n } else if (fs.futimes) {\n fs.lutimes = function (_a, _b, _c, cb) { if (cb) process.nextTick(cb) }\n fs.lutimesSync = function () {}\n }\n }\n\n function chmodFix (orig) {\n if (!orig) return orig\n return function (target, mode, cb) {\n return orig.call(fs, target, mode, function (er) {\n if (chownErOk(er)) er = null\n if (cb) cb.apply(this, arguments)\n })\n }\n }\n\n function chmodFixSync (orig) {\n if (!orig) return orig\n return function (target, mode) {\n try {\n return orig.call(fs, target, mode)\n } catch (er) {\n if (!chownErOk(er)) throw er\n }\n }\n }\n\n\n function chownFix (orig) {\n if (!orig) return orig\n return function (target, uid, gid, cb) {\n return orig.call(fs, target, uid, gid, function (er) {\n if (chownErOk(er)) er = null\n if (cb) cb.apply(this, arguments)\n })\n }\n }\n\n function chownFixSync (orig) {\n if (!orig) return 
orig\n return function (target, uid, gid) {\n try {\n return orig.call(fs, target, uid, gid)\n } catch (er) {\n if (!chownErOk(er)) throw er\n }\n }\n }\n\n function statFix (orig) {\n if (!orig) return orig\n // Older versions of Node erroneously returned signed integers for\n // uid + gid.\n return function (target, options, cb) {\n if (typeof options === 'function') {\n cb = options\n options = null\n }\n function callback (er, stats) {\n if (stats) {\n if (stats.uid < 0) stats.uid += 0x100000000\n if (stats.gid < 0) stats.gid += 0x100000000\n }\n if (cb) cb.apply(this, arguments)\n }\n return options ? orig.call(fs, target, options, callback)\n : orig.call(fs, target, callback)\n }\n }\n\n function statFixSync (orig) {\n if (!orig) return orig\n // Older versions of Node erroneously returned signed integers for\n // uid + gid.\n return function (target, options) {\n var stats = options ? orig.call(fs, target, options)\n : orig.call(fs, target)\n if (stats) {\n if (stats.uid < 0) stats.uid += 0x100000000\n if (stats.gid < 0) stats.gid += 0x100000000\n }\n return stats;\n }\n }\n\n // ENOSYS means that the fs doesn't support the op. Just ignore\n // that, because it doesn't matter.\n //\n // if there's no getuid, or if getuid() is something other\n // than 0, and the error is EINVAL or EPERM, then just ignore\n // it.\n //\n // This specific case is a silent failure in cp, install, tar,\n // and most other unix tools that manage permissions.\n //\n // When running as root, or if other types of errors are\n // encountered, then it's strict.\n function chownErOk (er) {\n if (!er)\n return true\n\n if (er.code === \"ENOSYS\")\n return true\n\n var nonroot = !process.getuid || process.getuid() !== 0\n if (nonroot) {\n if (er.code === \"EINVAL\" || er.code === \"EPERM\")\n return true\n }\n\n return false\n }\n}\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\n/*!\n * is-plain-object \n *\n * Copyright (c) 2014-2017, Jon Schlinkert.\n * Released under the MIT License.\n */\n\nfunction isObject(o) {\n return Object.prototype.toString.call(o) === '[object Object]';\n}\n\nfunction isPlainObject(o) {\n var ctor,prot;\n\n if (isObject(o) === false) return false;\n\n // If has modified constructor\n ctor = o.constructor;\n if (ctor === undefined) return true;\n\n // If has modified prototype\n prot = ctor.prototype;\n if (isObject(prot) === false) return false;\n\n // If constructor does not have an Object-specific method\n if (prot.hasOwnProperty('isPrototypeOf') === false) {\n return false;\n }\n\n // Most likely a plain Object\n return true;\n}\n\nexports.isPlainObject = isPlainObject;\n","let _fs\ntry {\n _fs = require('graceful-fs')\n} catch (_) {\n _fs = require('fs')\n}\nconst universalify = require('universalify')\nconst { stringify, stripBom } = require('./utils')\n\nasync function _readFile (file, options = {}) {\n if (typeof options === 'string') {\n options = { encoding: options }\n }\n\n const fs = options.fs || _fs\n\n const shouldThrow = 'throws' in options ? options.throws : true\n\n let data = await universalify.fromCallback(fs.readFile)(file, options)\n\n data = stripBom(data)\n\n let obj\n try {\n obj = JSON.parse(data, options ? 
options.reviver : null)\n } catch (err) {\n if (shouldThrow) {\n err.message = `${file}: ${err.message}`\n throw err\n } else {\n return null\n }\n }\n\n return obj\n}\n\nconst readFile = universalify.fromPromise(_readFile)\n\nfunction readFileSync (file, options = {}) {\n if (typeof options === 'string') {\n options = { encoding: options }\n }\n\n const fs = options.fs || _fs\n\n const shouldThrow = 'throws' in options ? options.throws : true\n\n try {\n let content = fs.readFileSync(file, options)\n content = stripBom(content)\n return JSON.parse(content, options.reviver)\n } catch (err) {\n if (shouldThrow) {\n err.message = `${file}: ${err.message}`\n throw err\n } else {\n return null\n }\n }\n}\n\nasync function _writeFile (file, obj, options = {}) {\n const fs = options.fs || _fs\n\n const str = stringify(obj, options)\n\n await universalify.fromCallback(fs.writeFile)(file, str, options)\n}\n\nconst writeFile = universalify.fromPromise(_writeFile)\n\nfunction writeFileSync (file, obj, options = {}) {\n const fs = options.fs || _fs\n\n const str = stringify(obj, options)\n // not sure if fs.writeFileSync returns anything, but just in case\n return fs.writeFileSync(file, str, options)\n}\n\nconst jsonfile = {\n readFile,\n readFileSync,\n writeFile,\n writeFileSync\n}\n\nmodule.exports = jsonfile\n","function stringify (obj, { EOL = '\\n', finalEOL = true, replacer = null, spaces } = {}) {\n const EOF = finalEOL ? EOL : ''\n const str = JSON.stringify(obj, replacer, spaces)\n\n return str.replace(/\\n/g, EOL) + EOF\n}\n\nfunction stripBom (content) {\n // we do this because JSON.parse would convert it to a utf8 string if encoding wasn't specified\n if (Buffer.isBuffer(content)) content = content.toString('utf8')\n return content.replace(/^\\uFEFF/, '')\n}\n\nmodule.exports = { stringify, stripBom }\n","'use strict'\n\n// A linked list to keep track of recently-used-ness\nconst Yallist = require('yallist')\n\nconst MAX = Symbol('max')\nconst LENGTH = Symbol('length')\nconst LENGTH_CALCULATOR = Symbol('lengthCalculator')\nconst ALLOW_STALE = Symbol('allowStale')\nconst MAX_AGE = Symbol('maxAge')\nconst DISPOSE = Symbol('dispose')\nconst NO_DISPOSE_ON_SET = Symbol('noDisposeOnSet')\nconst LRU_LIST = Symbol('lruList')\nconst CACHE = Symbol('cache')\nconst UPDATE_AGE_ON_GET = Symbol('updateAgeOnGet')\n\nconst naiveLength = () => 1\n\n// lruList is a yallist where the head is the youngest\n// item, and the tail is the oldest. the list contains the Hit\n// objects as the entries.\n// Each Hit object has a reference to its Yallist.Node. This\n// never changes.\n//\n// cache is a Map (or PseudoMap) that matches the keys to\n// the Yallist.Node object.\nclass LRUCache {\n constructor (options) {\n if (typeof options === 'number')\n options = { max: options }\n\n if (!options)\n options = {}\n\n if (options.max && (typeof options.max !== 'number' || options.max < 0))\n throw new TypeError('max must be a non-negative number')\n // Kind of weird to have a default max of Infinity, but oh well.\n const max = this[MAX] = options.max || Infinity\n\n const lc = options.length || naiveLength\n this[LENGTH_CALCULATOR] = (typeof lc !== 'function') ? 
naiveLength : lc\n this[ALLOW_STALE] = options.stale || false\n if (options.maxAge && typeof options.maxAge !== 'number')\n throw new TypeError('maxAge must be a number')\n this[MAX_AGE] = options.maxAge || 0\n this[DISPOSE] = options.dispose\n this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false\n this[UPDATE_AGE_ON_GET] = options.updateAgeOnGet || false\n this.reset()\n }\n\n // resize the cache when the max changes.\n set max (mL) {\n if (typeof mL !== 'number' || mL < 0)\n throw new TypeError('max must be a non-negative number')\n\n this[MAX] = mL || Infinity\n trim(this)\n }\n get max () {\n return this[MAX]\n }\n\n set allowStale (allowStale) {\n this[ALLOW_STALE] = !!allowStale\n }\n get allowStale () {\n return this[ALLOW_STALE]\n }\n\n set maxAge (mA) {\n if (typeof mA !== 'number')\n throw new TypeError('maxAge must be a non-negative number')\n\n this[MAX_AGE] = mA\n trim(this)\n }\n get maxAge () {\n return this[MAX_AGE]\n }\n\n // resize the cache when the lengthCalculator changes.\n set lengthCalculator (lC) {\n if (typeof lC !== 'function')\n lC = naiveLength\n\n if (lC !== this[LENGTH_CALCULATOR]) {\n this[LENGTH_CALCULATOR] = lC\n this[LENGTH] = 0\n this[LRU_LIST].forEach(hit => {\n hit.length = this[LENGTH_CALCULATOR](hit.value, hit.key)\n this[LENGTH] += hit.length\n })\n }\n trim(this)\n }\n get lengthCalculator () { return this[LENGTH_CALCULATOR] }\n\n get length () { return this[LENGTH] }\n get itemCount () { return this[LRU_LIST].length }\n\n rforEach (fn, thisp) {\n thisp = thisp || this\n for (let walker = this[LRU_LIST].tail; walker !== null;) {\n const prev = walker.prev\n forEachStep(this, fn, walker, thisp)\n walker = prev\n }\n }\n\n forEach (fn, thisp) {\n thisp = thisp || this\n for (let walker = this[LRU_LIST].head; walker !== null;) {\n const next = walker.next\n forEachStep(this, fn, walker, thisp)\n walker = next\n }\n }\n\n keys () {\n return this[LRU_LIST].toArray().map(k => k.key)\n }\n\n values () {\n return this[LRU_LIST].toArray().map(k => k.value)\n }\n\n reset () {\n if (this[DISPOSE] &&\n this[LRU_LIST] &&\n this[LRU_LIST].length) {\n this[LRU_LIST].forEach(hit => this[DISPOSE](hit.key, hit.value))\n }\n\n this[CACHE] = new Map() // hash of items by key\n this[LRU_LIST] = new Yallist() // list of items in order of use recency\n this[LENGTH] = 0 // length of items in the list\n }\n\n dump () {\n return this[LRU_LIST].map(hit =>\n isStale(this, hit) ? false : {\n k: hit.key,\n v: hit.value,\n e: hit.now + (hit.maxAge || 0)\n }).toArray().filter(h => h)\n }\n\n dumpLru () {\n return this[LRU_LIST]\n }\n\n set (key, value, maxAge) {\n maxAge = maxAge || this[MAX_AGE]\n\n if (maxAge && typeof maxAge !== 'number')\n throw new TypeError('maxAge must be a number')\n\n const now = maxAge ? 
Date.now() : 0\n const len = this[LENGTH_CALCULATOR](value, key)\n\n if (this[CACHE].has(key)) {\n if (len > this[MAX]) {\n del(this, this[CACHE].get(key))\n return false\n }\n\n const node = this[CACHE].get(key)\n const item = node.value\n\n // dispose of the old one before overwriting\n // split out into 2 ifs for better coverage tracking\n if (this[DISPOSE]) {\n if (!this[NO_DISPOSE_ON_SET])\n this[DISPOSE](key, item.value)\n }\n\n item.now = now\n item.maxAge = maxAge\n item.value = value\n this[LENGTH] += len - item.length\n item.length = len\n this.get(key)\n trim(this)\n return true\n }\n\n const hit = new Entry(key, value, len, now, maxAge)\n\n // oversized objects fall out of cache automatically.\n if (hit.length > this[MAX]) {\n if (this[DISPOSE])\n this[DISPOSE](key, value)\n\n return false\n }\n\n this[LENGTH] += hit.length\n this[LRU_LIST].unshift(hit)\n this[CACHE].set(key, this[LRU_LIST].head)\n trim(this)\n return true\n }\n\n has (key) {\n if (!this[CACHE].has(key)) return false\n const hit = this[CACHE].get(key).value\n return !isStale(this, hit)\n }\n\n get (key) {\n return get(this, key, true)\n }\n\n peek (key) {\n return get(this, key, false)\n }\n\n pop () {\n const node = this[LRU_LIST].tail\n if (!node)\n return null\n\n del(this, node)\n return node.value\n }\n\n del (key) {\n del(this, this[CACHE].get(key))\n }\n\n load (arr) {\n // reset the cache\n this.reset()\n\n const now = Date.now()\n // A previous serialized cache has the most recent items first\n for (let l = arr.length - 1; l >= 0; l--) {\n const hit = arr[l]\n const expiresAt = hit.e || 0\n if (expiresAt === 0)\n // the item was created without expiration in a non aged cache\n this.set(hit.k, hit.v)\n else {\n const maxAge = expiresAt - now\n // dont add already expired items\n if (maxAge > 0) {\n this.set(hit.k, hit.v, maxAge)\n }\n }\n }\n }\n\n prune () {\n this[CACHE].forEach((value, key) => get(this, key, false))\n }\n}\n\nconst get = (self, key, doUse) => {\n const node = self[CACHE].get(key)\n if (node) {\n const hit = node.value\n if (isStale(self, hit)) {\n del(self, node)\n if (!self[ALLOW_STALE])\n return undefined\n } else {\n if (doUse) {\n if (self[UPDATE_AGE_ON_GET])\n node.value.now = Date.now()\n self[LRU_LIST].unshiftNode(node)\n }\n }\n return hit.value\n }\n}\n\nconst isStale = (self, hit) => {\n if (!hit || (!hit.maxAge && !self[MAX_AGE]))\n return false\n\n const diff = Date.now() - hit.now\n return hit.maxAge ? 
diff > hit.maxAge\n : self[MAX_AGE] && (diff > self[MAX_AGE])\n}\n\nconst trim = self => {\n if (self[LENGTH] > self[MAX]) {\n for (let walker = self[LRU_LIST].tail;\n self[LENGTH] > self[MAX] && walker !== null;) {\n // We know that we're about to delete this one, and also\n // what the next least recently used key will be, so just\n // go ahead and set it now.\n const prev = walker.prev\n del(self, walker)\n walker = prev\n }\n }\n}\n\nconst del = (self, node) => {\n if (node) {\n const hit = node.value\n if (self[DISPOSE])\n self[DISPOSE](hit.key, hit.value)\n\n self[LENGTH] -= hit.length\n self[CACHE].delete(hit.key)\n self[LRU_LIST].removeNode(node)\n }\n}\n\nclass Entry {\n constructor (key, value, length, now, maxAge) {\n this.key = key\n this.value = value\n this.length = length\n this.now = now\n this.maxAge = maxAge || 0\n }\n}\n\nconst forEachStep = (self, fn, node, thisp) => {\n let hit = node.value\n if (isStale(self, hit)) {\n del(self, node)\n if (!self[ALLOW_STALE])\n hit = undefined\n }\n if (hit)\n fn.call(thisp, hit.value, hit.key, self)\n}\n\nmodule.exports = LRUCache\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }\n\nvar Stream = _interopDefault(require('stream'));\nvar http = _interopDefault(require('http'));\nvar Url = _interopDefault(require('url'));\nvar whatwgUrl = _interopDefault(require('whatwg-url'));\nvar https = _interopDefault(require('https'));\nvar zlib = _interopDefault(require('zlib'));\n\n// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js\n\n// fix for \"Readable\" isn't a named export issue\nconst Readable = Stream.Readable;\n\nconst BUFFER = Symbol('buffer');\nconst TYPE = Symbol('type');\n\nclass Blob {\n\tconstructor() {\n\t\tthis[TYPE] = '';\n\n\t\tconst blobParts = arguments[0];\n\t\tconst options = arguments[1];\n\n\t\tconst buffers = [];\n\t\tlet size = 0;\n\n\t\tif (blobParts) {\n\t\t\tconst a = blobParts;\n\t\t\tconst length = Number(a.length);\n\t\t\tfor (let i = 0; i < length; i++) {\n\t\t\t\tconst element = a[i];\n\t\t\t\tlet buffer;\n\t\t\t\tif (element instanceof Buffer) {\n\t\t\t\t\tbuffer = element;\n\t\t\t\t} else if (ArrayBuffer.isView(element)) {\n\t\t\t\t\tbuffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);\n\t\t\t\t} else if (element instanceof ArrayBuffer) {\n\t\t\t\t\tbuffer = Buffer.from(element);\n\t\t\t\t} else if (element instanceof Blob) {\n\t\t\t\t\tbuffer = element[BUFFER];\n\t\t\t\t} else {\n\t\t\t\t\tbuffer = Buffer.from(typeof element === 'string' ? 
element : String(element));\n\t\t\t\t}\n\t\t\t\tsize += buffer.length;\n\t\t\t\tbuffers.push(buffer);\n\t\t\t}\n\t\t}\n\n\t\tthis[BUFFER] = Buffer.concat(buffers);\n\n\t\tlet type = options && options.type !== undefined && String(options.type).toLowerCase();\n\t\tif (type && !/[^\\u0020-\\u007E]/.test(type)) {\n\t\t\tthis[TYPE] = type;\n\t\t}\n\t}\n\tget size() {\n\t\treturn this[BUFFER].length;\n\t}\n\tget type() {\n\t\treturn this[TYPE];\n\t}\n\ttext() {\n\t\treturn Promise.resolve(this[BUFFER].toString());\n\t}\n\tarrayBuffer() {\n\t\tconst buf = this[BUFFER];\n\t\tconst ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\treturn Promise.resolve(ab);\n\t}\n\tstream() {\n\t\tconst readable = new Readable();\n\t\treadable._read = function () {};\n\t\treadable.push(this[BUFFER]);\n\t\treadable.push(null);\n\t\treturn readable;\n\t}\n\ttoString() {\n\t\treturn '[object Blob]';\n\t}\n\tslice() {\n\t\tconst size = this.size;\n\n\t\tconst start = arguments[0];\n\t\tconst end = arguments[1];\n\t\tlet relativeStart, relativeEnd;\n\t\tif (start === undefined) {\n\t\t\trelativeStart = 0;\n\t\t} else if (start < 0) {\n\t\t\trelativeStart = Math.max(size + start, 0);\n\t\t} else {\n\t\t\trelativeStart = Math.min(start, size);\n\t\t}\n\t\tif (end === undefined) {\n\t\t\trelativeEnd = size;\n\t\t} else if (end < 0) {\n\t\t\trelativeEnd = Math.max(size + end, 0);\n\t\t} else {\n\t\t\trelativeEnd = Math.min(end, size);\n\t\t}\n\t\tconst span = Math.max(relativeEnd - relativeStart, 0);\n\n\t\tconst buffer = this[BUFFER];\n\t\tconst slicedBuffer = buffer.slice(relativeStart, relativeStart + span);\n\t\tconst blob = new Blob([], { type: arguments[2] });\n\t\tblob[BUFFER] = slicedBuffer;\n\t\treturn blob;\n\t}\n}\n\nObject.defineProperties(Blob.prototype, {\n\tsize: { enumerable: true },\n\ttype: { enumerable: true },\n\tslice: { enumerable: true }\n});\n\nObject.defineProperty(Blob.prototype, Symbol.toStringTag, {\n\tvalue: 'Blob',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * fetch-error.js\n *\n * FetchError interface for operational errors\n */\n\n/**\n * Create FetchError instance\n *\n * @param String message Error message for human\n * @param String type Error type for machine\n * @param String systemError For Node.js system error\n * @return FetchError\n */\nfunction FetchError(message, type, systemError) {\n Error.call(this, message);\n\n this.message = message;\n this.type = type;\n\n // when err.type is `system`, err.code contains system error code\n if (systemError) {\n this.code = this.errno = systemError.code;\n }\n\n // hide custom error implementation details from end-users\n Error.captureStackTrace(this, this.constructor);\n}\n\nFetchError.prototype = Object.create(Error.prototype);\nFetchError.prototype.constructor = FetchError;\nFetchError.prototype.name = 'FetchError';\n\nlet convert;\ntry {\n\tconvert = require('encoding').convert;\n} catch (e) {}\n\nconst INTERNALS = Symbol('Body internals');\n\n// fix an issue where \"PassThrough\" isn't a named export for node <10\nconst PassThrough = Stream.PassThrough;\n\n/**\n * Body mixin\n *\n * Ref: https://fetch.spec.whatwg.org/#body\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nfunction Body(body) {\n\tvar _this = this;\n\n\tvar _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},\n\t _ref$size = _ref.size;\n\n\tlet size = _ref$size === undefined ? 
0 : _ref$size;\n\tvar _ref$timeout = _ref.timeout;\n\tlet timeout = _ref$timeout === undefined ? 0 : _ref$timeout;\n\n\tif (body == null) {\n\t\t// body is undefined or null\n\t\tbody = null;\n\t} else if (isURLSearchParams(body)) {\n\t\t// body is a URLSearchParams\n\t\tbody = Buffer.from(body.toString());\n\t} else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {\n\t\t// body is ArrayBuffer\n\t\tbody = Buffer.from(body);\n\t} else if (ArrayBuffer.isView(body)) {\n\t\t// body is ArrayBufferView\n\t\tbody = Buffer.from(body.buffer, body.byteOffset, body.byteLength);\n\t} else if (body instanceof Stream) ; else {\n\t\t// none of the above\n\t\t// coerce to string then buffer\n\t\tbody = Buffer.from(String(body));\n\t}\n\tthis[INTERNALS] = {\n\t\tbody,\n\t\tdisturbed: false,\n\t\terror: null\n\t};\n\tthis.size = size;\n\tthis.timeout = timeout;\n\n\tif (body instanceof Stream) {\n\t\tbody.on('error', function (err) {\n\t\t\tconst error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);\n\t\t\t_this[INTERNALS].error = error;\n\t\t});\n\t}\n}\n\nBody.prototype = {\n\tget body() {\n\t\treturn this[INTERNALS].body;\n\t},\n\n\tget bodyUsed() {\n\t\treturn this[INTERNALS].disturbed;\n\t},\n\n\t/**\n * Decode response as ArrayBuffer\n *\n * @return Promise\n */\n\tarrayBuffer() {\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\t});\n\t},\n\n\t/**\n * Return raw response as Blob\n *\n * @return Promise\n */\n\tblob() {\n\t\tlet ct = this.headers && this.headers.get('content-type') || '';\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn Object.assign(\n\t\t\t// Prevent copying\n\t\t\tnew Blob([], {\n\t\t\t\ttype: ct.toLowerCase()\n\t\t\t}), {\n\t\t\t\t[BUFFER]: buf\n\t\t\t});\n\t\t});\n\t},\n\n\t/**\n * Decode response as json\n *\n * @return Promise\n */\n\tjson() {\n\t\tvar _this2 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\ttry {\n\t\t\t\treturn JSON.parse(buffer.toString());\n\t\t\t} catch (err) {\n\t\t\t\treturn Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));\n\t\t\t}\n\t\t});\n\t},\n\n\t/**\n * Decode response as text\n *\n * @return Promise\n */\n\ttext() {\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn buffer.toString();\n\t\t});\n\t},\n\n\t/**\n * Decode response as buffer (non-spec api)\n *\n * @return Promise\n */\n\tbuffer() {\n\t\treturn consumeBody.call(this);\n\t},\n\n\t/**\n * Decode response as text, while automatically detecting the encoding and\n * trying to decode to UTF-8 (non-spec api)\n *\n * @return Promise\n */\n\ttextConverted() {\n\t\tvar _this3 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn convertBody(buffer, _this3.headers);\n\t\t});\n\t}\n};\n\n// In browsers, all properties are enumerable.\nObject.defineProperties(Body.prototype, {\n\tbody: { enumerable: true },\n\tbodyUsed: { enumerable: true },\n\tarrayBuffer: { enumerable: true },\n\tblob: { enumerable: true },\n\tjson: { enumerable: true },\n\ttext: { enumerable: true }\n});\n\nBody.mixIn = function (proto) {\n\tfor (const name of Object.getOwnPropertyNames(Body.prototype)) {\n\t\t// istanbul ignore else: future proof\n\t\tif (!(name in proto)) {\n\t\t\tconst desc 
= Object.getOwnPropertyDescriptor(Body.prototype, name);\n\t\t\tObject.defineProperty(proto, name, desc);\n\t\t}\n\t}\n};\n\n/**\n * Consume and convert an entire Body to a Buffer.\n *\n * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body\n *\n * @return Promise\n */\nfunction consumeBody() {\n\tvar _this4 = this;\n\n\tif (this[INTERNALS].disturbed) {\n\t\treturn Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));\n\t}\n\n\tthis[INTERNALS].disturbed = true;\n\n\tif (this[INTERNALS].error) {\n\t\treturn Body.Promise.reject(this[INTERNALS].error);\n\t}\n\n\tlet body = this.body;\n\n\t// body is null\n\tif (body === null) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is blob\n\tif (isBlob(body)) {\n\t\tbody = body.stream();\n\t}\n\n\t// body is buffer\n\tif (Buffer.isBuffer(body)) {\n\t\treturn Body.Promise.resolve(body);\n\t}\n\n\t// istanbul ignore if: should never happen\n\tif (!(body instanceof Stream)) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is stream\n\t// get ready to actually consume the body\n\tlet accum = [];\n\tlet accumBytes = 0;\n\tlet abort = false;\n\n\treturn new Body.Promise(function (resolve, reject) {\n\t\tlet resTimeout;\n\n\t\t// allow timeout on slow response body\n\t\tif (_this4.timeout) {\n\t\t\tresTimeout = setTimeout(function () {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));\n\t\t\t}, _this4.timeout);\n\t\t}\n\n\t\t// handle stream errors\n\t\tbody.on('error', function (err) {\n\t\t\tif (err.name === 'AbortError') {\n\t\t\t\t// if the request was aborted, reject with this Error\n\t\t\t\tabort = true;\n\t\t\t\treject(err);\n\t\t\t} else {\n\t\t\t\t// other errors, such as incorrect content-encoding\n\t\t\t\treject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\n\t\tbody.on('data', function (chunk) {\n\t\t\tif (abort || chunk === null) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tif (_this4.size && accumBytes + chunk.length > _this4.size) {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\taccumBytes += chunk.length;\n\t\t\taccum.push(chunk);\n\t\t});\n\n\t\tbody.on('end', function () {\n\t\t\tif (abort) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tclearTimeout(resTimeout);\n\n\t\t\ttry {\n\t\t\t\tresolve(Buffer.concat(accum, accumBytes));\n\t\t\t} catch (err) {\n\t\t\t\t// handle streams that have accumulated too much data (issue #414)\n\t\t\t\treject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\t});\n}\n\n/**\n * Detect buffer encoding and convert to target encoding\n * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding\n *\n * @param Buffer buffer Incoming buffer\n * @param String encoding Target encoding\n * @return String\n */\nfunction convertBody(buffer, headers) {\n\tif (typeof convert !== 'function') {\n\t\tthrow new Error('The package `encoding` must be installed to use the textConverted() function');\n\t}\n\n\tconst ct = headers.get('content-type');\n\tlet charset = 'utf-8';\n\tlet res, str;\n\n\t// header\n\tif (ct) {\n\t\tres = /charset=([^;]*)/i.exec(ct);\n\t}\n\n\t// no charset in content type, peek at response body for at most 1024 bytes\n\tstr = 
buffer.slice(0, 1024).toString();\n\n\t// html5\n\tif (!res && str) {\n\t\tres = / 0 && arguments[0] !== undefined ? arguments[0] : undefined;\n\n\t\tthis[MAP] = Object.create(null);\n\n\t\tif (init instanceof Headers) {\n\t\t\tconst rawHeaders = init.raw();\n\t\t\tconst headerNames = Object.keys(rawHeaders);\n\n\t\t\tfor (const headerName of headerNames) {\n\t\t\t\tfor (const value of rawHeaders[headerName]) {\n\t\t\t\t\tthis.append(headerName, value);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\treturn;\n\t\t}\n\n\t\t// We don't worry about converting prop to ByteString here as append()\n\t\t// will handle it.\n\t\tif (init == null) ; else if (typeof init === 'object') {\n\t\t\tconst method = init[Symbol.iterator];\n\t\t\tif (method != null) {\n\t\t\t\tif (typeof method !== 'function') {\n\t\t\t\t\tthrow new TypeError('Header pairs must be iterable');\n\t\t\t\t}\n\n\t\t\t\t// sequence>\n\t\t\t\t// Note: per spec we have to first exhaust the lists then process them\n\t\t\t\tconst pairs = [];\n\t\t\t\tfor (const pair of init) {\n\t\t\t\t\tif (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be iterable');\n\t\t\t\t\t}\n\t\t\t\t\tpairs.push(Array.from(pair));\n\t\t\t\t}\n\n\t\t\t\tfor (const pair of pairs) {\n\t\t\t\t\tif (pair.length !== 2) {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be a name/value tuple');\n\t\t\t\t\t}\n\t\t\t\t\tthis.append(pair[0], pair[1]);\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\t// record\n\t\t\t\tfor (const key of Object.keys(init)) {\n\t\t\t\t\tconst value = init[key];\n\t\t\t\t\tthis.append(key, value);\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tthrow new TypeError('Provided initializer must be an object');\n\t\t}\n\t}\n\n\t/**\n * Return combined header value given name\n *\n * @param String name Header name\n * @return Mixed\n */\n\tget(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key === undefined) {\n\t\t\treturn null;\n\t\t}\n\n\t\treturn this[MAP][key].join(', ');\n\t}\n\n\t/**\n * Iterate over all headers\n *\n * @param Function callback Executed for each item with parameters (value, name, thisArg)\n * @param Boolean thisArg `this` context for callback function\n * @return Void\n */\n\tforEach(callback) {\n\t\tlet thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;\n\n\t\tlet pairs = getHeaders(this);\n\t\tlet i = 0;\n\t\twhile (i < pairs.length) {\n\t\t\tvar _pairs$i = pairs[i];\n\t\t\tconst name = _pairs$i[0],\n\t\t\t value = _pairs$i[1];\n\n\t\t\tcallback.call(thisArg, value, name, this);\n\t\t\tpairs = getHeaders(this);\n\t\t\ti++;\n\t\t}\n\t}\n\n\t/**\n * Overwrite header values given name\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tset(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tthis[MAP][key !== undefined ? 
key : name] = [value];\n\t}\n\n\t/**\n * Append a value onto existing header\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tappend(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tthis[MAP][key].push(value);\n\t\t} else {\n\t\t\tthis[MAP][name] = [value];\n\t\t}\n\t}\n\n\t/**\n * Check for header name existence\n *\n * @param String name Header name\n * @return Boolean\n */\n\thas(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\treturn find(this[MAP], name) !== undefined;\n\t}\n\n\t/**\n * Delete all header values given name\n *\n * @param String name Header name\n * @return Void\n */\n\tdelete(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tdelete this[MAP][key];\n\t\t}\n\t}\n\n\t/**\n * Return raw headers (non-spec api)\n *\n * @return Object\n */\n\traw() {\n\t\treturn this[MAP];\n\t}\n\n\t/**\n * Get an iterator on keys.\n *\n * @return Iterator\n */\n\tkeys() {\n\t\treturn createHeadersIterator(this, 'key');\n\t}\n\n\t/**\n * Get an iterator on values.\n *\n * @return Iterator\n */\n\tvalues() {\n\t\treturn createHeadersIterator(this, 'value');\n\t}\n\n\t/**\n * Get an iterator on entries.\n *\n * This is the default iterator of the Headers object.\n *\n * @return Iterator\n */\n\t[Symbol.iterator]() {\n\t\treturn createHeadersIterator(this, 'key+value');\n\t}\n}\nHeaders.prototype.entries = Headers.prototype[Symbol.iterator];\n\nObject.defineProperty(Headers.prototype, Symbol.toStringTag, {\n\tvalue: 'Headers',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Headers.prototype, {\n\tget: { enumerable: true },\n\tforEach: { enumerable: true },\n\tset: { enumerable: true },\n\tappend: { enumerable: true },\n\thas: { enumerable: true },\n\tdelete: { enumerable: true },\n\tkeys: { enumerable: true },\n\tvalues: { enumerable: true },\n\tentries: { enumerable: true }\n});\n\nfunction getHeaders(headers) {\n\tlet kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';\n\n\tconst keys = Object.keys(headers[MAP]).sort();\n\treturn keys.map(kind === 'key' ? function (k) {\n\t\treturn k.toLowerCase();\n\t} : kind === 'value' ? 
function (k) {\n\t\treturn headers[MAP][k].join(', ');\n\t} : function (k) {\n\t\treturn [k.toLowerCase(), headers[MAP][k].join(', ')];\n\t});\n}\n\nconst INTERNAL = Symbol('internal');\n\nfunction createHeadersIterator(target, kind) {\n\tconst iterator = Object.create(HeadersIteratorPrototype);\n\titerator[INTERNAL] = {\n\t\ttarget,\n\t\tkind,\n\t\tindex: 0\n\t};\n\treturn iterator;\n}\n\nconst HeadersIteratorPrototype = Object.setPrototypeOf({\n\tnext() {\n\t\t// istanbul ignore if\n\t\tif (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {\n\t\t\tthrow new TypeError('Value of `this` is not a HeadersIterator');\n\t\t}\n\n\t\tvar _INTERNAL = this[INTERNAL];\n\t\tconst target = _INTERNAL.target,\n\t\t kind = _INTERNAL.kind,\n\t\t index = _INTERNAL.index;\n\n\t\tconst values = getHeaders(target, kind);\n\t\tconst len = values.length;\n\t\tif (index >= len) {\n\t\t\treturn {\n\t\t\t\tvalue: undefined,\n\t\t\t\tdone: true\n\t\t\t};\n\t\t}\n\n\t\tthis[INTERNAL].index = index + 1;\n\n\t\treturn {\n\t\t\tvalue: values[index],\n\t\t\tdone: false\n\t\t};\n\t}\n}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));\n\nObject.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {\n\tvalue: 'HeadersIterator',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * Export the Headers object in a form that Node.js can consume.\n *\n * @param Headers headers\n * @return Object\n */\nfunction exportNodeCompatibleHeaders(headers) {\n\tconst obj = Object.assign({ __proto__: null }, headers[MAP]);\n\n\t// http.request() only supports string as Host header. This hack makes\n\t// specifying custom Host header possible.\n\tconst hostHeaderKey = find(headers[MAP], 'Host');\n\tif (hostHeaderKey !== undefined) {\n\t\tobj[hostHeaderKey] = obj[hostHeaderKey][0];\n\t}\n\n\treturn obj;\n}\n\n/**\n * Create a Headers object from an object of headers, ignoring those that do\n * not conform to HTTP grammar productions.\n *\n * @param Object obj Object of headers\n * @return Headers\n */\nfunction createHeadersLenient(obj) {\n\tconst headers = new Headers();\n\tfor (const name of Object.keys(obj)) {\n\t\tif (invalidTokenRegex.test(name)) {\n\t\t\tcontinue;\n\t\t}\n\t\tif (Array.isArray(obj[name])) {\n\t\t\tfor (const val of obj[name]) {\n\t\t\t\tif (invalidHeaderCharRegex.test(val)) {\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\tif (headers[MAP][name] === undefined) {\n\t\t\t\t\theaders[MAP][name] = [val];\n\t\t\t\t} else {\n\t\t\t\t\theaders[MAP][name].push(val);\n\t\t\t\t}\n\t\t\t}\n\t\t} else if (!invalidHeaderCharRegex.test(obj[name])) {\n\t\t\theaders[MAP][name] = [obj[name]];\n\t\t}\n\t}\n\treturn headers;\n}\n\nconst INTERNALS$1 = Symbol('Response internals');\n\n// fix an issue where \"STATUS_CODES\" aren't a named export for node <10\nconst STATUS_CODES = http.STATUS_CODES;\n\n/**\n * Response class\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nclass Response {\n\tconstructor() {\n\t\tlet body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;\n\t\tlet opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tBody.call(this, body, opts);\n\n\t\tconst status = opts.status || 200;\n\t\tconst headers = new Headers(opts.headers);\n\n\t\tif (body != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(body);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tthis[INTERNALS$1] = {\n\t\t\turl: opts.url,\n\t\t\tstatus,\n\t\t\tstatusText: opts.statusText || STATUS_CODES[status],\n\t\t\theaders,\n\t\t\tcounter: opts.counter\n\t\t};\n\t}\n\n\tget url() {\n\t\treturn this[INTERNALS$1].url || '';\n\t}\n\n\tget status() {\n\t\treturn this[INTERNALS$1].status;\n\t}\n\n\t/**\n * Convenience property representing if the request ended normally\n */\n\tget ok() {\n\t\treturn this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;\n\t}\n\n\tget redirected() {\n\t\treturn this[INTERNALS$1].counter > 0;\n\t}\n\n\tget statusText() {\n\t\treturn this[INTERNALS$1].statusText;\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$1].headers;\n\t}\n\n\t/**\n * Clone this response\n *\n * @return Response\n */\n\tclone() {\n\t\treturn new Response(clone(this), {\n\t\t\turl: this.url,\n\t\t\tstatus: this.status,\n\t\t\tstatusText: this.statusText,\n\t\t\theaders: this.headers,\n\t\t\tok: this.ok,\n\t\t\tredirected: this.redirected\n\t\t});\n\t}\n}\n\nBody.mixIn(Response.prototype);\n\nObject.defineProperties(Response.prototype, {\n\turl: { enumerable: true },\n\tstatus: { enumerable: true },\n\tok: { enumerable: true },\n\tredirected: { enumerable: true },\n\tstatusText: { enumerable: true },\n\theaders: { enumerable: true },\n\tclone: { enumerable: true }\n});\n\nObject.defineProperty(Response.prototype, Symbol.toStringTag, {\n\tvalue: 'Response',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nconst INTERNALS$2 = Symbol('Request internals');\nconst URL = Url.URL || whatwgUrl.URL;\n\n// fix an issue where \"format\", \"parse\" aren't a named export for node <10\nconst parse_url = Url.parse;\nconst format_url = Url.format;\n\n/**\n * Wrapper around `new URL` to handle arbitrary URLs\n *\n * @param {string} urlStr\n * @return {void}\n */\nfunction parseURL(urlStr) {\n\t/*\n \tCheck whether the URL is absolute or not\n \t\tScheme: https://tools.ietf.org/html/rfc3986#section-3.1\n \tAbsolute URL: https://tools.ietf.org/html/rfc3986#section-4.3\n */\n\tif (/^[a-zA-Z][a-zA-Z\\d+\\-.]*:/.exec(urlStr)) {\n\t\turlStr = new URL(urlStr).toString();\n\t}\n\n\t// Fallback to old implementation for arbitrary URLs\n\treturn parse_url(urlStr);\n}\n\nconst streamDestructionSupported = 'destroy' in Stream.Readable.prototype;\n\n/**\n * Check if a value is an instance of Request.\n *\n * @param Mixed input\n * @return Boolean\n */\nfunction isRequest(input) {\n\treturn typeof input === 'object' && typeof input[INTERNALS$2] === 'object';\n}\n\nfunction isAbortSignal(signal) {\n\tconst proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal);\n\treturn !!(proto && proto.constructor.name === 'AbortSignal');\n}\n\n/**\n * Request class\n *\n * @param Mixed input Url or Request instance\n * @param Object init Custom options\n * @return Void\n */\nclass Request {\n\tconstructor(input) {\n\t\tlet init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tlet parsedURL;\n\n\t\t// normalize input\n\t\tif (!isRequest(input)) {\n\t\t\tif (input && input.href) {\n\t\t\t\t// in order to support Node.js' Url objects; though WHATWG's URL objects\n\t\t\t\t// will fall into this branch also (since their `toString()` will return\n\t\t\t\t// `href` property anyway)\n\t\t\t\tparsedURL = parseURL(input.href);\n\t\t\t} else {\n\t\t\t\t// coerce input to a string before attempting to parse\n\t\t\t\tparsedURL = parseURL(`${input}`);\n\t\t\t}\n\t\t\tinput = {};\n\t\t} else {\n\t\t\tparsedURL = parseURL(input.url);\n\t\t}\n\n\t\tlet method = init.method || input.method || 'GET';\n\t\tmethod = method.toUpperCase();\n\n\t\tif ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {\n\t\t\tthrow new TypeError('Request with GET/HEAD method cannot have body');\n\t\t}\n\n\t\tlet inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;\n\n\t\tBody.call(this, inputBody, {\n\t\t\ttimeout: init.timeout || input.timeout || 0,\n\t\t\tsize: init.size || input.size || 0\n\t\t});\n\n\t\tconst headers = new Headers(init.headers || input.headers || {});\n\n\t\tif (inputBody != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(inputBody);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tlet signal = isRequest(input) ? input.signal : null;\n\t\tif ('signal' in init) signal = init.signal;\n\n\t\tif (signal != null && !isAbortSignal(signal)) {\n\t\t\tthrow new TypeError('Expected signal to be an instanceof AbortSignal');\n\t\t}\n\n\t\tthis[INTERNALS$2] = {\n\t\t\tmethod,\n\t\t\tredirect: init.redirect || input.redirect || 'follow',\n\t\t\theaders,\n\t\t\tparsedURL,\n\t\t\tsignal\n\t\t};\n\n\t\t// node-fetch-only options\n\t\tthis.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;\n\t\tthis.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? 
input.compress : true;\n\t\tthis.counter = init.counter || input.counter || 0;\n\t\tthis.agent = init.agent || input.agent;\n\t}\n\n\tget method() {\n\t\treturn this[INTERNALS$2].method;\n\t}\n\n\tget url() {\n\t\treturn format_url(this[INTERNALS$2].parsedURL);\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$2].headers;\n\t}\n\n\tget redirect() {\n\t\treturn this[INTERNALS$2].redirect;\n\t}\n\n\tget signal() {\n\t\treturn this[INTERNALS$2].signal;\n\t}\n\n\t/**\n * Clone this request\n *\n * @return Request\n */\n\tclone() {\n\t\treturn new Request(this);\n\t}\n}\n\nBody.mixIn(Request.prototype);\n\nObject.defineProperty(Request.prototype, Symbol.toStringTag, {\n\tvalue: 'Request',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Request.prototype, {\n\tmethod: { enumerable: true },\n\turl: { enumerable: true },\n\theaders: { enumerable: true },\n\tredirect: { enumerable: true },\n\tclone: { enumerable: true },\n\tsignal: { enumerable: true }\n});\n\n/**\n * Convert a Request to Node.js http request options.\n *\n * @param Request A Request instance\n * @return Object The options object to be passed to http.request\n */\nfunction getNodeRequestOptions(request) {\n\tconst parsedURL = request[INTERNALS$2].parsedURL;\n\tconst headers = new Headers(request[INTERNALS$2].headers);\n\n\t// fetch step 1.3\n\tif (!headers.has('Accept')) {\n\t\theaders.set('Accept', '*/*');\n\t}\n\n\t// Basic fetch\n\tif (!parsedURL.protocol || !parsedURL.hostname) {\n\t\tthrow new TypeError('Only absolute URLs are supported');\n\t}\n\n\tif (!/^https?:$/.test(parsedURL.protocol)) {\n\t\tthrow new TypeError('Only HTTP(S) protocols are supported');\n\t}\n\n\tif (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {\n\t\tthrow new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');\n\t}\n\n\t// HTTP-network-or-cache fetch steps 2.4-2.7\n\tlet contentLengthValue = null;\n\tif (request.body == null && /^(POST|PUT)$/i.test(request.method)) {\n\t\tcontentLengthValue = '0';\n\t}\n\tif (request.body != null) {\n\t\tconst totalBytes = getTotalBytes(request);\n\t\tif (typeof totalBytes === 'number') {\n\t\t\tcontentLengthValue = String(totalBytes);\n\t\t}\n\t}\n\tif (contentLengthValue) {\n\t\theaders.set('Content-Length', contentLengthValue);\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.11\n\tif (!headers.has('User-Agent')) {\n\t\theaders.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.15\n\tif (request.compress && !headers.has('Accept-Encoding')) {\n\t\theaders.set('Accept-Encoding', 'gzip,deflate');\n\t}\n\n\tlet agent = request.agent;\n\tif (typeof agent === 'function') {\n\t\tagent = agent(parsedURL);\n\t}\n\n\tif (!headers.has('Connection') && !agent) {\n\t\theaders.set('Connection', 'close');\n\t}\n\n\t// HTTP-network fetch step 4.2\n\t// chunked encoding is handled by Node.js\n\n\treturn Object.assign({}, parsedURL, {\n\t\tmethod: request.method,\n\t\theaders: exportNodeCompatibleHeaders(headers),\n\t\tagent\n\t});\n}\n\n/**\n * abort-error.js\n *\n * AbortError interface for cancelled requests\n */\n\n/**\n * Create AbortError instance\n *\n * @param String message Error message for human\n * @return AbortError\n */\nfunction AbortError(message) {\n Error.call(this, message);\n\n this.type = 'aborted';\n this.message = message;\n\n // hide custom error implementation details from end-users\n 
Error.captureStackTrace(this, this.constructor);\n}\n\nAbortError.prototype = Object.create(Error.prototype);\nAbortError.prototype.constructor = AbortError;\nAbortError.prototype.name = 'AbortError';\n\nconst URL$1 = Url.URL || whatwgUrl.URL;\n\n// fix an issue where \"PassThrough\", \"resolve\" aren't a named export for node <10\nconst PassThrough$1 = Stream.PassThrough;\n\nconst isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) {\n\tconst orig = new URL$1(original).hostname;\n\tconst dest = new URL$1(destination).hostname;\n\n\treturn orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest);\n};\n\n/**\n * Fetch function\n *\n * @param Mixed url Absolute url or Request instance\n * @param Object opts Fetch options\n * @return Promise\n */\nfunction fetch(url, opts) {\n\n\t// allow custom promise\n\tif (!fetch.Promise) {\n\t\tthrow new Error('native promise missing, set fetch.Promise to your favorite alternative');\n\t}\n\n\tBody.Promise = fetch.Promise;\n\n\t// wrap http.request into fetch\n\treturn new fetch.Promise(function (resolve, reject) {\n\t\t// build request object\n\t\tconst request = new Request(url, opts);\n\t\tconst options = getNodeRequestOptions(request);\n\n\t\tconst send = (options.protocol === 'https:' ? https : http).request;\n\t\tconst signal = request.signal;\n\n\t\tlet response = null;\n\n\t\tconst abort = function abort() {\n\t\t\tlet error = new AbortError('The user aborted a request.');\n\t\t\treject(error);\n\t\t\tif (request.body && request.body instanceof Stream.Readable) {\n\t\t\t\trequest.body.destroy(error);\n\t\t\t}\n\t\t\tif (!response || !response.body) return;\n\t\t\tresponse.body.emit('error', error);\n\t\t};\n\n\t\tif (signal && signal.aborted) {\n\t\t\tabort();\n\t\t\treturn;\n\t\t}\n\n\t\tconst abortAndFinalize = function abortAndFinalize() {\n\t\t\tabort();\n\t\t\tfinalize();\n\t\t};\n\n\t\t// send request\n\t\tconst req = send(options);\n\t\tlet reqTimeout;\n\n\t\tif (signal) {\n\t\t\tsignal.addEventListener('abort', abortAndFinalize);\n\t\t}\n\n\t\tfunction finalize() {\n\t\t\treq.abort();\n\t\t\tif (signal) signal.removeEventListener('abort', abortAndFinalize);\n\t\t\tclearTimeout(reqTimeout);\n\t\t}\n\n\t\tif (request.timeout) {\n\t\t\treq.once('socket', function (socket) {\n\t\t\t\treqTimeout = setTimeout(function () {\n\t\t\t\t\treject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));\n\t\t\t\t\tfinalize();\n\t\t\t\t}, request.timeout);\n\t\t\t});\n\t\t}\n\n\t\treq.on('error', function (err) {\n\t\t\treject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));\n\t\t\tfinalize();\n\t\t});\n\n\t\treq.on('response', function (res) {\n\t\t\tclearTimeout(reqTimeout);\n\n\t\t\tconst headers = createHeadersLenient(res.headers);\n\n\t\t\t// HTTP fetch step 5\n\t\t\tif (fetch.isRedirect(res.statusCode)) {\n\t\t\t\t// HTTP fetch step 5.2\n\t\t\t\tconst location = headers.get('Location');\n\n\t\t\t\t// HTTP fetch step 5.3\n\t\t\t\tlet locationURL = null;\n\t\t\t\ttry {\n\t\t\t\t\tlocationURL = location === null ? 
null : new URL$1(location, request.url).toString();\n\t\t\t\t} catch (err) {\n\t\t\t\t\t// error here can only be invalid URL in Location: header\n\t\t\t\t\t// do not throw when options.redirect == manual\n\t\t\t\t\t// let the user extract the errorneous redirect URL\n\t\t\t\t\tif (request.redirect !== 'manual') {\n\t\t\t\t\t\treject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect'));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// HTTP fetch step 5.5\n\t\t\t\tswitch (request.redirect) {\n\t\t\t\t\tcase 'error':\n\t\t\t\t\t\treject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\tcase 'manual':\n\t\t\t\t\t\t// node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.\n\t\t\t\t\t\tif (locationURL !== null) {\n\t\t\t\t\t\t\t// handle corrupted header\n\t\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\t\theaders.set('Location', locationURL);\n\t\t\t\t\t\t\t} catch (err) {\n\t\t\t\t\t\t\t\t// istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request\n\t\t\t\t\t\t\t\treject(err);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase 'follow':\n\t\t\t\t\t\t// HTTP-redirect fetch step 2\n\t\t\t\t\t\tif (locationURL === null) {\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 5\n\t\t\t\t\t\tif (request.counter >= request.follow) {\n\t\t\t\t\t\t\treject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 6 (counter increment)\n\t\t\t\t\t\t// Create a new Request object.\n\t\t\t\t\t\tconst requestOpts = {\n\t\t\t\t\t\t\theaders: new Headers(request.headers),\n\t\t\t\t\t\t\tfollow: request.follow,\n\t\t\t\t\t\t\tcounter: request.counter + 1,\n\t\t\t\t\t\t\tagent: request.agent,\n\t\t\t\t\t\t\tcompress: request.compress,\n\t\t\t\t\t\t\tmethod: request.method,\n\t\t\t\t\t\t\tbody: request.body,\n\t\t\t\t\t\t\tsignal: request.signal,\n\t\t\t\t\t\t\ttimeout: request.timeout,\n\t\t\t\t\t\t\tsize: request.size\n\t\t\t\t\t\t};\n\n\t\t\t\t\t\tif (!isDomainOrSubdomain(request.url, locationURL)) {\n\t\t\t\t\t\t\tfor (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) {\n\t\t\t\t\t\t\t\trequestOpts.headers.delete(name);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 9\n\t\t\t\t\t\tif (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {\n\t\t\t\t\t\t\treject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 11\n\t\t\t\t\t\tif (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {\n\t\t\t\t\t\t\trequestOpts.method = 'GET';\n\t\t\t\t\t\t\trequestOpts.body = undefined;\n\t\t\t\t\t\t\trequestOpts.headers.delete('content-length');\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 15\n\t\t\t\t\t\tresolve(fetch(new Request(locationURL, requestOpts)));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// prepare response\n\t\t\tres.once('end', function () {\n\t\t\t\tif (signal) 
signal.removeEventListener('abort', abortAndFinalize);\n\t\t\t});\n\t\t\tlet body = res.pipe(new PassThrough$1());\n\n\t\t\tconst response_options = {\n\t\t\t\turl: request.url,\n\t\t\t\tstatus: res.statusCode,\n\t\t\t\tstatusText: res.statusMessage,\n\t\t\t\theaders: headers,\n\t\t\t\tsize: request.size,\n\t\t\t\ttimeout: request.timeout,\n\t\t\t\tcounter: request.counter\n\t\t\t};\n\n\t\t\t// HTTP-network fetch step 12.1.1.3\n\t\t\tconst codings = headers.get('Content-Encoding');\n\n\t\t\t// HTTP-network fetch step 12.1.1.4: handle content codings\n\n\t\t\t// in following scenarios we ignore compression support\n\t\t\t// 1. compression support is disabled\n\t\t\t// 2. HEAD request\n\t\t\t// 3. no Content-Encoding header\n\t\t\t// 4. no content response (204)\n\t\t\t// 5. content not modified response (304)\n\t\t\tif (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// For Node v6+\n\t\t\t// Be less strict when decoding compressed responses, since sometimes\n\t\t\t// servers send slightly invalid responses that are still accepted\n\t\t\t// by common browsers.\n\t\t\t// Always using Z_SYNC_FLUSH is what cURL does.\n\t\t\tconst zlibOptions = {\n\t\t\t\tflush: zlib.Z_SYNC_FLUSH,\n\t\t\t\tfinishFlush: zlib.Z_SYNC_FLUSH\n\t\t\t};\n\n\t\t\t// for gzip\n\t\t\tif (codings == 'gzip' || codings == 'x-gzip') {\n\t\t\t\tbody = body.pipe(zlib.createGunzip(zlibOptions));\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for deflate\n\t\t\tif (codings == 'deflate' || codings == 'x-deflate') {\n\t\t\t\t// handle the infamous raw deflate response from old servers\n\t\t\t\t// a hack for old IIS and Apache servers\n\t\t\t\tconst raw = res.pipe(new PassThrough$1());\n\t\t\t\traw.once('data', function (chunk) {\n\t\t\t\t\t// see http://stackoverflow.com/questions/37519828\n\t\t\t\t\tif ((chunk[0] & 0x0F) === 0x08) {\n\t\t\t\t\t\tbody = body.pipe(zlib.createInflate());\n\t\t\t\t\t} else {\n\t\t\t\t\t\tbody = body.pipe(zlib.createInflateRaw());\n\t\t\t\t\t}\n\t\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\t\tresolve(response);\n\t\t\t\t});\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for br\n\t\t\tif (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {\n\t\t\t\tbody = body.pipe(zlib.createBrotliDecompress());\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// otherwise, use response as-is\n\t\t\tresponse = new Response(body, response_options);\n\t\t\tresolve(response);\n\t\t});\n\n\t\twriteToStream(req, request);\n\t});\n}\n/**\n * Redirect code matching\n *\n * @param Number code Status code\n * @return Boolean\n */\nfetch.isRedirect = function (code) {\n\treturn code === 301 || code === 302 || code === 303 || code === 307 || code === 308;\n};\n\n// expose Promise\nfetch.Promise = global.Promise;\n\nmodule.exports = exports = fetch;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.default = exports;\nexports.Headers = Headers;\nexports.Request = Request;\nexports.Response = Response;\nexports.FetchError = FetchError;\n","var wrappy = require('wrappy')\nmodule.exports = wrappy(once)\nmodule.exports.strict = wrappy(onceStrict)\n\nonce.proto = once(function () {\n Object.defineProperty(Function.prototype, 'once', {\n 
value: function () {\n return once(this)\n },\n configurable: true\n })\n\n Object.defineProperty(Function.prototype, 'onceStrict', {\n value: function () {\n return onceStrict(this)\n },\n configurable: true\n })\n})\n\nfunction once (fn) {\n var f = function () {\n if (f.called) return f.value\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n f.called = false\n return f\n}\n\nfunction onceStrict (fn) {\n var f = function () {\n if (f.called)\n throw new Error(f.onceError)\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n var name = fn.name || 'Function wrapped with `once`'\n f.onceError = name + \" shouldn't be called more than once\"\n f.called = false\n return f\n}\n","const ANY = Symbol('SemVer ANY')\n// hoisted class for cyclic dependency\nclass Comparator {\n static get ANY () {\n return ANY\n }\n\n constructor (comp, options) {\n options = parseOptions(options)\n\n if (comp instanceof Comparator) {\n if (comp.loose === !!options.loose) {\n return comp\n } else {\n comp = comp.value\n }\n }\n\n debug('comparator', comp, options)\n this.options = options\n this.loose = !!options.loose\n this.parse(comp)\n\n if (this.semver === ANY) {\n this.value = ''\n } else {\n this.value = this.operator + this.semver.version\n }\n\n debug('comp', this)\n }\n\n parse (comp) {\n const r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]\n const m = comp.match(r)\n\n if (!m) {\n throw new TypeError(`Invalid comparator: ${comp}`)\n }\n\n this.operator = m[1] !== undefined ? m[1] : ''\n if (this.operator === '=') {\n this.operator = ''\n }\n\n // if it literally is just '>' or '' then allow anything.\n if (!m[2]) {\n this.semver = ANY\n } else {\n this.semver = new SemVer(m[2], this.options.loose)\n }\n }\n\n toString () {\n return this.value\n }\n\n test (version) {\n debug('Comparator.test', version, this.options.loose)\n\n if (this.semver === ANY || version === ANY) {\n return true\n }\n\n if (typeof version === 'string') {\n try {\n version = new SemVer(version, this.options)\n } catch (er) {\n return false\n }\n }\n\n return cmp(version, this.operator, this.semver, this.options)\n }\n\n intersects (comp, options) {\n if (!(comp instanceof Comparator)) {\n throw new TypeError('a Comparator is required')\n }\n\n if (!options || typeof options !== 'object') {\n options = {\n loose: !!options,\n includePrerelease: false,\n }\n }\n\n if (this.operator === '') {\n if (this.value === '') {\n return true\n }\n return new Range(comp.value, options).test(this.value)\n } else if (comp.operator === '') {\n if (comp.value === '') {\n return true\n }\n return new Range(this.value, options).test(comp.semver)\n }\n\n const sameDirectionIncreasing =\n (this.operator === '>=' || this.operator === '>') &&\n (comp.operator === '>=' || comp.operator === '>')\n const sameDirectionDecreasing =\n (this.operator === '<=' || this.operator === '<') &&\n (comp.operator === '<=' || comp.operator === '<')\n const sameSemVer = this.semver.version === comp.semver.version\n const differentDirectionsInclusive =\n (this.operator === '>=' || this.operator === '<=') &&\n (comp.operator === '>=' || comp.operator === '<=')\n const oppositeDirectionsLessThan =\n cmp(this.semver, '<', comp.semver, options) &&\n (this.operator === '>=' || this.operator === '>') &&\n (comp.operator === '<=' || comp.operator === '<')\n const oppositeDirectionsGreaterThan =\n cmp(this.semver, '>', comp.semver, options) &&\n (this.operator === '<=' || this.operator === '<') &&\n (comp.operator === '>=' || 
comp.operator === '>')\n\n return (\n sameDirectionIncreasing ||\n sameDirectionDecreasing ||\n (sameSemVer && differentDirectionsInclusive) ||\n oppositeDirectionsLessThan ||\n oppositeDirectionsGreaterThan\n )\n }\n}\n\nmodule.exports = Comparator\n\nconst parseOptions = require('../internal/parse-options')\nconst { re, t } = require('../internal/re')\nconst cmp = require('../functions/cmp')\nconst debug = require('../internal/debug')\nconst SemVer = require('./semver')\nconst Range = require('./range')\n","// hoisted class for cyclic dependency\nclass Range {\n constructor (range, options) {\n options = parseOptions(options)\n\n if (range instanceof Range) {\n if (\n range.loose === !!options.loose &&\n range.includePrerelease === !!options.includePrerelease\n ) {\n return range\n } else {\n return new Range(range.raw, options)\n }\n }\n\n if (range instanceof Comparator) {\n // just put it in the set and return\n this.raw = range.value\n this.set = [[range]]\n this.format()\n return this\n }\n\n this.options = options\n this.loose = !!options.loose\n this.includePrerelease = !!options.includePrerelease\n\n // First, split based on boolean or ||\n this.raw = range\n this.set = range\n .split('||')\n // map the range to a 2d array of comparators\n .map(r => this.parseRange(r.trim()))\n // throw out any comparator lists that are empty\n // this generally means that it was not a valid range, which is allowed\n // in loose mode, but will still throw if the WHOLE range is invalid.\n .filter(c => c.length)\n\n if (!this.set.length) {\n throw new TypeError(`Invalid SemVer Range: ${range}`)\n }\n\n // if we have any that are not the null set, throw out null sets.\n if (this.set.length > 1) {\n // keep the first one, in case they're all null sets\n const first = this.set[0]\n this.set = this.set.filter(c => !isNullSet(c[0]))\n if (this.set.length === 0) {\n this.set = [first]\n } else if (this.set.length > 1) {\n // if we have any that are *, then the range is just *\n for (const c of this.set) {\n if (c.length === 1 && isAny(c[0])) {\n this.set = [c]\n break\n }\n }\n }\n }\n\n this.format()\n }\n\n format () {\n this.range = this.set\n .map((comps) => {\n return comps.join(' ').trim()\n })\n .join('||')\n .trim()\n return this.range\n }\n\n toString () {\n return this.range\n }\n\n parseRange (range) {\n range = range.trim()\n\n // memoize range parsing for performance.\n // this is a very hot path, and fully deterministic.\n const memoOpts = Object.keys(this.options).join(',')\n const memoKey = `parseRange:${memoOpts}:${range}`\n const cached = cache.get(memoKey)\n if (cached) {\n return cached\n }\n\n const loose = this.options.loose\n // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`\n const hr = loose ? 
re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE]\n range = range.replace(hr, hyphenReplace(this.options.includePrerelease))\n debug('hyphen replace', range)\n // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`\n range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace)\n debug('comparator trim', range)\n\n // `~ 1.2.3` => `~1.2.3`\n range = range.replace(re[t.TILDETRIM], tildeTrimReplace)\n\n // `^ 1.2.3` => `^1.2.3`\n range = range.replace(re[t.CARETTRIM], caretTrimReplace)\n\n // normalize spaces\n range = range.split(/\\s+/).join(' ')\n\n // At this point, the range is completely trimmed and\n // ready to be split into comparators.\n\n let rangeList = range\n .split(' ')\n .map(comp => parseComparator(comp, this.options))\n .join(' ')\n .split(/\\s+/)\n // >=0.0.0 is equivalent to *\n .map(comp => replaceGTE0(comp, this.options))\n\n if (loose) {\n // in loose mode, throw out any that are not valid comparators\n rangeList = rangeList.filter(comp => {\n debug('loose invalid filter', comp, this.options)\n return !!comp.match(re[t.COMPARATORLOOSE])\n })\n }\n debug('range list', rangeList)\n\n // if any comparators are the null set, then replace with JUST null set\n // if more than one comparator, remove any * comparators\n // also, don't include the same comparator more than once\n const rangeMap = new Map()\n const comparators = rangeList.map(comp => new Comparator(comp, this.options))\n for (const comp of comparators) {\n if (isNullSet(comp)) {\n return [comp]\n }\n rangeMap.set(comp.value, comp)\n }\n if (rangeMap.size > 1 && rangeMap.has('')) {\n rangeMap.delete('')\n }\n\n const result = [...rangeMap.values()]\n cache.set(memoKey, result)\n return result\n }\n\n intersects (range, options) {\n if (!(range instanceof Range)) {\n throw new TypeError('a Range is required')\n }\n\n return this.set.some((thisComparators) => {\n return (\n isSatisfiable(thisComparators, options) &&\n range.set.some((rangeComparators) => {\n return (\n isSatisfiable(rangeComparators, options) &&\n thisComparators.every((thisComparator) => {\n return rangeComparators.every((rangeComparator) => {\n return thisComparator.intersects(rangeComparator, options)\n })\n })\n )\n })\n )\n })\n }\n\n // if ANY of the sets match ALL of its comparators, then pass\n test (version) {\n if (!version) {\n return false\n }\n\n if (typeof version === 'string') {\n try {\n version = new SemVer(version, this.options)\n } catch (er) {\n return false\n }\n }\n\n for (let i = 0; i < this.set.length; i++) {\n if (testSet(this.set[i], version, this.options)) {\n return true\n }\n }\n return false\n }\n}\nmodule.exports = Range\n\nconst LRU = require('lru-cache')\nconst cache = new LRU({ max: 1000 })\n\nconst parseOptions = require('../internal/parse-options')\nconst Comparator = require('./comparator')\nconst debug = require('../internal/debug')\nconst SemVer = require('./semver')\nconst {\n re,\n t,\n comparatorTrimReplace,\n tildeTrimReplace,\n caretTrimReplace,\n} = require('../internal/re')\n\nconst isNullSet = c => c.value === '<0.0.0-0'\nconst isAny = c => c.value === ''\n\n// take a set of comparators and determine whether there\n// exists a version which can satisfy it\nconst isSatisfiable = (comparators, options) => {\n let result = true\n const remainingComparators = comparators.slice()\n let testComparator = remainingComparators.pop()\n\n while (result && remainingComparators.length) {\n result = remainingComparators.every((otherComparator) => {\n return testComparator.intersects(otherComparator, options)\n })\n\n testComparator 
= remainingComparators.pop()\n }\n\n return result\n}\n\n// comprised of xranges, tildes, stars, and gtlt's at this point.\n// already replaced the hyphen ranges\n// turn into a set of JUST comparators.\nconst parseComparator = (comp, options) => {\n debug('comp', comp, options)\n comp = replaceCarets(comp, options)\n debug('caret', comp)\n comp = replaceTildes(comp, options)\n debug('tildes', comp)\n comp = replaceXRanges(comp, options)\n debug('xrange', comp)\n comp = replaceStars(comp, options)\n debug('stars', comp)\n return comp\n}\n\nconst isX = id => !id || id.toLowerCase() === 'x' || id === '*'\n\n// ~, ~> --> * (any, kinda silly)\n// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0-0\n// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0-0\n// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0-0\n// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0-0\n// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0-0\nconst replaceTildes = (comp, options) =>\n comp.trim().split(/\\s+/).map((c) => {\n return replaceTilde(c, options)\n }).join(' ')\n\nconst replaceTilde = (comp, options) => {\n const r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE]\n return comp.replace(r, (_, M, m, p, pr) => {\n debug('tilde', comp, _, M, m, p, pr)\n let ret\n\n if (isX(M)) {\n ret = ''\n } else if (isX(m)) {\n ret = `>=${M}.0.0 <${+M + 1}.0.0-0`\n } else if (isX(p)) {\n // ~1.2 == >=1.2.0 <1.3.0-0\n ret = `>=${M}.${m}.0 <${M}.${+m + 1}.0-0`\n } else if (pr) {\n debug('replaceTilde pr', pr)\n ret = `>=${M}.${m}.${p}-${pr\n } <${M}.${+m + 1}.0-0`\n } else {\n // ~1.2.3 == >=1.2.3 <1.3.0-0\n ret = `>=${M}.${m}.${p\n } <${M}.${+m + 1}.0-0`\n }\n\n debug('tilde return', ret)\n return ret\n })\n}\n\n// ^ --> * (any, kinda silly)\n// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0-0\n// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0-0\n// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0-0\n// ^1.2.3 --> >=1.2.3 <2.0.0-0\n// ^1.2.0 --> >=1.2.0 <2.0.0-0\nconst replaceCarets = (comp, options) =>\n comp.trim().split(/\\s+/).map((c) => {\n return replaceCaret(c, options)\n }).join(' ')\n\nconst replaceCaret = (comp, options) => {\n debug('caret', comp, options)\n const r = options.loose ? re[t.CARETLOOSE] : re[t.CARET]\n const z = options.includePrerelease ? '-0' : ''\n return comp.replace(r, (_, M, m, p, pr) => {\n debug('caret', comp, _, M, m, p, pr)\n let ret\n\n if (isX(M)) {\n ret = ''\n } else if (isX(m)) {\n ret = `>=${M}.0.0${z} <${+M + 1}.0.0-0`\n } else if (isX(p)) {\n if (M === '0') {\n ret = `>=${M}.${m}.0${z} <${M}.${+m + 1}.0-0`\n } else {\n ret = `>=${M}.${m}.0${z} <${+M + 1}.0.0-0`\n }\n } else if (pr) {\n debug('replaceCaret pr', pr)\n if (M === '0') {\n if (m === '0') {\n ret = `>=${M}.${m}.${p}-${pr\n } <${M}.${m}.${+p + 1}-0`\n } else {\n ret = `>=${M}.${m}.${p}-${pr\n } <${M}.${+m + 1}.0-0`\n }\n } else {\n ret = `>=${M}.${m}.${p}-${pr\n } <${+M + 1}.0.0-0`\n }\n } else {\n debug('no pr')\n if (M === '0') {\n if (m === '0') {\n ret = `>=${M}.${m}.${p\n }${z} <${M}.${m}.${+p + 1}-0`\n } else {\n ret = `>=${M}.${m}.${p\n }${z} <${M}.${+m + 1}.0-0`\n }\n } else {\n ret = `>=${M}.${m}.${p\n } <${+M + 1}.0.0-0`\n }\n }\n\n debug('caret return', ret)\n return ret\n })\n}\n\nconst replaceXRanges = (comp, options) => {\n debug('replaceXRanges', comp, options)\n return comp.split(/\\s+/).map((c) => {\n return replaceXRange(c, options)\n }).join(' ')\n}\n\nconst replaceXRange = (comp, options) => {\n comp = comp.trim()\n const r = options.loose ? 
re[t.XRANGELOOSE] : re[t.XRANGE]\n return comp.replace(r, (ret, gtlt, M, m, p, pr) => {\n debug('xRange', comp, ret, gtlt, M, m, p, pr)\n const xM = isX(M)\n const xm = xM || isX(m)\n const xp = xm || isX(p)\n const anyX = xp\n\n if (gtlt === '=' && anyX) {\n gtlt = ''\n }\n\n // if we're including prereleases in the match, then we need\n // to fix this to -0, the lowest possible prerelease value\n pr = options.includePrerelease ? '-0' : ''\n\n if (xM) {\n if (gtlt === '>' || gtlt === '<') {\n // nothing is allowed\n ret = '<0.0.0-0'\n } else {\n // nothing is forbidden\n ret = '*'\n }\n } else if (gtlt && anyX) {\n // we know patch is an x, because we have any x at all.\n // replace X with 0\n if (xm) {\n m = 0\n }\n p = 0\n\n if (gtlt === '>') {\n // >1 => >=2.0.0\n // >1.2 => >=1.3.0\n gtlt = '>='\n if (xm) {\n M = +M + 1\n m = 0\n p = 0\n } else {\n m = +m + 1\n p = 0\n }\n } else if (gtlt === '<=') {\n // <=0.7.x is actually <0.8.0, since any 0.7.x should\n // pass. Similarly, <=7.x is actually <8.0.0, etc.\n gtlt = '<'\n if (xm) {\n M = +M + 1\n } else {\n m = +m + 1\n }\n }\n\n if (gtlt === '<') {\n pr = '-0'\n }\n\n ret = `${gtlt + M}.${m}.${p}${pr}`\n } else if (xm) {\n ret = `>=${M}.0.0${pr} <${+M + 1}.0.0-0`\n } else if (xp) {\n ret = `>=${M}.${m}.0${pr\n } <${M}.${+m + 1}.0-0`\n }\n\n debug('xRange return', ret)\n\n return ret\n })\n}\n\n// Because * is AND-ed with everything else in the comparator,\n// and '' means \"any version\", just remove the *s entirely.\nconst replaceStars = (comp, options) => {\n debug('replaceStars', comp, options)\n // Looseness is ignored here. star is always as loose as it gets!\n return comp.trim().replace(re[t.STAR], '')\n}\n\nconst replaceGTE0 = (comp, options) => {\n debug('replaceGTE0', comp, options)\n return comp.trim()\n .replace(re[options.includePrerelease ? t.GTE0PRE : t.GTE0], '')\n}\n\n// This function is passed to string.replace(re[t.HYPHENRANGE])\n// M, m, patch, prerelease, build\n// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5\n// 1.2.3 - 3.4 => >=1.2.0 <3.5.0-0 Any 3.4.x will do\n// 1.2 - 3.4 => >=1.2.0 <3.5.0-0\nconst hyphenReplace = incPr => ($0,\n from, fM, fm, fp, fpr, fb,\n to, tM, tm, tp, tpr, tb) => {\n if (isX(fM)) {\n from = ''\n } else if (isX(fm)) {\n from = `>=${fM}.0.0${incPr ? '-0' : ''}`\n } else if (isX(fp)) {\n from = `>=${fM}.${fm}.0${incPr ? '-0' : ''}`\n } else if (fpr) {\n from = `>=${from}`\n } else {\n from = `>=${from}${incPr ? 
'-0' : ''}`\n }\n\n if (isX(tM)) {\n to = ''\n } else if (isX(tm)) {\n to = `<${+tM + 1}.0.0-0`\n } else if (isX(tp)) {\n to = `<${tM}.${+tm + 1}.0-0`\n } else if (tpr) {\n to = `<=${tM}.${tm}.${tp}-${tpr}`\n } else if (incPr) {\n to = `<${tM}.${tm}.${+tp + 1}-0`\n } else {\n to = `<=${to}`\n }\n\n return (`${from} ${to}`).trim()\n}\n\nconst testSet = (set, version, options) => {\n for (let i = 0; i < set.length; i++) {\n if (!set[i].test(version)) {\n return false\n }\n }\n\n if (version.prerelease.length && !options.includePrerelease) {\n // Find the set of versions that are allowed to have prereleases\n // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0\n // That should allow `1.2.3-pr.2` to pass.\n // However, `1.2.4-alpha.notready` should NOT be allowed,\n // even though it's within the range set by the comparators.\n for (let i = 0; i < set.length; i++) {\n debug(set[i].semver)\n if (set[i].semver === Comparator.ANY) {\n continue\n }\n\n if (set[i].semver.prerelease.length > 0) {\n const allowed = set[i].semver\n if (allowed.major === version.major &&\n allowed.minor === version.minor &&\n allowed.patch === version.patch) {\n return true\n }\n }\n }\n\n // Version has a -pre, but it's not one of the ones we like.\n return false\n }\n\n return true\n}\n","const debug = require('../internal/debug')\nconst { MAX_LENGTH, MAX_SAFE_INTEGER } = require('../internal/constants')\nconst { re, t } = require('../internal/re')\n\nconst parseOptions = require('../internal/parse-options')\nconst { compareIdentifiers } = require('../internal/identifiers')\nclass SemVer {\n constructor (version, options) {\n options = parseOptions(options)\n\n if (version instanceof SemVer) {\n if (version.loose === !!options.loose &&\n version.includePrerelease === !!options.includePrerelease) {\n return version\n } else {\n version = version.version\n }\n } else if (typeof version !== 'string') {\n throw new TypeError(`Invalid Version: ${version}`)\n }\n\n if (version.length > MAX_LENGTH) {\n throw new TypeError(\n `version is longer than ${MAX_LENGTH} characters`\n )\n }\n\n debug('SemVer', version, options)\n this.options = options\n this.loose = !!options.loose\n // this isn't actually relevant for versions, but keep it so that we\n // don't run into trouble passing this.options around.\n this.includePrerelease = !!options.includePrerelease\n\n const m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL])\n\n if (!m) {\n throw new TypeError(`Invalid Version: ${version}`)\n }\n\n this.raw = version\n\n // these are actually numbers\n this.major = +m[1]\n this.minor = +m[2]\n this.patch = +m[3]\n\n if (this.major > MAX_SAFE_INTEGER || this.major < 0) {\n throw new TypeError('Invalid major version')\n }\n\n if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) {\n throw new TypeError('Invalid minor version')\n }\n\n if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) {\n throw new TypeError('Invalid patch version')\n }\n\n // numberify any prerelease numeric ids\n if (!m[4]) {\n this.prerelease = []\n } else {\n this.prerelease = m[4].split('.').map((id) => {\n if (/^[0-9]+$/.test(id)) {\n const num = +id\n if (num >= 0 && num < MAX_SAFE_INTEGER) {\n return num\n }\n }\n return id\n })\n }\n\n this.build = m[5] ? 
m[5].split('.') : []\n this.format()\n }\n\n format () {\n this.version = `${this.major}.${this.minor}.${this.patch}`\n if (this.prerelease.length) {\n this.version += `-${this.prerelease.join('.')}`\n }\n return this.version\n }\n\n toString () {\n return this.version\n }\n\n compare (other) {\n debug('SemVer.compare', this.version, this.options, other)\n if (!(other instanceof SemVer)) {\n if (typeof other === 'string' && other === this.version) {\n return 0\n }\n other = new SemVer(other, this.options)\n }\n\n if (other.version === this.version) {\n return 0\n }\n\n return this.compareMain(other) || this.comparePre(other)\n }\n\n compareMain (other) {\n if (!(other instanceof SemVer)) {\n other = new SemVer(other, this.options)\n }\n\n return (\n compareIdentifiers(this.major, other.major) ||\n compareIdentifiers(this.minor, other.minor) ||\n compareIdentifiers(this.patch, other.patch)\n )\n }\n\n comparePre (other) {\n if (!(other instanceof SemVer)) {\n other = new SemVer(other, this.options)\n }\n\n // NOT having a prerelease is > having one\n if (this.prerelease.length && !other.prerelease.length) {\n return -1\n } else if (!this.prerelease.length && other.prerelease.length) {\n return 1\n } else if (!this.prerelease.length && !other.prerelease.length) {\n return 0\n }\n\n let i = 0\n do {\n const a = this.prerelease[i]\n const b = other.prerelease[i]\n debug('prerelease compare', i, a, b)\n if (a === undefined && b === undefined) {\n return 0\n } else if (b === undefined) {\n return 1\n } else if (a === undefined) {\n return -1\n } else if (a === b) {\n continue\n } else {\n return compareIdentifiers(a, b)\n }\n } while (++i)\n }\n\n compareBuild (other) {\n if (!(other instanceof SemVer)) {\n other = new SemVer(other, this.options)\n }\n\n let i = 0\n do {\n const a = this.build[i]\n const b = other.build[i]\n debug('prerelease compare', i, a, b)\n if (a === undefined && b === undefined) {\n return 0\n } else if (b === undefined) {\n return 1\n } else if (a === undefined) {\n return -1\n } else if (a === b) {\n continue\n } else {\n return compareIdentifiers(a, b)\n }\n } while (++i)\n }\n\n // preminor will bump the version up to the next minor release, and immediately\n // down to pre-release. 
premajor and prepatch work the same way.\n inc (release, identifier) {\n switch (release) {\n case 'premajor':\n this.prerelease.length = 0\n this.patch = 0\n this.minor = 0\n this.major++\n this.inc('pre', identifier)\n break\n case 'preminor':\n this.prerelease.length = 0\n this.patch = 0\n this.minor++\n this.inc('pre', identifier)\n break\n case 'prepatch':\n // If this is already a prerelease, it will bump to the next version\n // drop any prereleases that might already exist, since they are not\n // relevant at this point.\n this.prerelease.length = 0\n this.inc('patch', identifier)\n this.inc('pre', identifier)\n break\n // If the input is a non-prerelease version, this acts the same as\n // prepatch.\n case 'prerelease':\n if (this.prerelease.length === 0) {\n this.inc('patch', identifier)\n }\n this.inc('pre', identifier)\n break\n\n case 'major':\n // If this is a pre-major version, bump up to the same major version.\n // Otherwise increment major.\n // 1.0.0-5 bumps to 1.0.0\n // 1.1.0 bumps to 2.0.0\n if (\n this.minor !== 0 ||\n this.patch !== 0 ||\n this.prerelease.length === 0\n ) {\n this.major++\n }\n this.minor = 0\n this.patch = 0\n this.prerelease = []\n break\n case 'minor':\n // If this is a pre-minor version, bump up to the same minor version.\n // Otherwise increment minor.\n // 1.2.0-5 bumps to 1.2.0\n // 1.2.1 bumps to 1.3.0\n if (this.patch !== 0 || this.prerelease.length === 0) {\n this.minor++\n }\n this.patch = 0\n this.prerelease = []\n break\n case 'patch':\n // If this is not a pre-release version, it will increment the patch.\n // If it is a pre-release it will bump up to the same patch version.\n // 1.2.0-5 patches to 1.2.0\n // 1.2.0 patches to 1.2.1\n if (this.prerelease.length === 0) {\n this.patch++\n }\n this.prerelease = []\n break\n // This probably shouldn't be used publicly.\n // 1.0.0 'pre' would become 1.0.0-0 which is the wrong direction.\n case 'pre':\n if (this.prerelease.length === 0) {\n this.prerelease = [0]\n } else {\n let i = this.prerelease.length\n while (--i >= 0) {\n if (typeof this.prerelease[i] === 'number') {\n this.prerelease[i]++\n i = -2\n }\n }\n if (i === -1) {\n // didn't increment anything\n this.prerelease.push(0)\n }\n }\n if (identifier) {\n // 1.2.0-beta.1 bumps to 1.2.0-beta.2,\n // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0\n if (compareIdentifiers(this.prerelease[0], identifier) === 0) {\n if (isNaN(this.prerelease[1])) {\n this.prerelease = [identifier, 0]\n }\n } else {\n this.prerelease = [identifier, 0]\n }\n }\n break\n\n default:\n throw new Error(`invalid increment argument: ${release}`)\n }\n this.format()\n this.raw = this.version\n return this\n }\n}\n\nmodule.exports = SemVer\n","const parse = require('./parse')\nconst clean = (version, options) => {\n const s = parse(version.trim().replace(/^[=v]+/, ''), options)\n return s ? 
s.version : null\n}\nmodule.exports = clean\n","const eq = require('./eq')\nconst neq = require('./neq')\nconst gt = require('./gt')\nconst gte = require('./gte')\nconst lt = require('./lt')\nconst lte = require('./lte')\n\nconst cmp = (a, op, b, loose) => {\n switch (op) {\n case '===':\n if (typeof a === 'object') {\n a = a.version\n }\n if (typeof b === 'object') {\n b = b.version\n }\n return a === b\n\n case '!==':\n if (typeof a === 'object') {\n a = a.version\n }\n if (typeof b === 'object') {\n b = b.version\n }\n return a !== b\n\n case '':\n case '=':\n case '==':\n return eq(a, b, loose)\n\n case '!=':\n return neq(a, b, loose)\n\n case '>':\n return gt(a, b, loose)\n\n case '>=':\n return gte(a, b, loose)\n\n case '<':\n return lt(a, b, loose)\n\n case '<=':\n return lte(a, b, loose)\n\n default:\n throw new TypeError(`Invalid operator: ${op}`)\n }\n}\nmodule.exports = cmp\n","const SemVer = require('../classes/semver')\nconst parse = require('./parse')\nconst { re, t } = require('../internal/re')\n\nconst coerce = (version, options) => {\n if (version instanceof SemVer) {\n return version\n }\n\n if (typeof version === 'number') {\n version = String(version)\n }\n\n if (typeof version !== 'string') {\n return null\n }\n\n options = options || {}\n\n let match = null\n if (!options.rtl) {\n match = version.match(re[t.COERCE])\n } else {\n // Find the right-most coercible string that does not share\n // a terminus with a more left-ward coercible string.\n // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4'\n //\n // Walk through the string checking with a /g regexp\n // Manually set the index so as to pick up overlapping matches.\n // Stop when we get a match that ends at the string end, since no\n // coercible string can be more right-ward without the same terminus.\n let next\n while ((next = re[t.COERCERTL].exec(version)) &&\n (!match || match.index + match[0].length !== version.length)\n ) {\n if (!match ||\n next.index + next[0].length !== match.index + match[0].length) {\n match = next\n }\n re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length\n }\n // leave it in a clean state\n re[t.COERCERTL].lastIndex = -1\n }\n\n if (match === null) {\n return null\n }\n\n return parse(`${match[2]}.${match[3] || '0'}.${match[4] || '0'}`, options)\n}\nmodule.exports = coerce\n","const SemVer = require('../classes/semver')\nconst compareBuild = (a, b, loose) => {\n const versionA = new SemVer(a, loose)\n const versionB = new SemVer(b, loose)\n return versionA.compare(versionB) || versionA.compareBuild(versionB)\n}\nmodule.exports = compareBuild\n","const compare = require('./compare')\nconst compareLoose = (a, b) => compare(a, b, true)\nmodule.exports = compareLoose\n","const SemVer = require('../classes/semver')\nconst compare = (a, b, loose) =>\n new SemVer(a, loose).compare(new SemVer(b, loose))\n\nmodule.exports = compare\n","const parse = require('./parse')\nconst eq = require('./eq')\n\nconst diff = (version1, version2) => {\n if (eq(version1, version2)) {\n return null\n } else {\n const v1 = parse(version1)\n const v2 = parse(version2)\n const hasPre = v1.prerelease.length || v2.prerelease.length\n const prefix = hasPre ? 'pre' : ''\n const defaultResult = hasPre ? 
'prerelease' : ''\n for (const key in v1) {\n if (key === 'major' || key === 'minor' || key === 'patch') {\n if (v1[key] !== v2[key]) {\n return prefix + key\n }\n }\n }\n return defaultResult // may be undefined\n }\n}\nmodule.exports = diff\n","const compare = require('./compare')\nconst eq = (a, b, loose) => compare(a, b, loose) === 0\nmodule.exports = eq\n","const compare = require('./compare')\nconst gt = (a, b, loose) => compare(a, b, loose) > 0\nmodule.exports = gt\n","const compare = require('./compare')\nconst gte = (a, b, loose) => compare(a, b, loose) >= 0\nmodule.exports = gte\n","const SemVer = require('../classes/semver')\n\nconst inc = (version, release, options, identifier) => {\n if (typeof (options) === 'string') {\n identifier = options\n options = undefined\n }\n\n try {\n return new SemVer(\n version instanceof SemVer ? version.version : version,\n options\n ).inc(release, identifier).version\n } catch (er) {\n return null\n }\n}\nmodule.exports = inc\n","const compare = require('./compare')\nconst lt = (a, b, loose) => compare(a, b, loose) < 0\nmodule.exports = lt\n","const compare = require('./compare')\nconst lte = (a, b, loose) => compare(a, b, loose) <= 0\nmodule.exports = lte\n","const SemVer = require('../classes/semver')\nconst major = (a, loose) => new SemVer(a, loose).major\nmodule.exports = major\n","const SemVer = require('../classes/semver')\nconst minor = (a, loose) => new SemVer(a, loose).minor\nmodule.exports = minor\n","const compare = require('./compare')\nconst neq = (a, b, loose) => compare(a, b, loose) !== 0\nmodule.exports = neq\n","const { MAX_LENGTH } = require('../internal/constants')\nconst { re, t } = require('../internal/re')\nconst SemVer = require('../classes/semver')\n\nconst parseOptions = require('../internal/parse-options')\nconst parse = (version, options) => {\n options = parseOptions(options)\n\n if (version instanceof SemVer) {\n return version\n }\n\n if (typeof version !== 'string') {\n return null\n }\n\n if (version.length > MAX_LENGTH) {\n return null\n }\n\n const r = options.loose ? re[t.LOOSE] : re[t.FULL]\n if (!r.test(version)) {\n return null\n }\n\n try {\n return new SemVer(version, options)\n } catch (er) {\n return null\n }\n}\n\nmodule.exports = parse\n","const SemVer = require('../classes/semver')\nconst patch = (a, loose) => new SemVer(a, loose).patch\nmodule.exports = patch\n","const parse = require('./parse')\nconst prerelease = (version, options) => {\n const parsed = parse(version, options)\n return (parsed && parsed.prerelease.length) ? parsed.prerelease : null\n}\nmodule.exports = prerelease\n","const compare = require('./compare')\nconst rcompare = (a, b, loose) => compare(b, a, loose)\nmodule.exports = rcompare\n","const compareBuild = require('./compare-build')\nconst rsort = (list, loose) => list.sort((a, b) => compareBuild(b, a, loose))\nmodule.exports = rsort\n","const Range = require('../classes/range')\nconst satisfies = (version, range, options) => {\n try {\n range = new Range(range, options)\n } catch (er) {\n return false\n }\n return range.test(version)\n}\nmodule.exports = satisfies\n","const compareBuild = require('./compare-build')\nconst sort = (list, loose) => list.sort((a, b) => compareBuild(a, b, loose))\nmodule.exports = sort\n","const parse = require('./parse')\nconst valid = (version, options) => {\n const v = parse(version, options)\n return v ? 
v.version : null\n}\nmodule.exports = valid\n","// just pre-load all the stuff that index.js lazily exports\nconst internalRe = require('./internal/re')\nmodule.exports = {\n re: internalRe.re,\n src: internalRe.src,\n tokens: internalRe.t,\n SEMVER_SPEC_VERSION: require('./internal/constants').SEMVER_SPEC_VERSION,\n SemVer: require('./classes/semver'),\n compareIdentifiers: require('./internal/identifiers').compareIdentifiers,\n rcompareIdentifiers: require('./internal/identifiers').rcompareIdentifiers,\n parse: require('./functions/parse'),\n valid: require('./functions/valid'),\n clean: require('./functions/clean'),\n inc: require('./functions/inc'),\n diff: require('./functions/diff'),\n major: require('./functions/major'),\n minor: require('./functions/minor'),\n patch: require('./functions/patch'),\n prerelease: require('./functions/prerelease'),\n compare: require('./functions/compare'),\n rcompare: require('./functions/rcompare'),\n compareLoose: require('./functions/compare-loose'),\n compareBuild: require('./functions/compare-build'),\n sort: require('./functions/sort'),\n rsort: require('./functions/rsort'),\n gt: require('./functions/gt'),\n lt: require('./functions/lt'),\n eq: require('./functions/eq'),\n neq: require('./functions/neq'),\n gte: require('./functions/gte'),\n lte: require('./functions/lte'),\n cmp: require('./functions/cmp'),\n coerce: require('./functions/coerce'),\n Comparator: require('./classes/comparator'),\n Range: require('./classes/range'),\n satisfies: require('./functions/satisfies'),\n toComparators: require('./ranges/to-comparators'),\n maxSatisfying: require('./ranges/max-satisfying'),\n minSatisfying: require('./ranges/min-satisfying'),\n minVersion: require('./ranges/min-version'),\n validRange: require('./ranges/valid'),\n outside: require('./ranges/outside'),\n gtr: require('./ranges/gtr'),\n ltr: require('./ranges/ltr'),\n intersects: require('./ranges/intersects'),\n simplifyRange: require('./ranges/simplify'),\n subset: require('./ranges/subset'),\n}\n","// Note: this is the semver.org version of the spec that it implements\n// Not necessarily the package version of this code.\nconst SEMVER_SPEC_VERSION = '2.0.0'\n\nconst MAX_LENGTH = 256\nconst MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||\n/* istanbul ignore next */ 9007199254740991\n\n// Max safe segment length for coercion.\nconst MAX_SAFE_COMPONENT_LENGTH = 16\n\nmodule.exports = {\n SEMVER_SPEC_VERSION,\n MAX_LENGTH,\n MAX_SAFE_INTEGER,\n MAX_SAFE_COMPONENT_LENGTH,\n}\n","const debug = (\n typeof process === 'object' &&\n process.env &&\n process.env.NODE_DEBUG &&\n /\\bsemver\\b/i.test(process.env.NODE_DEBUG)\n) ? (...args) => console.error('SEMVER', ...args)\n : () => {}\n\nmodule.exports = debug\n","const numeric = /^[0-9]+$/\nconst compareIdentifiers = (a, b) => {\n const anum = numeric.test(a)\n const bnum = numeric.test(b)\n\n if (anum && bnum) {\n a = +a\n b = +b\n }\n\n return a === b ? 0\n : (anum && !bnum) ? -1\n : (bnum && !anum) ? 1\n : a < b ? -1\n : 1\n}\n\nconst rcompareIdentifiers = (a, b) => compareIdentifiers(b, a)\n\nmodule.exports = {\n compareIdentifiers,\n rcompareIdentifiers,\n}\n","// parse out just the options we care about so we always get a consistent\n// obj with keys in a consistent order.\nconst opts = ['includePrerelease', 'loose', 'rtl']\nconst parseOptions = options =>\n !options ? {}\n : typeof options !== 'object' ? 
{ loose: true }\n : opts.filter(k => options[k]).reduce((o, k) => {\n o[k] = true\n return o\n }, {})\nmodule.exports = parseOptions\n","const { MAX_SAFE_COMPONENT_LENGTH } = require('./constants')\nconst debug = require('./debug')\nexports = module.exports = {}\n\n// The actual regexps go on exports.re\nconst re = exports.re = []\nconst src = exports.src = []\nconst t = exports.t = {}\nlet R = 0\n\nconst createToken = (name, value, isGlobal) => {\n const index = R++\n debug(name, index, value)\n t[name] = index\n src[index] = value\n re[index] = new RegExp(value, isGlobal ? 'g' : undefined)\n}\n\n// The following Regular Expressions can be used for tokenizing,\n// validating, and parsing SemVer version strings.\n\n// ## Numeric Identifier\n// A single `0`, or a non-zero digit followed by zero or more digits.\n\ncreateToken('NUMERICIDENTIFIER', '0|[1-9]\\\\d*')\ncreateToken('NUMERICIDENTIFIERLOOSE', '[0-9]+')\n\n// ## Non-numeric Identifier\n// Zero or more digits, followed by a letter or hyphen, and then zero or\n// more letters, digits, or hyphens.\n\ncreateToken('NONNUMERICIDENTIFIER', '\\\\d*[a-zA-Z-][a-zA-Z0-9-]*')\n\n// ## Main Version\n// Three dot-separated numeric identifiers.\n\ncreateToken('MAINVERSION', `(${src[t.NUMERICIDENTIFIER]})\\\\.` +\n `(${src[t.NUMERICIDENTIFIER]})\\\\.` +\n `(${src[t.NUMERICIDENTIFIER]})`)\n\ncreateToken('MAINVERSIONLOOSE', `(${src[t.NUMERICIDENTIFIERLOOSE]})\\\\.` +\n `(${src[t.NUMERICIDENTIFIERLOOSE]})\\\\.` +\n `(${src[t.NUMERICIDENTIFIERLOOSE]})`)\n\n// ## Pre-release Version Identifier\n// A numeric identifier, or a non-numeric identifier.\n\ncreateToken('PRERELEASEIDENTIFIER', `(?:${src[t.NUMERICIDENTIFIER]\n}|${src[t.NONNUMERICIDENTIFIER]})`)\n\ncreateToken('PRERELEASEIDENTIFIERLOOSE', `(?:${src[t.NUMERICIDENTIFIERLOOSE]\n}|${src[t.NONNUMERICIDENTIFIER]})`)\n\n// ## Pre-release Version\n// Hyphen, followed by one or more dot-separated pre-release version\n// identifiers.\n\ncreateToken('PRERELEASE', `(?:-(${src[t.PRERELEASEIDENTIFIER]\n}(?:\\\\.${src[t.PRERELEASEIDENTIFIER]})*))`)\n\ncreateToken('PRERELEASELOOSE', `(?:-?(${src[t.PRERELEASEIDENTIFIERLOOSE]\n}(?:\\\\.${src[t.PRERELEASEIDENTIFIERLOOSE]})*))`)\n\n// ## Build Metadata Identifier\n// Any combination of digits, letters, or hyphens.\n\ncreateToken('BUILDIDENTIFIER', '[0-9A-Za-z-]+')\n\n// ## Build Metadata\n// Plus sign, followed by one or more period-separated build metadata\n// identifiers.\n\ncreateToken('BUILD', `(?:\\\\+(${src[t.BUILDIDENTIFIER]\n}(?:\\\\.${src[t.BUILDIDENTIFIER]})*))`)\n\n// ## Full Version String\n// A main version, followed optionally by a pre-release version and\n// build metadata.\n\n// Note that the only major, minor, patch, and pre-release sections of\n// the version string are capturing groups. 
The build metadata is not a\n// capturing group, because it should not ever be used in version\n// comparison.\n\ncreateToken('FULLPLAIN', `v?${src[t.MAINVERSION]\n}${src[t.PRERELEASE]}?${\n src[t.BUILD]}?`)\n\ncreateToken('FULL', `^${src[t.FULLPLAIN]}$`)\n\n// like full, but allows v1.2.3 and =1.2.3, which people do sometimes.\n// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty\n// common in the npm registry.\ncreateToken('LOOSEPLAIN', `[v=\\\\s]*${src[t.MAINVERSIONLOOSE]\n}${src[t.PRERELEASELOOSE]}?${\n src[t.BUILD]}?`)\n\ncreateToken('LOOSE', `^${src[t.LOOSEPLAIN]}$`)\n\ncreateToken('GTLT', '((?:<|>)?=?)')\n\n// Something like \"2.*\" or \"1.2.x\".\n// Note that \"x.x\" is a valid xRange identifer, meaning \"any version\"\n// Only the first item is strictly required.\ncreateToken('XRANGEIDENTIFIERLOOSE', `${src[t.NUMERICIDENTIFIERLOOSE]}|x|X|\\\\*`)\ncreateToken('XRANGEIDENTIFIER', `${src[t.NUMERICIDENTIFIER]}|x|X|\\\\*`)\n\ncreateToken('XRANGEPLAIN', `[v=\\\\s]*(${src[t.XRANGEIDENTIFIER]})` +\n `(?:\\\\.(${src[t.XRANGEIDENTIFIER]})` +\n `(?:\\\\.(${src[t.XRANGEIDENTIFIER]})` +\n `(?:${src[t.PRERELEASE]})?${\n src[t.BUILD]}?` +\n `)?)?`)\n\ncreateToken('XRANGEPLAINLOOSE', `[v=\\\\s]*(${src[t.XRANGEIDENTIFIERLOOSE]})` +\n `(?:\\\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` +\n `(?:\\\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` +\n `(?:${src[t.PRERELEASELOOSE]})?${\n src[t.BUILD]}?` +\n `)?)?`)\n\ncreateToken('XRANGE', `^${src[t.GTLT]}\\\\s*${src[t.XRANGEPLAIN]}$`)\ncreateToken('XRANGELOOSE', `^${src[t.GTLT]}\\\\s*${src[t.XRANGEPLAINLOOSE]}$`)\n\n// Coercion.\n// Extract anything that could conceivably be a part of a valid semver\ncreateToken('COERCE', `${'(^|[^\\\\d])' +\n '(\\\\d{1,'}${MAX_SAFE_COMPONENT_LENGTH}})` +\n `(?:\\\\.(\\\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` +\n `(?:\\\\.(\\\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` +\n `(?:$|[^\\\\d])`)\ncreateToken('COERCERTL', src[t.COERCE], true)\n\n// Tilde ranges.\n// Meaning is \"reasonably at or greater than\"\ncreateToken('LONETILDE', '(?:~>?)')\n\ncreateToken('TILDETRIM', `(\\\\s*)${src[t.LONETILDE]}\\\\s+`, true)\nexports.tildeTrimReplace = '$1~'\n\ncreateToken('TILDE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAIN]}$`)\ncreateToken('TILDELOOSE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAINLOOSE]}$`)\n\n// Caret ranges.\n// Meaning is \"at least and backwards compatible with\"\ncreateToken('LONECARET', '(?:\\\\^)')\n\ncreateToken('CARETTRIM', `(\\\\s*)${src[t.LONECARET]}\\\\s+`, true)\nexports.caretTrimReplace = '$1^'\n\ncreateToken('CARET', `^${src[t.LONECARET]}${src[t.XRANGEPLAIN]}$`)\ncreateToken('CARETLOOSE', `^${src[t.LONECARET]}${src[t.XRANGEPLAINLOOSE]}$`)\n\n// A simple gt/lt/eq thing, or just \"\" to indicate \"any version\"\ncreateToken('COMPARATORLOOSE', `^${src[t.GTLT]}\\\\s*(${src[t.LOOSEPLAIN]})$|^$`)\ncreateToken('COMPARATOR', `^${src[t.GTLT]}\\\\s*(${src[t.FULLPLAIN]})$|^$`)\n\n// An expression to strip any whitespace between the gtlt and the thing\n// it modifies, so that `> 1.2.3` ==> `>1.2.3`\ncreateToken('COMPARATORTRIM', `(\\\\s*)${src[t.GTLT]\n}\\\\s*(${src[t.LOOSEPLAIN]}|${src[t.XRANGEPLAIN]})`, true)\nexports.comparatorTrimReplace = '$1$2$3'\n\n// Something like `1.2.3 - 1.2.4`\n// Note that these all use the loose form, because they'll be\n// checked against either the strict or loose comparator form\n// later.\ncreateToken('HYPHENRANGE', `^\\\\s*(${src[t.XRANGEPLAIN]})` +\n `\\\\s+-\\\\s+` +\n `(${src[t.XRANGEPLAIN]})` +\n `\\\\s*$`)\n\ncreateToken('HYPHENRANGELOOSE', `^\\\\s*(${src[t.XRANGEPLAINLOOSE]})` +\n 
`\\\\s+-\\\\s+` +\n `(${src[t.XRANGEPLAINLOOSE]})` +\n `\\\\s*$`)\n\n// Star ranges basically just allow anything at all.\ncreateToken('STAR', '(<|>)?=?\\\\s*\\\\*')\n// >=0.0.0 is like a star\ncreateToken('GTE0', '^\\\\s*>=\\\\s*0\\\\.0\\\\.0\\\\s*$')\ncreateToken('GTE0PRE', '^\\\\s*>=\\\\s*0\\\\.0\\\\.0-0\\\\s*$')\n","// Determine if version is greater than all the versions possible in the range.\nconst outside = require('./outside')\nconst gtr = (version, range, options) => outside(version, range, '>', options)\nmodule.exports = gtr\n","const Range = require('../classes/range')\nconst intersects = (r1, r2, options) => {\n r1 = new Range(r1, options)\n r2 = new Range(r2, options)\n return r1.intersects(r2)\n}\nmodule.exports = intersects\n","const outside = require('./outside')\n// Determine if version is less than all the versions possible in the range\nconst ltr = (version, range, options) => outside(version, range, '<', options)\nmodule.exports = ltr\n","const SemVer = require('../classes/semver')\nconst Range = require('../classes/range')\n\nconst maxSatisfying = (versions, range, options) => {\n let max = null\n let maxSV = null\n let rangeObj = null\n try {\n rangeObj = new Range(range, options)\n } catch (er) {\n return null\n }\n versions.forEach((v) => {\n if (rangeObj.test(v)) {\n // satisfies(v, range, options)\n if (!max || maxSV.compare(v) === -1) {\n // compare(max, v, true)\n max = v\n maxSV = new SemVer(max, options)\n }\n }\n })\n return max\n}\nmodule.exports = maxSatisfying\n","const SemVer = require('../classes/semver')\nconst Range = require('../classes/range')\nconst minSatisfying = (versions, range, options) => {\n let min = null\n let minSV = null\n let rangeObj = null\n try {\n rangeObj = new Range(range, options)\n } catch (er) {\n return null\n }\n versions.forEach((v) => {\n if (rangeObj.test(v)) {\n // satisfies(v, range, options)\n if (!min || minSV.compare(v) === 1) {\n // compare(min, v, true)\n min = v\n minSV = new SemVer(min, options)\n }\n }\n })\n return min\n}\nmodule.exports = minSatisfying\n","const SemVer = require('../classes/semver')\nconst Range = require('../classes/range')\nconst gt = require('../functions/gt')\n\nconst minVersion = (range, loose) => {\n range = new Range(range, loose)\n\n let minver = new SemVer('0.0.0')\n if (range.test(minver)) {\n return minver\n }\n\n minver = new SemVer('0.0.0-0')\n if (range.test(minver)) {\n return minver\n }\n\n minver = null\n for (let i = 0; i < range.set.length; ++i) {\n const comparators = range.set[i]\n\n let setMin = null\n comparators.forEach((comparator) => {\n // Clone to avoid manipulating the comparator's semver object.\n const compver = new SemVer(comparator.semver.version)\n switch (comparator.operator) {\n case '>':\n if (compver.prerelease.length === 0) {\n compver.patch++\n } else {\n compver.prerelease.push(0)\n }\n compver.raw = compver.format()\n /* fallthrough */\n case '':\n case '>=':\n if (!setMin || gt(compver, setMin)) {\n setMin = compver\n }\n break\n case '<':\n case '<=':\n /* Ignore maximum versions */\n break\n /* istanbul ignore next */\n default:\n throw new Error(`Unexpected operation: ${comparator.operator}`)\n }\n })\n if (setMin && (!minver || gt(minver, setMin))) {\n minver = setMin\n }\n }\n\n if (minver && range.test(minver)) {\n return minver\n }\n\n return null\n}\nmodule.exports = minVersion\n","const SemVer = require('../classes/semver')\nconst Comparator = require('../classes/comparator')\nconst { ANY } = Comparator\nconst Range = 
require('../classes/range')\nconst satisfies = require('../functions/satisfies')\nconst gt = require('../functions/gt')\nconst lt = require('../functions/lt')\nconst lte = require('../functions/lte')\nconst gte = require('../functions/gte')\n\nconst outside = (version, range, hilo, options) => {\n version = new SemVer(version, options)\n range = new Range(range, options)\n\n let gtfn, ltefn, ltfn, comp, ecomp\n switch (hilo) {\n case '>':\n gtfn = gt\n ltefn = lte\n ltfn = lt\n comp = '>'\n ecomp = '>='\n break\n case '<':\n gtfn = lt\n ltefn = gte\n ltfn = gt\n comp = '<'\n ecomp = '<='\n break\n default:\n throw new TypeError('Must provide a hilo val of \"<\" or \">\"')\n }\n\n // If it satisfies the range it is not outside\n if (satisfies(version, range, options)) {\n return false\n }\n\n // From now on, variable terms are as if we're in \"gtr\" mode.\n // but note that everything is flipped for the \"ltr\" function.\n\n for (let i = 0; i < range.set.length; ++i) {\n const comparators = range.set[i]\n\n let high = null\n let low = null\n\n comparators.forEach((comparator) => {\n if (comparator.semver === ANY) {\n comparator = new Comparator('>=0.0.0')\n }\n high = high || comparator\n low = low || comparator\n if (gtfn(comparator.semver, high.semver, options)) {\n high = comparator\n } else if (ltfn(comparator.semver, low.semver, options)) {\n low = comparator\n }\n })\n\n // If the edge version comparator has a operator then our version\n // isn't outside it\n if (high.operator === comp || high.operator === ecomp) {\n return false\n }\n\n // If the lowest version comparator has an operator and our version\n // is less than it then it isn't higher than the range\n if ((!low.operator || low.operator === comp) &&\n ltefn(version, low.semver)) {\n return false\n } else if (low.operator === ecomp && ltfn(version, low.semver)) {\n return false\n }\n }\n return true\n}\n\nmodule.exports = outside\n","// given a set of versions and a range, create a \"simplified\" range\n// that includes the same versions that the original range does\n// If the original range is shorter than the simplified one, return that.\nconst satisfies = require('../functions/satisfies.js')\nconst compare = require('../functions/compare.js')\nmodule.exports = (versions, range, options) => {\n const set = []\n let first = null\n let prev = null\n const v = versions.sort((a, b) => compare(a, b, options))\n for (const version of v) {\n const included = satisfies(version, range, options)\n if (included) {\n prev = version\n if (!first) {\n first = version\n }\n } else {\n if (prev) {\n set.push([first, prev])\n }\n prev = null\n first = null\n }\n }\n if (first) {\n set.push([first, null])\n }\n\n const ranges = []\n for (const [min, max] of set) {\n if (min === max) {\n ranges.push(min)\n } else if (!max && min === v[0]) {\n ranges.push('*')\n } else if (!max) {\n ranges.push(`>=${min}`)\n } else if (min === v[0]) {\n ranges.push(`<=${max}`)\n } else {\n ranges.push(`${min} - ${max}`)\n }\n }\n const simplified = ranges.join(' || ')\n const original = typeof range.raw === 'string' ? range.raw : String(range)\n return simplified.length < original.length ? 
simplified : range\n}\n","const Range = require('../classes/range.js')\nconst Comparator = require('../classes/comparator.js')\nconst { ANY } = Comparator\nconst satisfies = require('../functions/satisfies.js')\nconst compare = require('../functions/compare.js')\n\n// Complex range `r1 || r2 || ...` is a subset of `R1 || R2 || ...` iff:\n// - Every simple range `r1, r2, ...` is a null set, OR\n// - Every simple range `r1, r2, ...` which is not a null set is a subset of\n// some `R1, R2, ...`\n//\n// Simple range `c1 c2 ...` is a subset of simple range `C1 C2 ...` iff:\n// - If c is only the ANY comparator\n// - If C is only the ANY comparator, return true\n// - Else if in prerelease mode, return false\n// - else replace c with `[>=0.0.0]`\n// - If C is only the ANY comparator\n// - if in prerelease mode, return true\n// - else replace C with `[>=0.0.0]`\n// - Let EQ be the set of = comparators in c\n// - If EQ is more than one, return true (null set)\n// - Let GT be the highest > or >= comparator in c\n// - Let LT be the lowest < or <= comparator in c\n// - If GT and LT, and GT.semver > LT.semver, return true (null set)\n// - If any C is a = range, and GT or LT are set, return false\n// - If EQ\n// - If GT, and EQ does not satisfy GT, return true (null set)\n// - If LT, and EQ does not satisfy LT, return true (null set)\n// - If EQ satisfies every C, return true\n// - Else return false\n// - If GT\n// - If GT.semver is lower than any > or >= comp in C, return false\n// - If GT is >=, and GT.semver does not satisfy every C, return false\n// - If GT.semver has a prerelease, and not in prerelease mode\n// - If no C has a prerelease and the GT.semver tuple, return false\n// - If LT\n// - If LT.semver is greater than any < or <= comp in C, return false\n// - If LT is <=, and LT.semver does not satisfy every C, return false\n// - If GT.semver has a prerelease, and not in prerelease mode\n// - If no C has a prerelease and the LT.semver tuple, return false\n// - Else return true\n\nconst subset = (sub, dom, options = {}) => {\n if (sub === dom) {\n return true\n }\n\n sub = new Range(sub, options)\n dom = new Range(dom, options)\n let sawNonNull = false\n\n OUTER: for (const simpleSub of sub.set) {\n for (const simpleDom of dom.set) {\n const isSub = simpleSubset(simpleSub, simpleDom, options)\n sawNonNull = sawNonNull || isSub !== null\n if (isSub) {\n continue OUTER\n }\n }\n // the null set is a subset of everything, but null simple ranges in\n // a complex range should be ignored. 
so if we saw a non-null range,\n // then we know this isn't a subset, but if EVERY simple range was null,\n // then it is a subset.\n if (sawNonNull) {\n return false\n }\n }\n return true\n}\n\nconst simpleSubset = (sub, dom, options) => {\n if (sub === dom) {\n return true\n }\n\n if (sub.length === 1 && sub[0].semver === ANY) {\n if (dom.length === 1 && dom[0].semver === ANY) {\n return true\n } else if (options.includePrerelease) {\n sub = [new Comparator('>=0.0.0-0')]\n } else {\n sub = [new Comparator('>=0.0.0')]\n }\n }\n\n if (dom.length === 1 && dom[0].semver === ANY) {\n if (options.includePrerelease) {\n return true\n } else {\n dom = [new Comparator('>=0.0.0')]\n }\n }\n\n const eqSet = new Set()\n let gt, lt\n for (const c of sub) {\n if (c.operator === '>' || c.operator === '>=') {\n gt = higherGT(gt, c, options)\n } else if (c.operator === '<' || c.operator === '<=') {\n lt = lowerLT(lt, c, options)\n } else {\n eqSet.add(c.semver)\n }\n }\n\n if (eqSet.size > 1) {\n return null\n }\n\n let gtltComp\n if (gt && lt) {\n gtltComp = compare(gt.semver, lt.semver, options)\n if (gtltComp > 0) {\n return null\n } else if (gtltComp === 0 && (gt.operator !== '>=' || lt.operator !== '<=')) {\n return null\n }\n }\n\n // will iterate one or zero times\n for (const eq of eqSet) {\n if (gt && !satisfies(eq, String(gt), options)) {\n return null\n }\n\n if (lt && !satisfies(eq, String(lt), options)) {\n return null\n }\n\n for (const c of dom) {\n if (!satisfies(eq, String(c), options)) {\n return false\n }\n }\n\n return true\n }\n\n let higher, lower\n let hasDomLT, hasDomGT\n // if the subset has a prerelease, we need a comparator in the superset\n // with the same tuple and a prerelease, or it's not a subset\n let needDomLTPre = lt &&\n !options.includePrerelease &&\n lt.semver.prerelease.length ? lt.semver : false\n let needDomGTPre = gt &&\n !options.includePrerelease &&\n gt.semver.prerelease.length ? 
gt.semver : false\n // exception: <1.2.3-0 is the same as <1.2.3\n if (needDomLTPre && needDomLTPre.prerelease.length === 1 &&\n lt.operator === '<' && needDomLTPre.prerelease[0] === 0) {\n needDomLTPre = false\n }\n\n for (const c of dom) {\n hasDomGT = hasDomGT || c.operator === '>' || c.operator === '>='\n hasDomLT = hasDomLT || c.operator === '<' || c.operator === '<='\n if (gt) {\n if (needDomGTPre) {\n if (c.semver.prerelease && c.semver.prerelease.length &&\n c.semver.major === needDomGTPre.major &&\n c.semver.minor === needDomGTPre.minor &&\n c.semver.patch === needDomGTPre.patch) {\n needDomGTPre = false\n }\n }\n if (c.operator === '>' || c.operator === '>=') {\n higher = higherGT(gt, c, options)\n if (higher === c && higher !== gt) {\n return false\n }\n } else if (gt.operator === '>=' && !satisfies(gt.semver, String(c), options)) {\n return false\n }\n }\n if (lt) {\n if (needDomLTPre) {\n if (c.semver.prerelease && c.semver.prerelease.length &&\n c.semver.major === needDomLTPre.major &&\n c.semver.minor === needDomLTPre.minor &&\n c.semver.patch === needDomLTPre.patch) {\n needDomLTPre = false\n }\n }\n if (c.operator === '<' || c.operator === '<=') {\n lower = lowerLT(lt, c, options)\n if (lower === c && lower !== lt) {\n return false\n }\n } else if (lt.operator === '<=' && !satisfies(lt.semver, String(c), options)) {\n return false\n }\n }\n if (!c.operator && (lt || gt) && gtltComp !== 0) {\n return false\n }\n }\n\n // if there was a < or >, and nothing in the dom, then must be false\n // UNLESS it was limited by another range in the other direction.\n // Eg, >1.0.0 <1.0.1 is still a subset of <2.0.0\n if (gt && hasDomLT && !lt && gtltComp !== 0) {\n return false\n }\n\n if (lt && hasDomGT && !gt && gtltComp !== 0) {\n return false\n }\n\n // we needed a prerelease range in a specific tuple, but didn't get one\n // then this isn't a subset. eg >=1.2.3-pre is not a subset of >=1.0.0,\n // because it includes prereleases in the 1.2.3 tuple\n if (needDomGTPre || needDomLTPre) {\n return false\n }\n\n return true\n}\n\n// >=1.2.3 is lower than >1.2.3\nconst higherGT = (a, b, options) => {\n if (!a) {\n return b\n }\n const comp = compare(a.semver, b.semver, options)\n return comp > 0 ? a\n : comp < 0 ? b\n : b.operator === '>' && a.operator === '>=' ? b\n : a\n}\n\n// <=1.2.3 is higher than <1.2.3\nconst lowerLT = (a, b, options) => {\n if (!a) {\n return b\n }\n const comp = compare(a.semver, b.semver, options)\n return comp < 0 ? a\n : comp > 0 ? b\n : b.operator === '<' && a.operator === '<=' ? 
b\n : a\n}\n\nmodule.exports = subset\n","const Range = require('../classes/range')\n\n// Mostly just for testing and legacy API reasons\nconst toComparators = (range, options) =>\n new Range(range, options).set\n .map(comp => comp.map(c => c.value).join(' ').trim().split(' '))\n\nmodule.exports = toComparators\n","const Range = require('../classes/range')\nconst validRange = (range, options) => {\n try {\n // Return '*' instead of '' so that truthiness works.\n // This will throw if it's invalid anyway\n return new Range(range, options).range || '*'\n } catch (er) {\n return null\n }\n}\nmodule.exports = validRange\n","\"use strict\";\n\nvar punycode = require(\"punycode\");\nvar mappingTable = require(\"./lib/mappingTable.json\");\n\nvar PROCESSING_OPTIONS = {\n TRANSITIONAL: 0,\n NONTRANSITIONAL: 1\n};\n\nfunction normalize(str) { // fix bug in v8\n return str.split('\\u0000').map(function (s) { return s.normalize('NFC'); }).join('\\u0000');\n}\n\nfunction findStatus(val) {\n var start = 0;\n var end = mappingTable.length - 1;\n\n while (start <= end) {\n var mid = Math.floor((start + end) / 2);\n\n var target = mappingTable[mid];\n if (target[0][0] <= val && target[0][1] >= val) {\n return target;\n } else if (target[0][0] > val) {\n end = mid - 1;\n } else {\n start = mid + 1;\n }\n }\n\n return null;\n}\n\nvar regexAstralSymbols = /[\\uD800-\\uDBFF][\\uDC00-\\uDFFF]/g;\n\nfunction countSymbols(string) {\n return string\n // replace every surrogate pair with a BMP symbol\n .replace(regexAstralSymbols, '_')\n // then get the length\n .length;\n}\n\nfunction mapChars(domain_name, useSTD3, processing_option) {\n var hasError = false;\n var processed = \"\";\n\n var len = countSymbols(domain_name);\n for (var i = 0; i < len; ++i) {\n var codePoint = domain_name.codePointAt(i);\n var status = findStatus(codePoint);\n\n switch (status[1]) {\n case \"disallowed\":\n hasError = true;\n processed += String.fromCodePoint(codePoint);\n break;\n case \"ignored\":\n break;\n case \"mapped\":\n processed += String.fromCodePoint.apply(String, status[2]);\n break;\n case \"deviation\":\n if (processing_option === PROCESSING_OPTIONS.TRANSITIONAL) {\n processed += String.fromCodePoint.apply(String, status[2]);\n } else {\n processed += String.fromCodePoint(codePoint);\n }\n break;\n case \"valid\":\n processed += String.fromCodePoint(codePoint);\n break;\n case \"disallowed_STD3_mapped\":\n if (useSTD3) {\n hasError = true;\n processed += String.fromCodePoint(codePoint);\n } else {\n processed += String.fromCodePoint.apply(String, status[2]);\n }\n break;\n case \"disallowed_STD3_valid\":\n if (useSTD3) {\n hasError = true;\n }\n\n processed += String.fromCodePoint(codePoint);\n break;\n }\n }\n\n return {\n string: processed,\n error: hasError\n };\n}\n\nvar combiningMarksRegex = 
/[\\u0300-\\u036F\\u0483-\\u0489\\u0591-\\u05BD\\u05BF\\u05C1\\u05C2\\u05C4\\u05C5\\u05C7\\u0610-\\u061A\\u064B-\\u065F\\u0670\\u06D6-\\u06DC\\u06DF-\\u06E4\\u06E7\\u06E8\\u06EA-\\u06ED\\u0711\\u0730-\\u074A\\u07A6-\\u07B0\\u07EB-\\u07F3\\u0816-\\u0819\\u081B-\\u0823\\u0825-\\u0827\\u0829-\\u082D\\u0859-\\u085B\\u08E4-\\u0903\\u093A-\\u093C\\u093E-\\u094F\\u0951-\\u0957\\u0962\\u0963\\u0981-\\u0983\\u09BC\\u09BE-\\u09C4\\u09C7\\u09C8\\u09CB-\\u09CD\\u09D7\\u09E2\\u09E3\\u0A01-\\u0A03\\u0A3C\\u0A3E-\\u0A42\\u0A47\\u0A48\\u0A4B-\\u0A4D\\u0A51\\u0A70\\u0A71\\u0A75\\u0A81-\\u0A83\\u0ABC\\u0ABE-\\u0AC5\\u0AC7-\\u0AC9\\u0ACB-\\u0ACD\\u0AE2\\u0AE3\\u0B01-\\u0B03\\u0B3C\\u0B3E-\\u0B44\\u0B47\\u0B48\\u0B4B-\\u0B4D\\u0B56\\u0B57\\u0B62\\u0B63\\u0B82\\u0BBE-\\u0BC2\\u0BC6-\\u0BC8\\u0BCA-\\u0BCD\\u0BD7\\u0C00-\\u0C03\\u0C3E-\\u0C44\\u0C46-\\u0C48\\u0C4A-\\u0C4D\\u0C55\\u0C56\\u0C62\\u0C63\\u0C81-\\u0C83\\u0CBC\\u0CBE-\\u0CC4\\u0CC6-\\u0CC8\\u0CCA-\\u0CCD\\u0CD5\\u0CD6\\u0CE2\\u0CE3\\u0D01-\\u0D03\\u0D3E-\\u0D44\\u0D46-\\u0D48\\u0D4A-\\u0D4D\\u0D57\\u0D62\\u0D63\\u0D82\\u0D83\\u0DCA\\u0DCF-\\u0DD4\\u0DD6\\u0DD8-\\u0DDF\\u0DF2\\u0DF3\\u0E31\\u0E34-\\u0E3A\\u0E47-\\u0E4E\\u0EB1\\u0EB4-\\u0EB9\\u0EBB\\u0EBC\\u0EC8-\\u0ECD\\u0F18\\u0F19\\u0F35\\u0F37\\u0F39\\u0F3E\\u0F3F\\u0F71-\\u0F84\\u0F86\\u0F87\\u0F8D-\\u0F97\\u0F99-\\u0FBC\\u0FC6\\u102B-\\u103E\\u1056-\\u1059\\u105E-\\u1060\\u1062-\\u1064\\u1067-\\u106D\\u1071-\\u1074\\u1082-\\u108D\\u108F\\u109A-\\u109D\\u135D-\\u135F\\u1712-\\u1714\\u1732-\\u1734\\u1752\\u1753\\u1772\\u1773\\u17B4-\\u17D3\\u17DD\\u180B-\\u180D\\u18A9\\u1920-\\u192B\\u1930-\\u193B\\u19B0-\\u19C0\\u19C8\\u19C9\\u1A17-\\u1A1B\\u1A55-\\u1A5E\\u1A60-\\u1A7C\\u1A7F\\u1AB0-\\u1ABE\\u1B00-\\u1B04\\u1B34-\\u1B44\\u1B6B-\\u1B73\\u1B80-\\u1B82\\u1BA1-\\u1BAD\\u1BE6-\\u1BF3\\u1C24-\\u1C37\\u1CD0-\\u1CD2\\u1CD4-\\u1CE8\\u1CED\\u1CF2-\\u1CF4\\u1CF8\\u1CF9\\u1DC0-\\u1DF5\\u1DFC-\\u1DFF\\u20D0-\\u20F0\\u2CEF-\\u2CF1\\u2D7F\\u2DE0-\\u2DFF\\u302A-\\u302F\\u3099\\u309A\\uA66F-\\uA672\\uA674-\\uA67D\\uA69F\\uA6F0\\uA6F1\\uA802\\uA806\\uA80B\\uA823-\\uA827\\uA880\\uA881\\uA8B4-\\uA8C4\\uA8E0-\\uA8F1\\uA926-\\uA92D\\uA947-\\uA953\\uA980-\\uA983\\uA9B3-\\uA9C0\\uA9E5\\uAA29-\\uAA36\\uAA43\\uAA4C\\uAA4D\\uAA7B-\\uAA7D\\uAAB0\\uAAB2-\\uAAB4\\uAAB7\\uAAB8\\uAABE\\uAABF\\uAAC1\\uAAEB-\\uAAEF\\uAAF5\\uAAF6\\uABE3-\\uABEA\\uABEC\\uABED\\uFB1E\\uFE00-\\uFE0F\\uFE20-\\uFE2D]|\\uD800[\\uDDFD\\uDEE0\\uDF76-\\uDF7A]|\\uD802[\\uDE01-\\uDE03\\uDE05\\uDE06\\uDE0C-\\uDE0F\\uDE38-\\uDE3A\\uDE3F\\uDEE5\\uDEE6]|\\uD804[\\uDC00-\\uDC02\\uDC38-\\uDC46\\uDC7F-\\uDC82\\uDCB0-\\uDCBA\\uDD00-\\uDD02\\uDD27-\\uDD34\\uDD73\\uDD80-\\uDD82\\uDDB3-\\uDDC0\\uDE2C-\\uDE37\\uDEDF-\\uDEEA\\uDF01-\\uDF03\\uDF3C\\uDF3E-\\uDF44\\uDF47\\uDF48\\uDF4B-\\uDF4D\\uDF57\\uDF62\\uDF63\\uDF66-\\uDF6C\\uDF70-\\uDF74]|\\uD805[\\uDCB0-\\uDCC3\\uDDAF-\\uDDB5\\uDDB8-\\uDDC0\\uDE30-\\uDE40\\uDEAB-\\uDEB7]|\\uD81A[\\uDEF0-\\uDEF4\\uDF30-\\uDF36]|\\uD81B[\\uDF51-\\uDF7E\\uDF8F-\\uDF92]|\\uD82F[\\uDC9D\\uDC9E]|\\uD834[\\uDD65-\\uDD69\\uDD6D-\\uDD72\\uDD7B-\\uDD82\\uDD85-\\uDD8B\\uDDAA-\\uDDAD\\uDE42-\\uDE44]|\\uD83A[\\uDCD0-\\uDCD6]|\\uDB40[\\uDD00-\\uDDEF]/;\n\nfunction validateLabel(label, processing_option) {\n if (label.substr(0, 4) === \"xn--\") {\n label = punycode.toUnicode(label);\n processing_option = PROCESSING_OPTIONS.NONTRANSITIONAL;\n }\n\n var error = false;\n\n if (normalize(label) !== label ||\n (label[3] === \"-\" && label[4] === \"-\") ||\n label[0] === \"-\" || label[label.length - 1] === \"-\" ||\n label.indexOf(\".\") !== -1 ||\n 
label.search(combiningMarksRegex) === 0) {\n error = true;\n }\n\n var len = countSymbols(label);\n for (var i = 0; i < len; ++i) {\n var status = findStatus(label.codePointAt(i));\n if ((processing === PROCESSING_OPTIONS.TRANSITIONAL && status[1] !== \"valid\") ||\n (processing === PROCESSING_OPTIONS.NONTRANSITIONAL &&\n status[1] !== \"valid\" && status[1] !== \"deviation\")) {\n error = true;\n break;\n }\n }\n\n return {\n label: label,\n error: error\n };\n}\n\nfunction processing(domain_name, useSTD3, processing_option) {\n var result = mapChars(domain_name, useSTD3, processing_option);\n result.string = normalize(result.string);\n\n var labels = result.string.split(\".\");\n for (var i = 0; i < labels.length; ++i) {\n try {\n var validation = validateLabel(labels[i]);\n labels[i] = validation.label;\n result.error = result.error || validation.error;\n } catch(e) {\n result.error = true;\n }\n }\n\n return {\n string: labels.join(\".\"),\n error: result.error\n };\n}\n\nmodule.exports.toASCII = function(domain_name, useSTD3, processing_option, verifyDnsLength) {\n var result = processing(domain_name, useSTD3, processing_option);\n var labels = result.string.split(\".\");\n labels = labels.map(function(l) {\n try {\n return punycode.toASCII(l);\n } catch(e) {\n result.error = true;\n return l;\n }\n });\n\n if (verifyDnsLength) {\n var total = labels.slice(0, labels.length - 1).join(\".\").length;\n if (total.length > 253 || total.length === 0) {\n result.error = true;\n }\n\n for (var i=0; i < labels.length; ++i) {\n if (labels.length > 63 || labels.length === 0) {\n result.error = true;\n break;\n }\n }\n }\n\n if (result.error) return null;\n return labels.join(\".\");\n};\n\nmodule.exports.toUnicode = function(domain_name, useSTD3) {\n var result = processing(domain_name, useSTD3, PROCESSING_OPTIONS.NONTRANSITIONAL);\n\n return {\n domain: result.string,\n error: result.error\n };\n};\n\nmodule.exports.PROCESSING_OPTIONS = PROCESSING_OPTIONS;\n","module.exports = require('./lib/tunnel');\n","'use strict';\n\nvar net = require('net');\nvar tls = require('tls');\nvar http = require('http');\nvar https = require('https');\nvar events = require('events');\nvar assert = require('assert');\nvar util = require('util');\n\n\nexports.httpOverHttp = httpOverHttp;\nexports.httpsOverHttp = httpsOverHttp;\nexports.httpOverHttps = httpOverHttps;\nexports.httpsOverHttps = httpsOverHttps;\n\n\nfunction httpOverHttp(options) {\n var agent = new TunnelingAgent(options);\n agent.request = http.request;\n return agent;\n}\n\nfunction httpsOverHttp(options) {\n var agent = new TunnelingAgent(options);\n agent.request = http.request;\n agent.createSocket = createSecureSocket;\n agent.defaultPort = 443;\n return agent;\n}\n\nfunction httpOverHttps(options) {\n var agent = new TunnelingAgent(options);\n agent.request = https.request;\n return agent;\n}\n\nfunction httpsOverHttps(options) {\n var agent = new TunnelingAgent(options);\n agent.request = https.request;\n agent.createSocket = createSecureSocket;\n agent.defaultPort = 443;\n return agent;\n}\n\n\nfunction TunnelingAgent(options) {\n var self = this;\n self.options = options || {};\n self.proxyOptions = self.options.proxy || {};\n self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;\n self.requests = [];\n self.sockets = [];\n\n self.on('free', function onFree(socket, host, port, localAddress) {\n var options = toOptions(host, port, localAddress);\n for (var i = 0, len = self.requests.length; i < len; ++i) {\n var pending = 
self.requests[i];\n if (pending.host === options.host && pending.port === options.port) {\n // Detect the request to connect same origin server,\n // reuse the connection.\n self.requests.splice(i, 1);\n pending.request.onSocket(socket);\n return;\n }\n }\n socket.destroy();\n self.removeSocket(socket);\n });\n}\nutil.inherits(TunnelingAgent, events.EventEmitter);\n\nTunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {\n var self = this;\n var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));\n\n if (self.sockets.length >= this.maxSockets) {\n // We are over limit so we'll add it to the queue.\n self.requests.push(options);\n return;\n }\n\n // If we are under maxSockets create a new one.\n self.createSocket(options, function(socket) {\n socket.on('free', onFree);\n socket.on('close', onCloseOrRemove);\n socket.on('agentRemove', onCloseOrRemove);\n req.onSocket(socket);\n\n function onFree() {\n self.emit('free', socket, options);\n }\n\n function onCloseOrRemove(err) {\n self.removeSocket(socket);\n socket.removeListener('free', onFree);\n socket.removeListener('close', onCloseOrRemove);\n socket.removeListener('agentRemove', onCloseOrRemove);\n }\n });\n};\n\nTunnelingAgent.prototype.createSocket = function createSocket(options, cb) {\n var self = this;\n var placeholder = {};\n self.sockets.push(placeholder);\n\n var connectOptions = mergeOptions({}, self.proxyOptions, {\n method: 'CONNECT',\n path: options.host + ':' + options.port,\n agent: false,\n headers: {\n host: options.host + ':' + options.port\n }\n });\n if (options.localAddress) {\n connectOptions.localAddress = options.localAddress;\n }\n if (connectOptions.proxyAuth) {\n connectOptions.headers = connectOptions.headers || {};\n connectOptions.headers['Proxy-Authorization'] = 'Basic ' +\n new Buffer(connectOptions.proxyAuth).toString('base64');\n }\n\n debug('making CONNECT request');\n var connectReq = self.request(connectOptions);\n connectReq.useChunkedEncodingByDefault = false; // for v0.6\n connectReq.once('response', onResponse); // for v0.6\n connectReq.once('upgrade', onUpgrade); // for v0.6\n connectReq.once('connect', onConnect); // for v0.7 or later\n connectReq.once('error', onError);\n connectReq.end();\n\n function onResponse(res) {\n // Very hacky. 
This is necessary to avoid http-parser leaks.\n res.upgrade = true;\n }\n\n function onUpgrade(res, socket, head) {\n // Hacky.\n process.nextTick(function() {\n onConnect(res, socket, head);\n });\n }\n\n function onConnect(res, socket, head) {\n connectReq.removeAllListeners();\n socket.removeAllListeners();\n\n if (res.statusCode !== 200) {\n debug('tunneling socket could not be established, statusCode=%d',\n res.statusCode);\n socket.destroy();\n var error = new Error('tunneling socket could not be established, ' +\n 'statusCode=' + res.statusCode);\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n return;\n }\n if (head.length > 0) {\n debug('got illegal response body from proxy');\n socket.destroy();\n var error = new Error('got illegal response body from proxy');\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n return;\n }\n debug('tunneling connection has established');\n self.sockets[self.sockets.indexOf(placeholder)] = socket;\n return cb(socket);\n }\n\n function onError(cause) {\n connectReq.removeAllListeners();\n\n debug('tunneling socket could not be established, cause=%s\\n',\n cause.message, cause.stack);\n var error = new Error('tunneling socket could not be established, ' +\n 'cause=' + cause.message);\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n }\n};\n\nTunnelingAgent.prototype.removeSocket = function removeSocket(socket) {\n var pos = this.sockets.indexOf(socket)\n if (pos === -1) {\n return;\n }\n this.sockets.splice(pos, 1);\n\n var pending = this.requests.shift();\n if (pending) {\n // If we have pending requests and a socket gets closed a new one\n // needs to be created to take over in the pool for the one that closed.\n this.createSocket(pending, function(socket) {\n pending.request.onSocket(socket);\n });\n }\n};\n\nfunction createSecureSocket(options, cb) {\n var self = this;\n TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {\n var hostHeader = options.request.getHeader('host');\n var tlsOptions = mergeOptions({}, self.options, {\n socket: socket,\n servername: hostHeader ? 
hostHeader.replace(/:.*$/, '') : options.host\n });\n\n // 0 is dummy port for v0.6\n var secureSocket = tls.connect(0, tlsOptions);\n self.sockets[self.sockets.indexOf(socket)] = secureSocket;\n cb(secureSocket);\n });\n}\n\n\nfunction toOptions(host, port, localAddress) {\n if (typeof host === 'string') { // since v0.10\n return {\n host: host,\n port: port,\n localAddress: localAddress\n };\n }\n return host; // for v0.11 or later\n}\n\nfunction mergeOptions(target) {\n for (var i = 1, len = arguments.length; i < len; ++i) {\n var overrides = arguments[i];\n if (typeof overrides === 'object') {\n var keys = Object.keys(overrides);\n for (var j = 0, keyLen = keys.length; j < keyLen; ++j) {\n var k = keys[j];\n if (overrides[k] !== undefined) {\n target[k] = overrides[k];\n }\n }\n }\n }\n return target;\n}\n\n\nvar debug;\nif (process.env.NODE_DEBUG && /\\btunnel\\b/.test(process.env.NODE_DEBUG)) {\n debug = function() {\n var args = Array.prototype.slice.call(arguments);\n if (typeof args[0] === 'string') {\n args[0] = 'TUNNEL: ' + args[0];\n } else {\n args.unshift('TUNNEL:');\n }\n console.error.apply(console, args);\n }\n} else {\n debug = function() {};\n}\nexports.debug = debug; // for test\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction getUserAgent() {\n if (typeof navigator === \"object\" && \"userAgent\" in navigator) {\n return navigator.userAgent;\n }\n\n if (typeof process === \"object\" && \"version\" in process) {\n return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`;\n }\n\n return \"\";\n}\n\nexports.getUserAgent = getUserAgent;\n//# sourceMappingURL=index.js.map\n","'use strict'\n\nexports.fromCallback = function (fn) {\n return Object.defineProperty(function (...args) {\n if (typeof args[args.length - 1] === 'function') fn.apply(this, args)\n else {\n return new Promise((resolve, reject) => {\n fn.call(\n this,\n ...args,\n (err, res) => (err != null) ? reject(err) : resolve(res)\n )\n })\n }\n }, 'name', { value: fn.name })\n}\n\nexports.fromPromise = function (fn) {\n return Object.defineProperty(function (...args) {\n const cb = args[args.length - 1]\n if (typeof cb !== 'function') return fn.apply(this, args)\n else fn.apply(this, args.slice(0, -1)).then(r => cb(null, r), cb)\n }, 'name', { value: fn.name })\n}\n","\"use strict\";\n\nvar conversions = {};\nmodule.exports = conversions;\n\nfunction sign(x) {\n return x < 0 ? -1 : 1;\n}\n\nfunction evenRound(x) {\n // Round x to the nearest integer, choosing the even integer if it lies halfway between two.\n if ((x % 1) === 0.5 && (x & 1) === 0) { // [even number].5; round down (i.e. floor)\n return Math.floor(x);\n } else {\n return Math.round(x);\n }\n}\n\nfunction createNumberConversion(bitLength, typeOpts) {\n if (!typeOpts.unsigned) {\n --bitLength;\n }\n const lowerBound = typeOpts.unsigned ? 0 : -Math.pow(2, bitLength);\n const upperBound = Math.pow(2, bitLength) - 1;\n\n const moduloVal = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength) : Math.pow(2, bitLength);\n const moduloBound = typeOpts.moduloBitLength ? 
Math.pow(2, typeOpts.moduloBitLength - 1) : Math.pow(2, bitLength - 1);\n\n return function(V, opts) {\n if (!opts) opts = {};\n\n let x = +V;\n\n if (opts.enforceRange) {\n if (!Number.isFinite(x)) {\n throw new TypeError(\"Argument is not a finite number\");\n }\n\n x = sign(x) * Math.floor(Math.abs(x));\n if (x < lowerBound || x > upperBound) {\n throw new TypeError(\"Argument is not in byte range\");\n }\n\n return x;\n }\n\n if (!isNaN(x) && opts.clamp) {\n x = evenRound(x);\n\n if (x < lowerBound) x = lowerBound;\n if (x > upperBound) x = upperBound;\n return x;\n }\n\n if (!Number.isFinite(x) || x === 0) {\n return 0;\n }\n\n x = sign(x) * Math.floor(Math.abs(x));\n x = x % moduloVal;\n\n if (!typeOpts.unsigned && x >= moduloBound) {\n return x - moduloVal;\n } else if (typeOpts.unsigned) {\n if (x < 0) {\n x += moduloVal;\n } else if (x === -0) { // don't return negative zero\n return 0;\n }\n }\n\n return x;\n }\n}\n\nconversions[\"void\"] = function () {\n return undefined;\n};\n\nconversions[\"boolean\"] = function (val) {\n return !!val;\n};\n\nconversions[\"byte\"] = createNumberConversion(8, { unsigned: false });\nconversions[\"octet\"] = createNumberConversion(8, { unsigned: true });\n\nconversions[\"short\"] = createNumberConversion(16, { unsigned: false });\nconversions[\"unsigned short\"] = createNumberConversion(16, { unsigned: true });\n\nconversions[\"long\"] = createNumberConversion(32, { unsigned: false });\nconversions[\"unsigned long\"] = createNumberConversion(32, { unsigned: true });\n\nconversions[\"long long\"] = createNumberConversion(32, { unsigned: false, moduloBitLength: 64 });\nconversions[\"unsigned long long\"] = createNumberConversion(32, { unsigned: true, moduloBitLength: 64 });\n\nconversions[\"double\"] = function (V) {\n const x = +V;\n\n if (!Number.isFinite(x)) {\n throw new TypeError(\"Argument is not a finite floating-point value\");\n }\n\n return x;\n};\n\nconversions[\"unrestricted double\"] = function (V) {\n const x = +V;\n\n if (isNaN(x)) {\n throw new TypeError(\"Argument is NaN\");\n }\n\n return x;\n};\n\n// not quite valid, but good enough for JS\nconversions[\"float\"] = conversions[\"double\"];\nconversions[\"unrestricted float\"] = conversions[\"unrestricted double\"];\n\nconversions[\"DOMString\"] = function (V, opts) {\n if (!opts) opts = {};\n\n if (opts.treatNullAsEmptyString && V === null) {\n return \"\";\n }\n\n return String(V);\n};\n\nconversions[\"ByteString\"] = function (V, opts) {\n const x = String(V);\n let c = undefined;\n for (let i = 0; (c = x.codePointAt(i)) !== undefined; ++i) {\n if (c > 255) {\n throw new TypeError(\"Argument is not a valid bytestring\");\n }\n }\n\n return x;\n};\n\nconversions[\"USVString\"] = function (V) {\n const S = String(V);\n const n = S.length;\n const U = [];\n for (let i = 0; i < n; ++i) {\n const c = S.charCodeAt(i);\n if (c < 0xD800 || c > 0xDFFF) {\n U.push(String.fromCodePoint(c));\n } else if (0xDC00 <= c && c <= 0xDFFF) {\n U.push(String.fromCodePoint(0xFFFD));\n } else {\n if (i === n - 1) {\n U.push(String.fromCodePoint(0xFFFD));\n } else {\n const d = S.charCodeAt(i + 1);\n if (0xDC00 <= d && d <= 0xDFFF) {\n const a = c & 0x3FF;\n const b = d & 0x3FF;\n U.push(String.fromCodePoint((2 << 15) + (2 << 9) * a + b));\n ++i;\n } else {\n U.push(String.fromCodePoint(0xFFFD));\n }\n }\n }\n }\n\n return U.join('');\n};\n\nconversions[\"Date\"] = function (V, opts) {\n if (!(V instanceof Date)) {\n throw new TypeError(\"Argument is not a Date object\");\n }\n if (isNaN(V)) {\n 
return undefined;\n }\n\n return V;\n};\n\nconversions[\"RegExp\"] = function (V, opts) {\n if (!(V instanceof RegExp)) {\n V = new RegExp(V);\n }\n\n return V;\n};\n","\"use strict\";\nconst usm = require(\"./url-state-machine\");\n\nexports.implementation = class URLImpl {\n constructor(constructorArgs) {\n const url = constructorArgs[0];\n const base = constructorArgs[1];\n\n let parsedBase = null;\n if (base !== undefined) {\n parsedBase = usm.basicURLParse(base);\n if (parsedBase === \"failure\") {\n throw new TypeError(\"Invalid base URL\");\n }\n }\n\n const parsedURL = usm.basicURLParse(url, { baseURL: parsedBase });\n if (parsedURL === \"failure\") {\n throw new TypeError(\"Invalid URL\");\n }\n\n this._url = parsedURL;\n\n // TODO: query stuff\n }\n\n get href() {\n return usm.serializeURL(this._url);\n }\n\n set href(v) {\n const parsedURL = usm.basicURLParse(v);\n if (parsedURL === \"failure\") {\n throw new TypeError(\"Invalid URL\");\n }\n\n this._url = parsedURL;\n }\n\n get origin() {\n return usm.serializeURLOrigin(this._url);\n }\n\n get protocol() {\n return this._url.scheme + \":\";\n }\n\n set protocol(v) {\n usm.basicURLParse(v + \":\", { url: this._url, stateOverride: \"scheme start\" });\n }\n\n get username() {\n return this._url.username;\n }\n\n set username(v) {\n if (usm.cannotHaveAUsernamePasswordPort(this._url)) {\n return;\n }\n\n usm.setTheUsername(this._url, v);\n }\n\n get password() {\n return this._url.password;\n }\n\n set password(v) {\n if (usm.cannotHaveAUsernamePasswordPort(this._url)) {\n return;\n }\n\n usm.setThePassword(this._url, v);\n }\n\n get host() {\n const url = this._url;\n\n if (url.host === null) {\n return \"\";\n }\n\n if (url.port === null) {\n return usm.serializeHost(url.host);\n }\n\n return usm.serializeHost(url.host) + \":\" + usm.serializeInteger(url.port);\n }\n\n set host(v) {\n if (this._url.cannotBeABaseURL) {\n return;\n }\n\n usm.basicURLParse(v, { url: this._url, stateOverride: \"host\" });\n }\n\n get hostname() {\n if (this._url.host === null) {\n return \"\";\n }\n\n return usm.serializeHost(this._url.host);\n }\n\n set hostname(v) {\n if (this._url.cannotBeABaseURL) {\n return;\n }\n\n usm.basicURLParse(v, { url: this._url, stateOverride: \"hostname\" });\n }\n\n get port() {\n if (this._url.port === null) {\n return \"\";\n }\n\n return usm.serializeInteger(this._url.port);\n }\n\n set port(v) {\n if (usm.cannotHaveAUsernamePasswordPort(this._url)) {\n return;\n }\n\n if (v === \"\") {\n this._url.port = null;\n } else {\n usm.basicURLParse(v, { url: this._url, stateOverride: \"port\" });\n }\n }\n\n get pathname() {\n if (this._url.cannotBeABaseURL) {\n return this._url.path[0];\n }\n\n if (this._url.path.length === 0) {\n return \"\";\n }\n\n return \"/\" + this._url.path.join(\"/\");\n }\n\n set pathname(v) {\n if (this._url.cannotBeABaseURL) {\n return;\n }\n\n this._url.path = [];\n usm.basicURLParse(v, { url: this._url, stateOverride: \"path start\" });\n }\n\n get search() {\n if (this._url.query === null || this._url.query === \"\") {\n return \"\";\n }\n\n return \"?\" + this._url.query;\n }\n\n set search(v) {\n // TODO: query stuff\n\n const url = this._url;\n\n if (v === \"\") {\n url.query = null;\n return;\n }\n\n const input = v[0] === \"?\" ? 
v.substring(1) : v;\n url.query = \"\";\n usm.basicURLParse(input, { url, stateOverride: \"query\" });\n }\n\n get hash() {\n if (this._url.fragment === null || this._url.fragment === \"\") {\n return \"\";\n }\n\n return \"#\" + this._url.fragment;\n }\n\n set hash(v) {\n if (v === \"\") {\n this._url.fragment = null;\n return;\n }\n\n const input = v[0] === \"#\" ? v.substring(1) : v;\n this._url.fragment = \"\";\n usm.basicURLParse(input, { url: this._url, stateOverride: \"fragment\" });\n }\n\n toJSON() {\n return this.href;\n }\n};\n","\"use strict\";\n\nconst conversions = require(\"webidl-conversions\");\nconst utils = require(\"./utils.js\");\nconst Impl = require(\".//URL-impl.js\");\n\nconst impl = utils.implSymbol;\n\nfunction URL(url) {\n if (!this || this[impl] || !(this instanceof URL)) {\n throw new TypeError(\"Failed to construct 'URL': Please use the 'new' operator, this DOM object constructor cannot be called as a function.\");\n }\n if (arguments.length < 1) {\n throw new TypeError(\"Failed to construct 'URL': 1 argument required, but only \" + arguments.length + \" present.\");\n }\n const args = [];\n for (let i = 0; i < arguments.length && i < 2; ++i) {\n args[i] = arguments[i];\n }\n args[0] = conversions[\"USVString\"](args[0]);\n if (args[1] !== undefined) {\n args[1] = conversions[\"USVString\"](args[1]);\n }\n\n module.exports.setup(this, args);\n}\n\nURL.prototype.toJSON = function toJSON() {\n if (!this || !module.exports.is(this)) {\n throw new TypeError(\"Illegal invocation\");\n }\n const args = [];\n for (let i = 0; i < arguments.length && i < 0; ++i) {\n args[i] = arguments[i];\n }\n return this[impl].toJSON.apply(this[impl], args);\n};\nObject.defineProperty(URL.prototype, \"href\", {\n get() {\n return this[impl].href;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].href = V;\n },\n enumerable: true,\n configurable: true\n});\n\nURL.prototype.toString = function () {\n if (!this || !module.exports.is(this)) {\n throw new TypeError(\"Illegal invocation\");\n }\n return this.href;\n};\n\nObject.defineProperty(URL.prototype, \"origin\", {\n get() {\n return this[impl].origin;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"protocol\", {\n get() {\n return this[impl].protocol;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].protocol = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"username\", {\n get() {\n return this[impl].username;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].username = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"password\", {\n get() {\n return this[impl].password;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].password = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"host\", {\n get() {\n return this[impl].host;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].host = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"hostname\", {\n get() {\n return this[impl].hostname;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].hostname = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"port\", {\n get() {\n return this[impl].port;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].port = V;\n },\n enumerable: true,\n configurable: 
true\n});\n\nObject.defineProperty(URL.prototype, \"pathname\", {\n get() {\n return this[impl].pathname;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].pathname = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"search\", {\n get() {\n return this[impl].search;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].search = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"hash\", {\n get() {\n return this[impl].hash;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].hash = V;\n },\n enumerable: true,\n configurable: true\n});\n\n\nmodule.exports = {\n is(obj) {\n return !!obj && obj[impl] instanceof Impl.implementation;\n },\n create(constructorArgs, privateData) {\n let obj = Object.create(URL.prototype);\n this.setup(obj, constructorArgs, privateData);\n return obj;\n },\n setup(obj, constructorArgs, privateData) {\n if (!privateData) privateData = {};\n privateData.wrapper = obj;\n\n obj[impl] = new Impl.implementation(constructorArgs, privateData);\n obj[impl][utils.wrapperSymbol] = obj;\n },\n interface: URL,\n expose: {\n Window: { URL: URL },\n Worker: { URL: URL }\n }\n};\n\n","\"use strict\";\n\nexports.URL = require(\"./URL\").interface;\nexports.serializeURL = require(\"./url-state-machine\").serializeURL;\nexports.serializeURLOrigin = require(\"./url-state-machine\").serializeURLOrigin;\nexports.basicURLParse = require(\"./url-state-machine\").basicURLParse;\nexports.setTheUsername = require(\"./url-state-machine\").setTheUsername;\nexports.setThePassword = require(\"./url-state-machine\").setThePassword;\nexports.serializeHost = require(\"./url-state-machine\").serializeHost;\nexports.serializeInteger = require(\"./url-state-machine\").serializeInteger;\nexports.parseURL = require(\"./url-state-machine\").parseURL;\n","\"use strict\";\r\nconst punycode = require(\"punycode\");\r\nconst tr46 = require(\"tr46\");\r\n\r\nconst specialSchemes = {\r\n ftp: 21,\r\n file: null,\r\n gopher: 70,\r\n http: 80,\r\n https: 443,\r\n ws: 80,\r\n wss: 443\r\n};\r\n\r\nconst failure = Symbol(\"failure\");\r\n\r\nfunction countSymbols(str) {\r\n return punycode.ucs2.decode(str).length;\r\n}\r\n\r\nfunction at(input, idx) {\r\n const c = input[idx];\r\n return isNaN(c) ? 
undefined : String.fromCodePoint(c);\r\n}\r\n\r\nfunction isASCIIDigit(c) {\r\n return c >= 0x30 && c <= 0x39;\r\n}\r\n\r\nfunction isASCIIAlpha(c) {\r\n return (c >= 0x41 && c <= 0x5A) || (c >= 0x61 && c <= 0x7A);\r\n}\r\n\r\nfunction isASCIIAlphanumeric(c) {\r\n return isASCIIAlpha(c) || isASCIIDigit(c);\r\n}\r\n\r\nfunction isASCIIHex(c) {\r\n return isASCIIDigit(c) || (c >= 0x41 && c <= 0x46) || (c >= 0x61 && c <= 0x66);\r\n}\r\n\r\nfunction isSingleDot(buffer) {\r\n return buffer === \".\" || buffer.toLowerCase() === \"%2e\";\r\n}\r\n\r\nfunction isDoubleDot(buffer) {\r\n buffer = buffer.toLowerCase();\r\n return buffer === \"..\" || buffer === \"%2e.\" || buffer === \".%2e\" || buffer === \"%2e%2e\";\r\n}\r\n\r\nfunction isWindowsDriveLetterCodePoints(cp1, cp2) {\r\n return isASCIIAlpha(cp1) && (cp2 === 58 || cp2 === 124);\r\n}\r\n\r\nfunction isWindowsDriveLetterString(string) {\r\n return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && (string[1] === \":\" || string[1] === \"|\");\r\n}\r\n\r\nfunction isNormalizedWindowsDriveLetterString(string) {\r\n return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && string[1] === \":\";\r\n}\r\n\r\nfunction containsForbiddenHostCodePoint(string) {\r\n return string.search(/\\u0000|\\u0009|\\u000A|\\u000D|\\u0020|#|%|\\/|:|\\?|@|\\[|\\\\|\\]/) !== -1;\r\n}\r\n\r\nfunction containsForbiddenHostCodePointExcludingPercent(string) {\r\n return string.search(/\\u0000|\\u0009|\\u000A|\\u000D|\\u0020|#|\\/|:|\\?|@|\\[|\\\\|\\]/) !== -1;\r\n}\r\n\r\nfunction isSpecialScheme(scheme) {\r\n return specialSchemes[scheme] !== undefined;\r\n}\r\n\r\nfunction isSpecial(url) {\r\n return isSpecialScheme(url.scheme);\r\n}\r\n\r\nfunction defaultPort(scheme) {\r\n return specialSchemes[scheme];\r\n}\r\n\r\nfunction percentEncode(c) {\r\n let hex = c.toString(16).toUpperCase();\r\n if (hex.length === 1) {\r\n hex = \"0\" + hex;\r\n }\r\n\r\n return \"%\" + hex;\r\n}\r\n\r\nfunction utf8PercentEncode(c) {\r\n const buf = new Buffer(c);\r\n\r\n let str = \"\";\r\n\r\n for (let i = 0; i < buf.length; ++i) {\r\n str += percentEncode(buf[i]);\r\n }\r\n\r\n return str;\r\n}\r\n\r\nfunction utf8PercentDecode(str) {\r\n const input = new Buffer(str);\r\n const output = [];\r\n for (let i = 0; i < input.length; ++i) {\r\n if (input[i] !== 37) {\r\n output.push(input[i]);\r\n } else if (input[i] === 37 && isASCIIHex(input[i + 1]) && isASCIIHex(input[i + 2])) {\r\n output.push(parseInt(input.slice(i + 1, i + 3).toString(), 16));\r\n i += 2;\r\n } else {\r\n output.push(input[i]);\r\n }\r\n }\r\n return new Buffer(output).toString();\r\n}\r\n\r\nfunction isC0ControlPercentEncode(c) {\r\n return c <= 0x1F || c > 0x7E;\r\n}\r\n\r\nconst extraPathPercentEncodeSet = new Set([32, 34, 35, 60, 62, 63, 96, 123, 125]);\r\nfunction isPathPercentEncode(c) {\r\n return isC0ControlPercentEncode(c) || extraPathPercentEncodeSet.has(c);\r\n}\r\n\r\nconst extraUserinfoPercentEncodeSet =\r\n new Set([47, 58, 59, 61, 64, 91, 92, 93, 94, 124]);\r\nfunction isUserinfoPercentEncode(c) {\r\n return isPathPercentEncode(c) || extraUserinfoPercentEncodeSet.has(c);\r\n}\r\n\r\nfunction percentEncodeChar(c, encodeSetPredicate) {\r\n const cStr = String.fromCodePoint(c);\r\n\r\n if (encodeSetPredicate(c)) {\r\n return utf8PercentEncode(cStr);\r\n }\r\n\r\n return cStr;\r\n}\r\n\r\nfunction parseIPv4Number(input) {\r\n let R = 10;\r\n\r\n if (input.length >= 2 && input.charAt(0) === \"0\" && input.charAt(1).toLowerCase() === \"x\") {\r\n input = input.substring(2);\r\n R = 
16;\r\n } else if (input.length >= 2 && input.charAt(0) === \"0\") {\r\n input = input.substring(1);\r\n R = 8;\r\n }\r\n\r\n if (input === \"\") {\r\n return 0;\r\n }\r\n\r\n const regex = R === 10 ? /[^0-9]/ : (R === 16 ? /[^0-9A-Fa-f]/ : /[^0-7]/);\r\n if (regex.test(input)) {\r\n return failure;\r\n }\r\n\r\n return parseInt(input, R);\r\n}\r\n\r\nfunction parseIPv4(input) {\r\n const parts = input.split(\".\");\r\n if (parts[parts.length - 1] === \"\") {\r\n if (parts.length > 1) {\r\n parts.pop();\r\n }\r\n }\r\n\r\n if (parts.length > 4) {\r\n return input;\r\n }\r\n\r\n const numbers = [];\r\n for (const part of parts) {\r\n if (part === \"\") {\r\n return input;\r\n }\r\n const n = parseIPv4Number(part);\r\n if (n === failure) {\r\n return input;\r\n }\r\n\r\n numbers.push(n);\r\n }\r\n\r\n for (let i = 0; i < numbers.length - 1; ++i) {\r\n if (numbers[i] > 255) {\r\n return failure;\r\n }\r\n }\r\n if (numbers[numbers.length - 1] >= Math.pow(256, 5 - numbers.length)) {\r\n return failure;\r\n }\r\n\r\n let ipv4 = numbers.pop();\r\n let counter = 0;\r\n\r\n for (const n of numbers) {\r\n ipv4 += n * Math.pow(256, 3 - counter);\r\n ++counter;\r\n }\r\n\r\n return ipv4;\r\n}\r\n\r\nfunction serializeIPv4(address) {\r\n let output = \"\";\r\n let n = address;\r\n\r\n for (let i = 1; i <= 4; ++i) {\r\n output = String(n % 256) + output;\r\n if (i !== 4) {\r\n output = \".\" + output;\r\n }\r\n n = Math.floor(n / 256);\r\n }\r\n\r\n return output;\r\n}\r\n\r\nfunction parseIPv6(input) {\r\n const address = [0, 0, 0, 0, 0, 0, 0, 0];\r\n let pieceIndex = 0;\r\n let compress = null;\r\n let pointer = 0;\r\n\r\n input = punycode.ucs2.decode(input);\r\n\r\n if (input[pointer] === 58) {\r\n if (input[pointer + 1] !== 58) {\r\n return failure;\r\n }\r\n\r\n pointer += 2;\r\n ++pieceIndex;\r\n compress = pieceIndex;\r\n }\r\n\r\n while (pointer < input.length) {\r\n if (pieceIndex === 8) {\r\n return failure;\r\n }\r\n\r\n if (input[pointer] === 58) {\r\n if (compress !== null) {\r\n return failure;\r\n }\r\n ++pointer;\r\n ++pieceIndex;\r\n compress = pieceIndex;\r\n continue;\r\n }\r\n\r\n let value = 0;\r\n let length = 0;\r\n\r\n while (length < 4 && isASCIIHex(input[pointer])) {\r\n value = value * 0x10 + parseInt(at(input, pointer), 16);\r\n ++pointer;\r\n ++length;\r\n }\r\n\r\n if (input[pointer] === 46) {\r\n if (length === 0) {\r\n return failure;\r\n }\r\n\r\n pointer -= length;\r\n\r\n if (pieceIndex > 6) {\r\n return failure;\r\n }\r\n\r\n let numbersSeen = 0;\r\n\r\n while (input[pointer] !== undefined) {\r\n let ipv4Piece = null;\r\n\r\n if (numbersSeen > 0) {\r\n if (input[pointer] === 46 && numbersSeen < 4) {\r\n ++pointer;\r\n } else {\r\n return failure;\r\n }\r\n }\r\n\r\n if (!isASCIIDigit(input[pointer])) {\r\n return failure;\r\n }\r\n\r\n while (isASCIIDigit(input[pointer])) {\r\n const number = parseInt(at(input, pointer));\r\n if (ipv4Piece === null) {\r\n ipv4Piece = number;\r\n } else if (ipv4Piece === 0) {\r\n return failure;\r\n } else {\r\n ipv4Piece = ipv4Piece * 10 + number;\r\n }\r\n if (ipv4Piece > 255) {\r\n return failure;\r\n }\r\n ++pointer;\r\n }\r\n\r\n address[pieceIndex] = address[pieceIndex] * 0x100 + ipv4Piece;\r\n\r\n ++numbersSeen;\r\n\r\n if (numbersSeen === 2 || numbersSeen === 4) {\r\n ++pieceIndex;\r\n }\r\n }\r\n\r\n if (numbersSeen !== 4) {\r\n return failure;\r\n }\r\n\r\n break;\r\n } else if (input[pointer] === 58) {\r\n ++pointer;\r\n if (input[pointer] === undefined) {\r\n return failure;\r\n }\r\n } else if (input[pointer] !== 
undefined) {\r\n return failure;\r\n }\r\n\r\n address[pieceIndex] = value;\r\n ++pieceIndex;\r\n }\r\n\r\n if (compress !== null) {\r\n let swaps = pieceIndex - compress;\r\n pieceIndex = 7;\r\n while (pieceIndex !== 0 && swaps > 0) {\r\n const temp = address[compress + swaps - 1];\r\n address[compress + swaps - 1] = address[pieceIndex];\r\n address[pieceIndex] = temp;\r\n --pieceIndex;\r\n --swaps;\r\n }\r\n } else if (compress === null && pieceIndex !== 8) {\r\n return failure;\r\n }\r\n\r\n return address;\r\n}\r\n\r\nfunction serializeIPv6(address) {\r\n let output = \"\";\r\n const seqResult = findLongestZeroSequence(address);\r\n const compress = seqResult.idx;\r\n let ignore0 = false;\r\n\r\n for (let pieceIndex = 0; pieceIndex <= 7; ++pieceIndex) {\r\n if (ignore0 && address[pieceIndex] === 0) {\r\n continue;\r\n } else if (ignore0) {\r\n ignore0 = false;\r\n }\r\n\r\n if (compress === pieceIndex) {\r\n const separator = pieceIndex === 0 ? \"::\" : \":\";\r\n output += separator;\r\n ignore0 = true;\r\n continue;\r\n }\r\n\r\n output += address[pieceIndex].toString(16);\r\n\r\n if (pieceIndex !== 7) {\r\n output += \":\";\r\n }\r\n }\r\n\r\n return output;\r\n}\r\n\r\nfunction parseHost(input, isSpecialArg) {\r\n if (input[0] === \"[\") {\r\n if (input[input.length - 1] !== \"]\") {\r\n return failure;\r\n }\r\n\r\n return parseIPv6(input.substring(1, input.length - 1));\r\n }\r\n\r\n if (!isSpecialArg) {\r\n return parseOpaqueHost(input);\r\n }\r\n\r\n const domain = utf8PercentDecode(input);\r\n const asciiDomain = tr46.toASCII(domain, false, tr46.PROCESSING_OPTIONS.NONTRANSITIONAL, false);\r\n if (asciiDomain === null) {\r\n return failure;\r\n }\r\n\r\n if (containsForbiddenHostCodePoint(asciiDomain)) {\r\n return failure;\r\n }\r\n\r\n const ipv4Host = parseIPv4(asciiDomain);\r\n if (typeof ipv4Host === \"number\" || ipv4Host === failure) {\r\n return ipv4Host;\r\n }\r\n\r\n return asciiDomain;\r\n}\r\n\r\nfunction parseOpaqueHost(input) {\r\n if (containsForbiddenHostCodePointExcludingPercent(input)) {\r\n return failure;\r\n }\r\n\r\n let output = \"\";\r\n const decoded = punycode.ucs2.decode(input);\r\n for (let i = 0; i < decoded.length; ++i) {\r\n output += percentEncodeChar(decoded[i], isC0ControlPercentEncode);\r\n }\r\n return output;\r\n}\r\n\r\nfunction findLongestZeroSequence(arr) {\r\n let maxIdx = null;\r\n let maxLen = 1; // only find elements > 1\r\n let currStart = null;\r\n let currLen = 0;\r\n\r\n for (let i = 0; i < arr.length; ++i) {\r\n if (arr[i] !== 0) {\r\n if (currLen > maxLen) {\r\n maxIdx = currStart;\r\n maxLen = currLen;\r\n }\r\n\r\n currStart = null;\r\n currLen = 0;\r\n } else {\r\n if (currStart === null) {\r\n currStart = i;\r\n }\r\n ++currLen;\r\n }\r\n }\r\n\r\n // if trailing zeros\r\n if (currLen > maxLen) {\r\n maxIdx = currStart;\r\n maxLen = currLen;\r\n }\r\n\r\n return {\r\n idx: maxIdx,\r\n len: maxLen\r\n };\r\n}\r\n\r\nfunction serializeHost(host) {\r\n if (typeof host === \"number\") {\r\n return serializeIPv4(host);\r\n }\r\n\r\n // IPv6 serializer\r\n if (host instanceof Array) {\r\n return \"[\" + serializeIPv6(host) + \"]\";\r\n }\r\n\r\n return host;\r\n}\r\n\r\nfunction trimControlChars(url) {\r\n return url.replace(/^[\\u0000-\\u001F\\u0020]+|[\\u0000-\\u001F\\u0020]+$/g, \"\");\r\n}\r\n\r\nfunction trimTabAndNewline(url) {\r\n return url.replace(/\\u0009|\\u000A|\\u000D/g, \"\");\r\n}\r\n\r\nfunction shortenPath(url) {\r\n const path = url.path;\r\n if (path.length === 0) {\r\n return;\r\n }\r\n if (url.scheme === 
\"file\" && path.length === 1 && isNormalizedWindowsDriveLetter(path[0])) {\r\n return;\r\n }\r\n\r\n path.pop();\r\n}\r\n\r\nfunction includesCredentials(url) {\r\n return url.username !== \"\" || url.password !== \"\";\r\n}\r\n\r\nfunction cannotHaveAUsernamePasswordPort(url) {\r\n return url.host === null || url.host === \"\" || url.cannotBeABaseURL || url.scheme === \"file\";\r\n}\r\n\r\nfunction isNormalizedWindowsDriveLetter(string) {\r\n return /^[A-Za-z]:$/.test(string);\r\n}\r\n\r\nfunction URLStateMachine(input, base, encodingOverride, url, stateOverride) {\r\n this.pointer = 0;\r\n this.input = input;\r\n this.base = base || null;\r\n this.encodingOverride = encodingOverride || \"utf-8\";\r\n this.stateOverride = stateOverride;\r\n this.url = url;\r\n this.failure = false;\r\n this.parseError = false;\r\n\r\n if (!this.url) {\r\n this.url = {\r\n scheme: \"\",\r\n username: \"\",\r\n password: \"\",\r\n host: null,\r\n port: null,\r\n path: [],\r\n query: null,\r\n fragment: null,\r\n\r\n cannotBeABaseURL: false\r\n };\r\n\r\n const res = trimControlChars(this.input);\r\n if (res !== this.input) {\r\n this.parseError = true;\r\n }\r\n this.input = res;\r\n }\r\n\r\n const res = trimTabAndNewline(this.input);\r\n if (res !== this.input) {\r\n this.parseError = true;\r\n }\r\n this.input = res;\r\n\r\n this.state = stateOverride || \"scheme start\";\r\n\r\n this.buffer = \"\";\r\n this.atFlag = false;\r\n this.arrFlag = false;\r\n this.passwordTokenSeenFlag = false;\r\n\r\n this.input = punycode.ucs2.decode(this.input);\r\n\r\n for (; this.pointer <= this.input.length; ++this.pointer) {\r\n const c = this.input[this.pointer];\r\n const cStr = isNaN(c) ? undefined : String.fromCodePoint(c);\r\n\r\n // exec state machine\r\n const ret = this[\"parse \" + this.state](c, cStr);\r\n if (!ret) {\r\n break; // terminate algorithm\r\n } else if (ret === failure) {\r\n this.failure = true;\r\n break;\r\n }\r\n }\r\n}\r\n\r\nURLStateMachine.prototype[\"parse scheme start\"] = function parseSchemeStart(c, cStr) {\r\n if (isASCIIAlpha(c)) {\r\n this.buffer += cStr.toLowerCase();\r\n this.state = \"scheme\";\r\n } else if (!this.stateOverride) {\r\n this.state = \"no scheme\";\r\n --this.pointer;\r\n } else {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse scheme\"] = function parseScheme(c, cStr) {\r\n if (isASCIIAlphanumeric(c) || c === 43 || c === 45 || c === 46) {\r\n this.buffer += cStr.toLowerCase();\r\n } else if (c === 58) {\r\n if (this.stateOverride) {\r\n if (isSpecial(this.url) && !isSpecialScheme(this.buffer)) {\r\n return false;\r\n }\r\n\r\n if (!isSpecial(this.url) && isSpecialScheme(this.buffer)) {\r\n return false;\r\n }\r\n\r\n if ((includesCredentials(this.url) || this.url.port !== null) && this.buffer === \"file\") {\r\n return false;\r\n }\r\n\r\n if (this.url.scheme === \"file\" && (this.url.host === \"\" || this.url.host === null)) {\r\n return false;\r\n }\r\n }\r\n this.url.scheme = this.buffer;\r\n this.buffer = \"\";\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n if (this.url.scheme === \"file\") {\r\n if (this.input[this.pointer + 1] !== 47 || this.input[this.pointer + 2] !== 47) {\r\n this.parseError = true;\r\n }\r\n this.state = \"file\";\r\n } else if (isSpecial(this.url) && this.base !== null && this.base.scheme === this.url.scheme) {\r\n this.state = \"special relative or authority\";\r\n } else if (isSpecial(this.url)) {\r\n this.state = \"special authority slashes\";\r\n } 
else if (this.input[this.pointer + 1] === 47) {\r\n this.state = \"path or authority\";\r\n ++this.pointer;\r\n } else {\r\n this.url.cannotBeABaseURL = true;\r\n this.url.path.push(\"\");\r\n this.state = \"cannot-be-a-base-URL path\";\r\n }\r\n } else if (!this.stateOverride) {\r\n this.buffer = \"\";\r\n this.state = \"no scheme\";\r\n this.pointer = -1;\r\n } else {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse no scheme\"] = function parseNoScheme(c) {\r\n if (this.base === null || (this.base.cannotBeABaseURL && c !== 35)) {\r\n return failure;\r\n } else if (this.base.cannotBeABaseURL && c === 35) {\r\n this.url.scheme = this.base.scheme;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n this.url.fragment = \"\";\r\n this.url.cannotBeABaseURL = true;\r\n this.state = \"fragment\";\r\n } else if (this.base.scheme === \"file\") {\r\n this.state = \"file\";\r\n --this.pointer;\r\n } else {\r\n this.state = \"relative\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse special relative or authority\"] = function parseSpecialRelativeOrAuthority(c) {\r\n if (c === 47 && this.input[this.pointer + 1] === 47) {\r\n this.state = \"special authority ignore slashes\";\r\n ++this.pointer;\r\n } else {\r\n this.parseError = true;\r\n this.state = \"relative\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse path or authority\"] = function parsePathOrAuthority(c) {\r\n if (c === 47) {\r\n this.state = \"authority\";\r\n } else {\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse relative\"] = function parseRelative(c) {\r\n this.url.scheme = this.base.scheme;\r\n if (isNaN(c)) {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n } else if (c === 47) {\r\n this.state = \"relative slash\";\r\n } else if (c === 63) {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (c === 35) {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else if (isSpecial(this.url) && c === 92) {\r\n this.parseError = true;\r\n this.state = \"relative slash\";\r\n } else {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice(0, this.base.path.length - 1);\r\n\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse relative slash\"] = function parseRelativeSlash(c) {\r\n if (isSpecial(this.url) && (c === 47 || c === 92)) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"special authority ignore slashes\";\r\n } else if (c === 47) {\r\n this.state = 
\"authority\";\r\n } else {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse special authority slashes\"] = function parseSpecialAuthoritySlashes(c) {\r\n if (c === 47 && this.input[this.pointer + 1] === 47) {\r\n this.state = \"special authority ignore slashes\";\r\n ++this.pointer;\r\n } else {\r\n this.parseError = true;\r\n this.state = \"special authority ignore slashes\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse special authority ignore slashes\"] = function parseSpecialAuthorityIgnoreSlashes(c) {\r\n if (c !== 47 && c !== 92) {\r\n this.state = \"authority\";\r\n --this.pointer;\r\n } else {\r\n this.parseError = true;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse authority\"] = function parseAuthority(c, cStr) {\r\n if (c === 64) {\r\n this.parseError = true;\r\n if (this.atFlag) {\r\n this.buffer = \"%40\" + this.buffer;\r\n }\r\n this.atFlag = true;\r\n\r\n // careful, this is based on buffer and has its own pointer (this.pointer != pointer) and inner chars\r\n const len = countSymbols(this.buffer);\r\n for (let pointer = 0; pointer < len; ++pointer) {\r\n const codePoint = this.buffer.codePointAt(pointer);\r\n\r\n if (codePoint === 58 && !this.passwordTokenSeenFlag) {\r\n this.passwordTokenSeenFlag = true;\r\n continue;\r\n }\r\n const encodedCodePoints = percentEncodeChar(codePoint, isUserinfoPercentEncode);\r\n if (this.passwordTokenSeenFlag) {\r\n this.url.password += encodedCodePoints;\r\n } else {\r\n this.url.username += encodedCodePoints;\r\n }\r\n }\r\n this.buffer = \"\";\r\n } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||\r\n (isSpecial(this.url) && c === 92)) {\r\n if (this.atFlag && this.buffer === \"\") {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n this.pointer -= countSymbols(this.buffer) + 1;\r\n this.buffer = \"\";\r\n this.state = \"host\";\r\n } else {\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse hostname\"] =\r\nURLStateMachine.prototype[\"parse host\"] = function parseHostName(c, cStr) {\r\n if (this.stateOverride && this.url.scheme === \"file\") {\r\n --this.pointer;\r\n this.state = \"file host\";\r\n } else if (c === 58 && !this.arrFlag) {\r\n if (this.buffer === \"\") {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n const host = parseHost(this.buffer, isSpecial(this.url));\r\n if (host === failure) {\r\n return failure;\r\n }\r\n\r\n this.url.host = host;\r\n this.buffer = \"\";\r\n this.state = \"port\";\r\n if (this.stateOverride === \"hostname\") {\r\n return false;\r\n }\r\n } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||\r\n (isSpecial(this.url) && c === 92)) {\r\n --this.pointer;\r\n if (isSpecial(this.url) && this.buffer === \"\") {\r\n this.parseError = true;\r\n return failure;\r\n } else if (this.stateOverride && this.buffer === \"\" &&\r\n (includesCredentials(this.url) || this.url.port !== null)) {\r\n this.parseError = true;\r\n return false;\r\n }\r\n\r\n const host = parseHost(this.buffer, isSpecial(this.url));\r\n if (host === failure) {\r\n return failure;\r\n }\r\n\r\n this.url.host = host;\r\n this.buffer = \"\";\r\n this.state = \"path start\";\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n } else {\r\n if (c === 
91) {\r\n this.arrFlag = true;\r\n } else if (c === 93) {\r\n this.arrFlag = false;\r\n }\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse port\"] = function parsePort(c, cStr) {\r\n if (isASCIIDigit(c)) {\r\n this.buffer += cStr;\r\n } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||\r\n (isSpecial(this.url) && c === 92) ||\r\n this.stateOverride) {\r\n if (this.buffer !== \"\") {\r\n const port = parseInt(this.buffer);\r\n if (port > Math.pow(2, 16) - 1) {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n this.url.port = port === defaultPort(this.url.scheme) ? null : port;\r\n this.buffer = \"\";\r\n }\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n this.state = \"path start\";\r\n --this.pointer;\r\n } else {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nconst fileOtherwiseCodePoints = new Set([47, 92, 63, 35]);\r\n\r\nURLStateMachine.prototype[\"parse file\"] = function parseFile(c) {\r\n this.url.scheme = \"file\";\r\n\r\n if (c === 47 || c === 92) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"file slash\";\r\n } else if (this.base !== null && this.base.scheme === \"file\") {\r\n if (isNaN(c)) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n } else if (c === 63) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (c === 35) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else {\r\n if (this.input.length - this.pointer - 1 === 0 || // remaining consists of 0 code points\r\n !isWindowsDriveLetterCodePoints(c, this.input[this.pointer + 1]) ||\r\n (this.input.length - this.pointer - 1 >= 2 && // remaining has at least 2 code points\r\n !fileOtherwiseCodePoints.has(this.input[this.pointer + 2]))) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n shortenPath(this.url);\r\n } else {\r\n this.parseError = true;\r\n }\r\n\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n } else {\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse file slash\"] = function parseFileSlash(c) {\r\n if (c === 47 || c === 92) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"file host\";\r\n } else {\r\n if (this.base !== null && this.base.scheme === \"file\") {\r\n if (isNormalizedWindowsDriveLetterString(this.base.path[0])) {\r\n this.url.path.push(this.base.path[0]);\r\n } else {\r\n this.url.host = this.base.host;\r\n }\r\n }\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse file host\"] = function parseFileHost(c, cStr) {\r\n if (isNaN(c) || c === 47 || c === 92 || c === 63 || c === 35) {\r\n --this.pointer;\r\n if (!this.stateOverride && isWindowsDriveLetterString(this.buffer)) {\r\n this.parseError = true;\r\n this.state = \"path\";\r\n } else if (this.buffer === \"\") {\r\n this.url.host = \"\";\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n this.state = \"path start\";\r\n } else {\r\n let host = parseHost(this.buffer, isSpecial(this.url));\r\n if (host === failure) {\r\n return failure;\r\n }\r\n if (host === \"localhost\") {\r\n host = 
\"\";\r\n }\r\n this.url.host = host;\r\n\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n\r\n this.buffer = \"\";\r\n this.state = \"path start\";\r\n }\r\n } else {\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse path start\"] = function parsePathStart(c) {\r\n if (isSpecial(this.url)) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"path\";\r\n\r\n if (c !== 47 && c !== 92) {\r\n --this.pointer;\r\n }\r\n } else if (!this.stateOverride && c === 63) {\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (!this.stateOverride && c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else if (c !== undefined) {\r\n this.state = \"path\";\r\n if (c !== 47) {\r\n --this.pointer;\r\n }\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse path\"] = function parsePath(c) {\r\n if (isNaN(c) || c === 47 || (isSpecial(this.url) && c === 92) ||\r\n (!this.stateOverride && (c === 63 || c === 35))) {\r\n if (isSpecial(this.url) && c === 92) {\r\n this.parseError = true;\r\n }\r\n\r\n if (isDoubleDot(this.buffer)) {\r\n shortenPath(this.url);\r\n if (c !== 47 && !(isSpecial(this.url) && c === 92)) {\r\n this.url.path.push(\"\");\r\n }\r\n } else if (isSingleDot(this.buffer) && c !== 47 &&\r\n !(isSpecial(this.url) && c === 92)) {\r\n this.url.path.push(\"\");\r\n } else if (!isSingleDot(this.buffer)) {\r\n if (this.url.scheme === \"file\" && this.url.path.length === 0 && isWindowsDriveLetterString(this.buffer)) {\r\n if (this.url.host !== \"\" && this.url.host !== null) {\r\n this.parseError = true;\r\n this.url.host = \"\";\r\n }\r\n this.buffer = this.buffer[0] + \":\";\r\n }\r\n this.url.path.push(this.buffer);\r\n }\r\n this.buffer = \"\";\r\n if (this.url.scheme === \"file\" && (c === undefined || c === 63 || c === 35)) {\r\n while (this.url.path.length > 1 && this.url.path[0] === \"\") {\r\n this.parseError = true;\r\n this.url.path.shift();\r\n }\r\n }\r\n if (c === 63) {\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n }\r\n if (c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n }\r\n } else {\r\n // TODO: If c is not a URL code point and not \"%\", parse error.\r\n\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n this.buffer += percentEncodeChar(c, isPathPercentEncode);\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse cannot-be-a-base-URL path\"] = function parseCannotBeABaseURLPath(c) {\r\n if (c === 63) {\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else {\r\n // TODO: Add: not a URL code point\r\n if (!isNaN(c) && c !== 37) {\r\n this.parseError = true;\r\n }\r\n\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n if (!isNaN(c)) {\r\n this.url.path[0] = this.url.path[0] + percentEncodeChar(c, isC0ControlPercentEncode);\r\n }\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse query\"] = function parseQuery(c, cStr) {\r\n if (isNaN(c) || (!this.stateOverride && c === 35)) {\r\n if (!isSpecial(this.url) || this.url.scheme === \"ws\" || this.url.scheme === \"wss\") {\r\n this.encodingOverride = \"utf-8\";\r\n }\r\n\r\n const buffer = new 
Buffer(this.buffer); // TODO: Use encoding override instead\r\n for (let i = 0; i < buffer.length; ++i) {\r\n if (buffer[i] < 0x21 || buffer[i] > 0x7E || buffer[i] === 0x22 || buffer[i] === 0x23 ||\r\n buffer[i] === 0x3C || buffer[i] === 0x3E) {\r\n this.url.query += percentEncode(buffer[i]);\r\n } else {\r\n this.url.query += String.fromCodePoint(buffer[i]);\r\n }\r\n }\r\n\r\n this.buffer = \"\";\r\n if (c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n }\r\n } else {\r\n // TODO: If c is not a URL code point and not \"%\", parse error.\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse fragment\"] = function parseFragment(c) {\r\n if (isNaN(c)) { // do nothing\r\n } else if (c === 0x0) {\r\n this.parseError = true;\r\n } else {\r\n // TODO: If c is not a URL code point and not \"%\", parse error.\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n this.url.fragment += percentEncodeChar(c, isC0ControlPercentEncode);\r\n }\r\n\r\n return true;\r\n};\r\n\r\nfunction serializeURL(url, excludeFragment) {\r\n let output = url.scheme + \":\";\r\n if (url.host !== null) {\r\n output += \"//\";\r\n\r\n if (url.username !== \"\" || url.password !== \"\") {\r\n output += url.username;\r\n if (url.password !== \"\") {\r\n output += \":\" + url.password;\r\n }\r\n output += \"@\";\r\n }\r\n\r\n output += serializeHost(url.host);\r\n\r\n if (url.port !== null) {\r\n output += \":\" + url.port;\r\n }\r\n } else if (url.host === null && url.scheme === \"file\") {\r\n output += \"//\";\r\n }\r\n\r\n if (url.cannotBeABaseURL) {\r\n output += url.path[0];\r\n } else {\r\n for (const string of url.path) {\r\n output += \"/\" + string;\r\n }\r\n }\r\n\r\n if (url.query !== null) {\r\n output += \"?\" + url.query;\r\n }\r\n\r\n if (!excludeFragment && url.fragment !== null) {\r\n output += \"#\" + url.fragment;\r\n }\r\n\r\n return output;\r\n}\r\n\r\nfunction serializeOrigin(tuple) {\r\n let result = tuple.scheme + \"://\";\r\n result += serializeHost(tuple.host);\r\n\r\n if (tuple.port !== null) {\r\n result += \":\" + tuple.port;\r\n }\r\n\r\n return result;\r\n}\r\n\r\nmodule.exports.serializeURL = serializeURL;\r\n\r\nmodule.exports.serializeURLOrigin = function (url) {\r\n // https://url.spec.whatwg.org/#concept-url-origin\r\n switch (url.scheme) {\r\n case \"blob\":\r\n try {\r\n return module.exports.serializeURLOrigin(module.exports.parseURL(url.path[0]));\r\n } catch (e) {\r\n // serializing an opaque origin returns \"null\"\r\n return \"null\";\r\n }\r\n case \"ftp\":\r\n case \"gopher\":\r\n case \"http\":\r\n case \"https\":\r\n case \"ws\":\r\n case \"wss\":\r\n return serializeOrigin({\r\n scheme: url.scheme,\r\n host: url.host,\r\n port: url.port\r\n });\r\n case \"file\":\r\n // spec says \"exercise to the reader\", chrome says \"file://\"\r\n return \"file://\";\r\n default:\r\n // serializing an opaque origin returns \"null\"\r\n return \"null\";\r\n }\r\n};\r\n\r\nmodule.exports.basicURLParse = function (input, options) {\r\n if (options === undefined) {\r\n options = {};\r\n }\r\n\r\n const usm = new URLStateMachine(input, options.baseURL, options.encodingOverride, options.url, options.stateOverride);\r\n if (usm.failure) {\r\n return \"failure\";\r\n 
}\r\n\r\n return usm.url;\r\n};\r\n\r\nmodule.exports.setTheUsername = function (url, username) {\r\n url.username = \"\";\r\n const decoded = punycode.ucs2.decode(username);\r\n for (let i = 0; i < decoded.length; ++i) {\r\n url.username += percentEncodeChar(decoded[i], isUserinfoPercentEncode);\r\n }\r\n};\r\n\r\nmodule.exports.setThePassword = function (url, password) {\r\n url.password = \"\";\r\n const decoded = punycode.ucs2.decode(password);\r\n for (let i = 0; i < decoded.length; ++i) {\r\n url.password += percentEncodeChar(decoded[i], isUserinfoPercentEncode);\r\n }\r\n};\r\n\r\nmodule.exports.serializeHost = serializeHost;\r\n\r\nmodule.exports.cannotHaveAUsernamePasswordPort = cannotHaveAUsernamePasswordPort;\r\n\r\nmodule.exports.serializeInteger = function (integer) {\r\n return String(integer);\r\n};\r\n\r\nmodule.exports.parseURL = function (input, options) {\r\n if (options === undefined) {\r\n options = {};\r\n }\r\n\r\n // We don't handle blobs, so this just delegates:\r\n return module.exports.basicURLParse(input, { baseURL: options.baseURL, encodingOverride: options.encodingOverride });\r\n};\r\n","\"use strict\";\n\nmodule.exports.mixin = function mixin(target, source) {\n const keys = Object.getOwnPropertyNames(source);\n for (let i = 0; i < keys.length; ++i) {\n Object.defineProperty(target, keys[i], Object.getOwnPropertyDescriptor(source, keys[i]));\n }\n};\n\nmodule.exports.wrapperSymbol = Symbol(\"wrapper\");\nmodule.exports.implSymbol = Symbol(\"impl\");\n\nmodule.exports.wrapperForImpl = function (impl) {\n return impl[module.exports.wrapperSymbol];\n};\n\nmodule.exports.implForWrapper = function (wrapper) {\n return wrapper[module.exports.implSymbol];\n};\n\n","// Returns a wrapper function that returns a wrapped callback\n// The wrapper function should do some stuff, and return a\n// presumably different callback function.\n// This makes sure that own properties are retained, so that\n// decorations and such are not lost along the way.\nmodule.exports = wrappy\nfunction wrappy (fn, cb) {\n if (fn && cb) return wrappy(fn)(cb)\n\n if (typeof fn !== 'function')\n throw new TypeError('need wrapper function')\n\n Object.keys(fn).forEach(function (k) {\n wrapper[k] = fn[k]\n })\n\n return wrapper\n\n function wrapper() {\n var args = new Array(arguments.length)\n for (var i = 0; i < args.length; i++) {\n args[i] = arguments[i]\n }\n var ret = fn.apply(this, args)\n var cb = args[args.length-1]\n if (typeof ret === 'function' && ret !== cb) {\n Object.keys(cb).forEach(function (k) {\n ret[k] = cb[k]\n })\n }\n return ret\n }\n}\n","'use strict'\nmodule.exports = function (Yallist) {\n Yallist.prototype[Symbol.iterator] = function* () {\n for (let walker = this.head; walker; walker = walker.next) {\n yield walker.value\n }\n }\n}\n","'use strict'\nmodule.exports = Yallist\n\nYallist.Node = Node\nYallist.create = Yallist\n\nfunction Yallist (list) {\n var self = this\n if (!(self instanceof Yallist)) {\n self = new Yallist()\n }\n\n self.tail = null\n self.head = null\n self.length = 0\n\n if (list && typeof list.forEach === 'function') {\n list.forEach(function (item) {\n self.push(item)\n })\n } else if (arguments.length > 0) {\n for (var i = 0, l = arguments.length; i < l; i++) {\n self.push(arguments[i])\n }\n }\n\n return self\n}\n\nYallist.prototype.removeNode = function (node) {\n if (node.list !== this) {\n throw new Error('removing node which does not belong to this list')\n }\n\n var next = node.next\n var prev = node.prev\n\n if (next) {\n next.prev = prev\n 
}\n\n if (prev) {\n prev.next = next\n }\n\n if (node === this.head) {\n this.head = next\n }\n if (node === this.tail) {\n this.tail = prev\n }\n\n node.list.length--\n node.next = null\n node.prev = null\n node.list = null\n\n return next\n}\n\nYallist.prototype.unshiftNode = function (node) {\n if (node === this.head) {\n return\n }\n\n if (node.list) {\n node.list.removeNode(node)\n }\n\n var head = this.head\n node.list = this\n node.next = head\n if (head) {\n head.prev = node\n }\n\n this.head = node\n if (!this.tail) {\n this.tail = node\n }\n this.length++\n}\n\nYallist.prototype.pushNode = function (node) {\n if (node === this.tail) {\n return\n }\n\n if (node.list) {\n node.list.removeNode(node)\n }\n\n var tail = this.tail\n node.list = this\n node.prev = tail\n if (tail) {\n tail.next = node\n }\n\n this.tail = node\n if (!this.head) {\n this.head = node\n }\n this.length++\n}\n\nYallist.prototype.push = function () {\n for (var i = 0, l = arguments.length; i < l; i++) {\n push(this, arguments[i])\n }\n return this.length\n}\n\nYallist.prototype.unshift = function () {\n for (var i = 0, l = arguments.length; i < l; i++) {\n unshift(this, arguments[i])\n }\n return this.length\n}\n\nYallist.prototype.pop = function () {\n if (!this.tail) {\n return undefined\n }\n\n var res = this.tail.value\n this.tail = this.tail.prev\n if (this.tail) {\n this.tail.next = null\n } else {\n this.head = null\n }\n this.length--\n return res\n}\n\nYallist.prototype.shift = function () {\n if (!this.head) {\n return undefined\n }\n\n var res = this.head.value\n this.head = this.head.next\n if (this.head) {\n this.head.prev = null\n } else {\n this.tail = null\n }\n this.length--\n return res\n}\n\nYallist.prototype.forEach = function (fn, thisp) {\n thisp = thisp || this\n for (var walker = this.head, i = 0; walker !== null; i++) {\n fn.call(thisp, walker.value, i, this)\n walker = walker.next\n }\n}\n\nYallist.prototype.forEachReverse = function (fn, thisp) {\n thisp = thisp || this\n for (var walker = this.tail, i = this.length - 1; walker !== null; i--) {\n fn.call(thisp, walker.value, i, this)\n walker = walker.prev\n }\n}\n\nYallist.prototype.get = function (n) {\n for (var i = 0, walker = this.head; walker !== null && i < n; i++) {\n // abort out of the list early if we hit a cycle\n walker = walker.next\n }\n if (i === n && walker !== null) {\n return walker.value\n }\n}\n\nYallist.prototype.getReverse = function (n) {\n for (var i = 0, walker = this.tail; walker !== null && i < n; i++) {\n // abort out of the list early if we hit a cycle\n walker = walker.prev\n }\n if (i === n && walker !== null) {\n return walker.value\n }\n}\n\nYallist.prototype.map = function (fn, thisp) {\n thisp = thisp || this\n var res = new Yallist()\n for (var walker = this.head; walker !== null;) {\n res.push(fn.call(thisp, walker.value, this))\n walker = walker.next\n }\n return res\n}\n\nYallist.prototype.mapReverse = function (fn, thisp) {\n thisp = thisp || this\n var res = new Yallist()\n for (var walker = this.tail; walker !== null;) {\n res.push(fn.call(thisp, walker.value, this))\n walker = walker.prev\n }\n return res\n}\n\nYallist.prototype.reduce = function (fn, initial) {\n var acc\n var walker = this.head\n if (arguments.length > 1) {\n acc = initial\n } else if (this.head) {\n walker = this.head.next\n acc = this.head.value\n } else {\n throw new TypeError('Reduce of empty list with no initial value')\n }\n\n for (var i = 0; walker !== null; i++) {\n acc = fn(acc, walker.value, i)\n walker = 
walker.next\n }\n\n return acc\n}\n\nYallist.prototype.reduceReverse = function (fn, initial) {\n var acc\n var walker = this.tail\n if (arguments.length > 1) {\n acc = initial\n } else if (this.tail) {\n walker = this.tail.prev\n acc = this.tail.value\n } else {\n throw new TypeError('Reduce of empty list with no initial value')\n }\n\n for (var i = this.length - 1; walker !== null; i--) {\n acc = fn(acc, walker.value, i)\n walker = walker.prev\n }\n\n return acc\n}\n\nYallist.prototype.toArray = function () {\n var arr = new Array(this.length)\n for (var i = 0, walker = this.head; walker !== null; i++) {\n arr[i] = walker.value\n walker = walker.next\n }\n return arr\n}\n\nYallist.prototype.toArrayReverse = function () {\n var arr = new Array(this.length)\n for (var i = 0, walker = this.tail; walker !== null; i++) {\n arr[i] = walker.value\n walker = walker.prev\n }\n return arr\n}\n\nYallist.prototype.slice = function (from, to) {\n to = to || this.length\n if (to < 0) {\n to += this.length\n }\n from = from || 0\n if (from < 0) {\n from += this.length\n }\n var ret = new Yallist()\n if (to < from || to < 0) {\n return ret\n }\n if (from < 0) {\n from = 0\n }\n if (to > this.length) {\n to = this.length\n }\n for (var i = 0, walker = this.head; walker !== null && i < from; i++) {\n walker = walker.next\n }\n for (; walker !== null && i < to; i++, walker = walker.next) {\n ret.push(walker.value)\n }\n return ret\n}\n\nYallist.prototype.sliceReverse = function (from, to) {\n to = to || this.length\n if (to < 0) {\n to += this.length\n }\n from = from || 0\n if (from < 0) {\n from += this.length\n }\n var ret = new Yallist()\n if (to < from || to < 0) {\n return ret\n }\n if (from < 0) {\n from = 0\n }\n if (to > this.length) {\n to = this.length\n }\n for (var i = this.length, walker = this.tail; walker !== null && i > to; i--) {\n walker = walker.prev\n }\n for (; walker !== null && i > from; i--, walker = walker.prev) {\n ret.push(walker.value)\n }\n return ret\n}\n\nYallist.prototype.splice = function (start, deleteCount, ...nodes) {\n if (start > this.length) {\n start = this.length - 1\n }\n if (start < 0) {\n start = this.length + start;\n }\n\n for (var i = 0, walker = this.head; walker !== null && i < start; i++) {\n walker = walker.next\n }\n\n var ret = []\n for (var i = 0; walker && i < deleteCount; i++) {\n ret.push(walker.value)\n walker = this.removeNode(walker)\n }\n if (walker === null) {\n walker = this.tail\n }\n\n if (walker !== this.head && walker !== this.tail) {\n walker = walker.prev\n }\n\n for (var i = 0; i < nodes.length; i++) {\n walker = insert(this, walker, nodes[i])\n }\n return ret;\n}\n\nYallist.prototype.reverse = function () {\n var head = this.head\n var tail = this.tail\n for (var walker = head; walker !== null; walker = walker.prev) {\n var p = walker.prev\n walker.prev = walker.next\n walker.next = p\n }\n this.head = tail\n this.tail = head\n return this\n}\n\nfunction insert (self, node, value) {\n var inserted = node === self.head ?\n new Node(value, null, node, self) :\n new Node(value, node, node.next, self)\n\n if (inserted.next === null) {\n self.tail = inserted\n }\n if (inserted.prev === null) {\n self.head = inserted\n }\n\n self.length++\n\n return inserted\n}\n\nfunction push (self, item) {\n self.tail = new Node(item, self.tail, null, self)\n if (!self.head) {\n self.head = self.tail\n }\n self.length++\n}\n\nfunction unshift (self, item) {\n self.head = new Node(item, null, self.head, self)\n if (!self.tail) {\n self.tail = self.head\n 
}\n self.length++\n}\n\nfunction Node (value, prev, next, list) {\n if (!(this instanceof Node)) {\n return new Node(value, prev, next, list)\n }\n\n this.list = list\n this.value = value\n\n if (prev) {\n prev.next = this\n this.prev = prev\n } else {\n this.prev = null\n }\n\n if (next) {\n next.prev = this\n this.next = next\n } else {\n this.next = null\n }\n}\n\ntry {\n // add if support for Symbol.iterator is present\n require('./iterator.js')(Yallist)\n} catch (er) {}\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar Scalar = require('../nodes/Scalar.js');\nvar resolveBlockMap = require('./resolve-block-map.js');\nvar resolveBlockSeq = require('./resolve-block-seq.js');\nvar resolveFlowCollection = require('./resolve-flow-collection.js');\n\nfunction composeCollection(CN, ctx, token, tagToken, onError) {\n let coll;\n switch (token.type) {\n case 'block-map': {\n coll = resolveBlockMap.resolveBlockMap(CN, ctx, token, onError);\n break;\n }\n case 'block-seq': {\n coll = resolveBlockSeq.resolveBlockSeq(CN, ctx, token, onError);\n break;\n }\n case 'flow-collection': {\n coll = resolveFlowCollection.resolveFlowCollection(CN, ctx, token, onError);\n break;\n }\n }\n if (!tagToken)\n return coll;\n const tagName = ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg));\n if (!tagName)\n return coll;\n // Cast needed due to: https://github.com/Microsoft/TypeScript/issues/3841\n const Coll = coll.constructor;\n if (tagName === '!' || tagName === Coll.tagName) {\n coll.tag = Coll.tagName;\n return coll;\n }\n const expType = Node.isMap(coll) ? 'map' : 'seq';\n let tag = ctx.schema.tags.find(t => t.collection === expType && t.tag === tagName);\n if (!tag) {\n const kt = ctx.schema.knownTags[tagName];\n if (kt && kt.collection === expType) {\n ctx.schema.tags.push(Object.assign({}, kt, { default: false }));\n tag = kt;\n }\n else {\n onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, true);\n coll.tag = tagName;\n return coll;\n }\n }\n const res = tag.resolve(coll, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg), ctx.options);\n const node = Node.isNode(res)\n ? res\n : new Scalar.Scalar(res);\n node.range = coll.range;\n node.tag = tagName;\n if (tag?.format)\n node.format = tag.format;\n return node;\n}\n\nexports.composeCollection = composeCollection;\n","'use strict';\n\nvar Document = require('../doc/Document.js');\nvar composeNode = require('./compose-node.js');\nvar resolveEnd = require('./resolve-end.js');\nvar resolveProps = require('./resolve-props.js');\n\nfunction composeDoc(options, directives, { offset, start, value, end }, onError) {\n const opts = Object.assign({ _directives: directives }, options);\n const doc = new Document.Document(undefined, opts);\n const ctx = {\n atRoot: true,\n directives: doc.directives,\n options: doc.options,\n schema: doc.schema\n };\n const props = resolveProps.resolveProps(start, {\n indicator: 'doc-start',\n next: value ?? end?.[0],\n offset,\n onError,\n startOnNewline: true\n });\n if (props.found) {\n doc.directives.docStart = true;\n if (value &&\n (value.type === 'block-map' || value.type === 'block-seq') &&\n !props.hasNewline)\n onError(props.end, 'MISSING_CHAR', 'Block collection cannot start on same line with directives-end marker');\n }\n doc.contents = value\n ? 
composeNode.composeNode(ctx, value, props, onError)\n : composeNode.composeEmptyNode(ctx, props.end, start, null, props, onError);\n const contentEnd = doc.contents.range[2];\n const re = resolveEnd.resolveEnd(end, contentEnd, false, onError);\n if (re.comment)\n doc.comment = re.comment;\n doc.range = [offset, contentEnd, re.offset];\n return doc;\n}\n\nexports.composeDoc = composeDoc;\n","'use strict';\n\nvar Alias = require('../nodes/Alias.js');\nvar composeCollection = require('./compose-collection.js');\nvar composeScalar = require('./compose-scalar.js');\nvar resolveEnd = require('./resolve-end.js');\nvar utilEmptyScalarPosition = require('./util-empty-scalar-position.js');\n\nconst CN = { composeNode, composeEmptyNode };\nfunction composeNode(ctx, token, props, onError) {\n const { spaceBefore, comment, anchor, tag } = props;\n let node;\n let isSrcToken = true;\n switch (token.type) {\n case 'alias':\n node = composeAlias(ctx, token, onError);\n if (anchor || tag)\n onError(token, 'ALIAS_PROPS', 'An alias node must not specify any properties');\n break;\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar':\n case 'block-scalar':\n node = composeScalar.composeScalar(ctx, token, tag, onError);\n if (anchor)\n node.anchor = anchor.source.substring(1);\n break;\n case 'block-map':\n case 'block-seq':\n case 'flow-collection':\n node = composeCollection.composeCollection(CN, ctx, token, tag, onError);\n if (anchor)\n node.anchor = anchor.source.substring(1);\n break;\n default: {\n const message = token.type === 'error'\n ? token.message\n : `Unsupported token (type: ${token.type})`;\n onError(token, 'UNEXPECTED_TOKEN', message);\n node = composeEmptyNode(ctx, token.offset, undefined, null, props, onError);\n isSrcToken = false;\n }\n }\n if (anchor && node.anchor === '')\n onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string');\n if (spaceBefore)\n node.spaceBefore = true;\n if (comment) {\n if (token.type === 'scalar' && token.source === '')\n node.comment = comment;\n else\n node.commentBefore = comment;\n }\n // @ts-expect-error Type checking misses meaning of isSrcToken\n if (ctx.options.keepSourceTokens && isSrcToken)\n node.srcToken = token;\n return node;\n}\nfunction composeEmptyNode(ctx, offset, before, pos, { spaceBefore, comment, anchor, tag }, onError) {\n const token = {\n type: 'scalar',\n offset: utilEmptyScalarPosition.emptyScalarPosition(offset, before, pos),\n indent: -1,\n source: ''\n };\n const node = composeScalar.composeScalar(ctx, token, tag, onError);\n if (anchor) {\n node.anchor = anchor.source.substring(1);\n if (node.anchor === '')\n onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string');\n }\n if (spaceBefore)\n node.spaceBefore = true;\n if (comment)\n node.comment = comment;\n return node;\n}\nfunction composeAlias({ options }, { offset, source, end }, onError) {\n const alias = new Alias.Alias(source.substring(1));\n if (alias.source === '')\n onError(offset, 'BAD_ALIAS', 'Alias cannot be an empty string');\n if (alias.source.endsWith(':'))\n onError(offset + source.length - 1, 'BAD_ALIAS', 'Alias ending in : is ambiguous', true);\n const valueEnd = offset + source.length;\n const re = resolveEnd.resolveEnd(end, valueEnd, options.strict, onError);\n alias.range = [offset, valueEnd, re.offset];\n if (re.comment)\n alias.comment = re.comment;\n return alias;\n}\n\nexports.composeEmptyNode = composeEmptyNode;\nexports.composeNode = composeNode;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar 
Scalar = require('../nodes/Scalar.js');\nvar resolveBlockScalar = require('./resolve-block-scalar.js');\nvar resolveFlowScalar = require('./resolve-flow-scalar.js');\n\nfunction composeScalar(ctx, token, tagToken, onError) {\n const { value, type, comment, range } = token.type === 'block-scalar'\n ? resolveBlockScalar.resolveBlockScalar(token, ctx.options.strict, onError)\n : resolveFlowScalar.resolveFlowScalar(token, ctx.options.strict, onError);\n const tagName = tagToken\n ? ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg))\n : null;\n const tag = tagToken && tagName\n ? findScalarTagByName(ctx.schema, value, tagName, tagToken, onError)\n : token.type === 'scalar'\n ? findScalarTagByTest(ctx, value, token, onError)\n : ctx.schema[Node.SCALAR];\n let scalar;\n try {\n const res = tag.resolve(value, msg => onError(tagToken ?? token, 'TAG_RESOLVE_FAILED', msg), ctx.options);\n scalar = Node.isScalar(res) ? res : new Scalar.Scalar(res);\n }\n catch (error) {\n const msg = error instanceof Error ? error.message : String(error);\n onError(tagToken ?? token, 'TAG_RESOLVE_FAILED', msg);\n scalar = new Scalar.Scalar(value);\n }\n scalar.range = range;\n scalar.source = value;\n if (type)\n scalar.type = type;\n if (tagName)\n scalar.tag = tagName;\n if (tag.format)\n scalar.format = tag.format;\n if (comment)\n scalar.comment = comment;\n return scalar;\n}\nfunction findScalarTagByName(schema, value, tagName, tagToken, onError) {\n if (tagName === '!')\n return schema[Node.SCALAR]; // non-specific tag\n const matchWithTest = [];\n for (const tag of schema.tags) {\n if (!tag.collection && tag.tag === tagName) {\n if (tag.default && tag.test)\n matchWithTest.push(tag);\n else\n return tag;\n }\n }\n for (const tag of matchWithTest)\n if (tag.test?.test(value))\n return tag;\n const kt = schema.knownTags[tagName];\n if (kt && !kt.collection) {\n // Ensure that the known tag is available for stringifying,\n // but does not get used by default.\n schema.tags.push(Object.assign({}, kt, { default: false, test: undefined }));\n return kt;\n }\n onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, tagName !== 'tag:yaml.org,2002:str');\n return schema[Node.SCALAR];\n}\nfunction findScalarTagByTest({ directives, schema }, value, token, onError) {\n const tag = schema.tags.find(tag => tag.default && tag.test?.test(value)) || schema[Node.SCALAR];\n if (schema.compat) {\n const compat = schema.compat.find(tag => tag.default && tag.test?.test(value)) ??\n schema[Node.SCALAR];\n if (tag.tag !== compat.tag) {\n const ts = directives.tagString(tag.tag);\n const cs = directives.tagString(compat.tag);\n const msg = `Value may be parsed as either ${ts} or ${cs}`;\n onError(token, 'TAG_RESOLVE_FAILED', msg, true);\n }\n }\n return tag;\n}\n\nexports.composeScalar = composeScalar;\n","'use strict';\n\nvar directives = require('../doc/directives.js');\nvar Document = require('../doc/Document.js');\nvar errors = require('../errors.js');\nvar Node = require('../nodes/Node.js');\nvar composeDoc = require('./compose-doc.js');\nvar resolveEnd = require('./resolve-end.js');\n\nfunction getErrorPos(src) {\n if (typeof src === 'number')\n return [src, src + 1];\n if (Array.isArray(src))\n return src.length === 2 ? src : [src[0], src[1]];\n const { offset, source } = src;\n return [offset, offset + (typeof source === 'string' ? 
source.length : 1)];\n}\nfunction parsePrelude(prelude) {\n let comment = '';\n let atComment = false;\n let afterEmptyLine = false;\n for (let i = 0; i < prelude.length; ++i) {\n const source = prelude[i];\n switch (source[0]) {\n case '#':\n comment +=\n (comment === '' ? '' : afterEmptyLine ? '\\n\\n' : '\\n') +\n (source.substring(1) || ' ');\n atComment = true;\n afterEmptyLine = false;\n break;\n case '%':\n if (prelude[i + 1]?.[0] !== '#')\n i += 1;\n atComment = false;\n break;\n default:\n // This may be wrong after doc-end, but in that case it doesn't matter\n if (!atComment)\n afterEmptyLine = true;\n atComment = false;\n }\n }\n return { comment, afterEmptyLine };\n}\n/**\n * Compose a stream of CST nodes into a stream of YAML Documents.\n *\n * ```ts\n * import { Composer, Parser } from 'yaml'\n *\n * const src: string = ...\n * const tokens = new Parser().parse(src)\n * const docs = new Composer().compose(tokens)\n * ```\n */\nclass Composer {\n constructor(options = {}) {\n this.doc = null;\n this.atDirectives = false;\n this.prelude = [];\n this.errors = [];\n this.warnings = [];\n this.onError = (source, code, message, warning) => {\n const pos = getErrorPos(source);\n if (warning)\n this.warnings.push(new errors.YAMLWarning(pos, code, message));\n else\n this.errors.push(new errors.YAMLParseError(pos, code, message));\n };\n // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing\n this.directives = new directives.Directives({ version: options.version || '1.2' });\n this.options = options;\n }\n decorate(doc, afterDoc) {\n const { comment, afterEmptyLine } = parsePrelude(this.prelude);\n //console.log({ dc: doc.comment, prelude, comment })\n if (comment) {\n const dc = doc.contents;\n if (afterDoc) {\n doc.comment = doc.comment ? `${doc.comment}\\n${comment}` : comment;\n }\n else if (afterEmptyLine || doc.directives.docStart || !dc) {\n doc.commentBefore = comment;\n }\n else if (Node.isCollection(dc) && !dc.flow && dc.items.length > 0) {\n let it = dc.items[0];\n if (Node.isPair(it))\n it = it.key;\n const cb = it.commentBefore;\n it.commentBefore = cb ? `${comment}\\n${cb}` : comment;\n }\n else {\n const cb = dc.commentBefore;\n dc.commentBefore = cb ? `${comment}\\n${cb}` : comment;\n }\n }\n if (afterDoc) {\n Array.prototype.push.apply(doc.errors, this.errors);\n Array.prototype.push.apply(doc.warnings, this.warnings);\n }\n else {\n doc.errors = this.errors;\n doc.warnings = this.warnings;\n }\n this.prelude = [];\n this.errors = [];\n this.warnings = [];\n }\n /**\n * Current stream status information.\n *\n * Mostly useful at the end of input for an empty stream.\n */\n streamInfo() {\n return {\n comment: parsePrelude(this.prelude).comment,\n directives: this.directives,\n errors: this.errors,\n warnings: this.warnings\n };\n }\n /**\n * Compose tokens into documents.\n *\n * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document.\n * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly.\n */\n *compose(tokens, forceDoc = false, endOffset = -1) {\n for (const token of tokens)\n yield* this.next(token);\n yield* this.end(forceDoc, endOffset);\n }\n /** Advance the composer by one CST token. 
*/\n *next(token) {\n if (process.env.LOG_STREAM)\n console.dir(token, { depth: null });\n switch (token.type) {\n case 'directive':\n this.directives.add(token.source, (offset, message, warning) => {\n const pos = getErrorPos(token);\n pos[0] += offset;\n this.onError(pos, 'BAD_DIRECTIVE', message, warning);\n });\n this.prelude.push(token.source);\n this.atDirectives = true;\n break;\n case 'document': {\n const doc = composeDoc.composeDoc(this.options, this.directives, token, this.onError);\n if (this.atDirectives && !doc.directives.docStart)\n this.onError(token, 'MISSING_CHAR', 'Missing directives-end/doc-start indicator line');\n this.decorate(doc, false);\n if (this.doc)\n yield this.doc;\n this.doc = doc;\n this.atDirectives = false;\n break;\n }\n case 'byte-order-mark':\n case 'space':\n break;\n case 'comment':\n case 'newline':\n this.prelude.push(token.source);\n break;\n case 'error': {\n const msg = token.source\n ? `${token.message}: ${JSON.stringify(token.source)}`\n : token.message;\n const error = new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg);\n if (this.atDirectives || !this.doc)\n this.errors.push(error);\n else\n this.doc.errors.push(error);\n break;\n }\n case 'doc-end': {\n if (!this.doc) {\n const msg = 'Unexpected doc-end without preceding document';\n this.errors.push(new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg));\n break;\n }\n this.doc.directives.docEnd = true;\n const end = resolveEnd.resolveEnd(token.end, token.offset + token.source.length, this.doc.options.strict, this.onError);\n this.decorate(this.doc, true);\n if (end.comment) {\n const dc = this.doc.comment;\n this.doc.comment = dc ? `${dc}\\n${end.comment}` : end.comment;\n }\n this.doc.range[2] = end.offset;\n break;\n }\n default:\n this.errors.push(new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', `Unsupported token ${token.type}`));\n }\n }\n /**\n * Call at end of input to yield any remaining document.\n *\n * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document.\n * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly.\n */\n *end(forceDoc = false, endOffset = -1) {\n if (this.doc) {\n this.decorate(this.doc, true);\n yield this.doc;\n this.doc = null;\n }\n else if (forceDoc) {\n const opts = Object.assign({ _directives: this.directives }, this.options);\n const doc = new Document.Document(undefined, opts);\n if (this.atDirectives)\n this.onError(endOffset, 'MISSING_CHAR', 'Missing directives-end indicator line');\n doc.range = [0, endOffset, endOffset];\n this.decorate(doc, false);\n yield doc;\n }\n }\n}\n\nexports.Composer = Composer;\n","'use strict';\n\nvar Pair = require('../nodes/Pair.js');\nvar YAMLMap = require('../nodes/YAMLMap.js');\nvar resolveProps = require('./resolve-props.js');\nvar utilContainsNewline = require('./util-contains-newline.js');\nvar utilFlowIndentCheck = require('./util-flow-indent-check.js');\nvar utilMapIncludes = require('./util-map-includes.js');\n\nconst startColMsg = 'All mapping items must start at the same column';\nfunction resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError) {\n const map = new YAMLMap.YAMLMap(ctx.schema);\n if (ctx.atRoot)\n ctx.atRoot = false;\n let offset = bm.offset;\n for (const collItem of bm.items) {\n const { start, key, sep, value } = collItem;\n // key properties\n 
const keyProps = resolveProps.resolveProps(start, {\n indicator: 'explicit-key-ind',\n next: key ?? sep?.[0],\n offset,\n onError,\n startOnNewline: true\n });\n const implicitKey = !keyProps.found;\n if (implicitKey) {\n if (key) {\n if (key.type === 'block-seq')\n onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'A block sequence may not be used as an implicit map key');\n else if ('indent' in key && key.indent !== bm.indent)\n onError(offset, 'BAD_INDENT', startColMsg);\n }\n if (!keyProps.anchor && !keyProps.tag && !sep) {\n // TODO: assert being at last item?\n if (keyProps.comment) {\n if (map.comment)\n map.comment += '\\n' + keyProps.comment;\n else\n map.comment = keyProps.comment;\n }\n continue;\n }\n if (keyProps.hasNewlineAfterProp || utilContainsNewline.containsNewline(key)) {\n onError(key ?? start[start.length - 1], 'MULTILINE_IMPLICIT_KEY', 'Implicit keys need to be on a single line');\n }\n }\n else if (keyProps.found?.indent !== bm.indent) {\n onError(offset, 'BAD_INDENT', startColMsg);\n }\n // key value\n const keyStart = keyProps.end;\n const keyNode = key\n ? composeNode(ctx, key, keyProps, onError)\n : composeEmptyNode(ctx, keyStart, start, null, keyProps, onError);\n if (ctx.schema.compat)\n utilFlowIndentCheck.flowIndentCheck(bm.indent, key, onError);\n if (utilMapIncludes.mapIncludes(ctx, map.items, keyNode))\n onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique');\n // value properties\n const valueProps = resolveProps.resolveProps(sep ?? [], {\n indicator: 'map-value-ind',\n next: value,\n offset: keyNode.range[2],\n onError,\n startOnNewline: !key || key.type === 'block-scalar'\n });\n offset = valueProps.end;\n if (valueProps.found) {\n if (implicitKey) {\n if (value?.type === 'block-map' && !valueProps.hasNewline)\n onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'Nested mappings are not allowed in compact mappings');\n if (ctx.options.strict &&\n keyProps.start < valueProps.found.offset - 1024)\n onError(keyNode.range, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit block mapping key');\n }\n // value value\n const valueNode = value\n ? composeNode(ctx, value, valueProps, onError)\n : composeEmptyNode(ctx, offset, sep, null, valueProps, onError);\n if (ctx.schema.compat)\n utilFlowIndentCheck.flowIndentCheck(bm.indent, value, onError);\n offset = valueNode.range[2];\n const pair = new Pair.Pair(keyNode, valueNode);\n if (ctx.options.keepSourceTokens)\n pair.srcToken = collItem;\n map.items.push(pair);\n }\n else {\n // key with no value\n if (implicitKey)\n onError(keyNode.range, 'MISSING_CHAR', 'Implicit map keys need to be followed by map values');\n if (valueProps.comment) {\n if (keyNode.comment)\n keyNode.comment += '\\n' + valueProps.comment;\n else\n keyNode.comment = valueProps.comment;\n }\n const pair = new Pair.Pair(keyNode);\n if (ctx.options.keepSourceTokens)\n pair.srcToken = collItem;\n map.items.push(pair);\n }\n }\n map.range = [bm.offset, offset, offset];\n return map;\n}\n\nexports.resolveBlockMap = resolveBlockMap;\n","'use strict';\n\nvar Scalar = require('../nodes/Scalar.js');\n\nfunction resolveBlockScalar(scalar, strict, onError) {\n const start = scalar.offset;\n const header = parseBlockScalarHeader(scalar, strict, onError);\n if (!header)\n return { value: '', type: null, comment: '', range: [start, start, start] };\n const type = header.mode === '>' ? Scalar.Scalar.BLOCK_FOLDED : Scalar.Scalar.BLOCK_LITERAL;\n const lines = scalar.source ? 
splitLines(scalar.source) : [];\n // determine the end of content & start of chomping\n let chompStart = lines.length;\n for (let i = lines.length - 1; i >= 0; --i) {\n const content = lines[i][1];\n if (content === '' || content === '\\r')\n chompStart = i;\n else\n break;\n }\n // shortcut for empty contents\n if (chompStart === 0) {\n const value = header.chomp === '+' && lines.length > 0\n ? '\\n'.repeat(Math.max(1, lines.length - 1))\n : '';\n let end = start + header.length;\n if (scalar.source)\n end += scalar.source.length;\n return { value, type, comment: header.comment, range: [start, end, end] };\n }\n // find the indentation level to trim from start\n let trimIndent = scalar.indent + header.indent;\n let offset = scalar.offset + header.length;\n let contentStart = 0;\n for (let i = 0; i < chompStart; ++i) {\n const [indent, content] = lines[i];\n if (content === '' || content === '\\r') {\n if (header.indent === 0 && indent.length > trimIndent)\n trimIndent = indent.length;\n }\n else {\n if (indent.length < trimIndent) {\n const message = 'Block scalars with more-indented leading empty lines must use an explicit indentation indicator';\n onError(offset + indent.length, 'MISSING_CHAR', message);\n }\n if (header.indent === 0)\n trimIndent = indent.length;\n contentStart = i;\n break;\n }\n offset += indent.length + content.length + 1;\n }\n // include trailing more-indented empty lines in content\n for (let i = lines.length - 1; i >= chompStart; --i) {\n if (lines[i][0].length > trimIndent)\n chompStart = i + 1;\n }\n let value = '';\n let sep = '';\n let prevMoreIndented = false;\n // leading whitespace is kept intact\n for (let i = 0; i < contentStart; ++i)\n value += lines[i][0].slice(trimIndent) + '\\n';\n for (let i = contentStart; i < chompStart; ++i) {\n let [indent, content] = lines[i];\n offset += indent.length + content.length + 1;\n const crlf = content[content.length - 1] === '\\r';\n if (crlf)\n content = content.slice(0, -1);\n /* istanbul ignore if already caught in lexer */\n if (content && indent.length < trimIndent) {\n const src = header.indent\n ? 'explicit indentation indicator'\n : 'first line';\n const message = `Block scalar lines must not be less indented than their ${src}`;\n onError(offset - content.length - (crlf ? 
2 : 1), 'BAD_INDENT', message);\n indent = '';\n }\n if (type === Scalar.Scalar.BLOCK_LITERAL) {\n value += sep + indent.slice(trimIndent) + content;\n sep = '\\n';\n }\n else if (indent.length > trimIndent || content[0] === '\\t') {\n // more-indented content within a folded block\n if (sep === ' ')\n sep = '\\n';\n else if (!prevMoreIndented && sep === '\\n')\n sep = '\\n\\n';\n value += sep + indent.slice(trimIndent) + content;\n sep = '\\n';\n prevMoreIndented = true;\n }\n else if (content === '') {\n // empty line\n if (sep === '\\n')\n value += '\\n';\n else\n sep = '\\n';\n }\n else {\n value += sep + content;\n sep = ' ';\n prevMoreIndented = false;\n }\n }\n switch (header.chomp) {\n case '-':\n break;\n case '+':\n for (let i = chompStart; i < lines.length; ++i)\n value += '\\n' + lines[i][0].slice(trimIndent);\n if (value[value.length - 1] !== '\\n')\n value += '\\n';\n break;\n default:\n value += '\\n';\n }\n const end = start + header.length + scalar.source.length;\n return { value, type, comment: header.comment, range: [start, end, end] };\n}\nfunction parseBlockScalarHeader({ offset, props }, strict, onError) {\n /* istanbul ignore if should not happen */\n if (props[0].type !== 'block-scalar-header') {\n onError(props[0], 'IMPOSSIBLE', 'Block scalar header not found');\n return null;\n }\n const { source } = props[0];\n const mode = source[0];\n let indent = 0;\n let chomp = '';\n let error = -1;\n for (let i = 1; i < source.length; ++i) {\n const ch = source[i];\n if (!chomp && (ch === '-' || ch === '+'))\n chomp = ch;\n else {\n const n = Number(ch);\n if (!indent && n)\n indent = n;\n else if (error === -1)\n error = offset + i;\n }\n }\n if (error !== -1)\n onError(error, 'UNEXPECTED_TOKEN', `Block scalar header includes extra characters: ${source}`);\n let hasSpace = false;\n let comment = '';\n let length = source.length;\n for (let i = 1; i < props.length; ++i) {\n const token = props[i];\n switch (token.type) {\n case 'space':\n hasSpace = true;\n // fallthrough\n case 'newline':\n length += token.source.length;\n break;\n case 'comment':\n if (strict && !hasSpace) {\n const message = 'Comments must be separated from other tokens by white space characters';\n onError(token, 'MISSING_CHAR', message);\n }\n length += token.source.length;\n comment = token.source.substring(1);\n break;\n case 'error':\n onError(token, 'UNEXPECTED_TOKEN', token.message);\n length += token.source.length;\n break;\n /* istanbul ignore next should not happen */\n default: {\n const message = `Unexpected token in block scalar header: ${token.type}`;\n onError(token, 'UNEXPECTED_TOKEN', message);\n const ts = token.source;\n if (ts && typeof ts === 'string')\n length += ts.length;\n }\n }\n }\n return { mode, indent, chomp, comment, length };\n}\n/** @returns Array of lines split up as `[indent, content]` */\nfunction splitLines(source) {\n const split = source.split(/\\n( *)/);\n const first = split[0];\n const m = first.match(/^( *)/);\n const line0 = m?.[1]\n ? 
[m[1], first.slice(m[1].length)]\n : ['', first];\n const lines = [line0];\n for (let i = 1; i < split.length; i += 2)\n lines.push([split[i], split[i + 1]]);\n return lines;\n}\n\nexports.resolveBlockScalar = resolveBlockScalar;\n","'use strict';\n\nvar YAMLSeq = require('../nodes/YAMLSeq.js');\nvar resolveProps = require('./resolve-props.js');\nvar utilFlowIndentCheck = require('./util-flow-indent-check.js');\n\nfunction resolveBlockSeq({ composeNode, composeEmptyNode }, ctx, bs, onError) {\n const seq = new YAMLSeq.YAMLSeq(ctx.schema);\n if (ctx.atRoot)\n ctx.atRoot = false;\n let offset = bs.offset;\n for (const { start, value } of bs.items) {\n const props = resolveProps.resolveProps(start, {\n indicator: 'seq-item-ind',\n next: value,\n offset,\n onError,\n startOnNewline: true\n });\n offset = props.end;\n if (!props.found) {\n if (props.anchor || props.tag || value) {\n if (value && value.type === 'block-seq')\n onError(offset, 'BAD_INDENT', 'All sequence items must start at the same column');\n else\n onError(offset, 'MISSING_CHAR', 'Sequence item without - indicator');\n }\n else {\n // TODO: assert being at last item?\n if (props.comment)\n seq.comment = props.comment;\n continue;\n }\n }\n const node = value\n ? composeNode(ctx, value, props, onError)\n : composeEmptyNode(ctx, offset, start, null, props, onError);\n if (ctx.schema.compat)\n utilFlowIndentCheck.flowIndentCheck(bs.indent, value, onError);\n offset = node.range[2];\n seq.items.push(node);\n }\n seq.range = [bs.offset, offset, offset];\n return seq;\n}\n\nexports.resolveBlockSeq = resolveBlockSeq;\n","'use strict';\n\nfunction resolveEnd(end, offset, reqSpace, onError) {\n let comment = '';\n if (end) {\n let hasSpace = false;\n let sep = '';\n for (const token of end) {\n const { source, type } = token;\n switch (type) {\n case 'space':\n hasSpace = true;\n break;\n case 'comment': {\n if (reqSpace && !hasSpace)\n onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters');\n const cb = source.substring(1) || ' ';\n if (!comment)\n comment = cb;\n else\n comment += sep + cb;\n sep = '';\n break;\n }\n case 'newline':\n if (comment)\n sep += source;\n hasSpace = true;\n break;\n default:\n onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${type} at node end`);\n }\n offset += source.length;\n }\n }\n return { comment, offset };\n}\n\nexports.resolveEnd = resolveEnd;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar Pair = require('../nodes/Pair.js');\nvar YAMLMap = require('../nodes/YAMLMap.js');\nvar YAMLSeq = require('../nodes/YAMLSeq.js');\nvar resolveEnd = require('./resolve-end.js');\nvar resolveProps = require('./resolve-props.js');\nvar utilContainsNewline = require('./util-contains-newline.js');\nvar utilMapIncludes = require('./util-map-includes.js');\n\nconst blockMsg = 'Block collections are not allowed within flow collections';\nconst isBlock = (token) => token && (token.type === 'block-map' || token.type === 'block-seq');\nfunction resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onError) {\n const isMap = fc.start.source === '{';\n const fcName = isMap ? 'flow map' : 'flow sequence';\n const coll = isMap\n ? 
new YAMLMap.YAMLMap(ctx.schema)\n : new YAMLSeq.YAMLSeq(ctx.schema);\n coll.flow = true;\n const atRoot = ctx.atRoot;\n if (atRoot)\n ctx.atRoot = false;\n let offset = fc.offset + fc.start.source.length;\n for (let i = 0; i < fc.items.length; ++i) {\n const collItem = fc.items[i];\n const { start, key, sep, value } = collItem;\n const props = resolveProps.resolveProps(start, {\n flow: fcName,\n indicator: 'explicit-key-ind',\n next: key ?? sep?.[0],\n offset,\n onError,\n startOnNewline: false\n });\n if (!props.found) {\n if (!props.anchor && !props.tag && !sep && !value) {\n if (i === 0 && props.comma)\n onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`);\n else if (i < fc.items.length - 1)\n onError(props.start, 'UNEXPECTED_TOKEN', `Unexpected empty item in ${fcName}`);\n if (props.comment) {\n if (coll.comment)\n coll.comment += '\\n' + props.comment;\n else\n coll.comment = props.comment;\n }\n offset = props.end;\n continue;\n }\n if (!isMap && ctx.options.strict && utilContainsNewline.containsNewline(key))\n onError(key, // checked by containsNewline()\n 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line');\n }\n if (i === 0) {\n if (props.comma)\n onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`);\n }\n else {\n if (!props.comma)\n onError(props.start, 'MISSING_CHAR', `Missing , between ${fcName} items`);\n if (props.comment) {\n let prevItemComment = '';\n loop: for (const st of start) {\n switch (st.type) {\n case 'comma':\n case 'space':\n break;\n case 'comment':\n prevItemComment = st.source.substring(1);\n break loop;\n default:\n break loop;\n }\n }\n if (prevItemComment) {\n let prev = coll.items[coll.items.length - 1];\n if (Node.isPair(prev))\n prev = prev.value ?? prev.key;\n if (prev.comment)\n prev.comment += '\\n' + prevItemComment;\n else\n prev.comment = prevItemComment;\n props.comment = props.comment.substring(prevItemComment.length + 1);\n }\n }\n }\n if (!isMap && !sep && !props.found) {\n // item is a value in a seq\n // → key & sep are empty, start does not include ? or :\n const valueNode = value\n ? composeNode(ctx, value, props, onError)\n : composeEmptyNode(ctx, props.end, sep, null, props, onError);\n coll.items.push(valueNode);\n offset = valueNode.range[2];\n if (isBlock(value))\n onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg);\n }\n else {\n // item is a key+value pair\n // key value\n const keyStart = props.end;\n const keyNode = key\n ? composeNode(ctx, key, props, onError)\n : composeEmptyNode(ctx, keyStart, start, null, props, onError);\n if (isBlock(key))\n onError(keyNode.range, 'BLOCK_IN_FLOW', blockMsg);\n // value properties\n const valueProps = resolveProps.resolveProps(sep ?? 
[], {\n flow: fcName,\n indicator: 'map-value-ind',\n next: value,\n offset: keyNode.range[2],\n onError,\n startOnNewline: false\n });\n if (valueProps.found) {\n if (!isMap && !props.found && ctx.options.strict) {\n if (sep)\n for (const st of sep) {\n if (st === valueProps.found)\n break;\n if (st.type === 'newline') {\n onError(st, 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line');\n break;\n }\n }\n if (props.start < valueProps.found.offset - 1024)\n onError(valueProps.found, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit flow sequence key');\n }\n }\n else if (value) {\n if ('source' in value && value.source && value.source[0] === ':')\n onError(value, 'MISSING_CHAR', `Missing space after : in ${fcName}`);\n else\n onError(valueProps.start, 'MISSING_CHAR', `Missing , or : between ${fcName} items`);\n }\n // value value\n const valueNode = value\n ? composeNode(ctx, value, valueProps, onError)\n : valueProps.found\n ? composeEmptyNode(ctx, valueProps.end, sep, null, valueProps, onError)\n : null;\n if (valueNode) {\n if (isBlock(value))\n onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg);\n }\n else if (valueProps.comment) {\n if (keyNode.comment)\n keyNode.comment += '\\n' + valueProps.comment;\n else\n keyNode.comment = valueProps.comment;\n }\n const pair = new Pair.Pair(keyNode, valueNode);\n if (ctx.options.keepSourceTokens)\n pair.srcToken = collItem;\n if (isMap) {\n const map = coll;\n if (utilMapIncludes.mapIncludes(ctx, map.items, keyNode))\n onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique');\n map.items.push(pair);\n }\n else {\n const map = new YAMLMap.YAMLMap(ctx.schema);\n map.flow = true;\n map.items.push(pair);\n coll.items.push(map);\n }\n offset = valueNode ? valueNode.range[2] : valueProps.end;\n }\n }\n const expectedEnd = isMap ? '}' : ']';\n const [ce, ...ee] = fc.end;\n let cePos = offset;\n if (ce && ce.source === expectedEnd)\n cePos = ce.offset + ce.source.length;\n else {\n const name = fcName[0].toUpperCase() + fcName.substring(1);\n const msg = atRoot\n ? `${name} must end with a ${expectedEnd}`\n : `${name} in block collection must be sufficiently indented and end with a ${expectedEnd}`;\n onError(offset, atRoot ? 
'MISSING_CHAR' : 'BAD_INDENT', msg);\n if (ce && ce.source.length !== 1)\n ee.unshift(ce);\n }\n if (ee.length > 0) {\n const end = resolveEnd.resolveEnd(ee, cePos, ctx.options.strict, onError);\n if (end.comment) {\n if (coll.comment)\n coll.comment += '\\n' + end.comment;\n else\n coll.comment = end.comment;\n }\n coll.range = [fc.offset, cePos, end.offset];\n }\n else {\n coll.range = [fc.offset, cePos, cePos];\n }\n return coll;\n}\n\nexports.resolveFlowCollection = resolveFlowCollection;\n","'use strict';\n\nvar Scalar = require('../nodes/Scalar.js');\nvar resolveEnd = require('./resolve-end.js');\n\nfunction resolveFlowScalar(scalar, strict, onError) {\n const { offset, type, source, end } = scalar;\n let _type;\n let value;\n const _onError = (rel, code, msg) => onError(offset + rel, code, msg);\n switch (type) {\n case 'scalar':\n _type = Scalar.Scalar.PLAIN;\n value = plainValue(source, _onError);\n break;\n case 'single-quoted-scalar':\n _type = Scalar.Scalar.QUOTE_SINGLE;\n value = singleQuotedValue(source, _onError);\n break;\n case 'double-quoted-scalar':\n _type = Scalar.Scalar.QUOTE_DOUBLE;\n value = doubleQuotedValue(source, _onError);\n break;\n /* istanbul ignore next should not happen */\n default:\n onError(scalar, 'UNEXPECTED_TOKEN', `Expected a flow scalar value, but found: ${type}`);\n return {\n value: '',\n type: null,\n comment: '',\n range: [offset, offset + source.length, offset + source.length]\n };\n }\n const valueEnd = offset + source.length;\n const re = resolveEnd.resolveEnd(end, valueEnd, strict, onError);\n return {\n value,\n type: _type,\n comment: re.comment,\n range: [offset, valueEnd, re.offset]\n };\n}\nfunction plainValue(source, onError) {\n let badChar = '';\n switch (source[0]) {\n /* istanbul ignore next should not happen */\n case '\\t':\n badChar = 'a tab character';\n break;\n case ',':\n badChar = 'flow indicator character ,';\n break;\n case '%':\n badChar = 'directive indicator character %';\n break;\n case '|':\n case '>': {\n badChar = `block scalar indicator ${source[0]}`;\n break;\n }\n case '@':\n case '`': {\n badChar = `reserved character ${source[0]}`;\n break;\n }\n }\n if (badChar)\n onError(0, 'BAD_SCALAR_START', `Plain value cannot start with ${badChar}`);\n return foldLines(source);\n}\nfunction singleQuotedValue(source, onError) {\n if (source[source.length - 1] !== \"'\" || source.length === 1)\n onError(source.length, 'MISSING_CHAR', \"Missing closing 'quote\");\n return foldLines(source.slice(1, -1)).replace(/''/g, \"'\");\n}\nfunction foldLines(source) {\n /**\n * The negative lookbehind here and in the `re` RegExp is to\n * prevent causing a polynomial search time in certain cases.\n *\n * The try-catch is for Safari, which doesn't support this yet:\n * https://caniuse.com/js-regexp-lookbehind\n */\n let first, line;\n try {\n first = new RegExp('(.*?)(? wsStart ? 
source.slice(wsStart, i + 1) : ch;\n }\n else {\n res += ch;\n }\n }\n if (source[source.length - 1] !== '\"' || source.length === 1)\n onError(source.length, 'MISSING_CHAR', 'Missing closing \"quote');\n return res;\n}\n/**\n * Fold a single newline into a space, multiple newlines to N - 1 newlines.\n * Presumes `source[offset] === '\\n'`\n */\nfunction foldNewline(source, offset) {\n let fold = '';\n let ch = source[offset + 1];\n while (ch === ' ' || ch === '\\t' || ch === '\\n' || ch === '\\r') {\n if (ch === '\\r' && source[offset + 2] !== '\\n')\n break;\n if (ch === '\\n')\n fold += '\\n';\n offset += 1;\n ch = source[offset + 1];\n }\n if (!fold)\n fold = ' ';\n return { fold, offset };\n}\nconst escapeCodes = {\n '0': '\\0',\n a: '\\x07',\n b: '\\b',\n e: '\\x1b',\n f: '\\f',\n n: '\\n',\n r: '\\r',\n t: '\\t',\n v: '\\v',\n N: '\\u0085',\n _: '\\u00a0',\n L: '\\u2028',\n P: '\\u2029',\n ' ': ' ',\n '\"': '\"',\n '/': '/',\n '\\\\': '\\\\',\n '\\t': '\\t'\n};\nfunction parseCharCode(source, offset, length, onError) {\n const cc = source.substr(offset, length);\n const ok = cc.length === length && /^[0-9a-fA-F]+$/.test(cc);\n const code = ok ? parseInt(cc, 16) : NaN;\n if (isNaN(code)) {\n const raw = source.substr(offset - 2, length + 2);\n onError(offset - 2, 'BAD_DQ_ESCAPE', `Invalid escape sequence ${raw}`);\n return raw;\n }\n return String.fromCodePoint(code);\n}\n\nexports.resolveFlowScalar = resolveFlowScalar;\n","'use strict';\n\nfunction resolveProps(tokens, { flow, indicator, next, offset, onError, startOnNewline }) {\n let spaceBefore = false;\n let atNewline = startOnNewline;\n let hasSpace = startOnNewline;\n let comment = '';\n let commentSep = '';\n let hasNewline = false;\n let hasNewlineAfterProp = false;\n let reqSpace = false;\n let anchor = null;\n let tag = null;\n let comma = null;\n let found = null;\n let start = null;\n for (const token of tokens) {\n if (reqSpace) {\n if (token.type !== 'space' &&\n token.type !== 'newline' &&\n token.type !== 'comma')\n onError(token.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space');\n reqSpace = false;\n }\n switch (token.type) {\n case 'space':\n // At the doc level, tabs at line start may be parsed\n // as leading white space rather than indentation.\n // In a flow collection, only the parser handles indent.\n if (!flow &&\n atNewline &&\n indicator !== 'doc-start' &&\n token.source[0] === '\\t')\n onError(token, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation');\n hasSpace = true;\n break;\n case 'comment': {\n if (!hasSpace)\n onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters');\n const cb = token.source.substring(1) || ' ';\n if (!comment)\n comment = cb;\n else\n comment += commentSep + cb;\n commentSep = '';\n atNewline = false;\n break;\n }\n case 'newline':\n if (atNewline) {\n if (comment)\n comment += token.source;\n else\n spaceBefore = true;\n }\n else\n commentSep += token.source;\n atNewline = true;\n hasNewline = true;\n if (anchor || tag)\n hasNewlineAfterProp = true;\n hasSpace = true;\n break;\n case 'anchor':\n if (anchor)\n onError(token, 'MULTIPLE_ANCHORS', 'A node can have at most one anchor');\n if (token.source.endsWith(':'))\n onError(token.offset + token.source.length - 1, 'BAD_ALIAS', 'Anchor ending in : is ambiguous', true);\n anchor = token;\n if (start === null)\n start = token.offset;\n atNewline = false;\n hasSpace = false;\n reqSpace = true;\n break;\n case 'tag': {\n if (tag)\n 
onError(token, 'MULTIPLE_TAGS', 'A node can have at most one tag');\n tag = token;\n if (start === null)\n start = token.offset;\n atNewline = false;\n hasSpace = false;\n reqSpace = true;\n break;\n }\n case indicator:\n // Could here handle preceding comments differently\n if (anchor || tag)\n onError(token, 'BAD_PROP_ORDER', `Anchors and tags must be after the ${token.source} indicator`);\n if (found)\n onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.source} in ${flow ?? 'collection'}`);\n found = token;\n atNewline = false;\n hasSpace = false;\n break;\n case 'comma':\n if (flow) {\n if (comma)\n onError(token, 'UNEXPECTED_TOKEN', `Unexpected , in ${flow}`);\n comma = token;\n atNewline = false;\n hasSpace = false;\n break;\n }\n // else fallthrough\n default:\n onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.type} token`);\n atNewline = false;\n hasSpace = false;\n }\n }\n const last = tokens[tokens.length - 1];\n const end = last ? last.offset + last.source.length : offset;\n if (reqSpace &&\n next &&\n next.type !== 'space' &&\n next.type !== 'newline' &&\n next.type !== 'comma' &&\n (next.type !== 'scalar' || next.source !== ''))\n onError(next.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space');\n return {\n comma,\n found,\n spaceBefore,\n comment,\n hasNewline,\n hasNewlineAfterProp,\n anchor,\n tag,\n end,\n start: start ?? end\n };\n}\n\nexports.resolveProps = resolveProps;\n","'use strict';\n\nfunction containsNewline(key) {\n if (!key)\n return null;\n switch (key.type) {\n case 'alias':\n case 'scalar':\n case 'double-quoted-scalar':\n case 'single-quoted-scalar':\n if (key.source.includes('\\n'))\n return true;\n if (key.end)\n for (const st of key.end)\n if (st.type === 'newline')\n return true;\n return false;\n case 'flow-collection':\n for (const it of key.items) {\n for (const st of it.start)\n if (st.type === 'newline')\n return true;\n if (it.sep)\n for (const st of it.sep)\n if (st.type === 'newline')\n return true;\n if (containsNewline(it.key) || containsNewline(it.value))\n return true;\n }\n return false;\n default:\n return true;\n }\n}\n\nexports.containsNewline = containsNewline;\n","'use strict';\n\nfunction emptyScalarPosition(offset, before, pos) {\n if (before) {\n if (pos === null)\n pos = before.length;\n for (let i = pos - 1; i >= 0; --i) {\n let st = before[i];\n switch (st.type) {\n case 'space':\n case 'comment':\n case 'newline':\n offset -= st.source.length;\n continue;\n }\n // Technically, an empty scalar is immediately after the last non-empty\n // node, but it's more useful to place it after any whitespace.\n st = before[++i];\n while (st?.type === 'space') {\n offset += st.source.length;\n st = before[++i];\n }\n break;\n }\n }\n return offset;\n}\n\nexports.emptyScalarPosition = emptyScalarPosition;\n","'use strict';\n\nvar utilContainsNewline = require('./util-contains-newline.js');\n\nfunction flowIndentCheck(indent, fc, onError) {\n if (fc?.type === 'flow-collection') {\n const end = fc.end[0];\n if (end.indent === indent &&\n (end.source === ']' || end.source === '}') &&\n utilContainsNewline.containsNewline(fc)) {\n const msg = 'Flow end indicator should be more indented than parent';\n onError(end, 'BAD_INDENT', msg, true);\n }\n }\n}\n\nexports.flowIndentCheck = flowIndentCheck;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\n\nfunction mapIncludes(ctx, items, search) {\n const { uniqueKeys } = ctx.options;\n if (uniqueKeys === false)\n return false;\n const 
isEqual = typeof uniqueKeys === 'function'\n ? uniqueKeys\n : (a, b) => a === b ||\n (Node.isScalar(a) &&\n Node.isScalar(b) &&\n a.value === b.value &&\n !(a.value === '<<' && ctx.schema.merge));\n return items.some(pair => isEqual(pair.key, search));\n}\n\nexports.mapIncludes = mapIncludes;\n","'use strict';\n\nvar Alias = require('../nodes/Alias.js');\nvar Collection = require('../nodes/Collection.js');\nvar Node = require('../nodes/Node.js');\nvar Pair = require('../nodes/Pair.js');\nvar toJS = require('../nodes/toJS.js');\nvar Schema = require('../schema/Schema.js');\nvar stringify = require('../stringify/stringify.js');\nvar stringifyDocument = require('../stringify/stringifyDocument.js');\nvar anchors = require('./anchors.js');\nvar applyReviver = require('./applyReviver.js');\nvar createNode = require('./createNode.js');\nvar directives = require('./directives.js');\n\nclass Document {\n constructor(value, replacer, options) {\n /** A comment before this Document */\n this.commentBefore = null;\n /** A comment immediately after this Document */\n this.comment = null;\n /** Errors encountered during parsing. */\n this.errors = [];\n /** Warnings encountered during parsing. */\n this.warnings = [];\n Object.defineProperty(this, Node.NODE_TYPE, { value: Node.DOC });\n let _replacer = null;\n if (typeof replacer === 'function' || Array.isArray(replacer)) {\n _replacer = replacer;\n }\n else if (options === undefined && replacer) {\n options = replacer;\n replacer = undefined;\n }\n const opt = Object.assign({\n intAsBigInt: false,\n keepSourceTokens: false,\n logLevel: 'warn',\n prettyErrors: true,\n strict: true,\n uniqueKeys: true,\n version: '1.2'\n }, options);\n this.options = opt;\n let { version } = opt;\n if (options?._directives) {\n this.directives = options._directives.atDocument();\n if (this.directives.yaml.explicit)\n version = this.directives.yaml.version;\n }\n else\n this.directives = new directives.Directives({ version });\n this.setSchema(version, options);\n if (value === undefined)\n this.contents = null;\n else {\n this.contents = this.createNode(value, _replacer, options);\n }\n }\n /**\n * Create a deep copy of this Document and its contents.\n *\n * Custom Node values that inherit from `Object` still refer to their original instances.\n */\n clone() {\n const copy = Object.create(Document.prototype, {\n [Node.NODE_TYPE]: { value: Node.DOC }\n });\n copy.commentBefore = this.commentBefore;\n copy.comment = this.comment;\n copy.errors = this.errors.slice();\n copy.warnings = this.warnings.slice();\n copy.options = Object.assign({}, this.options);\n if (this.directives)\n copy.directives = this.directives.clone();\n copy.schema = this.schema.clone();\n copy.contents = Node.isNode(this.contents)\n ? this.contents.clone(copy.schema)\n : this.contents;\n if (this.range)\n copy.range = this.range.slice();\n return copy;\n }\n /** Adds a value to the document. */\n add(value) {\n if (assertCollection(this.contents))\n this.contents.add(value);\n }\n /** Adds a value to the document. 
*/\n addIn(path, value) {\n if (assertCollection(this.contents))\n this.contents.addIn(path, value);\n }\n /**\n * Create a new `Alias` node, ensuring that the target `node` has the required anchor.\n *\n * If `node` already has an anchor, `name` is ignored.\n * Otherwise, the `node.anchor` value will be set to `name`,\n * or if an anchor with that name is already present in the document,\n * `name` will be used as a prefix for a new unique anchor.\n * If `name` is undefined, the generated anchor will use 'a' as a prefix.\n */\n createAlias(node, name) {\n if (!node.anchor) {\n const prev = anchors.anchorNames(this);\n node.anchor =\n // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing\n !name || prev.has(name) ? anchors.findNewAnchor(name || 'a', prev) : name;\n }\n return new Alias.Alias(node.anchor);\n }\n createNode(value, replacer, options) {\n let _replacer = undefined;\n if (typeof replacer === 'function') {\n value = replacer.call({ '': value }, '', value);\n _replacer = replacer;\n }\n else if (Array.isArray(replacer)) {\n const keyToStr = (v) => typeof v === 'number' || v instanceof String || v instanceof Number;\n const asStr = replacer.filter(keyToStr).map(String);\n if (asStr.length > 0)\n replacer = replacer.concat(asStr);\n _replacer = replacer;\n }\n else if (options === undefined && replacer) {\n options = replacer;\n replacer = undefined;\n }\n const { aliasDuplicateObjects, anchorPrefix, flow, keepUndefined, onTagObj, tag } = options ?? {};\n const { onAnchor, setAnchors, sourceObjects } = anchors.createNodeAnchors(this, \n // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing\n anchorPrefix || 'a');\n const ctx = {\n aliasDuplicateObjects: aliasDuplicateObjects ?? true,\n keepUndefined: keepUndefined ?? false,\n onAnchor,\n onTagObj,\n replacer: _replacer,\n schema: this.schema,\n sourceObjects\n };\n const node = createNode.createNode(value, tag, ctx);\n if (flow && Node.isCollection(node))\n node.flow = true;\n setAnchors();\n return node;\n }\n /**\n * Convert a key and a value into a `Pair` using the current schema,\n * recursively wrapping all values as `Scalar` or `Collection` nodes.\n */\n createPair(key, value, options = {}) {\n const k = this.createNode(key, null, options);\n const v = this.createNode(value, null, options);\n return new Pair.Pair(k, v);\n }\n /**\n * Removes a value from the document.\n * @returns `true` if the item was found and removed.\n */\n delete(key) {\n return assertCollection(this.contents) ? this.contents.delete(key) : false;\n }\n /**\n * Removes a value from the document.\n * @returns `true` if the item was found and removed.\n */\n deleteIn(path) {\n if (Collection.isEmptyPath(path)) {\n if (this.contents == null)\n return false;\n this.contents = null;\n return true;\n }\n return assertCollection(this.contents)\n ? this.contents.deleteIn(path)\n : false;\n }\n /**\n * Returns item at `key`, or `undefined` if not found. By default unwraps\n * scalar values from their surrounding node; to disable set `keepScalar` to\n * `true` (collections are always returned intact).\n */\n get(key, keepScalar) {\n return Node.isCollection(this.contents)\n ? this.contents.get(key, keepScalar)\n : undefined;\n }\n /**\n * Returns item at `path`, or `undefined` if not found. 
By default unwraps\n * scalar values from their surrounding node; to disable set `keepScalar` to\n * `true` (collections are always returned intact).\n */\n getIn(path, keepScalar) {\n if (Collection.isEmptyPath(path))\n return !keepScalar && Node.isScalar(this.contents)\n ? this.contents.value\n : this.contents;\n return Node.isCollection(this.contents)\n ? this.contents.getIn(path, keepScalar)\n : undefined;\n }\n /**\n * Checks if the document includes a value with the key `key`.\n */\n has(key) {\n return Node.isCollection(this.contents) ? this.contents.has(key) : false;\n }\n /**\n * Checks if the document includes a value at `path`.\n */\n hasIn(path) {\n if (Collection.isEmptyPath(path))\n return this.contents !== undefined;\n return Node.isCollection(this.contents) ? this.contents.hasIn(path) : false;\n }\n /**\n * Sets a value in this document. For `!!set`, `value` needs to be a\n * boolean to add/remove the item from the set.\n */\n set(key, value) {\n if (this.contents == null) {\n this.contents = Collection.collectionFromPath(this.schema, [key], value);\n }\n else if (assertCollection(this.contents)) {\n this.contents.set(key, value);\n }\n }\n /**\n * Sets a value in this document. For `!!set`, `value` needs to be a\n * boolean to add/remove the item from the set.\n */\n setIn(path, value) {\n if (Collection.isEmptyPath(path))\n this.contents = value;\n else if (this.contents == null) {\n this.contents = Collection.collectionFromPath(this.schema, Array.from(path), value);\n }\n else if (assertCollection(this.contents)) {\n this.contents.setIn(path, value);\n }\n }\n /**\n * Change the YAML version and schema used by the document.\n * A `null` version disables support for directives, explicit tags, anchors, and aliases.\n * It also requires the `schema` option to be given as a `Schema` instance value.\n *\n * Overrides all previously set schema options.\n */\n setSchema(version, options = {}) {\n if (typeof version === 'number')\n version = String(version);\n let opt;\n switch (version) {\n case '1.1':\n if (this.directives)\n this.directives.yaml.version = '1.1';\n else\n this.directives = new directives.Directives({ version: '1.1' });\n opt = { merge: true, resolveKnownTags: false, schema: 'yaml-1.1' };\n break;\n case '1.2':\n case 'next':\n if (this.directives)\n this.directives.yaml.version = version;\n else\n this.directives = new directives.Directives({ version });\n opt = { merge: false, resolveKnownTags: true, schema: 'core' };\n break;\n case null:\n if (this.directives)\n delete this.directives;\n opt = null;\n break;\n default: {\n const sv = JSON.stringify(version);\n throw new Error(`Expected '1.1', '1.2' or null as first argument, but found: ${sv}`);\n }\n }\n // Not using `instanceof Schema` to allow for duck typing\n if (options.schema instanceof Object)\n this.schema = options.schema;\n else if (opt)\n this.schema = new Schema.Schema(Object.assign(opt, options));\n else\n throw new Error(`With a null YAML version, the { schema: Schema } option is required`);\n }\n // json & jsonArg are only used from toJSON()\n toJS({ json, jsonArg, mapAsMap, maxAliasCount, onAnchor, reviver } = {}) {\n const ctx = {\n anchors: new Map(),\n doc: this,\n keep: !json,\n mapAsMap: mapAsMap === true,\n mapKeyWarned: false,\n maxAliasCount: typeof maxAliasCount === 'number' ? maxAliasCount : 100,\n stringify: stringify.stringify\n };\n const res = toJS.toJS(this.contents, jsonArg ?? 
'', ctx);\n if (typeof onAnchor === 'function')\n for (const { count, res } of ctx.anchors.values())\n onAnchor(res, count);\n return typeof reviver === 'function'\n ? applyReviver.applyReviver(reviver, { '': res }, '', res)\n : res;\n }\n /**\n * A JSON representation of the document `contents`.\n *\n * @param jsonArg Used by `JSON.stringify` to indicate the array index or\n * property name.\n */\n toJSON(jsonArg, onAnchor) {\n return this.toJS({ json: true, jsonArg, mapAsMap: false, onAnchor });\n }\n /** A YAML representation of the document. */\n toString(options = {}) {\n if (this.errors.length > 0)\n throw new Error('Document with errors cannot be stringified');\n if ('indent' in options &&\n (!Number.isInteger(options.indent) || Number(options.indent) <= 0)) {\n const s = JSON.stringify(options.indent);\n throw new Error(`\"indent\" option must be a positive integer, not ${s}`);\n }\n return stringifyDocument.stringifyDocument(this, options);\n }\n}\nfunction assertCollection(contents) {\n if (Node.isCollection(contents))\n return true;\n throw new Error('Expected a YAML collection as document contents');\n}\n\nexports.Document = Document;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar visit = require('../visit.js');\n\n/**\n * Verify that the input string is a valid anchor.\n *\n * Will throw on errors.\n */\nfunction anchorIsValid(anchor) {\n if (/[\\x00-\\x19\\s,[\\]{}]/.test(anchor)) {\n const sa = JSON.stringify(anchor);\n const msg = `Anchor must not contain whitespace or control characters: ${sa}`;\n throw new Error(msg);\n }\n return true;\n}\nfunction anchorNames(root) {\n const anchors = new Set();\n visit.visit(root, {\n Value(_key, node) {\n if (node.anchor)\n anchors.add(node.anchor);\n }\n });\n return anchors;\n}\n/** Find a new anchor name with the given `prefix` and a one-indexed suffix. */\nfunction findNewAnchor(prefix, exclude) {\n for (let i = 1; true; ++i) {\n const name = `${prefix}${i}`;\n if (!exclude.has(name))\n return name;\n }\n}\nfunction createNodeAnchors(doc, prefix) {\n const aliasObjects = [];\n const sourceObjects = new Map();\n let prevAnchors = null;\n return {\n onAnchor: (source) => {\n aliasObjects.push(source);\n if (!prevAnchors)\n prevAnchors = anchorNames(doc);\n const anchor = findNewAnchor(prefix, prevAnchors);\n prevAnchors.add(anchor);\n return anchor;\n },\n /**\n * With circular references, the source node is only resolved after all\n * of its child nodes are. 
This is why anchors are set only after all of\n * the nodes have been created.\n */\n setAnchors: () => {\n for (const source of aliasObjects) {\n const ref = sourceObjects.get(source);\n if (typeof ref === 'object' &&\n ref.anchor &&\n (Node.isScalar(ref.node) || Node.isCollection(ref.node))) {\n ref.node.anchor = ref.anchor;\n }\n else {\n const error = new Error('Failed to resolve repeated object (this should not happen)');\n error.source = source;\n throw error;\n }\n }\n },\n sourceObjects\n };\n}\n\nexports.anchorIsValid = anchorIsValid;\nexports.anchorNames = anchorNames;\nexports.createNodeAnchors = createNodeAnchors;\nexports.findNewAnchor = findNewAnchor;\n","'use strict';\n\n/**\n * Applies the JSON.parse reviver algorithm as defined in the ECMA-262 spec,\n * in section 24.5.1.1 \"Runtime Semantics: InternalizeJSONProperty\" of the\n * 2021 edition: https://tc39.es/ecma262/#sec-json.parse\n *\n * Includes extensions for handling Map and Set objects.\n */\nfunction applyReviver(reviver, obj, key, val) {\n if (val && typeof val === 'object') {\n if (Array.isArray(val)) {\n for (let i = 0, len = val.length; i < len; ++i) {\n const v0 = val[i];\n const v1 = applyReviver(reviver, val, String(i), v0);\n if (v1 === undefined)\n delete val[i];\n else if (v1 !== v0)\n val[i] = v1;\n }\n }\n else if (val instanceof Map) {\n for (const k of Array.from(val.keys())) {\n const v0 = val.get(k);\n const v1 = applyReviver(reviver, val, k, v0);\n if (v1 === undefined)\n val.delete(k);\n else if (v1 !== v0)\n val.set(k, v1);\n }\n }\n else if (val instanceof Set) {\n for (const v0 of Array.from(val)) {\n const v1 = applyReviver(reviver, val, v0, v0);\n if (v1 === undefined)\n val.delete(v0);\n else if (v1 !== v0) {\n val.delete(v0);\n val.add(v1);\n }\n }\n }\n else {\n for (const [k, v0] of Object.entries(val)) {\n const v1 = applyReviver(reviver, val, k, v0);\n if (v1 === undefined)\n delete val[k];\n else if (v1 !== v0)\n val[k] = v1;\n }\n }\n }\n return reviver.call(obj, key, val);\n}\n\nexports.applyReviver = applyReviver;\n","'use strict';\n\nvar Alias = require('../nodes/Alias.js');\nvar Node = require('../nodes/Node.js');\nvar Scalar = require('../nodes/Scalar.js');\n\nconst defaultTagPrefix = 'tag:yaml.org,2002:';\nfunction findTagObject(value, tagName, tags) {\n if (tagName) {\n const match = tags.filter(t => t.tag === tagName);\n const tagObj = match.find(t => !t.format) ?? match[0];\n if (!tagObj)\n throw new Error(`Tag ${tagName} not found`);\n return tagObj;\n }\n return tags.find(t => t.identify?.(value) && !t.format);\n}\nfunction createNode(value, tagName, ctx) {\n if (Node.isDocument(value))\n value = value.contents;\n if (Node.isNode(value))\n return value;\n if (Node.isPair(value)) {\n const map = ctx.schema[Node.MAP].createNode?.(ctx.schema, null, ctx);\n map.items.push(value);\n return map;\n }\n if (value instanceof String ||\n value instanceof Number ||\n value instanceof Boolean ||\n (typeof BigInt === 'function' && value instanceof BigInt) // not supported everywhere\n ) {\n // https://tc39.es/ecma262/#sec-serializejsonproperty\n value = value.valueOf();\n }\n const { aliasDuplicateObjects, onAnchor, onTagObj, schema, sourceObjects } = ctx;\n // Detect duplicate references to the same object & use Alias nodes for all\n // after first. 
The `ref` wrapper allows for circular references to resolve.\n let ref = undefined;\n if (aliasDuplicateObjects && value && typeof value === 'object') {\n ref = sourceObjects.get(value);\n if (ref) {\n if (!ref.anchor)\n ref.anchor = onAnchor(value);\n return new Alias.Alias(ref.anchor);\n }\n else {\n ref = { anchor: null, node: null };\n sourceObjects.set(value, ref);\n }\n }\n if (tagName?.startsWith('!!'))\n tagName = defaultTagPrefix + tagName.slice(2);\n let tagObj = findTagObject(value, tagName, schema.tags);\n if (!tagObj) {\n if (value && typeof value.toJSON === 'function') {\n // eslint-disable-next-line @typescript-eslint/no-unsafe-call\n value = value.toJSON();\n }\n if (!value || typeof value !== 'object') {\n const node = new Scalar.Scalar(value);\n if (ref)\n ref.node = node;\n return node;\n }\n tagObj =\n value instanceof Map\n ? schema[Node.MAP]\n : Symbol.iterator in Object(value)\n ? schema[Node.SEQ]\n : schema[Node.MAP];\n }\n if (onTagObj) {\n onTagObj(tagObj);\n delete ctx.onTagObj;\n }\n const node = tagObj?.createNode\n ? tagObj.createNode(ctx.schema, value, ctx)\n : new Scalar.Scalar(value);\n if (tagName)\n node.tag = tagName;\n if (ref)\n ref.node = node;\n return node;\n}\n\nexports.createNode = createNode;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar visit = require('../visit.js');\n\nconst escapeChars = {\n '!': '%21',\n ',': '%2C',\n '[': '%5B',\n ']': '%5D',\n '{': '%7B',\n '}': '%7D'\n};\nconst escapeTagName = (tn) => tn.replace(/[!,[\\]{}]/g, ch => escapeChars[ch]);\nclass Directives {\n constructor(yaml, tags) {\n /**\n * The directives-end/doc-start marker `---`. If `null`, a marker may still be\n * included in the document's stringified representation.\n */\n this.docStart = null;\n /** The doc-end marker `...`. 
*/\n this.docEnd = false;\n this.yaml = Object.assign({}, Directives.defaultYaml, yaml);\n this.tags = Object.assign({}, Directives.defaultTags, tags);\n }\n clone() {\n const copy = new Directives(this.yaml, this.tags);\n copy.docStart = this.docStart;\n return copy;\n }\n /**\n * During parsing, get a Directives instance for the current document and\n * update the stream state according to the current version's spec.\n */\n atDocument() {\n const res = new Directives(this.yaml, this.tags);\n switch (this.yaml.version) {\n case '1.1':\n this.atNextDocument = true;\n break;\n case '1.2':\n this.atNextDocument = false;\n this.yaml = {\n explicit: Directives.defaultYaml.explicit,\n version: '1.2'\n };\n this.tags = Object.assign({}, Directives.defaultTags);\n break;\n }\n return res;\n }\n /**\n * @param onError - May be called even if the action was successful\n * @returns `true` on success\n */\n add(line, onError) {\n if (this.atNextDocument) {\n this.yaml = { explicit: Directives.defaultYaml.explicit, version: '1.1' };\n this.tags = Object.assign({}, Directives.defaultTags);\n this.atNextDocument = false;\n }\n const parts = line.trim().split(/[ \\t]+/);\n const name = parts.shift();\n switch (name) {\n case '%TAG': {\n if (parts.length !== 2) {\n onError(0, '%TAG directive should contain exactly two parts');\n if (parts.length < 2)\n return false;\n }\n const [handle, prefix] = parts;\n this.tags[handle] = prefix;\n return true;\n }\n case '%YAML': {\n this.yaml.explicit = true;\n if (parts.length !== 1) {\n onError(0, '%YAML directive should contain exactly one part');\n return false;\n }\n const [version] = parts;\n if (version === '1.1' || version === '1.2') {\n this.yaml.version = version;\n return true;\n }\n else {\n const isValid = /^\\d+\\.\\d+$/.test(version);\n onError(6, `Unsupported YAML version ${version}`, isValid);\n return false;\n }\n }\n default:\n onError(0, `Unknown directive ${name}`, true);\n return false;\n }\n }\n /**\n * Resolves a tag, matching handles to those defined in %TAG directives.\n *\n * @returns Resolved tag, which may also be the non-specific tag `'!'` or a\n * `'!local'` tag, or `null` if unresolvable.\n */\n tagName(source, onError) {\n if (source === '!')\n return '!'; // non-specific tag\n if (source[0] !== '!') {\n onError(`Not a valid tag: ${source}`);\n return null;\n }\n if (source[1] === '<') {\n const verbatim = source.slice(2, -1);\n if (verbatim === '!' || verbatim === '!!') {\n onError(`Verbatim tags aren't resolved, so ${source} is invalid.`);\n return null;\n }\n if (source[source.length - 1] !== '>')\n onError('Verbatim tags must end with a >');\n return verbatim;\n }\n const [, handle, suffix] = source.match(/^(.*!)([^!]*)$/);\n if (!suffix)\n onError(`The ${source} tag has no suffix`);\n const prefix = this.tags[handle];\n if (prefix)\n return prefix + decodeURIComponent(suffix);\n if (handle === '!')\n return source; // local tag\n onError(`Could not resolve tag: ${source}`);\n return null;\n }\n /**\n * Given a fully resolved tag, returns its printable string form,\n * taking into account current tag prefixes and defaults.\n */\n tagString(tag) {\n for (const [handle, prefix] of Object.entries(this.tags)) {\n if (tag.startsWith(prefix))\n return handle + escapeTagName(tag.substring(prefix.length));\n }\n return tag[0] === '!' ? tag : `!<${tag}>`;\n }\n toString(doc) {\n const lines = this.yaml.explicit\n ? 
[`%YAML ${this.yaml.version || '1.2'}`]\n : [];\n const tagEntries = Object.entries(this.tags);\n let tagNames;\n if (doc && tagEntries.length > 0 && Node.isNode(doc.contents)) {\n const tags = {};\n visit.visit(doc.contents, (_key, node) => {\n if (Node.isNode(node) && node.tag)\n tags[node.tag] = true;\n });\n tagNames = Object.keys(tags);\n }\n else\n tagNames = [];\n for (const [handle, prefix] of tagEntries) {\n if (handle === '!!' && prefix === 'tag:yaml.org,2002:')\n continue;\n if (!doc || tagNames.some(tn => tn.startsWith(prefix)))\n lines.push(`%TAG ${handle} ${prefix}`);\n }\n return lines.join('\\n');\n }\n}\nDirectives.defaultYaml = { explicit: false, version: '1.2' };\nDirectives.defaultTags = { '!!': 'tag:yaml.org,2002:' };\n\nexports.Directives = Directives;\n","'use strict';\n\nclass YAMLError extends Error {\n constructor(name, pos, code, message) {\n super();\n this.name = name;\n this.code = code;\n this.message = message;\n this.pos = pos;\n }\n}\nclass YAMLParseError extends YAMLError {\n constructor(pos, code, message) {\n super('YAMLParseError', pos, code, message);\n }\n}\nclass YAMLWarning extends YAMLError {\n constructor(pos, code, message) {\n super('YAMLWarning', pos, code, message);\n }\n}\nconst prettifyError = (src, lc) => (error) => {\n if (error.pos[0] === -1)\n return;\n error.linePos = error.pos.map(pos => lc.linePos(pos));\n const { line, col } = error.linePos[0];\n error.message += ` at line ${line}, column ${col}`;\n let ci = col - 1;\n let lineStr = src\n .substring(lc.lineStarts[line - 1], lc.lineStarts[line])\n .replace(/[\\n\\r]+$/, '');\n // Trim to max 80 chars, keeping col position near the middle\n if (ci >= 60 && lineStr.length > 80) {\n const trimStart = Math.min(ci - 39, lineStr.length - 79);\n lineStr = '…' + lineStr.substring(trimStart);\n ci -= trimStart - 1;\n }\n if (lineStr.length > 80)\n lineStr = lineStr.substring(0, 79) + '…';\n // Include previous line in context if pointing at line start\n if (line > 1 && /^ *$/.test(lineStr.substring(0, ci))) {\n // Regexp won't match if start is trimmed\n let prev = src.substring(lc.lineStarts[line - 2], lc.lineStarts[line - 1]);\n if (prev.length > 80)\n prev = prev.substring(0, 79) + '…\\n';\n lineStr = prev + lineStr;\n }\n if (/[^ ]/.test(lineStr)) {\n let count = 1;\n const end = error.linePos[1];\n if (end && end.line === line && end.col > col) {\n count = Math.min(end.col - col, 80 - ci);\n }\n const pointer = ' '.repeat(ci) + '^'.repeat(count);\n error.message += `:\\n\\n${lineStr}\\n${pointer}\\n`;\n }\n};\n\nexports.YAMLError = YAMLError;\nexports.YAMLParseError = YAMLParseError;\nexports.YAMLWarning = YAMLWarning;\nexports.prettifyError = prettifyError;\n","'use strict';\n\nvar composer = require('./compose/composer.js');\nvar Document = require('./doc/Document.js');\nvar Schema = require('./schema/Schema.js');\nvar errors = require('./errors.js');\nvar Alias = require('./nodes/Alias.js');\nvar Node = require('./nodes/Node.js');\nvar Pair = require('./nodes/Pair.js');\nvar Scalar = require('./nodes/Scalar.js');\nvar YAMLMap = require('./nodes/YAMLMap.js');\nvar YAMLSeq = require('./nodes/YAMLSeq.js');\nvar cst = require('./parse/cst.js');\nvar lexer = require('./parse/lexer.js');\nvar lineCounter = require('./parse/line-counter.js');\nvar parser = require('./parse/parser.js');\nvar publicApi = require('./public-api.js');\nvar visit = require('./visit.js');\n\n\n\nexports.Composer = composer.Composer;\nexports.Document = Document.Document;\nexports.Schema = 
Schema.Schema;\nexports.YAMLError = errors.YAMLError;\nexports.YAMLParseError = errors.YAMLParseError;\nexports.YAMLWarning = errors.YAMLWarning;\nexports.Alias = Alias.Alias;\nexports.isAlias = Node.isAlias;\nexports.isCollection = Node.isCollection;\nexports.isDocument = Node.isDocument;\nexports.isMap = Node.isMap;\nexports.isNode = Node.isNode;\nexports.isPair = Node.isPair;\nexports.isScalar = Node.isScalar;\nexports.isSeq = Node.isSeq;\nexports.Pair = Pair.Pair;\nexports.Scalar = Scalar.Scalar;\nexports.YAMLMap = YAMLMap.YAMLMap;\nexports.YAMLSeq = YAMLSeq.YAMLSeq;\nexports.CST = cst;\nexports.Lexer = lexer.Lexer;\nexports.LineCounter = lineCounter.LineCounter;\nexports.Parser = parser.Parser;\nexports.parse = publicApi.parse;\nexports.parseAllDocuments = publicApi.parseAllDocuments;\nexports.parseDocument = publicApi.parseDocument;\nexports.stringify = publicApi.stringify;\nexports.visit = visit.visit;\nexports.visitAsync = visit.visitAsync;\n","'use strict';\n\nfunction debug(logLevel, ...messages) {\n if (logLevel === 'debug')\n console.log(...messages);\n}\nfunction warn(logLevel, warning) {\n if (logLevel === 'debug' || logLevel === 'warn') {\n if (typeof process !== 'undefined' && process.emitWarning)\n process.emitWarning(warning);\n else\n console.warn(warning);\n }\n}\n\nexports.debug = debug;\nexports.warn = warn;\n","'use strict';\n\nvar anchors = require('../doc/anchors.js');\nvar visit = require('../visit.js');\nvar Node = require('./Node.js');\n\nclass Alias extends Node.NodeBase {\n constructor(source) {\n super(Node.ALIAS);\n this.source = source;\n Object.defineProperty(this, 'tag', {\n set() {\n throw new Error('Alias nodes cannot have tags');\n }\n });\n }\n /**\n * Resolve the value of this alias within `doc`, finding the last\n * instance of the `source` anchor before this node.\n */\n resolve(doc) {\n let found = undefined;\n visit.visit(doc, {\n Node: (_key, node) => {\n if (node === this)\n return visit.visit.BREAK;\n if (node.anchor === this.source)\n found = node;\n }\n });\n return found;\n }\n toJSON(_arg, ctx) {\n if (!ctx)\n return { source: this.source };\n const { anchors, doc, maxAliasCount } = ctx;\n const source = this.resolve(doc);\n if (!source) {\n const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`;\n throw new ReferenceError(msg);\n }\n const data = anchors.get(source);\n /* istanbul ignore if */\n if (!data || data.res === undefined) {\n const msg = 'This should not happen: Alias anchor was not resolved?';\n throw new ReferenceError(msg);\n }\n if (maxAliasCount >= 0) {\n data.count += 1;\n if (data.aliasCount === 0)\n data.aliasCount = getAliasCount(doc, source, anchors);\n if (data.count * data.aliasCount > maxAliasCount) {\n const msg = 'Excessive alias count indicates a resource exhaustion attack';\n throw new ReferenceError(msg);\n }\n }\n return data.res;\n }\n toString(ctx, _onComment, _onChompKeep) {\n const src = `*${this.source}`;\n if (ctx) {\n anchors.anchorIsValid(this.source);\n if (ctx.options.verifyAliasOrder && !ctx.anchors.has(this.source)) {\n const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`;\n throw new Error(msg);\n }\n if (ctx.implicitKey)\n return `${src} `;\n }\n return src;\n }\n}\nfunction getAliasCount(doc, node, anchors) {\n if (Node.isAlias(node)) {\n const source = node.resolve(doc);\n const anchor = anchors && source && anchors.get(source);\n return anchor ? 
anchor.count * anchor.aliasCount : 0;\n }\n else if (Node.isCollection(node)) {\n let count = 0;\n for (const item of node.items) {\n const c = getAliasCount(doc, item, anchors);\n if (c > count)\n count = c;\n }\n return count;\n }\n else if (Node.isPair(node)) {\n const kc = getAliasCount(doc, node.key, anchors);\n const vc = getAliasCount(doc, node.value, anchors);\n return Math.max(kc, vc);\n }\n return 1;\n}\n\nexports.Alias = Alias;\n","'use strict';\n\nvar createNode = require('../doc/createNode.js');\nvar Node = require('./Node.js');\n\nfunction collectionFromPath(schema, path, value) {\n let v = value;\n for (let i = path.length - 1; i >= 0; --i) {\n const k = path[i];\n if (typeof k === 'number' && Number.isInteger(k) && k >= 0) {\n const a = [];\n a[k] = v;\n v = a;\n }\n else {\n v = new Map([[k, v]]);\n }\n }\n return createNode.createNode(v, undefined, {\n aliasDuplicateObjects: false,\n keepUndefined: false,\n onAnchor: () => {\n throw new Error('This should not happen, please report a bug.');\n },\n schema,\n sourceObjects: new Map()\n });\n}\n// Type guard is intentionally a little wrong so as to be more useful,\n// as it does not cover untypable empty non-string iterables (e.g. []).\nconst isEmptyPath = (path) => path == null ||\n (typeof path === 'object' && !!path[Symbol.iterator]().next().done);\nclass Collection extends Node.NodeBase {\n constructor(type, schema) {\n super(type);\n Object.defineProperty(this, 'schema', {\n value: schema,\n configurable: true,\n enumerable: false,\n writable: true\n });\n }\n /**\n * Create a copy of this collection.\n *\n * @param schema - If defined, overwrites the original's schema\n */\n clone(schema) {\n const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this));\n if (schema)\n copy.schema = schema;\n copy.items = copy.items.map(it => Node.isNode(it) || Node.isPair(it) ? it.clone(schema) : it);\n if (this.range)\n copy.range = this.range.slice();\n return copy;\n }\n /**\n * Adds a value to the collection. For `!!map` and `!!omap` the value must\n * be a Pair instance or a `{ key, value }` object, which may not have a key\n * that already exists in the map.\n */\n addIn(path, value) {\n if (isEmptyPath(path))\n this.add(value);\n else {\n const [key, ...rest] = path;\n const node = this.get(key, true);\n if (Node.isCollection(node))\n node.addIn(rest, value);\n else if (node === undefined && this.schema)\n this.set(key, collectionFromPath(this.schema, rest, value));\n else\n throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);\n }\n }\n /**\n * Removes a value from the collection.\n * @returns `true` if the item was found and removed.\n */\n deleteIn(path) {\n const [key, ...rest] = path;\n if (rest.length === 0)\n return this.delete(key);\n const node = this.get(key, true);\n if (Node.isCollection(node))\n return node.deleteIn(rest);\n else\n throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);\n }\n /**\n * Returns item at `key`, or `undefined` if not found. By default unwraps\n * scalar values from their surrounding node; to disable set `keepScalar` to\n * `true` (collections are always returned intact).\n */\n getIn(path, keepScalar) {\n const [key, ...rest] = path;\n const node = this.get(key, true);\n if (rest.length === 0)\n return !keepScalar && Node.isScalar(node) ? node.value : node;\n else\n return Node.isCollection(node) ? 
node.getIn(rest, keepScalar) : undefined;\n }\n hasAllNullValues(allowScalar) {\n return this.items.every(node => {\n if (!Node.isPair(node))\n return false;\n const n = node.value;\n return (n == null ||\n (allowScalar &&\n Node.isScalar(n) &&\n n.value == null &&\n !n.commentBefore &&\n !n.comment &&\n !n.tag));\n });\n }\n /**\n * Checks if the collection includes a value with the key `key`.\n */\n hasIn(path) {\n const [key, ...rest] = path;\n if (rest.length === 0)\n return this.has(key);\n const node = this.get(key, true);\n return Node.isCollection(node) ? node.hasIn(rest) : false;\n }\n /**\n * Sets a value in this collection. For `!!set`, `value` needs to be a\n * boolean to add/remove the item from the set.\n */\n setIn(path, value) {\n const [key, ...rest] = path;\n if (rest.length === 0) {\n this.set(key, value);\n }\n else {\n const node = this.get(key, true);\n if (Node.isCollection(node))\n node.setIn(rest, value);\n else if (node === undefined && this.schema)\n this.set(key, collectionFromPath(this.schema, rest, value));\n else\n throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);\n }\n }\n}\nCollection.maxFlowStringSingleLineLength = 60;\n\nexports.Collection = Collection;\nexports.collectionFromPath = collectionFromPath;\nexports.isEmptyPath = isEmptyPath;\n","'use strict';\n\nconst ALIAS = Symbol.for('yaml.alias');\nconst DOC = Symbol.for('yaml.document');\nconst MAP = Symbol.for('yaml.map');\nconst PAIR = Symbol.for('yaml.pair');\nconst SCALAR = Symbol.for('yaml.scalar');\nconst SEQ = Symbol.for('yaml.seq');\nconst NODE_TYPE = Symbol.for('yaml.node.type');\nconst isAlias = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === ALIAS;\nconst isDocument = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === DOC;\nconst isMap = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === MAP;\nconst isPair = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === PAIR;\nconst isScalar = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SCALAR;\nconst isSeq = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SEQ;\nfunction isCollection(node) {\n if (node && typeof node === 'object')\n switch (node[NODE_TYPE]) {\n case MAP:\n case SEQ:\n return true;\n }\n return false;\n}\nfunction isNode(node) {\n if (node && typeof node === 'object')\n switch (node[NODE_TYPE]) {\n case ALIAS:\n case MAP:\n case SCALAR:\n case SEQ:\n return true;\n }\n return false;\n}\nconst hasAnchor = (node) => (isScalar(node) || isCollection(node)) && !!node.anchor;\nclass NodeBase {\n constructor(type) {\n Object.defineProperty(this, NODE_TYPE, { value: type });\n }\n /** Create a copy of this node. 
*/\n clone() {\n const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this));\n if (this.range)\n copy.range = this.range.slice();\n return copy;\n }\n}\n\nexports.ALIAS = ALIAS;\nexports.DOC = DOC;\nexports.MAP = MAP;\nexports.NODE_TYPE = NODE_TYPE;\nexports.NodeBase = NodeBase;\nexports.PAIR = PAIR;\nexports.SCALAR = SCALAR;\nexports.SEQ = SEQ;\nexports.hasAnchor = hasAnchor;\nexports.isAlias = isAlias;\nexports.isCollection = isCollection;\nexports.isDocument = isDocument;\nexports.isMap = isMap;\nexports.isNode = isNode;\nexports.isPair = isPair;\nexports.isScalar = isScalar;\nexports.isSeq = isSeq;\n","'use strict';\n\nvar createNode = require('../doc/createNode.js');\nvar stringifyPair = require('../stringify/stringifyPair.js');\nvar addPairToJSMap = require('./addPairToJSMap.js');\nvar Node = require('./Node.js');\n\nfunction createPair(key, value, ctx) {\n const k = createNode.createNode(key, undefined, ctx);\n const v = createNode.createNode(value, undefined, ctx);\n return new Pair(k, v);\n}\nclass Pair {\n constructor(key, value = null) {\n Object.defineProperty(this, Node.NODE_TYPE, { value: Node.PAIR });\n this.key = key;\n this.value = value;\n }\n clone(schema) {\n let { key, value } = this;\n if (Node.isNode(key))\n key = key.clone(schema);\n if (Node.isNode(value))\n value = value.clone(schema);\n return new Pair(key, value);\n }\n toJSON(_, ctx) {\n const pair = ctx?.mapAsMap ? new Map() : {};\n return addPairToJSMap.addPairToJSMap(ctx, pair, this);\n }\n toString(ctx, onComment, onChompKeep) {\n return ctx?.doc\n ? stringifyPair.stringifyPair(this, ctx, onComment, onChompKeep)\n : JSON.stringify(this);\n }\n}\n\nexports.Pair = Pair;\nexports.createPair = createPair;\n","'use strict';\n\nvar Node = require('./Node.js');\nvar toJS = require('./toJS.js');\n\nconst isScalarValue = (value) => !value || (typeof value !== 'function' && typeof value !== 'object');\nclass Scalar extends Node.NodeBase {\n constructor(value) {\n super(Node.SCALAR);\n this.value = value;\n }\n toJSON(arg, ctx) {\n return ctx?.keep ? this.value : toJS.toJS(this.value, arg, ctx);\n }\n toString() {\n return String(this.value);\n }\n}\nScalar.BLOCK_FOLDED = 'BLOCK_FOLDED';\nScalar.BLOCK_LITERAL = 'BLOCK_LITERAL';\nScalar.PLAIN = 'PLAIN';\nScalar.QUOTE_DOUBLE = 'QUOTE_DOUBLE';\nScalar.QUOTE_SINGLE = 'QUOTE_SINGLE';\n\nexports.Scalar = Scalar;\nexports.isScalarValue = isScalarValue;\n","'use strict';\n\nvar stringifyCollection = require('../stringify/stringifyCollection.js');\nvar addPairToJSMap = require('./addPairToJSMap.js');\nvar Collection = require('./Collection.js');\nvar Node = require('./Node.js');\nvar Pair = require('./Pair.js');\nvar Scalar = require('./Scalar.js');\n\nfunction findPair(items, key) {\n const k = Node.isScalar(key) ? key.value : key;\n for (const it of items) {\n if (Node.isPair(it)) {\n if (it.key === key || it.key === k)\n return it;\n if (Node.isScalar(it.key) && it.key.value === k)\n return it;\n }\n }\n return undefined;\n}\nclass YAMLMap extends Collection.Collection {\n constructor(schema) {\n super(Node.MAP, schema);\n this.items = [];\n }\n static get tagName() {\n return 'tag:yaml.org,2002:map';\n }\n /**\n * Adds a value to the collection.\n *\n * @param overwrite - If not set `true`, using a key that is already in the\n * collection will throw. 
Otherwise, overwrites the previous value.\n */\n add(pair, overwrite) {\n let _pair;\n if (Node.isPair(pair))\n _pair = pair;\n else if (!pair || typeof pair !== 'object' || !('key' in pair)) {\n // In TypeScript, this never happens.\n _pair = new Pair.Pair(pair, pair?.value);\n }\n else\n _pair = new Pair.Pair(pair.key, pair.value);\n const prev = findPair(this.items, _pair.key);\n const sortEntries = this.schema?.sortMapEntries;\n if (prev) {\n if (!overwrite)\n throw new Error(`Key ${_pair.key} already set`);\n // For scalars, keep the old node & its comments and anchors\n if (Node.isScalar(prev.value) && Scalar.isScalarValue(_pair.value))\n prev.value.value = _pair.value;\n else\n prev.value = _pair.value;\n }\n else if (sortEntries) {\n const i = this.items.findIndex(item => sortEntries(_pair, item) < 0);\n if (i === -1)\n this.items.push(_pair);\n else\n this.items.splice(i, 0, _pair);\n }\n else {\n this.items.push(_pair);\n }\n }\n delete(key) {\n const it = findPair(this.items, key);\n if (!it)\n return false;\n const del = this.items.splice(this.items.indexOf(it), 1);\n return del.length > 0;\n }\n get(key, keepScalar) {\n const it = findPair(this.items, key);\n const node = it?.value;\n return (!keepScalar && Node.isScalar(node) ? node.value : node) ?? undefined;\n }\n has(key) {\n return !!findPair(this.items, key);\n }\n set(key, value) {\n this.add(new Pair.Pair(key, value), true);\n }\n /**\n * @param ctx - Conversion context, originally set in Document#toJS()\n * @param {Class} Type - If set, forces the returned collection type\n * @returns Instance of Type, Map, or Object\n */\n toJSON(_, ctx, Type) {\n const map = Type ? new Type() : ctx?.mapAsMap ? new Map() : {};\n if (ctx?.onCreate)\n ctx.onCreate(map);\n for (const item of this.items)\n addPairToJSMap.addPairToJSMap(ctx, map, item);\n return map;\n }\n toString(ctx, onComment, onChompKeep) {\n if (!ctx)\n return JSON.stringify(this);\n for (const item of this.items) {\n if (!Node.isPair(item))\n throw new Error(`Map items must all be pairs; found ${JSON.stringify(item)} instead`);\n }\n if (!ctx.allNullValues && this.hasAllNullValues(false))\n ctx = Object.assign({}, ctx, { allNullValues: true });\n return stringifyCollection.stringifyCollection(this, ctx, {\n blockItemPrefix: '',\n flowChars: { start: '{', end: '}' },\n itemIndent: ctx.indent || '',\n onChompKeep,\n onComment\n });\n }\n}\n\nexports.YAMLMap = YAMLMap;\nexports.findPair = findPair;\n","'use strict';\n\nvar stringifyCollection = require('../stringify/stringifyCollection.js');\nvar Collection = require('./Collection.js');\nvar Node = require('./Node.js');\nvar Scalar = require('./Scalar.js');\nvar toJS = require('./toJS.js');\n\nclass YAMLSeq extends Collection.Collection {\n constructor(schema) {\n super(Node.SEQ, schema);\n this.items = [];\n }\n static get tagName() {\n return 'tag:yaml.org,2002:seq';\n }\n add(value) {\n this.items.push(value);\n }\n /**\n * Removes a value from the collection.\n *\n * `key` must contain a representation of an integer for this to succeed.\n * It may be wrapped in a `Scalar`.\n *\n * @returns `true` if the item was found and removed.\n */\n delete(key) {\n const idx = asItemIndex(key);\n if (typeof idx !== 'number')\n return false;\n const del = this.items.splice(idx, 1);\n return del.length > 0;\n }\n get(key, keepScalar) {\n const idx = asItemIndex(key);\n if (typeof idx !== 'number')\n return undefined;\n const it = this.items[idx];\n return !keepScalar && Node.isScalar(it) ? 
it.value : it;\n }\n /**\n * Checks if the collection includes a value with the key `key`.\n *\n * `key` must contain a representation of an integer for this to succeed.\n * It may be wrapped in a `Scalar`.\n */\n has(key) {\n const idx = asItemIndex(key);\n return typeof idx === 'number' && idx < this.items.length;\n }\n /**\n * Sets a value in this collection. For `!!set`, `value` needs to be a\n * boolean to add/remove the item from the set.\n *\n * If `key` does not contain a representation of an integer, this will throw.\n * It may be wrapped in a `Scalar`.\n */\n set(key, value) {\n const idx = asItemIndex(key);\n if (typeof idx !== 'number')\n throw new Error(`Expected a valid index, not ${key}.`);\n const prev = this.items[idx];\n if (Node.isScalar(prev) && Scalar.isScalarValue(value))\n prev.value = value;\n else\n this.items[idx] = value;\n }\n toJSON(_, ctx) {\n const seq = [];\n if (ctx?.onCreate)\n ctx.onCreate(seq);\n let i = 0;\n for (const item of this.items)\n seq.push(toJS.toJS(item, String(i++), ctx));\n return seq;\n }\n toString(ctx, onComment, onChompKeep) {\n if (!ctx)\n return JSON.stringify(this);\n return stringifyCollection.stringifyCollection(this, ctx, {\n blockItemPrefix: '- ',\n flowChars: { start: '[', end: ']' },\n itemIndent: (ctx.indent || '') + ' ',\n onChompKeep,\n onComment\n });\n }\n}\nfunction asItemIndex(key) {\n let idx = Node.isScalar(key) ? key.value : key;\n if (idx && typeof idx === 'string')\n idx = Number(idx);\n return typeof idx === 'number' && Number.isInteger(idx) && idx >= 0\n ? idx\n : null;\n}\n\nexports.YAMLSeq = YAMLSeq;\n","'use strict';\n\nvar log = require('../log.js');\nvar stringify = require('../stringify/stringify.js');\nvar Node = require('./Node.js');\nvar Scalar = require('./Scalar.js');\nvar toJS = require('./toJS.js');\n\nconst MERGE_KEY = '<<';\nfunction addPairToJSMap(ctx, map, { key, value }) {\n if (ctx?.doc.schema.merge && isMergeKey(key)) {\n value = Node.isAlias(value) ? value.resolve(ctx.doc) : value;\n if (Node.isSeq(value))\n for (const it of value.items)\n mergeToJSMap(ctx, map, it);\n else if (Array.isArray(value))\n for (const it of value)\n mergeToJSMap(ctx, map, it);\n else\n mergeToJSMap(ctx, map, value);\n }\n else {\n const jsKey = toJS.toJS(key, '', ctx);\n if (map instanceof Map) {\n map.set(jsKey, toJS.toJS(value, jsKey, ctx));\n }\n else if (map instanceof Set) {\n map.add(jsKey);\n }\n else {\n const stringKey = stringifyKey(key, jsKey, ctx);\n const jsValue = toJS.toJS(value, stringKey, ctx);\n if (stringKey in map)\n Object.defineProperty(map, stringKey, {\n value: jsValue,\n writable: true,\n enumerable: true,\n configurable: true\n });\n else\n map[stringKey] = jsValue;\n }\n }\n return map;\n}\nconst isMergeKey = (key) => key === MERGE_KEY ||\n (Node.isScalar(key) &&\n key.value === MERGE_KEY &&\n (!key.type || key.type === Scalar.Scalar.PLAIN));\n// If the value associated with a merge key is a single mapping node, each of\n// its key/value pairs is inserted into the current mapping, unless the key\n// already exists in it. If the value associated with the merge key is a\n// sequence, then this sequence is expected to contain mapping nodes and each\n// of these nodes is merged in turn according to its order in the sequence.\n// Keys in mapping nodes earlier in the sequence override keys specified in\n// later mapping nodes. -- http://yaml.org/type/merge.html\nfunction mergeToJSMap(ctx, map, value) {\n const source = ctx && Node.isAlias(value) ? 
value.resolve(ctx.doc) : value;\n if (!Node.isMap(source))\n throw new Error('Merge sources must be maps or map aliases');\n const srcMap = source.toJSON(null, ctx, Map);\n for (const [key, value] of srcMap) {\n if (map instanceof Map) {\n if (!map.has(key))\n map.set(key, value);\n }\n else if (map instanceof Set) {\n map.add(key);\n }\n else if (!Object.prototype.hasOwnProperty.call(map, key)) {\n Object.defineProperty(map, key, {\n value,\n writable: true,\n enumerable: true,\n configurable: true\n });\n }\n }\n return map;\n}\nfunction stringifyKey(key, jsKey, ctx) {\n if (jsKey === null)\n return '';\n if (typeof jsKey !== 'object')\n return String(jsKey);\n if (Node.isNode(key) && ctx && ctx.doc) {\n const strCtx = stringify.createStringifyContext(ctx.doc, {});\n strCtx.anchors = new Set();\n for (const node of ctx.anchors.keys())\n strCtx.anchors.add(node.anchor);\n strCtx.inFlow = true;\n strCtx.inStringifyKey = true;\n const strKey = key.toString(strCtx);\n if (!ctx.mapKeyWarned) {\n let jsonStr = JSON.stringify(strKey);\n if (jsonStr.length > 40)\n jsonStr = jsonStr.substring(0, 36) + '...\"';\n log.warn(ctx.doc.options.logLevel, `Keys with collection values will be stringified due to JS Object restrictions: ${jsonStr}. Set mapAsMap: true to use object keys.`);\n ctx.mapKeyWarned = true;\n }\n return strKey;\n }\n return JSON.stringify(jsKey);\n}\n\nexports.addPairToJSMap = addPairToJSMap;\n","'use strict';\n\nvar Node = require('./Node.js');\n\n/**\n * Recursively convert any node or its contents to native JavaScript\n *\n * @param value - The input value\n * @param arg - If `value` defines a `toJSON()` method, use this\n * as its first argument\n * @param ctx - Conversion context, originally set in Document#toJS(). If\n * `{ keep: true }` is not set, output should be suitable for JSON\n * stringification.\n */\nfunction toJS(value, arg, ctx) {\n // eslint-disable-next-line @typescript-eslint/no-unsafe-return\n if (Array.isArray(value))\n return value.map((v, i) => toJS(v, String(i), ctx));\n if (value && typeof value.toJSON === 'function') {\n // eslint-disable-next-line @typescript-eslint/no-unsafe-call\n if (!ctx || !Node.hasAnchor(value))\n return value.toJSON(arg, ctx);\n const data = { aliasCount: 0, count: 1, res: undefined };\n ctx.anchors.set(value, data);\n ctx.onCreate = res => {\n data.res = res;\n delete ctx.onCreate;\n };\n const res = value.toJSON(arg, ctx);\n if (ctx.onCreate)\n ctx.onCreate(res);\n return res;\n }\n if (typeof value === 'bigint' && !ctx?.keep)\n return Number(value);\n return value;\n}\n\nexports.toJS = toJS;\n","'use strict';\n\nvar resolveBlockScalar = require('../compose/resolve-block-scalar.js');\nvar resolveFlowScalar = require('../compose/resolve-flow-scalar.js');\nvar errors = require('../errors.js');\nvar stringifyString = require('../stringify/stringifyString.js');\n\nfunction resolveAsScalar(token, strict = true, onError) {\n if (token) {\n const _onError = (pos, code, message) => {\n const offset = typeof pos === 'number' ? pos : Array.isArray(pos) ? 
pos[0] : pos.offset;\n if (onError)\n onError(offset, code, message);\n else\n throw new errors.YAMLParseError([offset, offset + 1], code, message);\n };\n switch (token.type) {\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar':\n return resolveFlowScalar.resolveFlowScalar(token, strict, _onError);\n case 'block-scalar':\n return resolveBlockScalar.resolveBlockScalar(token, strict, _onError);\n }\n }\n return null;\n}\n/**\n * Create a new scalar token with `value`\n *\n * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,\n * as this function does not support any schema operations and won't check for such conflicts.\n *\n * @param value The string representation of the value, which will have its content properly indented.\n * @param context.end Comments and whitespace after the end of the value, or after the block scalar header. If undefined, a newline will be added.\n * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.\n * @param context.indent The indent level of the token.\n * @param context.inFlow Is this scalar within a flow collection? This may affect the resolved type of the token's value.\n * @param context.offset The offset position of the token.\n * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.\n */\nfunction createScalarToken(value, context) {\n const { implicitKey = false, indent, inFlow = false, offset = -1, type = 'PLAIN' } = context;\n const source = stringifyString.stringifyString({ type, value }, {\n implicitKey,\n indent: indent > 0 ? ' '.repeat(indent) : '',\n inFlow,\n options: { blockQuote: true, lineWidth: -1 }\n });\n const end = context.end ?? [\n { type: 'newline', offset: -1, indent, source: '\\n' }\n ];\n switch (source[0]) {\n case '|':\n case '>': {\n const he = source.indexOf('\\n');\n const head = source.substring(0, he);\n const body = source.substring(he + 1) + '\\n';\n const props = [\n { type: 'block-scalar-header', offset, indent, source: head }\n ];\n if (!addEndtoBlockProps(props, end))\n props.push({ type: 'newline', offset: -1, indent, source: '\\n' });\n return { type: 'block-scalar', offset, indent, props, source: body };\n }\n case '\"':\n return { type: 'double-quoted-scalar', offset, indent, source, end };\n case \"'\":\n return { type: 'single-quoted-scalar', offset, indent, source, end };\n default:\n return { type: 'scalar', offset, indent, source, end };\n }\n}\n/**\n * Set the value of `token` to the given string `value`, overwriting any previous contents and type that it may have.\n *\n * Best efforts are made to retain any comments previously associated with the `token`,\n * though all contents within a collection's `items` will be overwritten.\n *\n * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,\n * as this function does not support any schema operations and won't check for such conflicts.\n *\n * @param token Any token. 
If it does not include an `indent` value, the value will be stringified as if it were an implicit key.\n * @param value The string representation of the value, which will have its content properly indented.\n * @param context.afterKey In most cases, values after a key should have an additional level of indentation.\n * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.\n * @param context.inFlow Being within a flow collection may affect the resolved type of the token's value.\n * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.\n */\nfunction setScalarValue(token, value, context = {}) {\n let { afterKey = false, implicitKey = false, inFlow = false, type } = context;\n let indent = 'indent' in token ? token.indent : null;\n if (afterKey && typeof indent === 'number')\n indent += 2;\n if (!type)\n switch (token.type) {\n case 'single-quoted-scalar':\n type = 'QUOTE_SINGLE';\n break;\n case 'double-quoted-scalar':\n type = 'QUOTE_DOUBLE';\n break;\n case 'block-scalar': {\n const header = token.props[0];\n if (header.type !== 'block-scalar-header')\n throw new Error('Invalid block scalar header');\n type = header.source[0] === '>' ? 'BLOCK_FOLDED' : 'BLOCK_LITERAL';\n break;\n }\n default:\n type = 'PLAIN';\n }\n const source = stringifyString.stringifyString({ type, value }, {\n implicitKey: implicitKey || indent === null,\n indent: indent !== null && indent > 0 ? ' '.repeat(indent) : '',\n inFlow,\n options: { blockQuote: true, lineWidth: -1 }\n });\n switch (source[0]) {\n case '|':\n case '>':\n setBlockScalarValue(token, source);\n break;\n case '\"':\n setFlowScalarValue(token, source, 'double-quoted-scalar');\n break;\n case \"'\":\n setFlowScalarValue(token, source, 'single-quoted-scalar');\n break;\n default:\n setFlowScalarValue(token, source, 'scalar');\n }\n}\nfunction setBlockScalarValue(token, source) {\n const he = source.indexOf('\\n');\n const head = source.substring(0, he);\n const body = source.substring(he + 1) + '\\n';\n if (token.type === 'block-scalar') {\n const header = token.props[0];\n if (header.type !== 'block-scalar-header')\n throw new Error('Invalid block scalar header');\n header.source = head;\n token.source = body;\n }\n else {\n const { offset } = token;\n const indent = 'indent' in token ? token.indent : -1;\n const props = [\n { type: 'block-scalar-header', offset, indent, source: head }\n ];\n if (!addEndtoBlockProps(props, 'end' in token ? 
token.end : undefined))\n props.push({ type: 'newline', offset: -1, indent, source: '\\n' });\n for (const key of Object.keys(token))\n if (key !== 'type' && key !== 'offset')\n delete token[key];\n Object.assign(token, { type: 'block-scalar', indent, props, source: body });\n }\n}\n/** @returns `true` if last token is a newline */\nfunction addEndtoBlockProps(props, end) {\n if (end)\n for (const st of end)\n switch (st.type) {\n case 'space':\n case 'comment':\n props.push(st);\n break;\n case 'newline':\n props.push(st);\n return true;\n }\n return false;\n}\nfunction setFlowScalarValue(token, source, type) {\n switch (token.type) {\n case 'scalar':\n case 'double-quoted-scalar':\n case 'single-quoted-scalar':\n token.type = type;\n token.source = source;\n break;\n case 'block-scalar': {\n const end = token.props.slice(1);\n let oa = source.length;\n if (token.props[0].type === 'block-scalar-header')\n oa -= token.props[0].source.length;\n for (const tok of end)\n tok.offset += oa;\n delete token.props;\n Object.assign(token, { type, source, end });\n break;\n }\n case 'block-map':\n case 'block-seq': {\n const offset = token.offset + source.length;\n const nl = { type: 'newline', offset, indent: token.indent, source: '\\n' };\n delete token.items;\n Object.assign(token, { type, source, end: [nl] });\n break;\n }\n default: {\n const indent = 'indent' in token ? token.indent : -1;\n const end = 'end' in token && Array.isArray(token.end)\n ? token.end.filter(st => st.type === 'space' ||\n st.type === 'comment' ||\n st.type === 'newline')\n : [];\n for (const key of Object.keys(token))\n if (key !== 'type' && key !== 'offset')\n delete token[key];\n Object.assign(token, { type, indent, source, end });\n }\n }\n}\n\nexports.createScalarToken = createScalarToken;\nexports.resolveAsScalar = resolveAsScalar;\nexports.setScalarValue = setScalarValue;\n","'use strict';\n\n/**\n * Stringify a CST document, token, or collection item\n *\n * Fair warning: This applies no validation whatsoever, and\n * simply concatenates the sources in their logical order.\n */\nconst stringify = (cst) => 'type' in cst ? 
stringifyToken(cst) : stringifyItem(cst);\nfunction stringifyToken(token) {\n switch (token.type) {\n case 'block-scalar': {\n let res = '';\n for (const tok of token.props)\n res += stringifyToken(tok);\n return res + token.source;\n }\n case 'block-map':\n case 'block-seq': {\n let res = '';\n for (const item of token.items)\n res += stringifyItem(item);\n return res;\n }\n case 'flow-collection': {\n let res = token.start.source;\n for (const item of token.items)\n res += stringifyItem(item);\n for (const st of token.end)\n res += st.source;\n return res;\n }\n case 'document': {\n let res = stringifyItem(token);\n if (token.end)\n for (const st of token.end)\n res += st.source;\n return res;\n }\n default: {\n let res = token.source;\n if ('end' in token && token.end)\n for (const st of token.end)\n res += st.source;\n return res;\n }\n }\n}\nfunction stringifyItem({ start, key, sep, value }) {\n let res = '';\n for (const st of start)\n res += st.source;\n if (key)\n res += stringifyToken(key);\n if (sep)\n for (const st of sep)\n res += st.source;\n if (value)\n res += stringifyToken(value);\n return res;\n}\n\nexports.stringify = stringify;\n","'use strict';\n\nconst BREAK = Symbol('break visit');\nconst SKIP = Symbol('skip children');\nconst REMOVE = Symbol('remove item');\n/**\n * Apply a visitor to a CST document or item.\n *\n * Walks through the tree (depth-first) starting from the root, calling a\n * `visitor` function with two arguments when entering each item:\n * - `item`: The current item, which included the following members:\n * - `start: SourceToken[]` – Source tokens before the key or value,\n * possibly including its anchor or tag.\n * - `key?: Token | null` – Set for pair values. May then be `null`, if\n * the key before the `:` separator is empty.\n * - `sep?: SourceToken[]` – Source tokens between the key and the value,\n * which should include the `:` map value indicator if `value` is set.\n * - `value?: Token` – The value of a sequence item, or of a map pair.\n * - `path`: The steps from the root to the current node, as an array of\n * `['key' | 'value', number]` tuples.\n *\n * The return value of the visitor may be used to control the traversal:\n * - `undefined` (default): Do nothing and continue\n * - `visit.SKIP`: Do not visit the children of this token, continue with\n * next sibling\n * - `visit.BREAK`: Terminate traversal completely\n * - `visit.REMOVE`: Remove the current item, then continue with the next one\n * - `number`: Set the index of the next step. This is useful especially if\n * the index of the current token has changed.\n * - `function`: Define the next visitor for this item. 
After the original\n * visitor is called on item entry, next visitors are called after handling\n * a non-empty `key` and when exiting the item.\n */\nfunction visit(cst, visitor) {\n if ('type' in cst && cst.type === 'document')\n cst = { start: cst.start, value: cst.value };\n _visit(Object.freeze([]), cst, visitor);\n}\n// Without the `as symbol` casts, TS declares these in the `visit`\n// namespace using `var`, but then complains about that because\n// `unique symbol` must be `const`.\n/** Terminate visit traversal completely */\nvisit.BREAK = BREAK;\n/** Do not visit the children of the current item */\nvisit.SKIP = SKIP;\n/** Remove the current item */\nvisit.REMOVE = REMOVE;\n/** Find the item at `path` from `cst` as the root */\nvisit.itemAtPath = (cst, path) => {\n let item = cst;\n for (const [field, index] of path) {\n const tok = item?.[field];\n if (tok && 'items' in tok) {\n item = tok.items[index];\n }\n else\n return undefined;\n }\n return item;\n};\n/**\n * Get the immediate parent collection of the item at `path` from `cst` as the root.\n *\n * Throws an error if the collection is not found, which should never happen if the item itself exists.\n */\nvisit.parentCollection = (cst, path) => {\n const parent = visit.itemAtPath(cst, path.slice(0, -1));\n const field = path[path.length - 1][0];\n const coll = parent?.[field];\n if (coll && 'items' in coll)\n return coll;\n throw new Error('Parent collection not found');\n};\nfunction _visit(path, item, visitor) {\n let ctrl = visitor(item, path);\n if (typeof ctrl === 'symbol')\n return ctrl;\n for (const field of ['key', 'value']) {\n const token = item[field];\n if (token && 'items' in token) {\n for (let i = 0; i < token.items.length; ++i) {\n const ci = _visit(Object.freeze(path.concat([[field, i]])), token.items[i], visitor);\n if (typeof ci === 'number')\n i = ci - 1;\n else if (ci === BREAK)\n return BREAK;\n else if (ci === REMOVE) {\n token.items.splice(i, 1);\n i -= 1;\n }\n }\n if (typeof ctrl === 'function' && field === 'key')\n ctrl = ctrl(item, path);\n }\n }\n return typeof ctrl === 'function' ? ctrl(item, path) : ctrl;\n}\n\nexports.visit = visit;\n","'use strict';\n\nvar cstScalar = require('./cst-scalar.js');\nvar cstStringify = require('./cst-stringify.js');\nvar cstVisit = require('./cst-visit.js');\n\n/** The byte order mark */\nconst BOM = '\\u{FEFF}';\n/** Start of doc-mode */\nconst DOCUMENT = '\\x02'; // C0: Start of Text\n/** Unexpected end of flow-mode */\nconst FLOW_END = '\\x18'; // C0: Cancel\n/** Next token is a scalar value */\nconst SCALAR = '\\x1f'; // C0: Unit Separator\n/** @returns `true` if `token` is a flow or block collection */\nconst isCollection = (token) => !!token && 'items' in token;\n/** @returns `true` if `token` is a flow or block scalar; not an alias */\nconst isScalar = (token) => !!token &&\n (token.type === 'scalar' ||\n token.type === 'single-quoted-scalar' ||\n token.type === 'double-quoted-scalar' ||\n token.type === 'block-scalar');\n/* istanbul ignore next */\n/** Get a printable representation of a lexer token */\nfunction prettyToken(token) {\n switch (token) {\n case BOM:\n return '';\n case DOCUMENT:\n return '';\n case FLOW_END:\n return '';\n case SCALAR:\n return '';\n default:\n return JSON.stringify(token);\n }\n}\n/** Identify the type of a lexer token. May return `null` for unknown tokens. 
*/\nfunction tokenType(source) {\n switch (source) {\n case BOM:\n return 'byte-order-mark';\n case DOCUMENT:\n return 'doc-mode';\n case FLOW_END:\n return 'flow-error-end';\n case SCALAR:\n return 'scalar';\n case '---':\n return 'doc-start';\n case '...':\n return 'doc-end';\n case '':\n case '\\n':\n case '\\r\\n':\n return 'newline';\n case '-':\n return 'seq-item-ind';\n case '?':\n return 'explicit-key-ind';\n case ':':\n return 'map-value-ind';\n case '{':\n return 'flow-map-start';\n case '}':\n return 'flow-map-end';\n case '[':\n return 'flow-seq-start';\n case ']':\n return 'flow-seq-end';\n case ',':\n return 'comma';\n }\n switch (source[0]) {\n case ' ':\n case '\\t':\n return 'space';\n case '#':\n return 'comment';\n case '%':\n return 'directive-line';\n case '*':\n return 'alias';\n case '&':\n return 'anchor';\n case '!':\n return 'tag';\n case \"'\":\n return 'single-quoted-scalar';\n case '\"':\n return 'double-quoted-scalar';\n case '|':\n case '>':\n return 'block-scalar-header';\n }\n return null;\n}\n\nexports.createScalarToken = cstScalar.createScalarToken;\nexports.resolveAsScalar = cstScalar.resolveAsScalar;\nexports.setScalarValue = cstScalar.setScalarValue;\nexports.stringify = cstStringify.stringify;\nexports.visit = cstVisit.visit;\nexports.BOM = BOM;\nexports.DOCUMENT = DOCUMENT;\nexports.FLOW_END = FLOW_END;\nexports.SCALAR = SCALAR;\nexports.isCollection = isCollection;\nexports.isScalar = isScalar;\nexports.prettyToken = prettyToken;\nexports.tokenType = tokenType;\n","'use strict';\n\nvar cst = require('./cst.js');\n\n/*\nSTART -> stream\n\nstream\n directive -> line-end -> stream\n indent + line-end -> stream\n [else] -> line-start\n\nline-end\n comment -> line-end\n newline -> .\n input-end -> END\n\nline-start\n doc-start -> doc\n doc-end -> stream\n [else] -> indent -> block-start\n\nblock-start\n seq-item-start -> block-start\n explicit-key-start -> block-start\n map-value-start -> block-start\n [else] -> doc\n\ndoc\n line-end -> line-start\n spaces -> doc\n anchor -> doc\n tag -> doc\n flow-start -> flow -> doc\n flow-end -> error -> doc\n seq-item-start -> error -> doc\n explicit-key-start -> error -> doc\n map-value-start -> doc\n alias -> doc\n quote-start -> quoted-scalar -> doc\n block-scalar-header -> line-end -> block-scalar(min) -> line-start\n [else] -> plain-scalar(false, min) -> doc\n\nflow\n line-end -> flow\n spaces -> flow\n anchor -> flow\n tag -> flow\n flow-start -> flow -> flow\n flow-end -> .\n seq-item-start -> error -> flow\n explicit-key-start -> flow\n map-value-start -> flow\n alias -> flow\n quote-start -> quoted-scalar -> flow\n comma -> flow\n [else] -> plain-scalar(true, 0) -> flow\n\nquoted-scalar\n quote-end -> .\n [else] -> quoted-scalar\n\nblock-scalar(min)\n newline + peek(indent < min) -> .\n [else] -> block-scalar(min)\n\nplain-scalar(is-flow, min)\n scalar-end(is-flow) -> .\n peek(newline + (indent < min)) -> .\n [else] -> plain-scalar(min)\n*/\nfunction isEmpty(ch) {\n switch (ch) {\n case undefined:\n case ' ':\n case '\\n':\n case '\\r':\n case '\\t':\n return true;\n default:\n return false;\n }\n}\nconst hexDigits = '0123456789ABCDEFabcdef'.split('');\nconst tagChars = \"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-#;/?:@&=+$_.!~*'()\".split('');\nconst invalidFlowScalarChars = ',[]{}'.split('');\nconst invalidAnchorChars = ' ,[]{}\\n\\r\\t'.split('');\nconst isNotAnchorChar = (ch) => !ch || invalidAnchorChars.includes(ch);\n/**\n * Splits an input string into lexical tokens, i.e. 
smaller strings that are\n * easily identifiable by `tokens.tokenType()`.\n *\n * Lexing starts always in a \"stream\" context. Incomplete input may be buffered\n * until a complete token can be emitted.\n *\n * In addition to slices of the original input, the following control characters\n * may also be emitted:\n *\n * - `\\x02` (Start of Text): A document starts with the next token\n * - `\\x18` (Cancel): Unexpected end of flow-mode (indicates an error)\n * - `\\x1f` (Unit Separator): Next token is a scalar value\n * - `\\u{FEFF}` (Byte order mark): Emitted separately outside documents\n */\nclass Lexer {\n constructor() {\n /**\n * Flag indicating whether the end of the current buffer marks the end of\n * all input\n */\n this.atEnd = false;\n /**\n * Explicit indent set in block scalar header, as an offset from the current\n * minimum indent, so e.g. set to 1 from a header `|2+`. Set to -1 if not\n * explicitly set.\n */\n this.blockScalarIndent = -1;\n /**\n * Block scalars that include a + (keep) chomping indicator in their header\n * include trailing empty lines, which are otherwise excluded from the\n * scalar's contents.\n */\n this.blockScalarKeep = false;\n /** Current input */\n this.buffer = '';\n /**\n * Flag noting whether the map value indicator : can immediately follow this\n * node within a flow context.\n */\n this.flowKey = false;\n /** Count of surrounding flow collection levels. */\n this.flowLevel = 0;\n /**\n * Minimum level of indentation required for next lines to be parsed as a\n * part of the current scalar value.\n */\n this.indentNext = 0;\n /** Indentation level of the current line. */\n this.indentValue = 0;\n /** Position of the next \\n character. */\n this.lineEndPos = null;\n /** Stores the state of the lexer if reaching the end of incpomplete input */\n this.next = null;\n /** A pointer to `buffer`; the current position of the lexer. */\n this.pos = 0;\n }\n /**\n * Generate YAML tokens from the `source` string. If `incomplete`,\n * a part of the last line may be left as a buffer for the next call.\n *\n * @returns A generator of lexical tokens\n */\n *lex(source, incomplete = false) {\n if (source) {\n this.buffer = this.buffer ? this.buffer + source : source;\n this.lineEndPos = null;\n }\n this.atEnd = !incomplete;\n let next = this.next ?? 'stream';\n while (next && (incomplete || this.hasChars(1)))\n next = yield* this.parseNext(next);\n }\n atLineEnd() {\n let i = this.pos;\n let ch = this.buffer[i];\n while (ch === ' ' || ch === '\\t')\n ch = this.buffer[++i];\n if (!ch || ch === '#' || ch === '\\n')\n return true;\n if (ch === '\\r')\n return this.buffer[i + 1] === '\\n';\n return false;\n }\n charAt(n) {\n return this.buffer[this.pos + n];\n }\n continueScalar(offset) {\n let ch = this.buffer[offset];\n if (this.indentNext > 0) {\n let indent = 0;\n while (ch === ' ')\n ch = this.buffer[++indent + offset];\n if (ch === '\\r') {\n const next = this.buffer[indent + offset + 1];\n if (next === '\\n' || (!next && !this.atEnd))\n return offset + indent + 1;\n }\n return ch === '\\n' || indent >= this.indentNext || (!ch && !this.atEnd)\n ? 
offset + indent\n : -1;\n }\n if (ch === '-' || ch === '.') {\n const dt = this.buffer.substr(offset, 3);\n if ((dt === '---' || dt === '...') && isEmpty(this.buffer[offset + 3]))\n return -1;\n }\n return offset;\n }\n getLine() {\n let end = this.lineEndPos;\n if (typeof end !== 'number' || (end !== -1 && end < this.pos)) {\n end = this.buffer.indexOf('\\n', this.pos);\n this.lineEndPos = end;\n }\n if (end === -1)\n return this.atEnd ? this.buffer.substring(this.pos) : null;\n if (this.buffer[end - 1] === '\\r')\n end -= 1;\n return this.buffer.substring(this.pos, end);\n }\n hasChars(n) {\n return this.pos + n <= this.buffer.length;\n }\n setNext(state) {\n this.buffer = this.buffer.substring(this.pos);\n this.pos = 0;\n this.lineEndPos = null;\n this.next = state;\n return null;\n }\n peek(n) {\n return this.buffer.substr(this.pos, n);\n }\n *parseNext(next) {\n switch (next) {\n case 'stream':\n return yield* this.parseStream();\n case 'line-start':\n return yield* this.parseLineStart();\n case 'block-start':\n return yield* this.parseBlockStart();\n case 'doc':\n return yield* this.parseDocument();\n case 'flow':\n return yield* this.parseFlowCollection();\n case 'quoted-scalar':\n return yield* this.parseQuotedScalar();\n case 'block-scalar':\n return yield* this.parseBlockScalar();\n case 'plain-scalar':\n return yield* this.parsePlainScalar();\n }\n }\n *parseStream() {\n let line = this.getLine();\n if (line === null)\n return this.setNext('stream');\n if (line[0] === cst.BOM) {\n yield* this.pushCount(1);\n line = line.substring(1);\n }\n if (line[0] === '%') {\n let dirEnd = line.length;\n const cs = line.indexOf('#');\n if (cs !== -1) {\n const ch = line[cs - 1];\n if (ch === ' ' || ch === '\\t')\n dirEnd = cs - 1;\n }\n while (true) {\n const ch = line[dirEnd - 1];\n if (ch === ' ' || ch === '\\t')\n dirEnd -= 1;\n else\n break;\n }\n const n = (yield* this.pushCount(dirEnd)) + (yield* this.pushSpaces(true));\n yield* this.pushCount(line.length - n); // possible comment\n this.pushNewline();\n return 'stream';\n }\n if (this.atLineEnd()) {\n const sp = yield* this.pushSpaces(true);\n yield* this.pushCount(line.length - sp);\n yield* this.pushNewline();\n return 'stream';\n }\n yield cst.DOCUMENT;\n return yield* this.parseLineStart();\n }\n *parseLineStart() {\n const ch = this.charAt(0);\n if (!ch && !this.atEnd)\n return this.setNext('line-start');\n if (ch === '-' || ch === '.') {\n if (!this.atEnd && !this.hasChars(4))\n return this.setNext('line-start');\n const s = this.peek(3);\n if (s === '---' && isEmpty(this.charAt(3))) {\n yield* this.pushCount(3);\n this.indentValue = 0;\n this.indentNext = 0;\n return 'doc';\n }\n else if (s === '...' && isEmpty(this.charAt(3))) {\n yield* this.pushCount(3);\n return 'stream';\n }\n }\n this.indentValue = yield* this.pushSpaces(false);\n if (this.indentNext > this.indentValue && !isEmpty(this.charAt(1)))\n this.indentNext = this.indentValue;\n return yield* this.parseBlockStart();\n }\n *parseBlockStart() {\n const [ch0, ch1] = this.peek(2);\n if (!ch1 && !this.atEnd)\n return this.setNext('block-start');\n if ((ch0 === '-' || ch0 === '?' 
|| ch0 === ':') && isEmpty(ch1)) {\n const n = (yield* this.pushCount(1)) + (yield* this.pushSpaces(true));\n this.indentNext = this.indentValue + 1;\n this.indentValue += n;\n return yield* this.parseBlockStart();\n }\n return 'doc';\n }\n *parseDocument() {\n yield* this.pushSpaces(true);\n const line = this.getLine();\n if (line === null)\n return this.setNext('doc');\n let n = yield* this.pushIndicators();\n switch (line[n]) {\n case '#':\n yield* this.pushCount(line.length - n);\n // fallthrough\n case undefined:\n yield* this.pushNewline();\n return yield* this.parseLineStart();\n case '{':\n case '[':\n yield* this.pushCount(1);\n this.flowKey = false;\n this.flowLevel = 1;\n return 'flow';\n case '}':\n case ']':\n // this is an error\n yield* this.pushCount(1);\n return 'doc';\n case '*':\n yield* this.pushUntil(isNotAnchorChar);\n return 'doc';\n case '\"':\n case \"'\":\n return yield* this.parseQuotedScalar();\n case '|':\n case '>':\n n += yield* this.parseBlockScalarHeader();\n n += yield* this.pushSpaces(true);\n yield* this.pushCount(line.length - n);\n yield* this.pushNewline();\n return yield* this.parseBlockScalar();\n default:\n return yield* this.parsePlainScalar();\n }\n }\n *parseFlowCollection() {\n let nl, sp;\n let indent = -1;\n do {\n nl = yield* this.pushNewline();\n if (nl > 0) {\n sp = yield* this.pushSpaces(false);\n this.indentValue = indent = sp;\n }\n else {\n sp = 0;\n }\n sp += yield* this.pushSpaces(true);\n } while (nl + sp > 0);\n const line = this.getLine();\n if (line === null)\n return this.setNext('flow');\n if ((indent !== -1 && indent < this.indentNext && line[0] !== '#') ||\n (indent === 0 &&\n (line.startsWith('---') || line.startsWith('...')) &&\n isEmpty(line[3]))) {\n // Allowing for the terminal ] or } at the same (rather than greater)\n // indent level as the initial [ or { is technically invalid, but\n // failing here would be surprising to users.\n const atFlowEndMarker = indent === this.indentNext - 1 &&\n this.flowLevel === 1 &&\n (line[0] === ']' || line[0] === '}');\n if (!atFlowEndMarker) {\n // this is an error\n this.flowLevel = 0;\n yield cst.FLOW_END;\n return yield* this.parseLineStart();\n }\n }\n let n = 0;\n while (line[n] === ',') {\n n += yield* this.pushCount(1);\n n += yield* this.pushSpaces(true);\n this.flowKey = false;\n }\n n += yield* this.pushIndicators();\n switch (line[n]) {\n case undefined:\n return 'flow';\n case '#':\n yield* this.pushCount(line.length - n);\n return 'flow';\n case '{':\n case '[':\n yield* this.pushCount(1);\n this.flowKey = false;\n this.flowLevel += 1;\n return 'flow';\n case '}':\n case ']':\n yield* this.pushCount(1);\n this.flowKey = true;\n this.flowLevel -= 1;\n return this.flowLevel ? 
'flow' : 'doc';\n case '*':\n yield* this.pushUntil(isNotAnchorChar);\n return 'flow';\n case '\"':\n case \"'\":\n this.flowKey = true;\n return yield* this.parseQuotedScalar();\n case ':': {\n const next = this.charAt(1);\n if (this.flowKey || isEmpty(next) || next === ',') {\n this.flowKey = false;\n yield* this.pushCount(1);\n yield* this.pushSpaces(true);\n return 'flow';\n }\n }\n // fallthrough\n default:\n this.flowKey = false;\n return yield* this.parsePlainScalar();\n }\n }\n *parseQuotedScalar() {\n const quote = this.charAt(0);\n let end = this.buffer.indexOf(quote, this.pos + 1);\n if (quote === \"'\") {\n while (end !== -1 && this.buffer[end + 1] === \"'\")\n end = this.buffer.indexOf(\"'\", end + 2);\n }\n else {\n // double-quote\n while (end !== -1) {\n let n = 0;\n while (this.buffer[end - 1 - n] === '\\\\')\n n += 1;\n if (n % 2 === 0)\n break;\n end = this.buffer.indexOf('\"', end + 1);\n }\n }\n // Only looking for newlines within the quotes\n const qb = this.buffer.substring(0, end);\n let nl = qb.indexOf('\\n', this.pos);\n if (nl !== -1) {\n while (nl !== -1) {\n const cs = this.continueScalar(nl + 1);\n if (cs === -1)\n break;\n nl = qb.indexOf('\\n', cs);\n }\n if (nl !== -1) {\n // this is an error caused by an unexpected unindent\n end = nl - (qb[nl - 1] === '\\r' ? 2 : 1);\n }\n }\n if (end === -1) {\n if (!this.atEnd)\n return this.setNext('quoted-scalar');\n end = this.buffer.length;\n }\n yield* this.pushToIndex(end + 1, false);\n return this.flowLevel ? 'flow' : 'doc';\n }\n *parseBlockScalarHeader() {\n this.blockScalarIndent = -1;\n this.blockScalarKeep = false;\n let i = this.pos;\n while (true) {\n const ch = this.buffer[++i];\n if (ch === '+')\n this.blockScalarKeep = true;\n else if (ch > '0' && ch <= '9')\n this.blockScalarIndent = Number(ch) - 1;\n else if (ch !== '-')\n break;\n }\n return yield* this.pushUntil(ch => isEmpty(ch) || ch === '#');\n }\n *parseBlockScalar() {\n let nl = this.pos - 1; // may be -1 if this.pos === 0\n let indent = 0;\n let ch;\n loop: for (let i = this.pos; (ch = this.buffer[i]); ++i) {\n switch (ch) {\n case ' ':\n indent += 1;\n break;\n case '\\n':\n nl = i;\n indent = 0;\n break;\n case '\\r': {\n const next = this.buffer[i + 1];\n if (!next && !this.atEnd)\n return this.setNext('block-scalar');\n if (next === '\\n')\n break;\n } // fallthrough\n default:\n break loop;\n }\n }\n if (!ch && !this.atEnd)\n return this.setNext('block-scalar');\n if (indent >= this.indentNext) {\n if (this.blockScalarIndent === -1)\n this.indentNext = indent;\n else\n this.indentNext += this.blockScalarIndent;\n do {\n const cs = this.continueScalar(nl + 1);\n if (cs === -1)\n break;\n nl = this.buffer.indexOf('\\n', cs);\n } while (nl !== -1);\n if (nl === -1) {\n if (!this.atEnd)\n return this.setNext('block-scalar');\n nl = this.buffer.length;\n }\n }\n if (!this.blockScalarKeep) {\n do {\n let i = nl - 1;\n let ch = this.buffer[i];\n if (ch === '\\r')\n ch = this.buffer[--i];\n const lastChar = i; // Drop the line if last char not more indented\n while (ch === ' ' || ch === '\\t')\n ch = this.buffer[--i];\n if (ch === '\\n' && i >= this.pos && i + 1 + indent > lastChar)\n nl = i;\n else\n break;\n } while (true);\n }\n yield cst.SCALAR;\n yield* this.pushToIndex(nl + 1, true);\n return yield* this.parseLineStart();\n }\n *parsePlainScalar() {\n const inFlow = this.flowLevel > 0;\n let end = this.pos - 1;\n let i = this.pos - 1;\n let ch;\n while ((ch = this.buffer[++i])) {\n if (ch === ':') {\n const next = this.buffer[i + 1];\n if 
(isEmpty(next) || (inFlow && next === ','))\n break;\n end = i;\n }\n else if (isEmpty(ch)) {\n let next = this.buffer[i + 1];\n if (ch === '\\r') {\n if (next === '\\n') {\n i += 1;\n ch = '\\n';\n next = this.buffer[i + 1];\n }\n else\n end = i;\n }\n if (next === '#' || (inFlow && invalidFlowScalarChars.includes(next)))\n break;\n if (ch === '\\n') {\n const cs = this.continueScalar(i + 1);\n if (cs === -1)\n break;\n i = Math.max(i, cs - 2); // to advance, but still account for ' #'\n }\n }\n else {\n if (inFlow && invalidFlowScalarChars.includes(ch))\n break;\n end = i;\n }\n }\n if (!ch && !this.atEnd)\n return this.setNext('plain-scalar');\n yield cst.SCALAR;\n yield* this.pushToIndex(end + 1, true);\n return inFlow ? 'flow' : 'doc';\n }\n *pushCount(n) {\n if (n > 0) {\n yield this.buffer.substr(this.pos, n);\n this.pos += n;\n return n;\n }\n return 0;\n }\n *pushToIndex(i, allowEmpty) {\n const s = this.buffer.slice(this.pos, i);\n if (s) {\n yield s;\n this.pos += s.length;\n return s.length;\n }\n else if (allowEmpty)\n yield '';\n return 0;\n }\n *pushIndicators() {\n switch (this.charAt(0)) {\n case '!':\n return ((yield* this.pushTag()) +\n (yield* this.pushSpaces(true)) +\n (yield* this.pushIndicators()));\n case '&':\n return ((yield* this.pushUntil(isNotAnchorChar)) +\n (yield* this.pushSpaces(true)) +\n (yield* this.pushIndicators()));\n case '-': // this is an error\n case '?': // this is an error outside flow collections\n case ':': {\n const inFlow = this.flowLevel > 0;\n const ch1 = this.charAt(1);\n if (isEmpty(ch1) || (inFlow && invalidFlowScalarChars.includes(ch1))) {\n if (!inFlow)\n this.indentNext = this.indentValue + 1;\n else if (this.flowKey)\n this.flowKey = false;\n return ((yield* this.pushCount(1)) +\n (yield* this.pushSpaces(true)) +\n (yield* this.pushIndicators()));\n }\n }\n }\n return 0;\n }\n *pushTag() {\n if (this.charAt(1) === '<') {\n let i = this.pos + 2;\n let ch = this.buffer[i];\n while (!isEmpty(ch) && ch !== '>')\n ch = this.buffer[++i];\n return yield* this.pushToIndex(ch === '>' ? i + 1 : i, false);\n }\n else {\n let i = this.pos + 1;\n let ch = this.buffer[i];\n while (ch) {\n if (tagChars.includes(ch))\n ch = this.buffer[++i];\n else if (ch === '%' &&\n hexDigits.includes(this.buffer[i + 1]) &&\n hexDigits.includes(this.buffer[i + 2])) {\n ch = this.buffer[(i += 3)];\n }\n else\n break;\n }\n return yield* this.pushToIndex(i, false);\n }\n }\n *pushNewline() {\n const ch = this.buffer[this.pos];\n if (ch === '\\n')\n return yield* this.pushCount(1);\n else if (ch === '\\r' && this.charAt(1) === '\\n')\n return yield* this.pushCount(2);\n else\n return 0;\n }\n *pushSpaces(allowTabs) {\n let i = this.pos - 1;\n let ch;\n do {\n ch = this.buffer[++i];\n } while (ch === ' ' || (allowTabs && ch === '\\t'));\n const n = i - this.pos;\n if (n > 0) {\n yield this.buffer.substr(this.pos, n);\n this.pos = i;\n }\n return n;\n }\n *pushUntil(test) {\n let i = this.pos;\n let ch = this.buffer[i];\n while (!test(ch))\n ch = this.buffer[++i];\n return yield* this.pushToIndex(i, false);\n }\n}\n\nexports.Lexer = Lexer;\n","'use strict';\n\n/**\n * Tracks newlines during parsing in order to provide an efficient API for\n * determining the one-indexed `{ line, col }` position for any offset\n * within the input.\n */\nclass LineCounter {\n constructor() {\n this.lineStarts = [];\n /**\n * Should be called in ascending order. 
Otherwise, call\n * `lineCounter.lineStarts.sort()` before calling `linePos()`.\n */\n this.addNewLine = (offset) => this.lineStarts.push(offset);\n /**\n * Performs a binary search and returns the 1-indexed { line, col }\n * position of `offset`. If `line === 0`, `addNewLine` has never been\n * called or `offset` is before the first known newline.\n */\n this.linePos = (offset) => {\n let low = 0;\n let high = this.lineStarts.length;\n while (low < high) {\n const mid = (low + high) >> 1; // Math.floor((low + high) / 2)\n if (this.lineStarts[mid] < offset)\n low = mid + 1;\n else\n high = mid;\n }\n if (this.lineStarts[low] === offset)\n return { line: low + 1, col: 1 };\n if (low === 0)\n return { line: 0, col: offset };\n const start = this.lineStarts[low - 1];\n return { line: low, col: offset - start + 1 };\n };\n }\n}\n\nexports.LineCounter = LineCounter;\n","'use strict';\n\nvar cst = require('./cst.js');\nvar lexer = require('./lexer.js');\n\nfunction includesToken(list, type) {\n for (let i = 0; i < list.length; ++i)\n if (list[i].type === type)\n return true;\n return false;\n}\nfunction findNonEmptyIndex(list) {\n for (let i = 0; i < list.length; ++i) {\n switch (list[i].type) {\n case 'space':\n case 'comment':\n case 'newline':\n break;\n default:\n return i;\n }\n }\n return -1;\n}\nfunction isFlowToken(token) {\n switch (token?.type) {\n case 'alias':\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar':\n case 'flow-collection':\n return true;\n default:\n return false;\n }\n}\nfunction getPrevProps(parent) {\n switch (parent.type) {\n case 'document':\n return parent.start;\n case 'block-map': {\n const it = parent.items[parent.items.length - 1];\n return it.sep ?? it.start;\n }\n case 'block-seq':\n return parent.items[parent.items.length - 1].start;\n /* istanbul ignore next should not happen */\n default:\n return [];\n }\n}\n/** Note: May modify input array */\nfunction getFirstKeyStartProps(prev) {\n if (prev.length === 0)\n return [];\n let i = prev.length;\n loop: while (--i >= 0) {\n switch (prev[i].type) {\n case 'doc-start':\n case 'explicit-key-ind':\n case 'map-value-ind':\n case 'seq-item-ind':\n case 'newline':\n break loop;\n }\n }\n while (prev[++i]?.type === 'space') {\n /* loop */\n }\n return prev.splice(i, prev.length);\n}\nfunction fixFlowSeqItems(fc) {\n if (fc.start.type === 'flow-seq-start') {\n for (const it of fc.items) {\n if (it.sep &&\n !it.value &&\n !includesToken(it.start, 'explicit-key-ind') &&\n !includesToken(it.sep, 'map-value-ind')) {\n if (it.key)\n it.value = it.key;\n delete it.key;\n if (isFlowToken(it.value)) {\n if (it.value.end)\n Array.prototype.push.apply(it.value.end, it.sep);\n else\n it.value.end = it.sep;\n }\n else\n Array.prototype.push.apply(it.start, it.sep);\n delete it.sep;\n }\n }\n }\n}\n/**\n * A YAML concrete syntax tree (CST) parser\n *\n * ```ts\n * const src: string = ...\n * for (const token of new Parser().parse(src)) {\n * // token: Token\n * }\n * ```\n *\n * To use the parser with a user-provided lexer:\n *\n * ```ts\n * function* parse(source: string, lexer: Lexer) {\n * const parser = new Parser()\n * for (const lexeme of lexer.lex(source))\n * yield* parser.next(lexeme)\n * yield* parser.end()\n * }\n *\n * const src: string = ...\n * const lexer = new Lexer()\n * for (const token of parse(src, lexer)) {\n * // token: Token\n * }\n * ```\n */\nclass Parser {\n /**\n * @param onNewLine - If defined, called separately with the start position of\n * each new line (in `parse()`, 
including the start of input).\n */\n constructor(onNewLine) {\n /** If true, space and sequence indicators count as indentation */\n this.atNewLine = true;\n /** If true, next token is a scalar value */\n this.atScalar = false;\n /** Current indentation level */\n this.indent = 0;\n /** Current offset since the start of parsing */\n this.offset = 0;\n /** On the same line with a block map key */\n this.onKeyLine = false;\n /** Top indicates the node that's currently being built */\n this.stack = [];\n /** The source of the current token, set in parse() */\n this.source = '';\n /** The type of the current token, set in parse() */\n this.type = '';\n // Must be defined after `next()`\n this.lexer = new lexer.Lexer();\n this.onNewLine = onNewLine;\n }\n /**\n * Parse `source` as a YAML stream.\n * If `incomplete`, a part of the last line may be left as a buffer for the next call.\n *\n * Errors are not thrown, but yielded as `{ type: 'error', message }` tokens.\n *\n * @returns A generator of tokens representing each directive, document, and other structure.\n */\n *parse(source, incomplete = false) {\n if (this.onNewLine && this.offset === 0)\n this.onNewLine(0);\n for (const lexeme of this.lexer.lex(source, incomplete))\n yield* this.next(lexeme);\n if (!incomplete)\n yield* this.end();\n }\n /**\n * Advance the parser by the `source` of one lexical token.\n */\n *next(source) {\n this.source = source;\n if (process.env.LOG_TOKENS)\n console.log('|', cst.prettyToken(source));\n if (this.atScalar) {\n this.atScalar = false;\n yield* this.step();\n this.offset += source.length;\n return;\n }\n const type = cst.tokenType(source);\n if (!type) {\n const message = `Not a YAML token: ${source}`;\n yield* this.pop({ type: 'error', offset: this.offset, message, source });\n this.offset += source.length;\n }\n else if (type === 'scalar') {\n this.atNewLine = false;\n this.atScalar = true;\n this.type = 'scalar';\n }\n else {\n this.type = type;\n yield* this.step();\n switch (type) {\n case 'newline':\n this.atNewLine = true;\n this.indent = 0;\n if (this.onNewLine)\n this.onNewLine(this.offset + source.length);\n break;\n case 'space':\n if (this.atNewLine && source[0] === ' ')\n this.indent += source.length;\n break;\n case 'explicit-key-ind':\n case 'map-value-ind':\n case 'seq-item-ind':\n if (this.atNewLine)\n this.indent += source.length;\n break;\n case 'doc-mode':\n case 'flow-error-end':\n return;\n default:\n this.atNewLine = false;\n }\n this.offset += source.length;\n }\n }\n /** Call at end of input to push out any remaining constructions */\n *end() {\n while (this.stack.length > 0)\n yield* this.pop();\n }\n get sourceToken() {\n const st = {\n type: this.type,\n offset: this.offset,\n indent: this.indent,\n source: this.source\n };\n return st;\n }\n *step() {\n const top = this.peek(1);\n if (this.type === 'doc-end' && (!top || top.type !== 'doc-end')) {\n while (this.stack.length > 0)\n yield* this.pop();\n this.stack.push({\n type: 'doc-end',\n offset: this.offset,\n source: this.source\n });\n return;\n }\n if (!top)\n return yield* this.stream();\n switch (top.type) {\n case 'document':\n return yield* this.document(top);\n case 'alias':\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar':\n return yield* this.scalar(top);\n case 'block-scalar':\n return yield* this.blockScalar(top);\n case 'block-map':\n return yield* this.blockMap(top);\n case 'block-seq':\n return yield* this.blockSequence(top);\n case 'flow-collection':\n return yield* 
this.flowCollection(top);\n case 'doc-end':\n return yield* this.documentEnd(top);\n }\n /* istanbul ignore next should not happen */\n yield* this.pop();\n }\n peek(n) {\n return this.stack[this.stack.length - n];\n }\n *pop(error) {\n const token = error ?? this.stack.pop();\n /* istanbul ignore if should not happen */\n if (!token) {\n const message = 'Tried to pop an empty stack';\n yield { type: 'error', offset: this.offset, source: '', message };\n }\n else if (this.stack.length === 0) {\n yield token;\n }\n else {\n const top = this.peek(1);\n if (token.type === 'block-scalar') {\n // Block scalars use their parent rather than header indent\n token.indent = 'indent' in top ? top.indent : 0;\n }\n else if (token.type === 'flow-collection' && top.type === 'document') {\n // Ignore all indent for top-level flow collections\n token.indent = 0;\n }\n if (token.type === 'flow-collection')\n fixFlowSeqItems(token);\n switch (top.type) {\n case 'document':\n top.value = token;\n break;\n case 'block-scalar':\n top.props.push(token); // error\n break;\n case 'block-map': {\n const it = top.items[top.items.length - 1];\n if (it.value) {\n top.items.push({ start: [], key: token, sep: [] });\n this.onKeyLine = true;\n return;\n }\n else if (it.sep) {\n it.value = token;\n }\n else {\n Object.assign(it, { key: token, sep: [] });\n this.onKeyLine = !includesToken(it.start, 'explicit-key-ind');\n return;\n }\n break;\n }\n case 'block-seq': {\n const it = top.items[top.items.length - 1];\n if (it.value)\n top.items.push({ start: [], value: token });\n else\n it.value = token;\n break;\n }\n case 'flow-collection': {\n const it = top.items[top.items.length - 1];\n if (!it || it.value)\n top.items.push({ start: [], key: token, sep: [] });\n else if (it.sep)\n it.value = token;\n else\n Object.assign(it, { key: token, sep: [] });\n return;\n }\n /* istanbul ignore next should not happen */\n default:\n yield* this.pop();\n yield* this.pop(token);\n }\n if ((top.type === 'document' ||\n top.type === 'block-map' ||\n top.type === 'block-seq') &&\n (token.type === 'block-map' || token.type === 'block-seq')) {\n const last = token.items[token.items.length - 1];\n if (last &&\n !last.sep &&\n !last.value &&\n last.start.length > 0 &&\n findNonEmptyIndex(last.start) === -1 &&\n (token.indent === 0 ||\n last.start.every(st => st.type !== 'comment' || st.indent < token.indent))) {\n if (top.type === 'document')\n top.end = last.start;\n else\n top.items.push({ start: last.start });\n token.items.splice(-1, 1);\n }\n }\n }\n }\n *stream() {\n switch (this.type) {\n case 'directive-line':\n yield { type: 'directive', offset: this.offset, source: this.source };\n return;\n case 'byte-order-mark':\n case 'space':\n case 'comment':\n case 'newline':\n yield this.sourceToken;\n return;\n case 'doc-mode':\n case 'doc-start': {\n const doc = {\n type: 'document',\n offset: this.offset,\n start: []\n };\n if (this.type === 'doc-start')\n doc.start.push(this.sourceToken);\n this.stack.push(doc);\n return;\n }\n }\n yield {\n type: 'error',\n offset: this.offset,\n message: `Unexpected ${this.type} token in YAML stream`,\n source: this.source\n };\n }\n *document(doc) {\n if (doc.value)\n return yield* this.lineEnd(doc);\n switch (this.type) {\n case 'doc-start': {\n if (findNonEmptyIndex(doc.start) !== -1) {\n yield* this.pop();\n yield* this.step();\n }\n else\n doc.start.push(this.sourceToken);\n return;\n }\n case 'anchor':\n case 'tag':\n case 'space':\n case 'comment':\n case 'newline':\n 
doc.start.push(this.sourceToken);\n return;\n }\n const bv = this.startBlockValue(doc);\n if (bv)\n this.stack.push(bv);\n else {\n yield {\n type: 'error',\n offset: this.offset,\n message: `Unexpected ${this.type} token in YAML document`,\n source: this.source\n };\n }\n }\n *scalar(scalar) {\n if (this.type === 'map-value-ind') {\n const prev = getPrevProps(this.peek(2));\n const start = getFirstKeyStartProps(prev);\n let sep;\n if (scalar.end) {\n sep = scalar.end;\n sep.push(this.sourceToken);\n delete scalar.end;\n }\n else\n sep = [this.sourceToken];\n const map = {\n type: 'block-map',\n offset: scalar.offset,\n indent: scalar.indent,\n items: [{ start, key: scalar, sep }]\n };\n this.onKeyLine = true;\n this.stack[this.stack.length - 1] = map;\n }\n else\n yield* this.lineEnd(scalar);\n }\n *blockScalar(scalar) {\n switch (this.type) {\n case 'space':\n case 'comment':\n case 'newline':\n scalar.props.push(this.sourceToken);\n return;\n case 'scalar':\n scalar.source = this.source;\n // block-scalar source includes trailing newline\n this.atNewLine = true;\n this.indent = 0;\n if (this.onNewLine) {\n let nl = this.source.indexOf('\\n') + 1;\n while (nl !== 0) {\n this.onNewLine(this.offset + nl);\n nl = this.source.indexOf('\\n', nl) + 1;\n }\n }\n yield* this.pop();\n break;\n /* istanbul ignore next should not happen */\n default:\n yield* this.pop();\n yield* this.step();\n }\n }\n *blockMap(map) {\n const it = map.items[map.items.length - 1];\n // it.sep is true-ish if pair already has key or : separator\n switch (this.type) {\n case 'newline':\n this.onKeyLine = false;\n if (it.value) {\n const end = 'end' in it.value ? it.value.end : undefined;\n const last = Array.isArray(end) ? end[end.length - 1] : undefined;\n if (last?.type === 'comment')\n end?.push(this.sourceToken);\n else\n map.items.push({ start: [this.sourceToken] });\n }\n else if (it.sep) {\n it.sep.push(this.sourceToken);\n }\n else {\n it.start.push(this.sourceToken);\n }\n return;\n case 'space':\n case 'comment':\n if (it.value) {\n map.items.push({ start: [this.sourceToken] });\n }\n else if (it.sep) {\n it.sep.push(this.sourceToken);\n }\n else {\n if (this.atIndentedComment(it.start, map.indent)) {\n const prev = map.items[map.items.length - 2];\n const end = prev?.value?.end;\n if (Array.isArray(end)) {\n Array.prototype.push.apply(end, it.start);\n end.push(this.sourceToken);\n map.items.pop();\n return;\n }\n }\n it.start.push(this.sourceToken);\n }\n return;\n }\n if (this.indent >= map.indent) {\n const atNextItem = !this.onKeyLine && this.indent === map.indent && it.sep;\n // For empty nodes, assign newline-separated not indented empty tokens to following node\n let start = [];\n if (atNextItem && it.sep && !it.value) {\n const nl = [];\n for (let i = 0; i < it.sep.length; ++i) {\n const st = it.sep[i];\n switch (st.type) {\n case 'newline':\n nl.push(i);\n break;\n case 'space':\n break;\n case 'comment':\n if (st.indent > map.indent)\n nl.length = 0;\n break;\n default:\n nl.length = 0;\n }\n }\n if (nl.length >= 2)\n start = it.sep.splice(nl[1]);\n }\n switch (this.type) {\n case 'anchor':\n case 'tag':\n if (atNextItem || it.value) {\n start.push(this.sourceToken);\n map.items.push({ start });\n this.onKeyLine = true;\n }\n else if (it.sep) {\n it.sep.push(this.sourceToken);\n }\n else {\n it.start.push(this.sourceToken);\n }\n return;\n case 'explicit-key-ind':\n if (!it.sep && !includesToken(it.start, 'explicit-key-ind')) {\n it.start.push(this.sourceToken);\n }\n else if (atNextItem || 
it.value) {\n start.push(this.sourceToken);\n map.items.push({ start });\n }\n else {\n this.stack.push({\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start: [this.sourceToken] }]\n });\n }\n this.onKeyLine = true;\n return;\n case 'map-value-ind':\n if (includesToken(it.start, 'explicit-key-ind')) {\n if (!it.sep) {\n if (includesToken(it.start, 'newline')) {\n Object.assign(it, { key: null, sep: [this.sourceToken] });\n }\n else {\n const start = getFirstKeyStartProps(it.start);\n this.stack.push({\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start, key: null, sep: [this.sourceToken] }]\n });\n }\n }\n else if (it.value) {\n map.items.push({ start: [], key: null, sep: [this.sourceToken] });\n }\n else if (includesToken(it.sep, 'map-value-ind')) {\n this.stack.push({\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start, key: null, sep: [this.sourceToken] }]\n });\n }\n else if (isFlowToken(it.key) &&\n !includesToken(it.sep, 'newline')) {\n const start = getFirstKeyStartProps(it.start);\n const key = it.key;\n const sep = it.sep;\n sep.push(this.sourceToken);\n // @ts-expect-error type guard is wrong here\n delete it.key, delete it.sep;\n this.stack.push({\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start, key, sep }]\n });\n }\n else if (start.length > 0) {\n // Not actually at next item\n it.sep = it.sep.concat(start, this.sourceToken);\n }\n else {\n it.sep.push(this.sourceToken);\n }\n }\n else {\n if (!it.sep) {\n Object.assign(it, { key: null, sep: [this.sourceToken] });\n }\n else if (it.value || atNextItem) {\n map.items.push({ start, key: null, sep: [this.sourceToken] });\n }\n else if (includesToken(it.sep, 'map-value-ind')) {\n this.stack.push({\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start: [], key: null, sep: [this.sourceToken] }]\n });\n }\n else {\n it.sep.push(this.sourceToken);\n }\n }\n this.onKeyLine = true;\n return;\n case 'alias':\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar': {\n const fs = this.flowScalar(this.type);\n if (atNextItem || it.value) {\n map.items.push({ start, key: fs, sep: [] });\n this.onKeyLine = true;\n }\n else if (it.sep) {\n this.stack.push(fs);\n }\n else {\n Object.assign(it, { key: fs, sep: [] });\n this.onKeyLine = true;\n }\n return;\n }\n default: {\n const bv = this.startBlockValue(map);\n if (bv) {\n if (atNextItem &&\n bv.type !== 'block-seq' &&\n includesToken(it.start, 'explicit-key-ind')) {\n map.items.push({ start });\n }\n this.stack.push(bv);\n return;\n }\n }\n }\n }\n yield* this.pop();\n yield* this.step();\n }\n *blockSequence(seq) {\n const it = seq.items[seq.items.length - 1];\n switch (this.type) {\n case 'newline':\n if (it.value) {\n const end = 'end' in it.value ? it.value.end : undefined;\n const last = Array.isArray(end) ? 
end[end.length - 1] : undefined;\n if (last?.type === 'comment')\n end?.push(this.sourceToken);\n else\n seq.items.push({ start: [this.sourceToken] });\n }\n else\n it.start.push(this.sourceToken);\n return;\n case 'space':\n case 'comment':\n if (it.value)\n seq.items.push({ start: [this.sourceToken] });\n else {\n if (this.atIndentedComment(it.start, seq.indent)) {\n const prev = seq.items[seq.items.length - 2];\n const end = prev?.value?.end;\n if (Array.isArray(end)) {\n Array.prototype.push.apply(end, it.start);\n end.push(this.sourceToken);\n seq.items.pop();\n return;\n }\n }\n it.start.push(this.sourceToken);\n }\n return;\n case 'anchor':\n case 'tag':\n if (it.value || this.indent <= seq.indent)\n break;\n it.start.push(this.sourceToken);\n return;\n case 'seq-item-ind':\n if (this.indent !== seq.indent)\n break;\n if (it.value || includesToken(it.start, 'seq-item-ind'))\n seq.items.push({ start: [this.sourceToken] });\n else\n it.start.push(this.sourceToken);\n return;\n }\n if (this.indent > seq.indent) {\n const bv = this.startBlockValue(seq);\n if (bv) {\n this.stack.push(bv);\n return;\n }\n }\n yield* this.pop();\n yield* this.step();\n }\n *flowCollection(fc) {\n const it = fc.items[fc.items.length - 1];\n if (this.type === 'flow-error-end') {\n let top;\n do {\n yield* this.pop();\n top = this.peek(1);\n } while (top && top.type === 'flow-collection');\n }\n else if (fc.end.length === 0) {\n switch (this.type) {\n case 'comma':\n case 'explicit-key-ind':\n if (!it || it.sep)\n fc.items.push({ start: [this.sourceToken] });\n else\n it.start.push(this.sourceToken);\n return;\n case 'map-value-ind':\n if (!it || it.value)\n fc.items.push({ start: [], key: null, sep: [this.sourceToken] });\n else if (it.sep)\n it.sep.push(this.sourceToken);\n else\n Object.assign(it, { key: null, sep: [this.sourceToken] });\n return;\n case 'space':\n case 'comment':\n case 'newline':\n case 'anchor':\n case 'tag':\n if (!it || it.value)\n fc.items.push({ start: [this.sourceToken] });\n else if (it.sep)\n it.sep.push(this.sourceToken);\n else\n it.start.push(this.sourceToken);\n return;\n case 'alias':\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar': {\n const fs = this.flowScalar(this.type);\n if (!it || it.value)\n fc.items.push({ start: [], key: fs, sep: [] });\n else if (it.sep)\n this.stack.push(fs);\n else\n Object.assign(it, { key: fs, sep: [] });\n return;\n }\n case 'flow-map-end':\n case 'flow-seq-end':\n fc.end.push(this.sourceToken);\n return;\n }\n const bv = this.startBlockValue(fc);\n /* istanbul ignore else should not happen */\n if (bv)\n this.stack.push(bv);\n else {\n yield* this.pop();\n yield* this.step();\n }\n }\n else {\n const parent = this.peek(2);\n if (parent.type === 'block-map' &&\n ((this.type === 'map-value-ind' && parent.indent === fc.indent) ||\n (this.type === 'newline' &&\n !parent.items[parent.items.length - 1].sep))) {\n yield* this.pop();\n yield* this.step();\n }\n else if (this.type === 'map-value-ind' &&\n parent.type !== 'flow-collection') {\n const prev = getPrevProps(parent);\n const start = getFirstKeyStartProps(prev);\n fixFlowSeqItems(fc);\n const sep = fc.end.splice(1, fc.end.length);\n sep.push(this.sourceToken);\n const map = {\n type: 'block-map',\n offset: fc.offset,\n indent: fc.indent,\n items: [{ start, key: fc, sep }]\n };\n this.onKeyLine = true;\n this.stack[this.stack.length - 1] = map;\n }\n else {\n yield* this.lineEnd(fc);\n }\n }\n }\n flowScalar(type) {\n if (this.onNewLine) {\n let nl = 
this.source.indexOf('\\n') + 1;\n while (nl !== 0) {\n this.onNewLine(this.offset + nl);\n nl = this.source.indexOf('\\n', nl) + 1;\n }\n }\n return {\n type,\n offset: this.offset,\n indent: this.indent,\n source: this.source\n };\n }\n startBlockValue(parent) {\n switch (this.type) {\n case 'alias':\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar':\n return this.flowScalar(this.type);\n case 'block-scalar-header':\n return {\n type: 'block-scalar',\n offset: this.offset,\n indent: this.indent,\n props: [this.sourceToken],\n source: ''\n };\n case 'flow-map-start':\n case 'flow-seq-start':\n return {\n type: 'flow-collection',\n offset: this.offset,\n indent: this.indent,\n start: this.sourceToken,\n items: [],\n end: []\n };\n case 'seq-item-ind':\n return {\n type: 'block-seq',\n offset: this.offset,\n indent: this.indent,\n items: [{ start: [this.sourceToken] }]\n };\n case 'explicit-key-ind': {\n this.onKeyLine = true;\n const prev = getPrevProps(parent);\n const start = getFirstKeyStartProps(prev);\n start.push(this.sourceToken);\n return {\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start }]\n };\n }\n case 'map-value-ind': {\n this.onKeyLine = true;\n const prev = getPrevProps(parent);\n const start = getFirstKeyStartProps(prev);\n return {\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start, key: null, sep: [this.sourceToken] }]\n };\n }\n }\n return null;\n }\n atIndentedComment(start, indent) {\n if (this.type !== 'comment')\n return false;\n if (this.indent <= indent)\n return false;\n return start.every(st => st.type === 'newline' || st.type === 'space');\n }\n *documentEnd(docEnd) {\n if (this.type !== 'doc-mode') {\n if (docEnd.end)\n docEnd.end.push(this.sourceToken);\n else\n docEnd.end = [this.sourceToken];\n if (this.type === 'newline')\n yield* this.pop();\n }\n }\n *lineEnd(token) {\n switch (this.type) {\n case 'comma':\n case 'doc-start':\n case 'doc-end':\n case 'flow-seq-end':\n case 'flow-map-end':\n case 'map-value-ind':\n yield* this.pop();\n yield* this.step();\n break;\n case 'newline':\n this.onKeyLine = false;\n // fallthrough\n case 'space':\n case 'comment':\n default:\n // all other values are errors\n if (token.end)\n token.end.push(this.sourceToken);\n else\n token.end = [this.sourceToken];\n if (this.type === 'newline')\n yield* this.pop();\n }\n }\n}\n\nexports.Parser = Parser;\n","'use strict';\n\nvar composer = require('./compose/composer.js');\nvar Document = require('./doc/Document.js');\nvar errors = require('./errors.js');\nvar log = require('./log.js');\nvar lineCounter = require('./parse/line-counter.js');\nvar parser = require('./parse/parser.js');\n\nfunction parseOptions(options) {\n const prettyErrors = options.prettyErrors !== false;\n const lineCounter$1 = options.lineCounter || (prettyErrors && new lineCounter.LineCounter()) || null;\n return { lineCounter: lineCounter$1, prettyErrors };\n}\n/**\n * Parse the input as a stream of YAML documents.\n *\n * Documents should be separated from each other by `...` or `---` marker lines.\n *\n * @returns If an empty `docs` array is returned, it will be of type\n * EmptyStream and contain additional stream information. 
In\n * TypeScript, you should use `'empty' in docs` as a type guard for it.\n */\nfunction parseAllDocuments(source, options = {}) {\n const { lineCounter, prettyErrors } = parseOptions(options);\n const parser$1 = new parser.Parser(lineCounter?.addNewLine);\n const composer$1 = new composer.Composer(options);\n const docs = Array.from(composer$1.compose(parser$1.parse(source)));\n if (prettyErrors && lineCounter)\n for (const doc of docs) {\n doc.errors.forEach(errors.prettifyError(source, lineCounter));\n doc.warnings.forEach(errors.prettifyError(source, lineCounter));\n }\n if (docs.length > 0)\n return docs;\n return Object.assign([], { empty: true }, composer$1.streamInfo());\n}\n/** Parse an input string into a single YAML.Document */\nfunction parseDocument(source, options = {}) {\n const { lineCounter, prettyErrors } = parseOptions(options);\n const parser$1 = new parser.Parser(lineCounter?.addNewLine);\n const composer$1 = new composer.Composer(options);\n // `doc` is always set by compose.end(true) at the very latest\n let doc = null;\n for (const _doc of composer$1.compose(parser$1.parse(source), true, source.length)) {\n if (!doc)\n doc = _doc;\n else if (doc.options.logLevel !== 'silent') {\n doc.errors.push(new errors.YAMLParseError(_doc.range.slice(0, 2), 'MULTIPLE_DOCS', 'Source contains multiple documents; please use YAML.parseAllDocuments()'));\n break;\n }\n }\n if (prettyErrors && lineCounter) {\n doc.errors.forEach(errors.prettifyError(source, lineCounter));\n doc.warnings.forEach(errors.prettifyError(source, lineCounter));\n }\n return doc;\n}\nfunction parse(src, reviver, options) {\n let _reviver = undefined;\n if (typeof reviver === 'function') {\n _reviver = reviver;\n }\n else if (options === undefined && reviver && typeof reviver === 'object') {\n options = reviver;\n }\n const doc = parseDocument(src, options);\n if (!doc)\n return null;\n doc.warnings.forEach(warning => log.warn(doc.options.logLevel, warning));\n if (doc.errors.length > 0) {\n if (doc.options.logLevel !== 'silent')\n throw doc.errors[0];\n else\n doc.errors = [];\n }\n return doc.toJS(Object.assign({ reviver: _reviver }, options));\n}\nfunction stringify(value, replacer, options) {\n let _replacer = null;\n if (typeof replacer === 'function' || Array.isArray(replacer)) {\n _replacer = replacer;\n }\n else if (options === undefined && replacer) {\n options = replacer;\n }\n if (typeof options === 'string')\n options = options.length;\n if (typeof options === 'number') {\n const indent = Math.round(options);\n options = indent < 1 ? undefined : indent > 8 ? { indent: 8 } : { indent };\n }\n if (value === undefined) {\n const { keepUndefined } = options ?? replacer ?? {};\n if (!keepUndefined)\n return undefined;\n }\n return new Document.Document(value, _replacer, options).toString(options);\n}\n\nexports.parse = parse;\nexports.parseAllDocuments = parseAllDocuments;\nexports.parseDocument = parseDocument;\nexports.stringify = stringify;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar map = require('./common/map.js');\nvar seq = require('./common/seq.js');\nvar string = require('./common/string.js');\nvar tags = require('./tags.js');\n\nconst sortMapEntriesByKey = (a, b) => a.key < b.key ? -1 : a.key > b.key ? 1 : 0;\nclass Schema {\n constructor({ compat, customTags, merge, resolveKnownTags, schema, sortMapEntries, toStringDefaults }) {\n this.compat = Array.isArray(compat)\n ? tags.getTags(compat, 'compat')\n : compat\n ? 
tags.getTags(null, compat)\n : null;\n this.merge = !!merge;\n this.name = (typeof schema === 'string' && schema) || 'core';\n this.knownTags = resolveKnownTags ? tags.coreKnownTags : {};\n this.tags = tags.getTags(customTags, this.name);\n this.toStringOptions = toStringDefaults ?? null;\n Object.defineProperty(this, Node.MAP, { value: map.map });\n Object.defineProperty(this, Node.SCALAR, { value: string.string });\n Object.defineProperty(this, Node.SEQ, { value: seq.seq });\n // Used by createMap()\n this.sortMapEntries =\n typeof sortMapEntries === 'function'\n ? sortMapEntries\n : sortMapEntries === true\n ? sortMapEntriesByKey\n : null;\n }\n clone() {\n const copy = Object.create(Schema.prototype, Object.getOwnPropertyDescriptors(this));\n copy.tags = this.tags.slice();\n return copy;\n }\n}\n\nexports.Schema = Schema;\n","'use strict';\n\nvar Node = require('../../nodes/Node.js');\nvar Pair = require('../../nodes/Pair.js');\nvar YAMLMap = require('../../nodes/YAMLMap.js');\n\nfunction createMap(schema, obj, ctx) {\n const { keepUndefined, replacer } = ctx;\n const map = new YAMLMap.YAMLMap(schema);\n const add = (key, value) => {\n if (typeof replacer === 'function')\n value = replacer.call(obj, key, value);\n else if (Array.isArray(replacer) && !replacer.includes(key))\n return;\n if (value !== undefined || keepUndefined)\n map.items.push(Pair.createPair(key, value, ctx));\n };\n if (obj instanceof Map) {\n for (const [key, value] of obj)\n add(key, value);\n }\n else if (obj && typeof obj === 'object') {\n for (const key of Object.keys(obj))\n add(key, obj[key]);\n }\n if (typeof schema.sortMapEntries === 'function') {\n map.items.sort(schema.sortMapEntries);\n }\n return map;\n}\nconst map = {\n collection: 'map',\n createNode: createMap,\n default: true,\n nodeClass: YAMLMap.YAMLMap,\n tag: 'tag:yaml.org,2002:map',\n resolve(map, onError) {\n if (!Node.isMap(map))\n onError('Expected a mapping for this tag');\n return map;\n }\n};\n\nexports.map = map;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\n\nconst nullTag = {\n identify: value => value == null,\n createNode: () => new Scalar.Scalar(null),\n default: true,\n tag: 'tag:yaml.org,2002:null',\n test: /^(?:~|[Nn]ull|NULL)?$/,\n resolve: () => new Scalar.Scalar(null),\n stringify: ({ source }, ctx) => typeof source === 'string' && nullTag.test.test(source)\n ? source\n : ctx.options.nullStr\n};\n\nexports.nullTag = nullTag;\n","'use strict';\n\nvar createNode = require('../../doc/createNode.js');\nvar Node = require('../../nodes/Node.js');\nvar YAMLSeq = require('../../nodes/YAMLSeq.js');\n\nfunction createSeq(schema, obj, ctx) {\n const { replacer } = ctx;\n const seq = new YAMLSeq.YAMLSeq(schema);\n if (obj && Symbol.iterator in Object(obj)) {\n let i = 0;\n for (let it of obj) {\n if (typeof replacer === 'function') {\n const key = obj instanceof Set ? 
it : String(i++);\n it = replacer.call(obj, key, it);\n }\n seq.items.push(createNode.createNode(it, undefined, ctx));\n }\n }\n return seq;\n}\nconst seq = {\n collection: 'seq',\n createNode: createSeq,\n default: true,\n nodeClass: YAMLSeq.YAMLSeq,\n tag: 'tag:yaml.org,2002:seq',\n resolve(seq, onError) {\n if (!Node.isSeq(seq))\n onError('Expected a sequence for this tag');\n return seq;\n }\n};\n\nexports.seq = seq;\n","'use strict';\n\nvar stringifyString = require('../../stringify/stringifyString.js');\n\nconst string = {\n identify: value => typeof value === 'string',\n default: true,\n tag: 'tag:yaml.org,2002:str',\n resolve: str => str,\n stringify(item, ctx, onComment, onChompKeep) {\n ctx = Object.assign({ actualString: true }, ctx);\n return stringifyString.stringifyString(item, ctx, onComment, onChompKeep);\n }\n};\n\nexports.string = string;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\n\nconst boolTag = {\n identify: value => typeof value === 'boolean',\n default: true,\n tag: 'tag:yaml.org,2002:bool',\n test: /^(?:[Tt]rue|TRUE|[Ff]alse|FALSE)$/,\n resolve: str => new Scalar.Scalar(str[0] === 't' || str[0] === 'T'),\n stringify({ source, value }, ctx) {\n if (source && boolTag.test.test(source)) {\n const sv = source[0] === 't' || source[0] === 'T';\n if (value === sv)\n return source;\n }\n return value ? ctx.options.trueStr : ctx.options.falseStr;\n }\n};\n\nexports.boolTag = boolTag;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\nvar stringifyNumber = require('../../stringify/stringifyNumber.js');\n\nconst floatNaN = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n test: /^(?:[-+]?\\.(?:inf|Inf|INF|nan|NaN|NAN))$/,\n resolve: str => str.slice(-3).toLowerCase() === 'nan'\n ? NaN\n : str[0] === '-'\n ? Number.NEGATIVE_INFINITY\n : Number.POSITIVE_INFINITY,\n stringify: stringifyNumber.stringifyNumber\n};\nconst floatExp = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n format: 'EXP',\n test: /^[-+]?(?:\\.[0-9]+|[0-9]+(?:\\.[0-9]*)?)[eE][-+]?[0-9]+$/,\n resolve: str => parseFloat(str),\n stringify(node) {\n const num = Number(node.value);\n return isFinite(num) ? num.toExponential() : stringifyNumber.stringifyNumber(node);\n }\n};\nconst float = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n test: /^[-+]?(?:\\.[0-9]+|[0-9]+\\.[0-9]*)$/,\n resolve(str) {\n const node = new Scalar.Scalar(parseFloat(str));\n const dot = str.indexOf('.');\n if (dot !== -1 && str[str.length - 1] === '0')\n node.minFractionDigits = str.length - dot - 1;\n return node;\n },\n stringify: stringifyNumber.stringifyNumber\n};\n\nexports.float = float;\nexports.floatExp = floatExp;\nexports.floatNaN = floatNaN;\n","'use strict';\n\nvar stringifyNumber = require('../../stringify/stringifyNumber.js');\n\nconst intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value);\nconst intResolve = (str, offset, radix, { intAsBigInt }) => (intAsBigInt ? 
BigInt(str) : parseInt(str.substring(offset), radix));\nfunction intStringify(node, radix, prefix) {\n const { value } = node;\n if (intIdentify(value) && value >= 0)\n return prefix + value.toString(radix);\n return stringifyNumber.stringifyNumber(node);\n}\nconst intOct = {\n identify: value => intIdentify(value) && value >= 0,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'OCT',\n test: /^0o[0-7]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 2, 8, opt),\n stringify: node => intStringify(node, 8, '0o')\n};\nconst int = {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n test: /^[-+]?[0-9]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt),\n stringify: stringifyNumber.stringifyNumber\n};\nconst intHex = {\n identify: value => intIdentify(value) && value >= 0,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'HEX',\n test: /^0x[0-9a-fA-F]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt),\n stringify: node => intStringify(node, 16, '0x')\n};\n\nexports.int = int;\nexports.intHex = intHex;\nexports.intOct = intOct;\n","'use strict';\n\nvar map = require('../common/map.js');\nvar _null = require('../common/null.js');\nvar seq = require('../common/seq.js');\nvar string = require('../common/string.js');\nvar bool = require('./bool.js');\nvar float = require('./float.js');\nvar int = require('./int.js');\n\nconst schema = [\n map.map,\n seq.seq,\n string.string,\n _null.nullTag,\n bool.boolTag,\n int.intOct,\n int.int,\n int.intHex,\n float.floatNaN,\n float.floatExp,\n float.float\n];\n\nexports.schema = schema;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\nvar map = require('../common/map.js');\nvar seq = require('../common/seq.js');\n\nfunction intIdentify(value) {\n return typeof value === 'bigint' || Number.isInteger(value);\n}\nconst stringifyJSON = ({ value }) => JSON.stringify(value);\nconst jsonScalars = [\n {\n identify: value => typeof value === 'string',\n default: true,\n tag: 'tag:yaml.org,2002:str',\n resolve: str => str,\n stringify: stringifyJSON\n },\n {\n identify: value => value == null,\n createNode: () => new Scalar.Scalar(null),\n default: true,\n tag: 'tag:yaml.org,2002:null',\n test: /^null$/,\n resolve: () => null,\n stringify: stringifyJSON\n },\n {\n identify: value => typeof value === 'boolean',\n default: true,\n tag: 'tag:yaml.org,2002:bool',\n test: /^true|false$/,\n resolve: str => str === 'true',\n stringify: stringifyJSON\n },\n {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n test: /^-?(?:0|[1-9][0-9]*)$/,\n resolve: (str, _onError, { intAsBigInt }) => intAsBigInt ? BigInt(str) : parseInt(str, 10),\n stringify: ({ value }) => intIdentify(value) ? 
value.toString() : JSON.stringify(value)\n },\n {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n test: /^-?(?:0|[1-9][0-9]*)(?:\\.[0-9]*)?(?:[eE][-+]?[0-9]+)?$/,\n resolve: str => parseFloat(str),\n stringify: stringifyJSON\n }\n];\nconst jsonError = {\n default: true,\n tag: '',\n test: /^/,\n resolve(str, onError) {\n onError(`Unresolved plain scalar ${JSON.stringify(str)}`);\n return str;\n }\n};\nconst schema = [map.map, seq.seq].concat(jsonScalars, jsonError);\n\nexports.schema = schema;\n","'use strict';\n\nvar map = require('./common/map.js');\nvar _null = require('./common/null.js');\nvar seq = require('./common/seq.js');\nvar string = require('./common/string.js');\nvar bool = require('./core/bool.js');\nvar float = require('./core/float.js');\nvar int = require('./core/int.js');\nvar schema = require('./core/schema.js');\nvar schema$1 = require('./json/schema.js');\nvar binary = require('./yaml-1.1/binary.js');\nvar omap = require('./yaml-1.1/omap.js');\nvar pairs = require('./yaml-1.1/pairs.js');\nvar schema$2 = require('./yaml-1.1/schema.js');\nvar set = require('./yaml-1.1/set.js');\nvar timestamp = require('./yaml-1.1/timestamp.js');\n\nconst schemas = new Map([\n ['core', schema.schema],\n ['failsafe', [map.map, seq.seq, string.string]],\n ['json', schema$1.schema],\n ['yaml11', schema$2.schema],\n ['yaml-1.1', schema$2.schema]\n]);\nconst tagsByName = {\n binary: binary.binary,\n bool: bool.boolTag,\n float: float.float,\n floatExp: float.floatExp,\n floatNaN: float.floatNaN,\n floatTime: timestamp.floatTime,\n int: int.int,\n intHex: int.intHex,\n intOct: int.intOct,\n intTime: timestamp.intTime,\n map: map.map,\n null: _null.nullTag,\n omap: omap.omap,\n pairs: pairs.pairs,\n seq: seq.seq,\n set: set.set,\n timestamp: timestamp.timestamp\n};\nconst coreKnownTags = {\n 'tag:yaml.org,2002:binary': binary.binary,\n 'tag:yaml.org,2002:omap': omap.omap,\n 'tag:yaml.org,2002:pairs': pairs.pairs,\n 'tag:yaml.org,2002:set': set.set,\n 'tag:yaml.org,2002:timestamp': timestamp.timestamp\n};\nfunction getTags(customTags, schemaName) {\n let tags = schemas.get(schemaName);\n if (!tags) {\n if (Array.isArray(customTags))\n tags = [];\n else {\n const keys = Array.from(schemas.keys())\n .filter(key => key !== 'yaml11')\n .map(key => JSON.stringify(key))\n .join(', ');\n throw new Error(`Unknown schema \"${schemaName}\"; use one of ${keys} or define customTags array`);\n }\n }\n if (Array.isArray(customTags)) {\n for (const tag of customTags)\n tags = tags.concat(tag);\n }\n else if (typeof customTags === 'function') {\n tags = customTags(tags.slice());\n }\n return tags.map(tag => {\n if (typeof tag !== 'string')\n return tag;\n const tagObj = tagsByName[tag];\n if (tagObj)\n return tagObj;\n const keys = Object.keys(tagsByName)\n .map(key => JSON.stringify(key))\n .join(', ');\n throw new Error(`Unknown custom tag \"${tag}\"; use one of ${keys}`);\n });\n}\n\nexports.coreKnownTags = coreKnownTags;\nexports.getTags = getTags;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\nvar stringifyString = require('../../stringify/stringifyString.js');\n\nconst binary = {\n identify: value => value instanceof Uint8Array,\n default: false,\n tag: 'tag:yaml.org,2002:binary',\n /**\n * Returns a Buffer in node and an Uint8Array in browsers\n *\n * To use the resulting buffer as an image, you'll want to do something like:\n *\n * const blob = new Blob([buffer], { type: 'image/jpeg' })\n * document.querySelector('#photo').src = 
URL.createObjectURL(blob)\n */\n resolve(src, onError) {\n if (typeof Buffer === 'function') {\n return Buffer.from(src, 'base64');\n }\n else if (typeof atob === 'function') {\n // On IE 11, atob() can't handle newlines\n const str = atob(src.replace(/[\\n\\r]/g, ''));\n const buffer = new Uint8Array(str.length);\n for (let i = 0; i < str.length; ++i)\n buffer[i] = str.charCodeAt(i);\n return buffer;\n }\n else {\n onError('This environment does not support reading binary tags; either Buffer or atob is required');\n return src;\n }\n },\n stringify({ comment, type, value }, ctx, onComment, onChompKeep) {\n const buf = value; // checked earlier by binary.identify()\n let str;\n if (typeof Buffer === 'function') {\n str =\n buf instanceof Buffer\n ? buf.toString('base64')\n : Buffer.from(buf.buffer).toString('base64');\n }\n else if (typeof btoa === 'function') {\n let s = '';\n for (let i = 0; i < buf.length; ++i)\n s += String.fromCharCode(buf[i]);\n str = btoa(s);\n }\n else {\n throw new Error('This environment does not support writing binary tags; either Buffer or btoa is required');\n }\n if (!type)\n type = Scalar.Scalar.BLOCK_LITERAL;\n if (type !== Scalar.Scalar.QUOTE_DOUBLE) {\n const lineWidth = Math.max(ctx.options.lineWidth - ctx.indent.length, ctx.options.minContentWidth);\n const n = Math.ceil(str.length / lineWidth);\n const lines = new Array(n);\n for (let i = 0, o = 0; i < n; ++i, o += lineWidth) {\n lines[i] = str.substr(o, lineWidth);\n }\n str = lines.join(type === Scalar.Scalar.BLOCK_LITERAL ? '\\n' : ' ');\n }\n return stringifyString.stringifyString({ comment, type, value: str }, ctx, onComment, onChompKeep);\n }\n};\n\nexports.binary = binary;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\n\nfunction boolStringify({ value, source }, ctx) {\n const boolObj = value ? trueTag : falseTag;\n if (source && boolObj.test.test(source))\n return source;\n return value ? ctx.options.trueStr : ctx.options.falseStr;\n}\nconst trueTag = {\n identify: value => value === true,\n default: true,\n tag: 'tag:yaml.org,2002:bool',\n test: /^(?:Y|y|[Yy]es|YES|[Tt]rue|TRUE|[Oo]n|ON)$/,\n resolve: () => new Scalar.Scalar(true),\n stringify: boolStringify\n};\nconst falseTag = {\n identify: value => value === false,\n default: true,\n tag: 'tag:yaml.org,2002:bool',\n test: /^(?:N|n|[Nn]o|NO|[Ff]alse|FALSE|[Oo]ff|OFF)$/i,\n resolve: () => new Scalar.Scalar(false),\n stringify: boolStringify\n};\n\nexports.falseTag = falseTag;\nexports.trueTag = trueTag;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\nvar stringifyNumber = require('../../stringify/stringifyNumber.js');\n\nconst floatNaN = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n test: /^[-+]?\\.(?:inf|Inf|INF|nan|NaN|NAN)$/,\n resolve: (str) => str.slice(-3).toLowerCase() === 'nan'\n ? NaN\n : str[0] === '-'\n ? Number.NEGATIVE_INFINITY\n : Number.POSITIVE_INFINITY,\n stringify: stringifyNumber.stringifyNumber\n};\nconst floatExp = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n format: 'EXP',\n test: /^[-+]?(?:[0-9][0-9_]*)?(?:\\.[0-9_]*)?[eE][-+]?[0-9]+$/,\n resolve: (str) => parseFloat(str.replace(/_/g, '')),\n stringify(node) {\n const num = Number(node.value);\n return isFinite(num) ? 
num.toExponential() : stringifyNumber.stringifyNumber(node);\n }\n};\nconst float = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n test: /^[-+]?(?:[0-9][0-9_]*)?\\.[0-9_]*$/,\n resolve(str) {\n const node = new Scalar.Scalar(parseFloat(str.replace(/_/g, '')));\n const dot = str.indexOf('.');\n if (dot !== -1) {\n const f = str.substring(dot + 1).replace(/_/g, '');\n if (f[f.length - 1] === '0')\n node.minFractionDigits = f.length;\n }\n return node;\n },\n stringify: stringifyNumber.stringifyNumber\n};\n\nexports.float = float;\nexports.floatExp = floatExp;\nexports.floatNaN = floatNaN;\n","'use strict';\n\nvar stringifyNumber = require('../../stringify/stringifyNumber.js');\n\nconst intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value);\nfunction intResolve(str, offset, radix, { intAsBigInt }) {\n const sign = str[0];\n if (sign === '-' || sign === '+')\n offset += 1;\n str = str.substring(offset).replace(/_/g, '');\n if (intAsBigInt) {\n switch (radix) {\n case 2:\n str = `0b${str}`;\n break;\n case 8:\n str = `0o${str}`;\n break;\n case 16:\n str = `0x${str}`;\n break;\n }\n const n = BigInt(str);\n return sign === '-' ? BigInt(-1) * n : n;\n }\n const n = parseInt(str, radix);\n return sign === '-' ? -1 * n : n;\n}\nfunction intStringify(node, radix, prefix) {\n const { value } = node;\n if (intIdentify(value)) {\n const str = value.toString(radix);\n return value < 0 ? '-' + prefix + str.substr(1) : prefix + str;\n }\n return stringifyNumber.stringifyNumber(node);\n}\nconst intBin = {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'BIN',\n test: /^[-+]?0b[0-1_]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 2, 2, opt),\n stringify: node => intStringify(node, 2, '0b')\n};\nconst intOct = {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'OCT',\n test: /^[-+]?0[0-7_]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 1, 8, opt),\n stringify: node => intStringify(node, 8, '0')\n};\nconst int = {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n test: /^[-+]?[0-9][0-9_]*$/,\n resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt),\n stringify: stringifyNumber.stringifyNumber\n};\nconst intHex = {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'HEX',\n test: /^[-+]?0x[0-9a-fA-F_]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt),\n stringify: node => intStringify(node, 16, '0x')\n};\n\nexports.int = int;\nexports.intBin = intBin;\nexports.intHex = intHex;\nexports.intOct = intOct;\n","'use strict';\n\nvar YAMLSeq = require('../../nodes/YAMLSeq.js');\nvar toJS = require('../../nodes/toJS.js');\nvar Node = require('../../nodes/Node.js');\nvar YAMLMap = require('../../nodes/YAMLMap.js');\nvar pairs = require('./pairs.js');\n\nclass YAMLOMap extends YAMLSeq.YAMLSeq {\n constructor() {\n super();\n this.add = YAMLMap.YAMLMap.prototype.add.bind(this);\n this.delete = YAMLMap.YAMLMap.prototype.delete.bind(this);\n this.get = YAMLMap.YAMLMap.prototype.get.bind(this);\n this.has = YAMLMap.YAMLMap.prototype.has.bind(this);\n this.set = YAMLMap.YAMLMap.prototype.set.bind(this);\n this.tag = YAMLOMap.tag;\n }\n /**\n * If `ctx` is given, the return type is actually `Map`,\n * but TypeScript won't allow widening the signature of a child method.\n */\n toJSON(_, ctx) {\n if (!ctx)\n return super.toJSON(_);\n const map = new Map();\n if (ctx?.onCreate)\n 
ctx.onCreate(map);\n for (const pair of this.items) {\n let key, value;\n if (Node.isPair(pair)) {\n key = toJS.toJS(pair.key, '', ctx);\n value = toJS.toJS(pair.value, key, ctx);\n }\n else {\n key = toJS.toJS(pair, '', ctx);\n }\n if (map.has(key))\n throw new Error('Ordered maps must not include duplicate keys');\n map.set(key, value);\n }\n return map;\n }\n}\nYAMLOMap.tag = 'tag:yaml.org,2002:omap';\nconst omap = {\n collection: 'seq',\n identify: value => value instanceof Map,\n nodeClass: YAMLOMap,\n default: false,\n tag: 'tag:yaml.org,2002:omap',\n resolve(seq, onError) {\n const pairs$1 = pairs.resolvePairs(seq, onError);\n const seenKeys = [];\n for (const { key } of pairs$1.items) {\n if (Node.isScalar(key)) {\n if (seenKeys.includes(key.value)) {\n onError(`Ordered maps must not include duplicate keys: ${key.value}`);\n }\n else {\n seenKeys.push(key.value);\n }\n }\n }\n return Object.assign(new YAMLOMap(), pairs$1);\n },\n createNode(schema, iterable, ctx) {\n const pairs$1 = pairs.createPairs(schema, iterable, ctx);\n const omap = new YAMLOMap();\n omap.items = pairs$1.items;\n return omap;\n }\n};\n\nexports.YAMLOMap = YAMLOMap;\nexports.omap = omap;\n","'use strict';\n\nvar Node = require('../../nodes/Node.js');\nvar Pair = require('../../nodes/Pair.js');\nvar Scalar = require('../../nodes/Scalar.js');\nvar YAMLSeq = require('../../nodes/YAMLSeq.js');\n\nfunction resolvePairs(seq, onError) {\n if (Node.isSeq(seq)) {\n for (let i = 0; i < seq.items.length; ++i) {\n let item = seq.items[i];\n if (Node.isPair(item))\n continue;\n else if (Node.isMap(item)) {\n if (item.items.length > 1)\n onError('Each pair must have its own sequence indicator');\n const pair = item.items[0] || new Pair.Pair(new Scalar.Scalar(null));\n if (item.commentBefore)\n pair.key.commentBefore = pair.key.commentBefore\n ? `${item.commentBefore}\\n${pair.key.commentBefore}`\n : item.commentBefore;\n if (item.comment) {\n const cn = pair.value ?? pair.key;\n cn.comment = cn.comment\n ? `${item.comment}\\n${cn.comment}`\n : item.comment;\n }\n item = pair;\n }\n seq.items[i] = Node.isPair(item) ? 
item : new Pair.Pair(item);\n }\n }\n else\n onError('Expected a sequence for this tag');\n return seq;\n}\nfunction createPairs(schema, iterable, ctx) {\n const { replacer } = ctx;\n const pairs = new YAMLSeq.YAMLSeq(schema);\n pairs.tag = 'tag:yaml.org,2002:pairs';\n let i = 0;\n if (iterable && Symbol.iterator in Object(iterable))\n for (let it of iterable) {\n if (typeof replacer === 'function')\n it = replacer.call(iterable, String(i++), it);\n let key, value;\n if (Array.isArray(it)) {\n if (it.length === 2) {\n key = it[0];\n value = it[1];\n }\n else\n throw new TypeError(`Expected [key, value] tuple: ${it}`);\n }\n else if (it && it instanceof Object) {\n const keys = Object.keys(it);\n if (keys.length === 1) {\n key = keys[0];\n value = it[key];\n }\n else\n throw new TypeError(`Expected { key: value } tuple: ${it}`);\n }\n else {\n key = it;\n }\n pairs.items.push(Pair.createPair(key, value, ctx));\n }\n return pairs;\n}\nconst pairs = {\n collection: 'seq',\n default: false,\n tag: 'tag:yaml.org,2002:pairs',\n resolve: resolvePairs,\n createNode: createPairs\n};\n\nexports.createPairs = createPairs;\nexports.pairs = pairs;\nexports.resolvePairs = resolvePairs;\n","'use strict';\n\nvar map = require('../common/map.js');\nvar _null = require('../common/null.js');\nvar seq = require('../common/seq.js');\nvar string = require('../common/string.js');\nvar binary = require('./binary.js');\nvar bool = require('./bool.js');\nvar float = require('./float.js');\nvar int = require('./int.js');\nvar omap = require('./omap.js');\nvar pairs = require('./pairs.js');\nvar set = require('./set.js');\nvar timestamp = require('./timestamp.js');\n\nconst schema = [\n map.map,\n seq.seq,\n string.string,\n _null.nullTag,\n bool.trueTag,\n bool.falseTag,\n int.intBin,\n int.intOct,\n int.int,\n int.intHex,\n float.floatNaN,\n float.floatExp,\n float.float,\n binary.binary,\n omap.omap,\n pairs.pairs,\n set.set,\n timestamp.intTime,\n timestamp.floatTime,\n timestamp.timestamp\n];\n\nexports.schema = schema;\n","'use strict';\n\nvar Node = require('../../nodes/Node.js');\nvar Pair = require('../../nodes/Pair.js');\nvar YAMLMap = require('../../nodes/YAMLMap.js');\n\nclass YAMLSet extends YAMLMap.YAMLMap {\n constructor(schema) {\n super(schema);\n this.tag = YAMLSet.tag;\n }\n add(key) {\n let pair;\n if (Node.isPair(key))\n pair = key;\n else if (typeof key === 'object' &&\n 'key' in key &&\n 'value' in key &&\n key.value === null)\n pair = new Pair.Pair(key.key, null);\n else\n pair = new Pair.Pair(key, null);\n const prev = YAMLMap.findPair(this.items, pair.key);\n if (!prev)\n this.items.push(pair);\n }\n /**\n * If `keepPair` is `true`, returns the Pair matching `key`.\n * Otherwise, returns the value of that Pair's key.\n */\n get(key, keepPair) {\n const pair = YAMLMap.findPair(this.items, key);\n return !keepPair && Node.isPair(pair)\n ? Node.isScalar(pair.key)\n ? 
pair.key.value\n : pair.key\n : pair;\n }\n set(key, value) {\n if (typeof value !== 'boolean')\n throw new Error(`Expected boolean value for set(key, value) in a YAML set, not ${typeof value}`);\n const prev = YAMLMap.findPair(this.items, key);\n if (prev && !value) {\n this.items.splice(this.items.indexOf(prev), 1);\n }\n else if (!prev && value) {\n this.items.push(new Pair.Pair(key));\n }\n }\n toJSON(_, ctx) {\n return super.toJSON(_, ctx, Set);\n }\n toString(ctx, onComment, onChompKeep) {\n if (!ctx)\n return JSON.stringify(this);\n if (this.hasAllNullValues(true))\n return super.toString(Object.assign({}, ctx, { allNullValues: true }), onComment, onChompKeep);\n else\n throw new Error('Set items must all have null values');\n }\n}\nYAMLSet.tag = 'tag:yaml.org,2002:set';\nconst set = {\n collection: 'map',\n identify: value => value instanceof Set,\n nodeClass: YAMLSet,\n default: false,\n tag: 'tag:yaml.org,2002:set',\n resolve(map, onError) {\n if (Node.isMap(map)) {\n if (map.hasAllNullValues(true))\n return Object.assign(new YAMLSet(), map);\n else\n onError('Set items must all have null values');\n }\n else\n onError('Expected a mapping for this tag');\n return map;\n },\n createNode(schema, iterable, ctx) {\n const { replacer } = ctx;\n const set = new YAMLSet(schema);\n if (iterable && Symbol.iterator in Object(iterable))\n for (let value of iterable) {\n if (typeof replacer === 'function')\n value = replacer.call(iterable, value, value);\n set.items.push(Pair.createPair(value, null, ctx));\n }\n return set;\n }\n};\n\nexports.YAMLSet = YAMLSet;\nexports.set = set;\n","'use strict';\n\nvar stringifyNumber = require('../../stringify/stringifyNumber.js');\n\n/** Internal types handle bigint as number, because TS can't figure it out. */\nfunction parseSexagesimal(str, asBigInt) {\n const sign = str[0];\n const parts = sign === '-' || sign === '+' ? str.substring(1) : str;\n const num = (n) => asBigInt ? BigInt(n) : Number(n);\n const res = parts\n .replace(/_/g, '')\n .split(':')\n .reduce((res, p) => res * num(60) + num(p), num(0));\n return (sign === '-' ? num(-1) * res : res);\n}\n/**\n * hhhh:mm:ss.sss\n *\n * Internal types handle bigint as number, because TS can't figure it out.\n */\nfunction stringifySexagesimal(node) {\n let { value } = node;\n let num = (n) => n;\n if (typeof value === 'bigint')\n num = n => BigInt(n);\n else if (isNaN(value) || !isFinite(value))\n return stringifyNumber.stringifyNumber(node);\n let sign = '';\n if (value < 0) {\n sign = '-';\n value *= num(-1);\n }\n const _60 = num(60);\n const parts = [value % _60]; // seconds, including ms\n if (value < 60) {\n parts.unshift(0); // at least one : is required\n }\n else {\n value = (value - parts[0]) / _60;\n parts.unshift(value % _60); // minutes\n if (value >= 60) {\n value = (value - parts[0]) / _60;\n parts.unshift(value); // hours\n }\n }\n return (sign +\n parts\n .map(n => (n < 10 ? 
'0' + String(n) : String(n)))\n .join(':')\n .replace(/000000\\d*$/, '') // % 60 may introduce error\n );\n}\nconst intTime = {\n identify: value => typeof value === 'bigint' || Number.isInteger(value),\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'TIME',\n test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+$/,\n resolve: (str, _onError, { intAsBigInt }) => parseSexagesimal(str, intAsBigInt),\n stringify: stringifySexagesimal\n};\nconst floatTime = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n format: 'TIME',\n test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\\.[0-9_]*$/,\n resolve: str => parseSexagesimal(str, false),\n stringify: stringifySexagesimal\n};\nconst timestamp = {\n identify: value => value instanceof Date,\n default: true,\n tag: 'tag:yaml.org,2002:timestamp',\n // If the time zone is omitted, the timestamp is assumed to be specified in UTC. The time part\n // may be omitted altogether, resulting in a date format. In such a case, the time part is\n // assumed to be 00:00:00Z (start of day, UTC).\n test: RegExp('^([0-9]{4})-([0-9]{1,2})-([0-9]{1,2})' + // YYYY-Mm-Dd\n '(?:' + // time is optional\n '(?:t|T|[ \\\\t]+)' + // t | T | whitespace\n '([0-9]{1,2}):([0-9]{1,2}):([0-9]{1,2}(\\\\.[0-9]+)?)' + // Hh:Mm:Ss(.ss)?\n '(?:[ \\\\t]*(Z|[-+][012]?[0-9](?::[0-9]{2})?))?' + // Z | +5 | -03:30\n ')?$'),\n resolve(str) {\n const match = str.match(timestamp.test);\n if (!match)\n throw new Error('!!timestamp expects a date, starting with yyyy-mm-dd');\n const [, year, month, day, hour, minute, second] = match.map(Number);\n const millisec = match[7] ? Number((match[7] + '00').substr(1, 3)) : 0;\n let date = Date.UTC(year, month - 1, day, hour || 0, minute || 0, second || 0, millisec);\n const tz = match[8];\n if (tz && tz !== 'Z') {\n let d = parseSexagesimal(tz, false);\n if (Math.abs(d) < 30)\n d *= 60;\n date -= 60000 * d;\n }\n return new Date(date);\n },\n stringify: ({ value }) => value.toISOString().replace(/((T00:00)?:00)?\\.000Z$/, '')\n};\n\nexports.floatTime = floatTime;\nexports.intTime = intTime;\nexports.timestamp = timestamp;\n","'use strict';\n\nconst FOLD_FLOW = 'flow';\nconst FOLD_BLOCK = 'block';\nconst FOLD_QUOTED = 'quoted';\n/**\n * Tries to keep input at up to `lineWidth` characters, splitting only on spaces\n * not followed by newlines or spaces unless `mode` is `'quoted'`. 
Lines are\n * terminated with `\\n` and started with `indent`.\n */\nfunction foldFlowLines(text, indent, mode = 'flow', { indentAtStart, lineWidth = 80, minContentWidth = 20, onFold, onOverflow } = {}) {\n if (!lineWidth || lineWidth < 0)\n return text;\n const endStep = Math.max(1 + minContentWidth, 1 + lineWidth - indent.length);\n if (text.length <= endStep)\n return text;\n const folds = [];\n const escapedFolds = {};\n let end = lineWidth - indent.length;\n if (typeof indentAtStart === 'number') {\n if (indentAtStart > lineWidth - Math.max(2, minContentWidth))\n folds.push(0);\n else\n end = lineWidth - indentAtStart;\n }\n let split = undefined;\n let prev = undefined;\n let overflow = false;\n let i = -1;\n let escStart = -1;\n let escEnd = -1;\n if (mode === FOLD_BLOCK) {\n i = consumeMoreIndentedLines(text, i);\n if (i !== -1)\n end = i + endStep;\n }\n for (let ch; (ch = text[(i += 1)]);) {\n if (mode === FOLD_QUOTED && ch === '\\\\') {\n escStart = i;\n switch (text[i + 1]) {\n case 'x':\n i += 3;\n break;\n case 'u':\n i += 5;\n break;\n case 'U':\n i += 9;\n break;\n default:\n i += 1;\n }\n escEnd = i;\n }\n if (ch === '\\n') {\n if (mode === FOLD_BLOCK)\n i = consumeMoreIndentedLines(text, i);\n end = i + endStep;\n split = undefined;\n }\n else {\n if (ch === ' ' &&\n prev &&\n prev !== ' ' &&\n prev !== '\\n' &&\n prev !== '\\t') {\n // space surrounded by non-space can be replaced with newline + indent\n const next = text[i + 1];\n if (next && next !== ' ' && next !== '\\n' && next !== '\\t')\n split = i;\n }\n if (i >= end) {\n if (split) {\n folds.push(split);\n end = split + endStep;\n split = undefined;\n }\n else if (mode === FOLD_QUOTED) {\n // white-space collected at end may stretch past lineWidth\n while (prev === ' ' || prev === '\\t') {\n prev = ch;\n ch = text[(i += 1)];\n overflow = true;\n }\n // Account for newline escape, but don't break preceding escape\n const j = i > escEnd + 1 ? 
i - 2 : escStart - 1;\n // Bail out if lineWidth & minContentWidth are shorter than an escape string\n if (escapedFolds[j])\n return text;\n folds.push(j);\n escapedFolds[j] = true;\n end = j + endStep;\n split = undefined;\n }\n else {\n overflow = true;\n }\n }\n }\n prev = ch;\n }\n if (overflow && onOverflow)\n onOverflow();\n if (folds.length === 0)\n return text;\n if (onFold)\n onFold();\n let res = text.slice(0, folds[0]);\n for (let i = 0; i < folds.length; ++i) {\n const fold = folds[i];\n const end = folds[i + 1] || text.length;\n if (fold === 0)\n res = `\\n${indent}${text.slice(0, end)}`;\n else {\n if (mode === FOLD_QUOTED && escapedFolds[fold])\n res += `${text[fold]}\\\\`;\n res += `\\n${indent}${text.slice(fold + 1, end)}`;\n }\n }\n return res;\n}\n/**\n * Presumes `i + 1` is at the start of a line\n * @returns index of last newline in more-indented block\n */\nfunction consumeMoreIndentedLines(text, i) {\n let ch = text[i + 1];\n while (ch === ' ' || ch === '\\t') {\n do {\n ch = text[(i += 1)];\n } while (ch && ch !== '\\n');\n ch = text[i + 1];\n }\n return i;\n}\n\nexports.FOLD_BLOCK = FOLD_BLOCK;\nexports.FOLD_FLOW = FOLD_FLOW;\nexports.FOLD_QUOTED = FOLD_QUOTED;\nexports.foldFlowLines = foldFlowLines;\n","'use strict';\n\nvar anchors = require('../doc/anchors.js');\nvar Node = require('../nodes/Node.js');\nvar stringifyComment = require('./stringifyComment.js');\nvar stringifyString = require('./stringifyString.js');\n\nfunction createStringifyContext(doc, options) {\n const opt = Object.assign({\n blockQuote: true,\n commentString: stringifyComment.stringifyComment,\n defaultKeyType: null,\n defaultStringType: 'PLAIN',\n directives: null,\n doubleQuotedAsJSON: false,\n doubleQuotedMinMultiLineLength: 40,\n falseStr: 'false',\n indentSeq: true,\n lineWidth: 80,\n minContentWidth: 20,\n nullStr: 'null',\n simpleKeys: false,\n singleQuote: null,\n trueStr: 'true',\n verifyAliasOrder: true\n }, doc.schema.toStringOptions, options);\n let inFlow;\n switch (opt.collectionStyle) {\n case 'block':\n inFlow = false;\n break;\n case 'flow':\n inFlow = true;\n break;\n default:\n inFlow = null;\n }\n return {\n anchors: new Set(),\n doc,\n indent: '',\n indentStep: typeof opt.indent === 'number' ? ' '.repeat(opt.indent) : ' ',\n inFlow,\n options: opt\n };\n}\nfunction getTagObject(tags, item) {\n if (item.tag) {\n const match = tags.filter(t => t.tag === item.tag);\n if (match.length > 0)\n return match.find(t => t.format === item.format) ?? match[0];\n }\n let tagObj = undefined;\n let obj;\n if (Node.isScalar(item)) {\n obj = item.value;\n const match = tags.filter(t => t.identify?.(obj));\n tagObj =\n match.find(t => t.format === item.format) ?? match.find(t => !t.format);\n }\n else {\n obj = item;\n tagObj = tags.find(t => t.nodeClass && obj instanceof t.nodeClass);\n }\n if (!tagObj) {\n const name = obj?.constructor?.name ?? typeof obj;\n throw new Error(`Tag not resolved for ${name} value`);\n }\n return tagObj;\n}\n// needs to be called before value stringifier to allow for circular anchor refs\nfunction stringifyProps(node, tagObj, { anchors: anchors$1, doc }) {\n if (!doc.directives)\n return '';\n const props = [];\n const anchor = (Node.isScalar(node) || Node.isCollection(node)) && node.anchor;\n if (anchor && anchors.anchorIsValid(anchor)) {\n anchors$1.add(anchor);\n props.push(`&${anchor}`);\n }\n const tag = node.tag ? node.tag : tagObj.default ? 
null : tagObj.tag;\n if (tag)\n props.push(doc.directives.tagString(tag));\n return props.join(' ');\n}\nfunction stringify(item, ctx, onComment, onChompKeep) {\n if (Node.isPair(item))\n return item.toString(ctx, onComment, onChompKeep);\n if (Node.isAlias(item)) {\n if (ctx.doc.directives)\n return item.toString(ctx);\n if (ctx.resolvedAliases?.has(item)) {\n throw new TypeError(`Cannot stringify circular structure without alias nodes`);\n }\n else {\n if (ctx.resolvedAliases)\n ctx.resolvedAliases.add(item);\n else\n ctx.resolvedAliases = new Set([item]);\n item = item.resolve(ctx.doc);\n }\n }\n let tagObj = undefined;\n const node = Node.isNode(item)\n ? item\n : ctx.doc.createNode(item, { onTagObj: o => (tagObj = o) });\n if (!tagObj)\n tagObj = getTagObject(ctx.doc.schema.tags, node);\n const props = stringifyProps(node, tagObj, ctx);\n if (props.length > 0)\n ctx.indentAtStart = (ctx.indentAtStart ?? 0) + props.length + 1;\n const str = typeof tagObj.stringify === 'function'\n ? tagObj.stringify(node, ctx, onComment, onChompKeep)\n : Node.isScalar(node)\n ? stringifyString.stringifyString(node, ctx, onComment, onChompKeep)\n : node.toString(ctx, onComment, onChompKeep);\n if (!props)\n return str;\n return Node.isScalar(node) || str[0] === '{' || str[0] === '['\n ? `${props} ${str}`\n : `${props}\\n${ctx.indent}${str}`;\n}\n\nexports.createStringifyContext = createStringifyContext;\nexports.stringify = stringify;\n","'use strict';\n\nvar Collection = require('../nodes/Collection.js');\nvar Node = require('../nodes/Node.js');\nvar stringify = require('./stringify.js');\nvar stringifyComment = require('./stringifyComment.js');\n\nfunction stringifyCollection(collection, ctx, options) {\n const flow = ctx.inFlow ?? collection.flow;\n const stringify = flow ? stringifyFlowCollection : stringifyBlockCollection;\n return stringify(collection, ctx, options);\n}\nfunction stringifyBlockCollection({ comment, items }, ctx, { blockItemPrefix, flowChars, itemIndent, onChompKeep, onComment }) {\n const { indent, options: { commentString } } = ctx;\n const itemCtx = Object.assign({}, ctx, { indent: itemIndent, type: null });\n let chompKeep = false; // flag for the preceding node's status\n const lines = [];\n for (let i = 0; i < items.length; ++i) {\n const item = items[i];\n let comment = null;\n if (Node.isNode(item)) {\n if (!chompKeep && item.spaceBefore)\n lines.push('');\n addCommentBefore(ctx, lines, item.commentBefore, chompKeep);\n if (item.comment)\n comment = item.comment;\n }\n else if (Node.isPair(item)) {\n const ik = Node.isNode(item.key) ? item.key : null;\n if (ik) {\n if (!chompKeep && ik.spaceBefore)\n lines.push('');\n addCommentBefore(ctx, lines, ik.commentBefore, chompKeep);\n }\n }\n chompKeep = false;\n let str = stringify.stringify(item, itemCtx, () => (comment = null), () => (chompKeep = true));\n if (comment)\n str += stringifyComment.lineComment(str, itemIndent, commentString(comment));\n if (chompKeep && comment)\n chompKeep = false;\n lines.push(blockItemPrefix + str);\n }\n let str;\n if (lines.length === 0) {\n str = flowChars.start + flowChars.end;\n }\n else {\n str = lines[0];\n for (let i = 1; i < lines.length; ++i) {\n const line = lines[i];\n str += line ? 
`\\n${indent}${line}` : '\\n';\n }\n }\n if (comment) {\n str += '\\n' + stringifyComment.indentComment(commentString(comment), indent);\n if (onComment)\n onComment();\n }\n else if (chompKeep && onChompKeep)\n onChompKeep();\n return str;\n}\nfunction stringifyFlowCollection({ comment, items }, ctx, { flowChars, itemIndent, onComment }) {\n const { indent, indentStep, options: { commentString } } = ctx;\n itemIndent += indentStep;\n const itemCtx = Object.assign({}, ctx, {\n indent: itemIndent,\n inFlow: true,\n type: null\n });\n let reqNewline = false;\n let linesAtValue = 0;\n const lines = [];\n for (let i = 0; i < items.length; ++i) {\n const item = items[i];\n let comment = null;\n if (Node.isNode(item)) {\n if (item.spaceBefore)\n lines.push('');\n addCommentBefore(ctx, lines, item.commentBefore, false);\n if (item.comment)\n comment = item.comment;\n }\n else if (Node.isPair(item)) {\n const ik = Node.isNode(item.key) ? item.key : null;\n if (ik) {\n if (ik.spaceBefore)\n lines.push('');\n addCommentBefore(ctx, lines, ik.commentBefore, false);\n if (ik.comment)\n reqNewline = true;\n }\n const iv = Node.isNode(item.value) ? item.value : null;\n if (iv) {\n if (iv.comment)\n comment = iv.comment;\n if (iv.commentBefore)\n reqNewline = true;\n }\n else if (item.value == null && ik && ik.comment) {\n comment = ik.comment;\n }\n }\n if (comment)\n reqNewline = true;\n let str = stringify.stringify(item, itemCtx, () => (comment = null));\n if (i < items.length - 1)\n str += ',';\n if (comment)\n str += stringifyComment.lineComment(str, itemIndent, commentString(comment));\n if (!reqNewline && (lines.length > linesAtValue || str.includes('\\n')))\n reqNewline = true;\n lines.push(str);\n linesAtValue = lines.length;\n }\n let str;\n const { start, end } = flowChars;\n if (lines.length === 0) {\n str = start + end;\n }\n else {\n if (!reqNewline) {\n const len = lines.reduce((sum, line) => sum + line.length + 2, 2);\n reqNewline = len > Collection.Collection.maxFlowStringSingleLineLength;\n }\n if (reqNewline) {\n str = start;\n for (const line of lines)\n str += line ? `\\n${indentStep}${indent}${line}` : '\\n';\n str += `\\n${indent}${end}`;\n }\n else {\n str = `${start} ${lines.join(' ')} ${end}`;\n }\n }\n if (comment) {\n str += stringifyComment.lineComment(str, commentString(comment), indent);\n if (onComment)\n onComment();\n }\n return str;\n}\nfunction addCommentBefore({ indent, options: { commentString } }, lines, comment, chompKeep) {\n if (comment && chompKeep)\n comment = comment.replace(/^\\n+/, '');\n if (comment) {\n const ic = stringifyComment.indentComment(commentString(comment), indent);\n lines.push(ic.trimStart()); // Avoid double indent on first line\n }\n}\n\nexports.stringifyCollection = stringifyCollection;\n","'use strict';\n\n/**\n * Stringifies a comment.\n *\n * Empty comment lines are left empty,\n * lines consisting of a single space are replaced by `#`,\n * and all other lines are prefixed with a `#`.\n */\nconst stringifyComment = (str) => str.replace(/^(?!$)(?: $)?/gm, '#');\nfunction indentComment(comment, indent) {\n if (/^\\n+$/.test(comment))\n return comment.substring(1);\n return indent ? comment.replace(/^(?! *$)/gm, indent) : comment;\n}\nconst lineComment = (str, indent, comment) => str.endsWith('\\n')\n ? indentComment(comment, indent)\n : comment.includes('\\n')\n ? '\\n' + indentComment(comment, indent)\n : (str.endsWith(' ') ? 
'' : ' ') + comment;\n\nexports.indentComment = indentComment;\nexports.lineComment = lineComment;\nexports.stringifyComment = stringifyComment;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar stringify = require('./stringify.js');\nvar stringifyComment = require('./stringifyComment.js');\n\nfunction stringifyDocument(doc, options) {\n const lines = [];\n let hasDirectives = options.directives === true;\n if (options.directives !== false && doc.directives) {\n const dir = doc.directives.toString(doc);\n if (dir) {\n lines.push(dir);\n hasDirectives = true;\n }\n else if (doc.directives.docStart)\n hasDirectives = true;\n }\n if (hasDirectives)\n lines.push('---');\n const ctx = stringify.createStringifyContext(doc, options);\n const { commentString } = ctx.options;\n if (doc.commentBefore) {\n if (lines.length !== 1)\n lines.unshift('');\n const cs = commentString(doc.commentBefore);\n lines.unshift(stringifyComment.indentComment(cs, ''));\n }\n let chompKeep = false;\n let contentComment = null;\n if (doc.contents) {\n if (Node.isNode(doc.contents)) {\n if (doc.contents.spaceBefore && hasDirectives)\n lines.push('');\n if (doc.contents.commentBefore) {\n const cs = commentString(doc.contents.commentBefore);\n lines.push(stringifyComment.indentComment(cs, ''));\n }\n // top-level block scalars need to be indented if followed by a comment\n ctx.forceBlockIndent = !!doc.comment;\n contentComment = doc.contents.comment;\n }\n const onChompKeep = contentComment ? undefined : () => (chompKeep = true);\n let body = stringify.stringify(doc.contents, ctx, () => (contentComment = null), onChompKeep);\n if (contentComment)\n body += stringifyComment.lineComment(body, '', commentString(contentComment));\n if ((body[0] === '|' || body[0] === '>') &&\n lines[lines.length - 1] === '---') {\n // Top-level block scalars with a preceding doc marker ought to use the\n // same line for their header.\n lines[lines.length - 1] = `--- ${body}`;\n }\n else\n lines.push(body);\n }\n else {\n lines.push(stringify.stringify(doc.contents, ctx));\n }\n if (doc.directives?.docEnd) {\n if (doc.comment) {\n const cs = commentString(doc.comment);\n if (cs.includes('\\n')) {\n lines.push('...');\n lines.push(stringifyComment.indentComment(cs, ''));\n }\n else {\n lines.push(`... ${cs}`);\n }\n }\n else {\n lines.push('...');\n }\n }\n else {\n let dc = doc.comment;\n if (dc && chompKeep)\n dc = dc.replace(/^\\n+/, '');\n if (dc) {\n if ((!chompKeep || contentComment) && lines[lines.length - 1] !== '')\n lines.push('');\n lines.push(stringifyComment.indentComment(commentString(dc), ''));\n }\n }\n return lines.join('\\n') + '\\n';\n}\n\nexports.stringifyDocument = stringifyDocument;\n","'use strict';\n\nfunction stringifyNumber({ format, minFractionDigits, tag, value }) {\n if (typeof value === 'bigint')\n return String(value);\n const num = typeof value === 'number' ? value : Number(value);\n if (!isFinite(num))\n return isNaN(num) ? '.nan' : num < 0 ? 
'-.inf' : '.inf';\n let n = JSON.stringify(value);\n if (!format &&\n minFractionDigits &&\n (!tag || tag === 'tag:yaml.org,2002:float') &&\n /^\\d/.test(n)) {\n let i = n.indexOf('.');\n if (i < 0) {\n i = n.length;\n n += '.';\n }\n let d = minFractionDigits - (n.length - i - 1);\n while (d-- > 0)\n n += '0';\n }\n return n;\n}\n\nexports.stringifyNumber = stringifyNumber;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar Scalar = require('../nodes/Scalar.js');\nvar stringify = require('./stringify.js');\nvar stringifyComment = require('./stringifyComment.js');\n\nfunction stringifyPair({ key, value }, ctx, onComment, onChompKeep) {\n const { allNullValues, doc, indent, indentStep, options: { commentString, indentSeq, simpleKeys } } = ctx;\n let keyComment = (Node.isNode(key) && key.comment) || null;\n if (simpleKeys) {\n if (keyComment) {\n throw new Error('With simple keys, key nodes cannot have comments');\n }\n if (Node.isCollection(key)) {\n const msg = 'With simple keys, collection cannot be used as a key value';\n throw new Error(msg);\n }\n }\n let explicitKey = !simpleKeys &&\n (!key ||\n (keyComment && value == null && !ctx.inFlow) ||\n Node.isCollection(key) ||\n (Node.isScalar(key)\n ? key.type === Scalar.Scalar.BLOCK_FOLDED || key.type === Scalar.Scalar.BLOCK_LITERAL\n : typeof key === 'object'));\n ctx = Object.assign({}, ctx, {\n allNullValues: false,\n implicitKey: !explicitKey && (simpleKeys || !allNullValues),\n indent: indent + indentStep\n });\n let keyCommentDone = false;\n let chompKeep = false;\n let str = stringify.stringify(key, ctx, () => (keyCommentDone = true), () => (chompKeep = true));\n if (!explicitKey && !ctx.inFlow && str.length > 1024) {\n if (simpleKeys)\n throw new Error('With simple keys, single line scalar must not span more than 1024 characters');\n explicitKey = true;\n }\n if (ctx.inFlow) {\n if (allNullValues || value == null) {\n if (keyCommentDone && onComment)\n onComment();\n return str === '' ? '?' : explicitKey ? `? ${str}` : str;\n }\n }\n else if ((allNullValues && !simpleKeys) || (value == null && explicitKey)) {\n str = `? ${str}`;\n if (keyComment && !keyCommentDone) {\n str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment));\n }\n else if (chompKeep && onChompKeep)\n onChompKeep();\n return str;\n }\n if (keyCommentDone)\n keyComment = null;\n if (explicitKey) {\n if (keyComment)\n str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment));\n str = `? 
${str}\\n${indent}:`;\n }\n else {\n str = `${str}:`;\n if (keyComment)\n str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment));\n }\n let vcb = '';\n let valueComment = null;\n if (Node.isNode(value)) {\n if (value.spaceBefore)\n vcb = '\\n';\n if (value.commentBefore) {\n const cs = commentString(value.commentBefore);\n vcb += `\\n${stringifyComment.indentComment(cs, ctx.indent)}`;\n }\n valueComment = value.comment;\n }\n else if (value && typeof value === 'object') {\n value = doc.createNode(value);\n }\n ctx.implicitKey = false;\n if (!explicitKey && !keyComment && Node.isScalar(value))\n ctx.indentAtStart = str.length + 1;\n chompKeep = false;\n if (!indentSeq &&\n indentStep.length >= 2 &&\n !ctx.inFlow &&\n !explicitKey &&\n Node.isSeq(value) &&\n !value.flow &&\n !value.tag &&\n !value.anchor) {\n // If indentSeq === false, consider '- ' as part of indentation where possible\n ctx.indent = ctx.indent.substr(2);\n }\n let valueCommentDone = false;\n const valueStr = stringify.stringify(value, ctx, () => (valueCommentDone = true), () => (chompKeep = true));\n let ws = ' ';\n if (vcb || keyComment) {\n if (valueStr === '' && !ctx.inFlow)\n ws = vcb === '\\n' ? '\\n\\n' : vcb;\n else\n ws = `${vcb}\\n${ctx.indent}`;\n }\n else if (!explicitKey && Node.isCollection(value)) {\n const flow = valueStr[0] === '[' || valueStr[0] === '{';\n if (!flow || valueStr.includes('\\n'))\n ws = `\\n${ctx.indent}`;\n }\n else if (valueStr === '' || valueStr[0] === '\\n')\n ws = '';\n str += ws + valueStr;\n if (ctx.inFlow) {\n if (valueCommentDone && onComment)\n onComment();\n }\n else if (valueComment && !valueCommentDone) {\n str += stringifyComment.lineComment(str, ctx.indent, commentString(valueComment));\n }\n else if (chompKeep && onChompKeep) {\n onChompKeep();\n }\n return str;\n}\n\nexports.stringifyPair = stringifyPair;\n","'use strict';\n\nvar Scalar = require('../nodes/Scalar.js');\nvar foldFlowLines = require('./foldFlowLines.js');\n\nconst getFoldOptions = (ctx) => ({\n indentAtStart: ctx.indentAtStart,\n lineWidth: ctx.options.lineWidth,\n minContentWidth: ctx.options.minContentWidth\n});\n// Also checks for lines starting with %, as parsing the output as YAML 1.1 will\n// presume that's starting a new document.\nconst containsDocumentMarker = (str) => /^(%|---|\\.\\.\\.)/m.test(str);\nfunction lineLengthOverLimit(str, lineWidth, indentLength) {\n if (!lineWidth || lineWidth < 0)\n return false;\n const limit = lineWidth - indentLength;\n const strLen = str.length;\n if (strLen <= limit)\n return false;\n for (let i = 0, start = 0; i < strLen; ++i) {\n if (str[i] === '\\n') {\n if (i - start > limit)\n return true;\n start = i + 1;\n if (strLen - start <= limit)\n return false;\n }\n }\n return true;\n}\nfunction doubleQuotedString(value, ctx) {\n const json = JSON.stringify(value);\n if (ctx.options.doubleQuotedAsJSON)\n return json;\n const { implicitKey } = ctx;\n const minMultiLineLength = ctx.options.doubleQuotedMinMultiLineLength;\n const indent = ctx.indent || (containsDocumentMarker(value) ? 
' ' : '');\n let str = '';\n let start = 0;\n for (let i = 0, ch = json[i]; ch; ch = json[++i]) {\n if (ch === ' ' && json[i + 1] === '\\\\' && json[i + 2] === 'n') {\n // space before newline needs to be escaped to not be folded\n str += json.slice(start, i) + '\\\\ ';\n i += 1;\n start = i;\n ch = '\\\\';\n }\n if (ch === '\\\\')\n switch (json[i + 1]) {\n case 'u':\n {\n str += json.slice(start, i);\n const code = json.substr(i + 2, 4);\n switch (code) {\n case '0000':\n str += '\\\\0';\n break;\n case '0007':\n str += '\\\\a';\n break;\n case '000b':\n str += '\\\\v';\n break;\n case '001b':\n str += '\\\\e';\n break;\n case '0085':\n str += '\\\\N';\n break;\n case '00a0':\n str += '\\\\_';\n break;\n case '2028':\n str += '\\\\L';\n break;\n case '2029':\n str += '\\\\P';\n break;\n default:\n if (code.substr(0, 2) === '00')\n str += '\\\\x' + code.substr(2);\n else\n str += json.substr(i, 6);\n }\n i += 5;\n start = i + 1;\n }\n break;\n case 'n':\n if (implicitKey ||\n json[i + 2] === '\"' ||\n json.length < minMultiLineLength) {\n i += 1;\n }\n else {\n // folding will eat first newline\n str += json.slice(start, i) + '\\n\\n';\n while (json[i + 2] === '\\\\' &&\n json[i + 3] === 'n' &&\n json[i + 4] !== '\"') {\n str += '\\n';\n i += 2;\n }\n str += indent;\n // space after newline needs to be escaped to not be folded\n if (json[i + 2] === ' ')\n str += '\\\\';\n i += 1;\n start = i + 1;\n }\n break;\n default:\n i += 1;\n }\n }\n str = start ? str + json.slice(start) : json;\n return implicitKey\n ? str\n : foldFlowLines.foldFlowLines(str, indent, foldFlowLines.FOLD_QUOTED, getFoldOptions(ctx));\n}\nfunction singleQuotedString(value, ctx) {\n if (ctx.options.singleQuote === false ||\n (ctx.implicitKey && value.includes('\\n')) ||\n /[ \\t]\\n|\\n[ \\t]/.test(value) // single quoted string can't have leading or trailing whitespace around newline\n )\n return doubleQuotedString(value, ctx);\n const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : '');\n const res = \"'\" + value.replace(/'/g, \"''\").replace(/\\n+/g, `$&\\n${indent}`) + \"'\";\n return ctx.implicitKey\n ? res\n : foldFlowLines.foldFlowLines(res, indent, foldFlowLines.FOLD_FLOW, getFoldOptions(ctx));\n}\nfunction quotedString(value, ctx) {\n const { singleQuote } = ctx.options;\n let qs;\n if (singleQuote === false)\n qs = doubleQuotedString;\n else {\n const hasDouble = value.includes('\"');\n const hasSingle = value.includes(\"'\");\n if (hasDouble && !hasSingle)\n qs = singleQuotedString;\n else if (hasSingle && !hasDouble)\n qs = doubleQuotedString;\n else\n qs = singleQuote ? singleQuotedString : doubleQuotedString;\n }\n return qs(value, ctx);\n}\nfunction blockString({ comment, type, value }, ctx, onComment, onChompKeep) {\n const { blockQuote, commentString, lineWidth } = ctx.options;\n // 1. Block can't end in whitespace unless the last line is non-empty.\n // 2. Strings consisting of only whitespace are best rendered explicitly.\n if (!blockQuote || /\\n[\\t ]+$/.test(value) || /^\\s*$/.test(value)) {\n return quotedString(value, ctx);\n }\n const indent = ctx.indent ||\n (ctx.forceBlockIndent || containsDocumentMarker(value) ? ' ' : '');\n const literal = blockQuote === 'literal'\n ? true\n : blockQuote === 'folded' || type === Scalar.Scalar.BLOCK_FOLDED\n ? false\n : type === Scalar.Scalar.BLOCK_LITERAL\n ? true\n : !lineLengthOverLimit(value, lineWidth, indent.length);\n if (!value)\n return literal ? 
'|\\n' : '>\\n';\n // determine chomping from whitespace at value end\n let chomp;\n let endStart;\n for (endStart = value.length; endStart > 0; --endStart) {\n const ch = value[endStart - 1];\n if (ch !== '\\n' && ch !== '\\t' && ch !== ' ')\n break;\n }\n let end = value.substring(endStart);\n const endNlPos = end.indexOf('\\n');\n if (endNlPos === -1) {\n chomp = '-'; // strip\n }\n else if (value === end || endNlPos !== end.length - 1) {\n chomp = '+'; // keep\n if (onChompKeep)\n onChompKeep();\n }\n else {\n chomp = ''; // clip\n }\n if (end) {\n value = value.slice(0, -end.length);\n if (end[end.length - 1] === '\\n')\n end = end.slice(0, -1);\n end = end.replace(/\\n+(?!\\n|$)/g, `$&${indent}`);\n }\n // determine indent indicator from whitespace at value start\n let startWithSpace = false;\n let startEnd;\n let startNlPos = -1;\n for (startEnd = 0; startEnd < value.length; ++startEnd) {\n const ch = value[startEnd];\n if (ch === ' ')\n startWithSpace = true;\n else if (ch === '\\n')\n startNlPos = startEnd;\n else\n break;\n }\n let start = value.substring(0, startNlPos < startEnd ? startNlPos + 1 : startEnd);\n if (start) {\n value = value.substring(start.length);\n start = start.replace(/\\n+/g, `$&${indent}`);\n }\n const indentSize = indent ? '2' : '1'; // root is at -1\n let header = (literal ? '|' : '>') + (startWithSpace ? indentSize : '') + chomp;\n if (comment) {\n header += ' ' + commentString(comment.replace(/ ?[\\r\\n]+/g, ' '));\n if (onComment)\n onComment();\n }\n if (literal) {\n value = value.replace(/\\n+/g, `$&${indent}`);\n return `${header}\\n${indent}${start}${value}${end}`;\n }\n value = value\n .replace(/\\n+/g, '\\n$&')\n .replace(/(?:^|\\n)([\\t ].*)(?:([\\n\\t ]*)\\n(?![\\n\\t ]))?/g, '$1$2') // more-indented lines aren't folded\n // ^ more-ind. ^ empty ^ capture next empty lines only at end of indent\n .replace(/\\n+/g, `$&${indent}`);\n const body = foldFlowLines.foldFlowLines(`${start}${value}${end}`, indent, foldFlowLines.FOLD_BLOCK, getFoldOptions(ctx));\n return `${header}\\n${indent}${body}`;\n}\nfunction plainString(item, ctx, onComment, onChompKeep) {\n const { type, value } = item;\n const { actualString, implicitKey, indent, inFlow } = ctx;\n if ((implicitKey && /[\\n[\\]{},]/.test(value)) ||\n (inFlow && /[[\\]{},]/.test(value))) {\n return quotedString(value, ctx);\n }\n if (!value ||\n /^[\\n\\t ,[\\]{}#&*!|>'\"%@`]|^[?-]$|^[?-][ \\t]|[\\n:][ \\t]|[ \\t]\\n|[\\n\\t ]#|[\\n\\t :]$/.test(value)) {\n // not allowed:\n // - empty string, '-' or '?'\n // - start with an indicator character (except [?:-]) or /[?-] /\n // - '\\n ', ': ' or ' \\n' anywhere\n // - '#' not preceded by a non-space char\n // - end with ' ' or ':'\n return implicitKey || inFlow || !value.includes('\\n')\n ? quotedString(value, ctx)\n : blockString(item, ctx, onComment, onChompKeep);\n }\n if (!implicitKey &&\n !inFlow &&\n type !== Scalar.Scalar.PLAIN &&\n value.includes('\\n')) {\n // Where allowed & type not set explicitly, prefer block style for multiline strings\n return blockString(item, ctx, onComment, onChompKeep);\n }\n if (indent === '' && containsDocumentMarker(value)) {\n ctx.forceBlockIndent = true;\n return blockString(item, ctx, onComment, onChompKeep);\n }\n const str = value.replace(/\\n+/g, `$&\\n${indent}`);\n // Verify that output will be parsed as a string, as e.g. plain numbers and\n // booleans get parsed with those types in v1.2 (e.g. 
'42', 'true' & '0.9e-3'),\n // and others in v1.1.\n if (actualString) {\n const test = (tag) => tag.default && tag.tag !== 'tag:yaml.org,2002:str' && tag.test?.test(str);\n const { compat, tags } = ctx.doc.schema;\n if (tags.some(test) || compat?.some(test))\n return quotedString(value, ctx);\n }\n return implicitKey\n ? str\n : foldFlowLines.foldFlowLines(str, indent, foldFlowLines.FOLD_FLOW, getFoldOptions(ctx));\n}\nfunction stringifyString(item, ctx, onComment, onChompKeep) {\n const { implicitKey, inFlow } = ctx;\n const ss = typeof item.value === 'string'\n ? item\n : Object.assign({}, item, { value: String(item.value) });\n let { type } = item;\n if (type !== Scalar.Scalar.QUOTE_DOUBLE) {\n // force double quotes on control characters & unpaired surrogates\n if (/[\\x00-\\x08\\x0b-\\x1f\\x7f-\\x9f\\u{D800}-\\u{DFFF}]/u.test(ss.value))\n type = Scalar.Scalar.QUOTE_DOUBLE;\n }\n const _stringify = (_type) => {\n switch (_type) {\n case Scalar.Scalar.BLOCK_FOLDED:\n case Scalar.Scalar.BLOCK_LITERAL:\n return implicitKey || inFlow\n ? quotedString(ss.value, ctx) // blocks are not valid inside flow containers\n : blockString(ss, ctx, onComment, onChompKeep);\n case Scalar.Scalar.QUOTE_DOUBLE:\n return doubleQuotedString(ss.value, ctx);\n case Scalar.Scalar.QUOTE_SINGLE:\n return singleQuotedString(ss.value, ctx);\n case Scalar.Scalar.PLAIN:\n return plainString(ss, ctx, onComment, onChompKeep);\n default:\n return null;\n }\n };\n let res = _stringify(type);\n if (res === null) {\n const { defaultKeyType, defaultStringType } = ctx.options;\n const t = (implicitKey && defaultKeyType) || defaultStringType;\n res = _stringify(t);\n if (res === null)\n throw new Error(`Unsupported default string type ${t}`);\n }\n return res;\n}\n\nexports.stringifyString = stringifyString;\n","'use strict';\n\nvar Node = require('./nodes/Node.js');\n\nconst BREAK = Symbol('break visit');\nconst SKIP = Symbol('skip children');\nconst REMOVE = Symbol('remove node');\n/**\n * Apply a visitor to an AST node or document.\n *\n * Walks through the tree (depth-first) starting from `node`, calling a\n * `visitor` function with three arguments:\n * - `key`: For sequence values and map `Pair`, the node's index in the\n * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly.\n * `null` for the root node.\n * - `node`: The current node.\n * - `path`: The ancestry of the current node.\n *\n * The return value of the visitor may be used to control the traversal:\n * - `undefined` (default): Do nothing and continue\n * - `visit.SKIP`: Do not visit the children of this node, continue with next\n * sibling\n * - `visit.BREAK`: Terminate traversal completely\n * - `visit.REMOVE`: Remove the current node, then continue with the next one\n * - `Node`: Replace the current node, then continue by visiting it\n * - `number`: While iterating the items of a sequence or map, set the index\n * of the next step. This is useful especially if the index of the current\n * node has changed.\n *\n * If `visitor` is a single function, it will be called with all values\n * encountered in the tree, including e.g. `null` values. Alternatively,\n * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`,\n * `Alias` and `Scalar` node. To define the same visitor function for more than\n * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar)\n * and `Node` (alias, map, seq & scalar) targets. 
Of all these, only the most\n * specific defined one will be used for each node.\n */\nfunction visit(node, visitor) {\n const visitor_ = initVisitor(visitor);\n if (Node.isDocument(node)) {\n const cd = visit_(null, node.contents, visitor_, Object.freeze([node]));\n if (cd === REMOVE)\n node.contents = null;\n }\n else\n visit_(null, node, visitor_, Object.freeze([]));\n}\n// Without the `as symbol` casts, TS declares these in the `visit`\n// namespace using `var`, but then complains about that because\n// `unique symbol` must be `const`.\n/** Terminate visit traversal completely */\nvisit.BREAK = BREAK;\n/** Do not visit the children of the current node */\nvisit.SKIP = SKIP;\n/** Remove the current node */\nvisit.REMOVE = REMOVE;\nfunction visit_(key, node, visitor, path) {\n const ctrl = callVisitor(key, node, visitor, path);\n if (Node.isNode(ctrl) || Node.isPair(ctrl)) {\n replaceNode(key, path, ctrl);\n return visit_(key, ctrl, visitor, path);\n }\n if (typeof ctrl !== 'symbol') {\n if (Node.isCollection(node)) {\n path = Object.freeze(path.concat(node));\n for (let i = 0; i < node.items.length; ++i) {\n const ci = visit_(i, node.items[i], visitor, path);\n if (typeof ci === 'number')\n i = ci - 1;\n else if (ci === BREAK)\n return BREAK;\n else if (ci === REMOVE) {\n node.items.splice(i, 1);\n i -= 1;\n }\n }\n }\n else if (Node.isPair(node)) {\n path = Object.freeze(path.concat(node));\n const ck = visit_('key', node.key, visitor, path);\n if (ck === BREAK)\n return BREAK;\n else if (ck === REMOVE)\n node.key = null;\n const cv = visit_('value', node.value, visitor, path);\n if (cv === BREAK)\n return BREAK;\n else if (cv === REMOVE)\n node.value = null;\n }\n }\n return ctrl;\n}\n/**\n * Apply an async visitor to an AST node or document.\n *\n * Walks through the tree (depth-first) starting from `node`, calling a\n * `visitor` function with three arguments:\n * - `key`: For sequence values and map `Pair`, the node's index in the\n * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly.\n * `null` for the root node.\n * - `node`: The current node.\n * - `path`: The ancestry of the current node.\n *\n * The return value of the visitor may be used to control the traversal:\n * - `Promise`: Must resolve to one of the following values\n * - `undefined` (default): Do nothing and continue\n * - `visit.SKIP`: Do not visit the children of this node, continue with next\n * sibling\n * - `visit.BREAK`: Terminate traversal completely\n * - `visit.REMOVE`: Remove the current node, then continue with the next one\n * - `Node`: Replace the current node, then continue by visiting it\n * - `number`: While iterating the items of a sequence or map, set the index\n * of the next step. This is useful especially if the index of the current\n * node has changed.\n *\n * If `visitor` is a single function, it will be called with all values\n * encountered in the tree, including e.g. `null` values. Alternatively,\n * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`,\n * `Alias` and `Scalar` node. To define the same visitor function for more than\n * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar)\n * and `Node` (alias, map, seq & scalar) targets. 
Of all these, only the most\n * specific defined one will be used for each node.\n */\nasync function visitAsync(node, visitor) {\n const visitor_ = initVisitor(visitor);\n if (Node.isDocument(node)) {\n const cd = await visitAsync_(null, node.contents, visitor_, Object.freeze([node]));\n if (cd === REMOVE)\n node.contents = null;\n }\n else\n await visitAsync_(null, node, visitor_, Object.freeze([]));\n}\n// Without the `as symbol` casts, TS declares these in the `visit`\n// namespace using `var`, but then complains about that because\n// `unique symbol` must be `const`.\n/** Terminate visit traversal completely */\nvisitAsync.BREAK = BREAK;\n/** Do not visit the children of the current node */\nvisitAsync.SKIP = SKIP;\n/** Remove the current node */\nvisitAsync.REMOVE = REMOVE;\nasync function visitAsync_(key, node, visitor, path) {\n const ctrl = await callVisitor(key, node, visitor, path);\n if (Node.isNode(ctrl) || Node.isPair(ctrl)) {\n replaceNode(key, path, ctrl);\n return visitAsync_(key, ctrl, visitor, path);\n }\n if (typeof ctrl !== 'symbol') {\n if (Node.isCollection(node)) {\n path = Object.freeze(path.concat(node));\n for (let i = 0; i < node.items.length; ++i) {\n const ci = await visitAsync_(i, node.items[i], visitor, path);\n if (typeof ci === 'number')\n i = ci - 1;\n else if (ci === BREAK)\n return BREAK;\n else if (ci === REMOVE) {\n node.items.splice(i, 1);\n i -= 1;\n }\n }\n }\n else if (Node.isPair(node)) {\n path = Object.freeze(path.concat(node));\n const ck = await visitAsync_('key', node.key, visitor, path);\n if (ck === BREAK)\n return BREAK;\n else if (ck === REMOVE)\n node.key = null;\n const cv = await visitAsync_('value', node.value, visitor, path);\n if (cv === BREAK)\n return BREAK;\n else if (cv === REMOVE)\n node.value = null;\n }\n }\n return ctrl;\n}\nfunction initVisitor(visitor) {\n if (typeof visitor === 'object' &&\n (visitor.Collection || visitor.Node || visitor.Value)) {\n return Object.assign({\n Alias: visitor.Node,\n Map: visitor.Node,\n Scalar: visitor.Node,\n Seq: visitor.Node\n }, visitor.Value && {\n Map: visitor.Value,\n Scalar: visitor.Value,\n Seq: visitor.Value\n }, visitor.Collection && {\n Map: visitor.Collection,\n Seq: visitor.Collection\n }, visitor);\n }\n return visitor;\n}\nfunction callVisitor(key, node, visitor, path) {\n if (typeof visitor === 'function')\n return visitor(key, node, path);\n if (Node.isMap(node))\n return visitor.Map?.(key, node, path);\n if (Node.isSeq(node))\n return visitor.Seq?.(key, node, path);\n if (Node.isPair(node))\n return visitor.Pair?.(key, node, path);\n if (Node.isScalar(node))\n return visitor.Scalar?.(key, node, path);\n if (Node.isAlias(node))\n return visitor.Alias?.(key, node, path);\n return undefined;\n}\nfunction replaceNode(key, path, node) {\n const parent = path[path.length - 1];\n if (Node.isCollection(parent)) {\n parent.items[key] = node;\n }\n else if (Node.isPair(parent)) {\n if (key === 'key')\n parent.key = node;\n else\n parent.value = node;\n }\n else if (Node.isDocument(parent)) {\n parent.contents = node;\n }\n else {\n const pt = Node.isAlias(parent) ? 
'alias' : 'scalar';\n throw new Error(`Cannot replace node with ${pt} parent`);\n }\n}\n\nexports.visit = visit;\nexports.visitAsync = visitAsync;\n",null,"module.exports = require(\"assert\");","module.exports = require(\"constants\");","module.exports = require(\"events\");","module.exports = require(\"fs\");","module.exports = require(\"http\");","module.exports = require(\"https\");","module.exports = require(\"net\");","module.exports = require(\"os\");","module.exports = require(\"path\");","module.exports = require(\"punycode\");","module.exports = require(\"stream\");","module.exports = require(\"tls\");","module.exports = require(\"url\");","module.exports = require(\"util\");","module.exports = require(\"zlib\");","// The module cache\nvar __webpack_module_cache__ = {};\n\n// The require function\nfunction __webpack_require__(moduleId) {\n\t// Check if module is in cache\n\tvar cachedModule = __webpack_module_cache__[moduleId];\n\tif (cachedModule !== undefined) {\n\t\treturn cachedModule.exports;\n\t}\n\t// Create a new module (and put it into the cache)\n\tvar module = __webpack_module_cache__[moduleId] = {\n\t\t// no module.id needed\n\t\t// no module.loaded needed\n\t\texports: {}\n\t};\n\n\t// Execute the module function\n\tvar threw = true;\n\ttry {\n\t\t__webpack_modules__[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\t\tthrew = false;\n\t} finally {\n\t\tif(threw) delete __webpack_module_cache__[moduleId];\n\t}\n\n\t// Return the exports of the module\n\treturn module.exports;\n}\n\n","\nif (typeof __webpack_require__ !== 'undefined') __webpack_require__.ab = __dirname + \"/\";","// startup\n// Load entry module and return exports\n// This entry module is referenced by other modules so it can't be inlined\nvar __webpack_exports__ = 
__webpack_require__(3109);\n"],"mappings":";;;;;;;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC3HA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;A;;;;;;AC3FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;A;;;;;;ACxUA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;A;;;;;;ACzCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;A;;;;;;AC5EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;;;;;ACrDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC1EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACvCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACXA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACrBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC7SA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACzJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACzBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;AC/bA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;ACrHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AClWA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;ACrCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;ACvFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACbA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC7UA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;AChqDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;ACzCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;ACvIA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;ACtgBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;AC9RA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;ACLA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;ACnDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;ACnDA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;ACNA;AACA;AACA;;;A;;;;;ACFA;AACA;AACA;AACA;AACA;;;A;;;;;ACJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;ACtBA;AACA;AACA;;;A;;;;;ACFA;AACA;AACA;;;A;;;;;ACFA;AACA;AACA;;;A;;;;;ACFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;ACjBA;AACA;AACA;;;A;;;;;ACFA;AACA;AACA;;;A;;;;;ACFA;AACA;AACA;;;A;;;;;ACF
A;AACA;AACA;;;A;;;;;ACFA;AACA;AACA;;;A;;;;;ACFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;AChCA;AACA;AACA;;;A;;;;;ACFA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;ACLA;AACA;AACA;;;A;;;;;ACFA;AACA;AACA;;;A;;;;;ACFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;ACTA;AACA;AACA;;;A;;;;;ACFA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;ACLA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;AC/CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;ACRA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;ACVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;ACrLA;AACA;AACA;AACA;;;A;;;;;ACHA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;ACNA;AACA;AACA;AACA;;;A;;;;;ACHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;ACxBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;ACvBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;AC5DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;AC/EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;AC9CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;ACnPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;AChMA;;;A;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACvQA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACjBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACvBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC5LA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACvMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACnMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AChxCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;ACnBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AChCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACzaA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC5DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACzCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC5FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACjFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC5NA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC5GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACnMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC9CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACtCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACzMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AChOA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACvIA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACnCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC5BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AClBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC7UA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC5EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACvDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACtFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC1KA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC7DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACjDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC/FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACtJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACjEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACtCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACzBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACtHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AClGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACzGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACtCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACzNA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC9DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AClGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC/GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC9rBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACxCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;;;A;;;;;;ACz7BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACvGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACvCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC3CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AClCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACpBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC9CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACzCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACxBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC/DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACrFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACnEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC5BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACjDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC3EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC3EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AChFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACtCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC7FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACxGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC3IA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC5HA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACxJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACvBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACtFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;ACzBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC9HA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;;AC3TA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A;;;;;AC3OA;;;A;;;;;;A;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;;;ACAA;;A;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AC7BA;AACA;;;;ACDA;AACA;AACA;AACA;;;;A","sourceRoot":""} \ No newline at end of file diff --git a/.github/actions/verify-chart-version/dist/licenses.txt b/.github/actions/verify-chart-version/dist/licenses.txt deleted file mode 100644 index d9d394e7..00000000 --- a/.github/actions/verify-chart-version/dist/licenses.txt +++ /dev/null @@ -1,777 +0,0 @@ -@actions/core -MIT -The MIT License (MIT) - -Copyright 2019 GitHub - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
- -@actions/github -MIT -The MIT License (MIT) - -Copyright 2019 GitHub - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -@actions/http-client -MIT -Actions Http Client for Node.js - -Copyright (c) GitHub, Inc. - -All rights reserved. - -MIT License - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and -associated documentation files (the "Software"), to deal in the Software without restriction, -including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT -LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - - -@octokit/auth-token -MIT -The MIT License - -Copyright (c) 2019 Octokit contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
- - -@octokit/core -MIT -The MIT License - -Copyright (c) 2019 Octokit contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. - - -@octokit/endpoint -MIT -The MIT License - -Copyright (c) 2018 Octokit contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. - - -@octokit/graphql -MIT -The MIT License - -Copyright (c) 2018 Octokit contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
- - -@octokit/plugin-paginate-rest -MIT -MIT License Copyright (c) 2019 Octokit contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - - -@octokit/plugin-rest-endpoint-methods -MIT -MIT License Copyright (c) 2019 Octokit contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - - -@octokit/request -MIT -The MIT License - -Copyright (c) 2018 Octokit contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
- - -@octokit/request-error -MIT -The MIT License - -Copyright (c) 2019 Octokit contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. - - -@vercel/ncc -MIT -Copyright 2018 ZEIT, Inc. - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -before-after-hook -Apache-2.0 - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. 
- - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2018 Gregor Martynus and other contributors. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - - -deprecation -ISC -The ISC License - -Copyright (c) Gregor Martynus and contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- - -fs-extra -MIT -(The MIT License) - -Copyright (c) 2011-2017 JP Richardson - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files -(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, - merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS -OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, - ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - - -graceful-fs -ISC -The ISC License - -Copyright (c) 2011-2022 Isaac Z. Schlueter, Ben Noordhuis, and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - - -is-plain-object -MIT -The MIT License (MIT) - -Copyright (c) 2014-2017, Jon Schlinkert. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
- - -jsonfile -MIT -(The MIT License) - -Copyright (c) 2012-2015, JP Richardson - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files -(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, - merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS -OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, - ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - - -lru-cache -ISC -The ISC License - -Copyright (c) Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - - -node-fetch -MIT -The MIT License (MIT) - -Copyright (c) 2016 David Frank - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - - - -once -ISC -The ISC License - -Copyright (c) Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - - -semver -ISC -The ISC License - -Copyright (c) Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - - -tr46 -MIT - -tunnel -MIT -The MIT License (MIT) - -Copyright (c) 2012 Koichi Kobayashi - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. - - -universal-user-agent -ISC -# [ISC License](https://spdx.org/licenses/ISC) - -Copyright (c) 2018, Gregor Martynus (https://github.com/gr2m) - -Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- - -universalify -MIT -(The MIT License) - -Copyright (c) 2017, Ryan Zimmerman - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the 'Software'), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - - -webidl-conversions -BSD-2-Clause -# The BSD 2-Clause License - -Copyright (c) 2014, Domenic Denicola -All rights reserved. - -Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - -1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - - -whatwg-url -MIT -The MIT License (MIT) - -Copyright (c) 2015–2016 Sebastian Mayr - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. - - -wrappy -ISC -The ISC License - -Copyright (c) Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - - -yallist -ISC -The ISC License - -Copyright (c) Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - - -yaml -ISC -Copyright Eemeli Aro - -Permission to use, copy, modify, and/or distribute this software for any purpose -with or without fee is hereby granted, provided that the above copyright notice -and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH -REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND -FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER -TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF -THIS SOFTWARE. 
diff --git a/.github/actions/verify-chart-version/dist/sourcemap-register.js b/.github/actions/verify-chart-version/dist/sourcemap-register.js deleted file mode 100644 index 56566f1d..00000000 --- a/.github/actions/verify-chart-version/dist/sourcemap-register.js +++ /dev/null @@ -1 +0,0 @@ -(()=>{var e={650:e=>{var r=Object.prototype.toString;var n=typeof Buffer.alloc==="function"&&typeof Buffer.allocUnsafe==="function"&&typeof Buffer.from==="function";function isArrayBuffer(e){return r.call(e).slice(8,-1)==="ArrayBuffer"}function fromArrayBuffer(e,r,t){r>>>=0;var o=e.byteLength-r;if(o<0){throw new RangeError("'offset' is out of bounds")}if(t===undefined){t=o}else{t>>>=0;if(t>o){throw new RangeError("'length' is out of bounds")}}return n?Buffer.from(e.slice(r,r+t)):new Buffer(new Uint8Array(e.slice(r,r+t)))}function fromString(e,r){if(typeof r!=="string"||r===""){r="utf8"}if(!Buffer.isEncoding(r)){throw new TypeError('"encoding" must be a valid string encoding')}return n?Buffer.from(e,r):new Buffer(e,r)}function bufferFrom(e,r,t){if(typeof e==="number"){throw new TypeError('"value" argument must not be a number')}if(isArrayBuffer(e)){return fromArrayBuffer(e,r,t)}if(typeof e==="string"){return fromString(e,r)}return n?Buffer.from(e):new Buffer(e)}e.exports=bufferFrom},284:(e,r,n)=>{e=n.nmd(e);var t=n(596).SourceMapConsumer;var o=n(622);var i;try{i=n(747);if(!i.existsSync||!i.readFileSync){i=null}}catch(e){}var a=n(650);function dynamicRequire(e,r){return e.require(r)}var u=false;var s=false;var l=false;var c="auto";var p={};var f={};var g=/^data:application\/json[^,]+base64,/;var h=[];var d=[];function isInBrowser(){if(c==="browser")return true;if(c==="node")return false;return typeof window!=="undefined"&&typeof XMLHttpRequest==="function"&&!(window.require&&window.module&&window.process&&window.process.type==="renderer")}function hasGlobalProcessEventEmitter(){return typeof process==="object"&&process!==null&&typeof process.on==="function"}function handlerExec(e){return function(r){for(var n=0;n"}var n=this.getLineNumber();if(n!=null){r+=":"+n;var t=this.getColumnNumber();if(t){r+=":"+t}}}var o="";var i=this.getFunctionName();var a=true;var u=this.isConstructor();var s=!(this.isToplevel()||u);if(s){var l=this.getTypeName();if(l==="[object Object]"){l="null"}var c=this.getMethodName();if(i){if(l&&i.indexOf(l)!=0){o+=l+"."}o+=i;if(c&&i.indexOf("."+c)!=i.length-c.length-1){o+=" [as "+c+"]"}}else{o+=l+"."+(c||"")}}else if(u){o+="new "+(i||"")}else if(i){o+=i}else{o+=r;a=false}if(a){o+=" ("+r+")"}return o}function cloneCallSite(e){var r={};Object.getOwnPropertyNames(Object.getPrototypeOf(e)).forEach((function(n){r[n]=/^(?:is|get)/.test(n)?function(){return e[n].call(e)}:e[n]}));r.toString=CallSiteToString;return r}function wrapCallSite(e,r){if(r===undefined){r={nextPosition:null,curPosition:null}}if(e.isNative()){r.curPosition=null;return e}var n=e.getFileName()||e.getScriptNameOrSourceURL();if(n){var t=e.getLineNumber();var o=e.getColumnNumber()-1;var i=/^v(10\.1[6-9]|10\.[2-9][0-9]|10\.[0-9]{3,}|1[2-9]\d*|[2-9]\d|\d{3,}|11\.11)/;var a=i.test(process.version)?0:62;if(t===1&&o>a&&!isInBrowser()&&!e.isEval()){o-=a}var u=mapSourcePosition({source:n,line:t,column:o});r.curPosition=u;e=cloneCallSite(e);var s=e.getFunctionName;e.getFunctionName=function(){if(r.nextPosition==null){return s()}return r.nextPosition.name||s()};e.getFileName=function(){return u.source};e.getLineNumber=function(){return u.line};e.getColumnNumber=function(){return u.column+1};e.getScriptNameOrSourceURL=function(){return 
u.source};return e}var l=e.isEval()&&e.getEvalOrigin();if(l){l=mapEvalOrigin(l);e=cloneCallSite(e);e.getEvalOrigin=function(){return l};return e}return e}function prepareStackTrace(e,r){if(l){p={};f={}}var n=e.name||"Error";var t=e.message||"";var o=n+": "+t;var i={nextPosition:null,curPosition:null};var a=[];for(var u=r.length-1;u>=0;u--){a.push("\n at "+wrapCallSite(r[u],i));i.nextPosition=i.curPosition}i.curPosition=i.nextPosition=null;return o+a.reverse().join("")}function getErrorSource(e){var r=/\n at [^(]+ \((.*):(\d+):(\d+)\)/.exec(e.stack);if(r){var n=r[1];var t=+r[2];var o=+r[3];var a=p[n];if(!a&&i&&i.existsSync(n)){try{a=i.readFileSync(n,"utf8")}catch(e){a=""}}if(a){var u=a.split(/(?:\r\n|\r|\n)/)[t-1];if(u){return n+":"+t+"\n"+u+"\n"+new Array(o).join(" ")+"^"}}}return null}function printErrorAndExit(e){var r=getErrorSource(e);if(process.stderr._handle&&process.stderr._handle.setBlocking){process.stderr._handle.setBlocking(true)}if(r){console.error();console.error(r)}console.error(e.stack);process.exit(1)}function shimEmitUncaughtException(){var e=process.emit;process.emit=function(r){if(r==="uncaughtException"){var n=arguments[1]&&arguments[1].stack;var t=this.listeners(r).length>0;if(n&&!t){return printErrorAndExit(arguments[1])}}return e.apply(this,arguments)}}var S=h.slice(0);var _=d.slice(0);r.wrapCallSite=wrapCallSite;r.getErrorSource=getErrorSource;r.mapSourcePosition=mapSourcePosition;r.retrieveSourceMap=v;r.install=function(r){r=r||{};if(r.environment){c=r.environment;if(["node","browser","auto"].indexOf(c)===-1){throw new Error("environment "+c+" was unknown. Available options are {auto, browser, node}")}}if(r.retrieveFile){if(r.overrideRetrieveFile){h.length=0}h.unshift(r.retrieveFile)}if(r.retrieveSourceMap){if(r.overrideRetrieveSourceMap){d.length=0}d.unshift(r.retrieveSourceMap)}if(r.hookRequire&&!isInBrowser()){var n=dynamicRequire(e,"module");var t=n.prototype._compile;if(!t.__sourceMapSupport){n.prototype._compile=function(e,r){p[r]=e;f[r]=undefined;return t.call(this,e,r)};n.prototype._compile.__sourceMapSupport=true}}if(!l){l="emptyCacheBetweenOperations"in r?r.emptyCacheBetweenOperations:false}if(!u){u=true;Error.prepareStackTrace=prepareStackTrace}if(!s){var o="handleUncaughtExceptions"in r?r.handleUncaughtExceptions:true;try{var i=dynamicRequire(e,"worker_threads");if(i.isMainThread===false){o=false}}catch(e){}if(o&&hasGlobalProcessEventEmitter()){s=true;shimEmitUncaughtException()}}};r.resetRetrieveHandlers=function(){h.length=0;d.length=0;h=S.slice(0);d=_.slice(0);v=handlerExec(d);m=handlerExec(h)}},837:(e,r,n)=>{var t=n(983);var o=Object.prototype.hasOwnProperty;var i=typeof Map!=="undefined";function ArraySet(){this._array=[];this._set=i?new Map:Object.create(null)}ArraySet.fromArray=function ArraySet_fromArray(e,r){var n=new ArraySet;for(var t=0,o=e.length;t=0){return r}}else{var n=t.toSetString(e);if(o.call(this._set,n)){return this._set[n]}}throw new Error('"'+e+'" is not in the set.')};ArraySet.prototype.at=function ArraySet_at(e){if(e>=0&&e{var t=n(537);var o=5;var i=1<>1;return r?-n:n}r.encode=function base64VLQ_encode(e){var r="";var n;var i=toVLQSigned(e);do{n=i&a;i>>>=o;if(i>0){n|=u}r+=t.encode(n)}while(i>0);return r};r.decode=function base64VLQ_decode(e,r,n){var i=e.length;var s=0;var l=0;var c,p;do{if(r>=i){throw new Error("Expected more digits in base 64 VLQ value.")}p=t.decode(e.charCodeAt(r++));if(p===-1){throw new Error("Invalid base64 digit: "+e.charAt(r-1))}c=!!(p&u);p&=a;s=s+(p<{var 
n="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".split("");r.encode=function(e){if(0<=e&&e{r.GREATEST_LOWER_BOUND=1;r.LEAST_UPPER_BOUND=2;function recursiveSearch(e,n,t,o,i,a){var u=Math.floor((n-e)/2)+e;var s=i(t,o[u],true);if(s===0){return u}else if(s>0){if(n-u>1){return recursiveSearch(u,n,t,o,i,a)}if(a==r.LEAST_UPPER_BOUND){return n1){return recursiveSearch(e,u,t,o,i,a)}if(a==r.LEAST_UPPER_BOUND){return u}else{return e<0?-1:e}}}r.search=function search(e,n,t,o){if(n.length===0){return-1}var i=recursiveSearch(-1,n.length,e,n,t,o||r.GREATEST_LOWER_BOUND);if(i<0){return-1}while(i-1>=0){if(t(n[i],n[i-1],true)!==0){break}--i}return i}},740:(e,r,n)=>{var t=n(983);function generatedPositionAfter(e,r){var n=e.generatedLine;var o=r.generatedLine;var i=e.generatedColumn;var a=r.generatedColumn;return o>n||o==n&&a>=i||t.compareByGeneratedPositionsInflated(e,r)<=0}function MappingList(){this._array=[];this._sorted=true;this._last={generatedLine:-1,generatedColumn:0}}MappingList.prototype.unsortedForEach=function MappingList_forEach(e,r){this._array.forEach(e,r)};MappingList.prototype.add=function MappingList_add(e){if(generatedPositionAfter(this._last,e)){this._last=e;this._array.push(e)}else{this._sorted=false;this._array.push(e)}};MappingList.prototype.toArray=function MappingList_toArray(){if(!this._sorted){this._array.sort(t.compareByGeneratedPositionsInflated);this._sorted=true}return this._array};r.H=MappingList},226:(e,r)=>{function swap(e,r,n){var t=e[r];e[r]=e[n];e[n]=t}function randomIntInRange(e,r){return Math.round(e+Math.random()*(r-e))}function doQuickSort(e,r,n,t){if(n{var t;var o=n(983);var i=n(164);var a=n(837).I;var u=n(215);var s=n(226).U;function SourceMapConsumer(e,r){var n=e;if(typeof e==="string"){n=o.parseSourceMapInput(e)}return n.sections!=null?new IndexedSourceMapConsumer(n,r):new BasicSourceMapConsumer(n,r)}SourceMapConsumer.fromSourceMap=function(e,r){return BasicSourceMapConsumer.fromSourceMap(e,r)};SourceMapConsumer.prototype._version=3;SourceMapConsumer.prototype.__generatedMappings=null;Object.defineProperty(SourceMapConsumer.prototype,"_generatedMappings",{configurable:true,enumerable:true,get:function(){if(!this.__generatedMappings){this._parseMappings(this._mappings,this.sourceRoot)}return this.__generatedMappings}});SourceMapConsumer.prototype.__originalMappings=null;Object.defineProperty(SourceMapConsumer.prototype,"_originalMappings",{configurable:true,enumerable:true,get:function(){if(!this.__originalMappings){this._parseMappings(this._mappings,this.sourceRoot)}return this.__originalMappings}});SourceMapConsumer.prototype._charIsMappingSeparator=function SourceMapConsumer_charIsMappingSeparator(e,r){var n=e.charAt(r);return n===";"||n===","};SourceMapConsumer.prototype._parseMappings=function SourceMapConsumer_parseMappings(e,r){throw new Error("Subclasses must implement _parseMappings")};SourceMapConsumer.GENERATED_ORDER=1;SourceMapConsumer.ORIGINAL_ORDER=2;SourceMapConsumer.GREATEST_LOWER_BOUND=1;SourceMapConsumer.LEAST_UPPER_BOUND=2;SourceMapConsumer.prototype.eachMapping=function SourceMapConsumer_eachMapping(e,r,n){var t=r||null;var i=n||SourceMapConsumer.GENERATED_ORDER;var a;switch(i){case SourceMapConsumer.GENERATED_ORDER:a=this._generatedMappings;break;case SourceMapConsumer.ORIGINAL_ORDER:a=this._originalMappings;break;default:throw new Error("Unknown order of iteration.")}var u=this.sourceRoot;a.map((function(e){var 
r=e.source===null?null:this._sources.at(e.source);r=o.computeSourceURL(u,r,this._sourceMapURL);return{source:r,generatedLine:e.generatedLine,generatedColumn:e.generatedColumn,originalLine:e.originalLine,originalColumn:e.originalColumn,name:e.name===null?null:this._names.at(e.name)}}),this).forEach(e,t)};SourceMapConsumer.prototype.allGeneratedPositionsFor=function SourceMapConsumer_allGeneratedPositionsFor(e){var r=o.getArg(e,"line");var n={source:o.getArg(e,"source"),originalLine:r,originalColumn:o.getArg(e,"column",0)};n.source=this._findSourceIndex(n.source);if(n.source<0){return[]}var t=[];var a=this._findMapping(n,this._originalMappings,"originalLine","originalColumn",o.compareByOriginalPositions,i.LEAST_UPPER_BOUND);if(a>=0){var u=this._originalMappings[a];if(e.column===undefined){var s=u.originalLine;while(u&&u.originalLine===s){t.push({line:o.getArg(u,"generatedLine",null),column:o.getArg(u,"generatedColumn",null),lastColumn:o.getArg(u,"lastGeneratedColumn",null)});u=this._originalMappings[++a]}}else{var l=u.originalColumn;while(u&&u.originalLine===r&&u.originalColumn==l){t.push({line:o.getArg(u,"generatedLine",null),column:o.getArg(u,"generatedColumn",null),lastColumn:o.getArg(u,"lastGeneratedColumn",null)});u=this._originalMappings[++a]}}}return t};r.SourceMapConsumer=SourceMapConsumer;function BasicSourceMapConsumer(e,r){var n=e;if(typeof e==="string"){n=o.parseSourceMapInput(e)}var t=o.getArg(n,"version");var i=o.getArg(n,"sources");var u=o.getArg(n,"names",[]);var s=o.getArg(n,"sourceRoot",null);var l=o.getArg(n,"sourcesContent",null);var c=o.getArg(n,"mappings");var p=o.getArg(n,"file",null);if(t!=this._version){throw new Error("Unsupported version: "+t)}if(s){s=o.normalize(s)}i=i.map(String).map(o.normalize).map((function(e){return s&&o.isAbsolute(s)&&o.isAbsolute(e)?o.relative(s,e):e}));this._names=a.fromArray(u.map(String),true);this._sources=a.fromArray(i,true);this._absoluteSources=this._sources.toArray().map((function(e){return o.computeSourceURL(s,e,r)}));this.sourceRoot=s;this.sourcesContent=l;this._mappings=c;this._sourceMapURL=r;this.file=p}BasicSourceMapConsumer.prototype=Object.create(SourceMapConsumer.prototype);BasicSourceMapConsumer.prototype.consumer=SourceMapConsumer;BasicSourceMapConsumer.prototype._findSourceIndex=function(e){var r=e;if(this.sourceRoot!=null){r=o.relative(this.sourceRoot,r)}if(this._sources.has(r)){return this._sources.indexOf(r)}var n;for(n=0;n1){v.source=l+_[1];l+=_[1];v.originalLine=i+_[2];i=v.originalLine;v.originalLine+=1;v.originalColumn=a+_[3];a=v.originalColumn;if(_.length>4){v.name=c+_[4];c+=_[4]}}m.push(v);if(typeof v.originalLine==="number"){d.push(v)}}}s(m,o.compareByGeneratedPositionsDeflated);this.__generatedMappings=m;s(d,o.compareByOriginalPositions);this.__originalMappings=d};BasicSourceMapConsumer.prototype._findMapping=function SourceMapConsumer_findMapping(e,r,n,t,o,a){if(e[n]<=0){throw new TypeError("Line must be greater than or equal to 1, got "+e[n])}if(e[t]<0){throw new TypeError("Column must be greater than or equal to 0, got "+e[t])}return i.search(e,r,o,a)};BasicSourceMapConsumer.prototype.computeColumnSpans=function SourceMapConsumer_computeColumnSpans(){for(var e=0;e=0){var t=this._generatedMappings[n];if(t.generatedLine===r.generatedLine){var i=o.getArg(t,"source",null);if(i!==null){i=this._sources.at(i);i=o.computeSourceURL(this.sourceRoot,i,this._sourceMapURL)}var 
a=o.getArg(t,"name",null);if(a!==null){a=this._names.at(a)}return{source:i,line:o.getArg(t,"originalLine",null),column:o.getArg(t,"originalColumn",null),name:a}}}return{source:null,line:null,column:null,name:null}};BasicSourceMapConsumer.prototype.hasContentsOfAllSources=function BasicSourceMapConsumer_hasContentsOfAllSources(){if(!this.sourcesContent){return false}return this.sourcesContent.length>=this._sources.size()&&!this.sourcesContent.some((function(e){return e==null}))};BasicSourceMapConsumer.prototype.sourceContentFor=function SourceMapConsumer_sourceContentFor(e,r){if(!this.sourcesContent){return null}var n=this._findSourceIndex(e);if(n>=0){return this.sourcesContent[n]}var t=e;if(this.sourceRoot!=null){t=o.relative(this.sourceRoot,t)}var i;if(this.sourceRoot!=null&&(i=o.urlParse(this.sourceRoot))){var a=t.replace(/^file:\/\//,"");if(i.scheme=="file"&&this._sources.has(a)){return this.sourcesContent[this._sources.indexOf(a)]}if((!i.path||i.path=="/")&&this._sources.has("/"+t)){return this.sourcesContent[this._sources.indexOf("/"+t)]}}if(r){return null}else{throw new Error('"'+t+'" is not in the SourceMap.')}};BasicSourceMapConsumer.prototype.generatedPositionFor=function SourceMapConsumer_generatedPositionFor(e){var r=o.getArg(e,"source");r=this._findSourceIndex(r);if(r<0){return{line:null,column:null,lastColumn:null}}var n={source:r,originalLine:o.getArg(e,"line"),originalColumn:o.getArg(e,"column")};var t=this._findMapping(n,this._originalMappings,"originalLine","originalColumn",o.compareByOriginalPositions,o.getArg(e,"bias",SourceMapConsumer.GREATEST_LOWER_BOUND));if(t>=0){var i=this._originalMappings[t];if(i.source===n.source){return{line:o.getArg(i,"generatedLine",null),column:o.getArg(i,"generatedColumn",null),lastColumn:o.getArg(i,"lastGeneratedColumn",null)}}}return{line:null,column:null,lastColumn:null}};t=BasicSourceMapConsumer;function IndexedSourceMapConsumer(e,r){var n=e;if(typeof e==="string"){n=o.parseSourceMapInput(e)}var t=o.getArg(n,"version");var i=o.getArg(n,"sections");if(t!=this._version){throw new Error("Unsupported version: "+t)}this._sources=new a;this._names=new a;var u={line:-1,column:0};this._sections=i.map((function(e){if(e.url){throw new Error("Support for url field in sections not implemented.")}var n=o.getArg(e,"offset");var t=o.getArg(n,"line");var i=o.getArg(n,"column");if(t{var t=n(215);var o=n(983);var i=n(837).I;var a=n(740).H;function SourceMapGenerator(e){if(!e){e={}}this._file=o.getArg(e,"file",null);this._sourceRoot=o.getArg(e,"sourceRoot",null);this._skipValidation=o.getArg(e,"skipValidation",false);this._sources=new i;this._names=new i;this._mappings=new a;this._sourcesContents=null}SourceMapGenerator.prototype._version=3;SourceMapGenerator.fromSourceMap=function SourceMapGenerator_fromSourceMap(e){var r=e.sourceRoot;var n=new SourceMapGenerator({file:e.file,sourceRoot:r});e.eachMapping((function(e){var t={generated:{line:e.generatedLine,column:e.generatedColumn}};if(e.source!=null){t.source=e.source;if(r!=null){t.source=o.relative(r,t.source)}t.original={line:e.originalLine,column:e.originalColumn};if(e.name!=null){t.name=e.name}}n.addMapping(t)}));e.sources.forEach((function(t){var i=t;if(r!==null){i=o.relative(r,t)}if(!n._sources.has(i)){n._sources.add(i)}var a=e.sourceContentFor(t);if(a!=null){n.setSourceContent(t,a)}}));return n};SourceMapGenerator.prototype.addMapping=function SourceMapGenerator_addMapping(e){var r=o.getArg(e,"generated");var n=o.getArg(e,"original",null);var t=o.getArg(e,"source",null);var 
i=o.getArg(e,"name",null);if(!this._skipValidation){this._validateMapping(r,n,t,i)}if(t!=null){t=String(t);if(!this._sources.has(t)){this._sources.add(t)}}if(i!=null){i=String(i);if(!this._names.has(i)){this._names.add(i)}}this._mappings.add({generatedLine:r.line,generatedColumn:r.column,originalLine:n!=null&&n.line,originalColumn:n!=null&&n.column,source:t,name:i})};SourceMapGenerator.prototype.setSourceContent=function SourceMapGenerator_setSourceContent(e,r){var n=e;if(this._sourceRoot!=null){n=o.relative(this._sourceRoot,n)}if(r!=null){if(!this._sourcesContents){this._sourcesContents=Object.create(null)}this._sourcesContents[o.toSetString(n)]=r}else if(this._sourcesContents){delete this._sourcesContents[o.toSetString(n)];if(Object.keys(this._sourcesContents).length===0){this._sourcesContents=null}}};SourceMapGenerator.prototype.applySourceMap=function SourceMapGenerator_applySourceMap(e,r,n){var t=r;if(r==null){if(e.file==null){throw new Error("SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, "+'or the source map\'s "file" property. Both were omitted.')}t=e.file}var a=this._sourceRoot;if(a!=null){t=o.relative(a,t)}var u=new i;var s=new i;this._mappings.unsortedForEach((function(r){if(r.source===t&&r.originalLine!=null){var i=e.originalPositionFor({line:r.originalLine,column:r.originalColumn});if(i.source!=null){r.source=i.source;if(n!=null){r.source=o.join(n,r.source)}if(a!=null){r.source=o.relative(a,r.source)}r.originalLine=i.line;r.originalColumn=i.column;if(i.name!=null){r.name=i.name}}}var l=r.source;if(l!=null&&!u.has(l)){u.add(l)}var c=r.name;if(c!=null&&!s.has(c)){s.add(c)}}),this);this._sources=u;this._names=s;e.sources.forEach((function(r){var t=e.sourceContentFor(r);if(t!=null){if(n!=null){r=o.join(n,r)}if(a!=null){r=o.relative(a,r)}this.setSourceContent(r,t)}}),this)};SourceMapGenerator.prototype._validateMapping=function SourceMapGenerator_validateMapping(e,r,n,t){if(r&&typeof r.line!=="number"&&typeof r.column!=="number"){throw new Error("original.line and original.column are not numbers -- you probably meant to omit "+"the original mapping entirely and only map the generated position. 
If so, pass "+"null for the original mapping instead of an object with empty or null values.")}if(e&&"line"in e&&"column"in e&&e.line>0&&e.column>=0&&!r&&!n&&!t){return}else if(e&&"line"in e&&"column"in e&&r&&"line"in r&&"column"in r&&e.line>0&&e.column>=0&&r.line>0&&r.column>=0&&n){return}else{throw new Error("Invalid mapping: "+JSON.stringify({generated:e,source:n,original:r,name:t}))}};SourceMapGenerator.prototype._serializeMappings=function SourceMapGenerator_serializeMappings(){var e=0;var r=1;var n=0;var i=0;var a=0;var u=0;var s="";var l;var c;var p;var f;var g=this._mappings.toArray();for(var h=0,d=g.length;h0){if(!o.compareByGeneratedPositionsInflated(c,g[h-1])){continue}l+=","}}l+=t.encode(c.generatedColumn-e);e=c.generatedColumn;if(c.source!=null){f=this._sources.indexOf(c.source);l+=t.encode(f-u);u=f;l+=t.encode(c.originalLine-1-i);i=c.originalLine-1;l+=t.encode(c.originalColumn-n);n=c.originalColumn;if(c.name!=null){p=this._names.indexOf(c.name);l+=t.encode(p-a);a=p}}s+=l}return s};SourceMapGenerator.prototype._generateSourcesContent=function SourceMapGenerator_generateSourcesContent(e,r){return e.map((function(e){if(!this._sourcesContents){return null}if(r!=null){e=o.relative(r,e)}var n=o.toSetString(e);return Object.prototype.hasOwnProperty.call(this._sourcesContents,n)?this._sourcesContents[n]:null}),this)};SourceMapGenerator.prototype.toJSON=function SourceMapGenerator_toJSON(){var e={version:this._version,sources:this._sources.toArray(),names:this._names.toArray(),mappings:this._serializeMappings()};if(this._file!=null){e.file=this._file}if(this._sourceRoot!=null){e.sourceRoot=this._sourceRoot}if(this._sourcesContents){e.sourcesContent=this._generateSourcesContent(e.sources,e.sourceRoot)}return e};SourceMapGenerator.prototype.toString=function SourceMapGenerator_toString(){return JSON.stringify(this.toJSON())};r.h=SourceMapGenerator},990:(e,r,n)=>{var t;var o=n(341).h;var i=n(983);var a=/(\r?\n)/;var u=10;var s="$$$isSourceNode$$$";function SourceNode(e,r,n,t,o){this.children=[];this.sourceContents={};this.line=e==null?null:e;this.column=r==null?null:r;this.source=n==null?null:n;this.name=o==null?null:o;this[s]=true;if(t!=null)this.add(t)}SourceNode.fromStringWithSourceMap=function SourceNode_fromStringWithSourceMap(e,r,n){var t=new SourceNode;var o=e.split(a);var u=0;var shiftNextLine=function(){var e=getNextLine();var r=getNextLine()||"";return e+r;function getNextLine(){return u=0;r--){this.prepend(e[r])}}else if(e[s]||typeof e==="string"){this.children.unshift(e)}else{throw new TypeError("Expected a SourceNode, string, or an array of SourceNodes and strings. 
Got "+e)}return this};SourceNode.prototype.walk=function SourceNode_walk(e){var r;for(var n=0,t=this.children.length;n0){r=[];for(n=0;n{function getArg(e,r,n){if(r in e){return e[r]}else if(arguments.length===3){return n}else{throw new Error('"'+r+'" is a required argument.')}}r.getArg=getArg;var n=/^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.-]*)(?::(\d+))?(.*)$/;var t=/^data:.+\,.+$/;function urlParse(e){var r=e.match(n);if(!r){return null}return{scheme:r[1],auth:r[2],host:r[3],port:r[4],path:r[5]}}r.urlParse=urlParse;function urlGenerate(e){var r="";if(e.scheme){r+=e.scheme+":"}r+="//";if(e.auth){r+=e.auth+"@"}if(e.host){r+=e.host}if(e.port){r+=":"+e.port}if(e.path){r+=e.path}return r}r.urlGenerate=urlGenerate;function normalize(e){var n=e;var t=urlParse(e);if(t){if(!t.path){return e}n=t.path}var o=r.isAbsolute(n);var i=n.split(/\/+/);for(var a,u=0,s=i.length-1;s>=0;s--){a=i[s];if(a==="."){i.splice(s,1)}else if(a===".."){u++}else if(u>0){if(a===""){i.splice(s+1,u);u=0}else{i.splice(s,2);u--}}}n=i.join("/");if(n===""){n=o?"/":"."}if(t){t.path=n;return urlGenerate(t)}return n}r.normalize=normalize;function join(e,r){if(e===""){e="."}if(r===""){r="."}var n=urlParse(r);var o=urlParse(e);if(o){e=o.path||"/"}if(n&&!n.scheme){if(o){n.scheme=o.scheme}return urlGenerate(n)}if(n||r.match(t)){return r}if(o&&!o.host&&!o.path){o.host=r;return urlGenerate(o)}var i=r.charAt(0)==="/"?r:normalize(e.replace(/\/+$/,"")+"/"+r);if(o){o.path=i;return urlGenerate(o)}return i}r.join=join;r.isAbsolute=function(e){return e.charAt(0)==="/"||n.test(e)};function relative(e,r){if(e===""){e="."}e=e.replace(/\/$/,"");var n=0;while(r.indexOf(e+"/")!==0){var t=e.lastIndexOf("/");if(t<0){return r}e=e.slice(0,t);if(e.match(/^([^\/]+:\/)?\/*$/)){return r}++n}return Array(n+1).join("../")+r.substr(e.length+1)}r.relative=relative;var o=function(){var e=Object.create(null);return!("__proto__"in e)}();function identity(e){return e}function toSetString(e){if(isProtoString(e)){return"$"+e}return e}r.toSetString=o?identity:toSetString;function fromSetString(e){if(isProtoString(e)){return e.slice(1)}return e}r.fromSetString=o?identity:fromSetString;function isProtoString(e){if(!e){return false}var r=e.length;if(r<9){return false}if(e.charCodeAt(r-1)!==95||e.charCodeAt(r-2)!==95||e.charCodeAt(r-3)!==111||e.charCodeAt(r-4)!==116||e.charCodeAt(r-5)!==111||e.charCodeAt(r-6)!==114||e.charCodeAt(r-7)!==112||e.charCodeAt(r-8)!==95||e.charCodeAt(r-9)!==95){return false}for(var n=r-10;n>=0;n--){if(e.charCodeAt(n)!==36){return false}}return true}function compareByOriginalPositions(e,r,n){var t=strcmp(e.source,r.source);if(t!==0){return t}t=e.originalLine-r.originalLine;if(t!==0){return t}t=e.originalColumn-r.originalColumn;if(t!==0||n){return t}t=e.generatedColumn-r.generatedColumn;if(t!==0){return t}t=e.generatedLine-r.generatedLine;if(t!==0){return t}return strcmp(e.name,r.name)}r.compareByOriginalPositions=compareByOriginalPositions;function compareByGeneratedPositionsDeflated(e,r,n){var t=e.generatedLine-r.generatedLine;if(t!==0){return t}t=e.generatedColumn-r.generatedColumn;if(t!==0||n){return t}t=strcmp(e.source,r.source);if(t!==0){return t}t=e.originalLine-r.originalLine;if(t!==0){return t}t=e.originalColumn-r.originalColumn;if(t!==0){return t}return strcmp(e.name,r.name)}r.compareByGeneratedPositionsDeflated=compareByGeneratedPositionsDeflated;function strcmp(e,r){if(e===r){return 0}if(e===null){return 1}if(r===null){return-1}if(e>r){return 1}return-1}function compareByGeneratedPositionsInflated(e,r){var 
n=e.generatedLine-r.generatedLine;if(n!==0){return n}n=e.generatedColumn-r.generatedColumn;if(n!==0){return n}n=strcmp(e.source,r.source);if(n!==0){return n}n=e.originalLine-r.originalLine;if(n!==0){return n}n=e.originalColumn-r.originalColumn;if(n!==0){return n}return strcmp(e.name,r.name)}r.compareByGeneratedPositionsInflated=compareByGeneratedPositionsInflated;function parseSourceMapInput(e){return JSON.parse(e.replace(/^\)]}'[^\n]*\n/,""))}r.parseSourceMapInput=parseSourceMapInput;function computeSourceURL(e,r,n){r=r||"";if(e){if(e[e.length-1]!=="/"&&r[0]!=="/"){e+="/"}r=e+r}if(n){var t=urlParse(n);if(!t){throw new Error("sourceMapURL could not be parsed")}if(t.path){var o=t.path.lastIndexOf("/");if(o>=0){t.path=t.path.substring(0,o+1)}}r=join(urlGenerate(t),r)}return normalize(r)}r.computeSourceURL=computeSourceURL},596:(e,r,n)=>{n(341).h;r.SourceMapConsumer=n(327).SourceMapConsumer;n(990)},747:e=>{"use strict";e.exports=require("fs")},622:e=>{"use strict";e.exports=require("path")}};var r={};function __webpack_require__(n){var t=r[n];if(t!==undefined){return t.exports}var o=r[n]={id:n,loaded:false,exports:{}};var i=true;try{e[n](o,o.exports,__webpack_require__);i=false}finally{if(i)delete r[n]}o.loaded=true;return o.exports}(()=>{__webpack_require__.nmd=e=>{e.paths=[];if(!e.children)e.children=[];return e}})();if(typeof __webpack_require__!=="undefined")__webpack_require__.ab=__dirname+"/";var n={};(()=>{__webpack_require__(284).install()})();module.exports=n})(); \ No newline at end of file diff --git a/.github/actions/verify-chart-version/package-lock.json b/.github/actions/verify-chart-version/package-lock.json deleted file mode 100644 index c90126d6..00000000 --- a/.github/actions/verify-chart-version/package-lock.json +++ /dev/null @@ -1,632 +0,0 @@ -{ - "name": "verify-chart-version-bump", - "version": "1.0.0", - "lockfileVersion": 2, - "requires": true, - "packages": { - "": { - "name": "verify-chart-version-bump", - "version": "1.0.0", - "license": "ISC", - "dependencies": { - "@actions/core": "^1.9.0", - "@actions/github": "^5.0.3", - "fs-extra": "^10.1.0", - "semver": "^7.3.7", - "yaml": "^2.1.1" - }, - "devDependencies": { - "@types/fs-extra": "9.0.13", - "@types/node": "17.0.45", - "@types/semver": "7.3.12", - "@vercel/ncc": "0.34.0", - "typescript": "4.7.4" - } - }, - "node_modules/@actions/core": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.9.0.tgz", - "integrity": "sha512-5pbM693Ih59ZdUhgk+fts+bUWTnIdHV3kwOSr+QIoFHMLg7Gzhwm0cifDY/AG68ekEJAkHnQVpcy4f6GjmzBCA==", - "dependencies": { - "@actions/http-client": "^2.0.1" - } - }, - "node_modules/@actions/github": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/@actions/github/-/github-5.0.3.tgz", - "integrity": "sha512-myjA/pdLQfhUGLtRZC/J4L1RXOG4o6aYdiEq+zr5wVVKljzbFld+xv10k1FX6IkIJtNxbAq44BdwSNpQ015P0A==", - "dependencies": { - "@actions/http-client": "^2.0.1", - "@octokit/core": "^3.6.0", - "@octokit/plugin-paginate-rest": "^2.17.0", - "@octokit/plugin-rest-endpoint-methods": "^5.13.0" - } - }, - "node_modules/@actions/http-client": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.0.1.tgz", - "integrity": "sha512-PIXiMVtz6VvyaRsGY268qvj57hXQEpsYogYOu2nrQhlf+XCGmZstmuZBbAybUl1nQGnvS1k1eEsQ69ZoD7xlSw==", - "dependencies": { - "tunnel": "^0.0.6" - } - }, - "node_modules/@octokit/auth-token": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-2.5.0.tgz", - 
"integrity": "sha512-r5FVUJCOLl19AxiuZD2VRZ/ORjp/4IN98Of6YJoJOkY75CIBuYfmiNHGrDwXr+aLGG55igl9QrxX3hbiXlLb+g==", - "dependencies": { - "@octokit/types": "^6.0.3" - } - }, - "node_modules/@octokit/core": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/@octokit/core/-/core-3.6.0.tgz", - "integrity": "sha512-7RKRKuA4xTjMhY+eG3jthb3hlZCsOwg3rztWh75Xc+ShDWOfDDATWbeZpAHBNRpm4Tv9WgBMOy1zEJYXG6NJ7Q==", - "dependencies": { - "@octokit/auth-token": "^2.4.4", - "@octokit/graphql": "^4.5.8", - "@octokit/request": "^5.6.3", - "@octokit/request-error": "^2.0.5", - "@octokit/types": "^6.0.3", - "before-after-hook": "^2.2.0", - "universal-user-agent": "^6.0.0" - } - }, - "node_modules/@octokit/endpoint": { - "version": "6.0.12", - "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-6.0.12.tgz", - "integrity": "sha512-lF3puPwkQWGfkMClXb4k/eUT/nZKQfxinRWJrdZaJO85Dqwo/G0yOC434Jr2ojwafWJMYqFGFa5ms4jJUgujdA==", - "dependencies": { - "@octokit/types": "^6.0.3", - "is-plain-object": "^5.0.0", - "universal-user-agent": "^6.0.0" - } - }, - "node_modules/@octokit/graphql": { - "version": "4.8.0", - "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-4.8.0.tgz", - "integrity": "sha512-0gv+qLSBLKF0z8TKaSKTsS39scVKF9dbMxJpj3U0vC7wjNWFuIpL/z76Qe2fiuCbDRcJSavkXsVtMS6/dtQQsg==", - "dependencies": { - "@octokit/request": "^5.6.0", - "@octokit/types": "^6.0.3", - "universal-user-agent": "^6.0.0" - } - }, - "node_modules/@octokit/openapi-types": { - "version": "12.11.0", - "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-12.11.0.tgz", - "integrity": "sha512-VsXyi8peyRq9PqIz/tpqiL2w3w80OgVMwBHltTml3LmVvXiphgeqmY9mvBw9Wu7e0QWk/fqD37ux8yP5uVekyQ==" - }, - "node_modules/@octokit/plugin-paginate-rest": { - "version": "2.21.3", - "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-2.21.3.tgz", - "integrity": "sha512-aCZTEf0y2h3OLbrgKkrfFdjRL6eSOo8komneVQJnYecAxIej7Bafor2xhuDJOIFau4pk0i/P28/XgtbyPF0ZHw==", - "dependencies": { - "@octokit/types": "^6.40.0" - }, - "peerDependencies": { - "@octokit/core": ">=2" - } - }, - "node_modules/@octokit/plugin-rest-endpoint-methods": { - "version": "5.16.2", - "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-5.16.2.tgz", - "integrity": "sha512-8QFz29Fg5jDuTPXVtey05BLm7OB+M8fnvE64RNegzX7U+5NUXcOcnpTIK0YfSHBg8gYd0oxIq3IZTe9SfPZiRw==", - "dependencies": { - "@octokit/types": "^6.39.0", - "deprecation": "^2.3.1" - }, - "peerDependencies": { - "@octokit/core": ">=3" - } - }, - "node_modules/@octokit/request": { - "version": "5.6.3", - "resolved": "https://registry.npmjs.org/@octokit/request/-/request-5.6.3.tgz", - "integrity": "sha512-bFJl0I1KVc9jYTe9tdGGpAMPy32dLBXXo1dS/YwSCTL/2nd9XeHsY616RE3HPXDVk+a+dBuzyz5YdlXwcDTr2A==", - "dependencies": { - "@octokit/endpoint": "^6.0.1", - "@octokit/request-error": "^2.1.0", - "@octokit/types": "^6.16.1", - "is-plain-object": "^5.0.0", - "node-fetch": "^2.6.7", - "universal-user-agent": "^6.0.0" - } - }, - "node_modules/@octokit/request-error": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-2.1.0.tgz", - "integrity": "sha512-1VIvgXxs9WHSjicsRwq8PlR2LR2x6DwsJAaFgzdi0JfJoGSO8mYI/cHJQ+9FbN21aa+DrgNLnwObmyeSC8Rmpg==", - "dependencies": { - "@octokit/types": "^6.0.3", - "deprecation": "^2.0.0", - "once": "^1.4.0" - } - }, - "node_modules/@octokit/types": { - "version": "6.41.0", - "resolved": 
"https://registry.npmjs.org/@octokit/types/-/types-6.41.0.tgz", - "integrity": "sha512-eJ2jbzjdijiL3B4PrSQaSjuF2sPEQPVCPzBvTHJD9Nz+9dw2SGH4K4xeQJ77YfTq5bRQ+bD8wT11JbeDPmxmGg==", - "dependencies": { - "@octokit/openapi-types": "^12.11.0" - } - }, - "node_modules/@types/fs-extra": { - "version": "9.0.13", - "resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-9.0.13.tgz", - "integrity": "sha512-nEnwB++1u5lVDM2UI4c1+5R+FYaKfaAzS4OococimjVm3nQw3TuzH5UNsocrcTBbhnerblyHj4A49qXbIiZdpA==", - "dev": true, - "dependencies": { - "@types/node": "*" - } - }, - "node_modules/@types/node": { - "version": "17.0.45", - "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.45.tgz", - "integrity": "sha512-w+tIMs3rq2afQdsPJlODhoUEKzFP1ayaoyl1CcnwtIlsVe7K7bA1NGm4s3PraqTLlXnbIN84zuBlxBWo1u9BLw==", - "dev": true - }, - "node_modules/@types/semver": { - "version": "7.3.12", - "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.3.12.tgz", - "integrity": "sha512-WwA1MW0++RfXmCr12xeYOOC5baSC9mSb0ZqCquFzKhcoF4TvHu5MKOuXsncgZcpVFhB1pXd5hZmM0ryAoCp12A==", - "dev": true - }, - "node_modules/@vercel/ncc": { - "version": "0.34.0", - "resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.34.0.tgz", - "integrity": "sha512-G9h5ZLBJ/V57Ou9vz5hI8pda/YQX5HQszCs3AmIus3XzsmRn/0Ptic5otD3xVST8QLKk7AMk7AqpsyQGN7MZ9A==", - "dev": true, - "bin": { - "ncc": "dist/ncc/cli.js" - } - }, - "node_modules/before-after-hook": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.2.tgz", - "integrity": "sha512-3pZEU3NT5BFUo/AD5ERPWOgQOCZITni6iavr5AUw5AUwQjMlI0kzu5btnyD39AF0gUEsDPwJT+oY1ORBJijPjQ==" - }, - "node_modules/deprecation": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", - "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" - }, - "node_modules/fs-extra": { - "version": "10.1.0", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz", - "integrity": "sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==", - "dependencies": { - "graceful-fs": "^4.2.0", - "jsonfile": "^6.0.1", - "universalify": "^2.0.0" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/graceful-fs": { - "version": "4.2.10", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz", - "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==" - }, - "node_modules/is-plain-object": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz", - "integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/jsonfile": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", - "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", - "dependencies": { - "universalify": "^2.0.0" - }, - "optionalDependencies": { - "graceful-fs": "^4.1.6" - } - }, - "node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - 
"node_modules/node-fetch": { - "version": "2.6.7", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", - "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", - "dependencies": { - "whatwg-url": "^5.0.0" - }, - "engines": { - "node": "4.x || >=6.0.0" - }, - "peerDependencies": { - "encoding": "^0.1.0" - }, - "peerDependenciesMeta": { - "encoding": { - "optional": true - } - } - }, - "node_modules/once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", - "dependencies": { - "wrappy": "1" - } - }, - "node_modules/semver": { - "version": "7.3.7", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.7.tgz", - "integrity": "sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==", - "dependencies": { - "lru-cache": "^6.0.0" - }, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/tr46": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", - "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" - }, - "node_modules/tunnel": { - "version": "0.0.6", - "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", - "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==", - "engines": { - "node": ">=0.6.11 <=0.7.0 || >=0.7.3" - } - }, - "node_modules/typescript": { - "version": "4.7.4", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.7.4.tgz", - "integrity": "sha512-C0WQT0gezHuw6AdY1M2jxUO83Rjf0HP7Sk1DtXj6j1EwkQNZrHAg2XPWlq62oqEhYvONq5pkC2Y9oPljWToLmQ==", - "dev": true, - "bin": { - "tsc": "bin/tsc", - "tsserver": "bin/tsserver" - }, - "engines": { - "node": ">=4.2.0" - } - }, - "node_modules/universal-user-agent": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.0.tgz", - "integrity": "sha512-isyNax3wXoKaulPDZWHQqbmIx1k2tb9fb3GGDBRxCscfYV2Ch7WxPArBsFEG8s/safwXTT7H4QGhaIkTp9447w==" - }, - "node_modules/universalify": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz", - "integrity": "sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==", - "engines": { - "node": ">= 10.0.0" - } - }, - "node_modules/webidl-conversions": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", - "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" - }, - "node_modules/whatwg-url": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", - "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", - "dependencies": { - "tr46": "~0.0.3", - "webidl-conversions": "^3.0.0" - } - }, - "node_modules/wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" - }, - "node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": 
"sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" - }, - "node_modules/yaml": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.1.1.tgz", - "integrity": "sha512-o96x3OPo8GjWeSLF+wOAbrPfhFOGY0W00GNaxCDv+9hkcDJEnev1yh8S7pgHF0ik6zc8sQLuL8hjHjJULZp8bw==", - "engines": { - "node": ">= 14" - } - } - }, - "dependencies": { - "@actions/core": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.9.0.tgz", - "integrity": "sha512-5pbM693Ih59ZdUhgk+fts+bUWTnIdHV3kwOSr+QIoFHMLg7Gzhwm0cifDY/AG68ekEJAkHnQVpcy4f6GjmzBCA==", - "requires": { - "@actions/http-client": "^2.0.1" - } - }, - "@actions/github": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/@actions/github/-/github-5.0.3.tgz", - "integrity": "sha512-myjA/pdLQfhUGLtRZC/J4L1RXOG4o6aYdiEq+zr5wVVKljzbFld+xv10k1FX6IkIJtNxbAq44BdwSNpQ015P0A==", - "requires": { - "@actions/http-client": "^2.0.1", - "@octokit/core": "^3.6.0", - "@octokit/plugin-paginate-rest": "^2.17.0", - "@octokit/plugin-rest-endpoint-methods": "^5.13.0" - } - }, - "@actions/http-client": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.0.1.tgz", - "integrity": "sha512-PIXiMVtz6VvyaRsGY268qvj57hXQEpsYogYOu2nrQhlf+XCGmZstmuZBbAybUl1nQGnvS1k1eEsQ69ZoD7xlSw==", - "requires": { - "tunnel": "^0.0.6" - } - }, - "@octokit/auth-token": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-2.5.0.tgz", - "integrity": "sha512-r5FVUJCOLl19AxiuZD2VRZ/ORjp/4IN98Of6YJoJOkY75CIBuYfmiNHGrDwXr+aLGG55igl9QrxX3hbiXlLb+g==", - "requires": { - "@octokit/types": "^6.0.3" - } - }, - "@octokit/core": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/@octokit/core/-/core-3.6.0.tgz", - "integrity": "sha512-7RKRKuA4xTjMhY+eG3jthb3hlZCsOwg3rztWh75Xc+ShDWOfDDATWbeZpAHBNRpm4Tv9WgBMOy1zEJYXG6NJ7Q==", - "requires": { - "@octokit/auth-token": "^2.4.4", - "@octokit/graphql": "^4.5.8", - "@octokit/request": "^5.6.3", - "@octokit/request-error": "^2.0.5", - "@octokit/types": "^6.0.3", - "before-after-hook": "^2.2.0", - "universal-user-agent": "^6.0.0" - } - }, - "@octokit/endpoint": { - "version": "6.0.12", - "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-6.0.12.tgz", - "integrity": "sha512-lF3puPwkQWGfkMClXb4k/eUT/nZKQfxinRWJrdZaJO85Dqwo/G0yOC434Jr2ojwafWJMYqFGFa5ms4jJUgujdA==", - "requires": { - "@octokit/types": "^6.0.3", - "is-plain-object": "^5.0.0", - "universal-user-agent": "^6.0.0" - } - }, - "@octokit/graphql": { - "version": "4.8.0", - "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-4.8.0.tgz", - "integrity": "sha512-0gv+qLSBLKF0z8TKaSKTsS39scVKF9dbMxJpj3U0vC7wjNWFuIpL/z76Qe2fiuCbDRcJSavkXsVtMS6/dtQQsg==", - "requires": { - "@octokit/request": "^5.6.0", - "@octokit/types": "^6.0.3", - "universal-user-agent": "^6.0.0" - } - }, - "@octokit/openapi-types": { - "version": "12.11.0", - "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-12.11.0.tgz", - "integrity": "sha512-VsXyi8peyRq9PqIz/tpqiL2w3w80OgVMwBHltTml3LmVvXiphgeqmY9mvBw9Wu7e0QWk/fqD37ux8yP5uVekyQ==" - }, - "@octokit/plugin-paginate-rest": { - "version": "2.21.3", - "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-2.21.3.tgz", - "integrity": "sha512-aCZTEf0y2h3OLbrgKkrfFdjRL6eSOo8komneVQJnYecAxIej7Bafor2xhuDJOIFau4pk0i/P28/XgtbyPF0ZHw==", - "requires": { - "@octokit/types": "^6.40.0" - } - }, - 
"@octokit/plugin-rest-endpoint-methods": { - "version": "5.16.2", - "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-5.16.2.tgz", - "integrity": "sha512-8QFz29Fg5jDuTPXVtey05BLm7OB+M8fnvE64RNegzX7U+5NUXcOcnpTIK0YfSHBg8gYd0oxIq3IZTe9SfPZiRw==", - "requires": { - "@octokit/types": "^6.39.0", - "deprecation": "^2.3.1" - } - }, - "@octokit/request": { - "version": "5.6.3", - "resolved": "https://registry.npmjs.org/@octokit/request/-/request-5.6.3.tgz", - "integrity": "sha512-bFJl0I1KVc9jYTe9tdGGpAMPy32dLBXXo1dS/YwSCTL/2nd9XeHsY616RE3HPXDVk+a+dBuzyz5YdlXwcDTr2A==", - "requires": { - "@octokit/endpoint": "^6.0.1", - "@octokit/request-error": "^2.1.0", - "@octokit/types": "^6.16.1", - "is-plain-object": "^5.0.0", - "node-fetch": "^2.6.7", - "universal-user-agent": "^6.0.0" - } - }, - "@octokit/request-error": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-2.1.0.tgz", - "integrity": "sha512-1VIvgXxs9WHSjicsRwq8PlR2LR2x6DwsJAaFgzdi0JfJoGSO8mYI/cHJQ+9FbN21aa+DrgNLnwObmyeSC8Rmpg==", - "requires": { - "@octokit/types": "^6.0.3", - "deprecation": "^2.0.0", - "once": "^1.4.0" - } - }, - "@octokit/types": { - "version": "6.41.0", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-6.41.0.tgz", - "integrity": "sha512-eJ2jbzjdijiL3B4PrSQaSjuF2sPEQPVCPzBvTHJD9Nz+9dw2SGH4K4xeQJ77YfTq5bRQ+bD8wT11JbeDPmxmGg==", - "requires": { - "@octokit/openapi-types": "^12.11.0" - } - }, - "@types/fs-extra": { - "version": "9.0.13", - "resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-9.0.13.tgz", - "integrity": "sha512-nEnwB++1u5lVDM2UI4c1+5R+FYaKfaAzS4OococimjVm3nQw3TuzH5UNsocrcTBbhnerblyHj4A49qXbIiZdpA==", - "dev": true, - "requires": { - "@types/node": "*" - } - }, - "@types/node": { - "version": "17.0.45", - "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.45.tgz", - "integrity": "sha512-w+tIMs3rq2afQdsPJlODhoUEKzFP1ayaoyl1CcnwtIlsVe7K7bA1NGm4s3PraqTLlXnbIN84zuBlxBWo1u9BLw==", - "dev": true - }, - "@types/semver": { - "version": "7.3.12", - "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.3.12.tgz", - "integrity": "sha512-WwA1MW0++RfXmCr12xeYOOC5baSC9mSb0ZqCquFzKhcoF4TvHu5MKOuXsncgZcpVFhB1pXd5hZmM0ryAoCp12A==", - "dev": true - }, - "@vercel/ncc": { - "version": "0.34.0", - "resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.34.0.tgz", - "integrity": "sha512-G9h5ZLBJ/V57Ou9vz5hI8pda/YQX5HQszCs3AmIus3XzsmRn/0Ptic5otD3xVST8QLKk7AMk7AqpsyQGN7MZ9A==", - "dev": true - }, - "before-after-hook": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.2.tgz", - "integrity": "sha512-3pZEU3NT5BFUo/AD5ERPWOgQOCZITni6iavr5AUw5AUwQjMlI0kzu5btnyD39AF0gUEsDPwJT+oY1ORBJijPjQ==" - }, - "deprecation": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", - "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" - }, - "fs-extra": { - "version": "10.1.0", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz", - "integrity": "sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==", - "requires": { - "graceful-fs": "^4.2.0", - "jsonfile": "^6.0.1", - "universalify": "^2.0.0" - } - }, - "graceful-fs": { - "version": "4.2.10", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz", - "integrity": 
"sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==" - }, - "is-plain-object": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz", - "integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==" - }, - "jsonfile": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", - "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", - "requires": { - "graceful-fs": "^4.1.6", - "universalify": "^2.0.0" - } - }, - "lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "requires": { - "yallist": "^4.0.0" - } - }, - "node-fetch": { - "version": "2.6.7", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", - "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", - "requires": { - "whatwg-url": "^5.0.0" - } - }, - "once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", - "requires": { - "wrappy": "1" - } - }, - "semver": { - "version": "7.3.7", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.7.tgz", - "integrity": "sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==", - "requires": { - "lru-cache": "^6.0.0" - } - }, - "tr46": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", - "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" - }, - "tunnel": { - "version": "0.0.6", - "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", - "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==" - }, - "typescript": { - "version": "4.7.4", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.7.4.tgz", - "integrity": "sha512-C0WQT0gezHuw6AdY1M2jxUO83Rjf0HP7Sk1DtXj6j1EwkQNZrHAg2XPWlq62oqEhYvONq5pkC2Y9oPljWToLmQ==", - "dev": true - }, - "universal-user-agent": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.0.tgz", - "integrity": "sha512-isyNax3wXoKaulPDZWHQqbmIx1k2tb9fb3GGDBRxCscfYV2Ch7WxPArBsFEG8s/safwXTT7H4QGhaIkTp9447w==" - }, - "universalify": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz", - "integrity": "sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==" - }, - "webidl-conversions": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", - "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" - }, - "whatwg-url": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", - "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", - "requires": { - "tr46": "~0.0.3", - "webidl-conversions": "^3.0.0" - } - }, - "wrappy": { - "version": "1.0.2", - 
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" - }, - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" - }, - "yaml": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.1.1.tgz", - "integrity": "sha512-o96x3OPo8GjWeSLF+wOAbrPfhFOGY0W00GNaxCDv+9hkcDJEnev1yh8S7pgHF0ik6zc8sQLuL8hjHjJULZp8bw==" - } - } -} diff --git a/.github/actions/verify-chart-version/package.json b/.github/actions/verify-chart-version/package.json deleted file mode 100644 index 4555eb5c..00000000 --- a/.github/actions/verify-chart-version/package.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "name": "verify-chart-version-bump", - "version": "1.0.0", - "description": "", - "main": "lib/main.js", - "scripts": { - "build": "tsc", - "package": "npm run build && NODE_OPTIONS=--openssl-legacy-provider ncc build --source-map --license licenses.txt" - }, - "keywords": [], - "author": "", - "license": "ISC", - "dependencies": { - "@actions/core": "^1.9.0", - "@actions/github": "^5.0.3", - "fs-extra": "^10.1.0", - "semver": "^7.3.7", - "yaml": "^2.1.1" - }, - "devDependencies": { - "@types/fs-extra": "9.0.13", - "@types/node": "17.0.45", - "@types/semver": "7.3.12", - "@vercel/ncc": "0.34.0", - "typescript": "4.7.4" - } -} diff --git a/.github/actions/verify-chart-version/src/main.ts b/.github/actions/verify-chart-version/src/main.ts deleted file mode 100644 index 709376ea..00000000 --- a/.github/actions/verify-chart-version/src/main.ts +++ /dev/null @@ -1,106 +0,0 @@ -import * as core from "@actions/core"; -import * as github from "@actions/github"; -import * as YAML from "yaml"; - -import * as semver from "semver"; -import * as fs from "fs-extra"; - -function getErrorMessage(error: unknown) { - if (error instanceof Error) return error.message; - return String(error); -} - -async function run() { - try { - if (github.context.eventName !== "pull_request") { - core.setFailed("This action can only run on pull requests!"); - return; - } - - const githubToken = core.getInput("token"); - const chart = core.getInput("chart", { required: true }); - const base = core.getInput("base", { required: false }); - const chartYamlPath = `${chart}/Chart.yaml`; - - const defaultBranch = github.context.payload.repository?.default_branch; - const octokit = github.getOctokit(githubToken); - - if (!(await fs.pathExists(chartYamlPath))) { - core.setFailed(`${chart} is not a valid Helm chart folder!`); - return; - } - - if (base) { - try { - await octokit.rest.git.getRef({ - owner: github.context.repo.owner, - repo: github.context.repo.repo, - ref: base, - }); - } catch (error) { - core.setFailed(`Ref ${base} was not found for this repository!`); - return; - } - } - - var originalChartYamlFile; - var originalChartVersion; - try { - originalChartYamlFile = await octokit.rest.repos.getContent({ - owner: github.context.repo.owner, - repo: github.context.repo.repo, - path: `${chartYamlPath}`, - ref: base || `heads/${defaultBranch}`, - }); - } catch (error) { - core.warning( - `Could not find original Chart.yaml for ${chart}, assuming this is a new chart.` - ); - } - - if (originalChartYamlFile && "content" in originalChartYamlFile.data) { - const originalChartYamlContent = Buffer.from( - originalChartYamlFile.data.content, - "base64" - 
).toString("utf-8"); - const originalChartYaml = await YAML.parse(originalChartYamlContent); - originalChartVersion = originalChartYaml.version; - } - - const updatedChartYamlContent = await fs.readFile(chartYamlPath, "utf8"); - const updatedChartYaml = await YAML.parse(updatedChartYamlContent); - if (!updatedChartYaml.version) { - core.setFailed(`${chartYamlPath} does not contain a version!`); - return; - } - const updatedChartVersion = updatedChartYaml.version; - if (!semver.valid(updatedChartVersion)) { - core.setFailed(`${updatedChartVersion} is not a valid SemVer version!`); - return; - } - - if (originalChartVersion) { - if (updatedChartVersion == originalChartVersion) { - core.setFailed(`Chart version has not been updated!`); - return; - } - - if (!semver.gt(updatedChartVersion, originalChartVersion)) { - core.setFailed( - `Updated chart version ${updatedChartVersion} is < ${originalChartVersion}!` - ); - return; - } - - core.info(`Old chart version: ${originalChartVersion}`); - } - - core.info(`New chart version: ${updatedChartVersion}`); - - core.info(`New chart version verified successfully.`); - } catch (error) { - core.setFailed(getErrorMessage(error)); - } -} - -run(); diff --git a/.github/actions/verify-chart-version/tsconfig.json b/.github/actions/verify-chart-version/tsconfig.json deleted file mode 100644 index f6e7cb5b..00000000 --- a/.github/actions/verify-chart-version/tsconfig.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "compilerOptions": { - "target": "es6", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */ - "module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */ - "outDir": "./lib", /* Redirect output structure to the directory. */ - "rootDir": "./src", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */ - "strict": true, /* Enable all strict type-checking options. */ - "noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */ - "esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. 
*/ - }, - "exclude": ["node_modules", "**/*.test.ts"] -} diff --git a/.github/workflows/charts-lint.yaml b/.github/workflows/charts-lint.yaml index 3331c1a2..a02fc779 100644 --- a/.github/workflows/charts-lint.yaml +++ b/.github/workflows/charts-lint.yaml @@ -36,13 +36,13 @@ jobs: ref: ${{ inputs.checkoutCommit }} - name: Verify chart version - uses: ./.github/actions/verify-chart-version + uses: bjw-s/helm-charts-actions/verify-chart-version@main id: verify-chart-version with: chart: "charts/${{ matrix.chart }}" - name: Verify chart changelog - uses: ./.github/actions/verify-chart-changelog + uses: bjw-s/helm-charts-actions/verify-chart-changelog@main if: inputs.isRenovatePR != 'true' id: verify-chart-changelog with: diff --git a/.github/workflows/charts-release.yaml b/.github/workflows/charts-release.yaml index db6bd566..b8c22d80 100644 --- a/.github/workflows/charts-release.yaml +++ b/.github/workflows/charts-release.yaml @@ -34,7 +34,7 @@ jobs: fetch-depth: 0 - name: Collect charts to release - uses: ./.github/actions/collect-charts + uses: bjw-s/helm-charts-actions/collect-charts@main id: collect-charts with: repoConfigFile: ./.ci/repo-config.yaml @@ -42,7 +42,7 @@ jobs: release-github-pages: name: Release Charts to GitHub pages - uses: bjw-s/helm-charts/.github/workflows/charts-release-ghpages.yaml@main + uses: ./.github/workflows/charts-release-ghpages.yaml needs: - prepare with: diff --git a/.github/workflows/pr-metadata.yaml b/.github/workflows/pr-metadata.yaml index 3a19ac03..75ac1b41 100644 --- a/.github/workflows/pr-metadata.yaml +++ b/.github/workflows/pr-metadata.yaml @@ -73,5 +73,5 @@ jobs: - added|modified: '**' - name: Collect changed charts - uses: ./.github/actions/collect-charts + uses: bjw-s/helm-charts-actions/collect-charts@main id: changed-charts diff --git a/.github/workflows/pr-validate.yaml b/.github/workflows/pr-validate.yaml index c8f7102e..0858aa06 100644 --- a/.github/workflows/pr-validate.yaml +++ b/.github/workflows/pr-validate.yaml @@ -18,17 +18,17 @@ concurrency: jobs: pr-metadata: - uses: bjw-s/helm-charts/.github/workflows/pr-metadata.yaml@main + uses: ./.github/workflows/pr-metadata.yaml pre-commit-check: - uses: bjw-s/helm-charts/.github/workflows/pre-commit-check.yaml@main + uses: ./.github/workflows/pre-commit-check.yaml needs: - pr-metadata with: modifiedFiles: ${{ needs.pr-metadata.outputs.addedOrModifiedFiles }} charts-lint: - uses: bjw-s/helm-charts/.github/workflows/charts-lint.yaml@main + uses: ./.github/workflows/charts-lint.yaml needs: - pr-metadata with: @@ -37,7 +37,7 @@ jobs: isRenovatePR: ${{ needs.pr-metadata.outputs.isRenovatePR }} charts-test: - uses: bjw-s/helm-charts/.github/workflows/charts-test.yaml@main + uses: ./.github/workflows/charts-test.yaml needs: - pr-metadata with: @@ -45,7 +45,7 @@ jobs: chartsToTest: ${{ needs.pr-metadata.outputs.chartsToInstall }} library-charts-test: - uses: bjw-s/helm-charts/.github/workflows/charts-test.yaml@main + uses: ./.github/workflows/charts-test.yaml needs: - pr-metadata with: