diff --git a/dist/index.js b/dist/index.js new file mode 100644 index 0000000..8739a61 --- /dev/null +++ b/dist/index.js @@ -0,0 +1,11422 @@ +require('./sourcemap-register.js');/******/ (() => { // webpackBootstrap +/******/ var __webpack_modules__ = ({ + +/***/ 7351: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.issue = exports.issueCommand = void 0; +const os = __importStar(__nccwpck_require__(2037)); +const utils_1 = __nccwpck_require__(5278); +/** + * Commands + * + * Command Format: + * ::name key=value,key=value::message + * + * Examples: + * ::warning::This is the message + * ::set-env name=MY_VAR::some value + */ +function issueCommand(command, properties, message) { + const cmd = new Command(command, properties, message); + process.stdout.write(cmd.toString() + os.EOL); +} +exports.issueCommand = issueCommand; +function issue(name, message = '') { + issueCommand(name, {}, message); +} +exports.issue = issue; +const CMD_STRING = '::'; +class Command { + constructor(command, properties, message) { + if (!command) { + command = 'missing.command'; + } + this.command = command; + this.properties = properties; + this.message = message; + } + toString() { + let cmdStr = CMD_STRING + this.command; + if (this.properties && Object.keys(this.properties).length > 0) { + cmdStr += ' '; + let first = true; + for (const key in this.properties) { + if (this.properties.hasOwnProperty(key)) { + const val = this.properties[key]; + if (val) { + if (first) { + first = false; + } + else { + cmdStr += ','; + } + cmdStr += `${key}=${escapeProperty(val)}`; + } + } + } + } + cmdStr += `${CMD_STRING}${escapeData(this.message)}`; + return cmdStr; + } +} +function escapeData(s) { + return utils_1.toCommandValue(s) + .replace(/%/g, '%25') + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A'); +} +function escapeProperty(s) { + return utils_1.toCommandValue(s) + .replace(/%/g, '%25') + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A') + .replace(/:/g, '%3A') + .replace(/,/g, '%2C'); +} +//# sourceMappingURL=command.js.map + +/***/ }), + +/***/ 2186: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0; +const command_1 = __nccwpck_require__(7351); +const file_command_1 = __nccwpck_require__(717); +const utils_1 = __nccwpck_require__(5278); +const os = __importStar(__nccwpck_require__(2037)); +const path = __importStar(__nccwpck_require__(1017)); +const oidc_utils_1 = __nccwpck_require__(8041); +/** + * The code to exit an action + */ +var ExitCode; +(function (ExitCode) { + /** + * A code indicating that the action was successful + */ + ExitCode[ExitCode["Success"] = 0] = "Success"; + /** + * A code indicating that the action was a failure + */ + ExitCode[ExitCode["Failure"] = 1] = "Failure"; +})(ExitCode = exports.ExitCode || (exports.ExitCode = {})); +//----------------------------------------------------------------------- +// Variables +//----------------------------------------------------------------------- +/** + * Sets env variable for this action and future actions in the job + * @param name the name of the variable to set + * @param val the value of the variable. 
Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function exportVariable(name, val) { + const convertedVal = utils_1.toCommandValue(val); + process.env[name] = convertedVal; + const filePath = process.env['GITHUB_ENV'] || ''; + if (filePath) { + return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val)); + } + command_1.issueCommand('set-env', { name }, convertedVal); +} +exports.exportVariable = exportVariable; +/** + * Registers a secret which will get masked from logs + * @param secret value of the secret + */ +function setSecret(secret) { + command_1.issueCommand('add-mask', {}, secret); +} +exports.setSecret = setSecret; +/** + * Prepends inputPath to the PATH (for this action and future actions) + * @param inputPath + */ +function addPath(inputPath) { + const filePath = process.env['GITHUB_PATH'] || ''; + if (filePath) { + file_command_1.issueFileCommand('PATH', inputPath); + } + else { + command_1.issueCommand('add-path', {}, inputPath); + } + process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`; +} +exports.addPath = addPath; +/** + * Gets the value of an input. + * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed. + * Returns an empty string if the value is not defined. + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns string + */ +function getInput(name, options) { + const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || ''; + if (options && options.required && !val) { + throw new Error(`Input required and not supplied: ${name}`); + } + if (options && options.trimWhitespace === false) { + return val; + } + return val.trim(); +} +exports.getInput = getInput; +/** + * Gets the values of an multiline input. Each value is also trimmed. + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns string[] + * + */ +function getMultilineInput(name, options) { + const inputs = getInput(name, options) + .split('\n') + .filter(x => x !== ''); + if (options && options.trimWhitespace === false) { + return inputs; + } + return inputs.map(input => input.trim()); +} +exports.getMultilineInput = getMultilineInput; +/** + * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification. + * Support boolean input list: `true | True | TRUE | false | False | FALSE` . + * The return value is also in boolean type. + * ref: https://yaml.org/spec/1.2/spec.html#id2804923 + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns boolean + */ +function getBooleanInput(name, options) { + const trueValue = ['true', 'True', 'TRUE']; + const falseValue = ['false', 'False', 'FALSE']; + const val = getInput(name, options); + if (trueValue.includes(val)) + return true; + if (falseValue.includes(val)) + return false; + throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` + + `Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); +} +exports.getBooleanInput = getBooleanInput; +/** + * Sets the value of an output. + * + * @param name name of the output to set + * @param value value to store. 
Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function setOutput(name, value) { + const filePath = process.env['GITHUB_OUTPUT'] || ''; + if (filePath) { + return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value)); + } + process.stdout.write(os.EOL); + command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value)); +} +exports.setOutput = setOutput; +/** + * Enables or disables the echoing of commands into stdout for the rest of the step. + * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set. + * + */ +function setCommandEcho(enabled) { + command_1.issue('echo', enabled ? 'on' : 'off'); +} +exports.setCommandEcho = setCommandEcho; +//----------------------------------------------------------------------- +// Results +//----------------------------------------------------------------------- +/** + * Sets the action status to failed. + * When the action exits it will be with an exit code of 1 + * @param message add error issue message + */ +function setFailed(message) { + process.exitCode = ExitCode.Failure; + error(message); +} +exports.setFailed = setFailed; +//----------------------------------------------------------------------- +// Logging Commands +//----------------------------------------------------------------------- +/** + * Gets whether Actions Step Debug is on or not + */ +function isDebug() { + return process.env['RUNNER_DEBUG'] === '1'; +} +exports.isDebug = isDebug; +/** + * Writes debug message to user log + * @param message debug message + */ +function debug(message) { + command_1.issueCommand('debug', {}, message); +} +exports.debug = debug; +/** + * Adds an error issue + * @param message error issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +function error(message, properties = {}) { + command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); +} +exports.error = error; +/** + * Adds a warning issue + * @param message warning issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +function warning(message, properties = {}) { + command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); +} +exports.warning = warning; +/** + * Adds a notice issue + * @param message notice issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +function notice(message, properties = {}) { + command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); +} +exports.notice = notice; +/** + * Writes info to log with console.log. + * @param message info message + */ +function info(message) { + process.stdout.write(message + os.EOL); +} +exports.info = info; +/** + * Begin an output group. + * + * Output until the next `groupEnd` will be foldable in this group + * + * @param name The name of the output group + */ +function startGroup(name) { + command_1.issue('group', name); +} +exports.startGroup = startGroup; +/** + * End an output group. 
+ */ +function endGroup() { + command_1.issue('endgroup'); +} +exports.endGroup = endGroup; +/** + * Wrap an asynchronous function call in a group. + * + * Returns the same type as the function itself. + * + * @param name The name of the group + * @param fn The function to wrap in the group + */ +function group(name, fn) { + return __awaiter(this, void 0, void 0, function* () { + startGroup(name); + let result; + try { + result = yield fn(); + } + finally { + endGroup(); + } + return result; + }); +} +exports.group = group; +//----------------------------------------------------------------------- +// Wrapper action state +//----------------------------------------------------------------------- +/** + * Saves state for current action, the state can only be retrieved by this action's post job execution. + * + * @param name name of the state to store + * @param value value to store. Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function saveState(name, value) { + const filePath = process.env['GITHUB_STATE'] || ''; + if (filePath) { + return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value)); + } + command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value)); +} +exports.saveState = saveState; +/** + * Gets the value of an state set by this action's main execution. + * + * @param name name of the state to get + * @returns string + */ +function getState(name) { + return process.env[`STATE_${name}`] || ''; +} +exports.getState = getState; +function getIDToken(aud) { + return __awaiter(this, void 0, void 0, function* () { + return yield oidc_utils_1.OidcClient.getIDToken(aud); + }); +} +exports.getIDToken = getIDToken; +/** + * Summary exports + */ +var summary_1 = __nccwpck_require__(1327); +Object.defineProperty(exports, "summary", ({ enumerable: true, get: function () { return summary_1.summary; } })); +/** + * @deprecated use core.summary + */ +var summary_2 = __nccwpck_require__(1327); +Object.defineProperty(exports, "markdownSummary", ({ enumerable: true, get: function () { return summary_2.markdownSummary; } })); +/** + * Path exports + */ +var path_utils_1 = __nccwpck_require__(2981); +Object.defineProperty(exports, "toPosixPath", ({ enumerable: true, get: function () { return path_utils_1.toPosixPath; } })); +Object.defineProperty(exports, "toWin32Path", ({ enumerable: true, get: function () { return path_utils_1.toWin32Path; } })); +Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: function () { return path_utils_1.toPlatformPath; } })); +//# sourceMappingURL=core.js.map + +/***/ }), + +/***/ 717: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// For internal use, subject to change. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.prepareKeyValueMessage = exports.issueFileCommand = void 0; +// We use any as a valid input type +/* eslint-disable @typescript-eslint/no-explicit-any */ +const fs = __importStar(__nccwpck_require__(7147)); +const os = __importStar(__nccwpck_require__(2037)); +const uuid_1 = __nccwpck_require__(5840); +const utils_1 = __nccwpck_require__(5278); +function issueFileCommand(command, message) { + const filePath = process.env[`GITHUB_${command}`]; + if (!filePath) { + throw new Error(`Unable to find environment variable for file command ${command}`); + } + if (!fs.existsSync(filePath)) { + throw new Error(`Missing file at path: ${filePath}`); + } + fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, { + encoding: 'utf8' + }); +} +exports.issueFileCommand = issueFileCommand; +function prepareKeyValueMessage(key, value) { + const delimiter = `ghadelimiter_${uuid_1.v4()}`; + const convertedValue = utils_1.toCommandValue(value); + // These should realistically never happen, but just in case someone finds a + // way to exploit uuid generation let's not allow keys or values that contain + // the delimiter. + if (key.includes(delimiter)) { + throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`); + } + if (convertedValue.includes(delimiter)) { + throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`); + } + return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`; +} +exports.prepareKeyValueMessage = prepareKeyValueMessage; +//# sourceMappingURL=file-command.js.map + +/***/ }), + +/***/ 8041: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.OidcClient = void 0; +const http_client_1 = __nccwpck_require__(6255); +const auth_1 = __nccwpck_require__(5526); +const core_1 = __nccwpck_require__(2186); +class OidcClient { + static createHttpClient(allowRetry = true, maxRetry = 10) { + const requestOptions = { + allowRetries: allowRetry, + maxRetries: maxRetry + }; + return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions); + } + static getRequestToken() { + const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN']; + if (!token) { + throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable'); + } + return token; + } + static getIDTokenUrl() { + const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL']; + if (!runtimeUrl) { + throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable'); + } + return runtimeUrl; + } + static getCall(id_token_url) { + var _a; + return __awaiter(this, void 0, void 0, function* () { + const httpclient = OidcClient.createHttpClient(); + const res = yield httpclient + .getJson(id_token_url) + .catch(error => { + throw new Error(`Failed to get ID Token. \n + Error Code : ${error.statusCode}\n + Error Message: ${error.message}`); + }); + const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value; + if (!id_token) { + throw new Error('Response json body do not have ID Token field'); + } + return id_token; + }); + } + static getIDToken(audience) { + return __awaiter(this, void 0, void 0, function* () { + try { + // New ID Token is requested from action service + let id_token_url = OidcClient.getIDTokenUrl(); + if (audience) { + const encodedAudience = encodeURIComponent(audience); + id_token_url = `${id_token_url}&audience=${encodedAudience}`; + } + core_1.debug(`ID token url is ${id_token_url}`); + const id_token = yield OidcClient.getCall(id_token_url); + core_1.setSecret(id_token); + return id_token; + } + catch (error) { + throw new Error(`Error message: ${error.message}`); + } + }); + } +} +exports.OidcClient = OidcClient; +//# sourceMappingURL=oidc-utils.js.map + +/***/ }), + +/***/ 2981: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0; +const path = __importStar(__nccwpck_require__(1017)); +/** + * toPosixPath converts the given path to the posix form. On Windows, \\ will be + * replaced with /. + * + * @param pth. Path to transform. + * @return string Posix path. + */ +function toPosixPath(pth) { + return pth.replace(/[\\]/g, '/'); +} +exports.toPosixPath = toPosixPath; +/** + * toWin32Path converts the given path to the win32 form. On Linux, / will be + * replaced with \\. + * + * @param pth. Path to transform. + * @return string Win32 path. + */ +function toWin32Path(pth) { + return pth.replace(/[/]/g, '\\'); +} +exports.toWin32Path = toWin32Path; +/** + * toPlatformPath converts the given path to a platform-specific path. It does + * this by replacing instances of / and \ with the platform-specific path + * separator. + * + * @param pth The path to platformize. + * @return string The platform-specific path. + */ +function toPlatformPath(pth) { + return pth.replace(/[/\\]/g, path.sep); +} +exports.toPlatformPath = toPlatformPath; +//# sourceMappingURL=path-utils.js.map + +/***/ }), + +/***/ 1327: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0; +const os_1 = __nccwpck_require__(2037); +const fs_1 = __nccwpck_require__(7147); +const { access, appendFile, writeFile } = fs_1.promises; +exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY'; +exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary'; +class Summary { + constructor() { + this._buffer = ''; + } + /** + * Finds the summary file path from the environment, rejects if env var is not found or file does not exist + * Also checks r/w permissions. + * + * @returns step summary file path + */ + filePath() { + return __awaiter(this, void 0, void 0, function* () { + if (this._filePath) { + return this._filePath; + } + const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR]; + if (!pathFromEnv) { + throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. 
Check if your runtime environment supports job summaries.`); + } + try { + yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK); + } + catch (_a) { + throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`); + } + this._filePath = pathFromEnv; + return this._filePath; + }); + } + /** + * Wraps content in an HTML tag, adding any HTML attributes + * + * @param {string} tag HTML tag to wrap + * @param {string | null} content content within the tag + * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add + * + * @returns {string} content wrapped in HTML element + */ + wrap(tag, content, attrs = {}) { + const htmlAttrs = Object.entries(attrs) + .map(([key, value]) => ` ${key}="${value}"`) + .join(''); + if (!content) { + return `<${tag}${htmlAttrs}>`; + } + return `<${tag}${htmlAttrs}>${content}</${tag}>`; + } + /** + * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default. + * + * @param {SummaryWriteOptions} [options] (optional) options for write operation + * + * @returns {Promise<Summary>} summary instance + */ + write(options) { + return __awaiter(this, void 0, void 0, function* () { + const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite); + const filePath = yield this.filePath(); + const writeFunc = overwrite ? writeFile : appendFile; + yield writeFunc(filePath, this._buffer, { encoding: 'utf8' }); + return this.emptyBuffer(); + }); + } + /** + * Clears the summary buffer and wipes the summary file + * + * @returns {Summary} summary instance + */ + clear() { + return __awaiter(this, void 0, void 0, function* () { + return this.emptyBuffer().write({ overwrite: true }); + }); + } + /** + * Returns the current summary buffer as a string + * + * @returns {string} string of summary buffer + */ + stringify() { + return this._buffer; + } + /** + * If the summary buffer is empty + * + * @returns {boolean} true if the buffer is empty + */ + isEmptyBuffer() { + return this._buffer.length === 0; + } + /** + * Resets the summary buffer without writing to summary file + * + * @returns {Summary} summary instance + */ + emptyBuffer() { + this._buffer = ''; + return this; + } + /** + * Adds raw text to the summary buffer + * + * @param {string} text content to add + * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false) + * + * @returns {Summary} summary instance + */ + addRaw(text, addEOL = false) { + this._buffer += text; + return addEOL ? this.addEOL() : this; + } + /** + * Adds the operating system-specific end-of-line marker to the buffer + * + * @returns {Summary} summary instance + */ + addEOL() { + return this.addRaw(os_1.EOL); + } + /** + * Adds an HTML codeblock to the summary buffer + * + * @param {string} code content to render within fenced code block + * @param {string} lang (optional) language to syntax highlight code + * + * @returns {Summary} summary instance + */ + addCodeBlock(code, lang) { + const attrs = Object.assign({}, (lang && { lang })); + const element = this.wrap('pre', this.wrap('code', code), attrs); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML list to the summary buffer + * + * @param {string[]} items list of items to render + * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false) + * + * @returns {Summary} summary instance + */ + addList(items, ordered = false) { + const tag = ordered ?
'ol' : 'ul'; + const listItems = items.map(item => this.wrap('li', item)).join(''); + const element = this.wrap(tag, listItems); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML table to the summary buffer + * + * @param {SummaryTableCell[]} rows table rows + * + * @returns {Summary} summary instance + */ + addTable(rows) { + const tableBody = rows + .map(row => { + const cells = row + .map(cell => { + if (typeof cell === 'string') { + return this.wrap('td', cell); + } + const { header, data, colspan, rowspan } = cell; + const tag = header ? 'th' : 'td'; + const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan })); + return this.wrap(tag, data, attrs); + }) + .join(''); + return this.wrap('tr', cells); + }) + .join(''); + const element = this.wrap('table', tableBody); + return this.addRaw(element).addEOL(); + } + /** + * Adds a collapsable HTML details element to the summary buffer + * + * @param {string} label text for the closed state + * @param {string} content collapsable content + * + * @returns {Summary} summary instance + */ + addDetails(label, content) { + const element = this.wrap('details', this.wrap('summary', label) + content); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML image tag to the summary buffer + * + * @param {string} src path to the image you to embed + * @param {string} alt text description of the image + * @param {SummaryImageOptions} options (optional) addition image attributes + * + * @returns {Summary} summary instance + */ + addImage(src, alt, options) { + const { width, height } = options || {}; + const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height })); + const element = this.wrap('img', null, Object.assign({ src, alt }, attrs)); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML section heading element + * + * @param {string} text heading text + * @param {number | string} [level=1] (optional) the heading level, default: 1 + * + * @returns {Summary} summary instance + */ + addHeading(text, level) { + const tag = `h${level}`; + const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag) + ? tag + : 'h1'; + const element = this.wrap(allowedTag, text); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML thematic break (
<hr>) to the summary buffer + * + * @returns {Summary} summary instance + */ + addSeparator() { + const element = this.wrap('hr', null); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML line break (<br>
) to the summary buffer + * + * @returns {Summary} summary instance + */ + addBreak() { + const element = this.wrap('br', null); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML blockquote to the summary buffer + * + * @param {string} text quote text + * @param {string} cite (optional) citation url + * + * @returns {Summary} summary instance + */ + addQuote(text, cite) { + const attrs = Object.assign({}, (cite && { cite })); + const element = this.wrap('blockquote', text, attrs); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML anchor tag to the summary buffer + * + * @param {string} text link text/content + * @param {string} href hyperlink + * + * @returns {Summary} summary instance + */ + addLink(text, href) { + const element = this.wrap('a', text, { href }); + return this.addRaw(element).addEOL(); + } +} +const _summary = new Summary(); +/** + * @deprecated use `core.summary` + */ +exports.markdownSummary = _summary; +exports.summary = _summary; +//# sourceMappingURL=summary.js.map + +/***/ }), + +/***/ 5278: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// We use any as a valid input type +/* eslint-disable @typescript-eslint/no-explicit-any */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toCommandProperties = exports.toCommandValue = void 0; +/** + * Sanitizes an input into a string so it can be passed into issueCommand safely + * @param input input to sanitize into a string + */ +function toCommandValue(input) { + if (input === null || input === undefined) { + return ''; + } + else if (typeof input === 'string' || input instanceof String) { + return input; + } + return JSON.stringify(input); +} +exports.toCommandValue = toCommandValue; +/** + * + * @param annotationProperties + * @returns The command properties to send with the actual annotation command + * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646 + */ +function toCommandProperties(annotationProperties) { + if (!Object.keys(annotationProperties).length) { + return {}; + } + return { + title: annotationProperties.title, + file: annotationProperties.file, + line: annotationProperties.startLine, + endLine: annotationProperties.endLine, + col: annotationProperties.startColumn, + endColumn: annotationProperties.endColumn + }; +} +exports.toCommandProperties = toCommandProperties; +//# sourceMappingURL=utils.js.map + +/***/ }), + +/***/ 5526: +/***/ (function(__unused_webpack_module, exports) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0; +class BasicCredentialHandler { + constructor(username, password) { + this.username = username; + this.password = password; + } + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.BasicCredentialHandler = BasicCredentialHandler; +class BearerCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Bearer ${this.token}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.BearerCredentialHandler = BearerCredentialHandler; +class PersonalAccessTokenCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; +//# sourceMappingURL=auth.js.map + +/***/ }), + +/***/ 6255: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +/* eslint-disable @typescript-eslint/no-explicit-any */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0; +const http = __importStar(__nccwpck_require__(3685)); +const https = __importStar(__nccwpck_require__(5687)); +const pm = __importStar(__nccwpck_require__(9835)); +const tunnel = __importStar(__nccwpck_require__(4294)); +var HttpCodes; +(function (HttpCodes) { + HttpCodes[HttpCodes["OK"] = 200] = "OK"; + HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; + HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; + HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; + HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; + HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; + HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; + HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; + HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; + HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; + HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; + HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; + HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; + HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; + HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; + HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; + HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; + HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; + HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; + HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; + HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; + HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; + HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; + HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; + HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; + HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; + HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; +})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {})); +var Headers; +(function (Headers) { + Headers["Accept"] = "accept"; + Headers["ContentType"] = "content-type"; +})(Headers = exports.Headers || (exports.Headers = {})); +var MediaTypes; +(function (MediaTypes) { + MediaTypes["ApplicationJson"] = "application/json"; +})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {})); +/** + * Returns the proxy URL, depending upon the supplied url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ +function getProxyUrl(serverUrl) { + const proxyUrl = pm.getProxyUrl(new URL(serverUrl)); + return proxyUrl ? 
proxyUrl.href : ''; +} +exports.getProxyUrl = getProxyUrl; +const HttpRedirectCodes = [ + HttpCodes.MovedPermanently, + HttpCodes.ResourceMoved, + HttpCodes.SeeOther, + HttpCodes.TemporaryRedirect, + HttpCodes.PermanentRedirect +]; +const HttpResponseRetryCodes = [ + HttpCodes.BadGateway, + HttpCodes.ServiceUnavailable, + HttpCodes.GatewayTimeout +]; +const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; +const ExponentialBackoffCeiling = 10; +const ExponentialBackoffTimeSlice = 5; +class HttpClientError extends Error { + constructor(message, statusCode) { + super(message); + this.name = 'HttpClientError'; + this.statusCode = statusCode; + Object.setPrototypeOf(this, HttpClientError.prototype); + } +} +exports.HttpClientError = HttpClientError; +class HttpClientResponse { + constructor(message) { + this.message = message; + } + readBody() { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + let output = Buffer.alloc(0); + this.message.on('data', (chunk) => { + output = Buffer.concat([output, chunk]); + }); + this.message.on('end', () => { + resolve(output.toString()); + }); + })); + }); + } + readBodyBuffer() { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + const chunks = []; + this.message.on('data', (chunk) => { + chunks.push(chunk); + }); + this.message.on('end', () => { + resolve(Buffer.concat(chunks)); + }); + })); + }); + } +} +exports.HttpClientResponse = HttpClientResponse; +function isHttps(requestUrl) { + const parsedUrl = new URL(requestUrl); + return parsedUrl.protocol === 'https:'; +} +exports.isHttps = isHttps; +class HttpClient { + constructor(userAgent, handlers, requestOptions) { + this._ignoreSslError = false; + this._allowRedirects = true; + this._allowRedirectDowngrade = false; + this._maxRedirects = 50; + this._allowRetries = false; + this._maxRetries = 1; + this._keepAlive = false; + this._disposed = false; + this.userAgent = userAgent; + this.handlers = handlers || []; + this.requestOptions = requestOptions; + if (requestOptions) { + if (requestOptions.ignoreSslError != null) { + this._ignoreSslError = requestOptions.ignoreSslError; + } + this._socketTimeout = requestOptions.socketTimeout; + if (requestOptions.allowRedirects != null) { + this._allowRedirects = requestOptions.allowRedirects; + } + if (requestOptions.allowRedirectDowngrade != null) { + this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; + } + if (requestOptions.maxRedirects != null) { + this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); + } + if (requestOptions.keepAlive != null) { + this._keepAlive = requestOptions.keepAlive; + } + if (requestOptions.allowRetries != null) { + this._allowRetries = requestOptions.allowRetries; + } + if (requestOptions.maxRetries != null) { + this._maxRetries = requestOptions.maxRetries; + } + } + } + options(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); + }); + } + get(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('GET', requestUrl, null, additionalHeaders || {}); + }); + } + del(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('DELETE', requestUrl, null, additionalHeaders || {}); + }); + } + post(requestUrl, 
data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('POST', requestUrl, data, additionalHeaders || {}); + }); + } + patch(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('PATCH', requestUrl, data, additionalHeaders || {}); + }); + } + put(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('PUT', requestUrl, data, additionalHeaders || {}); + }); + } + head(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('HEAD', requestUrl, null, additionalHeaders || {}); + }); + } + sendStream(verb, requestUrl, stream, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request(verb, requestUrl, stream, additionalHeaders); + }); + } + /** + * Gets a typed object from an endpoint + * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise + */ + getJson(requestUrl, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + const res = yield this.get(requestUrl, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + postJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.post(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + putJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.put(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + patchJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.patch(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + /** + * Makes a raw http request. + * All other methods such as get, post, patch, and request ultimately call this. 
+ * Prefer get, del, post and patch + */ + request(verb, requestUrl, data, headers) { + return __awaiter(this, void 0, void 0, function* () { + if (this._disposed) { + throw new Error('Client has already been disposed.'); + } + const parsedUrl = new URL(requestUrl); + let info = this._prepareRequest(verb, parsedUrl, headers); + // Only perform retries on reads since writes may not be idempotent. + const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb) + ? this._maxRetries + 1 + : 1; + let numTries = 0; + let response; + do { + response = yield this.requestRaw(info, data); + // Check if it's an authentication challenge + if (response && + response.message && + response.message.statusCode === HttpCodes.Unauthorized) { + let authenticationHandler; + for (const handler of this.handlers) { + if (handler.canHandleAuthentication(response)) { + authenticationHandler = handler; + break; + } + } + if (authenticationHandler) { + return authenticationHandler.handleAuthentication(this, info, data); + } + else { + // We have received an unauthorized response but have no handlers to handle it. + // Let the response return to the caller. + return response; + } + } + let redirectsRemaining = this._maxRedirects; + while (response.message.statusCode && + HttpRedirectCodes.includes(response.message.statusCode) && + this._allowRedirects && + redirectsRemaining > 0) { + const redirectUrl = response.message.headers['location']; + if (!redirectUrl) { + // if there's no location to redirect to, we won't + break; + } + const parsedRedirectUrl = new URL(redirectUrl); + if (parsedUrl.protocol === 'https:' && + parsedUrl.protocol !== parsedRedirectUrl.protocol && + !this._allowRedirectDowngrade) { + throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); + } + // we need to finish reading the response before reassigning response + // which will leak the open socket. + yield response.readBody(); + // strip authorization header if redirected to a different hostname + if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { + for (const header in headers) { + // header names are case insensitive + if (header.toLowerCase() === 'authorization') { + delete headers[header]; + } + } + } + // let's make the request with the new redirectUrl + info = this._prepareRequest(verb, parsedRedirectUrl, headers); + response = yield this.requestRaw(info, data); + redirectsRemaining--; + } + if (!response.message.statusCode || + !HttpResponseRetryCodes.includes(response.message.statusCode)) { + // If not a retry code, return immediately instead of retrying + return response; + } + numTries += 1; + if (numTries < maxTries) { + yield response.readBody(); + yield this._performExponentialBackoff(numTries); + } + } while (numTries < maxTries); + return response; + }); + } + /** + * Needs to be called if keepAlive is set to true in request options. + */ + dispose() { + if (this._agent) { + this._agent.destroy(); + } + this._disposed = true; + } + /** + * Raw request. + * @param info + * @param data + */ + requestRaw(info, data) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => { + function callbackForResult(err, res) { + if (err) { + reject(err); + } + else if (!res) { + // If `err` is not passed, then `res` must be passed. 
+ reject(new Error('Unknown error')); + } + else { + resolve(res); + } + } + this.requestRawWithCallback(info, data, callbackForResult); + }); + }); + } + /** + * Raw request with callback. + * @param info + * @param data + * @param onResult + */ + requestRawWithCallback(info, data, onResult) { + if (typeof data === 'string') { + if (!info.options.headers) { + info.options.headers = {}; + } + info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); + } + let callbackCalled = false; + function handleResult(err, res) { + if (!callbackCalled) { + callbackCalled = true; + onResult(err, res); + } + } + const req = info.httpModule.request(info.options, (msg) => { + const res = new HttpClientResponse(msg); + handleResult(undefined, res); + }); + let socket; + req.on('socket', sock => { + socket = sock; + }); + // If we ever get disconnected, we want the socket to timeout eventually + req.setTimeout(this._socketTimeout || 3 * 60000, () => { + if (socket) { + socket.end(); + } + handleResult(new Error(`Request timeout: ${info.options.path}`)); + }); + req.on('error', function (err) { + // err has statusCode property + // res should have headers + handleResult(err); + }); + if (data && typeof data === 'string') { + req.write(data, 'utf8'); + } + if (data && typeof data !== 'string') { + data.on('close', function () { + req.end(); + }); + data.pipe(req); + } + else { + req.end(); + } + } + /** + * Gets an http agent. This function is useful when you need an http agent that handles + * routing through a proxy server - depending upon the url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ + getAgent(serverUrl) { + const parsedUrl = new URL(serverUrl); + return this._getAgent(parsedUrl); + } + _prepareRequest(method, requestUrl, headers) { + const info = {}; + info.parsedUrl = requestUrl; + const usingSsl = info.parsedUrl.protocol === 'https:'; + info.httpModule = usingSsl ? https : http; + const defaultPort = usingSsl ? 443 : 80; + info.options = {}; + info.options.host = info.parsedUrl.hostname; + info.options.port = info.parsedUrl.port + ? 
parseInt(info.parsedUrl.port) + : defaultPort; + info.options.path = + (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); + info.options.method = method; + info.options.headers = this._mergeHeaders(headers); + if (this.userAgent != null) { + info.options.headers['user-agent'] = this.userAgent; + } + info.options.agent = this._getAgent(info.parsedUrl); + // gives handlers an opportunity to participate + if (this.handlers) { + for (const handler of this.handlers) { + handler.prepareRequest(info.options); + } + } + return info; + } + _mergeHeaders(headers) { + if (this.requestOptions && this.requestOptions.headers) { + return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {})); + } + return lowercaseKeys(headers || {}); + } + _getExistingOrDefaultHeader(additionalHeaders, header, _default) { + let clientHeader; + if (this.requestOptions && this.requestOptions.headers) { + clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; + } + return additionalHeaders[header] || clientHeader || _default; + } + _getAgent(parsedUrl) { + let agent; + const proxyUrl = pm.getProxyUrl(parsedUrl); + const useProxy = proxyUrl && proxyUrl.hostname; + if (this._keepAlive && useProxy) { + agent = this._proxyAgent; + } + if (this._keepAlive && !useProxy) { + agent = this._agent; + } + // if agent is already assigned use that agent. + if (agent) { + return agent; + } + const usingSsl = parsedUrl.protocol === 'https:'; + let maxSockets = 100; + if (this.requestOptions) { + maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; + } + // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis. + if (proxyUrl && proxyUrl.hostname) { + const agentOptions = { + maxSockets, + keepAlive: this._keepAlive, + proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && { + proxyAuth: `${proxyUrl.username}:${proxyUrl.password}` + })), { host: proxyUrl.hostname, port: proxyUrl.port }) + }; + let tunnelAgent; + const overHttps = proxyUrl.protocol === 'https:'; + if (usingSsl) { + tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; + } + else { + tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; + } + agent = tunnelAgent(agentOptions); + this._proxyAgent = agent; + } + // if reusing agent across request and tunneling agent isn't assigned create a new agent + if (this._keepAlive && !agent) { + const options = { keepAlive: this._keepAlive, maxSockets }; + agent = usingSsl ? new https.Agent(options) : new http.Agent(options); + this._agent = agent; + } + // if not using private agent and tunnel agent isn't setup then use global agent + if (!agent) { + agent = usingSsl ? 
https.globalAgent : http.globalAgent; + } + if (usingSsl && this._ignoreSslError) { + // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process + // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options + // we have to cast it to any and change it directly + agent.options = Object.assign(agent.options || {}, { + rejectUnauthorized: false + }); + } + return agent; + } + _performExponentialBackoff(retryNumber) { + return __awaiter(this, void 0, void 0, function* () { + retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); + const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); + return new Promise(resolve => setTimeout(() => resolve(), ms)); + }); + } + _processResponse(res, options) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { + const statusCode = res.message.statusCode || 0; + const response = { + statusCode, + result: null, + headers: {} + }; + // not found leads to null obj returned + if (statusCode === HttpCodes.NotFound) { + resolve(response); + } + // get the result from the body + function dateTimeDeserializer(key, value) { + if (typeof value === 'string') { + const a = new Date(value); + if (!isNaN(a.valueOf())) { + return a; + } + } + return value; + } + let obj; + let contents; + try { + contents = yield res.readBody(); + if (contents && contents.length > 0) { + if (options && options.deserializeDates) { + obj = JSON.parse(contents, dateTimeDeserializer); + } + else { + obj = JSON.parse(contents); + } + response.result = obj; + } + response.headers = res.message.headers; + } + catch (err) { + // Invalid resource (contents not json); leaving result obj null + } + // note that 3xx redirects are handled by the http layer. 
+ if (statusCode > 299) { + let msg; + // if exception/error in body, attempt to get better error + if (obj && obj.message) { + msg = obj.message; + } + else if (contents && contents.length > 0) { + // it may be the case that the exception is in the body message as string + msg = contents; + } + else { + msg = `Failed request: (${statusCode})`; + } + const err = new HttpClientError(msg, statusCode); + err.result = response.result; + reject(err); + } + else { + resolve(response); + } + })); + }); + } +} +exports.HttpClient = HttpClient; +const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 9835: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.checkBypass = exports.getProxyUrl = void 0; +function getProxyUrl(reqUrl) { + const usingSsl = reqUrl.protocol === 'https:'; + if (checkBypass(reqUrl)) { + return undefined; + } + const proxyVar = (() => { + if (usingSsl) { + return process.env['https_proxy'] || process.env['HTTPS_PROXY']; + } + else { + return process.env['http_proxy'] || process.env['HTTP_PROXY']; + } + })(); + if (proxyVar) { + try { + return new URL(proxyVar); + } + catch (_a) { + if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://')) + return new URL(`http://${proxyVar}`); + } + } + else { + return undefined; + } +} +exports.getProxyUrl = getProxyUrl; +function checkBypass(reqUrl) { + if (!reqUrl.hostname) { + return false; + } + const reqHost = reqUrl.hostname; + if (isLoopbackAddress(reqHost)) { + return true; + } + const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; + if (!noProxy) { + return false; + } + // Determine the request port + let reqPort; + if (reqUrl.port) { + reqPort = Number(reqUrl.port); + } + else if (reqUrl.protocol === 'http:') { + reqPort = 80; + } + else if (reqUrl.protocol === 'https:') { + reqPort = 443; + } + // Format the request hostname and hostname with port + const upperReqHosts = [reqUrl.hostname.toUpperCase()]; + if (typeof reqPort === 'number') { + upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); + } + // Compare request host against noproxy + for (const upperNoProxyItem of noProxy + .split(',') + .map(x => x.trim().toUpperCase()) + .filter(x => x)) { + if (upperNoProxyItem === '*' || + upperReqHosts.some(x => x === upperNoProxyItem || + x.endsWith(`.${upperNoProxyItem}`) || + (upperNoProxyItem.startsWith('.') && + x.endsWith(`${upperNoProxyItem}`)))) { + return true; + } + } + return false; +} +exports.checkBypass = checkBypass; +function isLoopbackAddress(host) { + const hostLower = host.toLowerCase(); + return (hostLower === 'localhost' || + hostLower.startsWith('127.') || + hostLower.startsWith('[::1]') || + hostLower.startsWith('[0:0:0:0:0:0:0:1]')); +} +//# sourceMappingURL=proxy.js.map + +/***/ }), + +/***/ 4294: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +module.exports = __nccwpck_require__(4219); + + +/***/ }), + +/***/ 4219: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var net = __nccwpck_require__(1808); +var tls = __nccwpck_require__(4404); +var http = __nccwpck_require__(3685); +var https = __nccwpck_require__(5687); +var events = __nccwpck_require__(2361); +var assert = __nccwpck_require__(9491); +var util = __nccwpck_require__(3837); + + +exports.httpOverHttp = httpOverHttp; +exports.httpsOverHttp = httpsOverHttp; 
+exports.httpOverHttps = httpOverHttps; +exports.httpsOverHttps = httpsOverHttps; + + +function httpOverHttp(options) { + var agent = new TunnelingAgent(options); + agent.request = http.request; + return agent; +} + +function httpsOverHttp(options) { + var agent = new TunnelingAgent(options); + agent.request = http.request; + agent.createSocket = createSecureSocket; + agent.defaultPort = 443; + return agent; +} + +function httpOverHttps(options) { + var agent = new TunnelingAgent(options); + agent.request = https.request; + return agent; +} + +function httpsOverHttps(options) { + var agent = new TunnelingAgent(options); + agent.request = https.request; + agent.createSocket = createSecureSocket; + agent.defaultPort = 443; + return agent; +} + + +function TunnelingAgent(options) { + var self = this; + self.options = options || {}; + self.proxyOptions = self.options.proxy || {}; + self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets; + self.requests = []; + self.sockets = []; + + self.on('free', function onFree(socket, host, port, localAddress) { + var options = toOptions(host, port, localAddress); + for (var i = 0, len = self.requests.length; i < len; ++i) { + var pending = self.requests[i]; + if (pending.host === options.host && pending.port === options.port) { + // Detect the request to connect same origin server, + // reuse the connection. + self.requests.splice(i, 1); + pending.request.onSocket(socket); + return; + } + } + socket.destroy(); + self.removeSocket(socket); + }); +} +util.inherits(TunnelingAgent, events.EventEmitter); + +TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) { + var self = this; + var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress)); + + if (self.sockets.length >= this.maxSockets) { + // We are over limit so we'll add it to the queue. + self.requests.push(options); + return; + } + + // If we are under maxSockets create a new one. 
+ self.createSocket(options, function(socket) { + socket.on('free', onFree); + socket.on('close', onCloseOrRemove); + socket.on('agentRemove', onCloseOrRemove); + req.onSocket(socket); + + function onFree() { + self.emit('free', socket, options); + } + + function onCloseOrRemove(err) { + self.removeSocket(socket); + socket.removeListener('free', onFree); + socket.removeListener('close', onCloseOrRemove); + socket.removeListener('agentRemove', onCloseOrRemove); + } + }); +}; + +TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { + var self = this; + var placeholder = {}; + self.sockets.push(placeholder); + + var connectOptions = mergeOptions({}, self.proxyOptions, { + method: 'CONNECT', + path: options.host + ':' + options.port, + agent: false, + headers: { + host: options.host + ':' + options.port + } + }); + if (options.localAddress) { + connectOptions.localAddress = options.localAddress; + } + if (connectOptions.proxyAuth) { + connectOptions.headers = connectOptions.headers || {}; + connectOptions.headers['Proxy-Authorization'] = 'Basic ' + + new Buffer(connectOptions.proxyAuth).toString('base64'); + } + + debug('making CONNECT request'); + var connectReq = self.request(connectOptions); + connectReq.useChunkedEncodingByDefault = false; // for v0.6 + connectReq.once('response', onResponse); // for v0.6 + connectReq.once('upgrade', onUpgrade); // for v0.6 + connectReq.once('connect', onConnect); // for v0.7 or later + connectReq.once('error', onError); + connectReq.end(); + + function onResponse(res) { + // Very hacky. This is necessary to avoid http-parser leaks. + res.upgrade = true; + } + + function onUpgrade(res, socket, head) { + // Hacky. + process.nextTick(function() { + onConnect(res, socket, head); + }); + } + + function onConnect(res, socket, head) { + connectReq.removeAllListeners(); + socket.removeAllListeners(); + + if (res.statusCode !== 200) { + debug('tunneling socket could not be established, statusCode=%d', + res.statusCode); + socket.destroy(); + var error = new Error('tunneling socket could not be established, ' + + 'statusCode=' + res.statusCode); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + return; + } + if (head.length > 0) { + debug('got illegal response body from proxy'); + socket.destroy(); + var error = new Error('got illegal response body from proxy'); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + return; + } + debug('tunneling connection has established'); + self.sockets[self.sockets.indexOf(placeholder)] = socket; + return cb(socket); + } + + function onError(cause) { + connectReq.removeAllListeners(); + + debug('tunneling socket could not be established, cause=%s\n', + cause.message, cause.stack); + var error = new Error('tunneling socket could not be established, ' + + 'cause=' + cause.message); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + } +}; + +TunnelingAgent.prototype.removeSocket = function removeSocket(socket) { + var pos = this.sockets.indexOf(socket) + if (pos === -1) { + return; + } + this.sockets.splice(pos, 1); + + var pending = this.requests.shift(); + if (pending) { + // If we have pending requests and a socket gets closed a new one + // needs to be created to take over in the pool for the one that closed. 
+ this.createSocket(pending, function(socket) { + pending.request.onSocket(socket); + }); + } +}; + +function createSecureSocket(options, cb) { + var self = this; + TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { + var hostHeader = options.request.getHeader('host'); + var tlsOptions = mergeOptions({}, self.options, { + socket: socket, + servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host + }); + + // 0 is dummy port for v0.6 + var secureSocket = tls.connect(0, tlsOptions); + self.sockets[self.sockets.indexOf(socket)] = secureSocket; + cb(secureSocket); + }); +} + + +function toOptions(host, port, localAddress) { + if (typeof host === 'string') { // since v0.10 + return { + host: host, + port: port, + localAddress: localAddress + }; + } + return host; // for v0.11 or later +} + +function mergeOptions(target) { + for (var i = 1, len = arguments.length; i < len; ++i) { + var overrides = arguments[i]; + if (typeof overrides === 'object') { + var keys = Object.keys(overrides); + for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { + var k = keys[j]; + if (overrides[k] !== undefined) { + target[k] = overrides[k]; + } + } + } + } + return target; +} + + +var debug; +if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { + debug = function() { + var args = Array.prototype.slice.call(arguments); + if (typeof args[0] === 'string') { + args[0] = 'TUNNEL: ' + args[0]; + } else { + args.unshift('TUNNEL:'); + } + console.error.apply(console, args); + } +} else { + debug = function() {}; +} +exports.debug = debug; // for test + + +/***/ }), + +/***/ 5840: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +Object.defineProperty(exports, "v1", ({ + enumerable: true, + get: function () { + return _v.default; + } +})); +Object.defineProperty(exports, "v3", ({ + enumerable: true, + get: function () { + return _v2.default; + } +})); +Object.defineProperty(exports, "v4", ({ + enumerable: true, + get: function () { + return _v3.default; + } +})); +Object.defineProperty(exports, "v5", ({ + enumerable: true, + get: function () { + return _v4.default; + } +})); +Object.defineProperty(exports, "NIL", ({ + enumerable: true, + get: function () { + return _nil.default; + } +})); +Object.defineProperty(exports, "version", ({ + enumerable: true, + get: function () { + return _version.default; + } +})); +Object.defineProperty(exports, "validate", ({ + enumerable: true, + get: function () { + return _validate.default; + } +})); +Object.defineProperty(exports, "stringify", ({ + enumerable: true, + get: function () { + return _stringify.default; + } +})); +Object.defineProperty(exports, "parse", ({ + enumerable: true, + get: function () { + return _parse.default; + } +})); + +var _v = _interopRequireDefault(__nccwpck_require__(8628)); + +var _v2 = _interopRequireDefault(__nccwpck_require__(6409)); + +var _v3 = _interopRequireDefault(__nccwpck_require__(5122)); + +var _v4 = _interopRequireDefault(__nccwpck_require__(9120)); + +var _nil = _interopRequireDefault(__nccwpck_require__(5332)); + +var _version = _interopRequireDefault(__nccwpck_require__(1595)); + +var _validate = _interopRequireDefault(__nccwpck_require__(6900)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(8950)); + +var _parse = _interopRequireDefault(__nccwpck_require__(2746)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +/***/ }), + +/***/ 4569: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('md5').update(bytes).digest(); +} + +var _default = md5; +exports["default"] = _default; + +/***/ }), + +/***/ 5332: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports["default"] = _default; + +/***/ }), + +/***/ 2746: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(6900)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ + + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +var _default = parse; +exports["default"] = _default; + +/***/ }), + +/***/ 814: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports["default"] = _default; + +/***/ }), + +/***/ 807: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = rng; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; + +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + _crypto.default.randomFillSync(rnds8Pool); + + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} + +/***/ }), + +/***/ 5274: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('sha1').update(bytes).digest(); +} + +var _default = sha1; +exports["default"] = _default; + +/***/ }), + +/***/ 8950: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(6900)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).substr(1)); +} + +function stringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 + const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +var _default = stringify; +exports["default"] = _default; + +/***/ }), + +/***/ 8628: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(807)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(8950)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.default)(b); +} + +var _default = v1; +exports["default"] = _default; + +/***/ }), + +/***/ 6409: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(5998)); + +var _md = _interopRequireDefault(__nccwpck_require__(4569)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports["default"] = _default; + +/***/ }), + +/***/ 5998: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = _default; +exports.URL = exports.DNS = void 0; + +var _stringify = _interopRequireDefault(__nccwpck_require__(8950)); + +var _parse = _interopRequireDefault(__nccwpck_require__(2746)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function _default(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (namespace.length !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... 
value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return (0, _stringify.default)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} + +/***/ }), + +/***/ 5122: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(807)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(8950)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function v4(options, buf, offset) { + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return (0, _stringify.default)(rnds); +} + +var _default = v4; +exports["default"] = _default; + +/***/ }), + +/***/ 9120: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(5998)); + +var _sha = _interopRequireDefault(__nccwpck_require__(5274)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports["default"] = _default; + +/***/ }), + +/***/ 6900: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _regex = _interopRequireDefault(__nccwpck_require__(814)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; +exports["default"] = _default; + +/***/ }), + +/***/ 1595: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(6900)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.substr(14, 1), 16); +} + +var _default = version; +exports["default"] = _default; + +/***/ }), + +/***/ 5770: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.formatKey = void 0; +/** + * Formats a input name to a snake case string + * + * - Removes leading and trailing whitespace + * - Converts to lowercase + * - Replaces spaces with underscores + * - Replaces non-alphanumeric characters with underscores + * - Replaces multiple consecutive underscores with a single underscore + */ +function formatKey(name) { + return name + .trim() + .toLowerCase() + .replace(/[^a-z0-9]/g, '_') + .replace(/^_+|_+$/g, '') + .replace(/_+/g, '_'); +} +exports.formatKey = formatKey; + + +/***/ }), + +/***/ 399: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.run = void 0; +const core = __importStar(__nccwpck_require__(2186)); +const fs_1 = __importDefault(__nccwpck_require__(7147)); +const yaml_1 = __importDefault(__nccwpck_require__(4083)); +const parse_1 = __nccwpck_require__(6089); +/** + * The entrypoint for the action + */ +async function run() { + // Get inputs + const template = core.getInput('issue-form-template', { + required: true + }); + const workspace = core.getInput('workspace', { required: true }); + // Verify the template exists + if (!fs_1.default.existsSync(`${workspace.replace(/\/+$/, '')}/${template}`)) { + core.setFailed(`Template not found: ${template}`); + return; + } + // Read and parse the template + const templateYaml = yaml_1.default.parse(fs_1.default.readFileSync(`${workspace}/${template}`, 'utf8')); + await (0, parse_1.parseTemplate)(templateYaml); +} +exports.run = run; + + +/***/ }), + +/***/ 6089: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.parseTemplate = void 0; +const format_1 = __nccwpck_require__(5770); +/** + * Parses the issue form template and returns a dictionary of fields + * @param template The issue form template + * @returns A dictionary of fields + */ +async function parseTemplate(template) { + const fields = {}; + for (const item of template.body) { + // Skip markdown fields + if (item.type === 'markdown') + continue; + // Convert the label to snake case. This is the heading in the issue body + // when the form is submitted, and is used by issue-ops/parser as the key. + const formattedKey = (0, format_1.formatKey)(item.attributes.label); + // Take the rest of the attributes and add them to the fields object + fields[formattedKey] = { + type: item.type, + required: item.validations?.required || false + }; + if (item.type === 'dropdown') { + // These fields are only used by dropdowns + fields[formattedKey].default = item.attributes.default; + fields[formattedKey].multiple = item.attributes.multiple || false; + fields[formattedKey].options = item.attributes.options; + } + if (item.type === 'checkboxes') { + // Checkboxes have a different options format than dropdowns + // Enforce false for required if not present + fields[formattedKey].options = item.attributes.options.map((x) => { + return { label: x.label, required: x.required || false }; + }); + } + } + return fields; +} +exports.parseTemplate = parseTemplate; + + +/***/ }), + +/***/ 9491: +/***/ ((module) => { + +"use strict"; +module.exports = require("assert"); + +/***/ }), + +/***/ 6113: +/***/ ((module) => { + +"use strict"; +module.exports = require("crypto"); + +/***/ }), + +/***/ 2361: +/***/ ((module) => { + +"use strict"; +module.exports = require("events"); + +/***/ }), + +/***/ 7147: +/***/ ((module) => { + +"use strict"; +module.exports = require("fs"); + +/***/ }), + +/***/ 3685: +/***/ ((module) => { + +"use strict"; +module.exports = require("http"); + +/***/ }), + +/***/ 5687: +/***/ ((module) => { + +"use strict"; +module.exports = require("https"); + +/***/ }), + +/***/ 1808: +/***/ ((module) => { + +"use strict"; +module.exports = require("net"); + +/***/ }), + +/***/ 2037: +/***/ ((module) => { + +"use strict"; +module.exports = require("os"); + +/***/ }), + +/***/ 1017: +/***/ ((module) => { + +"use strict"; +module.exports = require("path"); + +/***/ }), + +/***/ 4404: +/***/ ((module) => { + +"use 
strict"; +module.exports = require("tls"); + +/***/ }), + +/***/ 3837: +/***/ ((module) => { + +"use strict"; +module.exports = require("util"); + +/***/ }), + +/***/ 8109: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var identity = __nccwpck_require__(5589); +var Scalar = __nccwpck_require__(9338); +var YAMLMap = __nccwpck_require__(6011); +var YAMLSeq = __nccwpck_require__(5161); +var resolveBlockMap = __nccwpck_require__(2986); +var resolveBlockSeq = __nccwpck_require__(2289); +var resolveFlowCollection = __nccwpck_require__(45); + +function resolveCollection(CN, ctx, token, onError, tagName, tag) { + const coll = token.type === 'block-map' + ? resolveBlockMap.resolveBlockMap(CN, ctx, token, onError, tag) + : token.type === 'block-seq' + ? resolveBlockSeq.resolveBlockSeq(CN, ctx, token, onError, tag) + : resolveFlowCollection.resolveFlowCollection(CN, ctx, token, onError, tag); + const Coll = coll.constructor; + // If we got a tagName matching the class, or the tag name is '!', + // then use the tagName from the node class used to create it. + if (tagName === '!' || tagName === Coll.tagName) { + coll.tag = Coll.tagName; + return coll; + } + if (tagName) + coll.tag = tagName; + return coll; +} +function composeCollection(CN, ctx, token, tagToken, onError) { + const tagName = !tagToken + ? null + : ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg)); + const expType = token.type === 'block-map' + ? 'map' + : token.type === 'block-seq' + ? 'seq' + : token.start.source === '{' + ? 'map' + : 'seq'; + // shortcut: check if it's a generic YAMLMap or YAMLSeq + // before jumping into the custom tag logic. + if (!tagToken || + !tagName || + tagName === '!' || + (tagName === YAMLMap.YAMLMap.tagName && expType === 'map') || + (tagName === YAMLSeq.YAMLSeq.tagName && expType === 'seq') || + !expType) { + return resolveCollection(CN, ctx, token, onError, tagName); + } + let tag = ctx.schema.tags.find(t => t.tag === tagName && t.collection === expType); + if (!tag) { + const kt = ctx.schema.knownTags[tagName]; + if (kt && kt.collection === expType) { + ctx.schema.tags.push(Object.assign({}, kt, { default: false })); + tag = kt; + } + else { + if (kt?.collection) { + onError(tagToken, 'BAD_COLLECTION_TYPE', `${kt.tag} used for ${expType} collection, but expects ${kt.collection}`, true); + } + else { + onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, true); + } + return resolveCollection(CN, ctx, token, onError, tagName); + } + } + const coll = resolveCollection(CN, ctx, token, onError, tagName, tag); + const res = tag.resolve?.(coll, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg), ctx.options) ?? coll; + const node = identity.isNode(res) + ? 
res + : new Scalar.Scalar(res); + node.range = coll.range; + node.tag = tagName; + if (tag?.format) + node.format = tag.format; + return node; +} + +exports.composeCollection = composeCollection; + + +/***/ }), + +/***/ 5050: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var Document = __nccwpck_require__(42); +var composeNode = __nccwpck_require__(8676); +var resolveEnd = __nccwpck_require__(1250); +var resolveProps = __nccwpck_require__(6985); + +function composeDoc(options, directives, { offset, start, value, end }, onError) { + const opts = Object.assign({ _directives: directives }, options); + const doc = new Document.Document(undefined, opts); + const ctx = { + atRoot: true, + directives: doc.directives, + options: doc.options, + schema: doc.schema + }; + const props = resolveProps.resolveProps(start, { + indicator: 'doc-start', + next: value ?? end?.[0], + offset, + onError, + startOnNewline: true + }); + if (props.found) { + doc.directives.docStart = true; + if (value && + (value.type === 'block-map' || value.type === 'block-seq') && + !props.hasNewline) + onError(props.end, 'MISSING_CHAR', 'Block collection cannot start on same line with directives-end marker'); + } + // @ts-expect-error If Contents is set, let's trust the user + doc.contents = value + ? composeNode.composeNode(ctx, value, props, onError) + : composeNode.composeEmptyNode(ctx, props.end, start, null, props, onError); + const contentEnd = doc.contents.range[2]; + const re = resolveEnd.resolveEnd(end, contentEnd, false, onError); + if (re.comment) + doc.comment = re.comment; + doc.range = [offset, contentEnd, re.offset]; + return doc; +} + +exports.composeDoc = composeDoc; + + +/***/ }), + +/***/ 8676: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var Alias = __nccwpck_require__(5639); +var composeCollection = __nccwpck_require__(8109); +var composeScalar = __nccwpck_require__(4766); +var resolveEnd = __nccwpck_require__(1250); +var utilEmptyScalarPosition = __nccwpck_require__(8781); + +const CN = { composeNode, composeEmptyNode }; +function composeNode(ctx, token, props, onError) { + const { spaceBefore, comment, anchor, tag } = props; + let node; + let isSrcToken = true; + switch (token.type) { + case 'alias': + node = composeAlias(ctx, token, onError); + if (anchor || tag) + onError(token, 'ALIAS_PROPS', 'An alias node must not specify any properties'); + break; + case 'scalar': + case 'single-quoted-scalar': + case 'double-quoted-scalar': + case 'block-scalar': + node = composeScalar.composeScalar(ctx, token, tag, onError); + if (anchor) + node.anchor = anchor.source.substring(1); + break; + case 'block-map': + case 'block-seq': + case 'flow-collection': + node = composeCollection.composeCollection(CN, ctx, token, tag, onError); + if (anchor) + node.anchor = anchor.source.substring(1); + break; + default: { + const message = token.type === 'error' + ? 
token.message + : `Unsupported token (type: ${token.type})`; + onError(token, 'UNEXPECTED_TOKEN', message); + node = composeEmptyNode(ctx, token.offset, undefined, null, props, onError); + isSrcToken = false; + } + } + if (anchor && node.anchor === '') + onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string'); + if (spaceBefore) + node.spaceBefore = true; + if (comment) { + if (token.type === 'scalar' && token.source === '') + node.comment = comment; + else + node.commentBefore = comment; + } + // @ts-expect-error Type checking misses meaning of isSrcToken + if (ctx.options.keepSourceTokens && isSrcToken) + node.srcToken = token; + return node; +} +function composeEmptyNode(ctx, offset, before, pos, { spaceBefore, comment, anchor, tag, end }, onError) { + const token = { + type: 'scalar', + offset: utilEmptyScalarPosition.emptyScalarPosition(offset, before, pos), + indent: -1, + source: '' + }; + const node = composeScalar.composeScalar(ctx, token, tag, onError); + if (anchor) { + node.anchor = anchor.source.substring(1); + if (node.anchor === '') + onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string'); + } + if (spaceBefore) + node.spaceBefore = true; + if (comment) { + node.comment = comment; + node.range[2] = end; + } + return node; +} +function composeAlias({ options }, { offset, source, end }, onError) { + const alias = new Alias.Alias(source.substring(1)); + if (alias.source === '') + onError(offset, 'BAD_ALIAS', 'Alias cannot be an empty string'); + if (alias.source.endsWith(':')) + onError(offset + source.length - 1, 'BAD_ALIAS', 'Alias ending in : is ambiguous', true); + const valueEnd = offset + source.length; + const re = resolveEnd.resolveEnd(end, valueEnd, options.strict, onError); + alias.range = [offset, valueEnd, re.offset]; + if (re.comment) + alias.comment = re.comment; + return alias; +} + +exports.composeEmptyNode = composeEmptyNode; +exports.composeNode = composeNode; + + +/***/ }), + +/***/ 4766: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var identity = __nccwpck_require__(5589); +var Scalar = __nccwpck_require__(9338); +var resolveBlockScalar = __nccwpck_require__(9485); +var resolveFlowScalar = __nccwpck_require__(7578); + +function composeScalar(ctx, token, tagToken, onError) { + const { value, type, comment, range } = token.type === 'block-scalar' + ? resolveBlockScalar.resolveBlockScalar(token, ctx.options.strict, onError) + : resolveFlowScalar.resolveFlowScalar(token, ctx.options.strict, onError); + const tagName = tagToken + ? ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg)) + : null; + const tag = tagToken && tagName + ? findScalarTagByName(ctx.schema, value, tagName, tagToken, onError) + : token.type === 'scalar' + ? findScalarTagByTest(ctx, value, token, onError) + : ctx.schema[identity.SCALAR]; + let scalar; + try { + const res = tag.resolve(value, msg => onError(tagToken ?? token, 'TAG_RESOLVE_FAILED', msg), ctx.options); + scalar = identity.isScalar(res) ? res : new Scalar.Scalar(res); + } + catch (error) { + const msg = error instanceof Error ? error.message : String(error); + onError(tagToken ?? 
token, 'TAG_RESOLVE_FAILED', msg); + scalar = new Scalar.Scalar(value); + } + scalar.range = range; + scalar.source = value; + if (type) + scalar.type = type; + if (tagName) + scalar.tag = tagName; + if (tag.format) + scalar.format = tag.format; + if (comment) + scalar.comment = comment; + return scalar; +} +function findScalarTagByName(schema, value, tagName, tagToken, onError) { + if (tagName === '!') + return schema[identity.SCALAR]; // non-specific tag + const matchWithTest = []; + for (const tag of schema.tags) { + if (!tag.collection && tag.tag === tagName) { + if (tag.default && tag.test) + matchWithTest.push(tag); + else + return tag; + } + } + for (const tag of matchWithTest) + if (tag.test?.test(value)) + return tag; + const kt = schema.knownTags[tagName]; + if (kt && !kt.collection) { + // Ensure that the known tag is available for stringifying, + // but does not get used by default. + schema.tags.push(Object.assign({}, kt, { default: false, test: undefined })); + return kt; + } + onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, tagName !== 'tag:yaml.org,2002:str'); + return schema[identity.SCALAR]; +} +function findScalarTagByTest({ directives, schema }, value, token, onError) { + const tag = schema.tags.find(tag => tag.default && tag.test?.test(value)) || schema[identity.SCALAR]; + if (schema.compat) { + const compat = schema.compat.find(tag => tag.default && tag.test?.test(value)) ?? + schema[identity.SCALAR]; + if (tag.tag !== compat.tag) { + const ts = directives.tagString(tag.tag); + const cs = directives.tagString(compat.tag); + const msg = `Value may be parsed as either ${ts} or ${cs}`; + onError(token, 'TAG_RESOLVE_FAILED', msg, true); + } + } + return tag; +} + +exports.composeScalar = composeScalar; + + +/***/ }), + +/***/ 9493: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var directives = __nccwpck_require__(5400); +var Document = __nccwpck_require__(42); +var errors = __nccwpck_require__(4236); +var identity = __nccwpck_require__(5589); +var composeDoc = __nccwpck_require__(5050); +var resolveEnd = __nccwpck_require__(1250); + +function getErrorPos(src) { + if (typeof src === 'number') + return [src, src + 1]; + if (Array.isArray(src)) + return src.length === 2 ? src : [src[0], src[1]]; + const { offset, source } = src; + return [offset, offset + (typeof source === 'string' ? source.length : 1)]; +} +function parsePrelude(prelude) { + let comment = ''; + let atComment = false; + let afterEmptyLine = false; + for (let i = 0; i < prelude.length; ++i) { + const source = prelude[i]; + switch (source[0]) { + case '#': + comment += + (comment === '' ? '' : afterEmptyLine ? '\n\n' : '\n') + + (source.substring(1) || ' '); + atComment = true; + afterEmptyLine = false; + break; + case '%': + if (prelude[i + 1]?.[0] !== '#') + i += 1; + atComment = false; + break; + default: + // This may be wrong after doc-end, but in that case it doesn't matter + if (!atComment) + afterEmptyLine = true; + atComment = false; + } + } + return { comment, afterEmptyLine }; +} +/** + * Compose a stream of CST nodes into a stream of YAML Documents. + * + * ```ts + * import { Composer, Parser } from 'yaml' + * + * const src: string = ... 
+ * const tokens = new Parser().parse(src) + * const docs = new Composer().compose(tokens) + * ``` + */ +class Composer { + constructor(options = {}) { + this.doc = null; + this.atDirectives = false; + this.prelude = []; + this.errors = []; + this.warnings = []; + this.onError = (source, code, message, warning) => { + const pos = getErrorPos(source); + if (warning) + this.warnings.push(new errors.YAMLWarning(pos, code, message)); + else + this.errors.push(new errors.YAMLParseError(pos, code, message)); + }; + // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing + this.directives = new directives.Directives({ version: options.version || '1.2' }); + this.options = options; + } + decorate(doc, afterDoc) { + const { comment, afterEmptyLine } = parsePrelude(this.prelude); + //console.log({ dc: doc.comment, prelude, comment }) + if (comment) { + const dc = doc.contents; + if (afterDoc) { + doc.comment = doc.comment ? `${doc.comment}\n${comment}` : comment; + } + else if (afterEmptyLine || doc.directives.docStart || !dc) { + doc.commentBefore = comment; + } + else if (identity.isCollection(dc) && !dc.flow && dc.items.length > 0) { + let it = dc.items[0]; + if (identity.isPair(it)) + it = it.key; + const cb = it.commentBefore; + it.commentBefore = cb ? `${comment}\n${cb}` : comment; + } + else { + const cb = dc.commentBefore; + dc.commentBefore = cb ? `${comment}\n${cb}` : comment; + } + } + if (afterDoc) { + Array.prototype.push.apply(doc.errors, this.errors); + Array.prototype.push.apply(doc.warnings, this.warnings); + } + else { + doc.errors = this.errors; + doc.warnings = this.warnings; + } + this.prelude = []; + this.errors = []; + this.warnings = []; + } + /** + * Current stream status information. + * + * Mostly useful at the end of input for an empty stream. + */ + streamInfo() { + return { + comment: parsePrelude(this.prelude).comment, + directives: this.directives, + errors: this.errors, + warnings: this.warnings + }; + } + /** + * Compose tokens into documents. + * + * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document. + * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly. + */ + *compose(tokens, forceDoc = false, endOffset = -1) { + for (const token of tokens) + yield* this.next(token); + yield* this.end(forceDoc, endOffset); + } + /** Advance the composer by one CST token. */ + *next(token) { + if (process.env.LOG_STREAM) + console.dir(token, { depth: null }); + switch (token.type) { + case 'directive': + this.directives.add(token.source, (offset, message, warning) => { + const pos = getErrorPos(token); + pos[0] += offset; + this.onError(pos, 'BAD_DIRECTIVE', message, warning); + }); + this.prelude.push(token.source); + this.atDirectives = true; + break; + case 'document': { + const doc = composeDoc.composeDoc(this.options, this.directives, token, this.onError); + if (this.atDirectives && !doc.directives.docStart) + this.onError(token, 'MISSING_CHAR', 'Missing directives-end/doc-start indicator line'); + this.decorate(doc, false); + if (this.doc) + yield this.doc; + this.doc = doc; + this.atDirectives = false; + break; + } + case 'byte-order-mark': + case 'space': + break; + case 'comment': + case 'newline': + this.prelude.push(token.source); + break; + case 'error': { + const msg = token.source + ? 
`${token.message}: ${JSON.stringify(token.source)}` + : token.message; + const error = new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg); + if (this.atDirectives || !this.doc) + this.errors.push(error); + else + this.doc.errors.push(error); + break; + } + case 'doc-end': { + if (!this.doc) { + const msg = 'Unexpected doc-end without preceding document'; + this.errors.push(new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg)); + break; + } + this.doc.directives.docEnd = true; + const end = resolveEnd.resolveEnd(token.end, token.offset + token.source.length, this.doc.options.strict, this.onError); + this.decorate(this.doc, true); + if (end.comment) { + const dc = this.doc.comment; + this.doc.comment = dc ? `${dc}\n${end.comment}` : end.comment; + } + this.doc.range[2] = end.offset; + break; + } + default: + this.errors.push(new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', `Unsupported token ${token.type}`)); + } + } + /** + * Call at end of input to yield any remaining document. + * + * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document. + * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly. + */ + *end(forceDoc = false, endOffset = -1) { + if (this.doc) { + this.decorate(this.doc, true); + yield this.doc; + this.doc = null; + } + else if (forceDoc) { + const opts = Object.assign({ _directives: this.directives }, this.options); + const doc = new Document.Document(undefined, opts); + if (this.atDirectives) + this.onError(endOffset, 'MISSING_CHAR', 'Missing directives-end indicator line'); + doc.range = [0, endOffset, endOffset]; + this.decorate(doc, false); + yield doc; + } + } +} + +exports.Composer = Composer; + + +/***/ }), + +/***/ 2986: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var Pair = __nccwpck_require__(246); +var YAMLMap = __nccwpck_require__(6011); +var resolveProps = __nccwpck_require__(6985); +var utilContainsNewline = __nccwpck_require__(976); +var utilFlowIndentCheck = __nccwpck_require__(3669); +var utilMapIncludes = __nccwpck_require__(6899); + +const startColMsg = 'All mapping items must start at the same column'; +function resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError, tag) { + const NodeClass = tag?.nodeClass ?? YAMLMap.YAMLMap; + const map = new NodeClass(ctx.schema); + if (ctx.atRoot) + ctx.atRoot = false; + let offset = bm.offset; + let commentEnd = null; + for (const collItem of bm.items) { + const { start, key, sep, value } = collItem; + // key properties + const keyProps = resolveProps.resolveProps(start, { + indicator: 'explicit-key-ind', + next: key ?? sep?.[0], + offset, + onError, + startOnNewline: true + }); + const implicitKey = !keyProps.found; + if (implicitKey) { + if (key) { + if (key.type === 'block-seq') + onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'A block sequence may not be used as an implicit map key'); + else if ('indent' in key && key.indent !== bm.indent) + onError(offset, 'BAD_INDENT', startColMsg); + } + if (!keyProps.anchor && !keyProps.tag && !sep) { + commentEnd = keyProps.end; + if (keyProps.comment) { + if (map.comment) + map.comment += '\n' + keyProps.comment; + else + map.comment = keyProps.comment; + } + continue; + } + if (keyProps.hasNewlineAfterProp || utilContainsNewline.containsNewline(key)) { + onError(key ?? 
start[start.length - 1], 'MULTILINE_IMPLICIT_KEY', 'Implicit keys need to be on a single line'); + } + } + else if (keyProps.found?.indent !== bm.indent) { + onError(offset, 'BAD_INDENT', startColMsg); + } + // key value + const keyStart = keyProps.end; + const keyNode = key + ? composeNode(ctx, key, keyProps, onError) + : composeEmptyNode(ctx, keyStart, start, null, keyProps, onError); + if (ctx.schema.compat) + utilFlowIndentCheck.flowIndentCheck(bm.indent, key, onError); + if (utilMapIncludes.mapIncludes(ctx, map.items, keyNode)) + onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique'); + // value properties + const valueProps = resolveProps.resolveProps(sep ?? [], { + indicator: 'map-value-ind', + next: value, + offset: keyNode.range[2], + onError, + startOnNewline: !key || key.type === 'block-scalar' + }); + offset = valueProps.end; + if (valueProps.found) { + if (implicitKey) { + if (value?.type === 'block-map' && !valueProps.hasNewline) + onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'Nested mappings are not allowed in compact mappings'); + if (ctx.options.strict && + keyProps.start < valueProps.found.offset - 1024) + onError(keyNode.range, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit block mapping key'); + } + // value value + const valueNode = value + ? composeNode(ctx, value, valueProps, onError) + : composeEmptyNode(ctx, offset, sep, null, valueProps, onError); + if (ctx.schema.compat) + utilFlowIndentCheck.flowIndentCheck(bm.indent, value, onError); + offset = valueNode.range[2]; + const pair = new Pair.Pair(keyNode, valueNode); + if (ctx.options.keepSourceTokens) + pair.srcToken = collItem; + map.items.push(pair); + } + else { + // key with no value + if (implicitKey) + onError(keyNode.range, 'MISSING_CHAR', 'Implicit map keys need to be followed by map values'); + if (valueProps.comment) { + if (keyNode.comment) + keyNode.comment += '\n' + valueProps.comment; + else + keyNode.comment = valueProps.comment; + } + const pair = new Pair.Pair(keyNode); + if (ctx.options.keepSourceTokens) + pair.srcToken = collItem; + map.items.push(pair); + } + } + if (commentEnd && commentEnd < offset) + onError(commentEnd, 'IMPOSSIBLE', 'Map comment with trailing content'); + map.range = [bm.offset, offset, commentEnd ?? offset]; + return map; +} + +exports.resolveBlockMap = resolveBlockMap; + + +/***/ }), + +/***/ 9485: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var Scalar = __nccwpck_require__(9338); + +function resolveBlockScalar(scalar, strict, onError) { + const start = scalar.offset; + const header = parseBlockScalarHeader(scalar, strict, onError); + if (!header) + return { value: '', type: null, comment: '', range: [start, start, start] }; + const type = header.mode === '>' ? Scalar.Scalar.BLOCK_FOLDED : Scalar.Scalar.BLOCK_LITERAL; + const lines = scalar.source ? splitLines(scalar.source) : []; + // determine the end of content & start of chomping + let chompStart = lines.length; + for (let i = lines.length - 1; i >= 0; --i) { + const content = lines[i][1]; + if (content === '' || content === '\r') + chompStart = i; + else + break; + } + // shortcut for empty contents + if (chompStart === 0) { + const value = header.chomp === '+' && lines.length > 0 + ? 
'\n'.repeat(Math.max(1, lines.length - 1)) + : ''; + let end = start + header.length; + if (scalar.source) + end += scalar.source.length; + return { value, type, comment: header.comment, range: [start, end, end] }; + } + // find the indentation level to trim from start + let trimIndent = scalar.indent + header.indent; + let offset = scalar.offset + header.length; + let contentStart = 0; + for (let i = 0; i < chompStart; ++i) { + const [indent, content] = lines[i]; + if (content === '' || content === '\r') { + if (header.indent === 0 && indent.length > trimIndent) + trimIndent = indent.length; + } + else { + if (indent.length < trimIndent) { + const message = 'Block scalars with more-indented leading empty lines must use an explicit indentation indicator'; + onError(offset + indent.length, 'MISSING_CHAR', message); + } + if (header.indent === 0) + trimIndent = indent.length; + contentStart = i; + break; + } + offset += indent.length + content.length + 1; + } + // include trailing more-indented empty lines in content + for (let i = lines.length - 1; i >= chompStart; --i) { + if (lines[i][0].length > trimIndent) + chompStart = i + 1; + } + let value = ''; + let sep = ''; + let prevMoreIndented = false; + // leading whitespace is kept intact + for (let i = 0; i < contentStart; ++i) + value += lines[i][0].slice(trimIndent) + '\n'; + for (let i = contentStart; i < chompStart; ++i) { + let [indent, content] = lines[i]; + offset += indent.length + content.length + 1; + const crlf = content[content.length - 1] === '\r'; + if (crlf) + content = content.slice(0, -1); + /* istanbul ignore if already caught in lexer */ + if (content && indent.length < trimIndent) { + const src = header.indent + ? 'explicit indentation indicator' + : 'first line'; + const message = `Block scalar lines must not be less indented than their ${src}`; + onError(offset - content.length - (crlf ? 
2 : 1), 'BAD_INDENT', message); + indent = ''; + } + if (type === Scalar.Scalar.BLOCK_LITERAL) { + value += sep + indent.slice(trimIndent) + content; + sep = '\n'; + } + else if (indent.length > trimIndent || content[0] === '\t') { + // more-indented content within a folded block + if (sep === ' ') + sep = '\n'; + else if (!prevMoreIndented && sep === '\n') + sep = '\n\n'; + value += sep + indent.slice(trimIndent) + content; + sep = '\n'; + prevMoreIndented = true; + } + else if (content === '') { + // empty line + if (sep === '\n') + value += '\n'; + else + sep = '\n'; + } + else { + value += sep + content; + sep = ' '; + prevMoreIndented = false; + } + } + switch (header.chomp) { + case '-': + break; + case '+': + for (let i = chompStart; i < lines.length; ++i) + value += '\n' + lines[i][0].slice(trimIndent); + if (value[value.length - 1] !== '\n') + value += '\n'; + break; + default: + value += '\n'; + } + const end = start + header.length + scalar.source.length; + return { value, type, comment: header.comment, range: [start, end, end] }; +} +function parseBlockScalarHeader({ offset, props }, strict, onError) { + /* istanbul ignore if should not happen */ + if (props[0].type !== 'block-scalar-header') { + onError(props[0], 'IMPOSSIBLE', 'Block scalar header not found'); + return null; + } + const { source } = props[0]; + const mode = source[0]; + let indent = 0; + let chomp = ''; + let error = -1; + for (let i = 1; i < source.length; ++i) { + const ch = source[i]; + if (!chomp && (ch === '-' || ch === '+')) + chomp = ch; + else { + const n = Number(ch); + if (!indent && n) + indent = n; + else if (error === -1) + error = offset + i; + } + } + if (error !== -1) + onError(error, 'UNEXPECTED_TOKEN', `Block scalar header includes extra characters: ${source}`); + let hasSpace = false; + let comment = ''; + let length = source.length; + for (let i = 1; i < props.length; ++i) { + const token = props[i]; + switch (token.type) { + case 'space': + hasSpace = true; + // fallthrough + case 'newline': + length += token.source.length; + break; + case 'comment': + if (strict && !hasSpace) { + const message = 'Comments must be separated from other tokens by white space characters'; + onError(token, 'MISSING_CHAR', message); + } + length += token.source.length; + comment = token.source.substring(1); + break; + case 'error': + onError(token, 'UNEXPECTED_TOKEN', token.message); + length += token.source.length; + break; + /* istanbul ignore next should not happen */ + default: { + const message = `Unexpected token in block scalar header: ${token.type}`; + onError(token, 'UNEXPECTED_TOKEN', message); + const ts = token.source; + if (ts && typeof ts === 'string') + length += ts.length; + } + } + } + return { mode, indent, chomp, comment, length }; +} +/** @returns Array of lines split up as `[indent, content]` */ +function splitLines(source) { + const split = source.split(/\n( *)/); + const first = split[0]; + const m = first.match(/^( *)/); + const line0 = m?.[1] + ? 
[m[1], first.slice(m[1].length)] + : ['', first]; + const lines = [line0]; + for (let i = 1; i < split.length; i += 2) + lines.push([split[i], split[i + 1]]); + return lines; +} + +exports.resolveBlockScalar = resolveBlockScalar; + + +/***/ }), + +/***/ 2289: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var YAMLSeq = __nccwpck_require__(5161); +var resolveProps = __nccwpck_require__(6985); +var utilFlowIndentCheck = __nccwpck_require__(3669); + +function resolveBlockSeq({ composeNode, composeEmptyNode }, ctx, bs, onError, tag) { + const NodeClass = tag?.nodeClass ?? YAMLSeq.YAMLSeq; + const seq = new NodeClass(ctx.schema); + if (ctx.atRoot) + ctx.atRoot = false; + let offset = bs.offset; + let commentEnd = null; + for (const { start, value } of bs.items) { + const props = resolveProps.resolveProps(start, { + indicator: 'seq-item-ind', + next: value, + offset, + onError, + startOnNewline: true + }); + if (!props.found) { + if (props.anchor || props.tag || value) { + if (value && value.type === 'block-seq') + onError(props.end, 'BAD_INDENT', 'All sequence items must start at the same column'); + else + onError(offset, 'MISSING_CHAR', 'Sequence item without - indicator'); + } + else { + commentEnd = props.end; + if (props.comment) + seq.comment = props.comment; + continue; + } + } + const node = value + ? composeNode(ctx, value, props, onError) + : composeEmptyNode(ctx, props.end, start, null, props, onError); + if (ctx.schema.compat) + utilFlowIndentCheck.flowIndentCheck(bs.indent, value, onError); + offset = node.range[2]; + seq.items.push(node); + } + seq.range = [bs.offset, offset, commentEnd ?? offset]; + return seq; +} + +exports.resolveBlockSeq = resolveBlockSeq; + + +/***/ }), + +/***/ 1250: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +function resolveEnd(end, offset, reqSpace, onError) { + let comment = ''; + if (end) { + let hasSpace = false; + let sep = ''; + for (const token of end) { + const { source, type } = token; + switch (type) { + case 'space': + hasSpace = true; + break; + case 'comment': { + if (reqSpace && !hasSpace) + onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters'); + const cb = source.substring(1) || ' '; + if (!comment) + comment = cb; + else + comment += sep + cb; + sep = ''; + break; + } + case 'newline': + if (comment) + sep += source; + hasSpace = true; + break; + default: + onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${type} at node end`); + } + offset += source.length; + } + } + return { comment, offset }; +} + +exports.resolveEnd = resolveEnd; + + +/***/ }), + +/***/ 45: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var identity = __nccwpck_require__(5589); +var Pair = __nccwpck_require__(246); +var YAMLMap = __nccwpck_require__(6011); +var YAMLSeq = __nccwpck_require__(5161); +var resolveEnd = __nccwpck_require__(1250); +var resolveProps = __nccwpck_require__(6985); +var utilContainsNewline = __nccwpck_require__(976); +var utilMapIncludes = __nccwpck_require__(6899); + +const blockMsg = 'Block collections are not allowed within flow collections'; +const isBlock = (token) => token && (token.type === 'block-map' || token.type === 'block-seq'); +function resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onError, tag) { + const isMap = fc.start.source === '{'; + const fcName = isMap ? 'flow map' : 'flow sequence'; + const NodeClass = (tag?.nodeClass ?? (isMap ? 
YAMLMap.YAMLMap : YAMLSeq.YAMLSeq)); + const coll = new NodeClass(ctx.schema); + coll.flow = true; + const atRoot = ctx.atRoot; + if (atRoot) + ctx.atRoot = false; + let offset = fc.offset + fc.start.source.length; + for (let i = 0; i < fc.items.length; ++i) { + const collItem = fc.items[i]; + const { start, key, sep, value } = collItem; + const props = resolveProps.resolveProps(start, { + flow: fcName, + indicator: 'explicit-key-ind', + next: key ?? sep?.[0], + offset, + onError, + startOnNewline: false + }); + if (!props.found) { + if (!props.anchor && !props.tag && !sep && !value) { + if (i === 0 && props.comma) + onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`); + else if (i < fc.items.length - 1) + onError(props.start, 'UNEXPECTED_TOKEN', `Unexpected empty item in ${fcName}`); + if (props.comment) { + if (coll.comment) + coll.comment += '\n' + props.comment; + else + coll.comment = props.comment; + } + offset = props.end; + continue; + } + if (!isMap && ctx.options.strict && utilContainsNewline.containsNewline(key)) + onError(key, // checked by containsNewline() + 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line'); + } + if (i === 0) { + if (props.comma) + onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`); + } + else { + if (!props.comma) + onError(props.start, 'MISSING_CHAR', `Missing , between ${fcName} items`); + if (props.comment) { + let prevItemComment = ''; + loop: for (const st of start) { + switch (st.type) { + case 'comma': + case 'space': + break; + case 'comment': + prevItemComment = st.source.substring(1); + break loop; + default: + break loop; + } + } + if (prevItemComment) { + let prev = coll.items[coll.items.length - 1]; + if (identity.isPair(prev)) + prev = prev.value ?? prev.key; + if (prev.comment) + prev.comment += '\n' + prevItemComment; + else + prev.comment = prevItemComment; + props.comment = props.comment.substring(prevItemComment.length + 1); + } + } + } + if (!isMap && !sep && !props.found) { + // item is a value in a seq + // → key & sep are empty, start does not include ? or : + const valueNode = value + ? composeNode(ctx, value, props, onError) + : composeEmptyNode(ctx, props.end, sep, null, props, onError); + coll.items.push(valueNode); + offset = valueNode.range[2]; + if (isBlock(value)) + onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg); + } + else { + // item is a key+value pair + // key value + const keyStart = props.end; + const keyNode = key + ? composeNode(ctx, key, props, onError) + : composeEmptyNode(ctx, keyStart, start, null, props, onError); + if (isBlock(key)) + onError(keyNode.range, 'BLOCK_IN_FLOW', blockMsg); + // value properties + const valueProps = resolveProps.resolveProps(sep ?? 
[], { + flow: fcName, + indicator: 'map-value-ind', + next: value, + offset: keyNode.range[2], + onError, + startOnNewline: false + }); + if (valueProps.found) { + if (!isMap && !props.found && ctx.options.strict) { + if (sep) + for (const st of sep) { + if (st === valueProps.found) + break; + if (st.type === 'newline') { + onError(st, 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line'); + break; + } + } + if (props.start < valueProps.found.offset - 1024) + onError(valueProps.found, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit flow sequence key'); + } + } + else if (value) { + if ('source' in value && value.source && value.source[0] === ':') + onError(value, 'MISSING_CHAR', `Missing space after : in ${fcName}`); + else + onError(valueProps.start, 'MISSING_CHAR', `Missing , or : between ${fcName} items`); + } + // value value + const valueNode = value + ? composeNode(ctx, value, valueProps, onError) + : valueProps.found + ? composeEmptyNode(ctx, valueProps.end, sep, null, valueProps, onError) + : null; + if (valueNode) { + if (isBlock(value)) + onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg); + } + else if (valueProps.comment) { + if (keyNode.comment) + keyNode.comment += '\n' + valueProps.comment; + else + keyNode.comment = valueProps.comment; + } + const pair = new Pair.Pair(keyNode, valueNode); + if (ctx.options.keepSourceTokens) + pair.srcToken = collItem; + if (isMap) { + const map = coll; + if (utilMapIncludes.mapIncludes(ctx, map.items, keyNode)) + onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique'); + map.items.push(pair); + } + else { + const map = new YAMLMap.YAMLMap(ctx.schema); + map.flow = true; + map.items.push(pair); + coll.items.push(map); + } + offset = valueNode ? valueNode.range[2] : valueProps.end; + } + } + const expectedEnd = isMap ? '}' : ']'; + const [ce, ...ee] = fc.end; + let cePos = offset; + if (ce && ce.source === expectedEnd) + cePos = ce.offset + ce.source.length; + else { + const name = fcName[0].toUpperCase() + fcName.substring(1); + const msg = atRoot + ? `${name} must end with a ${expectedEnd}` + : `${name} in block collection must be sufficiently indented and end with a ${expectedEnd}`; + onError(offset, atRoot ? 
'MISSING_CHAR' : 'BAD_INDENT', msg); + if (ce && ce.source.length !== 1) + ee.unshift(ce); + } + if (ee.length > 0) { + const end = resolveEnd.resolveEnd(ee, cePos, ctx.options.strict, onError); + if (end.comment) { + if (coll.comment) + coll.comment += '\n' + end.comment; + else + coll.comment = end.comment; + } + coll.range = [fc.offset, cePos, end.offset]; + } + else { + coll.range = [fc.offset, cePos, cePos]; + } + return coll; +} + +exports.resolveFlowCollection = resolveFlowCollection; + + +/***/ }), + +/***/ 7578: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var Scalar = __nccwpck_require__(9338); +var resolveEnd = __nccwpck_require__(1250); + +function resolveFlowScalar(scalar, strict, onError) { + const { offset, type, source, end } = scalar; + let _type; + let value; + const _onError = (rel, code, msg) => onError(offset + rel, code, msg); + switch (type) { + case 'scalar': + _type = Scalar.Scalar.PLAIN; + value = plainValue(source, _onError); + break; + case 'single-quoted-scalar': + _type = Scalar.Scalar.QUOTE_SINGLE; + value = singleQuotedValue(source, _onError); + break; + case 'double-quoted-scalar': + _type = Scalar.Scalar.QUOTE_DOUBLE; + value = doubleQuotedValue(source, _onError); + break; + /* istanbul ignore next should not happen */ + default: + onError(scalar, 'UNEXPECTED_TOKEN', `Expected a flow scalar value, but found: ${type}`); + return { + value: '', + type: null, + comment: '', + range: [offset, offset + source.length, offset + source.length] + }; + } + const valueEnd = offset + source.length; + const re = resolveEnd.resolveEnd(end, valueEnd, strict, onError); + return { + value, + type: _type, + comment: re.comment, + range: [offset, valueEnd, re.offset] + }; +} +function plainValue(source, onError) { + let badChar = ''; + switch (source[0]) { + /* istanbul ignore next should not happen */ + case '\t': + badChar = 'a tab character'; + break; + case ',': + badChar = 'flow indicator character ,'; + break; + case '%': + badChar = 'directive indicator character %'; + break; + case '|': + case '>': { + badChar = `block scalar indicator ${source[0]}`; + break; + } + case '@': + case '`': { + badChar = `reserved character ${source[0]}`; + break; + } + } + if (badChar) + onError(0, 'BAD_SCALAR_START', `Plain value cannot start with ${badChar}`); + return foldLines(source); +} +function singleQuotedValue(source, onError) { + if (source[source.length - 1] !== "'" || source.length === 1) + onError(source.length, 'MISSING_CHAR', "Missing closing 'quote"); + return foldLines(source.slice(1, -1)).replace(/''/g, "'"); +} +function foldLines(source) { + /** + * The negative lookbehind here and in the `re` RegExp is to + * prevent causing a polynomial search time in certain cases. + * + * The try-catch is for Safari, which doesn't support this yet: + * https://caniuse.com/js-regexp-lookbehind + */ + let first, line; + try { + first = new RegExp('(.*?)(? wsStart ? source.slice(wsStart, i + 1) : ch; + } + else { + res += ch; + } + } + if (source[source.length - 1] !== '"' || source.length === 1) + onError(source.length, 'MISSING_CHAR', 'Missing closing "quote'); + return res; +} +/** + * Fold a single newline into a space, multiple newlines to N - 1 newlines. 
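+ * A rough worked example of the loop below: foldNewline('a\n  b', 1) yields
+ * { fold: ' ', offset: 3 }, and foldNewline('a\n\nb', 1) yields { fold: '\n', offset: 2 }.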
+ * Presumes `source[offset] === '\n'` + */ +function foldNewline(source, offset) { + let fold = ''; + let ch = source[offset + 1]; + while (ch === ' ' || ch === '\t' || ch === '\n' || ch === '\r') { + if (ch === '\r' && source[offset + 2] !== '\n') + break; + if (ch === '\n') + fold += '\n'; + offset += 1; + ch = source[offset + 1]; + } + if (!fold) + fold = ' '; + return { fold, offset }; +} +const escapeCodes = { + '0': '\0', + a: '\x07', + b: '\b', + e: '\x1b', + f: '\f', + n: '\n', + r: '\r', + t: '\t', + v: '\v', + N: '\u0085', + _: '\u00a0', + L: '\u2028', + P: '\u2029', + ' ': ' ', + '"': '"', + '/': '/', + '\\': '\\', + '\t': '\t' +}; +function parseCharCode(source, offset, length, onError) { + const cc = source.substr(offset, length); + const ok = cc.length === length && /^[0-9a-fA-F]+$/.test(cc); + const code = ok ? parseInt(cc, 16) : NaN; + if (isNaN(code)) { + const raw = source.substr(offset - 2, length + 2); + onError(offset - 2, 'BAD_DQ_ESCAPE', `Invalid escape sequence ${raw}`); + return raw; + } + return String.fromCodePoint(code); +} + +exports.resolveFlowScalar = resolveFlowScalar; + + +/***/ }), + +/***/ 6985: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +function resolveProps(tokens, { flow, indicator, next, offset, onError, startOnNewline }) { + let spaceBefore = false; + let atNewline = startOnNewline; + let hasSpace = startOnNewline; + let comment = ''; + let commentSep = ''; + let hasNewline = false; + let hasNewlineAfterProp = false; + let reqSpace = false; + let anchor = null; + let tag = null; + let comma = null; + let found = null; + let start = null; + for (const token of tokens) { + if (reqSpace) { + if (token.type !== 'space' && + token.type !== 'newline' && + token.type !== 'comma') + onError(token.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space'); + reqSpace = false; + } + switch (token.type) { + case 'space': + // At the doc level, tabs at line start may be parsed + // as leading white space rather than indentation. + // In a flow collection, only the parser handles indent. 
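+ // For example, a tab before "key: value" at the start of a block-context line
+ // reaches this branch and is reported as TAB_AS_INDENT instead of counting as indentation.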
+ if (!flow && + atNewline && + indicator !== 'doc-start' && + token.source[0] === '\t') + onError(token, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation'); + hasSpace = true; + break; + case 'comment': { + if (!hasSpace) + onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters'); + const cb = token.source.substring(1) || ' '; + if (!comment) + comment = cb; + else + comment += commentSep + cb; + commentSep = ''; + atNewline = false; + break; + } + case 'newline': + if (atNewline) { + if (comment) + comment += token.source; + else + spaceBefore = true; + } + else + commentSep += token.source; + atNewline = true; + hasNewline = true; + if (anchor || tag) + hasNewlineAfterProp = true; + hasSpace = true; + break; + case 'anchor': + if (anchor) + onError(token, 'MULTIPLE_ANCHORS', 'A node can have at most one anchor'); + if (token.source.endsWith(':')) + onError(token.offset + token.source.length - 1, 'BAD_ALIAS', 'Anchor ending in : is ambiguous', true); + anchor = token; + if (start === null) + start = token.offset; + atNewline = false; + hasSpace = false; + reqSpace = true; + break; + case 'tag': { + if (tag) + onError(token, 'MULTIPLE_TAGS', 'A node can have at most one tag'); + tag = token; + if (start === null) + start = token.offset; + atNewline = false; + hasSpace = false; + reqSpace = true; + break; + } + case indicator: + // Could here handle preceding comments differently + if (anchor || tag) + onError(token, 'BAD_PROP_ORDER', `Anchors and tags must be after the ${token.source} indicator`); + if (found) + onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.source} in ${flow ?? 'collection'}`); + found = token; + atNewline = false; + hasSpace = false; + break; + case 'comma': + if (flow) { + if (comma) + onError(token, 'UNEXPECTED_TOKEN', `Unexpected , in ${flow}`); + comma = token; + atNewline = false; + hasSpace = false; + break; + } + // else fallthrough + default: + onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.type} token`); + atNewline = false; + hasSpace = false; + } + } + const last = tokens[tokens.length - 1]; + const end = last ? last.offset + last.source.length : offset; + if (reqSpace && + next && + next.type !== 'space' && + next.type !== 'newline' && + next.type !== 'comma' && + (next.type !== 'scalar' || next.source !== '')) + onError(next.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space'); + return { + comma, + found, + spaceBefore, + comment, + hasNewline, + hasNewlineAfterProp, + anchor, + tag, + end, + start: start ?? 
end + }; +} + +exports.resolveProps = resolveProps; + + +/***/ }), + +/***/ 976: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +function containsNewline(key) { + if (!key) + return null; + switch (key.type) { + case 'alias': + case 'scalar': + case 'double-quoted-scalar': + case 'single-quoted-scalar': + if (key.source.includes('\n')) + return true; + if (key.end) + for (const st of key.end) + if (st.type === 'newline') + return true; + return false; + case 'flow-collection': + for (const it of key.items) { + for (const st of it.start) + if (st.type === 'newline') + return true; + if (it.sep) + for (const st of it.sep) + if (st.type === 'newline') + return true; + if (containsNewline(it.key) || containsNewline(it.value)) + return true; + } + return false; + default: + return true; + } +} + +exports.containsNewline = containsNewline; + + +/***/ }), + +/***/ 8781: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +function emptyScalarPosition(offset, before, pos) { + if (before) { + if (pos === null) + pos = before.length; + for (let i = pos - 1; i >= 0; --i) { + let st = before[i]; + switch (st.type) { + case 'space': + case 'comment': + case 'newline': + offset -= st.source.length; + continue; + } + // Technically, an empty scalar is immediately after the last non-empty + // node, but it's more useful to place it after any whitespace. + st = before[++i]; + while (st?.type === 'space') { + offset += st.source.length; + st = before[++i]; + } + break; + } + } + return offset; +} + +exports.emptyScalarPosition = emptyScalarPosition; + + +/***/ }), + +/***/ 3669: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var utilContainsNewline = __nccwpck_require__(976); + +function flowIndentCheck(indent, fc, onError) { + if (fc?.type === 'flow-collection') { + const end = fc.end[0]; + if (end.indent === indent && + (end.source === ']' || end.source === '}') && + utilContainsNewline.containsNewline(fc)) { + const msg = 'Flow end indicator should be more indented than parent'; + onError(end, 'BAD_INDENT', msg, true); + } + } +} + +exports.flowIndentCheck = flowIndentCheck; + + +/***/ }), + +/***/ 6899: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var identity = __nccwpck_require__(5589); + +function mapIncludes(ctx, items, search) { + const { uniqueKeys } = ctx.options; + if (uniqueKeys === false) + return false; + const isEqual = typeof uniqueKeys === 'function' + ? 
uniqueKeys + : (a, b) => a === b || + (identity.isScalar(a) && + identity.isScalar(b) && + a.value === b.value && + !(a.value === '<<' && ctx.schema.merge)); + return items.some(pair => isEqual(pair.key, search)); +} + +exports.mapIncludes = mapIncludes; + + +/***/ }), + +/***/ 42: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var Alias = __nccwpck_require__(5639); +var Collection = __nccwpck_require__(3466); +var identity = __nccwpck_require__(5589); +var Pair = __nccwpck_require__(246); +var toJS = __nccwpck_require__(2463); +var Schema = __nccwpck_require__(6831); +var stringifyDocument = __nccwpck_require__(5225); +var anchors = __nccwpck_require__(8459); +var applyReviver = __nccwpck_require__(3412); +var createNode = __nccwpck_require__(9652); +var directives = __nccwpck_require__(5400); + +class Document { + constructor(value, replacer, options) { + /** A comment before this Document */ + this.commentBefore = null; + /** A comment immediately after this Document */ + this.comment = null; + /** Errors encountered during parsing. */ + this.errors = []; + /** Warnings encountered during parsing. */ + this.warnings = []; + Object.defineProperty(this, identity.NODE_TYPE, { value: identity.DOC }); + let _replacer = null; + if (typeof replacer === 'function' || Array.isArray(replacer)) { + _replacer = replacer; + } + else if (options === undefined && replacer) { + options = replacer; + replacer = undefined; + } + const opt = Object.assign({ + intAsBigInt: false, + keepSourceTokens: false, + logLevel: 'warn', + prettyErrors: true, + strict: true, + uniqueKeys: true, + version: '1.2' + }, options); + this.options = opt; + let { version } = opt; + if (options?._directives) { + this.directives = options._directives.atDocument(); + if (this.directives.yaml.explicit) + version = this.directives.yaml.version; + } + else + this.directives = new directives.Directives({ version }); + this.setSchema(version, options); + // @ts-expect-error We can't really know that this matches Contents. + this.contents = + value === undefined ? null : this.createNode(value, _replacer, options); + } + /** + * Create a deep copy of this Document and its contents. + * + * Custom Node values that inherit from `Object` still refer to their original instances. + */ + clone() { + const copy = Object.create(Document.prototype, { + [identity.NODE_TYPE]: { value: identity.DOC } + }); + copy.commentBefore = this.commentBefore; + copy.comment = this.comment; + copy.errors = this.errors.slice(); + copy.warnings = this.warnings.slice(); + copy.options = Object.assign({}, this.options); + if (this.directives) + copy.directives = this.directives.clone(); + copy.schema = this.schema.clone(); + // @ts-expect-error We can't really know that this matches Contents. + copy.contents = identity.isNode(this.contents) + ? this.contents.clone(copy.schema) + : this.contents; + if (this.range) + copy.range = this.range.slice(); + return copy; + } + /** Adds a value to the document. */ + add(value) { + if (assertCollection(this.contents)) + this.contents.add(value); + } + /** Adds a value to the document. */ + addIn(path, value) { + if (assertCollection(this.contents)) + this.contents.addIn(path, value); + } + /** + * Create a new `Alias` node, ensuring that the target `node` has the required anchor. + * + * If `node` already has an anchor, `name` is ignored. 
+ * Otherwise, the `node.anchor` value will be set to `name`, + * or if an anchor with that name is already present in the document, + * `name` will be used as a prefix for a new unique anchor. + * If `name` is undefined, the generated anchor will use 'a' as a prefix. + */ + createAlias(node, name) { + if (!node.anchor) { + const prev = anchors.anchorNames(this); + node.anchor = + // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing + !name || prev.has(name) ? anchors.findNewAnchor(name || 'a', prev) : name; + } + return new Alias.Alias(node.anchor); + } + createNode(value, replacer, options) { + let _replacer = undefined; + if (typeof replacer === 'function') { + value = replacer.call({ '': value }, '', value); + _replacer = replacer; + } + else if (Array.isArray(replacer)) { + const keyToStr = (v) => typeof v === 'number' || v instanceof String || v instanceof Number; + const asStr = replacer.filter(keyToStr).map(String); + if (asStr.length > 0) + replacer = replacer.concat(asStr); + _replacer = replacer; + } + else if (options === undefined && replacer) { + options = replacer; + replacer = undefined; + } + const { aliasDuplicateObjects, anchorPrefix, flow, keepUndefined, onTagObj, tag } = options ?? {}; + const { onAnchor, setAnchors, sourceObjects } = anchors.createNodeAnchors(this, + // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing + anchorPrefix || 'a'); + const ctx = { + aliasDuplicateObjects: aliasDuplicateObjects ?? true, + keepUndefined: keepUndefined ?? false, + onAnchor, + onTagObj, + replacer: _replacer, + schema: this.schema, + sourceObjects + }; + const node = createNode.createNode(value, tag, ctx); + if (flow && identity.isCollection(node)) + node.flow = true; + setAnchors(); + return node; + } + /** + * Convert a key and a value into a `Pair` using the current schema, + * recursively wrapping all values as `Scalar` or `Collection` nodes. + */ + createPair(key, value, options = {}) { + const k = this.createNode(key, null, options); + const v = this.createNode(value, null, options); + return new Pair.Pair(k, v); + } + /** + * Removes a value from the document. + * @returns `true` if the item was found and removed. + */ + delete(key) { + return assertCollection(this.contents) ? this.contents.delete(key) : false; + } + /** + * Removes a value from the document. + * @returns `true` if the item was found and removed. + */ + deleteIn(path) { + if (Collection.isEmptyPath(path)) { + if (this.contents == null) + return false; + // @ts-expect-error Presumed impossible if Strict extends false + this.contents = null; + return true; + } + return assertCollection(this.contents) + ? this.contents.deleteIn(path) + : false; + } + /** + * Returns item at `key`, or `undefined` if not found. By default unwraps + * scalar values from their surrounding node; to disable set `keepScalar` to + * `true` (collections are always returned intact). + */ + get(key, keepScalar) { + return identity.isCollection(this.contents) + ? this.contents.get(key, keepScalar) + : undefined; + } + /** + * Returns item at `path`, or `undefined` if not found. By default unwraps + * scalar values from their surrounding node; to disable set `keepScalar` to + * `true` (collections are always returned intact). + */ + getIn(path, keepScalar) { + if (Collection.isEmptyPath(path)) + return !keepScalar && identity.isScalar(this.contents) + ? this.contents.value + : this.contents; + return identity.isCollection(this.contents) + ? 
this.contents.getIn(path, keepScalar) + : undefined; + } + /** + * Checks if the document includes a value with the key `key`. + */ + has(key) { + return identity.isCollection(this.contents) ? this.contents.has(key) : false; + } + /** + * Checks if the document includes a value at `path`. + */ + hasIn(path) { + if (Collection.isEmptyPath(path)) + return this.contents !== undefined; + return identity.isCollection(this.contents) ? this.contents.hasIn(path) : false; + } + /** + * Sets a value in this document. For `!!set`, `value` needs to be a + * boolean to add/remove the item from the set. + */ + set(key, value) { + if (this.contents == null) { + // @ts-expect-error We can't really know that this matches Contents. + this.contents = Collection.collectionFromPath(this.schema, [key], value); + } + else if (assertCollection(this.contents)) { + this.contents.set(key, value); + } + } + /** + * Sets a value in this document. For `!!set`, `value` needs to be a + * boolean to add/remove the item from the set. + */ + setIn(path, value) { + if (Collection.isEmptyPath(path)) { + // @ts-expect-error We can't really know that this matches Contents. + this.contents = value; + } + else if (this.contents == null) { + // @ts-expect-error We can't really know that this matches Contents. + this.contents = Collection.collectionFromPath(this.schema, Array.from(path), value); + } + else if (assertCollection(this.contents)) { + this.contents.setIn(path, value); + } + } + /** + * Change the YAML version and schema used by the document. + * A `null` version disables support for directives, explicit tags, anchors, and aliases. + * It also requires the `schema` option to be given as a `Schema` instance value. + * + * Overrides all previously set schema options. + */ + setSchema(version, options = {}) { + if (typeof version === 'number') + version = String(version); + let opt; + switch (version) { + case '1.1': + if (this.directives) + this.directives.yaml.version = '1.1'; + else + this.directives = new directives.Directives({ version: '1.1' }); + opt = { merge: true, resolveKnownTags: false, schema: 'yaml-1.1' }; + break; + case '1.2': + case 'next': + if (this.directives) + this.directives.yaml.version = version; + else + this.directives = new directives.Directives({ version }); + opt = { merge: false, resolveKnownTags: true, schema: 'core' }; + break; + case null: + if (this.directives) + delete this.directives; + opt = null; + break; + default: { + const sv = JSON.stringify(version); + throw new Error(`Expected '1.1', '1.2' or null as first argument, but found: ${sv}`); + } + } + // Not using `instanceof Schema` to allow for duck typing + if (options.schema instanceof Object) + this.schema = options.schema; + else if (opt) + this.schema = new Schema.Schema(Object.assign(opt, options)); + else + throw new Error(`With a null YAML version, the { schema: Schema } option is required`); + } + // json & jsonArg are only used from toJSON() + toJS({ json, jsonArg, mapAsMap, maxAliasCount, onAnchor, reviver } = {}) { + const ctx = { + anchors: new Map(), + doc: this, + keep: !json, + mapAsMap: mapAsMap === true, + mapKeyWarned: false, + maxAliasCount: typeof maxAliasCount === 'number' ? maxAliasCount : 100 + }; + const res = toJS.toJS(this.contents, jsonArg ?? '', ctx); + if (typeof onAnchor === 'function') + for (const { count, res } of ctx.anchors.values()) + onAnchor(res, count); + return typeof reviver === 'function' + ? 
applyReviver.applyReviver(reviver, { '': res }, '', res) + : res; + } + /** + * A JSON representation of the document `contents`. + * + * @param jsonArg Used by `JSON.stringify` to indicate the array index or + * property name. + */ + toJSON(jsonArg, onAnchor) { + return this.toJS({ json: true, jsonArg, mapAsMap: false, onAnchor }); + } + /** A YAML representation of the document. */ + toString(options = {}) { + if (this.errors.length > 0) + throw new Error('Document with errors cannot be stringified'); + if ('indent' in options && + (!Number.isInteger(options.indent) || Number(options.indent) <= 0)) { + const s = JSON.stringify(options.indent); + throw new Error(`"indent" option must be a positive integer, not ${s}`); + } + return stringifyDocument.stringifyDocument(this, options); + } +} +function assertCollection(contents) { + if (identity.isCollection(contents)) + return true; + throw new Error('Expected a YAML collection as document contents'); +} + +exports.Document = Document; + + +/***/ }), + +/***/ 8459: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var identity = __nccwpck_require__(5589); +var visit = __nccwpck_require__(6796); + +/** + * Verify that the input string is a valid anchor. + * + * Will throw on errors. + */ +function anchorIsValid(anchor) { + if (/[\x00-\x19\s,[\]{}]/.test(anchor)) { + const sa = JSON.stringify(anchor); + const msg = `Anchor must not contain whitespace or control characters: ${sa}`; + throw new Error(msg); + } + return true; +} +function anchorNames(root) { + const anchors = new Set(); + visit.visit(root, { + Value(_key, node) { + if (node.anchor) + anchors.add(node.anchor); + } + }); + return anchors; +} +/** Find a new anchor name with the given `prefix` and a one-indexed suffix. */ +function findNewAnchor(prefix, exclude) { + for (let i = 1; true; ++i) { + const name = `${prefix}${i}`; + if (!exclude.has(name)) + return name; + } +} +function createNodeAnchors(doc, prefix) { + const aliasObjects = []; + const sourceObjects = new Map(); + let prevAnchors = null; + return { + onAnchor: (source) => { + aliasObjects.push(source); + if (!prevAnchors) + prevAnchors = anchorNames(doc); + const anchor = findNewAnchor(prefix, prevAnchors); + prevAnchors.add(anchor); + return anchor; + }, + /** + * With circular references, the source node is only resolved after all + * of its child nodes are. This is why anchors are set only after all of + * the nodes have been created. + */ + setAnchors: () => { + for (const source of aliasObjects) { + const ref = sourceObjects.get(source); + if (typeof ref === 'object' && + ref.anchor && + (identity.isScalar(ref.node) || identity.isCollection(ref.node))) { + ref.node.anchor = ref.anchor; + } + else { + const error = new Error('Failed to resolve repeated object (this should not happen)'); + error.source = source; + throw error; + } + } + }, + sourceObjects + }; +} + +exports.anchorIsValid = anchorIsValid; +exports.anchorNames = anchorNames; +exports.createNodeAnchors = createNodeAnchors; +exports.findNewAnchor = findNewAnchor; + + +/***/ }), + +/***/ 3412: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +/** + * Applies the JSON.parse reviver algorithm as defined in the ECMA-262 spec, + * in section 24.5.1.1 "Runtime Semantics: InternalizeJSONProperty" of the + * 2021 edition: https://tc39.es/ecma262/#sec-json.parse + * + * Includes extensions for handling Map and Set objects. 
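+ *
+ * @example
+ * // usage sketch (`data` is any already-parsed value; this reviver drops null leaves)
+ * const reviver = (_key, value) => (value === null ? undefined : value);
+ * applyReviver(reviver, { '': data }, '', data);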
+ */ +function applyReviver(reviver, obj, key, val) { + if (val && typeof val === 'object') { + if (Array.isArray(val)) { + for (let i = 0, len = val.length; i < len; ++i) { + const v0 = val[i]; + const v1 = applyReviver(reviver, val, String(i), v0); + if (v1 === undefined) + delete val[i]; + else if (v1 !== v0) + val[i] = v1; + } + } + else if (val instanceof Map) { + for (const k of Array.from(val.keys())) { + const v0 = val.get(k); + const v1 = applyReviver(reviver, val, k, v0); + if (v1 === undefined) + val.delete(k); + else if (v1 !== v0) + val.set(k, v1); + } + } + else if (val instanceof Set) { + for (const v0 of Array.from(val)) { + const v1 = applyReviver(reviver, val, v0, v0); + if (v1 === undefined) + val.delete(v0); + else if (v1 !== v0) { + val.delete(v0); + val.add(v1); + } + } + } + else { + for (const [k, v0] of Object.entries(val)) { + const v1 = applyReviver(reviver, val, k, v0); + if (v1 === undefined) + delete val[k]; + else if (v1 !== v0) + val[k] = v1; + } + } + } + return reviver.call(obj, key, val); +} + +exports.applyReviver = applyReviver; + + +/***/ }), + +/***/ 9652: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var Alias = __nccwpck_require__(5639); +var identity = __nccwpck_require__(5589); +var Scalar = __nccwpck_require__(9338); + +const defaultTagPrefix = 'tag:yaml.org,2002:'; +function findTagObject(value, tagName, tags) { + if (tagName) { + const match = tags.filter(t => t.tag === tagName); + const tagObj = match.find(t => !t.format) ?? match[0]; + if (!tagObj) + throw new Error(`Tag ${tagName} not found`); + return tagObj; + } + return tags.find(t => t.identify?.(value) && !t.format); +} +function createNode(value, tagName, ctx) { + if (identity.isDocument(value)) + value = value.contents; + if (identity.isNode(value)) + return value; + if (identity.isPair(value)) { + const map = ctx.schema[identity.MAP].createNode?.(ctx.schema, null, ctx); + map.items.push(value); + return map; + } + if (value instanceof String || + value instanceof Number || + value instanceof Boolean || + (typeof BigInt !== 'undefined' && value instanceof BigInt) // not supported everywhere + ) { + // https://tc39.es/ecma262/#sec-serializejsonproperty + value = value.valueOf(); + } + const { aliasDuplicateObjects, onAnchor, onTagObj, schema, sourceObjects } = ctx; + // Detect duplicate references to the same object & use Alias nodes for all + // after first. The `ref` wrapper allows for circular references to resolve. + let ref = undefined; + if (aliasDuplicateObjects && value && typeof value === 'object') { + ref = sourceObjects.get(value); + if (ref) { + if (!ref.anchor) + ref.anchor = onAnchor(value); + return new Alias.Alias(ref.anchor); + } + else { + ref = { anchor: null, node: null }; + sourceObjects.set(value, ref); + } + } + if (tagName?.startsWith('!!')) + tagName = defaultTagPrefix + tagName.slice(2); + let tagObj = findTagObject(value, tagName, schema.tags); + if (!tagObj) { + if (value && typeof value.toJSON === 'function') { + // eslint-disable-next-line @typescript-eslint/no-unsafe-call + value = value.toJSON(); + } + if (!value || typeof value !== 'object') { + const node = new Scalar.Scalar(value); + if (ref) + ref.node = node; + return node; + } + tagObj = + value instanceof Map + ? schema[identity.MAP] + : Symbol.iterator in Object(value) + ? schema[identity.SEQ] + : schema[identity.MAP]; + } + if (onTagObj) { + onTagObj(tagObj); + delete ctx.onTagObj; + } + const node = tagObj?.createNode + ? 
tagObj.createNode(ctx.schema, value, ctx) + : typeof tagObj?.nodeClass?.from === 'function' + ? tagObj.nodeClass.from(ctx.schema, value, ctx) + : new Scalar.Scalar(value); + if (tagName) + node.tag = tagName; + else if (!tagObj.default) + node.tag = tagObj.tag; + if (ref) + ref.node = node; + return node; +} + +exports.createNode = createNode; + + +/***/ }), + +/***/ 5400: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var identity = __nccwpck_require__(5589); +var visit = __nccwpck_require__(6796); + +const escapeChars = { + '!': '%21', + ',': '%2C', + '[': '%5B', + ']': '%5D', + '{': '%7B', + '}': '%7D' +}; +const escapeTagName = (tn) => tn.replace(/[!,[\]{}]/g, ch => escapeChars[ch]); +class Directives { + constructor(yaml, tags) { + /** + * The directives-end/doc-start marker `---`. If `null`, a marker may still be + * included in the document's stringified representation. + */ + this.docStart = null; + /** The doc-end marker `...`. */ + this.docEnd = false; + this.yaml = Object.assign({}, Directives.defaultYaml, yaml); + this.tags = Object.assign({}, Directives.defaultTags, tags); + } + clone() { + const copy = new Directives(this.yaml, this.tags); + copy.docStart = this.docStart; + return copy; + } + /** + * During parsing, get a Directives instance for the current document and + * update the stream state according to the current version's spec. + */ + atDocument() { + const res = new Directives(this.yaml, this.tags); + switch (this.yaml.version) { + case '1.1': + this.atNextDocument = true; + break; + case '1.2': + this.atNextDocument = false; + this.yaml = { + explicit: Directives.defaultYaml.explicit, + version: '1.2' + }; + this.tags = Object.assign({}, Directives.defaultTags); + break; + } + return res; + } + /** + * @param onError - May be called even if the action was successful + * @returns `true` on success + */ + add(line, onError) { + if (this.atNextDocument) { + this.yaml = { explicit: Directives.defaultYaml.explicit, version: '1.1' }; + this.tags = Object.assign({}, Directives.defaultTags); + this.atNextDocument = false; + } + const parts = line.trim().split(/[ \t]+/); + const name = parts.shift(); + switch (name) { + case '%TAG': { + if (parts.length !== 2) { + onError(0, '%TAG directive should contain exactly two parts'); + if (parts.length < 2) + return false; + } + const [handle, prefix] = parts; + this.tags[handle] = prefix; + return true; + } + case '%YAML': { + this.yaml.explicit = true; + if (parts.length !== 1) { + onError(0, '%YAML directive should contain exactly one part'); + return false; + } + const [version] = parts; + if (version === '1.1' || version === '1.2') { + this.yaml.version = version; + return true; + } + else { + const isValid = /^\d+\.\d+$/.test(version); + onError(6, `Unsupported YAML version ${version}`, isValid); + return false; + } + } + default: + onError(0, `Unknown directive ${name}`, true); + return false; + } + } + /** + * Resolves a tag, matching handles to those defined in %TAG directives. + * + * @returns Resolved tag, which may also be the non-specific tag `'!'` or a + * `'!local'` tag, or `null` if unresolvable. + */ + tagName(source, onError) { + if (source === '!') + return '!'; // non-specific tag + if (source[0] !== '!') { + onError(`Not a valid tag: ${source}`); + return null; + } + if (source[1] === '<') { + const verbatim = source.slice(2, -1); + if (verbatim === '!' 
|| verbatim === '!!') { + onError(`Verbatim tags aren't resolved, so ${source} is invalid.`); + return null; + } + if (source[source.length - 1] !== '>') + onError('Verbatim tags must end with a >'); + return verbatim; + } + const [, handle, suffix] = source.match(/^(.*!)([^!]*)$/); + if (!suffix) + onError(`The ${source} tag has no suffix`); + const prefix = this.tags[handle]; + if (prefix) + return prefix + decodeURIComponent(suffix); + if (handle === '!') + return source; // local tag + onError(`Could not resolve tag: ${source}`); + return null; + } + /** + * Given a fully resolved tag, returns its printable string form, + * taking into account current tag prefixes and defaults. + */ + tagString(tag) { + for (const [handle, prefix] of Object.entries(this.tags)) { + if (tag.startsWith(prefix)) + return handle + escapeTagName(tag.substring(prefix.length)); + } + return tag[0] === '!' ? tag : `!<${tag}>`; + } + toString(doc) { + const lines = this.yaml.explicit + ? [`%YAML ${this.yaml.version || '1.2'}`] + : []; + const tagEntries = Object.entries(this.tags); + let tagNames; + if (doc && tagEntries.length > 0 && identity.isNode(doc.contents)) { + const tags = {}; + visit.visit(doc.contents, (_key, node) => { + if (identity.isNode(node) && node.tag) + tags[node.tag] = true; + }); + tagNames = Object.keys(tags); + } + else + tagNames = []; + for (const [handle, prefix] of tagEntries) { + if (handle === '!!' && prefix === 'tag:yaml.org,2002:') + continue; + if (!doc || tagNames.some(tn => tn.startsWith(prefix))) + lines.push(`%TAG ${handle} ${prefix}`); + } + return lines.join('\n'); + } +} +Directives.defaultYaml = { explicit: false, version: '1.2' }; +Directives.defaultTags = { '!!': 'tag:yaml.org,2002:' }; + +exports.Directives = Directives; + + +/***/ }), + +/***/ 4236: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +class YAMLError extends Error { + constructor(name, pos, code, message) { + super(); + this.name = name; + this.code = code; + this.message = message; + this.pos = pos; + } +} +class YAMLParseError extends YAMLError { + constructor(pos, code, message) { + super('YAMLParseError', pos, code, message); + } +} +class YAMLWarning extends YAMLError { + constructor(pos, code, message) { + super('YAMLWarning', pos, code, message); + } +} +const prettifyError = (src, lc) => (error) => { + if (error.pos[0] === -1) + return; + error.linePos = error.pos.map(pos => lc.linePos(pos)); + const { line, col } = error.linePos[0]; + error.message += ` at line ${line}, column ${col}`; + let ci = col - 1; + let lineStr = src + .substring(lc.lineStarts[line - 1], lc.lineStarts[line]) + .replace(/[\n\r]+$/, ''); + // Trim to max 80 chars, keeping col position near the middle + if (ci >= 60 && lineStr.length > 80) { + const trimStart = Math.min(ci - 39, lineStr.length - 79); + lineStr = '…' + lineStr.substring(trimStart); + ci -= trimStart - 1; + } + if (lineStr.length > 80) + lineStr = lineStr.substring(0, 79) + '…'; + // Include previous line in context if pointing at line start + if (line > 1 && /^ *$/.test(lineStr.substring(0, ci))) { + // Regexp won't match if start is trimmed + let prev = src.substring(lc.lineStarts[line - 2], lc.lineStarts[line - 1]); + if (prev.length > 80) + prev = prev.substring(0, 79) + '…\n'; + lineStr = prev + lineStr; + } + if (/[^ ]/.test(lineStr)) { + let count = 1; + const end = error.linePos[1]; + if (end && end.line === line && end.col > col) { + count = Math.max(1, Math.min(end.col - col, 80 - ci)); + } + const pointer = ' '.repeat(ci) + 
'^'.repeat(count); + error.message += `:\n\n${lineStr}\n${pointer}\n`; + } +}; + +exports.YAMLError = YAMLError; +exports.YAMLParseError = YAMLParseError; +exports.YAMLWarning = YAMLWarning; +exports.prettifyError = prettifyError; + + +/***/ }), + +/***/ 4083: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var composer = __nccwpck_require__(9493); +var Document = __nccwpck_require__(42); +var Schema = __nccwpck_require__(6831); +var errors = __nccwpck_require__(4236); +var Alias = __nccwpck_require__(5639); +var identity = __nccwpck_require__(5589); +var Pair = __nccwpck_require__(246); +var Scalar = __nccwpck_require__(9338); +var YAMLMap = __nccwpck_require__(6011); +var YAMLSeq = __nccwpck_require__(5161); +var cst = __nccwpck_require__(9169); +var lexer = __nccwpck_require__(5976); +var lineCounter = __nccwpck_require__(1929); +var parser = __nccwpck_require__(3328); +var publicApi = __nccwpck_require__(8649); +var visit = __nccwpck_require__(6796); + + + +exports.Composer = composer.Composer; +exports.Document = Document.Document; +exports.Schema = Schema.Schema; +exports.YAMLError = errors.YAMLError; +exports.YAMLParseError = errors.YAMLParseError; +exports.YAMLWarning = errors.YAMLWarning; +exports.Alias = Alias.Alias; +exports.isAlias = identity.isAlias; +exports.isCollection = identity.isCollection; +exports.isDocument = identity.isDocument; +exports.isMap = identity.isMap; +exports.isNode = identity.isNode; +exports.isPair = identity.isPair; +exports.isScalar = identity.isScalar; +exports.isSeq = identity.isSeq; +exports.Pair = Pair.Pair; +exports.Scalar = Scalar.Scalar; +exports.YAMLMap = YAMLMap.YAMLMap; +exports.YAMLSeq = YAMLSeq.YAMLSeq; +exports.CST = cst; +exports.Lexer = lexer.Lexer; +exports.LineCounter = lineCounter.LineCounter; +exports.Parser = parser.Parser; +exports.parse = publicApi.parse; +exports.parseAllDocuments = publicApi.parseAllDocuments; +exports.parseDocument = publicApi.parseDocument; +exports.stringify = publicApi.stringify; +exports.visit = visit.visit; +exports.visitAsync = visit.visitAsync; + + +/***/ }), + +/***/ 6909: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +function debug(logLevel, ...messages) { + if (logLevel === 'debug') + console.log(...messages); +} +function warn(logLevel, warning) { + if (logLevel === 'debug' || logLevel === 'warn') { + // https://github.com/typescript-eslint/typescript-eslint/issues/7478 + // eslint-disable-next-line @typescript-eslint/prefer-optional-chain + if (typeof process !== 'undefined' && process.emitWarning) + process.emitWarning(warning); + else + console.warn(warning); + } +} + +exports.debug = debug; +exports.warn = warn; + + +/***/ }), + +/***/ 5639: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var anchors = __nccwpck_require__(8459); +var visit = __nccwpck_require__(6796); +var identity = __nccwpck_require__(5589); +var Node = __nccwpck_require__(1399); +var toJS = __nccwpck_require__(2463); + +class Alias extends Node.NodeBase { + constructor(source) { + super(identity.ALIAS); + this.source = source; + Object.defineProperty(this, 'tag', { + set() { + throw new Error('Alias nodes cannot have tags'); + } + }); + } + /** + * Resolve the value of this alias within `doc`, finding the last + * instance of the `source` anchor before this node. 
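+ *
+ * @example
+ * // sketch: for a document parsed from "- &a 1\n- *a", calling resolve(doc)
+ * // on the alias node returns the scalar node anchored as &a (value 1).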
+ */ + resolve(doc) { + let found = undefined; + visit.visit(doc, { + Node: (_key, node) => { + if (node === this) + return visit.visit.BREAK; + if (node.anchor === this.source) + found = node; + } + }); + return found; + } + toJSON(_arg, ctx) { + if (!ctx) + return { source: this.source }; + const { anchors, doc, maxAliasCount } = ctx; + const source = this.resolve(doc); + if (!source) { + const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`; + throw new ReferenceError(msg); + } + let data = anchors.get(source); + if (!data) { + // Resolve anchors for Node.prototype.toJS() + toJS.toJS(source, null, ctx); + data = anchors.get(source); + } + /* istanbul ignore if */ + if (!data || data.res === undefined) { + const msg = 'This should not happen: Alias anchor was not resolved?'; + throw new ReferenceError(msg); + } + if (maxAliasCount >= 0) { + data.count += 1; + if (data.aliasCount === 0) + data.aliasCount = getAliasCount(doc, source, anchors); + if (data.count * data.aliasCount > maxAliasCount) { + const msg = 'Excessive alias count indicates a resource exhaustion attack'; + throw new ReferenceError(msg); + } + } + return data.res; + } + toString(ctx, _onComment, _onChompKeep) { + const src = `*${this.source}`; + if (ctx) { + anchors.anchorIsValid(this.source); + if (ctx.options.verifyAliasOrder && !ctx.anchors.has(this.source)) { + const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`; + throw new Error(msg); + } + if (ctx.implicitKey) + return `${src} `; + } + return src; + } +} +function getAliasCount(doc, node, anchors) { + if (identity.isAlias(node)) { + const source = node.resolve(doc); + const anchor = anchors && source && anchors.get(source); + return anchor ? anchor.count * anchor.aliasCount : 0; + } + else if (identity.isCollection(node)) { + let count = 0; + for (const item of node.items) { + const c = getAliasCount(doc, item, anchors); + if (c > count) + count = c; + } + return count; + } + else if (identity.isPair(node)) { + const kc = getAliasCount(doc, node.key, anchors); + const vc = getAliasCount(doc, node.value, anchors); + return Math.max(kc, vc); + } + return 1; +} + +exports.Alias = Alias; + + +/***/ }), + +/***/ 3466: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var createNode = __nccwpck_require__(9652); +var identity = __nccwpck_require__(5589); +var Node = __nccwpck_require__(1399); + +function collectionFromPath(schema, path, value) { + let v = value; + for (let i = path.length - 1; i >= 0; --i) { + const k = path[i]; + if (typeof k === 'number' && Number.isInteger(k) && k >= 0) { + const a = []; + a[k] = v; + v = a; + } + else { + v = new Map([[k, v]]); + } + } + return createNode.createNode(v, undefined, { + aliasDuplicateObjects: false, + keepUndefined: false, + onAnchor: () => { + throw new Error('This should not happen, please report a bug.'); + }, + schema, + sourceObjects: new Map() + }); +} +// Type guard is intentionally a little wrong so as to be more useful, +// as it does not cover untypable empty non-string iterables (e.g. []). +const isEmptyPath = (path) => path == null || + (typeof path === 'object' && !!path[Symbol.iterator]().next().done); +class Collection extends Node.NodeBase { + constructor(type, schema) { + super(type); + Object.defineProperty(this, 'schema', { + value: schema, + configurable: true, + enumerable: false, + writable: true + }); + } + /** + * Create a copy of this collection. 
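+ * Child nodes and pairs are cloned recursively; non-node items are kept by reference.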
+ * + * @param schema - If defined, overwrites the original's schema + */ + clone(schema) { + const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this)); + if (schema) + copy.schema = schema; + copy.items = copy.items.map(it => identity.isNode(it) || identity.isPair(it) ? it.clone(schema) : it); + if (this.range) + copy.range = this.range.slice(); + return copy; + } + /** + * Adds a value to the collection. For `!!map` and `!!omap` the value must + * be a Pair instance or a `{ key, value }` object, which may not have a key + * that already exists in the map. + */ + addIn(path, value) { + if (isEmptyPath(path)) + this.add(value); + else { + const [key, ...rest] = path; + const node = this.get(key, true); + if (identity.isCollection(node)) + node.addIn(rest, value); + else if (node === undefined && this.schema) + this.set(key, collectionFromPath(this.schema, rest, value)); + else + throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`); + } + } + /** + * Removes a value from the collection. + * @returns `true` if the item was found and removed. + */ + deleteIn(path) { + const [key, ...rest] = path; + if (rest.length === 0) + return this.delete(key); + const node = this.get(key, true); + if (identity.isCollection(node)) + return node.deleteIn(rest); + else + throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`); + } + /** + * Returns item at `key`, or `undefined` if not found. By default unwraps + * scalar values from their surrounding node; to disable set `keepScalar` to + * `true` (collections are always returned intact). + */ + getIn(path, keepScalar) { + const [key, ...rest] = path; + const node = this.get(key, true); + if (rest.length === 0) + return !keepScalar && identity.isScalar(node) ? node.value : node; + else + return identity.isCollection(node) ? node.getIn(rest, keepScalar) : undefined; + } + hasAllNullValues(allowScalar) { + return this.items.every(node => { + if (!identity.isPair(node)) + return false; + const n = node.value; + return (n == null || + (allowScalar && + identity.isScalar(n) && + n.value == null && + !n.commentBefore && + !n.comment && + !n.tag)); + }); + } + /** + * Checks if the collection includes a value with the key `key`. + */ + hasIn(path) { + const [key, ...rest] = path; + if (rest.length === 0) + return this.has(key); + const node = this.get(key, true); + return identity.isCollection(node) ? node.hasIn(rest) : false; + } + /** + * Sets a value in this collection. For `!!set`, `value` needs to be a + * boolean to add/remove the item from the set. + */ + setIn(path, value) { + const [key, ...rest] = path; + if (rest.length === 0) { + this.set(key, value); + } + else { + const node = this.get(key, true); + if (identity.isCollection(node)) + node.setIn(rest, value); + else if (node === undefined && this.schema) + this.set(key, collectionFromPath(this.schema, rest, value)); + else + throw new Error(`Expected YAML collection at ${key}. 
Remaining path: ${rest}`); + } + } +} +Collection.maxFlowStringSingleLineLength = 60; + +exports.Collection = Collection; +exports.collectionFromPath = collectionFromPath; +exports.isEmptyPath = isEmptyPath; + + +/***/ }), + +/***/ 1399: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var applyReviver = __nccwpck_require__(3412); +var identity = __nccwpck_require__(5589); +var toJS = __nccwpck_require__(2463); + +class NodeBase { + constructor(type) { + Object.defineProperty(this, identity.NODE_TYPE, { value: type }); + } + /** Create a copy of this node. */ + clone() { + const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this)); + if (this.range) + copy.range = this.range.slice(); + return copy; + } + /** A plain JavaScript representation of this node. */ + toJS(doc, { mapAsMap, maxAliasCount, onAnchor, reviver } = {}) { + if (!identity.isDocument(doc)) + throw new TypeError('A document argument is required'); + const ctx = { + anchors: new Map(), + doc, + keep: true, + mapAsMap: mapAsMap === true, + mapKeyWarned: false, + maxAliasCount: typeof maxAliasCount === 'number' ? maxAliasCount : 100 + }; + const res = toJS.toJS(this, '', ctx); + if (typeof onAnchor === 'function') + for (const { count, res } of ctx.anchors.values()) + onAnchor(res, count); + return typeof reviver === 'function' + ? applyReviver.applyReviver(reviver, { '': res }, '', res) + : res; + } +} + +exports.NodeBase = NodeBase; + + +/***/ }), + +/***/ 246: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var createNode = __nccwpck_require__(9652); +var stringifyPair = __nccwpck_require__(4875); +var addPairToJSMap = __nccwpck_require__(4676); +var identity = __nccwpck_require__(5589); + +function createPair(key, value, ctx) { + const k = createNode.createNode(key, undefined, ctx); + const v = createNode.createNode(value, undefined, ctx); + return new Pair(k, v); +} +class Pair { + constructor(key, value = null) { + Object.defineProperty(this, identity.NODE_TYPE, { value: identity.PAIR }); + this.key = key; + this.value = value; + } + clone(schema) { + let { key, value } = this; + if (identity.isNode(key)) + key = key.clone(schema); + if (identity.isNode(value)) + value = value.clone(schema); + return new Pair(key, value); + } + toJSON(_, ctx) { + const pair = ctx?.mapAsMap ? new Map() : {}; + return addPairToJSMap.addPairToJSMap(ctx, pair, this); + } + toString(ctx, onComment, onChompKeep) { + return ctx?.doc + ? stringifyPair.stringifyPair(this, ctx, onComment, onChompKeep) + : JSON.stringify(this); + } +} + +exports.Pair = Pair; +exports.createPair = createPair; + + +/***/ }), + +/***/ 9338: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var identity = __nccwpck_require__(5589); +var Node = __nccwpck_require__(1399); +var toJS = __nccwpck_require__(2463); + +const isScalarValue = (value) => !value || (typeof value !== 'function' && typeof value !== 'object'); +class Scalar extends Node.NodeBase { + constructor(value) { + super(identity.SCALAR); + this.value = value; + } + toJSON(arg, ctx) { + return ctx?.keep ? 
this.value : toJS.toJS(this.value, arg, ctx); + } + toString() { + return String(this.value); + } +} +Scalar.BLOCK_FOLDED = 'BLOCK_FOLDED'; +Scalar.BLOCK_LITERAL = 'BLOCK_LITERAL'; +Scalar.PLAIN = 'PLAIN'; +Scalar.QUOTE_DOUBLE = 'QUOTE_DOUBLE'; +Scalar.QUOTE_SINGLE = 'QUOTE_SINGLE'; + +exports.Scalar = Scalar; +exports.isScalarValue = isScalarValue; + + +/***/ }), + +/***/ 6011: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var stringifyCollection = __nccwpck_require__(2466); +var addPairToJSMap = __nccwpck_require__(4676); +var Collection = __nccwpck_require__(3466); +var identity = __nccwpck_require__(5589); +var Pair = __nccwpck_require__(246); +var Scalar = __nccwpck_require__(9338); + +function findPair(items, key) { + const k = identity.isScalar(key) ? key.value : key; + for (const it of items) { + if (identity.isPair(it)) { + if (it.key === key || it.key === k) + return it; + if (identity.isScalar(it.key) && it.key.value === k) + return it; + } + } + return undefined; +} +class YAMLMap extends Collection.Collection { + static get tagName() { + return 'tag:yaml.org,2002:map'; + } + constructor(schema) { + super(identity.MAP, schema); + this.items = []; + } + /** + * A generic collection parsing method that can be extended + * to other node classes that inherit from YAMLMap + */ + static from(schema, obj, ctx) { + const { keepUndefined, replacer } = ctx; + const map = new this(schema); + const add = (key, value) => { + if (typeof replacer === 'function') + value = replacer.call(obj, key, value); + else if (Array.isArray(replacer) && !replacer.includes(key)) + return; + if (value !== undefined || keepUndefined) + map.items.push(Pair.createPair(key, value, ctx)); + }; + if (obj instanceof Map) { + for (const [key, value] of obj) + add(key, value); + } + else if (obj && typeof obj === 'object') { + for (const key of Object.keys(obj)) + add(key, obj[key]); + } + if (typeof schema.sortMapEntries === 'function') { + map.items.sort(schema.sortMapEntries); + } + return map; + } + /** + * Adds a value to the collection. + * + * @param overwrite - If not set `true`, using a key that is already in the + * collection will throw. Otherwise, overwrites the previous value. + */ + add(pair, overwrite) { + let _pair; + if (identity.isPair(pair)) + _pair = pair; + else if (!pair || typeof pair !== 'object' || !('key' in pair)) { + // In TypeScript, this never happens. + _pair = new Pair.Pair(pair, pair?.value); + } + else + _pair = new Pair.Pair(pair.key, pair.value); + const prev = findPair(this.items, _pair.key); + const sortEntries = this.schema?.sortMapEntries; + if (prev) { + if (!overwrite) + throw new Error(`Key ${_pair.key} already set`); + // For scalars, keep the old node & its comments and anchors + if (identity.isScalar(prev.value) && Scalar.isScalarValue(_pair.value)) + prev.value.value = _pair.value; + else + prev.value = _pair.value; + } + else if (sortEntries) { + const i = this.items.findIndex(item => sortEntries(_pair, item) < 0); + if (i === -1) + this.items.push(_pair); + else + this.items.splice(i, 0, _pair); + } + else { + this.items.push(_pair); + } + } + delete(key) { + const it = findPair(this.items, key); + if (!it) + return false; + const del = this.items.splice(this.items.indexOf(it), 1); + return del.length > 0; + } + get(key, keepScalar) { + const it = findPair(this.items, key); + const node = it?.value; + return (!keepScalar && identity.isScalar(node) ? node.value : node) ?? 
undefined; + } + has(key) { + return !!findPair(this.items, key); + } + set(key, value) { + this.add(new Pair.Pair(key, value), true); + } + /** + * @param ctx - Conversion context, originally set in Document#toJS() + * @param {Class} Type - If set, forces the returned collection type + * @returns Instance of Type, Map, or Object + */ + toJSON(_, ctx, Type) { + const map = Type ? new Type() : ctx?.mapAsMap ? new Map() : {}; + if (ctx?.onCreate) + ctx.onCreate(map); + for (const item of this.items) + addPairToJSMap.addPairToJSMap(ctx, map, item); + return map; + } + toString(ctx, onComment, onChompKeep) { + if (!ctx) + return JSON.stringify(this); + for (const item of this.items) { + if (!identity.isPair(item)) + throw new Error(`Map items must all be pairs; found ${JSON.stringify(item)} instead`); + } + if (!ctx.allNullValues && this.hasAllNullValues(false)) + ctx = Object.assign({}, ctx, { allNullValues: true }); + return stringifyCollection.stringifyCollection(this, ctx, { + blockItemPrefix: '', + flowChars: { start: '{', end: '}' }, + itemIndent: ctx.indent || '', + onChompKeep, + onComment + }); + } +} + +exports.YAMLMap = YAMLMap; +exports.findPair = findPair; + + +/***/ }), + +/***/ 5161: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var createNode = __nccwpck_require__(9652); +var stringifyCollection = __nccwpck_require__(2466); +var Collection = __nccwpck_require__(3466); +var identity = __nccwpck_require__(5589); +var Scalar = __nccwpck_require__(9338); +var toJS = __nccwpck_require__(2463); + +class YAMLSeq extends Collection.Collection { + static get tagName() { + return 'tag:yaml.org,2002:seq'; + } + constructor(schema) { + super(identity.SEQ, schema); + this.items = []; + } + add(value) { + this.items.push(value); + } + /** + * Removes a value from the collection. + * + * `key` must contain a representation of an integer for this to succeed. + * It may be wrapped in a `Scalar`. + * + * @returns `true` if the item was found and removed. + */ + delete(key) { + const idx = asItemIndex(key); + if (typeof idx !== 'number') + return false; + const del = this.items.splice(idx, 1); + return del.length > 0; + } + get(key, keepScalar) { + const idx = asItemIndex(key); + if (typeof idx !== 'number') + return undefined; + const it = this.items[idx]; + return !keepScalar && identity.isScalar(it) ? it.value : it; + } + /** + * Checks if the collection includes a value with the key `key`. + * + * `key` must contain a representation of an integer for this to succeed. + * It may be wrapped in a `Scalar`. + */ + has(key) { + const idx = asItemIndex(key); + return typeof idx === 'number' && idx < this.items.length; + } + /** + * Sets a value in this collection. For `!!set`, `value` needs to be a + * boolean to add/remove the item from the set. + * + * If `key` does not contain a representation of an integer, this will throw. + * It may be wrapped in a `Scalar`. 
+ */ + set(key, value) { + const idx = asItemIndex(key); + if (typeof idx !== 'number') + throw new Error(`Expected a valid index, not ${key}.`); + const prev = this.items[idx]; + if (identity.isScalar(prev) && Scalar.isScalarValue(value)) + prev.value = value; + else + this.items[idx] = value; + } + toJSON(_, ctx) { + const seq = []; + if (ctx?.onCreate) + ctx.onCreate(seq); + let i = 0; + for (const item of this.items) + seq.push(toJS.toJS(item, String(i++), ctx)); + return seq; + } + toString(ctx, onComment, onChompKeep) { + if (!ctx) + return JSON.stringify(this); + return stringifyCollection.stringifyCollection(this, ctx, { + blockItemPrefix: '- ', + flowChars: { start: '[', end: ']' }, + itemIndent: (ctx.indent || '') + ' ', + onChompKeep, + onComment + }); + } + static from(schema, obj, ctx) { + const { replacer } = ctx; + const seq = new this(schema); + if (obj && Symbol.iterator in Object(obj)) { + let i = 0; + for (let it of obj) { + if (typeof replacer === 'function') { + const key = obj instanceof Set ? it : String(i++); + it = replacer.call(obj, key, it); + } + seq.items.push(createNode.createNode(it, undefined, ctx)); + } + } + return seq; + } +} +function asItemIndex(key) { + let idx = identity.isScalar(key) ? key.value : key; + if (idx && typeof idx === 'string') + idx = Number(idx); + return typeof idx === 'number' && Number.isInteger(idx) && idx >= 0 + ? idx + : null; +} + +exports.YAMLSeq = YAMLSeq; + + +/***/ }), + +/***/ 4676: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var log = __nccwpck_require__(6909); +var stringify = __nccwpck_require__(8409); +var identity = __nccwpck_require__(5589); +var Scalar = __nccwpck_require__(9338); +var toJS = __nccwpck_require__(2463); + +const MERGE_KEY = '<<'; +function addPairToJSMap(ctx, map, { key, value }) { + if (ctx?.doc.schema.merge && isMergeKey(key)) { + value = identity.isAlias(value) ? value.resolve(ctx.doc) : value; + if (identity.isSeq(value)) + for (const it of value.items) + mergeToJSMap(ctx, map, it); + else if (Array.isArray(value)) + for (const it of value) + mergeToJSMap(ctx, map, it); + else + mergeToJSMap(ctx, map, value); + } + else { + const jsKey = toJS.toJS(key, '', ctx); + if (map instanceof Map) { + map.set(jsKey, toJS.toJS(value, jsKey, ctx)); + } + else if (map instanceof Set) { + map.add(jsKey); + } + else { + const stringKey = stringifyKey(key, jsKey, ctx); + const jsValue = toJS.toJS(value, stringKey, ctx); + if (stringKey in map) + Object.defineProperty(map, stringKey, { + value: jsValue, + writable: true, + enumerable: true, + configurable: true + }); + else + map[stringKey] = jsValue; + } + } + return map; +} +const isMergeKey = (key) => key === MERGE_KEY || + (identity.isScalar(key) && + key.value === MERGE_KEY && + (!key.type || key.type === Scalar.Scalar.PLAIN)); +// If the value associated with a merge key is a single mapping node, each of +// its key/value pairs is inserted into the current mapping, unless the key +// already exists in it. If the value associated with the merge key is a +// sequence, then this sequence is expected to contain mapping nodes and each +// of these nodes is merged in turn according to its order in the sequence. +// Keys in mapping nodes earlier in the sequence override keys specified in +// later mapping nodes. -- http://yaml.org/type/merge.html +function mergeToJSMap(ctx, map, value) { + const source = ctx && identity.isAlias(value) ? 
value.resolve(ctx.doc) : value; + if (!identity.isMap(source)) + throw new Error('Merge sources must be maps or map aliases'); + const srcMap = source.toJSON(null, ctx, Map); + for (const [key, value] of srcMap) { + if (map instanceof Map) { + if (!map.has(key)) + map.set(key, value); + } + else if (map instanceof Set) { + map.add(key); + } + else if (!Object.prototype.hasOwnProperty.call(map, key)) { + Object.defineProperty(map, key, { + value, + writable: true, + enumerable: true, + configurable: true + }); + } + } + return map; +} +function stringifyKey(key, jsKey, ctx) { + if (jsKey === null) + return ''; + if (typeof jsKey !== 'object') + return String(jsKey); + if (identity.isNode(key) && ctx?.doc) { + const strCtx = stringify.createStringifyContext(ctx.doc, {}); + strCtx.anchors = new Set(); + for (const node of ctx.anchors.keys()) + strCtx.anchors.add(node.anchor); + strCtx.inFlow = true; + strCtx.inStringifyKey = true; + const strKey = key.toString(strCtx); + if (!ctx.mapKeyWarned) { + let jsonStr = JSON.stringify(strKey); + if (jsonStr.length > 40) + jsonStr = jsonStr.substring(0, 36) + '..."'; + log.warn(ctx.doc.options.logLevel, `Keys with collection values will be stringified due to JS Object restrictions: ${jsonStr}. Set mapAsMap: true to use object keys.`); + ctx.mapKeyWarned = true; + } + return strKey; + } + return JSON.stringify(jsKey); +} + +exports.addPairToJSMap = addPairToJSMap; + + +/***/ }), + +/***/ 5589: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +const ALIAS = Symbol.for('yaml.alias'); +const DOC = Symbol.for('yaml.document'); +const MAP = Symbol.for('yaml.map'); +const PAIR = Symbol.for('yaml.pair'); +const SCALAR = Symbol.for('yaml.scalar'); +const SEQ = Symbol.for('yaml.seq'); +const NODE_TYPE = Symbol.for('yaml.node.type'); +const isAlias = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === ALIAS; +const isDocument = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === DOC; +const isMap = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === MAP; +const isPair = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === PAIR; +const isScalar = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SCALAR; +const isSeq = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SEQ; +function isCollection(node) { + if (node && typeof node === 'object') + switch (node[NODE_TYPE]) { + case MAP: + case SEQ: + return true; + } + return false; +} +function isNode(node) { + if (node && typeof node === 'object') + switch (node[NODE_TYPE]) { + case ALIAS: + case MAP: + case SCALAR: + case SEQ: + return true; + } + return false; +} +const hasAnchor = (node) => (isScalar(node) || isCollection(node)) && !!node.anchor; + +exports.ALIAS = ALIAS; +exports.DOC = DOC; +exports.MAP = MAP; +exports.NODE_TYPE = NODE_TYPE; +exports.PAIR = PAIR; +exports.SCALAR = SCALAR; +exports.SEQ = SEQ; +exports.hasAnchor = hasAnchor; +exports.isAlias = isAlias; +exports.isCollection = isCollection; +exports.isDocument = isDocument; +exports.isMap = isMap; +exports.isNode = isNode; +exports.isPair = isPair; +exports.isScalar = isScalar; +exports.isSeq = isSeq; + + +/***/ }), + +/***/ 2463: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var identity = __nccwpck_require__(5589); + +/** + * Recursively convert any node or its contents to native JavaScript + * + * @param value - The input value + * @param arg - If `value` defines a `toJSON()` method, 
use this + * as its first argument + * @param ctx - Conversion context, originally set in Document#toJS(). If + * `{ keep: true }` is not set, output should be suitable for JSON + * stringification. + */ +function toJS(value, arg, ctx) { + // eslint-disable-next-line @typescript-eslint/no-unsafe-return + if (Array.isArray(value)) + return value.map((v, i) => toJS(v, String(i), ctx)); + if (value && typeof value.toJSON === 'function') { + // eslint-disable-next-line @typescript-eslint/no-unsafe-call + if (!ctx || !identity.hasAnchor(value)) + return value.toJSON(arg, ctx); + const data = { aliasCount: 0, count: 1, res: undefined }; + ctx.anchors.set(value, data); + ctx.onCreate = res => { + data.res = res; + delete ctx.onCreate; + }; + const res = value.toJSON(arg, ctx); + if (ctx.onCreate) + ctx.onCreate(res); + return res; + } + if (typeof value === 'bigint' && !ctx?.keep) + return Number(value); + return value; +} + +exports.toJS = toJS; + + +/***/ }), + +/***/ 9027: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var resolveBlockScalar = __nccwpck_require__(9485); +var resolveFlowScalar = __nccwpck_require__(7578); +var errors = __nccwpck_require__(4236); +var stringifyString = __nccwpck_require__(6226); + +function resolveAsScalar(token, strict = true, onError) { + if (token) { + const _onError = (pos, code, message) => { + const offset = typeof pos === 'number' ? pos : Array.isArray(pos) ? pos[0] : pos.offset; + if (onError) + onError(offset, code, message); + else + throw new errors.YAMLParseError([offset, offset + 1], code, message); + }; + switch (token.type) { + case 'scalar': + case 'single-quoted-scalar': + case 'double-quoted-scalar': + return resolveFlowScalar.resolveFlowScalar(token, strict, _onError); + case 'block-scalar': + return resolveBlockScalar.resolveBlockScalar(token, strict, _onError); + } + } + return null; +} +/** + * Create a new scalar token with `value` + * + * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`, + * as this function does not support any schema operations and won't check for such conflicts. + * + * @param value The string representation of the value, which will have its content properly indented. + * @param context.end Comments and whitespace after the end of the value, or after the block scalar header. If undefined, a newline will be added. + * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value. + * @param context.indent The indent level of the token. + * @param context.inFlow Is this scalar within a flow collection? This may affect the resolved type of the token's value. + * @param context.offset The offset position of the token. + * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`. + */ +function createScalarToken(value, context) { + const { implicitKey = false, indent, inFlow = false, offset = -1, type = 'PLAIN' } = context; + const source = stringifyString.stringifyString({ type, value }, { + implicitKey, + indent: indent > 0 ? ' '.repeat(indent) : '', + inFlow, + options: { blockQuote: true, lineWidth: -1 } + }); + const end = context.end ?? 
[ + { type: 'newline', offset: -1, indent, source: '\n' } + ]; + switch (source[0]) { + case '|': + case '>': { + const he = source.indexOf('\n'); + const head = source.substring(0, he); + const body = source.substring(he + 1) + '\n'; + const props = [ + { type: 'block-scalar-header', offset, indent, source: head } + ]; + if (!addEndtoBlockProps(props, end)) + props.push({ type: 'newline', offset: -1, indent, source: '\n' }); + return { type: 'block-scalar', offset, indent, props, source: body }; + } + case '"': + return { type: 'double-quoted-scalar', offset, indent, source, end }; + case "'": + return { type: 'single-quoted-scalar', offset, indent, source, end }; + default: + return { type: 'scalar', offset, indent, source, end }; + } +} +/** + * Set the value of `token` to the given string `value`, overwriting any previous contents and type that it may have. + * + * Best efforts are made to retain any comments previously associated with the `token`, + * though all contents within a collection's `items` will be overwritten. + * + * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`, + * as this function does not support any schema operations and won't check for such conflicts. + * + * @param token Any token. If it does not include an `indent` value, the value will be stringified as if it were an implicit key. + * @param value The string representation of the value, which will have its content properly indented. + * @param context.afterKey In most cases, values after a key should have an additional level of indentation. + * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value. + * @param context.inFlow Being within a flow collection may affect the resolved type of the token's value. + * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`. + */ +function setScalarValue(token, value, context = {}) { + let { afterKey = false, implicitKey = false, inFlow = false, type } = context; + let indent = 'indent' in token ? token.indent : null; + if (afterKey && typeof indent === 'number') + indent += 2; + if (!type) + switch (token.type) { + case 'single-quoted-scalar': + type = 'QUOTE_SINGLE'; + break; + case 'double-quoted-scalar': + type = 'QUOTE_DOUBLE'; + break; + case 'block-scalar': { + const header = token.props[0]; + if (header.type !== 'block-scalar-header') + throw new Error('Invalid block scalar header'); + type = header.source[0] === '>' ? 'BLOCK_FOLDED' : 'BLOCK_LITERAL'; + break; + } + default: + type = 'PLAIN'; + } + const source = stringifyString.stringifyString({ type, value }, { + implicitKey: implicitKey || indent === null, + indent: indent !== null && indent > 0 ? 
' '.repeat(indent) : '', + inFlow, + options: { blockQuote: true, lineWidth: -1 } + }); + switch (source[0]) { + case '|': + case '>': + setBlockScalarValue(token, source); + break; + case '"': + setFlowScalarValue(token, source, 'double-quoted-scalar'); + break; + case "'": + setFlowScalarValue(token, source, 'single-quoted-scalar'); + break; + default: + setFlowScalarValue(token, source, 'scalar'); + } +} +function setBlockScalarValue(token, source) { + const he = source.indexOf('\n'); + const head = source.substring(0, he); + const body = source.substring(he + 1) + '\n'; + if (token.type === 'block-scalar') { + const header = token.props[0]; + if (header.type !== 'block-scalar-header') + throw new Error('Invalid block scalar header'); + header.source = head; + token.source = body; + } + else { + const { offset } = token; + const indent = 'indent' in token ? token.indent : -1; + const props = [ + { type: 'block-scalar-header', offset, indent, source: head } + ]; + if (!addEndtoBlockProps(props, 'end' in token ? token.end : undefined)) + props.push({ type: 'newline', offset: -1, indent, source: '\n' }); + for (const key of Object.keys(token)) + if (key !== 'type' && key !== 'offset') + delete token[key]; + Object.assign(token, { type: 'block-scalar', indent, props, source: body }); + } +} +/** @returns `true` if last token is a newline */ +function addEndtoBlockProps(props, end) { + if (end) + for (const st of end) + switch (st.type) { + case 'space': + case 'comment': + props.push(st); + break; + case 'newline': + props.push(st); + return true; + } + return false; +} +function setFlowScalarValue(token, source, type) { + switch (token.type) { + case 'scalar': + case 'double-quoted-scalar': + case 'single-quoted-scalar': + token.type = type; + token.source = source; + break; + case 'block-scalar': { + const end = token.props.slice(1); + let oa = source.length; + if (token.props[0].type === 'block-scalar-header') + oa -= token.props[0].source.length; + for (const tok of end) + tok.offset += oa; + delete token.props; + Object.assign(token, { type, source, end }); + break; + } + case 'block-map': + case 'block-seq': { + const offset = token.offset + source.length; + const nl = { type: 'newline', offset, indent: token.indent, source: '\n' }; + delete token.items; + Object.assign(token, { type, source, end: [nl] }); + break; + } + default: { + const indent = 'indent' in token ? token.indent : -1; + const end = 'end' in token && Array.isArray(token.end) + ? token.end.filter(st => st.type === 'space' || + st.type === 'comment' || + st.type === 'newline') + : []; + for (const key of Object.keys(token)) + if (key !== 'type' && key !== 'offset') + delete token[key]; + Object.assign(token, { type, indent, source, end }); + } + } +} + +exports.createScalarToken = createScalarToken; +exports.resolveAsScalar = resolveAsScalar; +exports.setScalarValue = setScalarValue; + + +/***/ }), + +/***/ 6307: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +/** + * Stringify a CST document, token, or collection item + * + * Fair warning: This applies no validation whatsoever, and + * simply concatenates the sources in their logical order. + */ +const stringify = (cst) => 'type' in cst ? 
stringifyToken(cst) : stringifyItem(cst); +function stringifyToken(token) { + switch (token.type) { + case 'block-scalar': { + let res = ''; + for (const tok of token.props) + res += stringifyToken(tok); + return res + token.source; + } + case 'block-map': + case 'block-seq': { + let res = ''; + for (const item of token.items) + res += stringifyItem(item); + return res; + } + case 'flow-collection': { + let res = token.start.source; + for (const item of token.items) + res += stringifyItem(item); + for (const st of token.end) + res += st.source; + return res; + } + case 'document': { + let res = stringifyItem(token); + if (token.end) + for (const st of token.end) + res += st.source; + return res; + } + default: { + let res = token.source; + if ('end' in token && token.end) + for (const st of token.end) + res += st.source; + return res; + } + } +} +function stringifyItem({ start, key, sep, value }) { + let res = ''; + for (const st of start) + res += st.source; + if (key) + res += stringifyToken(key); + if (sep) + for (const st of sep) + res += st.source; + if (value) + res += stringifyToken(value); + return res; +} + +exports.stringify = stringify; + + +/***/ }), + +/***/ 8497: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +const BREAK = Symbol('break visit'); +const SKIP = Symbol('skip children'); +const REMOVE = Symbol('remove item'); +/** + * Apply a visitor to a CST document or item. + * + * Walks through the tree (depth-first) starting from the root, calling a + * `visitor` function with two arguments when entering each item: + * - `item`: The current item, which included the following members: + * - `start: SourceToken[]` – Source tokens before the key or value, + * possibly including its anchor or tag. + * - `key?: Token | null` – Set for pair values. May then be `null`, if + * the key before the `:` separator is empty. + * - `sep?: SourceToken[]` – Source tokens between the key and the value, + * which should include the `:` map value indicator if `value` is set. + * - `value?: Token` – The value of a sequence item, or of a map pair. + * - `path`: The steps from the root to the current node, as an array of + * `['key' | 'value', number]` tuples. + * + * The return value of the visitor may be used to control the traversal: + * - `undefined` (default): Do nothing and continue + * - `visit.SKIP`: Do not visit the children of this token, continue with + * next sibling + * - `visit.BREAK`: Terminate traversal completely + * - `visit.REMOVE`: Remove the current item, then continue with the next one + * - `number`: Set the index of the next step. This is useful especially if + * the index of the current token has changed. + * - `function`: Define the next visitor for this item. After the original + * visitor is called on item entry, next visitors are called after handling + * a non-empty `key` and when exiting the item. + */ +function visit(cst, visitor) { + if ('type' in cst && cst.type === 'document') + cst = { start: cst.start, value: cst.value }; + _visit(Object.freeze([]), cst, visitor); +} +// Without the `as symbol` casts, TS declares these in the `visit` +// namespace using `var`, but then complains about that because +// `unique symbol` must be `const`. 
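+// ---------------------------------------------------------------------------
+// Editor's note (illustrative only, kept entirely in comments so the bundled
+// output is unchanged): a minimal sketch of driving the CST visitor documented
+// above, assuming the vendored `yaml` package's public API (`Parser` and the
+// `CST` helpers that this webpack module implements and re-exports).
+//
+//   const { Parser, CST } = require('yaml');
+//
+//   for (const token of new Parser().parse('a: 1\nb: [2, 3]\n')) {
+//     if (token.type !== 'document') continue;
+//     CST.visit(token, (item, path) => {
+//       // `item` is a { start, key?, sep?, value? } collection item and
+//       // `path` is its route from the root as ['key' | 'value', index] pairs.
+//       if (item.key && CST.isScalar(item.key))
+//         console.log('key:', CST.resolveAsScalar(item.key).value);
+//       // Returning CST.visit.SKIP skips this item's children,
+//       // CST.visit.REMOVE removes the item, CST.visit.BREAK stops the walk.
+//     });
+//   }
+// ---------------------------------------------------------------------------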
+/** Terminate visit traversal completely */ +visit.BREAK = BREAK; +/** Do not visit the children of the current item */ +visit.SKIP = SKIP; +/** Remove the current item */ +visit.REMOVE = REMOVE; +/** Find the item at `path` from `cst` as the root */ +visit.itemAtPath = (cst, path) => { + let item = cst; + for (const [field, index] of path) { + const tok = item?.[field]; + if (tok && 'items' in tok) { + item = tok.items[index]; + } + else + return undefined; + } + return item; +}; +/** + * Get the immediate parent collection of the item at `path` from `cst` as the root. + * + * Throws an error if the collection is not found, which should never happen if the item itself exists. + */ +visit.parentCollection = (cst, path) => { + const parent = visit.itemAtPath(cst, path.slice(0, -1)); + const field = path[path.length - 1][0]; + const coll = parent?.[field]; + if (coll && 'items' in coll) + return coll; + throw new Error('Parent collection not found'); +}; +function _visit(path, item, visitor) { + let ctrl = visitor(item, path); + if (typeof ctrl === 'symbol') + return ctrl; + for (const field of ['key', 'value']) { + const token = item[field]; + if (token && 'items' in token) { + for (let i = 0; i < token.items.length; ++i) { + const ci = _visit(Object.freeze(path.concat([[field, i]])), token.items[i], visitor); + if (typeof ci === 'number') + i = ci - 1; + else if (ci === BREAK) + return BREAK; + else if (ci === REMOVE) { + token.items.splice(i, 1); + i -= 1; + } + } + if (typeof ctrl === 'function' && field === 'key') + ctrl = ctrl(item, path); + } + } + return typeof ctrl === 'function' ? ctrl(item, path) : ctrl; +} + +exports.visit = visit; + + +/***/ }), + +/***/ 9169: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var cstScalar = __nccwpck_require__(9027); +var cstStringify = __nccwpck_require__(6307); +var cstVisit = __nccwpck_require__(8497); + +/** The byte order mark */ +const BOM = '\u{FEFF}'; +/** Start of doc-mode */ +const DOCUMENT = '\x02'; // C0: Start of Text +/** Unexpected end of flow-mode */ +const FLOW_END = '\x18'; // C0: Cancel +/** Next token is a scalar value */ +const SCALAR = '\x1f'; // C0: Unit Separator +/** @returns `true` if `token` is a flow or block collection */ +const isCollection = (token) => !!token && 'items' in token; +/** @returns `true` if `token` is a flow or block scalar; not an alias */ +const isScalar = (token) => !!token && + (token.type === 'scalar' || + token.type === 'single-quoted-scalar' || + token.type === 'double-quoted-scalar' || + token.type === 'block-scalar'); +/* istanbul ignore next */ +/** Get a printable representation of a lexer token */ +function prettyToken(token) { + switch (token) { + case BOM: + return ''; + case DOCUMENT: + return ''; + case FLOW_END: + return ''; + case SCALAR: + return ''; + default: + return JSON.stringify(token); + } +} +/** Identify the type of a lexer token. May return `null` for unknown tokens. 
*/ +function tokenType(source) { + switch (source) { + case BOM: + return 'byte-order-mark'; + case DOCUMENT: + return 'doc-mode'; + case FLOW_END: + return 'flow-error-end'; + case SCALAR: + return 'scalar'; + case '---': + return 'doc-start'; + case '...': + return 'doc-end'; + case '': + case '\n': + case '\r\n': + return 'newline'; + case '-': + return 'seq-item-ind'; + case '?': + return 'explicit-key-ind'; + case ':': + return 'map-value-ind'; + case '{': + return 'flow-map-start'; + case '}': + return 'flow-map-end'; + case '[': + return 'flow-seq-start'; + case ']': + return 'flow-seq-end'; + case ',': + return 'comma'; + } + switch (source[0]) { + case ' ': + case '\t': + return 'space'; + case '#': + return 'comment'; + case '%': + return 'directive-line'; + case '*': + return 'alias'; + case '&': + return 'anchor'; + case '!': + return 'tag'; + case "'": + return 'single-quoted-scalar'; + case '"': + return 'double-quoted-scalar'; + case '|': + case '>': + return 'block-scalar-header'; + } + return null; +} + +exports.createScalarToken = cstScalar.createScalarToken; +exports.resolveAsScalar = cstScalar.resolveAsScalar; +exports.setScalarValue = cstScalar.setScalarValue; +exports.stringify = cstStringify.stringify; +exports.visit = cstVisit.visit; +exports.BOM = BOM; +exports.DOCUMENT = DOCUMENT; +exports.FLOW_END = FLOW_END; +exports.SCALAR = SCALAR; +exports.isCollection = isCollection; +exports.isScalar = isScalar; +exports.prettyToken = prettyToken; +exports.tokenType = tokenType; + + +/***/ }), + +/***/ 5976: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var cst = __nccwpck_require__(9169); + +/* +START -> stream + +stream + directive -> line-end -> stream + indent + line-end -> stream + [else] -> line-start + +line-end + comment -> line-end + newline -> . + input-end -> END + +line-start + doc-start -> doc + doc-end -> stream + [else] -> indent -> block-start + +block-start + seq-item-start -> block-start + explicit-key-start -> block-start + map-value-start -> block-start + [else] -> doc + +doc + line-end -> line-start + spaces -> doc + anchor -> doc + tag -> doc + flow-start -> flow -> doc + flow-end -> error -> doc + seq-item-start -> error -> doc + explicit-key-start -> error -> doc + map-value-start -> doc + alias -> doc + quote-start -> quoted-scalar -> doc + block-scalar-header -> line-end -> block-scalar(min) -> line-start + [else] -> plain-scalar(false, min) -> doc + +flow + line-end -> flow + spaces -> flow + anchor -> flow + tag -> flow + flow-start -> flow -> flow + flow-end -> . + seq-item-start -> error -> flow + explicit-key-start -> flow + map-value-start -> flow + alias -> flow + quote-start -> quoted-scalar -> flow + comma -> flow + [else] -> plain-scalar(true, 0) -> flow + +quoted-scalar + quote-end -> . + [else] -> quoted-scalar + +block-scalar(min) + newline + peek(indent < min) -> . + [else] -> block-scalar(min) + +plain-scalar(is-flow, min) + scalar-end(is-flow) -> . + peek(newline + (indent < min)) -> . 
+ [else] -> plain-scalar(min) +*/ +function isEmpty(ch) { + switch (ch) { + case undefined: + case ' ': + case '\n': + case '\r': + case '\t': + return true; + default: + return false; + } +} +const hexDigits = '0123456789ABCDEFabcdef'.split(''); +const tagChars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-#;/?:@&=+$_.!~*'()".split(''); +const invalidFlowScalarChars = ',[]{}'.split(''); +const invalidAnchorChars = ' ,[]{}\n\r\t'.split(''); +const isNotAnchorChar = (ch) => !ch || invalidAnchorChars.includes(ch); +/** + * Splits an input string into lexical tokens, i.e. smaller strings that are + * easily identifiable by `tokens.tokenType()`. + * + * Lexing starts always in a "stream" context. Incomplete input may be buffered + * until a complete token can be emitted. + * + * In addition to slices of the original input, the following control characters + * may also be emitted: + * + * - `\x02` (Start of Text): A document starts with the next token + * - `\x18` (Cancel): Unexpected end of flow-mode (indicates an error) + * - `\x1f` (Unit Separator): Next token is a scalar value + * - `\u{FEFF}` (Byte order mark): Emitted separately outside documents + */ +class Lexer { + constructor() { + /** + * Flag indicating whether the end of the current buffer marks the end of + * all input + */ + this.atEnd = false; + /** + * Explicit indent set in block scalar header, as an offset from the current + * minimum indent, so e.g. set to 1 from a header `|2+`. Set to -1 if not + * explicitly set. + */ + this.blockScalarIndent = -1; + /** + * Block scalars that include a + (keep) chomping indicator in their header + * include trailing empty lines, which are otherwise excluded from the + * scalar's contents. + */ + this.blockScalarKeep = false; + /** Current input */ + this.buffer = ''; + /** + * Flag noting whether the map value indicator : can immediately follow this + * node within a flow context. + */ + this.flowKey = false; + /** Count of surrounding flow collection levels. */ + this.flowLevel = 0; + /** + * Minimum level of indentation required for next lines to be parsed as a + * part of the current scalar value. + */ + this.indentNext = 0; + /** Indentation level of the current line. */ + this.indentValue = 0; + /** Position of the next \n character. */ + this.lineEndPos = null; + /** Stores the state of the lexer if reaching the end of incpomplete input */ + this.next = null; + /** A pointer to `buffer`; the current position of the lexer. */ + this.pos = 0; + } + /** + * Generate YAML tokens from the `source` string. If `incomplete`, + * a part of the last line may be left as a buffer for the next call. + * + * @returns A generator of lexical tokens + */ + *lex(source, incomplete = false) { + if (source) { + this.buffer = this.buffer ? this.buffer + source : source; + this.lineEndPos = null; + } + this.atEnd = !incomplete; + let next = this.next ?? 
'stream'; + while (next && (incomplete || this.hasChars(1))) + next = yield* this.parseNext(next); + } + atLineEnd() { + let i = this.pos; + let ch = this.buffer[i]; + while (ch === ' ' || ch === '\t') + ch = this.buffer[++i]; + if (!ch || ch === '#' || ch === '\n') + return true; + if (ch === '\r') + return this.buffer[i + 1] === '\n'; + return false; + } + charAt(n) { + return this.buffer[this.pos + n]; + } + continueScalar(offset) { + let ch = this.buffer[offset]; + if (this.indentNext > 0) { + let indent = 0; + while (ch === ' ') + ch = this.buffer[++indent + offset]; + if (ch === '\r') { + const next = this.buffer[indent + offset + 1]; + if (next === '\n' || (!next && !this.atEnd)) + return offset + indent + 1; + } + return ch === '\n' || indent >= this.indentNext || (!ch && !this.atEnd) + ? offset + indent + : -1; + } + if (ch === '-' || ch === '.') { + const dt = this.buffer.substr(offset, 3); + if ((dt === '---' || dt === '...') && isEmpty(this.buffer[offset + 3])) + return -1; + } + return offset; + } + getLine() { + let end = this.lineEndPos; + if (typeof end !== 'number' || (end !== -1 && end < this.pos)) { + end = this.buffer.indexOf('\n', this.pos); + this.lineEndPos = end; + } + if (end === -1) + return this.atEnd ? this.buffer.substring(this.pos) : null; + if (this.buffer[end - 1] === '\r') + end -= 1; + return this.buffer.substring(this.pos, end); + } + hasChars(n) { + return this.pos + n <= this.buffer.length; + } + setNext(state) { + this.buffer = this.buffer.substring(this.pos); + this.pos = 0; + this.lineEndPos = null; + this.next = state; + return null; + } + peek(n) { + return this.buffer.substr(this.pos, n); + } + *parseNext(next) { + switch (next) { + case 'stream': + return yield* this.parseStream(); + case 'line-start': + return yield* this.parseLineStart(); + case 'block-start': + return yield* this.parseBlockStart(); + case 'doc': + return yield* this.parseDocument(); + case 'flow': + return yield* this.parseFlowCollection(); + case 'quoted-scalar': + return yield* this.parseQuotedScalar(); + case 'block-scalar': + return yield* this.parseBlockScalar(); + case 'plain-scalar': + return yield* this.parsePlainScalar(); + } + } + *parseStream() { + let line = this.getLine(); + if (line === null) + return this.setNext('stream'); + if (line[0] === cst.BOM) { + yield* this.pushCount(1); + line = line.substring(1); + } + if (line[0] === '%') { + let dirEnd = line.length; + const cs = line.indexOf('#'); + if (cs !== -1) { + const ch = line[cs - 1]; + if (ch === ' ' || ch === '\t') + dirEnd = cs - 1; + } + while (true) { + const ch = line[dirEnd - 1]; + if (ch === ' ' || ch === '\t') + dirEnd -= 1; + else + break; + } + const n = (yield* this.pushCount(dirEnd)) + (yield* this.pushSpaces(true)); + yield* this.pushCount(line.length - n); // possible comment + this.pushNewline(); + return 'stream'; + } + if (this.atLineEnd()) { + const sp = yield* this.pushSpaces(true); + yield* this.pushCount(line.length - sp); + yield* this.pushNewline(); + return 'stream'; + } + yield cst.DOCUMENT; + return yield* this.parseLineStart(); + } + *parseLineStart() { + const ch = this.charAt(0); + if (!ch && !this.atEnd) + return this.setNext('line-start'); + if (ch === '-' || ch === '.') { + if (!this.atEnd && !this.hasChars(4)) + return this.setNext('line-start'); + const s = this.peek(3); + if (s === '---' && isEmpty(this.charAt(3))) { + yield* this.pushCount(3); + this.indentValue = 0; + this.indentNext = 0; + return 'doc'; + } + else if (s === '...' 
&& isEmpty(this.charAt(3))) { + yield* this.pushCount(3); + return 'stream'; + } + } + this.indentValue = yield* this.pushSpaces(false); + if (this.indentNext > this.indentValue && !isEmpty(this.charAt(1))) + this.indentNext = this.indentValue; + return yield* this.parseBlockStart(); + } + *parseBlockStart() { + const [ch0, ch1] = this.peek(2); + if (!ch1 && !this.atEnd) + return this.setNext('block-start'); + if ((ch0 === '-' || ch0 === '?' || ch0 === ':') && isEmpty(ch1)) { + const n = (yield* this.pushCount(1)) + (yield* this.pushSpaces(true)); + this.indentNext = this.indentValue + 1; + this.indentValue += n; + return yield* this.parseBlockStart(); + } + return 'doc'; + } + *parseDocument() { + yield* this.pushSpaces(true); + const line = this.getLine(); + if (line === null) + return this.setNext('doc'); + let n = yield* this.pushIndicators(); + switch (line[n]) { + case '#': + yield* this.pushCount(line.length - n); + // fallthrough + case undefined: + yield* this.pushNewline(); + return yield* this.parseLineStart(); + case '{': + case '[': + yield* this.pushCount(1); + this.flowKey = false; + this.flowLevel = 1; + return 'flow'; + case '}': + case ']': + // this is an error + yield* this.pushCount(1); + return 'doc'; + case '*': + yield* this.pushUntil(isNotAnchorChar); + return 'doc'; + case '"': + case "'": + return yield* this.parseQuotedScalar(); + case '|': + case '>': + n += yield* this.parseBlockScalarHeader(); + n += yield* this.pushSpaces(true); + yield* this.pushCount(line.length - n); + yield* this.pushNewline(); + return yield* this.parseBlockScalar(); + default: + return yield* this.parsePlainScalar(); + } + } + *parseFlowCollection() { + let nl, sp; + let indent = -1; + do { + nl = yield* this.pushNewline(); + if (nl > 0) { + sp = yield* this.pushSpaces(false); + this.indentValue = indent = sp; + } + else { + sp = 0; + } + sp += yield* this.pushSpaces(true); + } while (nl + sp > 0); + const line = this.getLine(); + if (line === null) + return this.setNext('flow'); + if ((indent !== -1 && indent < this.indentNext && line[0] !== '#') || + (indent === 0 && + (line.startsWith('---') || line.startsWith('...')) && + isEmpty(line[3]))) { + // Allowing for the terminal ] or } at the same (rather than greater) + // indent level as the initial [ or { is technically invalid, but + // failing here would be surprising to users. + const atFlowEndMarker = indent === this.indentNext - 1 && + this.flowLevel === 1 && + (line[0] === ']' || line[0] === '}'); + if (!atFlowEndMarker) { + // this is an error + this.flowLevel = 0; + yield cst.FLOW_END; + return yield* this.parseLineStart(); + } + } + let n = 0; + while (line[n] === ',') { + n += yield* this.pushCount(1); + n += yield* this.pushSpaces(true); + this.flowKey = false; + } + n += yield* this.pushIndicators(); + switch (line[n]) { + case undefined: + return 'flow'; + case '#': + yield* this.pushCount(line.length - n); + return 'flow'; + case '{': + case '[': + yield* this.pushCount(1); + this.flowKey = false; + this.flowLevel += 1; + return 'flow'; + case '}': + case ']': + yield* this.pushCount(1); + this.flowKey = true; + this.flowLevel -= 1; + return this.flowLevel ? 
'flow' : 'doc'; + case '*': + yield* this.pushUntil(isNotAnchorChar); + return 'flow'; + case '"': + case "'": + this.flowKey = true; + return yield* this.parseQuotedScalar(); + case ':': { + const next = this.charAt(1); + if (this.flowKey || isEmpty(next) || next === ',') { + this.flowKey = false; + yield* this.pushCount(1); + yield* this.pushSpaces(true); + return 'flow'; + } + } + // fallthrough + default: + this.flowKey = false; + return yield* this.parsePlainScalar(); + } + } + *parseQuotedScalar() { + const quote = this.charAt(0); + let end = this.buffer.indexOf(quote, this.pos + 1); + if (quote === "'") { + while (end !== -1 && this.buffer[end + 1] === "'") + end = this.buffer.indexOf("'", end + 2); + } + else { + // double-quote + while (end !== -1) { + let n = 0; + while (this.buffer[end - 1 - n] === '\\') + n += 1; + if (n % 2 === 0) + break; + end = this.buffer.indexOf('"', end + 1); + } + } + // Only looking for newlines within the quotes + const qb = this.buffer.substring(0, end); + let nl = qb.indexOf('\n', this.pos); + if (nl !== -1) { + while (nl !== -1) { + const cs = this.continueScalar(nl + 1); + if (cs === -1) + break; + nl = qb.indexOf('\n', cs); + } + if (nl !== -1) { + // this is an error caused by an unexpected unindent + end = nl - (qb[nl - 1] === '\r' ? 2 : 1); + } + } + if (end === -1) { + if (!this.atEnd) + return this.setNext('quoted-scalar'); + end = this.buffer.length; + } + yield* this.pushToIndex(end + 1, false); + return this.flowLevel ? 'flow' : 'doc'; + } + *parseBlockScalarHeader() { + this.blockScalarIndent = -1; + this.blockScalarKeep = false; + let i = this.pos; + while (true) { + const ch = this.buffer[++i]; + if (ch === '+') + this.blockScalarKeep = true; + else if (ch > '0' && ch <= '9') + this.blockScalarIndent = Number(ch) - 1; + else if (ch !== '-') + break; + } + return yield* this.pushUntil(ch => isEmpty(ch) || ch === '#'); + } + *parseBlockScalar() { + let nl = this.pos - 1; // may be -1 if this.pos === 0 + let indent = 0; + let ch; + loop: for (let i = this.pos; (ch = this.buffer[i]); ++i) { + switch (ch) { + case ' ': + indent += 1; + break; + case '\n': + nl = i; + indent = 0; + break; + case '\r': { + const next = this.buffer[i + 1]; + if (!next && !this.atEnd) + return this.setNext('block-scalar'); + if (next === '\n') + break; + } // fallthrough + default: + break loop; + } + } + if (!ch && !this.atEnd) + return this.setNext('block-scalar'); + if (indent >= this.indentNext) { + if (this.blockScalarIndent === -1) + this.indentNext = indent; + else + this.indentNext += this.blockScalarIndent; + do { + const cs = this.continueScalar(nl + 1); + if (cs === -1) + break; + nl = this.buffer.indexOf('\n', cs); + } while (nl !== -1); + if (nl === -1) { + if (!this.atEnd) + return this.setNext('block-scalar'); + nl = this.buffer.length; + } + } + if (!this.blockScalarKeep) { + do { + let i = nl - 1; + let ch = this.buffer[i]; + if (ch === '\r') + ch = this.buffer[--i]; + const lastChar = i; // Drop the line if last char not more indented + while (ch === ' ' || ch === '\t') + ch = this.buffer[--i]; + if (ch === '\n' && i >= this.pos && i + 1 + indent > lastChar) + nl = i; + else + break; + } while (true); + } + yield cst.SCALAR; + yield* this.pushToIndex(nl + 1, true); + return yield* this.parseLineStart(); + } + *parsePlainScalar() { + const inFlow = this.flowLevel > 0; + let end = this.pos - 1; + let i = this.pos - 1; + let ch; + while ((ch = this.buffer[++i])) { + if (ch === ':') { + const next = this.buffer[i + 1]; + if (isEmpty(next) || 
(inFlow && next === ',')) + break; + end = i; + } + else if (isEmpty(ch)) { + let next = this.buffer[i + 1]; + if (ch === '\r') { + if (next === '\n') { + i += 1; + ch = '\n'; + next = this.buffer[i + 1]; + } + else + end = i; + } + if (next === '#' || (inFlow && invalidFlowScalarChars.includes(next))) + break; + if (ch === '\n') { + const cs = this.continueScalar(i + 1); + if (cs === -1) + break; + i = Math.max(i, cs - 2); // to advance, but still account for ' #' + } + } + else { + if (inFlow && invalidFlowScalarChars.includes(ch)) + break; + end = i; + } + } + if (!ch && !this.atEnd) + return this.setNext('plain-scalar'); + yield cst.SCALAR; + yield* this.pushToIndex(end + 1, true); + return inFlow ? 'flow' : 'doc'; + } + *pushCount(n) { + if (n > 0) { + yield this.buffer.substr(this.pos, n); + this.pos += n; + return n; + } + return 0; + } + *pushToIndex(i, allowEmpty) { + const s = this.buffer.slice(this.pos, i); + if (s) { + yield s; + this.pos += s.length; + return s.length; + } + else if (allowEmpty) + yield ''; + return 0; + } + *pushIndicators() { + switch (this.charAt(0)) { + case '!': + return ((yield* this.pushTag()) + + (yield* this.pushSpaces(true)) + + (yield* this.pushIndicators())); + case '&': + return ((yield* this.pushUntil(isNotAnchorChar)) + + (yield* this.pushSpaces(true)) + + (yield* this.pushIndicators())); + case '-': // this is an error + case '?': // this is an error outside flow collections + case ':': { + const inFlow = this.flowLevel > 0; + const ch1 = this.charAt(1); + if (isEmpty(ch1) || (inFlow && invalidFlowScalarChars.includes(ch1))) { + if (!inFlow) + this.indentNext = this.indentValue + 1; + else if (this.flowKey) + this.flowKey = false; + return ((yield* this.pushCount(1)) + + (yield* this.pushSpaces(true)) + + (yield* this.pushIndicators())); + } + } + } + return 0; + } + *pushTag() { + if (this.charAt(1) === '<') { + let i = this.pos + 2; + let ch = this.buffer[i]; + while (!isEmpty(ch) && ch !== '>') + ch = this.buffer[++i]; + return yield* this.pushToIndex(ch === '>' ? i + 1 : i, false); + } + else { + let i = this.pos + 1; + let ch = this.buffer[i]; + while (ch) { + if (tagChars.includes(ch)) + ch = this.buffer[++i]; + else if (ch === '%' && + hexDigits.includes(this.buffer[i + 1]) && + hexDigits.includes(this.buffer[i + 2])) { + ch = this.buffer[(i += 3)]; + } + else + break; + } + return yield* this.pushToIndex(i, false); + } + } + *pushNewline() { + const ch = this.buffer[this.pos]; + if (ch === '\n') + return yield* this.pushCount(1); + else if (ch === '\r' && this.charAt(1) === '\n') + return yield* this.pushCount(2); + else + return 0; + } + *pushSpaces(allowTabs) { + let i = this.pos - 1; + let ch; + do { + ch = this.buffer[++i]; + } while (ch === ' ' || (allowTabs && ch === '\t')); + const n = i - this.pos; + if (n > 0) { + yield this.buffer.substr(this.pos, n); + this.pos = i; + } + return n; + } + *pushUntil(test) { + let i = this.pos; + let ch = this.buffer[i]; + while (!test(ch)) + ch = this.buffer[++i]; + return yield* this.pushToIndex(i, false); + } +} + +exports.Lexer = Lexer; + + +/***/ }), + +/***/ 1929: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +/** + * Tracks newlines during parsing in order to provide an efficient API for + * determining the one-indexed `{ line, col }` position for any offset + * within the input. + */ +class LineCounter { + constructor() { + this.lineStarts = []; + /** + * Should be called in ascending order. 
Otherwise, call + * `lineCounter.lineStarts.sort()` before calling `linePos()`. + */ + this.addNewLine = (offset) => this.lineStarts.push(offset); + /** + * Performs a binary search and returns the 1-indexed { line, col } + * position of `offset`. If `line === 0`, `addNewLine` has never been + * called or `offset` is before the first known newline. + */ + this.linePos = (offset) => { + let low = 0; + let high = this.lineStarts.length; + while (low < high) { + const mid = (low + high) >> 1; // Math.floor((low + high) / 2) + if (this.lineStarts[mid] < offset) + low = mid + 1; + else + high = mid; + } + if (this.lineStarts[low] === offset) + return { line: low + 1, col: 1 }; + if (low === 0) + return { line: 0, col: offset }; + const start = this.lineStarts[low - 1]; + return { line: low, col: offset - start + 1 }; + }; + } +} + +exports.LineCounter = LineCounter; + + +/***/ }), + +/***/ 3328: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var cst = __nccwpck_require__(9169); +var lexer = __nccwpck_require__(5976); + +function includesToken(list, type) { + for (let i = 0; i < list.length; ++i) + if (list[i].type === type) + return true; + return false; +} +function findNonEmptyIndex(list) { + for (let i = 0; i < list.length; ++i) { + switch (list[i].type) { + case 'space': + case 'comment': + case 'newline': + break; + default: + return i; + } + } + return -1; +} +function isFlowToken(token) { + switch (token?.type) { + case 'alias': + case 'scalar': + case 'single-quoted-scalar': + case 'double-quoted-scalar': + case 'flow-collection': + return true; + default: + return false; + } +} +function getPrevProps(parent) { + switch (parent.type) { + case 'document': + return parent.start; + case 'block-map': { + const it = parent.items[parent.items.length - 1]; + return it.sep ?? it.start; + } + case 'block-seq': + return parent.items[parent.items.length - 1].start; + /* istanbul ignore next should not happen */ + default: + return []; + } +} +/** Note: May modify input array */ +function getFirstKeyStartProps(prev) { + if (prev.length === 0) + return []; + let i = prev.length; + loop: while (--i >= 0) { + switch (prev[i].type) { + case 'doc-start': + case 'explicit-key-ind': + case 'map-value-ind': + case 'seq-item-ind': + case 'newline': + break loop; + } + } + while (prev[++i]?.type === 'space') { + /* loop */ + } + return prev.splice(i, prev.length); +} +function fixFlowSeqItems(fc) { + if (fc.start.type === 'flow-seq-start') { + for (const it of fc.items) { + if (it.sep && + !it.value && + !includesToken(it.start, 'explicit-key-ind') && + !includesToken(it.sep, 'map-value-ind')) { + if (it.key) + it.value = it.key; + delete it.key; + if (isFlowToken(it.value)) { + if (it.value.end) + Array.prototype.push.apply(it.value.end, it.sep); + else + it.value.end = it.sep; + } + else + Array.prototype.push.apply(it.start, it.sep); + delete it.sep; + } + } + } +} +/** + * A YAML concrete syntax tree (CST) parser + * + * ```ts + * const src: string = ... + * for (const token of new Parser().parse(src)) { + * // token: Token + * } + * ``` + * + * To use the parser with a user-provided lexer: + * + * ```ts + * function* parse(source: string, lexer: Lexer) { + * const parser = new Parser() + * for (const lexeme of lexer.lex(source)) + * yield* parser.next(lexeme) + * yield* parser.end() + * } + * + * const src: string = ... 
+ * const lexer = new Lexer() + * for (const token of parse(src, lexer)) { + * // token: Token + * } + * ``` + */ +class Parser { + /** + * @param onNewLine - If defined, called separately with the start position of + * each new line (in `parse()`, including the start of input). + */ + constructor(onNewLine) { + /** If true, space and sequence indicators count as indentation */ + this.atNewLine = true; + /** If true, next token is a scalar value */ + this.atScalar = false; + /** Current indentation level */ + this.indent = 0; + /** Current offset since the start of parsing */ + this.offset = 0; + /** On the same line with a block map key */ + this.onKeyLine = false; + /** Top indicates the node that's currently being built */ + this.stack = []; + /** The source of the current token, set in parse() */ + this.source = ''; + /** The type of the current token, set in parse() */ + this.type = ''; + // Must be defined after `next()` + this.lexer = new lexer.Lexer(); + this.onNewLine = onNewLine; + } + /** + * Parse `source` as a YAML stream. + * If `incomplete`, a part of the last line may be left as a buffer for the next call. + * + * Errors are not thrown, but yielded as `{ type: 'error', message }` tokens. + * + * @returns A generator of tokens representing each directive, document, and other structure. + */ + *parse(source, incomplete = false) { + if (this.onNewLine && this.offset === 0) + this.onNewLine(0); + for (const lexeme of this.lexer.lex(source, incomplete)) + yield* this.next(lexeme); + if (!incomplete) + yield* this.end(); + } + /** + * Advance the parser by the `source` of one lexical token. + */ + *next(source) { + this.source = source; + if (process.env.LOG_TOKENS) + console.log('|', cst.prettyToken(source)); + if (this.atScalar) { + this.atScalar = false; + yield* this.step(); + this.offset += source.length; + return; + } + const type = cst.tokenType(source); + if (!type) { + const message = `Not a YAML token: ${source}`; + yield* this.pop({ type: 'error', offset: this.offset, message, source }); + this.offset += source.length; + } + else if (type === 'scalar') { + this.atNewLine = false; + this.atScalar = true; + this.type = 'scalar'; + } + else { + this.type = type; + yield* this.step(); + switch (type) { + case 'newline': + this.atNewLine = true; + this.indent = 0; + if (this.onNewLine) + this.onNewLine(this.offset + source.length); + break; + case 'space': + if (this.atNewLine && source[0] === ' ') + this.indent += source.length; + break; + case 'explicit-key-ind': + case 'map-value-ind': + case 'seq-item-ind': + if (this.atNewLine) + this.indent += source.length; + break; + case 'doc-mode': + case 'flow-error-end': + return; + default: + this.atNewLine = false; + } + this.offset += source.length; + } + } + /** Call at end of input to push out any remaining constructions */ + *end() { + while (this.stack.length > 0) + yield* this.pop(); + } + get sourceToken() { + const st = { + type: this.type, + offset: this.offset, + indent: this.indent, + source: this.source + }; + return st; + } + *step() { + const top = this.peek(1); + if (this.type === 'doc-end' && (!top || top.type !== 'doc-end')) { + while (this.stack.length > 0) + yield* this.pop(); + this.stack.push({ + type: 'doc-end', + offset: this.offset, + source: this.source + }); + return; + } + if (!top) + return yield* this.stream(); + switch (top.type) { + case 'document': + return yield* this.document(top); + case 'alias': + case 'scalar': + case 'single-quoted-scalar': + case 'double-quoted-scalar': + return yield* 
this.scalar(top); + case 'block-scalar': + return yield* this.blockScalar(top); + case 'block-map': + return yield* this.blockMap(top); + case 'block-seq': + return yield* this.blockSequence(top); + case 'flow-collection': + return yield* this.flowCollection(top); + case 'doc-end': + return yield* this.documentEnd(top); + } + /* istanbul ignore next should not happen */ + yield* this.pop(); + } + peek(n) { + return this.stack[this.stack.length - n]; + } + *pop(error) { + const token = error ?? this.stack.pop(); + /* istanbul ignore if should not happen */ + if (!token) { + const message = 'Tried to pop an empty stack'; + yield { type: 'error', offset: this.offset, source: '', message }; + } + else if (this.stack.length === 0) { + yield token; + } + else { + const top = this.peek(1); + if (token.type === 'block-scalar') { + // Block scalars use their parent rather than header indent + token.indent = 'indent' in top ? top.indent : 0; + } + else if (token.type === 'flow-collection' && top.type === 'document') { + // Ignore all indent for top-level flow collections + token.indent = 0; + } + if (token.type === 'flow-collection') + fixFlowSeqItems(token); + switch (top.type) { + case 'document': + top.value = token; + break; + case 'block-scalar': + top.props.push(token); // error + break; + case 'block-map': { + const it = top.items[top.items.length - 1]; + if (it.value) { + top.items.push({ start: [], key: token, sep: [] }); + this.onKeyLine = true; + return; + } + else if (it.sep) { + it.value = token; + } + else { + Object.assign(it, { key: token, sep: [] }); + this.onKeyLine = !includesToken(it.start, 'explicit-key-ind'); + return; + } + break; + } + case 'block-seq': { + const it = top.items[top.items.length - 1]; + if (it.value) + top.items.push({ start: [], value: token }); + else + it.value = token; + break; + } + case 'flow-collection': { + const it = top.items[top.items.length - 1]; + if (!it || it.value) + top.items.push({ start: [], key: token, sep: [] }); + else if (it.sep) + it.value = token; + else + Object.assign(it, { key: token, sep: [] }); + return; + } + /* istanbul ignore next should not happen */ + default: + yield* this.pop(); + yield* this.pop(token); + } + if ((top.type === 'document' || + top.type === 'block-map' || + top.type === 'block-seq') && + (token.type === 'block-map' || token.type === 'block-seq')) { + const last = token.items[token.items.length - 1]; + if (last && + !last.sep && + !last.value && + last.start.length > 0 && + findNonEmptyIndex(last.start) === -1 && + (token.indent === 0 || + last.start.every(st => st.type !== 'comment' || st.indent < token.indent))) { + if (top.type === 'document') + top.end = last.start; + else + top.items.push({ start: last.start }); + token.items.splice(-1, 1); + } + } + } + } + *stream() { + switch (this.type) { + case 'directive-line': + yield { type: 'directive', offset: this.offset, source: this.source }; + return; + case 'byte-order-mark': + case 'space': + case 'comment': + case 'newline': + yield this.sourceToken; + return; + case 'doc-mode': + case 'doc-start': { + const doc = { + type: 'document', + offset: this.offset, + start: [] + }; + if (this.type === 'doc-start') + doc.start.push(this.sourceToken); + this.stack.push(doc); + return; + } + } + yield { + type: 'error', + offset: this.offset, + message: `Unexpected ${this.type} token in YAML stream`, + source: this.source + }; + } + *document(doc) { + if (doc.value) + return yield* this.lineEnd(doc); + switch (this.type) { + case 'doc-start': { + if 
(findNonEmptyIndex(doc.start) !== -1) { + yield* this.pop(); + yield* this.step(); + } + else + doc.start.push(this.sourceToken); + return; + } + case 'anchor': + case 'tag': + case 'space': + case 'comment': + case 'newline': + doc.start.push(this.sourceToken); + return; + } + const bv = this.startBlockValue(doc); + if (bv) + this.stack.push(bv); + else { + yield { + type: 'error', + offset: this.offset, + message: `Unexpected ${this.type} token in YAML document`, + source: this.source + }; + } + } + *scalar(scalar) { + if (this.type === 'map-value-ind') { + const prev = getPrevProps(this.peek(2)); + const start = getFirstKeyStartProps(prev); + let sep; + if (scalar.end) { + sep = scalar.end; + sep.push(this.sourceToken); + delete scalar.end; + } + else + sep = [this.sourceToken]; + const map = { + type: 'block-map', + offset: scalar.offset, + indent: scalar.indent, + items: [{ start, key: scalar, sep }] + }; + this.onKeyLine = true; + this.stack[this.stack.length - 1] = map; + } + else + yield* this.lineEnd(scalar); + } + *blockScalar(scalar) { + switch (this.type) { + case 'space': + case 'comment': + case 'newline': + scalar.props.push(this.sourceToken); + return; + case 'scalar': + scalar.source = this.source; + // block-scalar source includes trailing newline + this.atNewLine = true; + this.indent = 0; + if (this.onNewLine) { + let nl = this.source.indexOf('\n') + 1; + while (nl !== 0) { + this.onNewLine(this.offset + nl); + nl = this.source.indexOf('\n', nl) + 1; + } + } + yield* this.pop(); + break; + /* istanbul ignore next should not happen */ + default: + yield* this.pop(); + yield* this.step(); + } + } + *blockMap(map) { + const it = map.items[map.items.length - 1]; + // it.sep is true-ish if pair already has key or : separator + switch (this.type) { + case 'newline': + this.onKeyLine = false; + if (it.value) { + const end = 'end' in it.value ? it.value.end : undefined; + const last = Array.isArray(end) ? 
end[end.length - 1] : undefined; + if (last?.type === 'comment') + end?.push(this.sourceToken); + else + map.items.push({ start: [this.sourceToken] }); + } + else if (it.sep) { + it.sep.push(this.sourceToken); + } + else { + it.start.push(this.sourceToken); + } + return; + case 'space': + case 'comment': + if (it.value) { + map.items.push({ start: [this.sourceToken] }); + } + else if (it.sep) { + it.sep.push(this.sourceToken); + } + else { + if (this.atIndentedComment(it.start, map.indent)) { + const prev = map.items[map.items.length - 2]; + const end = prev?.value?.end; + if (Array.isArray(end)) { + Array.prototype.push.apply(end, it.start); + end.push(this.sourceToken); + map.items.pop(); + return; + } + } + it.start.push(this.sourceToken); + } + return; + } + if (this.indent >= map.indent) { + const atNextItem = !this.onKeyLine && this.indent === map.indent && it.sep; + // For empty nodes, assign newline-separated not indented empty tokens to following node + let start = []; + if (atNextItem && it.sep && !it.value) { + const nl = []; + for (let i = 0; i < it.sep.length; ++i) { + const st = it.sep[i]; + switch (st.type) { + case 'newline': + nl.push(i); + break; + case 'space': + break; + case 'comment': + if (st.indent > map.indent) + nl.length = 0; + break; + default: + nl.length = 0; + } + } + if (nl.length >= 2) + start = it.sep.splice(nl[1]); + } + switch (this.type) { + case 'anchor': + case 'tag': + if (atNextItem || it.value) { + start.push(this.sourceToken); + map.items.push({ start }); + this.onKeyLine = true; + } + else if (it.sep) { + it.sep.push(this.sourceToken); + } + else { + it.start.push(this.sourceToken); + } + return; + case 'explicit-key-ind': + if (!it.sep && !includesToken(it.start, 'explicit-key-ind')) { + it.start.push(this.sourceToken); + } + else if (atNextItem || it.value) { + start.push(this.sourceToken); + map.items.push({ start }); + } + else { + this.stack.push({ + type: 'block-map', + offset: this.offset, + indent: this.indent, + items: [{ start: [this.sourceToken] }] + }); + } + this.onKeyLine = true; + return; + case 'map-value-ind': + if (includesToken(it.start, 'explicit-key-ind')) { + if (!it.sep) { + if (includesToken(it.start, 'newline')) { + Object.assign(it, { key: null, sep: [this.sourceToken] }); + } + else { + const start = getFirstKeyStartProps(it.start); + this.stack.push({ + type: 'block-map', + offset: this.offset, + indent: this.indent, + items: [{ start, key: null, sep: [this.sourceToken] }] + }); + } + } + else if (it.value) { + map.items.push({ start: [], key: null, sep: [this.sourceToken] }); + } + else if (includesToken(it.sep, 'map-value-ind')) { + this.stack.push({ + type: 'block-map', + offset: this.offset, + indent: this.indent, + items: [{ start, key: null, sep: [this.sourceToken] }] + }); + } + else if (isFlowToken(it.key) && + !includesToken(it.sep, 'newline')) { + const start = getFirstKeyStartProps(it.start); + const key = it.key; + const sep = it.sep; + sep.push(this.sourceToken); + // @ts-expect-error type guard is wrong here + delete it.key, delete it.sep; + this.stack.push({ + type: 'block-map', + offset: this.offset, + indent: this.indent, + items: [{ start, key, sep }] + }); + } + else if (start.length > 0) { + // Not actually at next item + it.sep = it.sep.concat(start, this.sourceToken); + } + else { + it.sep.push(this.sourceToken); + } + } + else { + if (!it.sep) { + Object.assign(it, { key: null, sep: [this.sourceToken] }); + } + else if (it.value || atNextItem) { + map.items.push({ start, key: null, sep: 
[this.sourceToken] }); + } + else if (includesToken(it.sep, 'map-value-ind')) { + this.stack.push({ + type: 'block-map', + offset: this.offset, + indent: this.indent, + items: [{ start: [], key: null, sep: [this.sourceToken] }] + }); + } + else { + it.sep.push(this.sourceToken); + } + } + this.onKeyLine = true; + return; + case 'alias': + case 'scalar': + case 'single-quoted-scalar': + case 'double-quoted-scalar': { + const fs = this.flowScalar(this.type); + if (atNextItem || it.value) { + map.items.push({ start, key: fs, sep: [] }); + this.onKeyLine = true; + } + else if (it.sep) { + this.stack.push(fs); + } + else { + Object.assign(it, { key: fs, sep: [] }); + this.onKeyLine = true; + } + return; + } + default: { + const bv = this.startBlockValue(map); + if (bv) { + if (atNextItem && + bv.type !== 'block-seq' && + includesToken(it.start, 'explicit-key-ind')) { + map.items.push({ start }); + } + this.stack.push(bv); + return; + } + } + } + } + yield* this.pop(); + yield* this.step(); + } + *blockSequence(seq) { + const it = seq.items[seq.items.length - 1]; + switch (this.type) { + case 'newline': + if (it.value) { + const end = 'end' in it.value ? it.value.end : undefined; + const last = Array.isArray(end) ? end[end.length - 1] : undefined; + if (last?.type === 'comment') + end?.push(this.sourceToken); + else + seq.items.push({ start: [this.sourceToken] }); + } + else + it.start.push(this.sourceToken); + return; + case 'space': + case 'comment': + if (it.value) + seq.items.push({ start: [this.sourceToken] }); + else { + if (this.atIndentedComment(it.start, seq.indent)) { + const prev = seq.items[seq.items.length - 2]; + const end = prev?.value?.end; + if (Array.isArray(end)) { + Array.prototype.push.apply(end, it.start); + end.push(this.sourceToken); + seq.items.pop(); + return; + } + } + it.start.push(this.sourceToken); + } + return; + case 'anchor': + case 'tag': + if (it.value || this.indent <= seq.indent) + break; + it.start.push(this.sourceToken); + return; + case 'seq-item-ind': + if (this.indent !== seq.indent) + break; + if (it.value || includesToken(it.start, 'seq-item-ind')) + seq.items.push({ start: [this.sourceToken] }); + else + it.start.push(this.sourceToken); + return; + } + if (this.indent > seq.indent) { + const bv = this.startBlockValue(seq); + if (bv) { + this.stack.push(bv); + return; + } + } + yield* this.pop(); + yield* this.step(); + } + *flowCollection(fc) { + const it = fc.items[fc.items.length - 1]; + if (this.type === 'flow-error-end') { + let top; + do { + yield* this.pop(); + top = this.peek(1); + } while (top && top.type === 'flow-collection'); + } + else if (fc.end.length === 0) { + switch (this.type) { + case 'comma': + case 'explicit-key-ind': + if (!it || it.sep) + fc.items.push({ start: [this.sourceToken] }); + else + it.start.push(this.sourceToken); + return; + case 'map-value-ind': + if (!it || it.value) + fc.items.push({ start: [], key: null, sep: [this.sourceToken] }); + else if (it.sep) + it.sep.push(this.sourceToken); + else + Object.assign(it, { key: null, sep: [this.sourceToken] }); + return; + case 'space': + case 'comment': + case 'newline': + case 'anchor': + case 'tag': + if (!it || it.value) + fc.items.push({ start: [this.sourceToken] }); + else if (it.sep) + it.sep.push(this.sourceToken); + else + it.start.push(this.sourceToken); + return; + case 'alias': + case 'scalar': + case 'single-quoted-scalar': + case 'double-quoted-scalar': { + const fs = this.flowScalar(this.type); + if (!it || it.value) + fc.items.push({ start: [], key: fs, sep: 
[] }); + else if (it.sep) + this.stack.push(fs); + else + Object.assign(it, { key: fs, sep: [] }); + return; + } + case 'flow-map-end': + case 'flow-seq-end': + fc.end.push(this.sourceToken); + return; + } + const bv = this.startBlockValue(fc); + /* istanbul ignore else should not happen */ + if (bv) + this.stack.push(bv); + else { + yield* this.pop(); + yield* this.step(); + } + } + else { + const parent = this.peek(2); + if (parent.type === 'block-map' && + ((this.type === 'map-value-ind' && parent.indent === fc.indent) || + (this.type === 'newline' && + !parent.items[parent.items.length - 1].sep))) { + yield* this.pop(); + yield* this.step(); + } + else if (this.type === 'map-value-ind' && + parent.type !== 'flow-collection') { + const prev = getPrevProps(parent); + const start = getFirstKeyStartProps(prev); + fixFlowSeqItems(fc); + const sep = fc.end.splice(1, fc.end.length); + sep.push(this.sourceToken); + const map = { + type: 'block-map', + offset: fc.offset, + indent: fc.indent, + items: [{ start, key: fc, sep }] + }; + this.onKeyLine = true; + this.stack[this.stack.length - 1] = map; + } + else { + yield* this.lineEnd(fc); + } + } + } + flowScalar(type) { + if (this.onNewLine) { + let nl = this.source.indexOf('\n') + 1; + while (nl !== 0) { + this.onNewLine(this.offset + nl); + nl = this.source.indexOf('\n', nl) + 1; + } + } + return { + type, + offset: this.offset, + indent: this.indent, + source: this.source + }; + } + startBlockValue(parent) { + switch (this.type) { + case 'alias': + case 'scalar': + case 'single-quoted-scalar': + case 'double-quoted-scalar': + return this.flowScalar(this.type); + case 'block-scalar-header': + return { + type: 'block-scalar', + offset: this.offset, + indent: this.indent, + props: [this.sourceToken], + source: '' + }; + case 'flow-map-start': + case 'flow-seq-start': + return { + type: 'flow-collection', + offset: this.offset, + indent: this.indent, + start: this.sourceToken, + items: [], + end: [] + }; + case 'seq-item-ind': + return { + type: 'block-seq', + offset: this.offset, + indent: this.indent, + items: [{ start: [this.sourceToken] }] + }; + case 'explicit-key-ind': { + this.onKeyLine = true; + const prev = getPrevProps(parent); + const start = getFirstKeyStartProps(prev); + start.push(this.sourceToken); + return { + type: 'block-map', + offset: this.offset, + indent: this.indent, + items: [{ start }] + }; + } + case 'map-value-ind': { + this.onKeyLine = true; + const prev = getPrevProps(parent); + const start = getFirstKeyStartProps(prev); + return { + type: 'block-map', + offset: this.offset, + indent: this.indent, + items: [{ start, key: null, sep: [this.sourceToken] }] + }; + } + } + return null; + } + atIndentedComment(start, indent) { + if (this.type !== 'comment') + return false; + if (this.indent <= indent) + return false; + return start.every(st => st.type === 'newline' || st.type === 'space'); + } + *documentEnd(docEnd) { + if (this.type !== 'doc-mode') { + if (docEnd.end) + docEnd.end.push(this.sourceToken); + else + docEnd.end = [this.sourceToken]; + if (this.type === 'newline') + yield* this.pop(); + } + } + *lineEnd(token) { + switch (this.type) { + case 'comma': + case 'doc-start': + case 'doc-end': + case 'flow-seq-end': + case 'flow-map-end': + case 'map-value-ind': + yield* this.pop(); + yield* this.step(); + break; + case 'newline': + this.onKeyLine = false; + // fallthrough + case 'space': + case 'comment': + default: + // all other values are errors + if (token.end) + token.end.push(this.sourceToken); + else + 
token.end = [this.sourceToken]; + if (this.type === 'newline') + yield* this.pop(); + } + } +} + +exports.Parser = Parser; + + +/***/ }), + +/***/ 8649: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var composer = __nccwpck_require__(9493); +var Document = __nccwpck_require__(42); +var errors = __nccwpck_require__(4236); +var log = __nccwpck_require__(6909); +var lineCounter = __nccwpck_require__(1929); +var parser = __nccwpck_require__(3328); + +function parseOptions(options) { + const prettyErrors = options.prettyErrors !== false; + const lineCounter$1 = options.lineCounter || (prettyErrors && new lineCounter.LineCounter()) || null; + return { lineCounter: lineCounter$1, prettyErrors }; +} +/** + * Parse the input as a stream of YAML documents. + * + * Documents should be separated from each other by `...` or `---` marker lines. + * + * @returns If an empty `docs` array is returned, it will be of type + * EmptyStream and contain additional stream information. In + * TypeScript, you should use `'empty' in docs` as a type guard for it. + */ +function parseAllDocuments(source, options = {}) { + const { lineCounter, prettyErrors } = parseOptions(options); + const parser$1 = new parser.Parser(lineCounter?.addNewLine); + const composer$1 = new composer.Composer(options); + const docs = Array.from(composer$1.compose(parser$1.parse(source))); + if (prettyErrors && lineCounter) + for (const doc of docs) { + doc.errors.forEach(errors.prettifyError(source, lineCounter)); + doc.warnings.forEach(errors.prettifyError(source, lineCounter)); + } + if (docs.length > 0) + return docs; + return Object.assign([], { empty: true }, composer$1.streamInfo()); +} +/** Parse an input string into a single YAML.Document */ +function parseDocument(source, options = {}) { + const { lineCounter, prettyErrors } = parseOptions(options); + const parser$1 = new parser.Parser(lineCounter?.addNewLine); + const composer$1 = new composer.Composer(options); + // `doc` is always set by compose.end(true) at the very latest + let doc = null; + for (const _doc of composer$1.compose(parser$1.parse(source), true, source.length)) { + if (!doc) + doc = _doc; + else if (doc.options.logLevel !== 'silent') { + doc.errors.push(new errors.YAMLParseError(_doc.range.slice(0, 2), 'MULTIPLE_DOCS', 'Source contains multiple documents; please use YAML.parseAllDocuments()')); + break; + } + } + if (prettyErrors && lineCounter) { + doc.errors.forEach(errors.prettifyError(source, lineCounter)); + doc.warnings.forEach(errors.prettifyError(source, lineCounter)); + } + return doc; +} +function parse(src, reviver, options) { + let _reviver = undefined; + if (typeof reviver === 'function') { + _reviver = reviver; + } + else if (options === undefined && reviver && typeof reviver === 'object') { + options = reviver; + } + const doc = parseDocument(src, options); + if (!doc) + return null; + doc.warnings.forEach(warning => log.warn(doc.options.logLevel, warning)); + if (doc.errors.length > 0) { + if (doc.options.logLevel !== 'silent') + throw doc.errors[0]; + else + doc.errors = []; + } + return doc.toJS(Object.assign({ reviver: _reviver }, options)); +} +function stringify(value, replacer, options) { + let _replacer = null; + if (typeof replacer === 'function' || Array.isArray(replacer)) { + _replacer = replacer; + } + else if (options === undefined && replacer) { + options = replacer; + } + if (typeof options === 'string') + options = options.length; + if (typeof options === 'number') { + const indent = 
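/* Editor's note: illustrative usage sketch, not part of the bundled source.
   The Parser above yields CST tokens instead of throwing (errors arrive as
   { type: 'error', message } tokens), and the module that follows wires it into
   parseDocument() / parseAllDocuments() / parse(). The `require('yaml')` specifier is
   an assumption about which published package this bundle corresponds to.

   const { Parser, parseDocument } = require('yaml');

   // Low-level token stream; onNewLine is called with each line's start offset.
   const parser = new Parser(offset => console.log('line starts at', offset));
   for (const token of parser.parse('key: value\n')) {
     console.log(token.type);   // e.g. 'document'
   }

   // Higher-level API: one Document with any errors/warnings attached.
   const doc = parseDocument('a: 1\nb: [2, 3]\n');
   console.log(doc.errors.length, doc.toJS());   // 0 { a: 1, b: [ 2, 3 ] }
*/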
Math.round(options); + options = indent < 1 ? undefined : indent > 8 ? { indent: 8 } : { indent }; + } + if (value === undefined) { + const { keepUndefined } = options ?? replacer ?? {}; + if (!keepUndefined) + return undefined; + } + return new Document.Document(value, _replacer, options).toString(options); +} + +exports.parse = parse; +exports.parseAllDocuments = parseAllDocuments; +exports.parseDocument = parseDocument; +exports.stringify = stringify; + + +/***/ }), + +/***/ 6831: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var identity = __nccwpck_require__(5589); +var map = __nccwpck_require__(83); +var seq = __nccwpck_require__(1693); +var string = __nccwpck_require__(2201); +var tags = __nccwpck_require__(4138); + +const sortMapEntriesByKey = (a, b) => a.key < b.key ? -1 : a.key > b.key ? 1 : 0; +class Schema { + constructor({ compat, customTags, merge, resolveKnownTags, schema, sortMapEntries, toStringDefaults }) { + this.compat = Array.isArray(compat) + ? tags.getTags(compat, 'compat') + : compat + ? tags.getTags(null, compat) + : null; + this.merge = !!merge; + this.name = (typeof schema === 'string' && schema) || 'core'; + this.knownTags = resolveKnownTags ? tags.coreKnownTags : {}; + this.tags = tags.getTags(customTags, this.name); + this.toStringOptions = toStringDefaults ?? null; + Object.defineProperty(this, identity.MAP, { value: map.map }); + Object.defineProperty(this, identity.SCALAR, { value: string.string }); + Object.defineProperty(this, identity.SEQ, { value: seq.seq }); + // Used by createMap() + this.sortMapEntries = + typeof sortMapEntries === 'function' + ? sortMapEntries + : sortMapEntries === true + ? sortMapEntriesByKey + : null; + } + clone() { + const copy = Object.create(Schema.prototype, Object.getOwnPropertyDescriptors(this)); + copy.tags = this.tags.slice(); + return copy; + } +} + +exports.Schema = Schema; + + +/***/ }), + +/***/ 83: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var identity = __nccwpck_require__(5589); +var YAMLMap = __nccwpck_require__(6011); + +const map = { + collection: 'map', + default: true, + nodeClass: YAMLMap.YAMLMap, + tag: 'tag:yaml.org,2002:map', + resolve(map, onError) { + if (!identity.isMap(map)) + onError('Expected a mapping for this tag'); + return map; + }, + createNode: (schema, obj, ctx) => YAMLMap.YAMLMap.from(schema, obj, ctx) +}; + +exports.map = map; + + +/***/ }), + +/***/ 6703: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var Scalar = __nccwpck_require__(9338); + +const nullTag = { + identify: value => value == null, + createNode: () => new Scalar.Scalar(null), + default: true, + tag: 'tag:yaml.org,2002:null', + test: /^(?:~|[Nn]ull|NULL)?$/, + resolve: () => new Scalar.Scalar(null), + stringify: ({ source }, ctx) => typeof source === 'string' && nullTag.test.test(source) + ? 
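/* Editor's note: illustrative sketch, not part of the bundled source.
   stringify() above mirrors JSON.stringify's third argument: a number or numeric
   string is rounded into an { indent } option (values above 8 are capped, values
   below 1 fall back to the default), and an undefined value produces no output
   unless keepUndefined is set. The `require('yaml')` specifier is an assumption.

   const YAML = require('yaml');

   YAML.stringify({ a: [1, 2] });            // "a:\n  - 1\n  - 2\n"  (default 2-space indent)
   YAML.stringify({ a: [1, 2] }, null, 4);   // same data with a 4-space indent
   YAML.stringify(undefined);                // undefined (nothing to emit)
   YAML.stringify(undefined, { keepUndefined: true });   // stringifies a null document instead
*/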
source + : ctx.options.nullStr +}; + +exports.nullTag = nullTag; + + +/***/ }), + +/***/ 1693: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var identity = __nccwpck_require__(5589); +var YAMLSeq = __nccwpck_require__(5161); + +const seq = { + collection: 'seq', + default: true, + nodeClass: YAMLSeq.YAMLSeq, + tag: 'tag:yaml.org,2002:seq', + resolve(seq, onError) { + if (!identity.isSeq(seq)) + onError('Expected a sequence for this tag'); + return seq; + }, + createNode: (schema, obj, ctx) => YAMLSeq.YAMLSeq.from(schema, obj, ctx) +}; + +exports.seq = seq; + + +/***/ }), + +/***/ 2201: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var stringifyString = __nccwpck_require__(6226); + +const string = { + identify: value => typeof value === 'string', + default: true, + tag: 'tag:yaml.org,2002:str', + resolve: str => str, + stringify(item, ctx, onComment, onChompKeep) { + ctx = Object.assign({ actualString: true }, ctx); + return stringifyString.stringifyString(item, ctx, onComment, onChompKeep); + } +}; + +exports.string = string; + + +/***/ }), + +/***/ 2045: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var Scalar = __nccwpck_require__(9338); + +const boolTag = { + identify: value => typeof value === 'boolean', + default: true, + tag: 'tag:yaml.org,2002:bool', + test: /^(?:[Tt]rue|TRUE|[Ff]alse|FALSE)$/, + resolve: str => new Scalar.Scalar(str[0] === 't' || str[0] === 'T'), + stringify({ source, value }, ctx) { + if (source && boolTag.test.test(source)) { + const sv = source[0] === 't' || source[0] === 'T'; + if (value === sv) + return source; + } + return value ? ctx.options.trueStr : ctx.options.falseStr; + } +}; + +exports.boolTag = boolTag; + + +/***/ }), + +/***/ 6810: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var Scalar = __nccwpck_require__(9338); +var stringifyNumber = __nccwpck_require__(4174); + +const floatNaN = { + identify: value => typeof value === 'number', + default: true, + tag: 'tag:yaml.org,2002:float', + test: /^(?:[-+]?\.(?:inf|Inf|INF|nan|NaN|NAN))$/, + resolve: str => str.slice(-3).toLowerCase() === 'nan' + ? NaN + : str[0] === '-' + ? Number.NEGATIVE_INFINITY + : Number.POSITIVE_INFINITY, + stringify: stringifyNumber.stringifyNumber +}; +const floatExp = { + identify: value => typeof value === 'number', + default: true, + tag: 'tag:yaml.org,2002:float', + format: 'EXP', + test: /^[-+]?(?:\.[0-9]+|[0-9]+(?:\.[0-9]*)?)[eE][-+]?[0-9]+$/, + resolve: str => parseFloat(str), + stringify(node) { + const num = Number(node.value); + return isFinite(num) ? 
num.toExponential() : stringifyNumber.stringifyNumber(node); + } +}; +const float = { + identify: value => typeof value === 'number', + default: true, + tag: 'tag:yaml.org,2002:float', + test: /^[-+]?(?:\.[0-9]+|[0-9]+\.[0-9]*)$/, + resolve(str) { + const node = new Scalar.Scalar(parseFloat(str)); + const dot = str.indexOf('.'); + if (dot !== -1 && str[str.length - 1] === '0') + node.minFractionDigits = str.length - dot - 1; + return node; + }, + stringify: stringifyNumber.stringifyNumber +}; + +exports.float = float; +exports.floatExp = floatExp; +exports.floatNaN = floatNaN; + + +/***/ }), + +/***/ 3019: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var stringifyNumber = __nccwpck_require__(4174); + +const intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value); +const intResolve = (str, offset, radix, { intAsBigInt }) => (intAsBigInt ? BigInt(str) : parseInt(str.substring(offset), radix)); +function intStringify(node, radix, prefix) { + const { value } = node; + if (intIdentify(value) && value >= 0) + return prefix + value.toString(radix); + return stringifyNumber.stringifyNumber(node); +} +const intOct = { + identify: value => intIdentify(value) && value >= 0, + default: true, + tag: 'tag:yaml.org,2002:int', + format: 'OCT', + test: /^0o[0-7]+$/, + resolve: (str, _onError, opt) => intResolve(str, 2, 8, opt), + stringify: node => intStringify(node, 8, '0o') +}; +const int = { + identify: intIdentify, + default: true, + tag: 'tag:yaml.org,2002:int', + test: /^[-+]?[0-9]+$/, + resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt), + stringify: stringifyNumber.stringifyNumber +}; +const intHex = { + identify: value => intIdentify(value) && value >= 0, + default: true, + tag: 'tag:yaml.org,2002:int', + format: 'HEX', + test: /^0x[0-9a-fA-F]+$/, + resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt), + stringify: node => intStringify(node, 16, '0x') +}; + +exports.int = int; +exports.intHex = intHex; +exports.intOct = intOct; + + +/***/ }), + +/***/ 27: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var map = __nccwpck_require__(83); +var _null = __nccwpck_require__(6703); +var seq = __nccwpck_require__(1693); +var string = __nccwpck_require__(2201); +var bool = __nccwpck_require__(2045); +var float = __nccwpck_require__(6810); +var int = __nccwpck_require__(3019); + +const schema = [ + map.map, + seq.seq, + string.string, + _null.nullTag, + bool.boolTag, + int.intOct, + int.int, + int.intHex, + float.floatNaN, + float.floatExp, + float.float +]; + +exports.schema = schema; + + +/***/ }), + +/***/ 4545: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var Scalar = __nccwpck_require__(9338); +var map = __nccwpck_require__(83); +var seq = __nccwpck_require__(1693); + +function intIdentify(value) { + return typeof value === 'bigint' || Number.isInteger(value); +} +const stringifyJSON = ({ value }) => JSON.stringify(value); +const jsonScalars = [ + { + identify: value => typeof value === 'string', + default: true, + tag: 'tag:yaml.org,2002:str', + resolve: str => str, + stringify: stringifyJSON + }, + { + identify: value => value == null, + createNode: () => new Scalar.Scalar(null), + default: true, + tag: 'tag:yaml.org,2002:null', + test: /^null$/, + resolve: () => null, + stringify: stringifyJSON + }, + { + identify: value => typeof value === 'boolean', + default: true, + tag: 'tag:yaml.org,2002:bool', + test: /^true|false$/, + 
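/* Editor's note: illustrative sketch, not part of the bundled source.
   The scalar tags above (null, bool, int, float and their OCT/HEX/EXP variants) each
   pair a `test` regexp with a `resolve` function; a plain scalar takes the type of the
   first tag whose regexp matches it, and falls back to a string otherwise. The
   `require('yaml')` specifier is an assumption.

   const YAML = require('yaml');

   YAML.parse('hex: 0x1A\noct: 0o17\nnan: .NaN\ninf: -.inf\nempty: ~');
   // → { hex: 26, oct: 15, nan: NaN, inf: -Infinity, empty: null }

   YAML.parse('version: 1.2.3');   // → { version: '1.2.3' }  (no tag regexp matches)
*/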
resolve: str => str === 'true', + stringify: stringifyJSON + }, + { + identify: intIdentify, + default: true, + tag: 'tag:yaml.org,2002:int', + test: /^-?(?:0|[1-9][0-9]*)$/, + resolve: (str, _onError, { intAsBigInt }) => intAsBigInt ? BigInt(str) : parseInt(str, 10), + stringify: ({ value }) => intIdentify(value) ? value.toString() : JSON.stringify(value) + }, + { + identify: value => typeof value === 'number', + default: true, + tag: 'tag:yaml.org,2002:float', + test: /^-?(?:0|[1-9][0-9]*)(?:\.[0-9]*)?(?:[eE][-+]?[0-9]+)?$/, + resolve: str => parseFloat(str), + stringify: stringifyJSON + } +]; +const jsonError = { + default: true, + tag: '', + test: /^/, + resolve(str, onError) { + onError(`Unresolved plain scalar ${JSON.stringify(str)}`); + return str; + } +}; +const schema = [map.map, seq.seq].concat(jsonScalars, jsonError); + +exports.schema = schema; + + +/***/ }), + +/***/ 4138: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var map = __nccwpck_require__(83); +var _null = __nccwpck_require__(6703); +var seq = __nccwpck_require__(1693); +var string = __nccwpck_require__(2201); +var bool = __nccwpck_require__(2045); +var float = __nccwpck_require__(6810); +var int = __nccwpck_require__(3019); +var schema = __nccwpck_require__(27); +var schema$1 = __nccwpck_require__(4545); +var binary = __nccwpck_require__(5724); +var omap = __nccwpck_require__(8974); +var pairs = __nccwpck_require__(9841); +var schema$2 = __nccwpck_require__(5389); +var set = __nccwpck_require__(7847); +var timestamp = __nccwpck_require__(1156); + +const schemas = new Map([ + ['core', schema.schema], + ['failsafe', [map.map, seq.seq, string.string]], + ['json', schema$1.schema], + ['yaml11', schema$2.schema], + ['yaml-1.1', schema$2.schema] +]); +const tagsByName = { + binary: binary.binary, + bool: bool.boolTag, + float: float.float, + floatExp: float.floatExp, + floatNaN: float.floatNaN, + floatTime: timestamp.floatTime, + int: int.int, + intHex: int.intHex, + intOct: int.intOct, + intTime: timestamp.intTime, + map: map.map, + null: _null.nullTag, + omap: omap.omap, + pairs: pairs.pairs, + seq: seq.seq, + set: set.set, + timestamp: timestamp.timestamp +}; +const coreKnownTags = { + 'tag:yaml.org,2002:binary': binary.binary, + 'tag:yaml.org,2002:omap': omap.omap, + 'tag:yaml.org,2002:pairs': pairs.pairs, + 'tag:yaml.org,2002:set': set.set, + 'tag:yaml.org,2002:timestamp': timestamp.timestamp +}; +function getTags(customTags, schemaName) { + let tags = schemas.get(schemaName); + if (!tags) { + if (Array.isArray(customTags)) + tags = []; + else { + const keys = Array.from(schemas.keys()) + .filter(key => key !== 'yaml11') + .map(key => JSON.stringify(key)) + .join(', '); + throw new Error(`Unknown schema "${schemaName}"; use one of ${keys} or define customTags array`); + } + } + if (Array.isArray(customTags)) { + for (const tag of customTags) + tags = tags.concat(tag); + } + else if (typeof customTags === 'function') { + tags = customTags(tags.slice()); + } + return tags.map(tag => { + if (typeof tag !== 'string') + return tag; + const tagObj = tagsByName[tag]; + if (tagObj) + return tagObj; + const keys = Object.keys(tagsByName) + .map(key => JSON.stringify(key)) + .join(', '); + throw new Error(`Unknown custom tag "${tag}"; use one of ${keys}`); + }); +} + +exports.coreKnownTags = coreKnownTags; +exports.getTags = getTags; + + +/***/ }), + +/***/ 5724: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var Scalar = 
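/* Editor's note: illustrative sketch, not part of the bundled source.
   getTags() above resolves a schema name ('core', 'failsafe', 'json', 'yaml-1.1') to its
   tag list and lets customTags extend it, either with tag objects or with the string
   names registered in tagsByName; unknown names throw with the list of valid choices.
   The `require('yaml')` specifier and the forwarding of `schema` / `customTags` from the
   parse options into the Schema constructor are assumptions based on the public package.

   const YAML = require('yaml');

   // The yaml-1.1 schema pulls in the timestamp/binary/omap/set tags bundled below.
   YAML.parse('when: 2001-12-14', { schema: 'yaml-1.1' });        // → { when: Date(...) }

   // Core schema plus an extra tag referenced by its tagsByName key:
   YAML.parse('when: 2001-12-14', { customTags: ['timestamp'] }); // → { when: Date(...) }

   // YAML.parse('x: 1', { schema: 'yaml12' }) would throw:
   //   Unknown schema "yaml12"; use one of "core", "failsafe", "json", "yaml-1.1"
   //   or define customTags array
*/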
__nccwpck_require__(9338); +var stringifyString = __nccwpck_require__(6226); + +const binary = { + identify: value => value instanceof Uint8Array, + default: false, + tag: 'tag:yaml.org,2002:binary', + /** + * Returns a Buffer in node and an Uint8Array in browsers + * + * To use the resulting buffer as an image, you'll want to do something like: + * + * const blob = new Blob([buffer], { type: 'image/jpeg' }) + * document.querySelector('#photo').src = URL.createObjectURL(blob) + */ + resolve(src, onError) { + if (typeof Buffer === 'function') { + return Buffer.from(src, 'base64'); + } + else if (typeof atob === 'function') { + // On IE 11, atob() can't handle newlines + const str = atob(src.replace(/[\n\r]/g, '')); + const buffer = new Uint8Array(str.length); + for (let i = 0; i < str.length; ++i) + buffer[i] = str.charCodeAt(i); + return buffer; + } + else { + onError('This environment does not support reading binary tags; either Buffer or atob is required'); + return src; + } + }, + stringify({ comment, type, value }, ctx, onComment, onChompKeep) { + const buf = value; // checked earlier by binary.identify() + let str; + if (typeof Buffer === 'function') { + str = + buf instanceof Buffer + ? buf.toString('base64') + : Buffer.from(buf.buffer).toString('base64'); + } + else if (typeof btoa === 'function') { + let s = ''; + for (let i = 0; i < buf.length; ++i) + s += String.fromCharCode(buf[i]); + str = btoa(s); + } + else { + throw new Error('This environment does not support writing binary tags; either Buffer or btoa is required'); + } + if (!type) + type = Scalar.Scalar.BLOCK_LITERAL; + if (type !== Scalar.Scalar.QUOTE_DOUBLE) { + const lineWidth = Math.max(ctx.options.lineWidth - ctx.indent.length, ctx.options.minContentWidth); + const n = Math.ceil(str.length / lineWidth); + const lines = new Array(n); + for (let i = 0, o = 0; i < n; ++i, o += lineWidth) { + lines[i] = str.substr(o, lineWidth); + } + str = lines.join(type === Scalar.Scalar.BLOCK_LITERAL ? '\n' : ' '); + } + return stringifyString.stringifyString({ comment, type, value: str }, ctx, onComment, onChompKeep); + } +}; + +exports.binary = binary; + + +/***/ }), + +/***/ 2631: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var Scalar = __nccwpck_require__(9338); + +function boolStringify({ value, source }, ctx) { + const boolObj = value ? trueTag : falseTag; + if (source && boolObj.test.test(source)) + return source; + return value ? ctx.options.trueStr : ctx.options.falseStr; +} +const trueTag = { + identify: value => value === true, + default: true, + tag: 'tag:yaml.org,2002:bool', + test: /^(?:Y|y|[Yy]es|YES|[Tt]rue|TRUE|[Oo]n|ON)$/, + resolve: () => new Scalar.Scalar(true), + stringify: boolStringify +}; +const falseTag = { + identify: value => value === false, + default: true, + tag: 'tag:yaml.org,2002:bool', + test: /^(?:N|n|[Nn]o|NO|[Ff]alse|FALSE|[Oo]ff|OFF)$/i, + resolve: () => new Scalar.Scalar(false), + stringify: boolStringify +}; + +exports.falseTag = falseTag; +exports.trueTag = trueTag; + + +/***/ }), + +/***/ 8035: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var Scalar = __nccwpck_require__(9338); +var stringifyNumber = __nccwpck_require__(4174); + +const floatNaN = { + identify: value => typeof value === 'number', + default: true, + tag: 'tag:yaml.org,2002:float', + test: /^[-+]?\.(?:inf|Inf|INF|nan|NaN|NAN)$/, + resolve: (str) => str.slice(-3).toLowerCase() === 'nan' + ? NaN + : str[0] === '-' + ? 
Number.NEGATIVE_INFINITY + : Number.POSITIVE_INFINITY, + stringify: stringifyNumber.stringifyNumber +}; +const floatExp = { + identify: value => typeof value === 'number', + default: true, + tag: 'tag:yaml.org,2002:float', + format: 'EXP', + test: /^[-+]?(?:[0-9][0-9_]*)?(?:\.[0-9_]*)?[eE][-+]?[0-9]+$/, + resolve: (str) => parseFloat(str.replace(/_/g, '')), + stringify(node) { + const num = Number(node.value); + return isFinite(num) ? num.toExponential() : stringifyNumber.stringifyNumber(node); + } +}; +const float = { + identify: value => typeof value === 'number', + default: true, + tag: 'tag:yaml.org,2002:float', + test: /^[-+]?(?:[0-9][0-9_]*)?\.[0-9_]*$/, + resolve(str) { + const node = new Scalar.Scalar(parseFloat(str.replace(/_/g, ''))); + const dot = str.indexOf('.'); + if (dot !== -1) { + const f = str.substring(dot + 1).replace(/_/g, ''); + if (f[f.length - 1] === '0') + node.minFractionDigits = f.length; + } + return node; + }, + stringify: stringifyNumber.stringifyNumber +}; + +exports.float = float; +exports.floatExp = floatExp; +exports.floatNaN = floatNaN; + + +/***/ }), + +/***/ 9503: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var stringifyNumber = __nccwpck_require__(4174); + +const intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value); +function intResolve(str, offset, radix, { intAsBigInt }) { + const sign = str[0]; + if (sign === '-' || sign === '+') + offset += 1; + str = str.substring(offset).replace(/_/g, ''); + if (intAsBigInt) { + switch (radix) { + case 2: + str = `0b${str}`; + break; + case 8: + str = `0o${str}`; + break; + case 16: + str = `0x${str}`; + break; + } + const n = BigInt(str); + return sign === '-' ? BigInt(-1) * n : n; + } + const n = parseInt(str, radix); + return sign === '-' ? -1 * n : n; +} +function intStringify(node, radix, prefix) { + const { value } = node; + if (intIdentify(value)) { + const str = value.toString(radix); + return value < 0 ? 
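/* Editor's note: illustrative sketch, not part of the bundled source.
   The YAML 1.1 numeric tags above strip `_` separators before parsing, handle an
   explicit sign separately for binary/octal/hex integers, and switch to BigInt when
   intAsBigInt is set (prefixing 0b/0o/0x so BigInt() accepts the radix). The
   `require('yaml')` specifier and option forwarding are assumptions.

   const YAML = require('yaml');

   YAML.parse('n: -0x1_0\nf: 1_000.5', { schema: 'yaml-1.1' });
   // → { n: -16, f: 1000.5 }

   YAML.parse('big: 9_007_199_254_740_993', { schema: 'yaml-1.1', intAsBigInt: true });
   // → { big: 9007199254740993n }
*/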
'-' + prefix + str.substr(1) : prefix + str; + } + return stringifyNumber.stringifyNumber(node); +} +const intBin = { + identify: intIdentify, + default: true, + tag: 'tag:yaml.org,2002:int', + format: 'BIN', + test: /^[-+]?0b[0-1_]+$/, + resolve: (str, _onError, opt) => intResolve(str, 2, 2, opt), + stringify: node => intStringify(node, 2, '0b') +}; +const intOct = { + identify: intIdentify, + default: true, + tag: 'tag:yaml.org,2002:int', + format: 'OCT', + test: /^[-+]?0[0-7_]+$/, + resolve: (str, _onError, opt) => intResolve(str, 1, 8, opt), + stringify: node => intStringify(node, 8, '0') +}; +const int = { + identify: intIdentify, + default: true, + tag: 'tag:yaml.org,2002:int', + test: /^[-+]?[0-9][0-9_]*$/, + resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt), + stringify: stringifyNumber.stringifyNumber +}; +const intHex = { + identify: intIdentify, + default: true, + tag: 'tag:yaml.org,2002:int', + format: 'HEX', + test: /^[-+]?0x[0-9a-fA-F_]+$/, + resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt), + stringify: node => intStringify(node, 16, '0x') +}; + +exports.int = int; +exports.intBin = intBin; +exports.intHex = intHex; +exports.intOct = intOct; + + +/***/ }), + +/***/ 8974: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var identity = __nccwpck_require__(5589); +var toJS = __nccwpck_require__(2463); +var YAMLMap = __nccwpck_require__(6011); +var YAMLSeq = __nccwpck_require__(5161); +var pairs = __nccwpck_require__(9841); + +class YAMLOMap extends YAMLSeq.YAMLSeq { + constructor() { + super(); + this.add = YAMLMap.YAMLMap.prototype.add.bind(this); + this.delete = YAMLMap.YAMLMap.prototype.delete.bind(this); + this.get = YAMLMap.YAMLMap.prototype.get.bind(this); + this.has = YAMLMap.YAMLMap.prototype.has.bind(this); + this.set = YAMLMap.YAMLMap.prototype.set.bind(this); + this.tag = YAMLOMap.tag; + } + /** + * If `ctx` is given, the return type is actually `Map`, + * but TypeScript won't allow widening the signature of a child method. 
+ */ + toJSON(_, ctx) { + if (!ctx) + return super.toJSON(_); + const map = new Map(); + if (ctx?.onCreate) + ctx.onCreate(map); + for (const pair of this.items) { + let key, value; + if (identity.isPair(pair)) { + key = toJS.toJS(pair.key, '', ctx); + value = toJS.toJS(pair.value, key, ctx); + } + else { + key = toJS.toJS(pair, '', ctx); + } + if (map.has(key)) + throw new Error('Ordered maps must not include duplicate keys'); + map.set(key, value); + } + return map; + } + static from(schema, iterable, ctx) { + const pairs$1 = pairs.createPairs(schema, iterable, ctx); + const omap = new this(); + omap.items = pairs$1.items; + return omap; + } +} +YAMLOMap.tag = 'tag:yaml.org,2002:omap'; +const omap = { + collection: 'seq', + identify: value => value instanceof Map, + nodeClass: YAMLOMap, + default: false, + tag: 'tag:yaml.org,2002:omap', + resolve(seq, onError) { + const pairs$1 = pairs.resolvePairs(seq, onError); + const seenKeys = []; + for (const { key } of pairs$1.items) { + if (identity.isScalar(key)) { + if (seenKeys.includes(key.value)) { + onError(`Ordered maps must not include duplicate keys: ${key.value}`); + } + else { + seenKeys.push(key.value); + } + } + } + return Object.assign(new YAMLOMap(), pairs$1); + }, + createNode: (schema, iterable, ctx) => YAMLOMap.from(schema, iterable, ctx) +}; + +exports.YAMLOMap = YAMLOMap; +exports.omap = omap; + + +/***/ }), + +/***/ 9841: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var identity = __nccwpck_require__(5589); +var Pair = __nccwpck_require__(246); +var Scalar = __nccwpck_require__(9338); +var YAMLSeq = __nccwpck_require__(5161); + +function resolvePairs(seq, onError) { + if (identity.isSeq(seq)) { + for (let i = 0; i < seq.items.length; ++i) { + let item = seq.items[i]; + if (identity.isPair(item)) + continue; + else if (identity.isMap(item)) { + if (item.items.length > 1) + onError('Each pair must have its own sequence indicator'); + const pair = item.items[0] || new Pair.Pair(new Scalar.Scalar(null)); + if (item.commentBefore) + pair.key.commentBefore = pair.key.commentBefore + ? `${item.commentBefore}\n${pair.key.commentBefore}` + : item.commentBefore; + if (item.comment) { + const cn = pair.value ?? pair.key; + cn.comment = cn.comment + ? `${item.comment}\n${cn.comment}` + : item.comment; + } + item = pair; + } + seq.items[i] = identity.isPair(item) ? 
item : new Pair.Pair(item); + } + } + else + onError('Expected a sequence for this tag'); + return seq; +} +function createPairs(schema, iterable, ctx) { + const { replacer } = ctx; + const pairs = new YAMLSeq.YAMLSeq(schema); + pairs.tag = 'tag:yaml.org,2002:pairs'; + let i = 0; + if (iterable && Symbol.iterator in Object(iterable)) + for (let it of iterable) { + if (typeof replacer === 'function') + it = replacer.call(iterable, String(i++), it); + let key, value; + if (Array.isArray(it)) { + if (it.length === 2) { + key = it[0]; + value = it[1]; + } + else + throw new TypeError(`Expected [key, value] tuple: ${it}`); + } + else if (it && it instanceof Object) { + const keys = Object.keys(it); + if (keys.length === 1) { + key = keys[0]; + value = it[key]; + } + else { + throw new TypeError(`Expected tuple with one key, not ${keys.length} keys`); + } + } + else { + key = it; + } + pairs.items.push(Pair.createPair(key, value, ctx)); + } + return pairs; +} +const pairs = { + collection: 'seq', + default: false, + tag: 'tag:yaml.org,2002:pairs', + resolve: resolvePairs, + createNode: createPairs +}; + +exports.createPairs = createPairs; +exports.pairs = pairs; +exports.resolvePairs = resolvePairs; + + +/***/ }), + +/***/ 5389: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var map = __nccwpck_require__(83); +var _null = __nccwpck_require__(6703); +var seq = __nccwpck_require__(1693); +var string = __nccwpck_require__(2201); +var binary = __nccwpck_require__(5724); +var bool = __nccwpck_require__(2631); +var float = __nccwpck_require__(8035); +var int = __nccwpck_require__(9503); +var omap = __nccwpck_require__(8974); +var pairs = __nccwpck_require__(9841); +var set = __nccwpck_require__(7847); +var timestamp = __nccwpck_require__(1156); + +const schema = [ + map.map, + seq.seq, + string.string, + _null.nullTag, + bool.trueTag, + bool.falseTag, + int.intBin, + int.intOct, + int.int, + int.intHex, + float.floatNaN, + float.floatExp, + float.float, + binary.binary, + omap.omap, + pairs.pairs, + set.set, + timestamp.intTime, + timestamp.floatTime, + timestamp.timestamp +]; + +exports.schema = schema; + + +/***/ }), + +/***/ 7847: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var identity = __nccwpck_require__(5589); +var Pair = __nccwpck_require__(246); +var YAMLMap = __nccwpck_require__(6011); + +class YAMLSet extends YAMLMap.YAMLMap { + constructor(schema) { + super(schema); + this.tag = YAMLSet.tag; + } + add(key) { + let pair; + if (identity.isPair(key)) + pair = key; + else if (key && + typeof key === 'object' && + 'key' in key && + 'value' in key && + key.value === null) + pair = new Pair.Pair(key.key, null); + else + pair = new Pair.Pair(key, null); + const prev = YAMLMap.findPair(this.items, pair.key); + if (!prev) + this.items.push(pair); + } + /** + * If `keepPair` is `true`, returns the Pair matching `key`. + * Otherwise, returns the value of that Pair's key. + */ + get(key, keepPair) { + const pair = YAMLMap.findPair(this.items, key); + return !keepPair && identity.isPair(pair) + ? identity.isScalar(pair.key) + ? 
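/* Editor's note: illustrative sketch, not part of the bundled source.
   resolvePairs() above turns each single-key mapping in a sequence into a Pair (one key
   per sequence item), and YAMLOMap builds on that, rejecting duplicate keys and
   converting to a Map in toJSON. The `require('yaml')` specifier is an assumption.

   const YAML = require('yaml');

   YAML.parse('!!omap\n- a: 1\n- b: 2\n', { schema: 'yaml-1.1' });
   // → Map(2) { 'a' => 1, 'b' => 2 }

   YAML.parse('!!omap\n- a: 1\n- a: 2\n', { schema: 'yaml-1.1' });
   // → throws: Ordered maps must not include duplicate keys: a
*/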
pair.key.value + : pair.key + : pair; + } + set(key, value) { + if (typeof value !== 'boolean') + throw new Error(`Expected boolean value for set(key, value) in a YAML set, not ${typeof value}`); + const prev = YAMLMap.findPair(this.items, key); + if (prev && !value) { + this.items.splice(this.items.indexOf(prev), 1); + } + else if (!prev && value) { + this.items.push(new Pair.Pair(key)); + } + } + toJSON(_, ctx) { + return super.toJSON(_, ctx, Set); + } + toString(ctx, onComment, onChompKeep) { + if (!ctx) + return JSON.stringify(this); + if (this.hasAllNullValues(true)) + return super.toString(Object.assign({}, ctx, { allNullValues: true }), onComment, onChompKeep); + else + throw new Error('Set items must all have null values'); + } + static from(schema, iterable, ctx) { + const { replacer } = ctx; + const set = new this(schema); + if (iterable && Symbol.iterator in Object(iterable)) + for (let value of iterable) { + if (typeof replacer === 'function') + value = replacer.call(iterable, value, value); + set.items.push(Pair.createPair(value, null, ctx)); + } + return set; + } +} +YAMLSet.tag = 'tag:yaml.org,2002:set'; +const set = { + collection: 'map', + identify: value => value instanceof Set, + nodeClass: YAMLSet, + default: false, + tag: 'tag:yaml.org,2002:set', + createNode: (schema, iterable, ctx) => YAMLSet.from(schema, iterable, ctx), + resolve(map, onError) { + if (identity.isMap(map)) { + if (map.hasAllNullValues(true)) + return Object.assign(new YAMLSet(), map); + else + onError('Set items must all have null values'); + } + else + onError('Expected a mapping for this tag'); + return map; + } +}; + +exports.YAMLSet = YAMLSet; +exports.set = set; + + +/***/ }), + +/***/ 1156: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var stringifyNumber = __nccwpck_require__(4174); + +/** Internal types handle bigint as number, because TS can't figure it out. */ +function parseSexagesimal(str, asBigInt) { + const sign = str[0]; + const parts = sign === '-' || sign === '+' ? str.substring(1) : str; + const num = (n) => asBigInt ? BigInt(n) : Number(n); + const res = parts + .replace(/_/g, '') + .split(':') + .reduce((res, p) => res * num(60) + num(p), num(0)); + return (sign === '-' ? num(-1) * res : res); +} +/** + * hhhh:mm:ss.sss + * + * Internal types handle bigint as number, because TS can't figure it out. 
+ */ +function stringifySexagesimal(node) { + let { value } = node; + let num = (n) => n; + if (typeof value === 'bigint') + num = n => BigInt(n); + else if (isNaN(value) || !isFinite(value)) + return stringifyNumber.stringifyNumber(node); + let sign = ''; + if (value < 0) { + sign = '-'; + value *= num(-1); + } + const _60 = num(60); + const parts = [value % _60]; // seconds, including ms + if (value < 60) { + parts.unshift(0); // at least one : is required + } + else { + value = (value - parts[0]) / _60; + parts.unshift(value % _60); // minutes + if (value >= 60) { + value = (value - parts[0]) / _60; + parts.unshift(value); // hours + } + } + return (sign + + parts + .map(n => String(n).padStart(2, '0')) + .join(':') + .replace(/000000\d*$/, '') // % 60 may introduce error + ); +} +const intTime = { + identify: value => typeof value === 'bigint' || Number.isInteger(value), + default: true, + tag: 'tag:yaml.org,2002:int', + format: 'TIME', + test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+$/, + resolve: (str, _onError, { intAsBigInt }) => parseSexagesimal(str, intAsBigInt), + stringify: stringifySexagesimal +}; +const floatTime = { + identify: value => typeof value === 'number', + default: true, + tag: 'tag:yaml.org,2002:float', + format: 'TIME', + test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]*$/, + resolve: str => parseSexagesimal(str, false), + stringify: stringifySexagesimal +}; +const timestamp = { + identify: value => value instanceof Date, + default: true, + tag: 'tag:yaml.org,2002:timestamp', + // If the time zone is omitted, the timestamp is assumed to be specified in UTC. The time part + // may be omitted altogether, resulting in a date format. In such a case, the time part is + // assumed to be 00:00:00Z (start of day, UTC). + test: RegExp('^([0-9]{4})-([0-9]{1,2})-([0-9]{1,2})' + // YYYY-Mm-Dd + '(?:' + // time is optional + '(?:t|T|[ \\t]+)' + // t | T | whitespace + '([0-9]{1,2}):([0-9]{1,2}):([0-9]{1,2}(\\.[0-9]+)?)' + // Hh:Mm:Ss(.ss)? + '(?:[ \\t]*(Z|[-+][012]?[0-9](?::[0-9]{2})?))?' + // Z | +5 | -03:30 + ')?$'), + resolve(str) { + const match = str.match(timestamp.test); + if (!match) + throw new Error('!!timestamp expects a date, starting with yyyy-mm-dd'); + const [, year, month, day, hour, minute, second] = match.map(Number); + const millisec = match[7] ? Number((match[7] + '00').substr(1, 3)) : 0; + let date = Date.UTC(year, month - 1, day, hour || 0, minute || 0, second || 0, millisec); + const tz = match[8]; + if (tz && tz !== 'Z') { + let d = parseSexagesimal(tz, false); + if (Math.abs(d) < 30) + d *= 60; + date -= 60000 * d; + } + return new Date(date); + }, + stringify: ({ value }) => value.toISOString().replace(/((T00:00)?:00)?\.000Z$/, '') +}; + +exports.floatTime = floatTime; +exports.intTime = intTime; +exports.timestamp = timestamp; + + +/***/ }), + +/***/ 2889: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +const FOLD_FLOW = 'flow'; +const FOLD_BLOCK = 'block'; +const FOLD_QUOTED = 'quoted'; +/** + * Tries to keep input at up to `lineWidth` characters, splitting only on spaces + * not followed by newlines or spaces unless `mode` is `'quoted'`. Lines are + * terminated with `\n` and started with `indent`. 
+ */ +function foldFlowLines(text, indent, mode = 'flow', { indentAtStart, lineWidth = 80, minContentWidth = 20, onFold, onOverflow } = {}) { + if (!lineWidth || lineWidth < 0) + return text; + const endStep = Math.max(1 + minContentWidth, 1 + lineWidth - indent.length); + if (text.length <= endStep) + return text; + const folds = []; + const escapedFolds = {}; + let end = lineWidth - indent.length; + if (typeof indentAtStart === 'number') { + if (indentAtStart > lineWidth - Math.max(2, minContentWidth)) + folds.push(0); + else + end = lineWidth - indentAtStart; + } + let split = undefined; + let prev = undefined; + let overflow = false; + let i = -1; + let escStart = -1; + let escEnd = -1; + if (mode === FOLD_BLOCK) { + i = consumeMoreIndentedLines(text, i); + if (i !== -1) + end = i + endStep; + } + for (let ch; (ch = text[(i += 1)]);) { + if (mode === FOLD_QUOTED && ch === '\\') { + escStart = i; + switch (text[i + 1]) { + case 'x': + i += 3; + break; + case 'u': + i += 5; + break; + case 'U': + i += 9; + break; + default: + i += 1; + } + escEnd = i; + } + if (ch === '\n') { + if (mode === FOLD_BLOCK) + i = consumeMoreIndentedLines(text, i); + end = i + endStep; + split = undefined; + } + else { + if (ch === ' ' && + prev && + prev !== ' ' && + prev !== '\n' && + prev !== '\t') { + // space surrounded by non-space can be replaced with newline + indent + const next = text[i + 1]; + if (next && next !== ' ' && next !== '\n' && next !== '\t') + split = i; + } + if (i >= end) { + if (split) { + folds.push(split); + end = split + endStep; + split = undefined; + } + else if (mode === FOLD_QUOTED) { + // white-space collected at end may stretch past lineWidth + while (prev === ' ' || prev === '\t') { + prev = ch; + ch = text[(i += 1)]; + overflow = true; + } + // Account for newline escape, but don't break preceding escape + const j = i > escEnd + 1 ? 
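/* Editor's note: illustrative sketch, not part of the bundled source.
   foldFlowLines() above re-wraps long scalar content: it breaks only at single spaces
   surrounded by non-spaces, keeps each line near lineWidth, starts continuation lines
   with the given indent, and in 'quoted' mode avoids splitting inside escape sequences.
   The 'yaml/util' export path is an assumption; the folded output shown is worked out
   from the code above.

   const { foldFlowLines } = require('yaml/util');

   const text = 'alpha beta gamma delta epsilon zeta eta theta iota kappa';
   foldFlowLines(text, '  ', 'flow', { lineWidth: 20, minContentWidth: 0 });
   // → 'alpha beta gamma\n  delta epsilon zeta\n  eta theta iota\n  kappa'
*/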
i - 2 : escStart - 1; + // Bail out if lineWidth & minContentWidth are shorter than an escape string + if (escapedFolds[j]) + return text; + folds.push(j); + escapedFolds[j] = true; + end = j + endStep; + split = undefined; + } + else { + overflow = true; + } + } + } + prev = ch; + } + if (overflow && onOverflow) + onOverflow(); + if (folds.length === 0) + return text; + if (onFold) + onFold(); + let res = text.slice(0, folds[0]); + for (let i = 0; i < folds.length; ++i) { + const fold = folds[i]; + const end = folds[i + 1] || text.length; + if (fold === 0) + res = `\n${indent}${text.slice(0, end)}`; + else { + if (mode === FOLD_QUOTED && escapedFolds[fold]) + res += `${text[fold]}\\`; + res += `\n${indent}${text.slice(fold + 1, end)}`; + } + } + return res; +} +/** + * Presumes `i + 1` is at the start of a line + * @returns index of last newline in more-indented block + */ +function consumeMoreIndentedLines(text, i) { + let ch = text[i + 1]; + while (ch === ' ' || ch === '\t') { + do { + ch = text[(i += 1)]; + } while (ch && ch !== '\n'); + ch = text[i + 1]; + } + return i; +} + +exports.FOLD_BLOCK = FOLD_BLOCK; +exports.FOLD_FLOW = FOLD_FLOW; +exports.FOLD_QUOTED = FOLD_QUOTED; +exports.foldFlowLines = foldFlowLines; + + +/***/ }), + +/***/ 8409: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var anchors = __nccwpck_require__(8459); +var identity = __nccwpck_require__(5589); +var stringifyComment = __nccwpck_require__(5182); +var stringifyString = __nccwpck_require__(6226); + +function createStringifyContext(doc, options) { + const opt = Object.assign({ + blockQuote: true, + commentString: stringifyComment.stringifyComment, + defaultKeyType: null, + defaultStringType: 'PLAIN', + directives: null, + doubleQuotedAsJSON: false, + doubleQuotedMinMultiLineLength: 40, + falseStr: 'false', + flowCollectionPadding: true, + indentSeq: true, + lineWidth: 80, + minContentWidth: 20, + nullStr: 'null', + simpleKeys: false, + singleQuote: null, + trueStr: 'true', + verifyAliasOrder: true + }, doc.schema.toStringOptions, options); + let inFlow; + switch (opt.collectionStyle) { + case 'block': + inFlow = false; + break; + case 'flow': + inFlow = true; + break; + default: + inFlow = null; + } + return { + anchors: new Set(), + doc, + flowCollectionPadding: opt.flowCollectionPadding ? ' ' : '', + indent: '', + indentStep: typeof opt.indent === 'number' ? ' '.repeat(opt.indent) : ' ', + inFlow, + options: opt + }; +} +function getTagObject(tags, item) { + if (item.tag) { + const match = tags.filter(t => t.tag === item.tag); + if (match.length > 0) + return match.find(t => t.format === item.format) ?? match[0]; + } + let tagObj = undefined; + let obj; + if (identity.isScalar(item)) { + obj = item.value; + const match = tags.filter(t => t.identify?.(obj)); + tagObj = + match.find(t => t.format === item.format) ?? match.find(t => !t.format); + } + else { + obj = item; + tagObj = tags.find(t => t.nodeClass && obj instanceof t.nodeClass); + } + if (!tagObj) { + const name = obj?.constructor?.name ?? 
typeof obj; + throw new Error(`Tag not resolved for ${name} value`); + } + return tagObj; +} +// needs to be called before value stringifier to allow for circular anchor refs +function stringifyProps(node, tagObj, { anchors: anchors$1, doc }) { + if (!doc.directives) + return ''; + const props = []; + const anchor = (identity.isScalar(node) || identity.isCollection(node)) && node.anchor; + if (anchor && anchors.anchorIsValid(anchor)) { + anchors$1.add(anchor); + props.push(`&${anchor}`); + } + const tag = node.tag ? node.tag : tagObj.default ? null : tagObj.tag; + if (tag) + props.push(doc.directives.tagString(tag)); + return props.join(' '); +} +function stringify(item, ctx, onComment, onChompKeep) { + if (identity.isPair(item)) + return item.toString(ctx, onComment, onChompKeep); + if (identity.isAlias(item)) { + if (ctx.doc.directives) + return item.toString(ctx); + if (ctx.resolvedAliases?.has(item)) { + throw new TypeError(`Cannot stringify circular structure without alias nodes`); + } + else { + if (ctx.resolvedAliases) + ctx.resolvedAliases.add(item); + else + ctx.resolvedAliases = new Set([item]); + item = item.resolve(ctx.doc); + } + } + let tagObj = undefined; + const node = identity.isNode(item) + ? item + : ctx.doc.createNode(item, { onTagObj: o => (tagObj = o) }); + if (!tagObj) + tagObj = getTagObject(ctx.doc.schema.tags, node); + const props = stringifyProps(node, tagObj, ctx); + if (props.length > 0) + ctx.indentAtStart = (ctx.indentAtStart ?? 0) + props.length + 1; + const str = typeof tagObj.stringify === 'function' + ? tagObj.stringify(node, ctx, onComment, onChompKeep) + : identity.isScalar(node) + ? stringifyString.stringifyString(node, ctx, onComment, onChompKeep) + : node.toString(ctx, onComment, onChompKeep); + if (!props) + return str; + return identity.isScalar(node) || str[0] === '{' || str[0] === '[' + ? `${props} ${str}` + : `${props}\n${ctx.indent}${str}`; +} + +exports.createStringifyContext = createStringifyContext; +exports.stringify = stringify; + + +/***/ }), + +/***/ 2466: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var Collection = __nccwpck_require__(3466); +var identity = __nccwpck_require__(5589); +var stringify = __nccwpck_require__(8409); +var stringifyComment = __nccwpck_require__(5182); + +function stringifyCollection(collection, ctx, options) { + const flow = ctx.inFlow ?? collection.flow; + const stringify = flow ? stringifyFlowCollection : stringifyBlockCollection; + return stringify(collection, ctx, options); +} +function stringifyBlockCollection({ comment, items }, ctx, { blockItemPrefix, flowChars, itemIndent, onChompKeep, onComment }) { + const { indent, options: { commentString } } = ctx; + const itemCtx = Object.assign({}, ctx, { indent: itemIndent, type: null }); + let chompKeep = false; // flag for the preceding node's status + const lines = []; + for (let i = 0; i < items.length; ++i) { + const item = items[i]; + let comment = null; + if (identity.isNode(item)) { + if (!chompKeep && item.spaceBefore) + lines.push(''); + addCommentBefore(ctx, lines, item.commentBefore, chompKeep); + if (item.comment) + comment = item.comment; + } + else if (identity.isPair(item)) { + const ik = identity.isNode(item.key) ? 
item.key : null; + if (ik) { + if (!chompKeep && ik.spaceBefore) + lines.push(''); + addCommentBefore(ctx, lines, ik.commentBefore, chompKeep); + } + } + chompKeep = false; + let str = stringify.stringify(item, itemCtx, () => (comment = null), () => (chompKeep = true)); + if (comment) + str += stringifyComment.lineComment(str, itemIndent, commentString(comment)); + if (chompKeep && comment) + chompKeep = false; + lines.push(blockItemPrefix + str); + } + let str; + if (lines.length === 0) { + str = flowChars.start + flowChars.end; + } + else { + str = lines[0]; + for (let i = 1; i < lines.length; ++i) { + const line = lines[i]; + str += line ? `\n${indent}${line}` : '\n'; + } + } + if (comment) { + str += '\n' + stringifyComment.indentComment(commentString(comment), indent); + if (onComment) + onComment(); + } + else if (chompKeep && onChompKeep) + onChompKeep(); + return str; +} +function stringifyFlowCollection({ comment, items }, ctx, { flowChars, itemIndent, onComment }) { + const { indent, indentStep, flowCollectionPadding: fcPadding, options: { commentString } } = ctx; + itemIndent += indentStep; + const itemCtx = Object.assign({}, ctx, { + indent: itemIndent, + inFlow: true, + type: null + }); + let reqNewline = false; + let linesAtValue = 0; + const lines = []; + for (let i = 0; i < items.length; ++i) { + const item = items[i]; + let comment = null; + if (identity.isNode(item)) { + if (item.spaceBefore) + lines.push(''); + addCommentBefore(ctx, lines, item.commentBefore, false); + if (item.comment) + comment = item.comment; + } + else if (identity.isPair(item)) { + const ik = identity.isNode(item.key) ? item.key : null; + if (ik) { + if (ik.spaceBefore) + lines.push(''); + addCommentBefore(ctx, lines, ik.commentBefore, false); + if (ik.comment) + reqNewline = true; + } + const iv = identity.isNode(item.value) ? item.value : null; + if (iv) { + if (iv.comment) + comment = iv.comment; + if (iv.commentBefore) + reqNewline = true; + } + else if (item.value == null && ik?.comment) { + comment = ik.comment; + } + } + if (comment) + reqNewline = true; + let str = stringify.stringify(item, itemCtx, () => (comment = null)); + if (i < items.length - 1) + str += ','; + if (comment) + str += stringifyComment.lineComment(str, itemIndent, commentString(comment)); + if (!reqNewline && (lines.length > linesAtValue || str.includes('\n'))) + reqNewline = true; + lines.push(str); + linesAtValue = lines.length; + } + let str; + const { start, end } = flowChars; + if (lines.length === 0) { + str = start + end; + } + else { + if (!reqNewline) { + const len = lines.reduce((sum, line) => sum + line.length + 2, 2); + reqNewline = len > Collection.Collection.maxFlowStringSingleLineLength; + } + if (reqNewline) { + str = start; + for (const line of lines) + str += line ? 
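/* Editor's note: illustrative sketch, not part of the bundled source.
   stringifyCollection() above dispatches to the block or flow form depending on
   ctx.inFlow (set from the collectionStyle option) or the node's own `flow` flag;
   flow output is padded and kept on one line until it exceeds the max single-line
   length. The `require('yaml')` specifier is an assumption.

   const YAML = require('yaml');

   YAML.stringify({ list: [1, 2, 3] });
   // → "list:\n  - 1\n  - 2\n  - 3\n"        (block collections)

   YAML.stringify({ list: [1, 2, 3] }, { collectionStyle: 'flow' });
   // → "{ list: [ 1, 2, 3 ] }\n"             (flow collections)
*/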
`\n${indentStep}${indent}${line}` : '\n'; + str += `\n${indent}${end}`; + } + else { + str = `${start}${fcPadding}${lines.join(' ')}${fcPadding}${end}`; + } + } + if (comment) { + str += stringifyComment.lineComment(str, indent, commentString(comment)); + if (onComment) + onComment(); + } + return str; +} +function addCommentBefore({ indent, options: { commentString } }, lines, comment, chompKeep) { + if (comment && chompKeep) + comment = comment.replace(/^\n+/, ''); + if (comment) { + const ic = stringifyComment.indentComment(commentString(comment), indent); + lines.push(ic.trimStart()); // Avoid double indent on first line + } +} + +exports.stringifyCollection = stringifyCollection; + + +/***/ }), + +/***/ 5182: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +/** + * Stringifies a comment. + * + * Empty comment lines are left empty, + * lines consisting of a single space are replaced by `#`, + * and all other lines are prefixed with a `#`. + */ +const stringifyComment = (str) => str.replace(/^(?!$)(?: $)?/gm, '#'); +function indentComment(comment, indent) { + if (/^\n+$/.test(comment)) + return comment.substring(1); + return indent ? comment.replace(/^(?! *$)/gm, indent) : comment; +} +const lineComment = (str, indent, comment) => str.endsWith('\n') + ? indentComment(comment, indent) + : comment.includes('\n') + ? '\n' + indentComment(comment, indent) + : (str.endsWith(' ') ? '' : ' ') + comment; + +exports.indentComment = indentComment; +exports.lineComment = lineComment; +exports.stringifyComment = stringifyComment; + + +/***/ }), + +/***/ 5225: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var identity = __nccwpck_require__(5589); +var stringify = __nccwpck_require__(8409); +var stringifyComment = __nccwpck_require__(5182); + +function stringifyDocument(doc, options) { + const lines = []; + let hasDirectives = options.directives === true; + if (options.directives !== false && doc.directives) { + const dir = doc.directives.toString(doc); + if (dir) { + lines.push(dir); + hasDirectives = true; + } + else if (doc.directives.docStart) + hasDirectives = true; + } + if (hasDirectives) + lines.push('---'); + const ctx = stringify.createStringifyContext(doc, options); + const { commentString } = ctx.options; + if (doc.commentBefore) { + if (lines.length !== 1) + lines.unshift(''); + const cs = commentString(doc.commentBefore); + lines.unshift(stringifyComment.indentComment(cs, '')); + } + let chompKeep = false; + let contentComment = null; + if (doc.contents) { + if (identity.isNode(doc.contents)) { + if (doc.contents.spaceBefore && hasDirectives) + lines.push(''); + if (doc.contents.commentBefore) { + const cs = commentString(doc.contents.commentBefore); + lines.push(stringifyComment.indentComment(cs, '')); + } + // top-level block scalars need to be indented if followed by a comment + ctx.forceBlockIndent = !!doc.comment; + contentComment = doc.contents.comment; + } + const onChompKeep = contentComment ? undefined : () => (chompKeep = true); + let body = stringify.stringify(doc.contents, ctx, () => (contentComment = null), onChompKeep); + if (contentComment) + body += stringifyComment.lineComment(body, '', commentString(contentComment)); + if ((body[0] === '|' || body[0] === '>') && + lines[lines.length - 1] === '---') { + // Top-level block scalars with a preceding doc marker ought to use the + // same line for their header. 
+ lines[lines.length - 1] = `--- ${body}`; + } + else + lines.push(body); + } + else { + lines.push(stringify.stringify(doc.contents, ctx)); + } + if (doc.directives?.docEnd) { + if (doc.comment) { + const cs = commentString(doc.comment); + if (cs.includes('\n')) { + lines.push('...'); + lines.push(stringifyComment.indentComment(cs, '')); + } + else { + lines.push(`... ${cs}`); + } + } + else { + lines.push('...'); + } + } + else { + let dc = doc.comment; + if (dc && chompKeep) + dc = dc.replace(/^\n+/, ''); + if (dc) { + if ((!chompKeep || contentComment) && lines[lines.length - 1] !== '') + lines.push(''); + lines.push(stringifyComment.indentComment(commentString(dc), '')); + } + } + return lines.join('\n') + '\n'; +} + +exports.stringifyDocument = stringifyDocument; + + +/***/ }), + +/***/ 4174: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +function stringifyNumber({ format, minFractionDigits, tag, value }) { + if (typeof value === 'bigint') + return String(value); + const num = typeof value === 'number' ? value : Number(value); + if (!isFinite(num)) + return isNaN(num) ? '.nan' : num < 0 ? '-.inf' : '.inf'; + let n = JSON.stringify(value); + if (!format && + minFractionDigits && + (!tag || tag === 'tag:yaml.org,2002:float') && + /^\d/.test(n)) { + let i = n.indexOf('.'); + if (i < 0) { + i = n.length; + n += '.'; + } + let d = minFractionDigits - (n.length - i - 1); + while (d-- > 0) + n += '0'; + } + return n; +} + +exports.stringifyNumber = stringifyNumber; + + +/***/ }), + +/***/ 4875: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var identity = __nccwpck_require__(5589); +var Scalar = __nccwpck_require__(9338); +var stringify = __nccwpck_require__(8409); +var stringifyComment = __nccwpck_require__(5182); + +function stringifyPair({ key, value }, ctx, onComment, onChompKeep) { + const { allNullValues, doc, indent, indentStep, options: { commentString, indentSeq, simpleKeys } } = ctx; + let keyComment = (identity.isNode(key) && key.comment) || null; + if (simpleKeys) { + if (keyComment) { + throw new Error('With simple keys, key nodes cannot have comments'); + } + if (identity.isCollection(key)) { + const msg = 'With simple keys, collection cannot be used as a key value'; + throw new Error(msg); + } + } + let explicitKey = !simpleKeys && + (!key || + (keyComment && value == null && !ctx.inFlow) || + identity.isCollection(key) || + (identity.isScalar(key) + ? key.type === Scalar.Scalar.BLOCK_FOLDED || key.type === Scalar.Scalar.BLOCK_LITERAL + : typeof key === 'object')); + ctx = Object.assign({}, ctx, { + allNullValues: false, + implicitKey: !explicitKey && (simpleKeys || !allNullValues), + indent: indent + indentStep + }); + let keyCommentDone = false; + let chompKeep = false; + let str = stringify.stringify(key, ctx, () => (keyCommentDone = true), () => (chompKeep = true)); + if (!explicitKey && !ctx.inFlow && str.length > 1024) { + if (simpleKeys) + throw new Error('With simple keys, single line scalar must not span more than 1024 characters'); + explicitKey = true; + } + if (ctx.inFlow) { + if (allNullValues || value == null) { + if (keyCommentDone && onComment) + onComment(); + return str === '' ? '?' : explicitKey ? `? ${str}` : str; + } + } + else if ((allNullValues && !simpleKeys) || (value == null && explicitKey)) { + str = `? 
${str}`; + if (keyComment && !keyCommentDone) { + str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment)); + } + else if (chompKeep && onChompKeep) + onChompKeep(); + return str; + } + if (keyCommentDone) + keyComment = null; + if (explicitKey) { + if (keyComment) + str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment)); + str = `? ${str}\n${indent}:`; + } + else { + str = `${str}:`; + if (keyComment) + str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment)); + } + let vsb, vcb, valueComment; + if (identity.isNode(value)) { + vsb = !!value.spaceBefore; + vcb = value.commentBefore; + valueComment = value.comment; + } + else { + vsb = false; + vcb = null; + valueComment = null; + if (value && typeof value === 'object') + value = doc.createNode(value); + } + ctx.implicitKey = false; + if (!explicitKey && !keyComment && identity.isScalar(value)) + ctx.indentAtStart = str.length + 1; + chompKeep = false; + if (!indentSeq && + indentStep.length >= 2 && + !ctx.inFlow && + !explicitKey && + identity.isSeq(value) && + !value.flow && + !value.tag && + !value.anchor) { + // If indentSeq === false, consider '- ' as part of indentation where possible + ctx.indent = ctx.indent.substring(2); + } + let valueCommentDone = false; + const valueStr = stringify.stringify(value, ctx, () => (valueCommentDone = true), () => (chompKeep = true)); + let ws = ' '; + if (keyComment || vsb || vcb) { + ws = vsb ? '\n' : ''; + if (vcb) { + const cs = commentString(vcb); + ws += `\n${stringifyComment.indentComment(cs, ctx.indent)}`; + } + if (valueStr === '' && !ctx.inFlow) { + if (ws === '\n') + ws = '\n\n'; + } + else { + ws += `\n${ctx.indent}`; + } + } + else if (!explicitKey && identity.isCollection(value)) { + const vs0 = valueStr[0]; + const nl0 = valueStr.indexOf('\n'); + const hasNewline = nl0 !== -1; + const flow = ctx.inFlow ?? value.flow ?? value.items.length === 0; + if (hasNewline || !flow) { + let hasPropsLine = false; + if (hasNewline && (vs0 === '&' || vs0 === '!')) { + let sp0 = valueStr.indexOf(' '); + if (vs0 === '&' && + sp0 !== -1 && + sp0 < nl0 && + valueStr[sp0 + 1] === '!') { + sp0 = valueStr.indexOf(' ', sp0 + 1); + } + if (sp0 === -1 || nl0 < sp0) + hasPropsLine = true; + } + if (!hasPropsLine) + ws = `\n${ctx.indent}`; + } + } + else if (valueStr === '' || valueStr[0] === '\n') { + ws = ''; + } + str += ws + valueStr; + if (ctx.inFlow) { + if (valueCommentDone && onComment) + onComment(); + } + else if (valueComment && !valueCommentDone) { + str += stringifyComment.lineComment(str, ctx.indent, commentString(valueComment)); + } + else if (chompKeep && onChompKeep) { + onChompKeep(); + } + return str; +} + +exports.stringifyPair = stringifyPair; + + +/***/ }), + +/***/ 6226: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var Scalar = __nccwpck_require__(9338); +var foldFlowLines = __nccwpck_require__(2889); + +const getFoldOptions = (ctx, isBlock) => ({ + indentAtStart: isBlock ? ctx.indent.length : ctx.indentAtStart, + lineWidth: ctx.options.lineWidth, + minContentWidth: ctx.options.minContentWidth +}); +// Also checks for lines starting with %, as parsing the output as YAML 1.1 will +// presume that's starting a new document. 
+const containsDocumentMarker = (str) => /^(%|---|\.\.\.)/m.test(str); +function lineLengthOverLimit(str, lineWidth, indentLength) { + if (!lineWidth || lineWidth < 0) + return false; + const limit = lineWidth - indentLength; + const strLen = str.length; + if (strLen <= limit) + return false; + for (let i = 0, start = 0; i < strLen; ++i) { + if (str[i] === '\n') { + if (i - start > limit) + return true; + start = i + 1; + if (strLen - start <= limit) + return false; + } + } + return true; +} +function doubleQuotedString(value, ctx) { + const json = JSON.stringify(value); + if (ctx.options.doubleQuotedAsJSON) + return json; + const { implicitKey } = ctx; + const minMultiLineLength = ctx.options.doubleQuotedMinMultiLineLength; + const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : ''); + let str = ''; + let start = 0; + for (let i = 0, ch = json[i]; ch; ch = json[++i]) { + if (ch === ' ' && json[i + 1] === '\\' && json[i + 2] === 'n') { + // space before newline needs to be escaped to not be folded + str += json.slice(start, i) + '\\ '; + i += 1; + start = i; + ch = '\\'; + } + if (ch === '\\') + switch (json[i + 1]) { + case 'u': + { + str += json.slice(start, i); + const code = json.substr(i + 2, 4); + switch (code) { + case '0000': + str += '\\0'; + break; + case '0007': + str += '\\a'; + break; + case '000b': + str += '\\v'; + break; + case '001b': + str += '\\e'; + break; + case '0085': + str += '\\N'; + break; + case '00a0': + str += '\\_'; + break; + case '2028': + str += '\\L'; + break; + case '2029': + str += '\\P'; + break; + default: + if (code.substr(0, 2) === '00') + str += '\\x' + code.substr(2); + else + str += json.substr(i, 6); + } + i += 5; + start = i + 1; + } + break; + case 'n': + if (implicitKey || + json[i + 2] === '"' || + json.length < minMultiLineLength) { + i += 1; + } + else { + // folding will eat first newline + str += json.slice(start, i) + '\n\n'; + while (json[i + 2] === '\\' && + json[i + 3] === 'n' && + json[i + 4] !== '"') { + str += '\n'; + i += 2; + } + str += indent; + // space after newline needs to be escaped to not be folded + if (json[i + 2] === ' ') + str += '\\'; + i += 1; + start = i + 1; + } + break; + default: + i += 1; + } + } + str = start ? str + json.slice(start) : json; + return implicitKey + ? str + : foldFlowLines.foldFlowLines(str, indent, foldFlowLines.FOLD_QUOTED, getFoldOptions(ctx, false)); +} +function singleQuotedString(value, ctx) { + if (ctx.options.singleQuote === false || + (ctx.implicitKey && value.includes('\n')) || + /[ \t]\n|\n[ \t]/.test(value) // single quoted string can't have leading or trailing whitespace around newline + ) + return doubleQuotedString(value, ctx); + const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : ''); + const res = "'" + value.replace(/'/g, "''").replace(/\n+/g, `$&\n${indent}`) + "'"; + return ctx.implicitKey + ? res + : foldFlowLines.foldFlowLines(res, indent, foldFlowLines.FOLD_FLOW, getFoldOptions(ctx, false)); +} +function quotedString(value, ctx) { + const { singleQuote } = ctx.options; + let qs; + if (singleQuote === false) + qs = doubleQuotedString; + else { + const hasDouble = value.includes('"'); + const hasSingle = value.includes("'"); + if (hasDouble && !hasSingle) + qs = singleQuotedString; + else if (hasSingle && !hasDouble) + qs = doubleQuotedString; + else + qs = singleQuote ? 
singleQuotedString : doubleQuotedString; + } + return qs(value, ctx); +} +// The negative lookbehind avoids a polynomial search, +// but isn't supported yet on Safari: https://caniuse.com/js-regexp-lookbehind +let blockEndNewlines; +try { + blockEndNewlines = new RegExp('(^|(?<!\n))\n+(?!\n|$)', 'g'); +} +catch { + blockEndNewlines = /\n+(?!\n|$)/g; +} +function blockString({ comment, type, value }, ctx, onComment, onChompKeep) { + const { blockQuote, commentString, lineWidth } = ctx.options; + // 1. Block can't end in whitespace unless the last line is non-empty. + // 2. Strings consisting of only whitespace are best rendered explicitly. + if (!blockQuote || /\n[\t ]+$/.test(value) || /^\s*$/.test(value)) { + return quotedString(value, ctx); + } + const indent = ctx.indent || + (ctx.forceBlockIndent || containsDocumentMarker(value) ? '  ' : ''); + const literal = blockQuote === 'literal' + ? true + : blockQuote === 'folded' || type === Scalar.Scalar.BLOCK_FOLDED + ? false + : type === Scalar.Scalar.BLOCK_LITERAL + ? true + : !lineLengthOverLimit(value, lineWidth, indent.length); + if (!value) + return literal ? '|\n' : '>\n'; + // determine chomping from whitespace at value end + let chomp; + let endStart; + for (endStart = value.length; endStart > 0; --endStart) { + const ch = value[endStart - 1]; + if (ch !== '\n' && ch !== '\t' && ch !== ' ') + break; + } + let end = value.substring(endStart); + const endNlPos = end.indexOf('\n'); + if (endNlPos === -1) { + chomp = '-'; // strip + } + else if (value === end || endNlPos !== end.length - 1) { + chomp = '+'; // keep + if (onChompKeep) + onChompKeep(); + } + else { + chomp = ''; // clip + } + if (end) { + value = value.slice(0, -end.length); + if (end[end.length - 1] === '\n') + end = end.slice(0, -1); + end = end.replace(blockEndNewlines, `$&${indent}`); + } + // determine indent indicator from whitespace at value start + let startWithSpace = false; + let startEnd; + let startNlPos = -1; + for (startEnd = 0; startEnd < value.length; ++startEnd) { + const ch = value[startEnd]; + if (ch === ' ') + startWithSpace = true; + else if (ch === '\n') + startNlPos = startEnd; + else + break; + } + let start = value.substring(0, startNlPos < startEnd ? startNlPos + 1 : startEnd); + if (start) { + value = value.substring(start.length); + start = start.replace(/\n+/g, `$&${indent}`); + } + const indentSize = indent ? '2' : '1'; // root is at -1 + let header = (literal ? '|' : '>') + (startWithSpace ? indentSize : '') + chomp; + if (comment) { + header += ' ' + commentString(comment.replace(/ ?[\r\n]+/g, ' ')); + if (onComment) + onComment(); + } + if (literal) { + value = value.replace(/\n+/g, `$&${indent}`); + return `${header}\n${indent}${start}${value}${end}`; + } + value = value + .replace(/\n+/g, '\n$&') + .replace(/(?:^|\n)([\t ].*)(?:([\n\t ]*)\n(?![\n\t ]))?/g, '$1$2') // more-indented lines aren't folded + // ^ more-ind. ^ empty ^ capture next empty lines only at end of indent + .replace(/\n+/g, `$&${indent}`); + const body = foldFlowLines.foldFlowLines(`${start}${value}${end}`, indent, foldFlowLines.FOLD_BLOCK, getFoldOptions(ctx, true)); + return `${header}\n${indent}${body}`; +} +function plainString(item, ctx, onComment, onChompKeep) { + const { type, value } = item; + const { actualString, implicitKey, indent, indentStep, inFlow } = ctx; + if ((implicitKey && value.includes('\n')) || + (inFlow && /[[\]{},]/.test(value))) { + return quotedString(value, ctx); + } + if (!value || + /^[\n\t ,[\]{}#&*!|>'"%@`]|^[?-]$|^[?-][ \t]|[\n:][ \t]|[ \t]\n|[\n\t ]#|[\n\t :]$/.test(value)) { + // not allowed: + // - empty string, '-' or '?' + // - start with an indicator character (except [?:-]) or /[?-] / + // - '\n ', ': ' or ' \n' anywhere + // - '#' not preceded by a non-space char + // - end with ' ' or ':' + return implicitKey || inFlow || !value.includes('\n') + ?
quotedString(value, ctx) + : blockString(item, ctx, onComment, onChompKeep); + } + if (!implicitKey && + !inFlow && + type !== Scalar.Scalar.PLAIN && + value.includes('\n')) { + // Where allowed & type not set explicitly, prefer block style for multiline strings + return blockString(item, ctx, onComment, onChompKeep); + } + if (containsDocumentMarker(value)) { + if (indent === '') { + ctx.forceBlockIndent = true; + return blockString(item, ctx, onComment, onChompKeep); + } + else if (implicitKey && indent === indentStep) { + return quotedString(value, ctx); + } + } + const str = value.replace(/\n+/g, `$&\n${indent}`); + // Verify that output will be parsed as a string, as e.g. plain numbers and + // booleans get parsed with those types in v1.2 (e.g. '42', 'true' & '0.9e-3'), + // and others in v1.1. + if (actualString) { + const test = (tag) => tag.default && tag.tag !== 'tag:yaml.org,2002:str' && tag.test?.test(str); + const { compat, tags } = ctx.doc.schema; + if (tags.some(test) || compat?.some(test)) + return quotedString(value, ctx); + } + return implicitKey + ? str + : foldFlowLines.foldFlowLines(str, indent, foldFlowLines.FOLD_FLOW, getFoldOptions(ctx, false)); +} +function stringifyString(item, ctx, onComment, onChompKeep) { + const { implicitKey, inFlow } = ctx; + const ss = typeof item.value === 'string' + ? item + : Object.assign({}, item, { value: String(item.value) }); + let { type } = item; + if (type !== Scalar.Scalar.QUOTE_DOUBLE) { + // force double quotes on control characters & unpaired surrogates + if (/[\x00-\x08\x0b-\x1f\x7f-\x9f\u{D800}-\u{DFFF}]/u.test(ss.value)) + type = Scalar.Scalar.QUOTE_DOUBLE; + } + const _stringify = (_type) => { + switch (_type) { + case Scalar.Scalar.BLOCK_FOLDED: + case Scalar.Scalar.BLOCK_LITERAL: + return implicitKey || inFlow + ? quotedString(ss.value, ctx) // blocks are not valid inside flow containers + : blockString(ss, ctx, onComment, onChompKeep); + case Scalar.Scalar.QUOTE_DOUBLE: + return doubleQuotedString(ss.value, ctx); + case Scalar.Scalar.QUOTE_SINGLE: + return singleQuotedString(ss.value, ctx); + case Scalar.Scalar.PLAIN: + return plainString(ss, ctx, onComment, onChompKeep); + default: + return null; + } + }; + let res = _stringify(type); + if (res === null) { + const { defaultKeyType, defaultStringType } = ctx.options; + const t = (implicitKey && defaultKeyType) || defaultStringType; + res = _stringify(t); + if (res === null) + throw new Error(`Unsupported default string type ${t}`); + } + return res; +} + +exports.stringifyString = stringifyString; + + +/***/ }), + +/***/ 6796: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var identity = __nccwpck_require__(5589); + +const BREAK = Symbol('break visit'); +const SKIP = Symbol('skip children'); +const REMOVE = Symbol('remove node'); +/** + * Apply a visitor to an AST node or document. + * + * Walks through the tree (depth-first) starting from `node`, calling a + * `visitor` function with three arguments: + * - `key`: For sequence values and map `Pair`, the node's index in the + * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly. + * `null` for the root node. + * - `node`: The current node. + * - `path`: The ancestry of the current node. 
+ * + * The return value of the visitor may be used to control the traversal: + * - `undefined` (default): Do nothing and continue + * - `visit.SKIP`: Do not visit the children of this node, continue with next + * sibling + * - `visit.BREAK`: Terminate traversal completely + * - `visit.REMOVE`: Remove the current node, then continue with the next one + * - `Node`: Replace the current node, then continue by visiting it + * - `number`: While iterating the items of a sequence or map, set the index + * of the next step. This is useful especially if the index of the current + * node has changed. + * + * If `visitor` is a single function, it will be called with all values + * encountered in the tree, including e.g. `null` values. Alternatively, + * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`, + * `Alias` and `Scalar` node. To define the same visitor function for more than + * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar) + * and `Node` (alias, map, seq & scalar) targets. Of all these, only the most + * specific defined one will be used for each node. + */ +function visit(node, visitor) { + const visitor_ = initVisitor(visitor); + if (identity.isDocument(node)) { + const cd = visit_(null, node.contents, visitor_, Object.freeze([node])); + if (cd === REMOVE) + node.contents = null; + } + else + visit_(null, node, visitor_, Object.freeze([])); +} +// Without the `as symbol` casts, TS declares these in the `visit` +// namespace using `var`, but then complains about that because +// `unique symbol` must be `const`. +/** Terminate visit traversal completely */ +visit.BREAK = BREAK; +/** Do not visit the children of the current node */ +visit.SKIP = SKIP; +/** Remove the current node */ +visit.REMOVE = REMOVE; +function visit_(key, node, visitor, path) { + const ctrl = callVisitor(key, node, visitor, path); + if (identity.isNode(ctrl) || identity.isPair(ctrl)) { + replaceNode(key, path, ctrl); + return visit_(key, ctrl, visitor, path); + } + if (typeof ctrl !== 'symbol') { + if (identity.isCollection(node)) { + path = Object.freeze(path.concat(node)); + for (let i = 0; i < node.items.length; ++i) { + const ci = visit_(i, node.items[i], visitor, path); + if (typeof ci === 'number') + i = ci - 1; + else if (ci === BREAK) + return BREAK; + else if (ci === REMOVE) { + node.items.splice(i, 1); + i -= 1; + } + } + } + else if (identity.isPair(node)) { + path = Object.freeze(path.concat(node)); + const ck = visit_('key', node.key, visitor, path); + if (ck === BREAK) + return BREAK; + else if (ck === REMOVE) + node.key = null; + const cv = visit_('value', node.value, visitor, path); + if (cv === BREAK) + return BREAK; + else if (cv === REMOVE) + node.value = null; + } + } + return ctrl; +} +/** + * Apply an async visitor to an AST node or document. + * + * Walks through the tree (depth-first) starting from `node`, calling a + * `visitor` function with three arguments: + * - `key`: For sequence values and map `Pair`, the node's index in the + * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly. + * `null` for the root node. + * - `node`: The current node. + * - `path`: The ancestry of the current node. 
+ * + * The return value of the visitor may be used to control the traversal: + * - `Promise`: Must resolve to one of the following values + * - `undefined` (default): Do nothing and continue + * - `visit.SKIP`: Do not visit the children of this node, continue with next + * sibling + * - `visit.BREAK`: Terminate traversal completely + * - `visit.REMOVE`: Remove the current node, then continue with the next one + * - `Node`: Replace the current node, then continue by visiting it + * - `number`: While iterating the items of a sequence or map, set the index + * of the next step. This is useful especially if the index of the current + * node has changed. + * + * If `visitor` is a single function, it will be called with all values + * encountered in the tree, including e.g. `null` values. Alternatively, + * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`, + * `Alias` and `Scalar` node. To define the same visitor function for more than + * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar) + * and `Node` (alias, map, seq & scalar) targets. Of all these, only the most + * specific defined one will be used for each node. + */ +async function visitAsync(node, visitor) { + const visitor_ = initVisitor(visitor); + if (identity.isDocument(node)) { + const cd = await visitAsync_(null, node.contents, visitor_, Object.freeze([node])); + if (cd === REMOVE) + node.contents = null; + } + else + await visitAsync_(null, node, visitor_, Object.freeze([])); +} +// Without the `as symbol` casts, TS declares these in the `visit` +// namespace using `var`, but then complains about that because +// `unique symbol` must be `const`. +/** Terminate visit traversal completely */ +visitAsync.BREAK = BREAK; +/** Do not visit the children of the current node */ +visitAsync.SKIP = SKIP; +/** Remove the current node */ +visitAsync.REMOVE = REMOVE; +async function visitAsync_(key, node, visitor, path) { + const ctrl = await callVisitor(key, node, visitor, path); + if (identity.isNode(ctrl) || identity.isPair(ctrl)) { + replaceNode(key, path, ctrl); + return visitAsync_(key, ctrl, visitor, path); + } + if (typeof ctrl !== 'symbol') { + if (identity.isCollection(node)) { + path = Object.freeze(path.concat(node)); + for (let i = 0; i < node.items.length; ++i) { + const ci = await visitAsync_(i, node.items[i], visitor, path); + if (typeof ci === 'number') + i = ci - 1; + else if (ci === BREAK) + return BREAK; + else if (ci === REMOVE) { + node.items.splice(i, 1); + i -= 1; + } + } + } + else if (identity.isPair(node)) { + path = Object.freeze(path.concat(node)); + const ck = await visitAsync_('key', node.key, visitor, path); + if (ck === BREAK) + return BREAK; + else if (ck === REMOVE) + node.key = null; + const cv = await visitAsync_('value', node.value, visitor, path); + if (cv === BREAK) + return BREAK; + else if (cv === REMOVE) + node.value = null; + } + } + return ctrl; +} +function initVisitor(visitor) { + if (typeof visitor === 'object' && + (visitor.Collection || visitor.Node || visitor.Value)) { + return Object.assign({ + Alias: visitor.Node, + Map: visitor.Node, + Scalar: visitor.Node, + Seq: visitor.Node + }, visitor.Value && { + Map: visitor.Value, + Scalar: visitor.Value, + Seq: visitor.Value + }, visitor.Collection && { + Map: visitor.Collection, + Seq: visitor.Collection + }, visitor); + } + return visitor; +} +function callVisitor(key, node, visitor, path) { + if (typeof visitor === 'function') + return visitor(key, node, path); + if (identity.isMap(node)) + 
return visitor.Map?.(key, node, path); + if (identity.isSeq(node)) + return visitor.Seq?.(key, node, path); + if (identity.isPair(node)) + return visitor.Pair?.(key, node, path); + if (identity.isScalar(node)) + return visitor.Scalar?.(key, node, path); + if (identity.isAlias(node)) + return visitor.Alias?.(key, node, path); + return undefined; +} +function replaceNode(key, path, node) { + const parent = path[path.length - 1]; + if (identity.isCollection(parent)) { + parent.items[key] = node; + } + else if (identity.isPair(parent)) { + if (key === 'key') + parent.key = node; + else + parent.value = node; + } + else if (identity.isDocument(parent)) { + parent.contents = node; + } + else { + const pt = identity.isAlias(parent) ? 'alias' : 'scalar'; + throw new Error(`Cannot replace node with ${pt} parent`); + } +} + +exports.visit = visit; +exports.visitAsync = visitAsync; + + +/***/ }) + +/******/ }); +/************************************************************************/ +/******/ // The module cache +/******/ var __webpack_module_cache__ = {}; +/******/ +/******/ // The require function +/******/ function __nccwpck_require__(moduleId) { +/******/ // Check if module is in cache +/******/ var cachedModule = __webpack_module_cache__[moduleId]; +/******/ if (cachedModule !== undefined) { +/******/ return cachedModule.exports; +/******/ } +/******/ // Create a new module (and put it into the cache) +/******/ var module = __webpack_module_cache__[moduleId] = { +/******/ // no module.id needed +/******/ // no module.loaded needed +/******/ exports: {} +/******/ }; +/******/ +/******/ // Execute the module function +/******/ var threw = true; +/******/ try { +/******/ __webpack_modules__[moduleId].call(module.exports, module, module.exports, __nccwpck_require__); +/******/ threw = false; +/******/ } finally { +/******/ if(threw) delete __webpack_module_cache__[moduleId]; +/******/ } +/******/ +/******/ // Return the exports of the module +/******/ return module.exports; +/******/ } +/******/ +/************************************************************************/ +/******/ /* webpack/runtime/compat */ +/******/ +/******/ if (typeof __nccwpck_require__ !== 'undefined') __nccwpck_require__.ab = __dirname + "/"; +/******/ +/************************************************************************/ +var __webpack_exports__ = {}; +// This entry need to be wrapped in an IIFE because it need to be in strict mode. +(() => { +"use strict"; +var exports = __webpack_exports__; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +/** + * The entrypoint for the action. 
+ */ +const main_1 = __nccwpck_require__(399); +(0, main_1.run)(); + +})(); + +module.exports = __webpack_exports__; +/******/ })() +; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/index.js.map b/dist/index.js.map new file mode 100644 index 0000000..ba64664 --- /dev/null +++ b/dist/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","mappings":";;;;;;;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/UA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzDA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1RA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzmBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;ACjFA;;;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvQA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;ACpBA;;;;;;;;AAQA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAPA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACTA;AACA;AACA;AACA;AAGA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AAEA;AACA;AAGA;AACA;AAlBA;;;;;;;;;;;;ACRA;AAEA;;;;AAIA;AACA;AAGA;AAEA;AACA;AACA;AAAA;AAEA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AApCA;;;;;;;;;ACRA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7EA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5NA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChOA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvIA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/UA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1KA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACpDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzNA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9rBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACz7BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;;;;;;;;;ACpBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACrFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC3EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtCA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC3IA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9HA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzUA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;AC3OA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AC7BA;AACA;;;;;;;;;;ACDA;;AAEA;AACA;AAEA","sources":["../webpack://validator/./node_modules/@actions/core/lib/command.js","../webpack://validator/./node_modules/@actions/core/lib/core.js","../webpack://validator/./node_modules/@actions/core/lib/file-command.js","../webpack://validator/./node_modules/@actions/core/lib/oidc-utils.js","../webpack://validator/./node_modules/@actions/core/lib/path-utils.js","../webpack://validator/./node_modules/@actions/core/lib/summary.js","../webpack://validator/./node_modules/@actions/core/lib/utils.js","../webpack://validator/./node_modules/@actions/http-client/lib/auth.js","../webpack://validator/./node_modules/@actions/http-client/lib/index.js","../webpack://validator/./node_modules/@actions/http-client/lib/proxy.js","../webpack://validator/./node_modules/tunnel/index.js","../webpack://validator/./node_modules/tunnel/lib/tunnel.js","../webpack://validator/./node_modules/uuid/dist/index.js","../webpack://validator/./node_modules/uuid/dist/md5.js","../webpack://validator/./node_modules/uuid/dist/nil.js","../webpack://validator/./node_modules/uuid/dist/parse.js","../webpack://validator/./node_modules/uuid/dist/regex.js","../webpack://validator/./node_modules/uuid/dist/rng.js","../webpack://validator/./node_modules/uuid/dist/sha1.js","../webpack://validator/./node_modules/uuid/dist/stringify.js","../webpack://validator/./node_modules/uuid/dist/v1.js","../webpack://validator/./node_modules/uuid/dist/v3.js","../webpack://validator/./node_modules/uuid/dist/v35.js","../webpack://validator/./node_modules/uuid/dist/v4.js","../webpack://validator/./node_modules/uuid/dist/v5.js","../webpack://validator/./node_modules/uuid/dist/validate.js","../webpack://validator/./node_modules/uuid/dist/version.js","../webpack://validator/./src/format.ts","../webpack://validator/./src/main.ts","../webpack://validator/./src/parse.ts","../webpack://validator/external node-commonjs \"assert\"","../webpack://validator/external node-commonjs \"crypto\"","../webpack://validator/external node-commonjs \"events\"","../webpack://validator/external node-commonjs \"fs\"","../webpack://validator/external node-commonjs \"http\"","../webpack://validator/external node-commonjs \"https\"","../webpack://validator/external node-commonjs \"net\"","../webpack://validator/external node-commonjs \"os\"","../webpack://validator/external node-commonjs \"path\"","../webpack://validator/external node-commonjs \"tls\"","../webpack://validator/external node-commonjs 
\"util\"","../webpack://validator/./node_modules/yaml/dist/compose/compose-collection.js","../webpack://validator/./node_modules/yaml/dist/compose/compose-doc.js","../webpack://validator/./node_modules/yaml/dist/compose/compose-node.js","../webpack://validator/./node_modules/yaml/dist/compose/compose-scalar.js","../webpack://validator/./node_modules/yaml/dist/compose/composer.js","../webpack://validator/./node_modules/yaml/dist/compose/resolve-block-map.js","../webpack://validator/./node_modules/yaml/dist/compose/resolve-block-scalar.js","../webpack://validator/./node_modules/yaml/dist/compose/resolve-block-seq.js","../webpack://validator/./node_modules/yaml/dist/compose/resolve-end.js","../webpack://validator/./node_modules/yaml/dist/compose/resolve-flow-collection.js","../webpack://validator/./node_modules/yaml/dist/compose/resolve-flow-scalar.js","../webpack://validator/./node_modules/yaml/dist/compose/resolve-props.js","../webpack://validator/./node_modules/yaml/dist/compose/util-contains-newline.js","../webpack://validator/./node_modules/yaml/dist/compose/util-empty-scalar-position.js","../webpack://validator/./node_modules/yaml/dist/compose/util-flow-indent-check.js","../webpack://validator/./node_modules/yaml/dist/compose/util-map-includes.js","../webpack://validator/./node_modules/yaml/dist/doc/Document.js","../webpack://validator/./node_modules/yaml/dist/doc/anchors.js","../webpack://validator/./node_modules/yaml/dist/doc/applyReviver.js","../webpack://validator/./node_modules/yaml/dist/doc/createNode.js","../webpack://validator/./node_modules/yaml/dist/doc/directives.js","../webpack://validator/./node_modules/yaml/dist/errors.js","../webpack://validator/./node_modules/yaml/dist/index.js","../webpack://validator/./node_modules/yaml/dist/log.js","../webpack://validator/./node_modules/yaml/dist/nodes/Alias.js","../webpack://validator/./node_modules/yaml/dist/nodes/Collection.js","../webpack://validator/./node_modules/yaml/dist/nodes/Node.js","../webpack://validator/./node_modules/yaml/dist/nodes/Pair.js","../webpack://validator/./node_modules/yaml/dist/nodes/Scalar.js","../webpack://validator/./node_modules/yaml/dist/nodes/YAMLMap.js","../webpack://validator/./node_modules/yaml/dist/nodes/YAMLSeq.js","../webpack://validator/./node_modules/yaml/dist/nodes/addPairToJSMap.js","../webpack://validator/./node_modules/yaml/dist/nodes/identity.js","../webpack://validator/./node_modules/yaml/dist/nodes/toJS.js","../webpack://validator/./node_modules/yaml/dist/parse/cst-scalar.js","../webpack://validator/./node_modules/yaml/dist/parse/cst-stringify.js","../webpack://validator/./node_modules/yaml/dist/parse/cst-visit.js","../webpack://validator/./node_modules/yaml/dist/parse/cst.js","../webpack://validator/./node_modules/yaml/dist/parse/lexer.js","../webpack://validator/./node_modules/yaml/dist/parse/line-counter.js","../webpack://validator/./node_modules/yaml/dist/parse/parser.js","../webpack://validator/./node_modules/yaml/dist/public-api.js","../webpack://validator/./node_modules/yaml/dist/schema/Schema.js","../webpack://validator/./node_modules/yaml/dist/schema/common/map.js","../webpack://validator/./node_modules/yaml/dist/schema/common/null.js","../webpack://validator/./node_modules/yaml/dist/schema/common/seq.js","../webpack://validator/./node_modules/yaml/dist/schema/common/string.js","../webpack://validator/./node_modules/yaml/dist/schema/core/bool.js","../webpack://validator/./node_modules/yaml/dist/schema/core/float.js","../webpack://validator/./node_modules/yaml/dist/schema/core/int
.js","../webpack://validator/./node_modules/yaml/dist/schema/core/schema.js","../webpack://validator/./node_modules/yaml/dist/schema/json/schema.js","../webpack://validator/./node_modules/yaml/dist/schema/tags.js","../webpack://validator/./node_modules/yaml/dist/schema/yaml-1.1/binary.js","../webpack://validator/./node_modules/yaml/dist/schema/yaml-1.1/bool.js","../webpack://validator/./node_modules/yaml/dist/schema/yaml-1.1/float.js","../webpack://validator/./node_modules/yaml/dist/schema/yaml-1.1/int.js","../webpack://validator/./node_modules/yaml/dist/schema/yaml-1.1/omap.js","../webpack://validator/./node_modules/yaml/dist/schema/yaml-1.1/pairs.js","../webpack://validator/./node_modules/yaml/dist/schema/yaml-1.1/schema.js","../webpack://validator/./node_modules/yaml/dist/schema/yaml-1.1/set.js","../webpack://validator/./node_modules/yaml/dist/schema/yaml-1.1/timestamp.js","../webpack://validator/./node_modules/yaml/dist/stringify/foldFlowLines.js","../webpack://validator/./node_modules/yaml/dist/stringify/stringify.js","../webpack://validator/./node_modules/yaml/dist/stringify/stringifyCollection.js","../webpack://validator/./node_modules/yaml/dist/stringify/stringifyComment.js","../webpack://validator/./node_modules/yaml/dist/stringify/stringifyDocument.js","../webpack://validator/./node_modules/yaml/dist/stringify/stringifyNumber.js","../webpack://validator/./node_modules/yaml/dist/stringify/stringifyPair.js","../webpack://validator/./node_modules/yaml/dist/stringify/stringifyString.js","../webpack://validator/./node_modules/yaml/dist/visit.js","../webpack://validator/webpack/bootstrap","../webpack://validator/webpack/runtime/compat","../webpack://validator/./src/index.ts"],"sourcesContent":["\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.issue = exports.issueCommand = void 0;\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\n/**\n * Commands\n *\n * Command Format:\n * ::name key=value,key=value::message\n *\n * Examples:\n * ::warning::This is the message\n * ::set-env name=MY_VAR::some value\n */\nfunction issueCommand(command, properties, message) {\n const cmd = new Command(command, properties, message);\n process.stdout.write(cmd.toString() + os.EOL);\n}\nexports.issueCommand = issueCommand;\nfunction issue(name, message = '') {\n issueCommand(name, {}, message);\n}\nexports.issue = issue;\nconst CMD_STRING = '::';\nclass Command {\n constructor(command, properties, message) {\n if (!command) {\n command = 'missing.command';\n }\n this.command = command;\n this.properties = properties;\n this.message = message;\n }\n toString() {\n let cmdStr = CMD_STRING + this.command;\n if (this.properties && Object.keys(this.properties).length > 0) {\n cmdStr += ' ';\n let first = true;\n for (const key in this.properties) {\n if (this.properties.hasOwnProperty(key)) {\n const val = this.properties[key];\n if (val) {\n if (first) {\n first = false;\n }\n else {\n cmdStr += ',';\n }\n cmdStr += `${key}=${escapeProperty(val)}`;\n }\n }\n }\n }\n cmdStr += `${CMD_STRING}${escapeData(this.message)}`;\n return cmdStr;\n }\n}\nfunction escapeData(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A');\n}\nfunction escapeProperty(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A')\n .replace(/:/g, '%3A')\n .replace(/,/g, '%2C');\n}\n//# sourceMappingURL=command.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;\nconst command_1 = require(\"./command\");\nconst file_command_1 = require(\"./file-command\");\nconst utils_1 = require(\"./utils\");\nconst os = __importStar(require(\"os\"));\nconst path = __importStar(require(\"path\"));\nconst oidc_utils_1 = require(\"./oidc-utils\");\n/**\n * The code to exit an action\n */\nvar ExitCode;\n(function (ExitCode) {\n /**\n * A code indicating that the action was successful\n */\n ExitCode[ExitCode[\"Success\"] = 0] = \"Success\";\n /**\n * A code indicating that the action was a failure\n */\n ExitCode[ExitCode[\"Failure\"] = 1] = \"Failure\";\n})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));\n//-----------------------------------------------------------------------\n// Variables\n//-----------------------------------------------------------------------\n/**\n * Sets env variable for this action and future actions in the job\n * @param name the name of the variable to set\n * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction exportVariable(name, val) {\n const convertedVal = utils_1.toCommandValue(val);\n process.env[name] = convertedVal;\n const filePath = process.env['GITHUB_ENV'] || '';\n if (filePath) {\n return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val));\n }\n command_1.issueCommand('set-env', { name }, convertedVal);\n}\nexports.exportVariable = exportVariable;\n/**\n * Registers a secret which will get masked from logs\n * @param secret value of the secret\n */\nfunction setSecret(secret) {\n command_1.issueCommand('add-mask', {}, secret);\n}\nexports.setSecret = setSecret;\n/**\n * Prepends inputPath to the PATH (for this action and future actions)\n * @param inputPath\n */\nfunction addPath(inputPath) {\n const filePath = process.env['GITHUB_PATH'] || '';\n if (filePath) {\n file_command_1.issueFileCommand('PATH', inputPath);\n }\n else {\n command_1.issueCommand('add-path', {}, inputPath);\n }\n process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;\n}\nexports.addPath = addPath;\n/**\n * Gets the value of an input.\n * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed.\n * Returns an empty string if the value is not defined.\n *\n * @param name name of the input to get\n * @param options optional. 
See InputOptions.\n * @returns string\n */\nfunction getInput(name, options) {\n const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';\n if (options && options.required && !val) {\n throw new Error(`Input required and not supplied: ${name}`);\n }\n if (options && options.trimWhitespace === false) {\n return val;\n }\n return val.trim();\n}\nexports.getInput = getInput;\n/**\n * Gets the values of an multiline input. Each value is also trimmed.\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns string[]\n *\n */\nfunction getMultilineInput(name, options) {\n const inputs = getInput(name, options)\n .split('\\n')\n .filter(x => x !== '');\n if (options && options.trimWhitespace === false) {\n return inputs;\n }\n return inputs.map(input => input.trim());\n}\nexports.getMultilineInput = getMultilineInput;\n/**\n * Gets the input value of the boolean type in the YAML 1.2 \"core schema\" specification.\n * Support boolean input list: `true | True | TRUE | false | False | FALSE` .\n * The return value is also in boolean type.\n * ref: https://yaml.org/spec/1.2/spec.html#id2804923\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns boolean\n */\nfunction getBooleanInput(name, options) {\n const trueValue = ['true', 'True', 'TRUE'];\n const falseValue = ['false', 'False', 'FALSE'];\n const val = getInput(name, options);\n if (trueValue.includes(val))\n return true;\n if (falseValue.includes(val))\n return false;\n throw new TypeError(`Input does not meet YAML 1.2 \"Core Schema\" specification: ${name}\\n` +\n `Support boolean input list: \\`true | True | TRUE | false | False | FALSE\\``);\n}\nexports.getBooleanInput = getBooleanInput;\n/**\n * Sets the value of an output.\n *\n * @param name name of the output to set\n * @param value value to store. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction setOutput(name, value) {\n const filePath = process.env['GITHUB_OUTPUT'] || '';\n if (filePath) {\n return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value));\n }\n process.stdout.write(os.EOL);\n command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value));\n}\nexports.setOutput = setOutput;\n/**\n * Enables or disables the echoing of commands into stdout for the rest of the step.\n * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.\n *\n */\nfunction setCommandEcho(enabled) {\n command_1.issue('echo', enabled ? 
'on' : 'off');\n}\nexports.setCommandEcho = setCommandEcho;\n//-----------------------------------------------------------------------\n// Results\n//-----------------------------------------------------------------------\n/**\n * Sets the action status to failed.\n * When the action exits it will be with an exit code of 1\n * @param message add error issue message\n */\nfunction setFailed(message) {\n process.exitCode = ExitCode.Failure;\n error(message);\n}\nexports.setFailed = setFailed;\n//-----------------------------------------------------------------------\n// Logging Commands\n//-----------------------------------------------------------------------\n/**\n * Gets whether Actions Step Debug is on or not\n */\nfunction isDebug() {\n return process.env['RUNNER_DEBUG'] === '1';\n}\nexports.isDebug = isDebug;\n/**\n * Writes debug message to user log\n * @param message debug message\n */\nfunction debug(message) {\n command_1.issueCommand('debug', {}, message);\n}\nexports.debug = debug;\n/**\n * Adds an error issue\n * @param message error issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction error(message, properties = {}) {\n command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);\n}\nexports.error = error;\n/**\n * Adds a warning issue\n * @param message warning issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction warning(message, properties = {}) {\n command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);\n}\nexports.warning = warning;\n/**\n * Adds a notice issue\n * @param message notice issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction notice(message, properties = {}) {\n command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? 
message.toString() : message);\n}\nexports.notice = notice;\n/**\n * Writes info to log with console.log.\n * @param message info message\n */\nfunction info(message) {\n process.stdout.write(message + os.EOL);\n}\nexports.info = info;\n/**\n * Begin an output group.\n *\n * Output until the next `groupEnd` will be foldable in this group\n *\n * @param name The name of the output group\n */\nfunction startGroup(name) {\n command_1.issue('group', name);\n}\nexports.startGroup = startGroup;\n/**\n * End an output group.\n */\nfunction endGroup() {\n command_1.issue('endgroup');\n}\nexports.endGroup = endGroup;\n/**\n * Wrap an asynchronous function call in a group.\n *\n * Returns the same type as the function itself.\n *\n * @param name The name of the group\n * @param fn The function to wrap in the group\n */\nfunction group(name, fn) {\n return __awaiter(this, void 0, void 0, function* () {\n startGroup(name);\n let result;\n try {\n result = yield fn();\n }\n finally {\n endGroup();\n }\n return result;\n });\n}\nexports.group = group;\n//-----------------------------------------------------------------------\n// Wrapper action state\n//-----------------------------------------------------------------------\n/**\n * Saves state for current action, the state can only be retrieved by this action's post job execution.\n *\n * @param name name of the state to store\n * @param value value to store. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction saveState(name, value) {\n const filePath = process.env['GITHUB_STATE'] || '';\n if (filePath) {\n return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value));\n }\n command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value));\n}\nexports.saveState = saveState;\n/**\n * Gets the value of an state set by this action's main execution.\n *\n * @param name name of the state to get\n * @returns string\n */\nfunction getState(name) {\n return process.env[`STATE_${name}`] || '';\n}\nexports.getState = getState;\nfunction getIDToken(aud) {\n return __awaiter(this, void 0, void 0, function* () {\n return yield oidc_utils_1.OidcClient.getIDToken(aud);\n });\n}\nexports.getIDToken = getIDToken;\n/**\n * Summary exports\n */\nvar summary_1 = require(\"./summary\");\nObject.defineProperty(exports, \"summary\", { enumerable: true, get: function () { return summary_1.summary; } });\n/**\n * @deprecated use core.summary\n */\nvar summary_2 = require(\"./summary\");\nObject.defineProperty(exports, \"markdownSummary\", { enumerable: true, get: function () { return summary_2.markdownSummary; } });\n/**\n * Path exports\n */\nvar path_utils_1 = require(\"./path-utils\");\nObject.defineProperty(exports, \"toPosixPath\", { enumerable: true, get: function () { return path_utils_1.toPosixPath; } });\nObject.defineProperty(exports, \"toWin32Path\", { enumerable: true, get: function () { return path_utils_1.toWin32Path; } });\nObject.defineProperty(exports, \"toPlatformPath\", { enumerable: true, get: function () { return path_utils_1.toPlatformPath; } });\n//# sourceMappingURL=core.js.map","\"use strict\";\n// For internal use, subject to change.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.prepareKeyValueMessage = exports.issueFileCommand = void 0;\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nconst fs = __importStar(require(\"fs\"));\nconst os = __importStar(require(\"os\"));\nconst uuid_1 = require(\"uuid\");\nconst utils_1 = require(\"./utils\");\nfunction issueFileCommand(command, message) {\n const filePath = process.env[`GITHUB_${command}`];\n if (!filePath) {\n throw new Error(`Unable to find environment variable for file command ${command}`);\n }\n if (!fs.existsSync(filePath)) {\n throw new Error(`Missing file at path: ${filePath}`);\n }\n fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {\n encoding: 'utf8'\n });\n}\nexports.issueFileCommand = issueFileCommand;\nfunction prepareKeyValueMessage(key, value) {\n const delimiter = `ghadelimiter_${uuid_1.v4()}`;\n const convertedValue = utils_1.toCommandValue(value);\n // These should realistically never happen, but just in case someone finds a\n // way to exploit uuid generation let's not allow keys or values that contain\n // the delimiter.\n if (key.includes(delimiter)) {\n throw new Error(`Unexpected input: name should not contain the delimiter \"${delimiter}\"`);\n }\n if (convertedValue.includes(delimiter)) {\n throw new Error(`Unexpected input: value should not contain the delimiter \"${delimiter}\"`);\n }\n return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`;\n}\nexports.prepareKeyValueMessage = prepareKeyValueMessage;\n//# sourceMappingURL=file-command.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.OidcClient = void 0;\nconst http_client_1 = require(\"@actions/http-client\");\nconst auth_1 = require(\"@actions/http-client/lib/auth\");\nconst core_1 = require(\"./core\");\nclass OidcClient {\n static createHttpClient(allowRetry = true, maxRetry = 10) {\n const requestOptions = {\n allowRetries: allowRetry,\n maxRetries: maxRetry\n };\n return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions);\n }\n static getRequestToken() {\n const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN'];\n if (!token) {\n throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable');\n }\n return token;\n }\n static getIDTokenUrl() {\n const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL'];\n if (!runtimeUrl) {\n throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable');\n }\n return runtimeUrl;\n }\n static getCall(id_token_url) {\n var _a;\n return __awaiter(this, void 0, void 0, function* () {\n const httpclient = OidcClient.createHttpClient();\n const res = yield httpclient\n .getJson(id_token_url)\n .catch(error => {\n throw new Error(`Failed to get ID Token. \\n \n Error Code : ${error.statusCode}\\n \n Error Message: ${error.message}`);\n });\n const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value;\n if (!id_token) {\n throw new Error('Response json body do not have ID Token field');\n }\n return id_token;\n });\n }\n static getIDToken(audience) {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n // New ID Token is requested from action service\n let id_token_url = OidcClient.getIDTokenUrl();\n if (audience) {\n const encodedAudience = encodeURIComponent(audience);\n id_token_url = `${id_token_url}&audience=${encodedAudience}`;\n }\n core_1.debug(`ID token url is ${id_token_url}`);\n const id_token = yield OidcClient.getCall(id_token_url);\n core_1.setSecret(id_token);\n return id_token;\n }\n catch (error) {\n throw new Error(`Error message: ${error.message}`);\n }\n });\n }\n}\nexports.OidcClient = OidcClient;\n//# sourceMappingURL=oidc-utils.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0;\nconst path = __importStar(require(\"path\"));\n/**\n * toPosixPath converts the given path to the posix form. 
On Windows, \\\\ will be\n * replaced with /.\n *\n * @param pth. Path to transform.\n * @return string Posix path.\n */\nfunction toPosixPath(pth) {\n return pth.replace(/[\\\\]/g, '/');\n}\nexports.toPosixPath = toPosixPath;\n/**\n * toWin32Path converts the given path to the win32 form. On Linux, / will be\n * replaced with \\\\.\n *\n * @param pth. Path to transform.\n * @return string Win32 path.\n */\nfunction toWin32Path(pth) {\n return pth.replace(/[/]/g, '\\\\');\n}\nexports.toWin32Path = toWin32Path;\n/**\n * toPlatformPath converts the given path to a platform-specific path. It does\n * this by replacing instances of / and \\ with the platform-specific path\n * separator.\n *\n * @param pth The path to platformize.\n * @return string The platform-specific path.\n */\nfunction toPlatformPath(pth) {\n return pth.replace(/[/\\\\]/g, path.sep);\n}\nexports.toPlatformPath = toPlatformPath;\n//# sourceMappingURL=path-utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0;\nconst os_1 = require(\"os\");\nconst fs_1 = require(\"fs\");\nconst { access, appendFile, writeFile } = fs_1.promises;\nexports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY';\nexports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary';\nclass Summary {\n constructor() {\n this._buffer = '';\n }\n /**\n * Finds the summary file path from the environment, rejects if env var is not found or file does not exist\n * Also checks r/w permissions.\n *\n * @returns step summary file path\n */\n filePath() {\n return __awaiter(this, void 0, void 0, function* () {\n if (this._filePath) {\n return this._filePath;\n }\n const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR];\n if (!pathFromEnv) {\n throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);\n }\n try {\n yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);\n }\n catch (_a) {\n throw new Error(`Unable to access summary file: '${pathFromEnv}'. 
Check if the file has correct read/write permissions.`);\n }\n this._filePath = pathFromEnv;\n return this._filePath;\n });\n }\n /**\n * Wraps content in an HTML tag, adding any HTML attributes\n *\n * @param {string} tag HTML tag to wrap\n * @param {string | null} content content within the tag\n * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add\n *\n * @returns {string} content wrapped in HTML element\n */\n wrap(tag, content, attrs = {}) {\n const htmlAttrs = Object.entries(attrs)\n .map(([key, value]) => ` ${key}=\"${value}\"`)\n .join('');\n if (!content) {\n return `<${tag}${htmlAttrs}>`;\n }\n return `<${tag}${htmlAttrs}>${content}`;\n }\n /**\n * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default.\n *\n * @param {SummaryWriteOptions} [options] (optional) options for write operation\n *\n * @returns {Promise} summary instance\n */\n write(options) {\n return __awaiter(this, void 0, void 0, function* () {\n const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite);\n const filePath = yield this.filePath();\n const writeFunc = overwrite ? writeFile : appendFile;\n yield writeFunc(filePath, this._buffer, { encoding: 'utf8' });\n return this.emptyBuffer();\n });\n }\n /**\n * Clears the summary buffer and wipes the summary file\n *\n * @returns {Summary} summary instance\n */\n clear() {\n return __awaiter(this, void 0, void 0, function* () {\n return this.emptyBuffer().write({ overwrite: true });\n });\n }\n /**\n * Returns the current summary buffer as a string\n *\n * @returns {string} string of summary buffer\n */\n stringify() {\n return this._buffer;\n }\n /**\n * If the summary buffer is empty\n *\n * @returns {boolen} true if the buffer is empty\n */\n isEmptyBuffer() {\n return this._buffer.length === 0;\n }\n /**\n * Resets the summary buffer without writing to summary file\n *\n * @returns {Summary} summary instance\n */\n emptyBuffer() {\n this._buffer = '';\n return this;\n }\n /**\n * Adds raw text to the summary buffer\n *\n * @param {string} text content to add\n * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false)\n *\n * @returns {Summary} summary instance\n */\n addRaw(text, addEOL = false) {\n this._buffer += text;\n return addEOL ? this.addEOL() : this;\n }\n /**\n * Adds the operating system-specific end-of-line marker to the buffer\n *\n * @returns {Summary} summary instance\n */\n addEOL() {\n return this.addRaw(os_1.EOL);\n }\n /**\n * Adds an HTML codeblock to the summary buffer\n *\n * @param {string} code content to render within fenced code block\n * @param {string} lang (optional) language to syntax highlight code\n *\n * @returns {Summary} summary instance\n */\n addCodeBlock(code, lang) {\n const attrs = Object.assign({}, (lang && { lang }));\n const element = this.wrap('pre', this.wrap('code', code), attrs);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML list to the summary buffer\n *\n * @param {string[]} items list of items to render\n * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false)\n *\n * @returns {Summary} summary instance\n */\n addList(items, ordered = false) {\n const tag = ordered ? 
'ol' : 'ul';\n const listItems = items.map(item => this.wrap('li', item)).join('');\n const element = this.wrap(tag, listItems);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML table to the summary buffer\n *\n * @param {SummaryTableCell[]} rows table rows\n *\n * @returns {Summary} summary instance\n */\n addTable(rows) {\n const tableBody = rows\n .map(row => {\n const cells = row\n .map(cell => {\n if (typeof cell === 'string') {\n return this.wrap('td', cell);\n }\n const { header, data, colspan, rowspan } = cell;\n const tag = header ? 'th' : 'td';\n const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan }));\n return this.wrap(tag, data, attrs);\n })\n .join('');\n return this.wrap('tr', cells);\n })\n .join('');\n const element = this.wrap('table', tableBody);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds a collapsable HTML details element to the summary buffer\n *\n * @param {string} label text for the closed state\n * @param {string} content collapsable content\n *\n * @returns {Summary} summary instance\n */\n addDetails(label, content) {\n const element = this.wrap('details', this.wrap('summary', label) + content);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML image tag to the summary buffer\n *\n * @param {string} src path to the image you to embed\n * @param {string} alt text description of the image\n * @param {SummaryImageOptions} options (optional) addition image attributes\n *\n * @returns {Summary} summary instance\n */\n addImage(src, alt, options) {\n const { width, height } = options || {};\n const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height }));\n const element = this.wrap('img', null, Object.assign({ src, alt }, attrs));\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML section heading element\n *\n * @param {string} text heading text\n * @param {number | string} [level=1] (optional) the heading level, default: 1\n *\n * @returns {Summary} summary instance\n */\n addHeading(text, level) {\n const tag = `h${level}`;\n const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag)\n ? tag\n : 'h1';\n const element = this.wrap(allowedTag, text);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML thematic break (
<hr>) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addSeparator() {\n const element = this.wrap('hr', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML line break (<br>
) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addBreak() {\n const element = this.wrap('br', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML blockquote to the summary buffer\n *\n * @param {string} text quote text\n * @param {string} cite (optional) citation url\n *\n * @returns {Summary} summary instance\n */\n addQuote(text, cite) {\n const attrs = Object.assign({}, (cite && { cite }));\n const element = this.wrap('blockquote', text, attrs);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML anchor tag to the summary buffer\n *\n * @param {string} text link text/content\n * @param {string} href hyperlink\n *\n * @returns {Summary} summary instance\n */\n addLink(text, href) {\n const element = this.wrap('a', text, { href });\n return this.addRaw(element).addEOL();\n }\n}\nconst _summary = new Summary();\n/**\n * @deprecated use `core.summary`\n */\nexports.markdownSummary = _summary;\nexports.summary = _summary;\n//# sourceMappingURL=summary.js.map","\"use strict\";\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toCommandProperties = exports.toCommandValue = void 0;\n/**\n * Sanitizes an input into a string so it can be passed into issueCommand safely\n * @param input input to sanitize into a string\n */\nfunction toCommandValue(input) {\n if (input === null || input === undefined) {\n return '';\n }\n else if (typeof input === 'string' || input instanceof String) {\n return input;\n }\n return JSON.stringify(input);\n}\nexports.toCommandValue = toCommandValue;\n/**\n *\n * @param annotationProperties\n * @returns The command properties to send with the actual annotation command\n * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646\n */\nfunction toCommandProperties(annotationProperties) {\n if (!Object.keys(annotationProperties).length) {\n return {};\n }\n return {\n title: annotationProperties.title,\n file: annotationProperties.file,\n line: annotationProperties.startLine,\n endLine: annotationProperties.endLine,\n col: annotationProperties.startColumn,\n endColumn: annotationProperties.endColumn\n };\n}\nexports.toCommandProperties = toCommandProperties;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0;\nclass BasicCredentialHandler {\n constructor(username, password) {\n this.username = username;\n this.password = password;\n }\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.BasicCredentialHandler = BasicCredentialHandler;\nclass BearerCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Bearer ${this.token}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.BearerCredentialHandler = BearerCredentialHandler;\nclass PersonalAccessTokenCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;\n//# sourceMappingURL=auth.js.map","\"use strict\";\n/* eslint-disable @typescript-eslint/no-explicit-any */\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0;\nconst http = __importStar(require(\"http\"));\nconst https = __importStar(require(\"https\"));\nconst pm = __importStar(require(\"./proxy\"));\nconst tunnel = __importStar(require(\"tunnel\"));\nvar HttpCodes;\n(function (HttpCodes) {\n HttpCodes[HttpCodes[\"OK\"] = 200] = \"OK\";\n HttpCodes[HttpCodes[\"MultipleChoices\"] = 300] = \"MultipleChoices\";\n HttpCodes[HttpCodes[\"MovedPermanently\"] = 301] = \"MovedPermanently\";\n HttpCodes[HttpCodes[\"ResourceMoved\"] = 302] = \"ResourceMoved\";\n HttpCodes[HttpCodes[\"SeeOther\"] = 303] = \"SeeOther\";\n HttpCodes[HttpCodes[\"NotModified\"] = 304] = \"NotModified\";\n HttpCodes[HttpCodes[\"UseProxy\"] = 305] = \"UseProxy\";\n HttpCodes[HttpCodes[\"SwitchProxy\"] = 306] = \"SwitchProxy\";\n HttpCodes[HttpCodes[\"TemporaryRedirect\"] = 307] = \"TemporaryRedirect\";\n HttpCodes[HttpCodes[\"PermanentRedirect\"] = 308] = \"PermanentRedirect\";\n HttpCodes[HttpCodes[\"BadRequest\"] = 400] = \"BadRequest\";\n HttpCodes[HttpCodes[\"Unauthorized\"] = 401] = \"Unauthorized\";\n HttpCodes[HttpCodes[\"PaymentRequired\"] = 402] = \"PaymentRequired\";\n HttpCodes[HttpCodes[\"Forbidden\"] = 403] = \"Forbidden\";\n HttpCodes[HttpCodes[\"NotFound\"] = 404] = \"NotFound\";\n HttpCodes[HttpCodes[\"MethodNotAllowed\"] = 405] = \"MethodNotAllowed\";\n HttpCodes[HttpCodes[\"NotAcceptable\"] = 406] = \"NotAcceptable\";\n HttpCodes[HttpCodes[\"ProxyAuthenticationRequired\"] = 407] = \"ProxyAuthenticationRequired\";\n HttpCodes[HttpCodes[\"RequestTimeout\"] = 408] = \"RequestTimeout\";\n HttpCodes[HttpCodes[\"Conflict\"] = 409] = \"Conflict\";\n HttpCodes[HttpCodes[\"Gone\"] = 410] = \"Gone\";\n HttpCodes[HttpCodes[\"TooManyRequests\"] = 429] = \"TooManyRequests\";\n HttpCodes[HttpCodes[\"InternalServerError\"] = 500] = \"InternalServerError\";\n HttpCodes[HttpCodes[\"NotImplemented\"] = 501] = \"NotImplemented\";\n HttpCodes[HttpCodes[\"BadGateway\"] = 502] = \"BadGateway\";\n HttpCodes[HttpCodes[\"ServiceUnavailable\"] = 503] = \"ServiceUnavailable\";\n HttpCodes[HttpCodes[\"GatewayTimeout\"] = 504] = \"GatewayTimeout\";\n})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));\nvar Headers;\n(function (Headers) {\n Headers[\"Accept\"] = \"accept\";\n Headers[\"ContentType\"] = \"content-type\";\n})(Headers = exports.Headers || (exports.Headers = {}));\nvar MediaTypes;\n(function (MediaTypes) {\n MediaTypes[\"ApplicationJson\"] = \"application/json\";\n})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));\n/**\n * Returns the proxy URL, depending upon the supplied url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. 
For example, https://api.github.com\n */\nfunction getProxyUrl(serverUrl) {\n const proxyUrl = pm.getProxyUrl(new URL(serverUrl));\n return proxyUrl ? proxyUrl.href : '';\n}\nexports.getProxyUrl = getProxyUrl;\nconst HttpRedirectCodes = [\n HttpCodes.MovedPermanently,\n HttpCodes.ResourceMoved,\n HttpCodes.SeeOther,\n HttpCodes.TemporaryRedirect,\n HttpCodes.PermanentRedirect\n];\nconst HttpResponseRetryCodes = [\n HttpCodes.BadGateway,\n HttpCodes.ServiceUnavailable,\n HttpCodes.GatewayTimeout\n];\nconst RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];\nconst ExponentialBackoffCeiling = 10;\nconst ExponentialBackoffTimeSlice = 5;\nclass HttpClientError extends Error {\n constructor(message, statusCode) {\n super(message);\n this.name = 'HttpClientError';\n this.statusCode = statusCode;\n Object.setPrototypeOf(this, HttpClientError.prototype);\n }\n}\nexports.HttpClientError = HttpClientError;\nclass HttpClientResponse {\n constructor(message) {\n this.message = message;\n }\n readBody() {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {\n let output = Buffer.alloc(0);\n this.message.on('data', (chunk) => {\n output = Buffer.concat([output, chunk]);\n });\n this.message.on('end', () => {\n resolve(output.toString());\n });\n }));\n });\n }\n readBodyBuffer() {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {\n const chunks = [];\n this.message.on('data', (chunk) => {\n chunks.push(chunk);\n });\n this.message.on('end', () => {\n resolve(Buffer.concat(chunks));\n });\n }));\n });\n }\n}\nexports.HttpClientResponse = HttpClientResponse;\nfunction isHttps(requestUrl) {\n const parsedUrl = new URL(requestUrl);\n return parsedUrl.protocol === 'https:';\n}\nexports.isHttps = isHttps;\nclass HttpClient {\n constructor(userAgent, handlers, requestOptions) {\n this._ignoreSslError = false;\n this._allowRedirects = true;\n this._allowRedirectDowngrade = false;\n this._maxRedirects = 50;\n this._allowRetries = false;\n this._maxRetries = 1;\n this._keepAlive = false;\n this._disposed = false;\n this.userAgent = userAgent;\n this.handlers = handlers || [];\n this.requestOptions = requestOptions;\n if (requestOptions) {\n if (requestOptions.ignoreSslError != null) {\n this._ignoreSslError = requestOptions.ignoreSslError;\n }\n this._socketTimeout = requestOptions.socketTimeout;\n if (requestOptions.allowRedirects != null) {\n this._allowRedirects = requestOptions.allowRedirects;\n }\n if (requestOptions.allowRedirectDowngrade != null) {\n this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;\n }\n if (requestOptions.maxRedirects != null) {\n this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);\n }\n if (requestOptions.keepAlive != null) {\n this._keepAlive = requestOptions.keepAlive;\n }\n if (requestOptions.allowRetries != null) {\n this._allowRetries = requestOptions.allowRetries;\n }\n if (requestOptions.maxRetries != null) {\n this._maxRetries = requestOptions.maxRetries;\n }\n }\n }\n options(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});\n });\n }\n get(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('GET', requestUrl, null, additionalHeaders || {});\n });\n }\n del(requestUrl, additionalHeaders) {\n return 
__awaiter(this, void 0, void 0, function* () {\n return this.request('DELETE', requestUrl, null, additionalHeaders || {});\n });\n }\n post(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('POST', requestUrl, data, additionalHeaders || {});\n });\n }\n patch(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('PATCH', requestUrl, data, additionalHeaders || {});\n });\n }\n put(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('PUT', requestUrl, data, additionalHeaders || {});\n });\n }\n head(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('HEAD', requestUrl, null, additionalHeaders || {});\n });\n }\n sendStream(verb, requestUrl, stream, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request(verb, requestUrl, stream, additionalHeaders);\n });\n }\n /**\n * Gets a typed object from an endpoint\n * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise\n */\n getJson(requestUrl, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n const res = yield this.get(requestUrl, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n postJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.post(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n putJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.put(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n patchJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.patch(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n /**\n * Makes a raw http request.\n * All other methods such as get, post, patch, and request ultimately call this.\n * Prefer get, del, post and patch\n */\n request(verb, requestUrl, data, headers) {\n return __awaiter(this, void 0, void 0, function* () {\n if 
(this._disposed) {\n throw new Error('Client has already been disposed.');\n }\n const parsedUrl = new URL(requestUrl);\n let info = this._prepareRequest(verb, parsedUrl, headers);\n // Only perform retries on reads since writes may not be idempotent.\n const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb)\n ? this._maxRetries + 1\n : 1;\n let numTries = 0;\n let response;\n do {\n response = yield this.requestRaw(info, data);\n // Check if it's an authentication challenge\n if (response &&\n response.message &&\n response.message.statusCode === HttpCodes.Unauthorized) {\n let authenticationHandler;\n for (const handler of this.handlers) {\n if (handler.canHandleAuthentication(response)) {\n authenticationHandler = handler;\n break;\n }\n }\n if (authenticationHandler) {\n return authenticationHandler.handleAuthentication(this, info, data);\n }\n else {\n // We have received an unauthorized response but have no handlers to handle it.\n // Let the response return to the caller.\n return response;\n }\n }\n let redirectsRemaining = this._maxRedirects;\n while (response.message.statusCode &&\n HttpRedirectCodes.includes(response.message.statusCode) &&\n this._allowRedirects &&\n redirectsRemaining > 0) {\n const redirectUrl = response.message.headers['location'];\n if (!redirectUrl) {\n // if there's no location to redirect to, we won't\n break;\n }\n const parsedRedirectUrl = new URL(redirectUrl);\n if (parsedUrl.protocol === 'https:' &&\n parsedUrl.protocol !== parsedRedirectUrl.protocol &&\n !this._allowRedirectDowngrade) {\n throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');\n }\n // we need to finish reading the response before reassigning response\n // which will leak the open socket.\n yield response.readBody();\n // strip authorization header if redirected to a different hostname\n if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {\n for (const header in headers) {\n // header names are case insensitive\n if (header.toLowerCase() === 'authorization') {\n delete headers[header];\n }\n }\n }\n // let's make the request with the new redirectUrl\n info = this._prepareRequest(verb, parsedRedirectUrl, headers);\n response = yield this.requestRaw(info, data);\n redirectsRemaining--;\n }\n if (!response.message.statusCode ||\n !HttpResponseRetryCodes.includes(response.message.statusCode)) {\n // If not a retry code, return immediately instead of retrying\n return response;\n }\n numTries += 1;\n if (numTries < maxTries) {\n yield response.readBody();\n yield this._performExponentialBackoff(numTries);\n }\n } while (numTries < maxTries);\n return response;\n });\n }\n /**\n * Needs to be called if keepAlive is set to true in request options.\n */\n dispose() {\n if (this._agent) {\n this._agent.destroy();\n }\n this._disposed = true;\n }\n /**\n * Raw request.\n * @param info\n * @param data\n */\n requestRaw(info, data) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve, reject) => {\n function callbackForResult(err, res) {\n if (err) {\n reject(err);\n }\n else if (!res) {\n // If `err` is not passed, then `res` must be passed.\n reject(new Error('Unknown error'));\n }\n else {\n resolve(res);\n }\n }\n this.requestRawWithCallback(info, data, callbackForResult);\n });\n });\n }\n /**\n * Raw request with callback.\n * @param info\n * @param data\n * @param onResult\n */\n 
requestRawWithCallback(info, data, onResult) {\n if (typeof data === 'string') {\n if (!info.options.headers) {\n info.options.headers = {};\n }\n info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');\n }\n let callbackCalled = false;\n function handleResult(err, res) {\n if (!callbackCalled) {\n callbackCalled = true;\n onResult(err, res);\n }\n }\n const req = info.httpModule.request(info.options, (msg) => {\n const res = new HttpClientResponse(msg);\n handleResult(undefined, res);\n });\n let socket;\n req.on('socket', sock => {\n socket = sock;\n });\n // If we ever get disconnected, we want the socket to timeout eventually\n req.setTimeout(this._socketTimeout || 3 * 60000, () => {\n if (socket) {\n socket.end();\n }\n handleResult(new Error(`Request timeout: ${info.options.path}`));\n });\n req.on('error', function (err) {\n // err has statusCode property\n // res should have headers\n handleResult(err);\n });\n if (data && typeof data === 'string') {\n req.write(data, 'utf8');\n }\n if (data && typeof data !== 'string') {\n data.on('close', function () {\n req.end();\n });\n data.pipe(req);\n }\n else {\n req.end();\n }\n }\n /**\n * Gets an http agent. This function is useful when you need an http agent that handles\n * routing through a proxy server - depending upon the url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com\n */\n getAgent(serverUrl) {\n const parsedUrl = new URL(serverUrl);\n return this._getAgent(parsedUrl);\n }\n _prepareRequest(method, requestUrl, headers) {\n const info = {};\n info.parsedUrl = requestUrl;\n const usingSsl = info.parsedUrl.protocol === 'https:';\n info.httpModule = usingSsl ? https : http;\n const defaultPort = usingSsl ? 443 : 80;\n info.options = {};\n info.options.host = info.parsedUrl.hostname;\n info.options.port = info.parsedUrl.port\n ? 
parseInt(info.parsedUrl.port)\n : defaultPort;\n info.options.path =\n (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');\n info.options.method = method;\n info.options.headers = this._mergeHeaders(headers);\n if (this.userAgent != null) {\n info.options.headers['user-agent'] = this.userAgent;\n }\n info.options.agent = this._getAgent(info.parsedUrl);\n // gives handlers an opportunity to participate\n if (this.handlers) {\n for (const handler of this.handlers) {\n handler.prepareRequest(info.options);\n }\n }\n return info;\n }\n _mergeHeaders(headers) {\n if (this.requestOptions && this.requestOptions.headers) {\n return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {}));\n }\n return lowercaseKeys(headers || {});\n }\n _getExistingOrDefaultHeader(additionalHeaders, header, _default) {\n let clientHeader;\n if (this.requestOptions && this.requestOptions.headers) {\n clientHeader = lowercaseKeys(this.requestOptions.headers)[header];\n }\n return additionalHeaders[header] || clientHeader || _default;\n }\n _getAgent(parsedUrl) {\n let agent;\n const proxyUrl = pm.getProxyUrl(parsedUrl);\n const useProxy = proxyUrl && proxyUrl.hostname;\n if (this._keepAlive && useProxy) {\n agent = this._proxyAgent;\n }\n if (this._keepAlive && !useProxy) {\n agent = this._agent;\n }\n // if agent is already assigned use that agent.\n if (agent) {\n return agent;\n }\n const usingSsl = parsedUrl.protocol === 'https:';\n let maxSockets = 100;\n if (this.requestOptions) {\n maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;\n }\n // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis.\n if (proxyUrl && proxyUrl.hostname) {\n const agentOptions = {\n maxSockets,\n keepAlive: this._keepAlive,\n proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && {\n proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`\n })), { host: proxyUrl.hostname, port: proxyUrl.port })\n };\n let tunnelAgent;\n const overHttps = proxyUrl.protocol === 'https:';\n if (usingSsl) {\n tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;\n }\n else {\n tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;\n }\n agent = tunnelAgent(agentOptions);\n this._proxyAgent = agent;\n }\n // if reusing agent across request and tunneling agent isn't assigned create a new agent\n if (this._keepAlive && !agent) {\n const options = { keepAlive: this._keepAlive, maxSockets };\n agent = usingSsl ? new https.Agent(options) : new http.Agent(options);\n this._agent = agent;\n }\n // if not using private agent and tunnel agent isn't setup then use global agent\n if (!agent) {\n agent = usingSsl ? 
https.globalAgent : http.globalAgent;\n }\n if (usingSsl && this._ignoreSslError) {\n // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process\n // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options\n // we have to cast it to any and change it directly\n agent.options = Object.assign(agent.options || {}, {\n rejectUnauthorized: false\n });\n }\n return agent;\n }\n _performExponentialBackoff(retryNumber) {\n return __awaiter(this, void 0, void 0, function* () {\n retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);\n const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);\n return new Promise(resolve => setTimeout(() => resolve(), ms));\n });\n }\n _processResponse(res, options) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {\n const statusCode = res.message.statusCode || 0;\n const response = {\n statusCode,\n result: null,\n headers: {}\n };\n // not found leads to null obj returned\n if (statusCode === HttpCodes.NotFound) {\n resolve(response);\n }\n // get the result from the body\n function dateTimeDeserializer(key, value) {\n if (typeof value === 'string') {\n const a = new Date(value);\n if (!isNaN(a.valueOf())) {\n return a;\n }\n }\n return value;\n }\n let obj;\n let contents;\n try {\n contents = yield res.readBody();\n if (contents && contents.length > 0) {\n if (options && options.deserializeDates) {\n obj = JSON.parse(contents, dateTimeDeserializer);\n }\n else {\n obj = JSON.parse(contents);\n }\n response.result = obj;\n }\n response.headers = res.message.headers;\n }\n catch (err) {\n // Invalid resource (contents not json); leaving result obj null\n }\n // note that 3xx redirects are handled by the http layer.\n if (statusCode > 299) {\n let msg;\n // if exception/error in body, attempt to get better error\n if (obj && obj.message) {\n msg = obj.message;\n }\n else if (contents && contents.length > 0) {\n // it may be the case that the exception is in the body message as string\n msg = contents;\n }\n else {\n msg = `Failed request: (${statusCode})`;\n }\n const err = new HttpClientError(msg, statusCode);\n err.result = response.result;\n reject(err);\n }\n else {\n resolve(response);\n }\n }));\n });\n }\n}\nexports.HttpClient = HttpClient;\nconst lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.checkBypass = exports.getProxyUrl = void 0;\nfunction getProxyUrl(reqUrl) {\n const usingSsl = reqUrl.protocol === 'https:';\n if (checkBypass(reqUrl)) {\n return undefined;\n }\n const proxyVar = (() => {\n if (usingSsl) {\n return process.env['https_proxy'] || process.env['HTTPS_PROXY'];\n }\n else {\n return process.env['http_proxy'] || process.env['HTTP_PROXY'];\n }\n })();\n if (proxyVar) {\n try {\n return new URL(proxyVar);\n }\n catch (_a) {\n if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))\n return new URL(`http://${proxyVar}`);\n }\n }\n else {\n return undefined;\n }\n}\nexports.getProxyUrl = getProxyUrl;\nfunction checkBypass(reqUrl) {\n if (!reqUrl.hostname) {\n return false;\n }\n const reqHost = reqUrl.hostname;\n if (isLoopbackAddress(reqHost)) {\n return true;\n }\n const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';\n if (!noProxy) {\n return 
false;\n }\n // Determine the request port\n let reqPort;\n if (reqUrl.port) {\n reqPort = Number(reqUrl.port);\n }\n else if (reqUrl.protocol === 'http:') {\n reqPort = 80;\n }\n else if (reqUrl.protocol === 'https:') {\n reqPort = 443;\n }\n // Format the request hostname and hostname with port\n const upperReqHosts = [reqUrl.hostname.toUpperCase()];\n if (typeof reqPort === 'number') {\n upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);\n }\n // Compare request host against noproxy\n for (const upperNoProxyItem of noProxy\n .split(',')\n .map(x => x.trim().toUpperCase())\n .filter(x => x)) {\n if (upperNoProxyItem === '*' ||\n upperReqHosts.some(x => x === upperNoProxyItem ||\n x.endsWith(`.${upperNoProxyItem}`) ||\n (upperNoProxyItem.startsWith('.') &&\n x.endsWith(`${upperNoProxyItem}`)))) {\n return true;\n }\n }\n return false;\n}\nexports.checkBypass = checkBypass;\nfunction isLoopbackAddress(host) {\n const hostLower = host.toLowerCase();\n return (hostLower === 'localhost' ||\n hostLower.startsWith('127.') ||\n hostLower.startsWith('[::1]') ||\n hostLower.startsWith('[0:0:0:0:0:0:0:1]'));\n}\n//# sourceMappingURL=proxy.js.map","module.exports = require('./lib/tunnel');\n","'use strict';\n\nvar net = require('net');\nvar tls = require('tls');\nvar http = require('http');\nvar https = require('https');\nvar events = require('events');\nvar assert = require('assert');\nvar util = require('util');\n\n\nexports.httpOverHttp = httpOverHttp;\nexports.httpsOverHttp = httpsOverHttp;\nexports.httpOverHttps = httpOverHttps;\nexports.httpsOverHttps = httpsOverHttps;\n\n\nfunction httpOverHttp(options) {\n var agent = new TunnelingAgent(options);\n agent.request = http.request;\n return agent;\n}\n\nfunction httpsOverHttp(options) {\n var agent = new TunnelingAgent(options);\n agent.request = http.request;\n agent.createSocket = createSecureSocket;\n agent.defaultPort = 443;\n return agent;\n}\n\nfunction httpOverHttps(options) {\n var agent = new TunnelingAgent(options);\n agent.request = https.request;\n return agent;\n}\n\nfunction httpsOverHttps(options) {\n var agent = new TunnelingAgent(options);\n agent.request = https.request;\n agent.createSocket = createSecureSocket;\n agent.defaultPort = 443;\n return agent;\n}\n\n\nfunction TunnelingAgent(options) {\n var self = this;\n self.options = options || {};\n self.proxyOptions = self.options.proxy || {};\n self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;\n self.requests = [];\n self.sockets = [];\n\n self.on('free', function onFree(socket, host, port, localAddress) {\n var options = toOptions(host, port, localAddress);\n for (var i = 0, len = self.requests.length; i < len; ++i) {\n var pending = self.requests[i];\n if (pending.host === options.host && pending.port === options.port) {\n // Detect the request to connect same origin server,\n // reuse the connection.\n self.requests.splice(i, 1);\n pending.request.onSocket(socket);\n return;\n }\n }\n socket.destroy();\n self.removeSocket(socket);\n });\n}\nutil.inherits(TunnelingAgent, events.EventEmitter);\n\nTunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {\n var self = this;\n var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));\n\n if (self.sockets.length >= this.maxSockets) {\n // We are over limit so we'll add it to the queue.\n self.requests.push(options);\n return;\n }\n\n // If we are under maxSockets create a new one.\n self.createSocket(options, function(socket) {\n 
socket.on('free', onFree);\n socket.on('close', onCloseOrRemove);\n socket.on('agentRemove', onCloseOrRemove);\n req.onSocket(socket);\n\n function onFree() {\n self.emit('free', socket, options);\n }\n\n function onCloseOrRemove(err) {\n self.removeSocket(socket);\n socket.removeListener('free', onFree);\n socket.removeListener('close', onCloseOrRemove);\n socket.removeListener('agentRemove', onCloseOrRemove);\n }\n });\n};\n\nTunnelingAgent.prototype.createSocket = function createSocket(options, cb) {\n var self = this;\n var placeholder = {};\n self.sockets.push(placeholder);\n\n var connectOptions = mergeOptions({}, self.proxyOptions, {\n method: 'CONNECT',\n path: options.host + ':' + options.port,\n agent: false,\n headers: {\n host: options.host + ':' + options.port\n }\n });\n if (options.localAddress) {\n connectOptions.localAddress = options.localAddress;\n }\n if (connectOptions.proxyAuth) {\n connectOptions.headers = connectOptions.headers || {};\n connectOptions.headers['Proxy-Authorization'] = 'Basic ' +\n new Buffer(connectOptions.proxyAuth).toString('base64');\n }\n\n debug('making CONNECT request');\n var connectReq = self.request(connectOptions);\n connectReq.useChunkedEncodingByDefault = false; // for v0.6\n connectReq.once('response', onResponse); // for v0.6\n connectReq.once('upgrade', onUpgrade); // for v0.6\n connectReq.once('connect', onConnect); // for v0.7 or later\n connectReq.once('error', onError);\n connectReq.end();\n\n function onResponse(res) {\n // Very hacky. This is necessary to avoid http-parser leaks.\n res.upgrade = true;\n }\n\n function onUpgrade(res, socket, head) {\n // Hacky.\n process.nextTick(function() {\n onConnect(res, socket, head);\n });\n }\n\n function onConnect(res, socket, head) {\n connectReq.removeAllListeners();\n socket.removeAllListeners();\n\n if (res.statusCode !== 200) {\n debug('tunneling socket could not be established, statusCode=%d',\n res.statusCode);\n socket.destroy();\n var error = new Error('tunneling socket could not be established, ' +\n 'statusCode=' + res.statusCode);\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n return;\n }\n if (head.length > 0) {\n debug('got illegal response body from proxy');\n socket.destroy();\n var error = new Error('got illegal response body from proxy');\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n return;\n }\n debug('tunneling connection has established');\n self.sockets[self.sockets.indexOf(placeholder)] = socket;\n return cb(socket);\n }\n\n function onError(cause) {\n connectReq.removeAllListeners();\n\n debug('tunneling socket could not be established, cause=%s\\n',\n cause.message, cause.stack);\n var error = new Error('tunneling socket could not be established, ' +\n 'cause=' + cause.message);\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n }\n};\n\nTunnelingAgent.prototype.removeSocket = function removeSocket(socket) {\n var pos = this.sockets.indexOf(socket)\n if (pos === -1) {\n return;\n }\n this.sockets.splice(pos, 1);\n\n var pending = this.requests.shift();\n if (pending) {\n // If we have pending requests and a socket gets closed a new one\n // needs to be created to take over in the pool for the one that closed.\n this.createSocket(pending, function(socket) {\n pending.request.onSocket(socket);\n });\n }\n};\n\nfunction createSecureSocket(options, cb) {\n var self = this;\n 
TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {\n var hostHeader = options.request.getHeader('host');\n var tlsOptions = mergeOptions({}, self.options, {\n socket: socket,\n servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host\n });\n\n // 0 is dummy port for v0.6\n var secureSocket = tls.connect(0, tlsOptions);\n self.sockets[self.sockets.indexOf(socket)] = secureSocket;\n cb(secureSocket);\n });\n}\n\n\nfunction toOptions(host, port, localAddress) {\n if (typeof host === 'string') { // since v0.10\n return {\n host: host,\n port: port,\n localAddress: localAddress\n };\n }\n return host; // for v0.11 or later\n}\n\nfunction mergeOptions(target) {\n for (var i = 1, len = arguments.length; i < len; ++i) {\n var overrides = arguments[i];\n if (typeof overrides === 'object') {\n var keys = Object.keys(overrides);\n for (var j = 0, keyLen = keys.length; j < keyLen; ++j) {\n var k = keys[j];\n if (overrides[k] !== undefined) {\n target[k] = overrides[k];\n }\n }\n }\n }\n return target;\n}\n\n\nvar debug;\nif (process.env.NODE_DEBUG && /\\btunnel\\b/.test(process.env.NODE_DEBUG)) {\n debug = function() {\n var args = Array.prototype.slice.call(arguments);\n if (typeof args[0] === 'string') {\n args[0] = 'TUNNEL: ' + args[0];\n } else {\n args.unshift('TUNNEL:');\n }\n console.error.apply(console, args);\n }\n} else {\n debug = function() {};\n}\nexports.debug = debug; // for test\n","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nObject.defineProperty(exports, \"v1\", {\n enumerable: true,\n get: function () {\n return _v.default;\n }\n});\nObject.defineProperty(exports, \"v3\", {\n enumerable: true,\n get: function () {\n return _v2.default;\n }\n});\nObject.defineProperty(exports, \"v4\", {\n enumerable: true,\n get: function () {\n return _v3.default;\n }\n});\nObject.defineProperty(exports, \"v5\", {\n enumerable: true,\n get: function () {\n return _v4.default;\n }\n});\nObject.defineProperty(exports, \"NIL\", {\n enumerable: true,\n get: function () {\n return _nil.default;\n }\n});\nObject.defineProperty(exports, \"version\", {\n enumerable: true,\n get: function () {\n return _version.default;\n }\n});\nObject.defineProperty(exports, \"validate\", {\n enumerable: true,\n get: function () {\n return _validate.default;\n }\n});\nObject.defineProperty(exports, \"stringify\", {\n enumerable: true,\n get: function () {\n return _stringify.default;\n }\n});\nObject.defineProperty(exports, \"parse\", {\n enumerable: true,\n get: function () {\n return _parse.default;\n }\n});\n\nvar _v = _interopRequireDefault(require(\"./v1.js\"));\n\nvar _v2 = _interopRequireDefault(require(\"./v3.js\"));\n\nvar _v3 = _interopRequireDefault(require(\"./v4.js\"));\n\nvar _v4 = _interopRequireDefault(require(\"./v5.js\"));\n\nvar _nil = _interopRequireDefault(require(\"./nil.js\"));\n\nvar _version = _interopRequireDefault(require(\"./version.js\"));\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nvar _parse = _interopRequireDefault(require(\"./parse.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction md5(bytes) {\n if (Array.isArray(bytes)) {\n bytes = Buffer.from(bytes);\n } else if (typeof bytes === 'string') {\n bytes = Buffer.from(bytes, 'utf8');\n }\n\n return _crypto.default.createHash('md5').update(bytes).digest();\n}\n\nvar _default = md5;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\nvar _default = '00000000-0000-0000-0000-000000000000';\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction parse(uuid) {\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Invalid UUID');\n }\n\n let v;\n const arr = new Uint8Array(16); // Parse ########-....-....-....-............\n\n arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24;\n arr[1] = v >>> 16 & 0xff;\n arr[2] = v >>> 8 & 0xff;\n arr[3] = v & 0xff; // Parse ........-####-....-....-............\n\n arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8;\n arr[5] = v & 0xff; // Parse ........-....-####-....-............\n\n arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8;\n arr[7] = v & 0xff; // Parse ........-....-....-####-............\n\n arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8;\n arr[9] = v & 0xff; // Parse ........-....-....-....-############\n // (Use \"/\" to avoid 32-bit truncation when bit-shifting high-order bytes)\n\n arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff;\n arr[11] = v / 0x100000000 & 0xff;\n arr[12] = v >>> 24 & 0xff;\n arr[13] = v >>> 16 & 0xff;\n arr[14] = v >>> 8 & 0xff;\n arr[15] = v & 0xff;\n return arr;\n}\n\nvar _default = parse;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\nvar _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = rng;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate\n\nlet poolPtr = rnds8Pool.length;\n\nfunction rng() {\n if (poolPtr > rnds8Pool.length - 16) {\n _crypto.default.randomFillSync(rnds8Pool);\n\n poolPtr = 0;\n }\n\n return rnds8Pool.slice(poolPtr, poolPtr += 16);\n}","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction sha1(bytes) {\n if (Array.isArray(bytes)) {\n bytes = Buffer.from(bytes);\n } else if (typeof bytes === 'string') {\n bytes = Buffer.from(bytes, 'utf8');\n }\n\n return _crypto.default.createHash('sha1').update(bytes).digest();\n}\n\nvar _default = sha1;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\n/**\n * Convert array of 16 byte values to UUID string format of the form:\n * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX\n */\nconst byteToHex = [];\n\nfor (let i = 0; i < 256; ++i) {\n byteToHex.push((i + 0x100).toString(16).substr(1));\n}\n\nfunction stringify(arr, offset = 0) {\n // Note: Be careful editing this code! It's been tuned for performance\n // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434\n const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one\n // of the following:\n // - One or more input array values don't map to a hex octet (leading to\n // \"undefined\" in the uuid)\n // - Invalid input values for the RFC `version` or `variant` fields\n\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Stringified UUID is invalid');\n }\n\n return uuid;\n}\n\nvar _default = stringify;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _rng = _interopRequireDefault(require(\"./rng.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\n// **`v1()` - Generate time-based UUID**\n//\n// Inspired by https://github.com/LiosK/UUID.js\n// and http://docs.python.org/library/uuid.html\nlet _nodeId;\n\nlet _clockseq; // Previous uuid creation time\n\n\nlet _lastMSecs = 0;\nlet _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details\n\nfunction v1(options, buf, offset) {\n let i = buf && offset || 0;\n const b = buf || new Array(16);\n options = options || {};\n let node = options.node || _nodeId;\n let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not\n // specified. We do this lazily to minimize issues related to insufficient\n // system entropy. 
See #189\n\n if (node == null || clockseq == null) {\n const seedBytes = options.random || (options.rng || _rng.default)();\n\n if (node == null) {\n // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1)\n node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]];\n }\n\n if (clockseq == null) {\n // Per 4.2.2, randomize (14 bit) clockseq\n clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;\n }\n } // UUID timestamps are 100 nano-second units since the Gregorian epoch,\n // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so\n // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'\n // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.\n\n\n let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock\n // cycle to simulate higher resolution clock\n\n let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs)\n\n const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression\n\n if (dt < 0 && options.clockseq === undefined) {\n clockseq = clockseq + 1 & 0x3fff;\n } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new\n // time interval\n\n\n if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {\n nsecs = 0;\n } // Per 4.2.1.2 Throw error if too many uuids are requested\n\n\n if (nsecs >= 10000) {\n throw new Error(\"uuid.v1(): Can't create more than 10M uuids/sec\");\n }\n\n _lastMSecs = msecs;\n _lastNSecs = nsecs;\n _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch\n\n msecs += 12219292800000; // `time_low`\n\n const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;\n b[i++] = tl >>> 24 & 0xff;\n b[i++] = tl >>> 16 & 0xff;\n b[i++] = tl >>> 8 & 0xff;\n b[i++] = tl & 0xff; // `time_mid`\n\n const tmh = msecs / 0x100000000 * 10000 & 0xfffffff;\n b[i++] = tmh >>> 8 & 0xff;\n b[i++] = tmh & 0xff; // `time_high_and_version`\n\n b[i++] = tmh >>> 24 & 0xf | 0x10; // include version\n\n b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)\n\n b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low`\n\n b[i++] = clockseq & 0xff; // `node`\n\n for (let n = 0; n < 6; ++n) {\n b[i + n] = node[n];\n }\n\n return buf || (0, _stringify.default)(b);\n}\n\nvar _default = v1;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _v = _interopRequireDefault(require(\"./v35.js\"));\n\nvar _md = _interopRequireDefault(require(\"./md5.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst v3 = (0, _v.default)('v3', 0x30, _md.default);\nvar _default = v3;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = _default;\nexports.URL = exports.DNS = void 0;\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nvar _parse = _interopRequireDefault(require(\"./parse.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction stringToBytes(str) {\n str = unescape(encodeURIComponent(str)); // UTF8 escape\n\n const bytes = [];\n\n for (let i = 0; i < str.length; ++i) {\n bytes.push(str.charCodeAt(i));\n }\n\n return bytes;\n}\n\nconst DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';\nexports.DNS = DNS;\nconst URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8';\nexports.URL = URL;\n\nfunction _default(name, version, hashfunc) {\n function generateUUID(value, namespace, buf, offset) {\n if (typeof value === 'string') {\n value = stringToBytes(value);\n }\n\n if (typeof namespace === 'string') {\n namespace = (0, _parse.default)(namespace);\n }\n\n if (namespace.length !== 16) {\n throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)');\n } // Compute hash of namespace and value, Per 4.3\n // Future: Use spread syntax when supported on all platforms, e.g. `bytes =\n // hashfunc([...namespace, ... value])`\n\n\n let bytes = new Uint8Array(16 + value.length);\n bytes.set(namespace);\n bytes.set(value, namespace.length);\n bytes = hashfunc(bytes);\n bytes[6] = bytes[6] & 0x0f | version;\n bytes[8] = bytes[8] & 0x3f | 0x80;\n\n if (buf) {\n offset = offset || 0;\n\n for (let i = 0; i < 16; ++i) {\n buf[offset + i] = bytes[i];\n }\n\n return buf;\n }\n\n return (0, _stringify.default)(bytes);\n } // Function#name is not settable on some platforms (#270)\n\n\n try {\n generateUUID.name = name; // eslint-disable-next-line no-empty\n } catch (err) {} // For CommonJS default export support\n\n\n generateUUID.DNS = DNS;\n generateUUID.URL = URL;\n return generateUUID;\n}","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _rng = _interopRequireDefault(require(\"./rng.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction v4(options, buf, offset) {\n options = options || {};\n\n const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`\n\n\n rnds[6] = rnds[6] & 0x0f | 0x40;\n rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided\n\n if (buf) {\n offset = offset || 0;\n\n for (let i = 0; i < 16; ++i) {\n buf[offset + i] = rnds[i];\n }\n\n return buf;\n }\n\n return (0, _stringify.default)(rnds);\n}\n\nvar _default = v4;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _v = _interopRequireDefault(require(\"./v35.js\"));\n\nvar _sha = _interopRequireDefault(require(\"./sha1.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst v5 = (0, _v.default)('v5', 0x50, _sha.default);\nvar _default = v5;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _regex = _interopRequireDefault(require(\"./regex.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction validate(uuid) {\n return typeof uuid === 'string' && _regex.default.test(uuid);\n}\n\nvar _default = validate;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction version(uuid) {\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Invalid UUID');\n }\n\n return parseInt(uuid.substr(14, 1), 16);\n}\n\nvar _default = version;\nexports.default = _default;","/**\n * Formats a input name to a snake case string\n *\n * - Removes leading and trailing whitespace\n * - Converts to lowercase\n * - Replaces spaces with underscores\n * - Replaces non-alphanumeric characters with underscores\n * - Replaces multiple consecutive underscores with a single underscore\n */\nexport function formatKey(name: string): string {\n return name\n .trim()\n .toLowerCase()\n .replace(/[^a-z0-9]/g, '_')\n .replace(/^_+|_+$/g, '')\n .replace(/_+/g, '_')\n}\n","import * as core from '@actions/core'\nimport fs from 'fs'\nimport YAML from 'yaml'\nimport { parseTemplate } from './parse'\nimport { IssueFormTemplate } from './interfaces'\n\n/**\n * The entrypoint for the action\n */\nexport async function run(): Promise {\n // Get inputs\n const template: string = core.getInput('issue-form-template', {\n required: true\n })\n const workspace: string = core.getInput('workspace', { required: true })\n\n // Verify the template exists\n if (!fs.existsSync(`${workspace.replace(/\\/+$/, '')}/${template}`)) {\n core.setFailed(`Template not found: ${template}`)\n return\n }\n\n // Read and parse the template\n const templateYaml: IssueFormTemplate = YAML.parse(\n fs.readFileSync(`${workspace}/${template}`, 'utf8')\n )\n await parseTemplate(templateYaml)\n}\n","import { IssueFormTemplate, FormattedField } from './interfaces'\nimport { formatKey } from './format'\n\n/**\n * Parses the issue form template and returns a dictionary of fields\n * @param template The issue form template\n * @returns A dictionary of fields\n */\nexport async function parseTemplate(\n template: IssueFormTemplate\n): Promise<{ [key: string]: FormattedField }> {\n const fields: { [key: string]: FormattedField } = {}\n\n for (const item of template.body) {\n // Skip markdown fields\n if (item.type === 'markdown') continue\n\n // Convert the label to snake case. 
This is the heading in the issue body\n // when the form is submitted, and is used by issue-ops/parser as the key.\n const formattedKey: string = formatKey(item.attributes.label)\n\n // Take the rest of the attributes and add them to the fields object\n fields[formattedKey] = {\n type: item.type,\n required: item.validations?.required || false\n }\n\n if (item.type === 'dropdown') {\n // These fields are only used by dropdowns\n fields[formattedKey].default = item.attributes.default\n fields[formattedKey].multiple = item.attributes.multiple || false\n fields[formattedKey].options = item.attributes.options\n }\n\n if (item.type === 'checkboxes') {\n // Checkboxes have a different options format than dropdowns\n // Enforce false for required if not present\n fields[formattedKey].options = item.attributes.options.map((x) => {\n return { label: x.label, required: x.required || false }\n })\n }\n }\n\n return fields\n}\n","module.exports = require(\"assert\");","module.exports = require(\"crypto\");","module.exports = require(\"events\");","module.exports = require(\"fs\");","module.exports = require(\"http\");","module.exports = require(\"https\");","module.exports = require(\"net\");","module.exports = require(\"os\");","module.exports = require(\"path\");","module.exports = require(\"tls\");","module.exports = require(\"util\");","'use strict';\n\nvar identity = require('../nodes/identity.js');\nvar Scalar = require('../nodes/Scalar.js');\nvar YAMLMap = require('../nodes/YAMLMap.js');\nvar YAMLSeq = require('../nodes/YAMLSeq.js');\nvar resolveBlockMap = require('./resolve-block-map.js');\nvar resolveBlockSeq = require('./resolve-block-seq.js');\nvar resolveFlowCollection = require('./resolve-flow-collection.js');\n\nfunction resolveCollection(CN, ctx, token, onError, tagName, tag) {\n const coll = token.type === 'block-map'\n ? resolveBlockMap.resolveBlockMap(CN, ctx, token, onError, tag)\n : token.type === 'block-seq'\n ? resolveBlockSeq.resolveBlockSeq(CN, ctx, token, onError, tag)\n : resolveFlowCollection.resolveFlowCollection(CN, ctx, token, onError, tag);\n const Coll = coll.constructor;\n // If we got a tagName matching the class, or the tag name is '!',\n // then use the tagName from the node class used to create it.\n if (tagName === '!' || tagName === Coll.tagName) {\n coll.tag = Coll.tagName;\n return coll;\n }\n if (tagName)\n coll.tag = tagName;\n return coll;\n}\nfunction composeCollection(CN, ctx, token, tagToken, onError) {\n const tagName = !tagToken\n ? null\n : ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg));\n const expType = token.type === 'block-map'\n ? 'map'\n : token.type === 'block-seq'\n ? 'seq'\n : token.start.source === '{'\n ? 'map'\n : 'seq';\n // shortcut: check if it's a generic YAMLMap or YAMLSeq\n // before jumping into the custom tag logic.\n if (!tagToken ||\n !tagName ||\n tagName === '!' 
||\n (tagName === YAMLMap.YAMLMap.tagName && expType === 'map') ||\n (tagName === YAMLSeq.YAMLSeq.tagName && expType === 'seq') ||\n !expType) {\n return resolveCollection(CN, ctx, token, onError, tagName);\n }\n let tag = ctx.schema.tags.find(t => t.tag === tagName && t.collection === expType);\n if (!tag) {\n const kt = ctx.schema.knownTags[tagName];\n if (kt && kt.collection === expType) {\n ctx.schema.tags.push(Object.assign({}, kt, { default: false }));\n tag = kt;\n }\n else {\n if (kt?.collection) {\n onError(tagToken, 'BAD_COLLECTION_TYPE', `${kt.tag} used for ${expType} collection, but expects ${kt.collection}`, true);\n }\n else {\n onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, true);\n }\n return resolveCollection(CN, ctx, token, onError, tagName);\n }\n }\n const coll = resolveCollection(CN, ctx, token, onError, tagName, tag);\n const res = tag.resolve?.(coll, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg), ctx.options) ?? coll;\n const node = identity.isNode(res)\n ? res\n : new Scalar.Scalar(res);\n node.range = coll.range;\n node.tag = tagName;\n if (tag?.format)\n node.format = tag.format;\n return node;\n}\n\nexports.composeCollection = composeCollection;\n","'use strict';\n\nvar Document = require('../doc/Document.js');\nvar composeNode = require('./compose-node.js');\nvar resolveEnd = require('./resolve-end.js');\nvar resolveProps = require('./resolve-props.js');\n\nfunction composeDoc(options, directives, { offset, start, value, end }, onError) {\n const opts = Object.assign({ _directives: directives }, options);\n const doc = new Document.Document(undefined, opts);\n const ctx = {\n atRoot: true,\n directives: doc.directives,\n options: doc.options,\n schema: doc.schema\n };\n const props = resolveProps.resolveProps(start, {\n indicator: 'doc-start',\n next: value ?? end?.[0],\n offset,\n onError,\n startOnNewline: true\n });\n if (props.found) {\n doc.directives.docStart = true;\n if (value &&\n (value.type === 'block-map' || value.type === 'block-seq') &&\n !props.hasNewline)\n onError(props.end, 'MISSING_CHAR', 'Block collection cannot start on same line with directives-end marker');\n }\n // @ts-expect-error If Contents is set, let's trust the user\n doc.contents = value\n ? 
composeNode.composeNode(ctx, value, props, onError)\n : composeNode.composeEmptyNode(ctx, props.end, start, null, props, onError);\n const contentEnd = doc.contents.range[2];\n const re = resolveEnd.resolveEnd(end, contentEnd, false, onError);\n if (re.comment)\n doc.comment = re.comment;\n doc.range = [offset, contentEnd, re.offset];\n return doc;\n}\n\nexports.composeDoc = composeDoc;\n","'use strict';\n\nvar Alias = require('../nodes/Alias.js');\nvar composeCollection = require('./compose-collection.js');\nvar composeScalar = require('./compose-scalar.js');\nvar resolveEnd = require('./resolve-end.js');\nvar utilEmptyScalarPosition = require('./util-empty-scalar-position.js');\n\nconst CN = { composeNode, composeEmptyNode };\nfunction composeNode(ctx, token, props, onError) {\n const { spaceBefore, comment, anchor, tag } = props;\n let node;\n let isSrcToken = true;\n switch (token.type) {\n case 'alias':\n node = composeAlias(ctx, token, onError);\n if (anchor || tag)\n onError(token, 'ALIAS_PROPS', 'An alias node must not specify any properties');\n break;\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar':\n case 'block-scalar':\n node = composeScalar.composeScalar(ctx, token, tag, onError);\n if (anchor)\n node.anchor = anchor.source.substring(1);\n break;\n case 'block-map':\n case 'block-seq':\n case 'flow-collection':\n node = composeCollection.composeCollection(CN, ctx, token, tag, onError);\n if (anchor)\n node.anchor = anchor.source.substring(1);\n break;\n default: {\n const message = token.type === 'error'\n ? token.message\n : `Unsupported token (type: ${token.type})`;\n onError(token, 'UNEXPECTED_TOKEN', message);\n node = composeEmptyNode(ctx, token.offset, undefined, null, props, onError);\n isSrcToken = false;\n }\n }\n if (anchor && node.anchor === '')\n onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string');\n if (spaceBefore)\n node.spaceBefore = true;\n if (comment) {\n if (token.type === 'scalar' && token.source === '')\n node.comment = comment;\n else\n node.commentBefore = comment;\n }\n // @ts-expect-error Type checking misses meaning of isSrcToken\n if (ctx.options.keepSourceTokens && isSrcToken)\n node.srcToken = token;\n return node;\n}\nfunction composeEmptyNode(ctx, offset, before, pos, { spaceBefore, comment, anchor, tag, end }, onError) {\n const token = {\n type: 'scalar',\n offset: utilEmptyScalarPosition.emptyScalarPosition(offset, before, pos),\n indent: -1,\n source: ''\n };\n const node = composeScalar.composeScalar(ctx, token, tag, onError);\n if (anchor) {\n node.anchor = anchor.source.substring(1);\n if (node.anchor === '')\n onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string');\n }\n if (spaceBefore)\n node.spaceBefore = true;\n if (comment) {\n node.comment = comment;\n node.range[2] = end;\n }\n return node;\n}\nfunction composeAlias({ options }, { offset, source, end }, onError) {\n const alias = new Alias.Alias(source.substring(1));\n if (alias.source === '')\n onError(offset, 'BAD_ALIAS', 'Alias cannot be an empty string');\n if (alias.source.endsWith(':'))\n onError(offset + source.length - 1, 'BAD_ALIAS', 'Alias ending in : is ambiguous', true);\n const valueEnd = offset + source.length;\n const re = resolveEnd.resolveEnd(end, valueEnd, options.strict, onError);\n alias.range = [offset, valueEnd, re.offset];\n if (re.comment)\n alias.comment = re.comment;\n return alias;\n}\n\nexports.composeEmptyNode = composeEmptyNode;\nexports.composeNode = composeNode;\n","'use strict';\n\nvar identity 
= require('../nodes/identity.js');\nvar Scalar = require('../nodes/Scalar.js');\nvar resolveBlockScalar = require('./resolve-block-scalar.js');\nvar resolveFlowScalar = require('./resolve-flow-scalar.js');\n\nfunction composeScalar(ctx, token, tagToken, onError) {\n const { value, type, comment, range } = token.type === 'block-scalar'\n ? resolveBlockScalar.resolveBlockScalar(token, ctx.options.strict, onError)\n : resolveFlowScalar.resolveFlowScalar(token, ctx.options.strict, onError);\n const tagName = tagToken\n ? ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg))\n : null;\n const tag = tagToken && tagName\n ? findScalarTagByName(ctx.schema, value, tagName, tagToken, onError)\n : token.type === 'scalar'\n ? findScalarTagByTest(ctx, value, token, onError)\n : ctx.schema[identity.SCALAR];\n let scalar;\n try {\n const res = tag.resolve(value, msg => onError(tagToken ?? token, 'TAG_RESOLVE_FAILED', msg), ctx.options);\n scalar = identity.isScalar(res) ? res : new Scalar.Scalar(res);\n }\n catch (error) {\n const msg = error instanceof Error ? error.message : String(error);\n onError(tagToken ?? token, 'TAG_RESOLVE_FAILED', msg);\n scalar = new Scalar.Scalar(value);\n }\n scalar.range = range;\n scalar.source = value;\n if (type)\n scalar.type = type;\n if (tagName)\n scalar.tag = tagName;\n if (tag.format)\n scalar.format = tag.format;\n if (comment)\n scalar.comment = comment;\n return scalar;\n}\nfunction findScalarTagByName(schema, value, tagName, tagToken, onError) {\n if (tagName === '!')\n return schema[identity.SCALAR]; // non-specific tag\n const matchWithTest = [];\n for (const tag of schema.tags) {\n if (!tag.collection && tag.tag === tagName) {\n if (tag.default && tag.test)\n matchWithTest.push(tag);\n else\n return tag;\n }\n }\n for (const tag of matchWithTest)\n if (tag.test?.test(value))\n return tag;\n const kt = schema.knownTags[tagName];\n if (kt && !kt.collection) {\n // Ensure that the known tag is available for stringifying,\n // but does not get used by default.\n schema.tags.push(Object.assign({}, kt, { default: false, test: undefined }));\n return kt;\n }\n onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, tagName !== 'tag:yaml.org,2002:str');\n return schema[identity.SCALAR];\n}\nfunction findScalarTagByTest({ directives, schema }, value, token, onError) {\n const tag = schema.tags.find(tag => tag.default && tag.test?.test(value)) || schema[identity.SCALAR];\n if (schema.compat) {\n const compat = schema.compat.find(tag => tag.default && tag.test?.test(value)) ??\n schema[identity.SCALAR];\n if (tag.tag !== compat.tag) {\n const ts = directives.tagString(tag.tag);\n const cs = directives.tagString(compat.tag);\n const msg = `Value may be parsed as either ${ts} or ${cs}`;\n onError(token, 'TAG_RESOLVE_FAILED', msg, true);\n }\n }\n return tag;\n}\n\nexports.composeScalar = composeScalar;\n","'use strict';\n\nvar directives = require('../doc/directives.js');\nvar Document = require('../doc/Document.js');\nvar errors = require('../errors.js');\nvar identity = require('../nodes/identity.js');\nvar composeDoc = require('./compose-doc.js');\nvar resolveEnd = require('./resolve-end.js');\n\nfunction getErrorPos(src) {\n if (typeof src === 'number')\n return [src, src + 1];\n if (Array.isArray(src))\n return src.length === 2 ? src : [src[0], src[1]];\n const { offset, source } = src;\n return [offset, offset + (typeof source === 'string' ? 
source.length : 1)];\n}\nfunction parsePrelude(prelude) {\n let comment = '';\n let atComment = false;\n let afterEmptyLine = false;\n for (let i = 0; i < prelude.length; ++i) {\n const source = prelude[i];\n switch (source[0]) {\n case '#':\n comment +=\n (comment === '' ? '' : afterEmptyLine ? '\\n\\n' : '\\n') +\n (source.substring(1) || ' ');\n atComment = true;\n afterEmptyLine = false;\n break;\n case '%':\n if (prelude[i + 1]?.[0] !== '#')\n i += 1;\n atComment = false;\n break;\n default:\n // This may be wrong after doc-end, but in that case it doesn't matter\n if (!atComment)\n afterEmptyLine = true;\n atComment = false;\n }\n }\n return { comment, afterEmptyLine };\n}\n/**\n * Compose a stream of CST nodes into a stream of YAML Documents.\n *\n * ```ts\n * import { Composer, Parser } from 'yaml'\n *\n * const src: string = ...\n * const tokens = new Parser().parse(src)\n * const docs = new Composer().compose(tokens)\n * ```\n */\nclass Composer {\n constructor(options = {}) {\n this.doc = null;\n this.atDirectives = false;\n this.prelude = [];\n this.errors = [];\n this.warnings = [];\n this.onError = (source, code, message, warning) => {\n const pos = getErrorPos(source);\n if (warning)\n this.warnings.push(new errors.YAMLWarning(pos, code, message));\n else\n this.errors.push(new errors.YAMLParseError(pos, code, message));\n };\n // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing\n this.directives = new directives.Directives({ version: options.version || '1.2' });\n this.options = options;\n }\n decorate(doc, afterDoc) {\n const { comment, afterEmptyLine } = parsePrelude(this.prelude);\n //console.log({ dc: doc.comment, prelude, comment })\n if (comment) {\n const dc = doc.contents;\n if (afterDoc) {\n doc.comment = doc.comment ? `${doc.comment}\\n${comment}` : comment;\n }\n else if (afterEmptyLine || doc.directives.docStart || !dc) {\n doc.commentBefore = comment;\n }\n else if (identity.isCollection(dc) && !dc.flow && dc.items.length > 0) {\n let it = dc.items[0];\n if (identity.isPair(it))\n it = it.key;\n const cb = it.commentBefore;\n it.commentBefore = cb ? `${comment}\\n${cb}` : comment;\n }\n else {\n const cb = dc.commentBefore;\n dc.commentBefore = cb ? `${comment}\\n${cb}` : comment;\n }\n }\n if (afterDoc) {\n Array.prototype.push.apply(doc.errors, this.errors);\n Array.prototype.push.apply(doc.warnings, this.warnings);\n }\n else {\n doc.errors = this.errors;\n doc.warnings = this.warnings;\n }\n this.prelude = [];\n this.errors = [];\n this.warnings = [];\n }\n /**\n * Current stream status information.\n *\n * Mostly useful at the end of input for an empty stream.\n */\n streamInfo() {\n return {\n comment: parsePrelude(this.prelude).comment,\n directives: this.directives,\n errors: this.errors,\n warnings: this.warnings\n };\n }\n /**\n * Compose tokens into documents.\n *\n * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document.\n * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly.\n */\n *compose(tokens, forceDoc = false, endOffset = -1) {\n for (const token of tokens)\n yield* this.next(token);\n yield* this.end(forceDoc, endOffset);\n }\n /** Advance the composer by one CST token. 
*/\n *next(token) {\n if (process.env.LOG_STREAM)\n console.dir(token, { depth: null });\n switch (token.type) {\n case 'directive':\n this.directives.add(token.source, (offset, message, warning) => {\n const pos = getErrorPos(token);\n pos[0] += offset;\n this.onError(pos, 'BAD_DIRECTIVE', message, warning);\n });\n this.prelude.push(token.source);\n this.atDirectives = true;\n break;\n case 'document': {\n const doc = composeDoc.composeDoc(this.options, this.directives, token, this.onError);\n if (this.atDirectives && !doc.directives.docStart)\n this.onError(token, 'MISSING_CHAR', 'Missing directives-end/doc-start indicator line');\n this.decorate(doc, false);\n if (this.doc)\n yield this.doc;\n this.doc = doc;\n this.atDirectives = false;\n break;\n }\n case 'byte-order-mark':\n case 'space':\n break;\n case 'comment':\n case 'newline':\n this.prelude.push(token.source);\n break;\n case 'error': {\n const msg = token.source\n ? `${token.message}: ${JSON.stringify(token.source)}`\n : token.message;\n const error = new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg);\n if (this.atDirectives || !this.doc)\n this.errors.push(error);\n else\n this.doc.errors.push(error);\n break;\n }\n case 'doc-end': {\n if (!this.doc) {\n const msg = 'Unexpected doc-end without preceding document';\n this.errors.push(new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg));\n break;\n }\n this.doc.directives.docEnd = true;\n const end = resolveEnd.resolveEnd(token.end, token.offset + token.source.length, this.doc.options.strict, this.onError);\n this.decorate(this.doc, true);\n if (end.comment) {\n const dc = this.doc.comment;\n this.doc.comment = dc ? `${dc}\\n${end.comment}` : end.comment;\n }\n this.doc.range[2] = end.offset;\n break;\n }\n default:\n this.errors.push(new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', `Unsupported token ${token.type}`));\n }\n }\n /**\n * Call at end of input to yield any remaining document.\n *\n * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document.\n * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly.\n */\n *end(forceDoc = false, endOffset = -1) {\n if (this.doc) {\n this.decorate(this.doc, true);\n yield this.doc;\n this.doc = null;\n }\n else if (forceDoc) {\n const opts = Object.assign({ _directives: this.directives }, this.options);\n const doc = new Document.Document(undefined, opts);\n if (this.atDirectives)\n this.onError(endOffset, 'MISSING_CHAR', 'Missing directives-end indicator line');\n doc.range = [0, endOffset, endOffset];\n this.decorate(doc, false);\n yield doc;\n }\n }\n}\n\nexports.Composer = Composer;\n","'use strict';\n\nvar Pair = require('../nodes/Pair.js');\nvar YAMLMap = require('../nodes/YAMLMap.js');\nvar resolveProps = require('./resolve-props.js');\nvar utilContainsNewline = require('./util-contains-newline.js');\nvar utilFlowIndentCheck = require('./util-flow-indent-check.js');\nvar utilMapIncludes = require('./util-map-includes.js');\n\nconst startColMsg = 'All mapping items must start at the same column';\nfunction resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError, tag) {\n const NodeClass = tag?.nodeClass ?? 
YAMLMap.YAMLMap;\n const map = new NodeClass(ctx.schema);\n if (ctx.atRoot)\n ctx.atRoot = false;\n let offset = bm.offset;\n let commentEnd = null;\n for (const collItem of bm.items) {\n const { start, key, sep, value } = collItem;\n // key properties\n const keyProps = resolveProps.resolveProps(start, {\n indicator: 'explicit-key-ind',\n next: key ?? sep?.[0],\n offset,\n onError,\n startOnNewline: true\n });\n const implicitKey = !keyProps.found;\n if (implicitKey) {\n if (key) {\n if (key.type === 'block-seq')\n onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'A block sequence may not be used as an implicit map key');\n else if ('indent' in key && key.indent !== bm.indent)\n onError(offset, 'BAD_INDENT', startColMsg);\n }\n if (!keyProps.anchor && !keyProps.tag && !sep) {\n commentEnd = keyProps.end;\n if (keyProps.comment) {\n if (map.comment)\n map.comment += '\\n' + keyProps.comment;\n else\n map.comment = keyProps.comment;\n }\n continue;\n }\n if (keyProps.hasNewlineAfterProp || utilContainsNewline.containsNewline(key)) {\n onError(key ?? start[start.length - 1], 'MULTILINE_IMPLICIT_KEY', 'Implicit keys need to be on a single line');\n }\n }\n else if (keyProps.found?.indent !== bm.indent) {\n onError(offset, 'BAD_INDENT', startColMsg);\n }\n // key value\n const keyStart = keyProps.end;\n const keyNode = key\n ? composeNode(ctx, key, keyProps, onError)\n : composeEmptyNode(ctx, keyStart, start, null, keyProps, onError);\n if (ctx.schema.compat)\n utilFlowIndentCheck.flowIndentCheck(bm.indent, key, onError);\n if (utilMapIncludes.mapIncludes(ctx, map.items, keyNode))\n onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique');\n // value properties\n const valueProps = resolveProps.resolveProps(sep ?? [], {\n indicator: 'map-value-ind',\n next: value,\n offset: keyNode.range[2],\n onError,\n startOnNewline: !key || key.type === 'block-scalar'\n });\n offset = valueProps.end;\n if (valueProps.found) {\n if (implicitKey) {\n if (value?.type === 'block-map' && !valueProps.hasNewline)\n onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'Nested mappings are not allowed in compact mappings');\n if (ctx.options.strict &&\n keyProps.start < valueProps.found.offset - 1024)\n onError(keyNode.range, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit block mapping key');\n }\n // value value\n const valueNode = value\n ? composeNode(ctx, value, valueProps, onError)\n : composeEmptyNode(ctx, offset, sep, null, valueProps, onError);\n if (ctx.schema.compat)\n utilFlowIndentCheck.flowIndentCheck(bm.indent, value, onError);\n offset = valueNode.range[2];\n const pair = new Pair.Pair(keyNode, valueNode);\n if (ctx.options.keepSourceTokens)\n pair.srcToken = collItem;\n map.items.push(pair);\n }\n else {\n // key with no value\n if (implicitKey)\n onError(keyNode.range, 'MISSING_CHAR', 'Implicit map keys need to be followed by map values');\n if (valueProps.comment) {\n if (keyNode.comment)\n keyNode.comment += '\\n' + valueProps.comment;\n else\n keyNode.comment = valueProps.comment;\n }\n const pair = new Pair.Pair(keyNode);\n if (ctx.options.keepSourceTokens)\n pair.srcToken = collItem;\n map.items.push(pair);\n }\n }\n if (commentEnd && commentEnd < offset)\n onError(commentEnd, 'IMPOSSIBLE', 'Map comment with trailing content');\n map.range = [bm.offset, offset, commentEnd ?? 
offset];\n return map;\n}\n\nexports.resolveBlockMap = resolveBlockMap;\n","'use strict';\n\nvar Scalar = require('../nodes/Scalar.js');\n\nfunction resolveBlockScalar(scalar, strict, onError) {\n const start = scalar.offset;\n const header = parseBlockScalarHeader(scalar, strict, onError);\n if (!header)\n return { value: '', type: null, comment: '', range: [start, start, start] };\n const type = header.mode === '>' ? Scalar.Scalar.BLOCK_FOLDED : Scalar.Scalar.BLOCK_LITERAL;\n const lines = scalar.source ? splitLines(scalar.source) : [];\n // determine the end of content & start of chomping\n let chompStart = lines.length;\n for (let i = lines.length - 1; i >= 0; --i) {\n const content = lines[i][1];\n if (content === '' || content === '\\r')\n chompStart = i;\n else\n break;\n }\n // shortcut for empty contents\n if (chompStart === 0) {\n const value = header.chomp === '+' && lines.length > 0\n ? '\\n'.repeat(Math.max(1, lines.length - 1))\n : '';\n let end = start + header.length;\n if (scalar.source)\n end += scalar.source.length;\n return { value, type, comment: header.comment, range: [start, end, end] };\n }\n // find the indentation level to trim from start\n let trimIndent = scalar.indent + header.indent;\n let offset = scalar.offset + header.length;\n let contentStart = 0;\n for (let i = 0; i < chompStart; ++i) {\n const [indent, content] = lines[i];\n if (content === '' || content === '\\r') {\n if (header.indent === 0 && indent.length > trimIndent)\n trimIndent = indent.length;\n }\n else {\n if (indent.length < trimIndent) {\n const message = 'Block scalars with more-indented leading empty lines must use an explicit indentation indicator';\n onError(offset + indent.length, 'MISSING_CHAR', message);\n }\n if (header.indent === 0)\n trimIndent = indent.length;\n contentStart = i;\n break;\n }\n offset += indent.length + content.length + 1;\n }\n // include trailing more-indented empty lines in content\n for (let i = lines.length - 1; i >= chompStart; --i) {\n if (lines[i][0].length > trimIndent)\n chompStart = i + 1;\n }\n let value = '';\n let sep = '';\n let prevMoreIndented = false;\n // leading whitespace is kept intact\n for (let i = 0; i < contentStart; ++i)\n value += lines[i][0].slice(trimIndent) + '\\n';\n for (let i = contentStart; i < chompStart; ++i) {\n let [indent, content] = lines[i];\n offset += indent.length + content.length + 1;\n const crlf = content[content.length - 1] === '\\r';\n if (crlf)\n content = content.slice(0, -1);\n /* istanbul ignore if already caught in lexer */\n if (content && indent.length < trimIndent) {\n const src = header.indent\n ? 'explicit indentation indicator'\n : 'first line';\n const message = `Block scalar lines must not be less indented than their ${src}`;\n onError(offset - content.length - (crlf ? 
2 : 1), 'BAD_INDENT', message);\n indent = '';\n }\n if (type === Scalar.Scalar.BLOCK_LITERAL) {\n value += sep + indent.slice(trimIndent) + content;\n sep = '\\n';\n }\n else if (indent.length > trimIndent || content[0] === '\\t') {\n // more-indented content within a folded block\n if (sep === ' ')\n sep = '\\n';\n else if (!prevMoreIndented && sep === '\\n')\n sep = '\\n\\n';\n value += sep + indent.slice(trimIndent) + content;\n sep = '\\n';\n prevMoreIndented = true;\n }\n else if (content === '') {\n // empty line\n if (sep === '\\n')\n value += '\\n';\n else\n sep = '\\n';\n }\n else {\n value += sep + content;\n sep = ' ';\n prevMoreIndented = false;\n }\n }\n switch (header.chomp) {\n case '-':\n break;\n case '+':\n for (let i = chompStart; i < lines.length; ++i)\n value += '\\n' + lines[i][0].slice(trimIndent);\n if (value[value.length - 1] !== '\\n')\n value += '\\n';\n break;\n default:\n value += '\\n';\n }\n const end = start + header.length + scalar.source.length;\n return { value, type, comment: header.comment, range: [start, end, end] };\n}\nfunction parseBlockScalarHeader({ offset, props }, strict, onError) {\n /* istanbul ignore if should not happen */\n if (props[0].type !== 'block-scalar-header') {\n onError(props[0], 'IMPOSSIBLE', 'Block scalar header not found');\n return null;\n }\n const { source } = props[0];\n const mode = source[0];\n let indent = 0;\n let chomp = '';\n let error = -1;\n for (let i = 1; i < source.length; ++i) {\n const ch = source[i];\n if (!chomp && (ch === '-' || ch === '+'))\n chomp = ch;\n else {\n const n = Number(ch);\n if (!indent && n)\n indent = n;\n else if (error === -1)\n error = offset + i;\n }\n }\n if (error !== -1)\n onError(error, 'UNEXPECTED_TOKEN', `Block scalar header includes extra characters: ${source}`);\n let hasSpace = false;\n let comment = '';\n let length = source.length;\n for (let i = 1; i < props.length; ++i) {\n const token = props[i];\n switch (token.type) {\n case 'space':\n hasSpace = true;\n // fallthrough\n case 'newline':\n length += token.source.length;\n break;\n case 'comment':\n if (strict && !hasSpace) {\n const message = 'Comments must be separated from other tokens by white space characters';\n onError(token, 'MISSING_CHAR', message);\n }\n length += token.source.length;\n comment = token.source.substring(1);\n break;\n case 'error':\n onError(token, 'UNEXPECTED_TOKEN', token.message);\n length += token.source.length;\n break;\n /* istanbul ignore next should not happen */\n default: {\n const message = `Unexpected token in block scalar header: ${token.type}`;\n onError(token, 'UNEXPECTED_TOKEN', message);\n const ts = token.source;\n if (ts && typeof ts === 'string')\n length += ts.length;\n }\n }\n }\n return { mode, indent, chomp, comment, length };\n}\n/** @returns Array of lines split up as `[indent, content]` */\nfunction splitLines(source) {\n const split = source.split(/\\n( *)/);\n const first = split[0];\n const m = first.match(/^( *)/);\n const line0 = m?.[1]\n ? 
[m[1], first.slice(m[1].length)]\n : ['', first];\n const lines = [line0];\n for (let i = 1; i < split.length; i += 2)\n lines.push([split[i], split[i + 1]]);\n return lines;\n}\n\nexports.resolveBlockScalar = resolveBlockScalar;\n","'use strict';\n\nvar YAMLSeq = require('../nodes/YAMLSeq.js');\nvar resolveProps = require('./resolve-props.js');\nvar utilFlowIndentCheck = require('./util-flow-indent-check.js');\n\nfunction resolveBlockSeq({ composeNode, composeEmptyNode }, ctx, bs, onError, tag) {\n const NodeClass = tag?.nodeClass ?? YAMLSeq.YAMLSeq;\n const seq = new NodeClass(ctx.schema);\n if (ctx.atRoot)\n ctx.atRoot = false;\n let offset = bs.offset;\n let commentEnd = null;\n for (const { start, value } of bs.items) {\n const props = resolveProps.resolveProps(start, {\n indicator: 'seq-item-ind',\n next: value,\n offset,\n onError,\n startOnNewline: true\n });\n if (!props.found) {\n if (props.anchor || props.tag || value) {\n if (value && value.type === 'block-seq')\n onError(props.end, 'BAD_INDENT', 'All sequence items must start at the same column');\n else\n onError(offset, 'MISSING_CHAR', 'Sequence item without - indicator');\n }\n else {\n commentEnd = props.end;\n if (props.comment)\n seq.comment = props.comment;\n continue;\n }\n }\n const node = value\n ? composeNode(ctx, value, props, onError)\n : composeEmptyNode(ctx, props.end, start, null, props, onError);\n if (ctx.schema.compat)\n utilFlowIndentCheck.flowIndentCheck(bs.indent, value, onError);\n offset = node.range[2];\n seq.items.push(node);\n }\n seq.range = [bs.offset, offset, commentEnd ?? offset];\n return seq;\n}\n\nexports.resolveBlockSeq = resolveBlockSeq;\n","'use strict';\n\nfunction resolveEnd(end, offset, reqSpace, onError) {\n let comment = '';\n if (end) {\n let hasSpace = false;\n let sep = '';\n for (const token of end) {\n const { source, type } = token;\n switch (type) {\n case 'space':\n hasSpace = true;\n break;\n case 'comment': {\n if (reqSpace && !hasSpace)\n onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters');\n const cb = source.substring(1) || ' ';\n if (!comment)\n comment = cb;\n else\n comment += sep + cb;\n sep = '';\n break;\n }\n case 'newline':\n if (comment)\n sep += source;\n hasSpace = true;\n break;\n default:\n onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${type} at node end`);\n }\n offset += source.length;\n }\n }\n return { comment, offset };\n}\n\nexports.resolveEnd = resolveEnd;\n","'use strict';\n\nvar identity = require('../nodes/identity.js');\nvar Pair = require('../nodes/Pair.js');\nvar YAMLMap = require('../nodes/YAMLMap.js');\nvar YAMLSeq = require('../nodes/YAMLSeq.js');\nvar resolveEnd = require('./resolve-end.js');\nvar resolveProps = require('./resolve-props.js');\nvar utilContainsNewline = require('./util-contains-newline.js');\nvar utilMapIncludes = require('./util-map-includes.js');\n\nconst blockMsg = 'Block collections are not allowed within flow collections';\nconst isBlock = (token) => token && (token.type === 'block-map' || token.type === 'block-seq');\nfunction resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onError, tag) {\n const isMap = fc.start.source === '{';\n const fcName = isMap ? 'flow map' : 'flow sequence';\n const NodeClass = (tag?.nodeClass ?? (isMap ? 
YAMLMap.YAMLMap : YAMLSeq.YAMLSeq));\n const coll = new NodeClass(ctx.schema);\n coll.flow = true;\n const atRoot = ctx.atRoot;\n if (atRoot)\n ctx.atRoot = false;\n let offset = fc.offset + fc.start.source.length;\n for (let i = 0; i < fc.items.length; ++i) {\n const collItem = fc.items[i];\n const { start, key, sep, value } = collItem;\n const props = resolveProps.resolveProps(start, {\n flow: fcName,\n indicator: 'explicit-key-ind',\n next: key ?? sep?.[0],\n offset,\n onError,\n startOnNewline: false\n });\n if (!props.found) {\n if (!props.anchor && !props.tag && !sep && !value) {\n if (i === 0 && props.comma)\n onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`);\n else if (i < fc.items.length - 1)\n onError(props.start, 'UNEXPECTED_TOKEN', `Unexpected empty item in ${fcName}`);\n if (props.comment) {\n if (coll.comment)\n coll.comment += '\\n' + props.comment;\n else\n coll.comment = props.comment;\n }\n offset = props.end;\n continue;\n }\n if (!isMap && ctx.options.strict && utilContainsNewline.containsNewline(key))\n onError(key, // checked by containsNewline()\n 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line');\n }\n if (i === 0) {\n if (props.comma)\n onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`);\n }\n else {\n if (!props.comma)\n onError(props.start, 'MISSING_CHAR', `Missing , between ${fcName} items`);\n if (props.comment) {\n let prevItemComment = '';\n loop: for (const st of start) {\n switch (st.type) {\n case 'comma':\n case 'space':\n break;\n case 'comment':\n prevItemComment = st.source.substring(1);\n break loop;\n default:\n break loop;\n }\n }\n if (prevItemComment) {\n let prev = coll.items[coll.items.length - 1];\n if (identity.isPair(prev))\n prev = prev.value ?? prev.key;\n if (prev.comment)\n prev.comment += '\\n' + prevItemComment;\n else\n prev.comment = prevItemComment;\n props.comment = props.comment.substring(prevItemComment.length + 1);\n }\n }\n }\n if (!isMap && !sep && !props.found) {\n // item is a value in a seq\n // → key & sep are empty, start does not include ? or :\n const valueNode = value\n ? composeNode(ctx, value, props, onError)\n : composeEmptyNode(ctx, props.end, sep, null, props, onError);\n coll.items.push(valueNode);\n offset = valueNode.range[2];\n if (isBlock(value))\n onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg);\n }\n else {\n // item is a key+value pair\n // key value\n const keyStart = props.end;\n const keyNode = key\n ? composeNode(ctx, key, props, onError)\n : composeEmptyNode(ctx, keyStart, start, null, props, onError);\n if (isBlock(key))\n onError(keyNode.range, 'BLOCK_IN_FLOW', blockMsg);\n // value properties\n const valueProps = resolveProps.resolveProps(sep ?? 
[], {\n flow: fcName,\n indicator: 'map-value-ind',\n next: value,\n offset: keyNode.range[2],\n onError,\n startOnNewline: false\n });\n if (valueProps.found) {\n if (!isMap && !props.found && ctx.options.strict) {\n if (sep)\n for (const st of sep) {\n if (st === valueProps.found)\n break;\n if (st.type === 'newline') {\n onError(st, 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line');\n break;\n }\n }\n if (props.start < valueProps.found.offset - 1024)\n onError(valueProps.found, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit flow sequence key');\n }\n }\n else if (value) {\n if ('source' in value && value.source && value.source[0] === ':')\n onError(value, 'MISSING_CHAR', `Missing space after : in ${fcName}`);\n else\n onError(valueProps.start, 'MISSING_CHAR', `Missing , or : between ${fcName} items`);\n }\n // value value\n const valueNode = value\n ? composeNode(ctx, value, valueProps, onError)\n : valueProps.found\n ? composeEmptyNode(ctx, valueProps.end, sep, null, valueProps, onError)\n : null;\n if (valueNode) {\n if (isBlock(value))\n onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg);\n }\n else if (valueProps.comment) {\n if (keyNode.comment)\n keyNode.comment += '\\n' + valueProps.comment;\n else\n keyNode.comment = valueProps.comment;\n }\n const pair = new Pair.Pair(keyNode, valueNode);\n if (ctx.options.keepSourceTokens)\n pair.srcToken = collItem;\n if (isMap) {\n const map = coll;\n if (utilMapIncludes.mapIncludes(ctx, map.items, keyNode))\n onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique');\n map.items.push(pair);\n }\n else {\n const map = new YAMLMap.YAMLMap(ctx.schema);\n map.flow = true;\n map.items.push(pair);\n coll.items.push(map);\n }\n offset = valueNode ? valueNode.range[2] : valueProps.end;\n }\n }\n const expectedEnd = isMap ? '}' : ']';\n const [ce, ...ee] = fc.end;\n let cePos = offset;\n if (ce && ce.source === expectedEnd)\n cePos = ce.offset + ce.source.length;\n else {\n const name = fcName[0].toUpperCase() + fcName.substring(1);\n const msg = atRoot\n ? `${name} must end with a ${expectedEnd}`\n : `${name} in block collection must be sufficiently indented and end with a ${expectedEnd}`;\n onError(offset, atRoot ? 
'MISSING_CHAR' : 'BAD_INDENT', msg);\n if (ce && ce.source.length !== 1)\n ee.unshift(ce);\n }\n if (ee.length > 0) {\n const end = resolveEnd.resolveEnd(ee, cePos, ctx.options.strict, onError);\n if (end.comment) {\n if (coll.comment)\n coll.comment += '\\n' + end.comment;\n else\n coll.comment = end.comment;\n }\n coll.range = [fc.offset, cePos, end.offset];\n }\n else {\n coll.range = [fc.offset, cePos, cePos];\n }\n return coll;\n}\n\nexports.resolveFlowCollection = resolveFlowCollection;\n","'use strict';\n\nvar Scalar = require('../nodes/Scalar.js');\nvar resolveEnd = require('./resolve-end.js');\n\nfunction resolveFlowScalar(scalar, strict, onError) {\n const { offset, type, source, end } = scalar;\n let _type;\n let value;\n const _onError = (rel, code, msg) => onError(offset + rel, code, msg);\n switch (type) {\n case 'scalar':\n _type = Scalar.Scalar.PLAIN;\n value = plainValue(source, _onError);\n break;\n case 'single-quoted-scalar':\n _type = Scalar.Scalar.QUOTE_SINGLE;\n value = singleQuotedValue(source, _onError);\n break;\n case 'double-quoted-scalar':\n _type = Scalar.Scalar.QUOTE_DOUBLE;\n value = doubleQuotedValue(source, _onError);\n break;\n /* istanbul ignore next should not happen */\n default:\n onError(scalar, 'UNEXPECTED_TOKEN', `Expected a flow scalar value, but found: ${type}`);\n return {\n value: '',\n type: null,\n comment: '',\n range: [offset, offset + source.length, offset + source.length]\n };\n }\n const valueEnd = offset + source.length;\n const re = resolveEnd.resolveEnd(end, valueEnd, strict, onError);\n return {\n value,\n type: _type,\n comment: re.comment,\n range: [offset, valueEnd, re.offset]\n };\n}\nfunction plainValue(source, onError) {\n let badChar = '';\n switch (source[0]) {\n /* istanbul ignore next should not happen */\n case '\\t':\n badChar = 'a tab character';\n break;\n case ',':\n badChar = 'flow indicator character ,';\n break;\n case '%':\n badChar = 'directive indicator character %';\n break;\n case '|':\n case '>': {\n badChar = `block scalar indicator ${source[0]}`;\n break;\n }\n case '@':\n case '`': {\n badChar = `reserved character ${source[0]}`;\n break;\n }\n }\n if (badChar)\n onError(0, 'BAD_SCALAR_START', `Plain value cannot start with ${badChar}`);\n return foldLines(source);\n}\nfunction singleQuotedValue(source, onError) {\n if (source[source.length - 1] !== \"'\" || source.length === 1)\n onError(source.length, 'MISSING_CHAR', \"Missing closing 'quote\");\n return foldLines(source.slice(1, -1)).replace(/''/g, \"'\");\n}\nfunction foldLines(source) {\n /**\n * The negative lookbehind here and in the `re` RegExp is to\n * prevent causing a polynomial search time in certain cases.\n *\n * The try-catch is for Safari, which doesn't support this yet:\n * https://caniuse.com/js-regexp-lookbehind\n */\n let first, line;\n try {\n first = new RegExp('(.*?)(? wsStart ? 
source.slice(wsStart, i + 1) : ch;\n }\n else {\n res += ch;\n }\n }\n if (source[source.length - 1] !== '\"' || source.length === 1)\n onError(source.length, 'MISSING_CHAR', 'Missing closing \"quote');\n return res;\n}\n/**\n * Fold a single newline into a space, multiple newlines to N - 1 newlines.\n * Presumes `source[offset] === '\\n'`\n */\nfunction foldNewline(source, offset) {\n let fold = '';\n let ch = source[offset + 1];\n while (ch === ' ' || ch === '\\t' || ch === '\\n' || ch === '\\r') {\n if (ch === '\\r' && source[offset + 2] !== '\\n')\n break;\n if (ch === '\\n')\n fold += '\\n';\n offset += 1;\n ch = source[offset + 1];\n }\n if (!fold)\n fold = ' ';\n return { fold, offset };\n}\nconst escapeCodes = {\n '0': '\\0',\n a: '\\x07',\n b: '\\b',\n e: '\\x1b',\n f: '\\f',\n n: '\\n',\n r: '\\r',\n t: '\\t',\n v: '\\v',\n N: '\\u0085',\n _: '\\u00a0',\n L: '\\u2028',\n P: '\\u2029',\n ' ': ' ',\n '\"': '\"',\n '/': '/',\n '\\\\': '\\\\',\n '\\t': '\\t'\n};\nfunction parseCharCode(source, offset, length, onError) {\n const cc = source.substr(offset, length);\n const ok = cc.length === length && /^[0-9a-fA-F]+$/.test(cc);\n const code = ok ? parseInt(cc, 16) : NaN;\n if (isNaN(code)) {\n const raw = source.substr(offset - 2, length + 2);\n onError(offset - 2, 'BAD_DQ_ESCAPE', `Invalid escape sequence ${raw}`);\n return raw;\n }\n return String.fromCodePoint(code);\n}\n\nexports.resolveFlowScalar = resolveFlowScalar;\n","'use strict';\n\nfunction resolveProps(tokens, { flow, indicator, next, offset, onError, startOnNewline }) {\n let spaceBefore = false;\n let atNewline = startOnNewline;\n let hasSpace = startOnNewline;\n let comment = '';\n let commentSep = '';\n let hasNewline = false;\n let hasNewlineAfterProp = false;\n let reqSpace = false;\n let anchor = null;\n let tag = null;\n let comma = null;\n let found = null;\n let start = null;\n for (const token of tokens) {\n if (reqSpace) {\n if (token.type !== 'space' &&\n token.type !== 'newline' &&\n token.type !== 'comma')\n onError(token.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space');\n reqSpace = false;\n }\n switch (token.type) {\n case 'space':\n // At the doc level, tabs at line start may be parsed\n // as leading white space rather than indentation.\n // In a flow collection, only the parser handles indent.\n if (!flow &&\n atNewline &&\n indicator !== 'doc-start' &&\n token.source[0] === '\\t')\n onError(token, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation');\n hasSpace = true;\n break;\n case 'comment': {\n if (!hasSpace)\n onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters');\n const cb = token.source.substring(1) || ' ';\n if (!comment)\n comment = cb;\n else\n comment += commentSep + cb;\n commentSep = '';\n atNewline = false;\n break;\n }\n case 'newline':\n if (atNewline) {\n if (comment)\n comment += token.source;\n else\n spaceBefore = true;\n }\n else\n commentSep += token.source;\n atNewline = true;\n hasNewline = true;\n if (anchor || tag)\n hasNewlineAfterProp = true;\n hasSpace = true;\n break;\n case 'anchor':\n if (anchor)\n onError(token, 'MULTIPLE_ANCHORS', 'A node can have at most one anchor');\n if (token.source.endsWith(':'))\n onError(token.offset + token.source.length - 1, 'BAD_ALIAS', 'Anchor ending in : is ambiguous', true);\n anchor = token;\n if (start === null)\n start = token.offset;\n atNewline = false;\n hasSpace = false;\n reqSpace = true;\n break;\n case 'tag': {\n if (tag)\n 
onError(token, 'MULTIPLE_TAGS', 'A node can have at most one tag');\n tag = token;\n if (start === null)\n start = token.offset;\n atNewline = false;\n hasSpace = false;\n reqSpace = true;\n break;\n }\n case indicator:\n // Could here handle preceding comments differently\n if (anchor || tag)\n onError(token, 'BAD_PROP_ORDER', `Anchors and tags must be after the ${token.source} indicator`);\n if (found)\n onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.source} in ${flow ?? 'collection'}`);\n found = token;\n atNewline = false;\n hasSpace = false;\n break;\n case 'comma':\n if (flow) {\n if (comma)\n onError(token, 'UNEXPECTED_TOKEN', `Unexpected , in ${flow}`);\n comma = token;\n atNewline = false;\n hasSpace = false;\n break;\n }\n // else fallthrough\n default:\n onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.type} token`);\n atNewline = false;\n hasSpace = false;\n }\n }\n const last = tokens[tokens.length - 1];\n const end = last ? last.offset + last.source.length : offset;\n if (reqSpace &&\n next &&\n next.type !== 'space' &&\n next.type !== 'newline' &&\n next.type !== 'comma' &&\n (next.type !== 'scalar' || next.source !== ''))\n onError(next.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space');\n return {\n comma,\n found,\n spaceBefore,\n comment,\n hasNewline,\n hasNewlineAfterProp,\n anchor,\n tag,\n end,\n start: start ?? end\n };\n}\n\nexports.resolveProps = resolveProps;\n","'use strict';\n\nfunction containsNewline(key) {\n if (!key)\n return null;\n switch (key.type) {\n case 'alias':\n case 'scalar':\n case 'double-quoted-scalar':\n case 'single-quoted-scalar':\n if (key.source.includes('\\n'))\n return true;\n if (key.end)\n for (const st of key.end)\n if (st.type === 'newline')\n return true;\n return false;\n case 'flow-collection':\n for (const it of key.items) {\n for (const st of it.start)\n if (st.type === 'newline')\n return true;\n if (it.sep)\n for (const st of it.sep)\n if (st.type === 'newline')\n return true;\n if (containsNewline(it.key) || containsNewline(it.value))\n return true;\n }\n return false;\n default:\n return true;\n }\n}\n\nexports.containsNewline = containsNewline;\n","'use strict';\n\nfunction emptyScalarPosition(offset, before, pos) {\n if (before) {\n if (pos === null)\n pos = before.length;\n for (let i = pos - 1; i >= 0; --i) {\n let st = before[i];\n switch (st.type) {\n case 'space':\n case 'comment':\n case 'newline':\n offset -= st.source.length;\n continue;\n }\n // Technically, an empty scalar is immediately after the last non-empty\n // node, but it's more useful to place it after any whitespace.\n st = before[++i];\n while (st?.type === 'space') {\n offset += st.source.length;\n st = before[++i];\n }\n break;\n }\n }\n return offset;\n}\n\nexports.emptyScalarPosition = emptyScalarPosition;\n","'use strict';\n\nvar utilContainsNewline = require('./util-contains-newline.js');\n\nfunction flowIndentCheck(indent, fc, onError) {\n if (fc?.type === 'flow-collection') {\n const end = fc.end[0];\n if (end.indent === indent &&\n (end.source === ']' || end.source === '}') &&\n utilContainsNewline.containsNewline(fc)) {\n const msg = 'Flow end indicator should be more indented than parent';\n onError(end, 'BAD_INDENT', msg, true);\n }\n }\n}\n\nexports.flowIndentCheck = flowIndentCheck;\n","'use strict';\n\nvar identity = require('../nodes/identity.js');\n\nfunction mapIncludes(ctx, items, search) {\n const { uniqueKeys } = ctx.options;\n if (uniqueKeys === false)\n return false;\n 
const isEqual = typeof uniqueKeys === 'function'\n ? uniqueKeys\n : (a, b) => a === b ||\n (identity.isScalar(a) &&\n identity.isScalar(b) &&\n a.value === b.value &&\n !(a.value === '<<' && ctx.schema.merge));\n return items.some(pair => isEqual(pair.key, search));\n}\n\nexports.mapIncludes = mapIncludes;\n","'use strict';\n\nvar Alias = require('../nodes/Alias.js');\nvar Collection = require('../nodes/Collection.js');\nvar identity = require('../nodes/identity.js');\nvar Pair = require('../nodes/Pair.js');\nvar toJS = require('../nodes/toJS.js');\nvar Schema = require('../schema/Schema.js');\nvar stringifyDocument = require('../stringify/stringifyDocument.js');\nvar anchors = require('./anchors.js');\nvar applyReviver = require('./applyReviver.js');\nvar createNode = require('./createNode.js');\nvar directives = require('./directives.js');\n\nclass Document {\n constructor(value, replacer, options) {\n /** A comment before this Document */\n this.commentBefore = null;\n /** A comment immediately after this Document */\n this.comment = null;\n /** Errors encountered during parsing. */\n this.errors = [];\n /** Warnings encountered during parsing. */\n this.warnings = [];\n Object.defineProperty(this, identity.NODE_TYPE, { value: identity.DOC });\n let _replacer = null;\n if (typeof replacer === 'function' || Array.isArray(replacer)) {\n _replacer = replacer;\n }\n else if (options === undefined && replacer) {\n options = replacer;\n replacer = undefined;\n }\n const opt = Object.assign({\n intAsBigInt: false,\n keepSourceTokens: false,\n logLevel: 'warn',\n prettyErrors: true,\n strict: true,\n uniqueKeys: true,\n version: '1.2'\n }, options);\n this.options = opt;\n let { version } = opt;\n if (options?._directives) {\n this.directives = options._directives.atDocument();\n if (this.directives.yaml.explicit)\n version = this.directives.yaml.version;\n }\n else\n this.directives = new directives.Directives({ version });\n this.setSchema(version, options);\n // @ts-expect-error We can't really know that this matches Contents.\n this.contents =\n value === undefined ? null : this.createNode(value, _replacer, options);\n }\n /**\n * Create a deep copy of this Document and its contents.\n *\n * Custom Node values that inherit from `Object` still refer to their original instances.\n */\n clone() {\n const copy = Object.create(Document.prototype, {\n [identity.NODE_TYPE]: { value: identity.DOC }\n });\n copy.commentBefore = this.commentBefore;\n copy.comment = this.comment;\n copy.errors = this.errors.slice();\n copy.warnings = this.warnings.slice();\n copy.options = Object.assign({}, this.options);\n if (this.directives)\n copy.directives = this.directives.clone();\n copy.schema = this.schema.clone();\n // @ts-expect-error We can't really know that this matches Contents.\n copy.contents = identity.isNode(this.contents)\n ? this.contents.clone(copy.schema)\n : this.contents;\n if (this.range)\n copy.range = this.range.slice();\n return copy;\n }\n /** Adds a value to the document. */\n add(value) {\n if (assertCollection(this.contents))\n this.contents.add(value);\n }\n /** Adds a value to the document. 
*/\n addIn(path, value) {\n if (assertCollection(this.contents))\n this.contents.addIn(path, value);\n }\n /**\n * Create a new `Alias` node, ensuring that the target `node` has the required anchor.\n *\n * If `node` already has an anchor, `name` is ignored.\n * Otherwise, the `node.anchor` value will be set to `name`,\n * or if an anchor with that name is already present in the document,\n * `name` will be used as a prefix for a new unique anchor.\n * If `name` is undefined, the generated anchor will use 'a' as a prefix.\n */\n createAlias(node, name) {\n if (!node.anchor) {\n const prev = anchors.anchorNames(this);\n node.anchor =\n // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing\n !name || prev.has(name) ? anchors.findNewAnchor(name || 'a', prev) : name;\n }\n return new Alias.Alias(node.anchor);\n }\n createNode(value, replacer, options) {\n let _replacer = undefined;\n if (typeof replacer === 'function') {\n value = replacer.call({ '': value }, '', value);\n _replacer = replacer;\n }\n else if (Array.isArray(replacer)) {\n const keyToStr = (v) => typeof v === 'number' || v instanceof String || v instanceof Number;\n const asStr = replacer.filter(keyToStr).map(String);\n if (asStr.length > 0)\n replacer = replacer.concat(asStr);\n _replacer = replacer;\n }\n else if (options === undefined && replacer) {\n options = replacer;\n replacer = undefined;\n }\n const { aliasDuplicateObjects, anchorPrefix, flow, keepUndefined, onTagObj, tag } = options ?? {};\n const { onAnchor, setAnchors, sourceObjects } = anchors.createNodeAnchors(this, \n // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing\n anchorPrefix || 'a');\n const ctx = {\n aliasDuplicateObjects: aliasDuplicateObjects ?? true,\n keepUndefined: keepUndefined ?? false,\n onAnchor,\n onTagObj,\n replacer: _replacer,\n schema: this.schema,\n sourceObjects\n };\n const node = createNode.createNode(value, tag, ctx);\n if (flow && identity.isCollection(node))\n node.flow = true;\n setAnchors();\n return node;\n }\n /**\n * Convert a key and a value into a `Pair` using the current schema,\n * recursively wrapping all values as `Scalar` or `Collection` nodes.\n */\n createPair(key, value, options = {}) {\n const k = this.createNode(key, null, options);\n const v = this.createNode(value, null, options);\n return new Pair.Pair(k, v);\n }\n /**\n * Removes a value from the document.\n * @returns `true` if the item was found and removed.\n */\n delete(key) {\n return assertCollection(this.contents) ? this.contents.delete(key) : false;\n }\n /**\n * Removes a value from the document.\n * @returns `true` if the item was found and removed.\n */\n deleteIn(path) {\n if (Collection.isEmptyPath(path)) {\n if (this.contents == null)\n return false;\n // @ts-expect-error Presumed impossible if Strict extends false\n this.contents = null;\n return true;\n }\n return assertCollection(this.contents)\n ? this.contents.deleteIn(path)\n : false;\n }\n /**\n * Returns item at `key`, or `undefined` if not found. By default unwraps\n * scalar values from their surrounding node; to disable set `keepScalar` to\n * `true` (collections are always returned intact).\n */\n get(key, keepScalar) {\n return identity.isCollection(this.contents)\n ? this.contents.get(key, keepScalar)\n : undefined;\n }\n /**\n * Returns item at `path`, or `undefined` if not found. 
By default unwraps\n * scalar values from their surrounding node; to disable set `keepScalar` to\n * `true` (collections are always returned intact).\n */\n getIn(path, keepScalar) {\n if (Collection.isEmptyPath(path))\n return !keepScalar && identity.isScalar(this.contents)\n ? this.contents.value\n : this.contents;\n return identity.isCollection(this.contents)\n ? this.contents.getIn(path, keepScalar)\n : undefined;\n }\n /**\n * Checks if the document includes a value with the key `key`.\n */\n has(key) {\n return identity.isCollection(this.contents) ? this.contents.has(key) : false;\n }\n /**\n * Checks if the document includes a value at `path`.\n */\n hasIn(path) {\n if (Collection.isEmptyPath(path))\n return this.contents !== undefined;\n return identity.isCollection(this.contents) ? this.contents.hasIn(path) : false;\n }\n /**\n * Sets a value in this document. For `!!set`, `value` needs to be a\n * boolean to add/remove the item from the set.\n */\n set(key, value) {\n if (this.contents == null) {\n // @ts-expect-error We can't really know that this matches Contents.\n this.contents = Collection.collectionFromPath(this.schema, [key], value);\n }\n else if (assertCollection(this.contents)) {\n this.contents.set(key, value);\n }\n }\n /**\n * Sets a value in this document. For `!!set`, `value` needs to be a\n * boolean to add/remove the item from the set.\n */\n setIn(path, value) {\n if (Collection.isEmptyPath(path)) {\n // @ts-expect-error We can't really know that this matches Contents.\n this.contents = value;\n }\n else if (this.contents == null) {\n // @ts-expect-error We can't really know that this matches Contents.\n this.contents = Collection.collectionFromPath(this.schema, Array.from(path), value);\n }\n else if (assertCollection(this.contents)) {\n this.contents.setIn(path, value);\n }\n }\n /**\n * Change the YAML version and schema used by the document.\n * A `null` version disables support for directives, explicit tags, anchors, and aliases.\n * It also requires the `schema` option to be given as a `Schema` instance value.\n *\n * Overrides all previously set schema options.\n */\n setSchema(version, options = {}) {\n if (typeof version === 'number')\n version = String(version);\n let opt;\n switch (version) {\n case '1.1':\n if (this.directives)\n this.directives.yaml.version = '1.1';\n else\n this.directives = new directives.Directives({ version: '1.1' });\n opt = { merge: true, resolveKnownTags: false, schema: 'yaml-1.1' };\n break;\n case '1.2':\n case 'next':\n if (this.directives)\n this.directives.yaml.version = version;\n else\n this.directives = new directives.Directives({ version });\n opt = { merge: false, resolveKnownTags: true, schema: 'core' };\n break;\n case null:\n if (this.directives)\n delete this.directives;\n opt = null;\n break;\n default: {\n const sv = JSON.stringify(version);\n throw new Error(`Expected '1.1', '1.2' or null as first argument, but found: ${sv}`);\n }\n }\n // Not using `instanceof Schema` to allow for duck typing\n if (options.schema instanceof Object)\n this.schema = options.schema;\n else if (opt)\n this.schema = new Schema.Schema(Object.assign(opt, options));\n else\n throw new Error(`With a null YAML version, the { schema: Schema } option is required`);\n }\n // json & jsonArg are only used from toJSON()\n toJS({ json, jsonArg, mapAsMap, maxAliasCount, onAnchor, reviver } = {}) {\n const ctx = {\n anchors: new Map(),\n doc: this,\n keep: !json,\n mapAsMap: mapAsMap === true,\n mapKeyWarned: false,\n maxAliasCount: typeof 
maxAliasCount === 'number' ? maxAliasCount : 100\n };\n const res = toJS.toJS(this.contents, jsonArg ?? '', ctx);\n if (typeof onAnchor === 'function')\n for (const { count, res } of ctx.anchors.values())\n onAnchor(res, count);\n return typeof reviver === 'function'\n ? applyReviver.applyReviver(reviver, { '': res }, '', res)\n : res;\n }\n /**\n * A JSON representation of the document `contents`.\n *\n * @param jsonArg Used by `JSON.stringify` to indicate the array index or\n * property name.\n */\n toJSON(jsonArg, onAnchor) {\n return this.toJS({ json: true, jsonArg, mapAsMap: false, onAnchor });\n }\n /** A YAML representation of the document. */\n toString(options = {}) {\n if (this.errors.length > 0)\n throw new Error('Document with errors cannot be stringified');\n if ('indent' in options &&\n (!Number.isInteger(options.indent) || Number(options.indent) <= 0)) {\n const s = JSON.stringify(options.indent);\n throw new Error(`\"indent\" option must be a positive integer, not ${s}`);\n }\n return stringifyDocument.stringifyDocument(this, options);\n }\n}\nfunction assertCollection(contents) {\n if (identity.isCollection(contents))\n return true;\n throw new Error('Expected a YAML collection as document contents');\n}\n\nexports.Document = Document;\n","'use strict';\n\nvar identity = require('../nodes/identity.js');\nvar visit = require('../visit.js');\n\n/**\n * Verify that the input string is a valid anchor.\n *\n * Will throw on errors.\n */\nfunction anchorIsValid(anchor) {\n if (/[\\x00-\\x19\\s,[\\]{}]/.test(anchor)) {\n const sa = JSON.stringify(anchor);\n const msg = `Anchor must not contain whitespace or control characters: ${sa}`;\n throw new Error(msg);\n }\n return true;\n}\nfunction anchorNames(root) {\n const anchors = new Set();\n visit.visit(root, {\n Value(_key, node) {\n if (node.anchor)\n anchors.add(node.anchor);\n }\n });\n return anchors;\n}\n/** Find a new anchor name with the given `prefix` and a one-indexed suffix. */\nfunction findNewAnchor(prefix, exclude) {\n for (let i = 1; true; ++i) {\n const name = `${prefix}${i}`;\n if (!exclude.has(name))\n return name;\n }\n}\nfunction createNodeAnchors(doc, prefix) {\n const aliasObjects = [];\n const sourceObjects = new Map();\n let prevAnchors = null;\n return {\n onAnchor: (source) => {\n aliasObjects.push(source);\n if (!prevAnchors)\n prevAnchors = anchorNames(doc);\n const anchor = findNewAnchor(prefix, prevAnchors);\n prevAnchors.add(anchor);\n return anchor;\n },\n /**\n * With circular references, the source node is only resolved after all\n * of its child nodes are. 
This is why anchors are set only after all of\n * the nodes have been created.\n */\n setAnchors: () => {\n for (const source of aliasObjects) {\n const ref = sourceObjects.get(source);\n if (typeof ref === 'object' &&\n ref.anchor &&\n (identity.isScalar(ref.node) || identity.isCollection(ref.node))) {\n ref.node.anchor = ref.anchor;\n }\n else {\n const error = new Error('Failed to resolve repeated object (this should not happen)');\n error.source = source;\n throw error;\n }\n }\n },\n sourceObjects\n };\n}\n\nexports.anchorIsValid = anchorIsValid;\nexports.anchorNames = anchorNames;\nexports.createNodeAnchors = createNodeAnchors;\nexports.findNewAnchor = findNewAnchor;\n","'use strict';\n\n/**\n * Applies the JSON.parse reviver algorithm as defined in the ECMA-262 spec,\n * in section 24.5.1.1 \"Runtime Semantics: InternalizeJSONProperty\" of the\n * 2021 edition: https://tc39.es/ecma262/#sec-json.parse\n *\n * Includes extensions for handling Map and Set objects.\n */\nfunction applyReviver(reviver, obj, key, val) {\n if (val && typeof val === 'object') {\n if (Array.isArray(val)) {\n for (let i = 0, len = val.length; i < len; ++i) {\n const v0 = val[i];\n const v1 = applyReviver(reviver, val, String(i), v0);\n if (v1 === undefined)\n delete val[i];\n else if (v1 !== v0)\n val[i] = v1;\n }\n }\n else if (val instanceof Map) {\n for (const k of Array.from(val.keys())) {\n const v0 = val.get(k);\n const v1 = applyReviver(reviver, val, k, v0);\n if (v1 === undefined)\n val.delete(k);\n else if (v1 !== v0)\n val.set(k, v1);\n }\n }\n else if (val instanceof Set) {\n for (const v0 of Array.from(val)) {\n const v1 = applyReviver(reviver, val, v0, v0);\n if (v1 === undefined)\n val.delete(v0);\n else if (v1 !== v0) {\n val.delete(v0);\n val.add(v1);\n }\n }\n }\n else {\n for (const [k, v0] of Object.entries(val)) {\n const v1 = applyReviver(reviver, val, k, v0);\n if (v1 === undefined)\n delete val[k];\n else if (v1 !== v0)\n val[k] = v1;\n }\n }\n }\n return reviver.call(obj, key, val);\n}\n\nexports.applyReviver = applyReviver;\n","'use strict';\n\nvar Alias = require('../nodes/Alias.js');\nvar identity = require('../nodes/identity.js');\nvar Scalar = require('../nodes/Scalar.js');\n\nconst defaultTagPrefix = 'tag:yaml.org,2002:';\nfunction findTagObject(value, tagName, tags) {\n if (tagName) {\n const match = tags.filter(t => t.tag === tagName);\n const tagObj = match.find(t => !t.format) ?? match[0];\n if (!tagObj)\n throw new Error(`Tag ${tagName} not found`);\n return tagObj;\n }\n return tags.find(t => t.identify?.(value) && !t.format);\n}\nfunction createNode(value, tagName, ctx) {\n if (identity.isDocument(value))\n value = value.contents;\n if (identity.isNode(value))\n return value;\n if (identity.isPair(value)) {\n const map = ctx.schema[identity.MAP].createNode?.(ctx.schema, null, ctx);\n map.items.push(value);\n return map;\n }\n if (value instanceof String ||\n value instanceof Number ||\n value instanceof Boolean ||\n (typeof BigInt !== 'undefined' && value instanceof BigInt) // not supported everywhere\n ) {\n // https://tc39.es/ecma262/#sec-serializejsonproperty\n value = value.valueOf();\n }\n const { aliasDuplicateObjects, onAnchor, onTagObj, schema, sourceObjects } = ctx;\n // Detect duplicate references to the same object & use Alias nodes for all\n // after first. 
The `ref` wrapper allows for circular references to resolve.\n let ref = undefined;\n if (aliasDuplicateObjects && value && typeof value === 'object') {\n ref = sourceObjects.get(value);\n if (ref) {\n if (!ref.anchor)\n ref.anchor = onAnchor(value);\n return new Alias.Alias(ref.anchor);\n }\n else {\n ref = { anchor: null, node: null };\n sourceObjects.set(value, ref);\n }\n }\n if (tagName?.startsWith('!!'))\n tagName = defaultTagPrefix + tagName.slice(2);\n let tagObj = findTagObject(value, tagName, schema.tags);\n if (!tagObj) {\n if (value && typeof value.toJSON === 'function') {\n // eslint-disable-next-line @typescript-eslint/no-unsafe-call\n value = value.toJSON();\n }\n if (!value || typeof value !== 'object') {\n const node = new Scalar.Scalar(value);\n if (ref)\n ref.node = node;\n return node;\n }\n tagObj =\n value instanceof Map\n ? schema[identity.MAP]\n : Symbol.iterator in Object(value)\n ? schema[identity.SEQ]\n : schema[identity.MAP];\n }\n if (onTagObj) {\n onTagObj(tagObj);\n delete ctx.onTagObj;\n }\n const node = tagObj?.createNode\n ? tagObj.createNode(ctx.schema, value, ctx)\n : typeof tagObj?.nodeClass?.from === 'function'\n ? tagObj.nodeClass.from(ctx.schema, value, ctx)\n : new Scalar.Scalar(value);\n if (tagName)\n node.tag = tagName;\n else if (!tagObj.default)\n node.tag = tagObj.tag;\n if (ref)\n ref.node = node;\n return node;\n}\n\nexports.createNode = createNode;\n","'use strict';\n\nvar identity = require('../nodes/identity.js');\nvar visit = require('../visit.js');\n\nconst escapeChars = {\n '!': '%21',\n ',': '%2C',\n '[': '%5B',\n ']': '%5D',\n '{': '%7B',\n '}': '%7D'\n};\nconst escapeTagName = (tn) => tn.replace(/[!,[\\]{}]/g, ch => escapeChars[ch]);\nclass Directives {\n constructor(yaml, tags) {\n /**\n * The directives-end/doc-start marker `---`. If `null`, a marker may still be\n * included in the document's stringified representation.\n */\n this.docStart = null;\n /** The doc-end marker `...`. 
*/\n this.docEnd = false;\n this.yaml = Object.assign({}, Directives.defaultYaml, yaml);\n this.tags = Object.assign({}, Directives.defaultTags, tags);\n }\n clone() {\n const copy = new Directives(this.yaml, this.tags);\n copy.docStart = this.docStart;\n return copy;\n }\n /**\n * During parsing, get a Directives instance for the current document and\n * update the stream state according to the current version's spec.\n */\n atDocument() {\n const res = new Directives(this.yaml, this.tags);\n switch (this.yaml.version) {\n case '1.1':\n this.atNextDocument = true;\n break;\n case '1.2':\n this.atNextDocument = false;\n this.yaml = {\n explicit: Directives.defaultYaml.explicit,\n version: '1.2'\n };\n this.tags = Object.assign({}, Directives.defaultTags);\n break;\n }\n return res;\n }\n /**\n * @param onError - May be called even if the action was successful\n * @returns `true` on success\n */\n add(line, onError) {\n if (this.atNextDocument) {\n this.yaml = { explicit: Directives.defaultYaml.explicit, version: '1.1' };\n this.tags = Object.assign({}, Directives.defaultTags);\n this.atNextDocument = false;\n }\n const parts = line.trim().split(/[ \\t]+/);\n const name = parts.shift();\n switch (name) {\n case '%TAG': {\n if (parts.length !== 2) {\n onError(0, '%TAG directive should contain exactly two parts');\n if (parts.length < 2)\n return false;\n }\n const [handle, prefix] = parts;\n this.tags[handle] = prefix;\n return true;\n }\n case '%YAML': {\n this.yaml.explicit = true;\n if (parts.length !== 1) {\n onError(0, '%YAML directive should contain exactly one part');\n return false;\n }\n const [version] = parts;\n if (version === '1.1' || version === '1.2') {\n this.yaml.version = version;\n return true;\n }\n else {\n const isValid = /^\\d+\\.\\d+$/.test(version);\n onError(6, `Unsupported YAML version ${version}`, isValid);\n return false;\n }\n }\n default:\n onError(0, `Unknown directive ${name}`, true);\n return false;\n }\n }\n /**\n * Resolves a tag, matching handles to those defined in %TAG directives.\n *\n * @returns Resolved tag, which may also be the non-specific tag `'!'` or a\n * `'!local'` tag, or `null` if unresolvable.\n */\n tagName(source, onError) {\n if (source === '!')\n return '!'; // non-specific tag\n if (source[0] !== '!') {\n onError(`Not a valid tag: ${source}`);\n return null;\n }\n if (source[1] === '<') {\n const verbatim = source.slice(2, -1);\n if (verbatim === '!' || verbatim === '!!') {\n onError(`Verbatim tags aren't resolved, so ${source} is invalid.`);\n return null;\n }\n if (source[source.length - 1] !== '>')\n onError('Verbatim tags must end with a >');\n return verbatim;\n }\n const [, handle, suffix] = source.match(/^(.*!)([^!]*)$/);\n if (!suffix)\n onError(`The ${source} tag has no suffix`);\n const prefix = this.tags[handle];\n if (prefix)\n return prefix + decodeURIComponent(suffix);\n if (handle === '!')\n return source; // local tag\n onError(`Could not resolve tag: ${source}`);\n return null;\n }\n /**\n * Given a fully resolved tag, returns its printable string form,\n * taking into account current tag prefixes and defaults.\n */\n tagString(tag) {\n for (const [handle, prefix] of Object.entries(this.tags)) {\n if (tag.startsWith(prefix))\n return handle + escapeTagName(tag.substring(prefix.length));\n }\n return tag[0] === '!' ? tag : `!<${tag}>`;\n }\n toString(doc) {\n const lines = this.yaml.explicit\n ? 
[`%YAML ${this.yaml.version || '1.2'}`]\n : [];\n const tagEntries = Object.entries(this.tags);\n let tagNames;\n if (doc && tagEntries.length > 0 && identity.isNode(doc.contents)) {\n const tags = {};\n visit.visit(doc.contents, (_key, node) => {\n if (identity.isNode(node) && node.tag)\n tags[node.tag] = true;\n });\n tagNames = Object.keys(tags);\n }\n else\n tagNames = [];\n for (const [handle, prefix] of tagEntries) {\n if (handle === '!!' && prefix === 'tag:yaml.org,2002:')\n continue;\n if (!doc || tagNames.some(tn => tn.startsWith(prefix)))\n lines.push(`%TAG ${handle} ${prefix}`);\n }\n return lines.join('\\n');\n }\n}\nDirectives.defaultYaml = { explicit: false, version: '1.2' };\nDirectives.defaultTags = { '!!': 'tag:yaml.org,2002:' };\n\nexports.Directives = Directives;\n","'use strict';\n\nclass YAMLError extends Error {\n constructor(name, pos, code, message) {\n super();\n this.name = name;\n this.code = code;\n this.message = message;\n this.pos = pos;\n }\n}\nclass YAMLParseError extends YAMLError {\n constructor(pos, code, message) {\n super('YAMLParseError', pos, code, message);\n }\n}\nclass YAMLWarning extends YAMLError {\n constructor(pos, code, message) {\n super('YAMLWarning', pos, code, message);\n }\n}\nconst prettifyError = (src, lc) => (error) => {\n if (error.pos[0] === -1)\n return;\n error.linePos = error.pos.map(pos => lc.linePos(pos));\n const { line, col } = error.linePos[0];\n error.message += ` at line ${line}, column ${col}`;\n let ci = col - 1;\n let lineStr = src\n .substring(lc.lineStarts[line - 1], lc.lineStarts[line])\n .replace(/[\\n\\r]+$/, '');\n // Trim to max 80 chars, keeping col position near the middle\n if (ci >= 60 && lineStr.length > 80) {\n const trimStart = Math.min(ci - 39, lineStr.length - 79);\n lineStr = '…' + lineStr.substring(trimStart);\n ci -= trimStart - 1;\n }\n if (lineStr.length > 80)\n lineStr = lineStr.substring(0, 79) + '…';\n // Include previous line in context if pointing at line start\n if (line > 1 && /^ *$/.test(lineStr.substring(0, ci))) {\n // Regexp won't match if start is trimmed\n let prev = src.substring(lc.lineStarts[line - 2], lc.lineStarts[line - 1]);\n if (prev.length > 80)\n prev = prev.substring(0, 79) + '…\\n';\n lineStr = prev + lineStr;\n }\n if (/[^ ]/.test(lineStr)) {\n let count = 1;\n const end = error.linePos[1];\n if (end && end.line === line && end.col > col) {\n count = Math.max(1, Math.min(end.col - col, 80 - ci));\n }\n const pointer = ' '.repeat(ci) + '^'.repeat(count);\n error.message += `:\\n\\n${lineStr}\\n${pointer}\\n`;\n }\n};\n\nexports.YAMLError = YAMLError;\nexports.YAMLParseError = YAMLParseError;\nexports.YAMLWarning = YAMLWarning;\nexports.prettifyError = prettifyError;\n","'use strict';\n\nvar composer = require('./compose/composer.js');\nvar Document = require('./doc/Document.js');\nvar Schema = require('./schema/Schema.js');\nvar errors = require('./errors.js');\nvar Alias = require('./nodes/Alias.js');\nvar identity = require('./nodes/identity.js');\nvar Pair = require('./nodes/Pair.js');\nvar Scalar = require('./nodes/Scalar.js');\nvar YAMLMap = require('./nodes/YAMLMap.js');\nvar YAMLSeq = require('./nodes/YAMLSeq.js');\nvar cst = require('./parse/cst.js');\nvar lexer = require('./parse/lexer.js');\nvar lineCounter = require('./parse/line-counter.js');\nvar parser = require('./parse/parser.js');\nvar publicApi = require('./public-api.js');\nvar visit = require('./visit.js');\n\n\n\nexports.Composer = composer.Composer;\nexports.Document = Document.Document;\nexports.Schema 
= Schema.Schema;\nexports.YAMLError = errors.YAMLError;\nexports.YAMLParseError = errors.YAMLParseError;\nexports.YAMLWarning = errors.YAMLWarning;\nexports.Alias = Alias.Alias;\nexports.isAlias = identity.isAlias;\nexports.isCollection = identity.isCollection;\nexports.isDocument = identity.isDocument;\nexports.isMap = identity.isMap;\nexports.isNode = identity.isNode;\nexports.isPair = identity.isPair;\nexports.isScalar = identity.isScalar;\nexports.isSeq = identity.isSeq;\nexports.Pair = Pair.Pair;\nexports.Scalar = Scalar.Scalar;\nexports.YAMLMap = YAMLMap.YAMLMap;\nexports.YAMLSeq = YAMLSeq.YAMLSeq;\nexports.CST = cst;\nexports.Lexer = lexer.Lexer;\nexports.LineCounter = lineCounter.LineCounter;\nexports.Parser = parser.Parser;\nexports.parse = publicApi.parse;\nexports.parseAllDocuments = publicApi.parseAllDocuments;\nexports.parseDocument = publicApi.parseDocument;\nexports.stringify = publicApi.stringify;\nexports.visit = visit.visit;\nexports.visitAsync = visit.visitAsync;\n","'use strict';\n\nfunction debug(logLevel, ...messages) {\n if (logLevel === 'debug')\n console.log(...messages);\n}\nfunction warn(logLevel, warning) {\n if (logLevel === 'debug' || logLevel === 'warn') {\n // https://github.com/typescript-eslint/typescript-eslint/issues/7478\n // eslint-disable-next-line @typescript-eslint/prefer-optional-chain\n if (typeof process !== 'undefined' && process.emitWarning)\n process.emitWarning(warning);\n else\n console.warn(warning);\n }\n}\n\nexports.debug = debug;\nexports.warn = warn;\n","'use strict';\n\nvar anchors = require('../doc/anchors.js');\nvar visit = require('../visit.js');\nvar identity = require('./identity.js');\nvar Node = require('./Node.js');\nvar toJS = require('./toJS.js');\n\nclass Alias extends Node.NodeBase {\n constructor(source) {\n super(identity.ALIAS);\n this.source = source;\n Object.defineProperty(this, 'tag', {\n set() {\n throw new Error('Alias nodes cannot have tags');\n }\n });\n }\n /**\n * Resolve the value of this alias within `doc`, finding the last\n * instance of the `source` anchor before this node.\n */\n resolve(doc) {\n let found = undefined;\n visit.visit(doc, {\n Node: (_key, node) => {\n if (node === this)\n return visit.visit.BREAK;\n if (node.anchor === this.source)\n found = node;\n }\n });\n return found;\n }\n toJSON(_arg, ctx) {\n if (!ctx)\n return { source: this.source };\n const { anchors, doc, maxAliasCount } = ctx;\n const source = this.resolve(doc);\n if (!source) {\n const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`;\n throw new ReferenceError(msg);\n }\n let data = anchors.get(source);\n if (!data) {\n // Resolve anchors for Node.prototype.toJS()\n toJS.toJS(source, null, ctx);\n data = anchors.get(source);\n }\n /* istanbul ignore if */\n if (!data || data.res === undefined) {\n const msg = 'This should not happen: Alias anchor was not resolved?';\n throw new ReferenceError(msg);\n }\n if (maxAliasCount >= 0) {\n data.count += 1;\n if (data.aliasCount === 0)\n data.aliasCount = getAliasCount(doc, source, anchors);\n if (data.count * data.aliasCount > maxAliasCount) {\n const msg = 'Excessive alias count indicates a resource exhaustion attack';\n throw new ReferenceError(msg);\n }\n }\n return data.res;\n }\n toString(ctx, _onComment, _onChompKeep) {\n const src = `*${this.source}`;\n if (ctx) {\n anchors.anchorIsValid(this.source);\n if (ctx.options.verifyAliasOrder && !ctx.anchors.has(this.source)) {\n const msg = `Unresolved alias (the anchor must be set before the 
alias): ${this.source}`;\n throw new Error(msg);\n }\n if (ctx.implicitKey)\n return `${src} `;\n }\n return src;\n }\n}\nfunction getAliasCount(doc, node, anchors) {\n if (identity.isAlias(node)) {\n const source = node.resolve(doc);\n const anchor = anchors && source && anchors.get(source);\n return anchor ? anchor.count * anchor.aliasCount : 0;\n }\n else if (identity.isCollection(node)) {\n let count = 0;\n for (const item of node.items) {\n const c = getAliasCount(doc, item, anchors);\n if (c > count)\n count = c;\n }\n return count;\n }\n else if (identity.isPair(node)) {\n const kc = getAliasCount(doc, node.key, anchors);\n const vc = getAliasCount(doc, node.value, anchors);\n return Math.max(kc, vc);\n }\n return 1;\n}\n\nexports.Alias = Alias;\n","'use strict';\n\nvar createNode = require('../doc/createNode.js');\nvar identity = require('./identity.js');\nvar Node = require('./Node.js');\n\nfunction collectionFromPath(schema, path, value) {\n let v = value;\n for (let i = path.length - 1; i >= 0; --i) {\n const k = path[i];\n if (typeof k === 'number' && Number.isInteger(k) && k >= 0) {\n const a = [];\n a[k] = v;\n v = a;\n }\n else {\n v = new Map([[k, v]]);\n }\n }\n return createNode.createNode(v, undefined, {\n aliasDuplicateObjects: false,\n keepUndefined: false,\n onAnchor: () => {\n throw new Error('This should not happen, please report a bug.');\n },\n schema,\n sourceObjects: new Map()\n });\n}\n// Type guard is intentionally a little wrong so as to be more useful,\n// as it does not cover untypable empty non-string iterables (e.g. []).\nconst isEmptyPath = (path) => path == null ||\n (typeof path === 'object' && !!path[Symbol.iterator]().next().done);\nclass Collection extends Node.NodeBase {\n constructor(type, schema) {\n super(type);\n Object.defineProperty(this, 'schema', {\n value: schema,\n configurable: true,\n enumerable: false,\n writable: true\n });\n }\n /**\n * Create a copy of this collection.\n *\n * @param schema - If defined, overwrites the original's schema\n */\n clone(schema) {\n const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this));\n if (schema)\n copy.schema = schema;\n copy.items = copy.items.map(it => identity.isNode(it) || identity.isPair(it) ? it.clone(schema) : it);\n if (this.range)\n copy.range = this.range.slice();\n return copy;\n }\n /**\n * Adds a value to the collection. For `!!map` and `!!omap` the value must\n * be a Pair instance or a `{ key, value }` object, which may not have a key\n * that already exists in the map.\n */\n addIn(path, value) {\n if (isEmptyPath(path))\n this.add(value);\n else {\n const [key, ...rest] = path;\n const node = this.get(key, true);\n if (identity.isCollection(node))\n node.addIn(rest, value);\n else if (node === undefined && this.schema)\n this.set(key, collectionFromPath(this.schema, rest, value));\n else\n throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);\n }\n }\n /**\n * Removes a value from the collection.\n * @returns `true` if the item was found and removed.\n */\n deleteIn(path) {\n const [key, ...rest] = path;\n if (rest.length === 0)\n return this.delete(key);\n const node = this.get(key, true);\n if (identity.isCollection(node))\n return node.deleteIn(rest);\n else\n throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);\n }\n /**\n * Returns item at `key`, or `undefined` if not found. 
By default unwraps\n * scalar values from their surrounding node; to disable set `keepScalar` to\n * `true` (collections are always returned intact).\n */\n getIn(path, keepScalar) {\n const [key, ...rest] = path;\n const node = this.get(key, true);\n if (rest.length === 0)\n return !keepScalar && identity.isScalar(node) ? node.value : node;\n else\n return identity.isCollection(node) ? node.getIn(rest, keepScalar) : undefined;\n }\n hasAllNullValues(allowScalar) {\n return this.items.every(node => {\n if (!identity.isPair(node))\n return false;\n const n = node.value;\n return (n == null ||\n (allowScalar &&\n identity.isScalar(n) &&\n n.value == null &&\n !n.commentBefore &&\n !n.comment &&\n !n.tag));\n });\n }\n /**\n * Checks if the collection includes a value with the key `key`.\n */\n hasIn(path) {\n const [key, ...rest] = path;\n if (rest.length === 0)\n return this.has(key);\n const node = this.get(key, true);\n return identity.isCollection(node) ? node.hasIn(rest) : false;\n }\n /**\n * Sets a value in this collection. For `!!set`, `value` needs to be a\n * boolean to add/remove the item from the set.\n */\n setIn(path, value) {\n const [key, ...rest] = path;\n if (rest.length === 0) {\n this.set(key, value);\n }\n else {\n const node = this.get(key, true);\n if (identity.isCollection(node))\n node.setIn(rest, value);\n else if (node === undefined && this.schema)\n this.set(key, collectionFromPath(this.schema, rest, value));\n else\n throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);\n }\n }\n}\nCollection.maxFlowStringSingleLineLength = 60;\n\nexports.Collection = Collection;\nexports.collectionFromPath = collectionFromPath;\nexports.isEmptyPath = isEmptyPath;\n","'use strict';\n\nvar applyReviver = require('../doc/applyReviver.js');\nvar identity = require('./identity.js');\nvar toJS = require('./toJS.js');\n\nclass NodeBase {\n constructor(type) {\n Object.defineProperty(this, identity.NODE_TYPE, { value: type });\n }\n /** Create a copy of this node. */\n clone() {\n const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this));\n if (this.range)\n copy.range = this.range.slice();\n return copy;\n }\n /** A plain JavaScript representation of this node. */\n toJS(doc, { mapAsMap, maxAliasCount, onAnchor, reviver } = {}) {\n if (!identity.isDocument(doc))\n throw new TypeError('A document argument is required');\n const ctx = {\n anchors: new Map(),\n doc,\n keep: true,\n mapAsMap: mapAsMap === true,\n mapKeyWarned: false,\n maxAliasCount: typeof maxAliasCount === 'number' ? maxAliasCount : 100\n };\n const res = toJS.toJS(this, '', ctx);\n if (typeof onAnchor === 'function')\n for (const { count, res } of ctx.anchors.values())\n onAnchor(res, count);\n return typeof reviver === 'function'\n ? 
applyReviver.applyReviver(reviver, { '': res }, '', res)\n : res;\n }\n}\n\nexports.NodeBase = NodeBase;\n","'use strict';\n\nvar createNode = require('../doc/createNode.js');\nvar stringifyPair = require('../stringify/stringifyPair.js');\nvar addPairToJSMap = require('./addPairToJSMap.js');\nvar identity = require('./identity.js');\n\nfunction createPair(key, value, ctx) {\n const k = createNode.createNode(key, undefined, ctx);\n const v = createNode.createNode(value, undefined, ctx);\n return new Pair(k, v);\n}\nclass Pair {\n constructor(key, value = null) {\n Object.defineProperty(this, identity.NODE_TYPE, { value: identity.PAIR });\n this.key = key;\n this.value = value;\n }\n clone(schema) {\n let { key, value } = this;\n if (identity.isNode(key))\n key = key.clone(schema);\n if (identity.isNode(value))\n value = value.clone(schema);\n return new Pair(key, value);\n }\n toJSON(_, ctx) {\n const pair = ctx?.mapAsMap ? new Map() : {};\n return addPairToJSMap.addPairToJSMap(ctx, pair, this);\n }\n toString(ctx, onComment, onChompKeep) {\n return ctx?.doc\n ? stringifyPair.stringifyPair(this, ctx, onComment, onChompKeep)\n : JSON.stringify(this);\n }\n}\n\nexports.Pair = Pair;\nexports.createPair = createPair;\n","'use strict';\n\nvar identity = require('./identity.js');\nvar Node = require('./Node.js');\nvar toJS = require('./toJS.js');\n\nconst isScalarValue = (value) => !value || (typeof value !== 'function' && typeof value !== 'object');\nclass Scalar extends Node.NodeBase {\n constructor(value) {\n super(identity.SCALAR);\n this.value = value;\n }\n toJSON(arg, ctx) {\n return ctx?.keep ? this.value : toJS.toJS(this.value, arg, ctx);\n }\n toString() {\n return String(this.value);\n }\n}\nScalar.BLOCK_FOLDED = 'BLOCK_FOLDED';\nScalar.BLOCK_LITERAL = 'BLOCK_LITERAL';\nScalar.PLAIN = 'PLAIN';\nScalar.QUOTE_DOUBLE = 'QUOTE_DOUBLE';\nScalar.QUOTE_SINGLE = 'QUOTE_SINGLE';\n\nexports.Scalar = Scalar;\nexports.isScalarValue = isScalarValue;\n","'use strict';\n\nvar stringifyCollection = require('../stringify/stringifyCollection.js');\nvar addPairToJSMap = require('./addPairToJSMap.js');\nvar Collection = require('./Collection.js');\nvar identity = require('./identity.js');\nvar Pair = require('./Pair.js');\nvar Scalar = require('./Scalar.js');\n\nfunction findPair(items, key) {\n const k = identity.isScalar(key) ? 
key.value : key;\n for (const it of items) {\n if (identity.isPair(it)) {\n if (it.key === key || it.key === k)\n return it;\n if (identity.isScalar(it.key) && it.key.value === k)\n return it;\n }\n }\n return undefined;\n}\nclass YAMLMap extends Collection.Collection {\n static get tagName() {\n return 'tag:yaml.org,2002:map';\n }\n constructor(schema) {\n super(identity.MAP, schema);\n this.items = [];\n }\n /**\n * A generic collection parsing method that can be extended\n * to other node classes that inherit from YAMLMap\n */\n static from(schema, obj, ctx) {\n const { keepUndefined, replacer } = ctx;\n const map = new this(schema);\n const add = (key, value) => {\n if (typeof replacer === 'function')\n value = replacer.call(obj, key, value);\n else if (Array.isArray(replacer) && !replacer.includes(key))\n return;\n if (value !== undefined || keepUndefined)\n map.items.push(Pair.createPair(key, value, ctx));\n };\n if (obj instanceof Map) {\n for (const [key, value] of obj)\n add(key, value);\n }\n else if (obj && typeof obj === 'object') {\n for (const key of Object.keys(obj))\n add(key, obj[key]);\n }\n if (typeof schema.sortMapEntries === 'function') {\n map.items.sort(schema.sortMapEntries);\n }\n return map;\n }\n /**\n * Adds a value to the collection.\n *\n * @param overwrite - If not set `true`, using a key that is already in the\n * collection will throw. Otherwise, overwrites the previous value.\n */\n add(pair, overwrite) {\n let _pair;\n if (identity.isPair(pair))\n _pair = pair;\n else if (!pair || typeof pair !== 'object' || !('key' in pair)) {\n // In TypeScript, this never happens.\n _pair = new Pair.Pair(pair, pair?.value);\n }\n else\n _pair = new Pair.Pair(pair.key, pair.value);\n const prev = findPair(this.items, _pair.key);\n const sortEntries = this.schema?.sortMapEntries;\n if (prev) {\n if (!overwrite)\n throw new Error(`Key ${_pair.key} already set`);\n // For scalars, keep the old node & its comments and anchors\n if (identity.isScalar(prev.value) && Scalar.isScalarValue(_pair.value))\n prev.value.value = _pair.value;\n else\n prev.value = _pair.value;\n }\n else if (sortEntries) {\n const i = this.items.findIndex(item => sortEntries(_pair, item) < 0);\n if (i === -1)\n this.items.push(_pair);\n else\n this.items.splice(i, 0, _pair);\n }\n else {\n this.items.push(_pair);\n }\n }\n delete(key) {\n const it = findPair(this.items, key);\n if (!it)\n return false;\n const del = this.items.splice(this.items.indexOf(it), 1);\n return del.length > 0;\n }\n get(key, keepScalar) {\n const it = findPair(this.items, key);\n const node = it?.value;\n return (!keepScalar && identity.isScalar(node) ? node.value : node) ?? undefined;\n }\n has(key) {\n return !!findPair(this.items, key);\n }\n set(key, value) {\n this.add(new Pair.Pair(key, value), true);\n }\n /**\n * @param ctx - Conversion context, originally set in Document#toJS()\n * @param {Class} Type - If set, forces the returned collection type\n * @returns Instance of Type, Map, or Object\n */\n toJSON(_, ctx, Type) {\n const map = Type ? new Type() : ctx?.mapAsMap ? 
new Map() : {};\n if (ctx?.onCreate)\n ctx.onCreate(map);\n for (const item of this.items)\n addPairToJSMap.addPairToJSMap(ctx, map, item);\n return map;\n }\n toString(ctx, onComment, onChompKeep) {\n if (!ctx)\n return JSON.stringify(this);\n for (const item of this.items) {\n if (!identity.isPair(item))\n throw new Error(`Map items must all be pairs; found ${JSON.stringify(item)} instead`);\n }\n if (!ctx.allNullValues && this.hasAllNullValues(false))\n ctx = Object.assign({}, ctx, { allNullValues: true });\n return stringifyCollection.stringifyCollection(this, ctx, {\n blockItemPrefix: '',\n flowChars: { start: '{', end: '}' },\n itemIndent: ctx.indent || '',\n onChompKeep,\n onComment\n });\n }\n}\n\nexports.YAMLMap = YAMLMap;\nexports.findPair = findPair;\n","'use strict';\n\nvar createNode = require('../doc/createNode.js');\nvar stringifyCollection = require('../stringify/stringifyCollection.js');\nvar Collection = require('./Collection.js');\nvar identity = require('./identity.js');\nvar Scalar = require('./Scalar.js');\nvar toJS = require('./toJS.js');\n\nclass YAMLSeq extends Collection.Collection {\n static get tagName() {\n return 'tag:yaml.org,2002:seq';\n }\n constructor(schema) {\n super(identity.SEQ, schema);\n this.items = [];\n }\n add(value) {\n this.items.push(value);\n }\n /**\n * Removes a value from the collection.\n *\n * `key` must contain a representation of an integer for this to succeed.\n * It may be wrapped in a `Scalar`.\n *\n * @returns `true` if the item was found and removed.\n */\n delete(key) {\n const idx = asItemIndex(key);\n if (typeof idx !== 'number')\n return false;\n const del = this.items.splice(idx, 1);\n return del.length > 0;\n }\n get(key, keepScalar) {\n const idx = asItemIndex(key);\n if (typeof idx !== 'number')\n return undefined;\n const it = this.items[idx];\n return !keepScalar && identity.isScalar(it) ? it.value : it;\n }\n /**\n * Checks if the collection includes a value with the key `key`.\n *\n * `key` must contain a representation of an integer for this to succeed.\n * It may be wrapped in a `Scalar`.\n */\n has(key) {\n const idx = asItemIndex(key);\n return typeof idx === 'number' && idx < this.items.length;\n }\n /**\n * Sets a value in this collection. For `!!set`, `value` needs to be a\n * boolean to add/remove the item from the set.\n *\n * If `key` does not contain a representation of an integer, this will throw.\n * It may be wrapped in a `Scalar`.\n */\n set(key, value) {\n const idx = asItemIndex(key);\n if (typeof idx !== 'number')\n throw new Error(`Expected a valid index, not ${key}.`);\n const prev = this.items[idx];\n if (identity.isScalar(prev) && Scalar.isScalarValue(value))\n prev.value = value;\n else\n this.items[idx] = value;\n }\n toJSON(_, ctx) {\n const seq = [];\n if (ctx?.onCreate)\n ctx.onCreate(seq);\n let i = 0;\n for (const item of this.items)\n seq.push(toJS.toJS(item, String(i++), ctx));\n return seq;\n }\n toString(ctx, onComment, onChompKeep) {\n if (!ctx)\n return JSON.stringify(this);\n return stringifyCollection.stringifyCollection(this, ctx, {\n blockItemPrefix: '- ',\n flowChars: { start: '[', end: ']' },\n itemIndent: (ctx.indent || '') + ' ',\n onChompKeep,\n onComment\n });\n }\n static from(schema, obj, ctx) {\n const { replacer } = ctx;\n const seq = new this(schema);\n if (obj && Symbol.iterator in Object(obj)) {\n let i = 0;\n for (let it of obj) {\n if (typeof replacer === 'function') {\n const key = obj instanceof Set ? 
it : String(i++);\n it = replacer.call(obj, key, it);\n }\n seq.items.push(createNode.createNode(it, undefined, ctx));\n }\n }\n return seq;\n }\n}\nfunction asItemIndex(key) {\n let idx = identity.isScalar(key) ? key.value : key;\n if (idx && typeof idx === 'string')\n idx = Number(idx);\n return typeof idx === 'number' && Number.isInteger(idx) && idx >= 0\n ? idx\n : null;\n}\n\nexports.YAMLSeq = YAMLSeq;\n","'use strict';\n\nvar log = require('../log.js');\nvar stringify = require('../stringify/stringify.js');\nvar identity = require('./identity.js');\nvar Scalar = require('./Scalar.js');\nvar toJS = require('./toJS.js');\n\nconst MERGE_KEY = '<<';\nfunction addPairToJSMap(ctx, map, { key, value }) {\n if (ctx?.doc.schema.merge && isMergeKey(key)) {\n value = identity.isAlias(value) ? value.resolve(ctx.doc) : value;\n if (identity.isSeq(value))\n for (const it of value.items)\n mergeToJSMap(ctx, map, it);\n else if (Array.isArray(value))\n for (const it of value)\n mergeToJSMap(ctx, map, it);\n else\n mergeToJSMap(ctx, map, value);\n }\n else {\n const jsKey = toJS.toJS(key, '', ctx);\n if (map instanceof Map) {\n map.set(jsKey, toJS.toJS(value, jsKey, ctx));\n }\n else if (map instanceof Set) {\n map.add(jsKey);\n }\n else {\n const stringKey = stringifyKey(key, jsKey, ctx);\n const jsValue = toJS.toJS(value, stringKey, ctx);\n if (stringKey in map)\n Object.defineProperty(map, stringKey, {\n value: jsValue,\n writable: true,\n enumerable: true,\n configurable: true\n });\n else\n map[stringKey] = jsValue;\n }\n }\n return map;\n}\nconst isMergeKey = (key) => key === MERGE_KEY ||\n (identity.isScalar(key) &&\n key.value === MERGE_KEY &&\n (!key.type || key.type === Scalar.Scalar.PLAIN));\n// If the value associated with a merge key is a single mapping node, each of\n// its key/value pairs is inserted into the current mapping, unless the key\n// already exists in it. If the value associated with the merge key is a\n// sequence, then this sequence is expected to contain mapping nodes and each\n// of these nodes is merged in turn according to its order in the sequence.\n// Keys in mapping nodes earlier in the sequence override keys specified in\n// later mapping nodes. -- http://yaml.org/type/merge.html\nfunction mergeToJSMap(ctx, map, value) {\n const source = ctx && identity.isAlias(value) ? 
value.resolve(ctx.doc) : value;\n if (!identity.isMap(source))\n throw new Error('Merge sources must be maps or map aliases');\n const srcMap = source.toJSON(null, ctx, Map);\n for (const [key, value] of srcMap) {\n if (map instanceof Map) {\n if (!map.has(key))\n map.set(key, value);\n }\n else if (map instanceof Set) {\n map.add(key);\n }\n else if (!Object.prototype.hasOwnProperty.call(map, key)) {\n Object.defineProperty(map, key, {\n value,\n writable: true,\n enumerable: true,\n configurable: true\n });\n }\n }\n return map;\n}\nfunction stringifyKey(key, jsKey, ctx) {\n if (jsKey === null)\n return '';\n if (typeof jsKey !== 'object')\n return String(jsKey);\n if (identity.isNode(key) && ctx?.doc) {\n const strCtx = stringify.createStringifyContext(ctx.doc, {});\n strCtx.anchors = new Set();\n for (const node of ctx.anchors.keys())\n strCtx.anchors.add(node.anchor);\n strCtx.inFlow = true;\n strCtx.inStringifyKey = true;\n const strKey = key.toString(strCtx);\n if (!ctx.mapKeyWarned) {\n let jsonStr = JSON.stringify(strKey);\n if (jsonStr.length > 40)\n jsonStr = jsonStr.substring(0, 36) + '...\"';\n log.warn(ctx.doc.options.logLevel, `Keys with collection values will be stringified due to JS Object restrictions: ${jsonStr}. Set mapAsMap: true to use object keys.`);\n ctx.mapKeyWarned = true;\n }\n return strKey;\n }\n return JSON.stringify(jsKey);\n}\n\nexports.addPairToJSMap = addPairToJSMap;\n","'use strict';\n\nconst ALIAS = Symbol.for('yaml.alias');\nconst DOC = Symbol.for('yaml.document');\nconst MAP = Symbol.for('yaml.map');\nconst PAIR = Symbol.for('yaml.pair');\nconst SCALAR = Symbol.for('yaml.scalar');\nconst SEQ = Symbol.for('yaml.seq');\nconst NODE_TYPE = Symbol.for('yaml.node.type');\nconst isAlias = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === ALIAS;\nconst isDocument = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === DOC;\nconst isMap = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === MAP;\nconst isPair = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === PAIR;\nconst isScalar = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SCALAR;\nconst isSeq = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SEQ;\nfunction isCollection(node) {\n if (node && typeof node === 'object')\n switch (node[NODE_TYPE]) {\n case MAP:\n case SEQ:\n return true;\n }\n return false;\n}\nfunction isNode(node) {\n if (node && typeof node === 'object')\n switch (node[NODE_TYPE]) {\n case ALIAS:\n case MAP:\n case SCALAR:\n case SEQ:\n return true;\n }\n return false;\n}\nconst hasAnchor = (node) => (isScalar(node) || isCollection(node)) && !!node.anchor;\n\nexports.ALIAS = ALIAS;\nexports.DOC = DOC;\nexports.MAP = MAP;\nexports.NODE_TYPE = NODE_TYPE;\nexports.PAIR = PAIR;\nexports.SCALAR = SCALAR;\nexports.SEQ = SEQ;\nexports.hasAnchor = hasAnchor;\nexports.isAlias = isAlias;\nexports.isCollection = isCollection;\nexports.isDocument = isDocument;\nexports.isMap = isMap;\nexports.isNode = isNode;\nexports.isPair = isPair;\nexports.isScalar = isScalar;\nexports.isSeq = isSeq;\n","'use strict';\n\nvar identity = require('./identity.js');\n\n/**\n * Recursively convert any node or its contents to native JavaScript\n *\n * @param value - The input value\n * @param arg - If `value` defines a `toJSON()` method, use this\n * as its first argument\n * @param ctx - Conversion context, originally set in Document#toJS(). 
If\n * `{ keep: true }` is not set, output should be suitable for JSON\n * stringification.\n */\nfunction toJS(value, arg, ctx) {\n // eslint-disable-next-line @typescript-eslint/no-unsafe-return\n if (Array.isArray(value))\n return value.map((v, i) => toJS(v, String(i), ctx));\n if (value && typeof value.toJSON === 'function') {\n // eslint-disable-next-line @typescript-eslint/no-unsafe-call\n if (!ctx || !identity.hasAnchor(value))\n return value.toJSON(arg, ctx);\n const data = { aliasCount: 0, count: 1, res: undefined };\n ctx.anchors.set(value, data);\n ctx.onCreate = res => {\n data.res = res;\n delete ctx.onCreate;\n };\n const res = value.toJSON(arg, ctx);\n if (ctx.onCreate)\n ctx.onCreate(res);\n return res;\n }\n if (typeof value === 'bigint' && !ctx?.keep)\n return Number(value);\n return value;\n}\n\nexports.toJS = toJS;\n","'use strict';\n\nvar resolveBlockScalar = require('../compose/resolve-block-scalar.js');\nvar resolveFlowScalar = require('../compose/resolve-flow-scalar.js');\nvar errors = require('../errors.js');\nvar stringifyString = require('../stringify/stringifyString.js');\n\nfunction resolveAsScalar(token, strict = true, onError) {\n if (token) {\n const _onError = (pos, code, message) => {\n const offset = typeof pos === 'number' ? pos : Array.isArray(pos) ? pos[0] : pos.offset;\n if (onError)\n onError(offset, code, message);\n else\n throw new errors.YAMLParseError([offset, offset + 1], code, message);\n };\n switch (token.type) {\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar':\n return resolveFlowScalar.resolveFlowScalar(token, strict, _onError);\n case 'block-scalar':\n return resolveBlockScalar.resolveBlockScalar(token, strict, _onError);\n }\n }\n return null;\n}\n/**\n * Create a new scalar token with `value`\n *\n * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,\n * as this function does not support any schema operations and won't check for such conflicts.\n *\n * @param value The string representation of the value, which will have its content properly indented.\n * @param context.end Comments and whitespace after the end of the value, or after the block scalar header. If undefined, a newline will be added.\n * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.\n * @param context.indent The indent level of the token.\n * @param context.inFlow Is this scalar within a flow collection? This may affect the resolved type of the token's value.\n * @param context.offset The offset position of the token.\n * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.\n */\nfunction createScalarToken(value, context) {\n const { implicitKey = false, indent, inFlow = false, offset = -1, type = 'PLAIN' } = context;\n const source = stringifyString.stringifyString({ type, value }, {\n implicitKey,\n indent: indent > 0 ? ' '.repeat(indent) : '',\n inFlow,\n options: { blockQuote: true, lineWidth: -1 }\n });\n const end = context.end ?? 
[\n { type: 'newline', offset: -1, indent, source: '\\n' }\n ];\n switch (source[0]) {\n case '|':\n case '>': {\n const he = source.indexOf('\\n');\n const head = source.substring(0, he);\n const body = source.substring(he + 1) + '\\n';\n const props = [\n { type: 'block-scalar-header', offset, indent, source: head }\n ];\n if (!addEndtoBlockProps(props, end))\n props.push({ type: 'newline', offset: -1, indent, source: '\\n' });\n return { type: 'block-scalar', offset, indent, props, source: body };\n }\n case '\"':\n return { type: 'double-quoted-scalar', offset, indent, source, end };\n case \"'\":\n return { type: 'single-quoted-scalar', offset, indent, source, end };\n default:\n return { type: 'scalar', offset, indent, source, end };\n }\n}\n/**\n * Set the value of `token` to the given string `value`, overwriting any previous contents and type that it may have.\n *\n * Best efforts are made to retain any comments previously associated with the `token`,\n * though all contents within a collection's `items` will be overwritten.\n *\n * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,\n * as this function does not support any schema operations and won't check for such conflicts.\n *\n * @param token Any token. If it does not include an `indent` value, the value will be stringified as if it were an implicit key.\n * @param value The string representation of the value, which will have its content properly indented.\n * @param context.afterKey In most cases, values after a key should have an additional level of indentation.\n * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.\n * @param context.inFlow Being within a flow collection may affect the resolved type of the token's value.\n * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.\n */\nfunction setScalarValue(token, value, context = {}) {\n let { afterKey = false, implicitKey = false, inFlow = false, type } = context;\n let indent = 'indent' in token ? token.indent : null;\n if (afterKey && typeof indent === 'number')\n indent += 2;\n if (!type)\n switch (token.type) {\n case 'single-quoted-scalar':\n type = 'QUOTE_SINGLE';\n break;\n case 'double-quoted-scalar':\n type = 'QUOTE_DOUBLE';\n break;\n case 'block-scalar': {\n const header = token.props[0];\n if (header.type !== 'block-scalar-header')\n throw new Error('Invalid block scalar header');\n type = header.source[0] === '>' ? 'BLOCK_FOLDED' : 'BLOCK_LITERAL';\n break;\n }\n default:\n type = 'PLAIN';\n }\n const source = stringifyString.stringifyString({ type, value }, {\n implicitKey: implicitKey || indent === null,\n indent: indent !== null && indent > 0 ? 
' '.repeat(indent) : '',\n inFlow,\n options: { blockQuote: true, lineWidth: -1 }\n });\n switch (source[0]) {\n case '|':\n case '>':\n setBlockScalarValue(token, source);\n break;\n case '\"':\n setFlowScalarValue(token, source, 'double-quoted-scalar');\n break;\n case \"'\":\n setFlowScalarValue(token, source, 'single-quoted-scalar');\n break;\n default:\n setFlowScalarValue(token, source, 'scalar');\n }\n}\nfunction setBlockScalarValue(token, source) {\n const he = source.indexOf('\\n');\n const head = source.substring(0, he);\n const body = source.substring(he + 1) + '\\n';\n if (token.type === 'block-scalar') {\n const header = token.props[0];\n if (header.type !== 'block-scalar-header')\n throw new Error('Invalid block scalar header');\n header.source = head;\n token.source = body;\n }\n else {\n const { offset } = token;\n const indent = 'indent' in token ? token.indent : -1;\n const props = [\n { type: 'block-scalar-header', offset, indent, source: head }\n ];\n if (!addEndtoBlockProps(props, 'end' in token ? token.end : undefined))\n props.push({ type: 'newline', offset: -1, indent, source: '\\n' });\n for (const key of Object.keys(token))\n if (key !== 'type' && key !== 'offset')\n delete token[key];\n Object.assign(token, { type: 'block-scalar', indent, props, source: body });\n }\n}\n/** @returns `true` if last token is a newline */\nfunction addEndtoBlockProps(props, end) {\n if (end)\n for (const st of end)\n switch (st.type) {\n case 'space':\n case 'comment':\n props.push(st);\n break;\n case 'newline':\n props.push(st);\n return true;\n }\n return false;\n}\nfunction setFlowScalarValue(token, source, type) {\n switch (token.type) {\n case 'scalar':\n case 'double-quoted-scalar':\n case 'single-quoted-scalar':\n token.type = type;\n token.source = source;\n break;\n case 'block-scalar': {\n const end = token.props.slice(1);\n let oa = source.length;\n if (token.props[0].type === 'block-scalar-header')\n oa -= token.props[0].source.length;\n for (const tok of end)\n tok.offset += oa;\n delete token.props;\n Object.assign(token, { type, source, end });\n break;\n }\n case 'block-map':\n case 'block-seq': {\n const offset = token.offset + source.length;\n const nl = { type: 'newline', offset, indent: token.indent, source: '\\n' };\n delete token.items;\n Object.assign(token, { type, source, end: [nl] });\n break;\n }\n default: {\n const indent = 'indent' in token ? token.indent : -1;\n const end = 'end' in token && Array.isArray(token.end)\n ? token.end.filter(st => st.type === 'space' ||\n st.type === 'comment' ||\n st.type === 'newline')\n : [];\n for (const key of Object.keys(token))\n if (key !== 'type' && key !== 'offset')\n delete token[key];\n Object.assign(token, { type, indent, source, end });\n }\n }\n}\n\nexports.createScalarToken = createScalarToken;\nexports.resolveAsScalar = resolveAsScalar;\nexports.setScalarValue = setScalarValue;\n","'use strict';\n\n/**\n * Stringify a CST document, token, or collection item\n *\n * Fair warning: This applies no validation whatsoever, and\n * simply concatenates the sources in their logical order.\n */\nconst stringify = (cst) => 'type' in cst ? 
stringifyToken(cst) : stringifyItem(cst);\nfunction stringifyToken(token) {\n switch (token.type) {\n case 'block-scalar': {\n let res = '';\n for (const tok of token.props)\n res += stringifyToken(tok);\n return res + token.source;\n }\n case 'block-map':\n case 'block-seq': {\n let res = '';\n for (const item of token.items)\n res += stringifyItem(item);\n return res;\n }\n case 'flow-collection': {\n let res = token.start.source;\n for (const item of token.items)\n res += stringifyItem(item);\n for (const st of token.end)\n res += st.source;\n return res;\n }\n case 'document': {\n let res = stringifyItem(token);\n if (token.end)\n for (const st of token.end)\n res += st.source;\n return res;\n }\n default: {\n let res = token.source;\n if ('end' in token && token.end)\n for (const st of token.end)\n res += st.source;\n return res;\n }\n }\n}\nfunction stringifyItem({ start, key, sep, value }) {\n let res = '';\n for (const st of start)\n res += st.source;\n if (key)\n res += stringifyToken(key);\n if (sep)\n for (const st of sep)\n res += st.source;\n if (value)\n res += stringifyToken(value);\n return res;\n}\n\nexports.stringify = stringify;\n","'use strict';\n\nconst BREAK = Symbol('break visit');\nconst SKIP = Symbol('skip children');\nconst REMOVE = Symbol('remove item');\n/**\n * Apply a visitor to a CST document or item.\n *\n * Walks through the tree (depth-first) starting from the root, calling a\n * `visitor` function with two arguments when entering each item:\n * - `item`: The current item, which included the following members:\n * - `start: SourceToken[]` – Source tokens before the key or value,\n * possibly including its anchor or tag.\n * - `key?: Token | null` – Set for pair values. May then be `null`, if\n * the key before the `:` separator is empty.\n * - `sep?: SourceToken[]` – Source tokens between the key and the value,\n * which should include the `:` map value indicator if `value` is set.\n * - `value?: Token` – The value of a sequence item, or of a map pair.\n * - `path`: The steps from the root to the current node, as an array of\n * `['key' | 'value', number]` tuples.\n *\n * The return value of the visitor may be used to control the traversal:\n * - `undefined` (default): Do nothing and continue\n * - `visit.SKIP`: Do not visit the children of this token, continue with\n * next sibling\n * - `visit.BREAK`: Terminate traversal completely\n * - `visit.REMOVE`: Remove the current item, then continue with the next one\n * - `number`: Set the index of the next step. This is useful especially if\n * the index of the current token has changed.\n * - `function`: Define the next visitor for this item. 
After the original\n * visitor is called on item entry, next visitors are called after handling\n * a non-empty `key` and when exiting the item.\n */\nfunction visit(cst, visitor) {\n if ('type' in cst && cst.type === 'document')\n cst = { start: cst.start, value: cst.value };\n _visit(Object.freeze([]), cst, visitor);\n}\n// Without the `as symbol` casts, TS declares these in the `visit`\n// namespace using `var`, but then complains about that because\n// `unique symbol` must be `const`.\n/** Terminate visit traversal completely */\nvisit.BREAK = BREAK;\n/** Do not visit the children of the current item */\nvisit.SKIP = SKIP;\n/** Remove the current item */\nvisit.REMOVE = REMOVE;\n/** Find the item at `path` from `cst` as the root */\nvisit.itemAtPath = (cst, path) => {\n let item = cst;\n for (const [field, index] of path) {\n const tok = item?.[field];\n if (tok && 'items' in tok) {\n item = tok.items[index];\n }\n else\n return undefined;\n }\n return item;\n};\n/**\n * Get the immediate parent collection of the item at `path` from `cst` as the root.\n *\n * Throws an error if the collection is not found, which should never happen if the item itself exists.\n */\nvisit.parentCollection = (cst, path) => {\n const parent = visit.itemAtPath(cst, path.slice(0, -1));\n const field = path[path.length - 1][0];\n const coll = parent?.[field];\n if (coll && 'items' in coll)\n return coll;\n throw new Error('Parent collection not found');\n};\nfunction _visit(path, item, visitor) {\n let ctrl = visitor(item, path);\n if (typeof ctrl === 'symbol')\n return ctrl;\n for (const field of ['key', 'value']) {\n const token = item[field];\n if (token && 'items' in token) {\n for (let i = 0; i < token.items.length; ++i) {\n const ci = _visit(Object.freeze(path.concat([[field, i]])), token.items[i], visitor);\n if (typeof ci === 'number')\n i = ci - 1;\n else if (ci === BREAK)\n return BREAK;\n else if (ci === REMOVE) {\n token.items.splice(i, 1);\n i -= 1;\n }\n }\n if (typeof ctrl === 'function' && field === 'key')\n ctrl = ctrl(item, path);\n }\n }\n return typeof ctrl === 'function' ? ctrl(item, path) : ctrl;\n}\n\nexports.visit = visit;\n","'use strict';\n\nvar cstScalar = require('./cst-scalar.js');\nvar cstStringify = require('./cst-stringify.js');\nvar cstVisit = require('./cst-visit.js');\n\n/** The byte order mark */\nconst BOM = '\\u{FEFF}';\n/** Start of doc-mode */\nconst DOCUMENT = '\\x02'; // C0: Start of Text\n/** Unexpected end of flow-mode */\nconst FLOW_END = '\\x18'; // C0: Cancel\n/** Next token is a scalar value */\nconst SCALAR = '\\x1f'; // C0: Unit Separator\n/** @returns `true` if `token` is a flow or block collection */\nconst isCollection = (token) => !!token && 'items' in token;\n/** @returns `true` if `token` is a flow or block scalar; not an alias */\nconst isScalar = (token) => !!token &&\n (token.type === 'scalar' ||\n token.type === 'single-quoted-scalar' ||\n token.type === 'double-quoted-scalar' ||\n token.type === 'block-scalar');\n/* istanbul ignore next */\n/** Get a printable representation of a lexer token */\nfunction prettyToken(token) {\n switch (token) {\n case BOM:\n return '';\n case DOCUMENT:\n return '';\n case FLOW_END:\n return '';\n case SCALAR:\n return '';\n default:\n return JSON.stringify(token);\n }\n}\n/** Identify the type of a lexer token. May return `null` for unknown tokens. 
*/\nfunction tokenType(source) {\n switch (source) {\n case BOM:\n return 'byte-order-mark';\n case DOCUMENT:\n return 'doc-mode';\n case FLOW_END:\n return 'flow-error-end';\n case SCALAR:\n return 'scalar';\n case '---':\n return 'doc-start';\n case '...':\n return 'doc-end';\n case '':\n case '\\n':\n case '\\r\\n':\n return 'newline';\n case '-':\n return 'seq-item-ind';\n case '?':\n return 'explicit-key-ind';\n case ':':\n return 'map-value-ind';\n case '{':\n return 'flow-map-start';\n case '}':\n return 'flow-map-end';\n case '[':\n return 'flow-seq-start';\n case ']':\n return 'flow-seq-end';\n case ',':\n return 'comma';\n }\n switch (source[0]) {\n case ' ':\n case '\\t':\n return 'space';\n case '#':\n return 'comment';\n case '%':\n return 'directive-line';\n case '*':\n return 'alias';\n case '&':\n return 'anchor';\n case '!':\n return 'tag';\n case \"'\":\n return 'single-quoted-scalar';\n case '\"':\n return 'double-quoted-scalar';\n case '|':\n case '>':\n return 'block-scalar-header';\n }\n return null;\n}\n\nexports.createScalarToken = cstScalar.createScalarToken;\nexports.resolveAsScalar = cstScalar.resolveAsScalar;\nexports.setScalarValue = cstScalar.setScalarValue;\nexports.stringify = cstStringify.stringify;\nexports.visit = cstVisit.visit;\nexports.BOM = BOM;\nexports.DOCUMENT = DOCUMENT;\nexports.FLOW_END = FLOW_END;\nexports.SCALAR = SCALAR;\nexports.isCollection = isCollection;\nexports.isScalar = isScalar;\nexports.prettyToken = prettyToken;\nexports.tokenType = tokenType;\n","'use strict';\n\nvar cst = require('./cst.js');\n\n/*\nSTART -> stream\n\nstream\n directive -> line-end -> stream\n indent + line-end -> stream\n [else] -> line-start\n\nline-end\n comment -> line-end\n newline -> .\n input-end -> END\n\nline-start\n doc-start -> doc\n doc-end -> stream\n [else] -> indent -> block-start\n\nblock-start\n seq-item-start -> block-start\n explicit-key-start -> block-start\n map-value-start -> block-start\n [else] -> doc\n\ndoc\n line-end -> line-start\n spaces -> doc\n anchor -> doc\n tag -> doc\n flow-start -> flow -> doc\n flow-end -> error -> doc\n seq-item-start -> error -> doc\n explicit-key-start -> error -> doc\n map-value-start -> doc\n alias -> doc\n quote-start -> quoted-scalar -> doc\n block-scalar-header -> line-end -> block-scalar(min) -> line-start\n [else] -> plain-scalar(false, min) -> doc\n\nflow\n line-end -> flow\n spaces -> flow\n anchor -> flow\n tag -> flow\n flow-start -> flow -> flow\n flow-end -> .\n seq-item-start -> error -> flow\n explicit-key-start -> flow\n map-value-start -> flow\n alias -> flow\n quote-start -> quoted-scalar -> flow\n comma -> flow\n [else] -> plain-scalar(true, 0) -> flow\n\nquoted-scalar\n quote-end -> .\n [else] -> quoted-scalar\n\nblock-scalar(min)\n newline + peek(indent < min) -> .\n [else] -> block-scalar(min)\n\nplain-scalar(is-flow, min)\n scalar-end(is-flow) -> .\n peek(newline + (indent < min)) -> .\n [else] -> plain-scalar(min)\n*/\nfunction isEmpty(ch) {\n switch (ch) {\n case undefined:\n case ' ':\n case '\\n':\n case '\\r':\n case '\\t':\n return true;\n default:\n return false;\n }\n}\nconst hexDigits = '0123456789ABCDEFabcdef'.split('');\nconst tagChars = \"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-#;/?:@&=+$_.!~*'()\".split('');\nconst invalidFlowScalarChars = ',[]{}'.split('');\nconst invalidAnchorChars = ' ,[]{}\\n\\r\\t'.split('');\nconst isNotAnchorChar = (ch) => !ch || invalidAnchorChars.includes(ch);\n/**\n * Splits an input string into lexical tokens, i.e. 
smaller strings that are\n * easily identifiable by `tokens.tokenType()`.\n *\n * Lexing starts always in a \"stream\" context. Incomplete input may be buffered\n * until a complete token can be emitted.\n *\n * In addition to slices of the original input, the following control characters\n * may also be emitted:\n *\n * - `\\x02` (Start of Text): A document starts with the next token\n * - `\\x18` (Cancel): Unexpected end of flow-mode (indicates an error)\n * - `\\x1f` (Unit Separator): Next token is a scalar value\n * - `\\u{FEFF}` (Byte order mark): Emitted separately outside documents\n */\nclass Lexer {\n constructor() {\n /**\n * Flag indicating whether the end of the current buffer marks the end of\n * all input\n */\n this.atEnd = false;\n /**\n * Explicit indent set in block scalar header, as an offset from the current\n * minimum indent, so e.g. set to 1 from a header `|2+`. Set to -1 if not\n * explicitly set.\n */\n this.blockScalarIndent = -1;\n /**\n * Block scalars that include a + (keep) chomping indicator in their header\n * include trailing empty lines, which are otherwise excluded from the\n * scalar's contents.\n */\n this.blockScalarKeep = false;\n /** Current input */\n this.buffer = '';\n /**\n * Flag noting whether the map value indicator : can immediately follow this\n * node within a flow context.\n */\n this.flowKey = false;\n /** Count of surrounding flow collection levels. */\n this.flowLevel = 0;\n /**\n * Minimum level of indentation required for next lines to be parsed as a\n * part of the current scalar value.\n */\n this.indentNext = 0;\n /** Indentation level of the current line. */\n this.indentValue = 0;\n /** Position of the next \\n character. */\n this.lineEndPos = null;\n /** Stores the state of the lexer if reaching the end of incpomplete input */\n this.next = null;\n /** A pointer to `buffer`; the current position of the lexer. */\n this.pos = 0;\n }\n /**\n * Generate YAML tokens from the `source` string. If `incomplete`,\n * a part of the last line may be left as a buffer for the next call.\n *\n * @returns A generator of lexical tokens\n */\n *lex(source, incomplete = false) {\n if (source) {\n this.buffer = this.buffer ? this.buffer + source : source;\n this.lineEndPos = null;\n }\n this.atEnd = !incomplete;\n let next = this.next ?? 'stream';\n while (next && (incomplete || this.hasChars(1)))\n next = yield* this.parseNext(next);\n }\n atLineEnd() {\n let i = this.pos;\n let ch = this.buffer[i];\n while (ch === ' ' || ch === '\\t')\n ch = this.buffer[++i];\n if (!ch || ch === '#' || ch === '\\n')\n return true;\n if (ch === '\\r')\n return this.buffer[i + 1] === '\\n';\n return false;\n }\n charAt(n) {\n return this.buffer[this.pos + n];\n }\n continueScalar(offset) {\n let ch = this.buffer[offset];\n if (this.indentNext > 0) {\n let indent = 0;\n while (ch === ' ')\n ch = this.buffer[++indent + offset];\n if (ch === '\\r') {\n const next = this.buffer[indent + offset + 1];\n if (next === '\\n' || (!next && !this.atEnd))\n return offset + indent + 1;\n }\n return ch === '\\n' || indent >= this.indentNext || (!ch && !this.atEnd)\n ? 
offset + indent\n : -1;\n }\n if (ch === '-' || ch === '.') {\n const dt = this.buffer.substr(offset, 3);\n if ((dt === '---' || dt === '...') && isEmpty(this.buffer[offset + 3]))\n return -1;\n }\n return offset;\n }\n getLine() {\n let end = this.lineEndPos;\n if (typeof end !== 'number' || (end !== -1 && end < this.pos)) {\n end = this.buffer.indexOf('\\n', this.pos);\n this.lineEndPos = end;\n }\n if (end === -1)\n return this.atEnd ? this.buffer.substring(this.pos) : null;\n if (this.buffer[end - 1] === '\\r')\n end -= 1;\n return this.buffer.substring(this.pos, end);\n }\n hasChars(n) {\n return this.pos + n <= this.buffer.length;\n }\n setNext(state) {\n this.buffer = this.buffer.substring(this.pos);\n this.pos = 0;\n this.lineEndPos = null;\n this.next = state;\n return null;\n }\n peek(n) {\n return this.buffer.substr(this.pos, n);\n }\n *parseNext(next) {\n switch (next) {\n case 'stream':\n return yield* this.parseStream();\n case 'line-start':\n return yield* this.parseLineStart();\n case 'block-start':\n return yield* this.parseBlockStart();\n case 'doc':\n return yield* this.parseDocument();\n case 'flow':\n return yield* this.parseFlowCollection();\n case 'quoted-scalar':\n return yield* this.parseQuotedScalar();\n case 'block-scalar':\n return yield* this.parseBlockScalar();\n case 'plain-scalar':\n return yield* this.parsePlainScalar();\n }\n }\n *parseStream() {\n let line = this.getLine();\n if (line === null)\n return this.setNext('stream');\n if (line[0] === cst.BOM) {\n yield* this.pushCount(1);\n line = line.substring(1);\n }\n if (line[0] === '%') {\n let dirEnd = line.length;\n const cs = line.indexOf('#');\n if (cs !== -1) {\n const ch = line[cs - 1];\n if (ch === ' ' || ch === '\\t')\n dirEnd = cs - 1;\n }\n while (true) {\n const ch = line[dirEnd - 1];\n if (ch === ' ' || ch === '\\t')\n dirEnd -= 1;\n else\n break;\n }\n const n = (yield* this.pushCount(dirEnd)) + (yield* this.pushSpaces(true));\n yield* this.pushCount(line.length - n); // possible comment\n this.pushNewline();\n return 'stream';\n }\n if (this.atLineEnd()) {\n const sp = yield* this.pushSpaces(true);\n yield* this.pushCount(line.length - sp);\n yield* this.pushNewline();\n return 'stream';\n }\n yield cst.DOCUMENT;\n return yield* this.parseLineStart();\n }\n *parseLineStart() {\n const ch = this.charAt(0);\n if (!ch && !this.atEnd)\n return this.setNext('line-start');\n if (ch === '-' || ch === '.') {\n if (!this.atEnd && !this.hasChars(4))\n return this.setNext('line-start');\n const s = this.peek(3);\n if (s === '---' && isEmpty(this.charAt(3))) {\n yield* this.pushCount(3);\n this.indentValue = 0;\n this.indentNext = 0;\n return 'doc';\n }\n else if (s === '...' && isEmpty(this.charAt(3))) {\n yield* this.pushCount(3);\n return 'stream';\n }\n }\n this.indentValue = yield* this.pushSpaces(false);\n if (this.indentNext > this.indentValue && !isEmpty(this.charAt(1)))\n this.indentNext = this.indentValue;\n return yield* this.parseBlockStart();\n }\n *parseBlockStart() {\n const [ch0, ch1] = this.peek(2);\n if (!ch1 && !this.atEnd)\n return this.setNext('block-start');\n if ((ch0 === '-' || ch0 === '?' 
|| ch0 === ':') && isEmpty(ch1)) {\n const n = (yield* this.pushCount(1)) + (yield* this.pushSpaces(true));\n this.indentNext = this.indentValue + 1;\n this.indentValue += n;\n return yield* this.parseBlockStart();\n }\n return 'doc';\n }\n *parseDocument() {\n yield* this.pushSpaces(true);\n const line = this.getLine();\n if (line === null)\n return this.setNext('doc');\n let n = yield* this.pushIndicators();\n switch (line[n]) {\n case '#':\n yield* this.pushCount(line.length - n);\n // fallthrough\n case undefined:\n yield* this.pushNewline();\n return yield* this.parseLineStart();\n case '{':\n case '[':\n yield* this.pushCount(1);\n this.flowKey = false;\n this.flowLevel = 1;\n return 'flow';\n case '}':\n case ']':\n // this is an error\n yield* this.pushCount(1);\n return 'doc';\n case '*':\n yield* this.pushUntil(isNotAnchorChar);\n return 'doc';\n case '\"':\n case \"'\":\n return yield* this.parseQuotedScalar();\n case '|':\n case '>':\n n += yield* this.parseBlockScalarHeader();\n n += yield* this.pushSpaces(true);\n yield* this.pushCount(line.length - n);\n yield* this.pushNewline();\n return yield* this.parseBlockScalar();\n default:\n return yield* this.parsePlainScalar();\n }\n }\n *parseFlowCollection() {\n let nl, sp;\n let indent = -1;\n do {\n nl = yield* this.pushNewline();\n if (nl > 0) {\n sp = yield* this.pushSpaces(false);\n this.indentValue = indent = sp;\n }\n else {\n sp = 0;\n }\n sp += yield* this.pushSpaces(true);\n } while (nl + sp > 0);\n const line = this.getLine();\n if (line === null)\n return this.setNext('flow');\n if ((indent !== -1 && indent < this.indentNext && line[0] !== '#') ||\n (indent === 0 &&\n (line.startsWith('---') || line.startsWith('...')) &&\n isEmpty(line[3]))) {\n // Allowing for the terminal ] or } at the same (rather than greater)\n // indent level as the initial [ or { is technically invalid, but\n // failing here would be surprising to users.\n const atFlowEndMarker = indent === this.indentNext - 1 &&\n this.flowLevel === 1 &&\n (line[0] === ']' || line[0] === '}');\n if (!atFlowEndMarker) {\n // this is an error\n this.flowLevel = 0;\n yield cst.FLOW_END;\n return yield* this.parseLineStart();\n }\n }\n let n = 0;\n while (line[n] === ',') {\n n += yield* this.pushCount(1);\n n += yield* this.pushSpaces(true);\n this.flowKey = false;\n }\n n += yield* this.pushIndicators();\n switch (line[n]) {\n case undefined:\n return 'flow';\n case '#':\n yield* this.pushCount(line.length - n);\n return 'flow';\n case '{':\n case '[':\n yield* this.pushCount(1);\n this.flowKey = false;\n this.flowLevel += 1;\n return 'flow';\n case '}':\n case ']':\n yield* this.pushCount(1);\n this.flowKey = true;\n this.flowLevel -= 1;\n return this.flowLevel ? 
'flow' : 'doc';\n case '*':\n yield* this.pushUntil(isNotAnchorChar);\n return 'flow';\n case '\"':\n case \"'\":\n this.flowKey = true;\n return yield* this.parseQuotedScalar();\n case ':': {\n const next = this.charAt(1);\n if (this.flowKey || isEmpty(next) || next === ',') {\n this.flowKey = false;\n yield* this.pushCount(1);\n yield* this.pushSpaces(true);\n return 'flow';\n }\n }\n // fallthrough\n default:\n this.flowKey = false;\n return yield* this.parsePlainScalar();\n }\n }\n *parseQuotedScalar() {\n const quote = this.charAt(0);\n let end = this.buffer.indexOf(quote, this.pos + 1);\n if (quote === \"'\") {\n while (end !== -1 && this.buffer[end + 1] === \"'\")\n end = this.buffer.indexOf(\"'\", end + 2);\n }\n else {\n // double-quote\n while (end !== -1) {\n let n = 0;\n while (this.buffer[end - 1 - n] === '\\\\')\n n += 1;\n if (n % 2 === 0)\n break;\n end = this.buffer.indexOf('\"', end + 1);\n }\n }\n // Only looking for newlines within the quotes\n const qb = this.buffer.substring(0, end);\n let nl = qb.indexOf('\\n', this.pos);\n if (nl !== -1) {\n while (nl !== -1) {\n const cs = this.continueScalar(nl + 1);\n if (cs === -1)\n break;\n nl = qb.indexOf('\\n', cs);\n }\n if (nl !== -1) {\n // this is an error caused by an unexpected unindent\n end = nl - (qb[nl - 1] === '\\r' ? 2 : 1);\n }\n }\n if (end === -1) {\n if (!this.atEnd)\n return this.setNext('quoted-scalar');\n end = this.buffer.length;\n }\n yield* this.pushToIndex(end + 1, false);\n return this.flowLevel ? 'flow' : 'doc';\n }\n *parseBlockScalarHeader() {\n this.blockScalarIndent = -1;\n this.blockScalarKeep = false;\n let i = this.pos;\n while (true) {\n const ch = this.buffer[++i];\n if (ch === '+')\n this.blockScalarKeep = true;\n else if (ch > '0' && ch <= '9')\n this.blockScalarIndent = Number(ch) - 1;\n else if (ch !== '-')\n break;\n }\n return yield* this.pushUntil(ch => isEmpty(ch) || ch === '#');\n }\n *parseBlockScalar() {\n let nl = this.pos - 1; // may be -1 if this.pos === 0\n let indent = 0;\n let ch;\n loop: for (let i = this.pos; (ch = this.buffer[i]); ++i) {\n switch (ch) {\n case ' ':\n indent += 1;\n break;\n case '\\n':\n nl = i;\n indent = 0;\n break;\n case '\\r': {\n const next = this.buffer[i + 1];\n if (!next && !this.atEnd)\n return this.setNext('block-scalar');\n if (next === '\\n')\n break;\n } // fallthrough\n default:\n break loop;\n }\n }\n if (!ch && !this.atEnd)\n return this.setNext('block-scalar');\n if (indent >= this.indentNext) {\n if (this.blockScalarIndent === -1)\n this.indentNext = indent;\n else\n this.indentNext += this.blockScalarIndent;\n do {\n const cs = this.continueScalar(nl + 1);\n if (cs === -1)\n break;\n nl = this.buffer.indexOf('\\n', cs);\n } while (nl !== -1);\n if (nl === -1) {\n if (!this.atEnd)\n return this.setNext('block-scalar');\n nl = this.buffer.length;\n }\n }\n if (!this.blockScalarKeep) {\n do {\n let i = nl - 1;\n let ch = this.buffer[i];\n if (ch === '\\r')\n ch = this.buffer[--i];\n const lastChar = i; // Drop the line if last char not more indented\n while (ch === ' ' || ch === '\\t')\n ch = this.buffer[--i];\n if (ch === '\\n' && i >= this.pos && i + 1 + indent > lastChar)\n nl = i;\n else\n break;\n } while (true);\n }\n yield cst.SCALAR;\n yield* this.pushToIndex(nl + 1, true);\n return yield* this.parseLineStart();\n }\n *parsePlainScalar() {\n const inFlow = this.flowLevel > 0;\n let end = this.pos - 1;\n let i = this.pos - 1;\n let ch;\n while ((ch = this.buffer[++i])) {\n if (ch === ':') {\n const next = this.buffer[i + 1];\n if 
(isEmpty(next) || (inFlow && next === ','))\n break;\n end = i;\n }\n else if (isEmpty(ch)) {\n let next = this.buffer[i + 1];\n if (ch === '\\r') {\n if (next === '\\n') {\n i += 1;\n ch = '\\n';\n next = this.buffer[i + 1];\n }\n else\n end = i;\n }\n if (next === '#' || (inFlow && invalidFlowScalarChars.includes(next)))\n break;\n if (ch === '\\n') {\n const cs = this.continueScalar(i + 1);\n if (cs === -1)\n break;\n i = Math.max(i, cs - 2); // to advance, but still account for ' #'\n }\n }\n else {\n if (inFlow && invalidFlowScalarChars.includes(ch))\n break;\n end = i;\n }\n }\n if (!ch && !this.atEnd)\n return this.setNext('plain-scalar');\n yield cst.SCALAR;\n yield* this.pushToIndex(end + 1, true);\n return inFlow ? 'flow' : 'doc';\n }\n *pushCount(n) {\n if (n > 0) {\n yield this.buffer.substr(this.pos, n);\n this.pos += n;\n return n;\n }\n return 0;\n }\n *pushToIndex(i, allowEmpty) {\n const s = this.buffer.slice(this.pos, i);\n if (s) {\n yield s;\n this.pos += s.length;\n return s.length;\n }\n else if (allowEmpty)\n yield '';\n return 0;\n }\n *pushIndicators() {\n switch (this.charAt(0)) {\n case '!':\n return ((yield* this.pushTag()) +\n (yield* this.pushSpaces(true)) +\n (yield* this.pushIndicators()));\n case '&':\n return ((yield* this.pushUntil(isNotAnchorChar)) +\n (yield* this.pushSpaces(true)) +\n (yield* this.pushIndicators()));\n case '-': // this is an error\n case '?': // this is an error outside flow collections\n case ':': {\n const inFlow = this.flowLevel > 0;\n const ch1 = this.charAt(1);\n if (isEmpty(ch1) || (inFlow && invalidFlowScalarChars.includes(ch1))) {\n if (!inFlow)\n this.indentNext = this.indentValue + 1;\n else if (this.flowKey)\n this.flowKey = false;\n return ((yield* this.pushCount(1)) +\n (yield* this.pushSpaces(true)) +\n (yield* this.pushIndicators()));\n }\n }\n }\n return 0;\n }\n *pushTag() {\n if (this.charAt(1) === '<') {\n let i = this.pos + 2;\n let ch = this.buffer[i];\n while (!isEmpty(ch) && ch !== '>')\n ch = this.buffer[++i];\n return yield* this.pushToIndex(ch === '>' ? i + 1 : i, false);\n }\n else {\n let i = this.pos + 1;\n let ch = this.buffer[i];\n while (ch) {\n if (tagChars.includes(ch))\n ch = this.buffer[++i];\n else if (ch === '%' &&\n hexDigits.includes(this.buffer[i + 1]) &&\n hexDigits.includes(this.buffer[i + 2])) {\n ch = this.buffer[(i += 3)];\n }\n else\n break;\n }\n return yield* this.pushToIndex(i, false);\n }\n }\n *pushNewline() {\n const ch = this.buffer[this.pos];\n if (ch === '\\n')\n return yield* this.pushCount(1);\n else if (ch === '\\r' && this.charAt(1) === '\\n')\n return yield* this.pushCount(2);\n else\n return 0;\n }\n *pushSpaces(allowTabs) {\n let i = this.pos - 1;\n let ch;\n do {\n ch = this.buffer[++i];\n } while (ch === ' ' || (allowTabs && ch === '\\t'));\n const n = i - this.pos;\n if (n > 0) {\n yield this.buffer.substr(this.pos, n);\n this.pos = i;\n }\n return n;\n }\n *pushUntil(test) {\n let i = this.pos;\n let ch = this.buffer[i];\n while (!test(ch))\n ch = this.buffer[++i];\n return yield* this.pushToIndex(i, false);\n }\n}\n\nexports.Lexer = Lexer;\n","'use strict';\n\n/**\n * Tracks newlines during parsing in order to provide an efficient API for\n * determining the one-indexed `{ line, col }` position for any offset\n * within the input.\n */\nclass LineCounter {\n constructor() {\n this.lineStarts = [];\n /**\n * Should be called in ascending order. 
Otherwise, call\n * `lineCounter.lineStarts.sort()` before calling `linePos()`.\n */\n this.addNewLine = (offset) => this.lineStarts.push(offset);\n /**\n * Performs a binary search and returns the 1-indexed { line, col }\n * position of `offset`. If `line === 0`, `addNewLine` has never been\n * called or `offset` is before the first known newline.\n */\n this.linePos = (offset) => {\n let low = 0;\n let high = this.lineStarts.length;\n while (low < high) {\n const mid = (low + high) >> 1; // Math.floor((low + high) / 2)\n if (this.lineStarts[mid] < offset)\n low = mid + 1;\n else\n high = mid;\n }\n if (this.lineStarts[low] === offset)\n return { line: low + 1, col: 1 };\n if (low === 0)\n return { line: 0, col: offset };\n const start = this.lineStarts[low - 1];\n return { line: low, col: offset - start + 1 };\n };\n }\n}\n\nexports.LineCounter = LineCounter;\n","'use strict';\n\nvar cst = require('./cst.js');\nvar lexer = require('./lexer.js');\n\nfunction includesToken(list, type) {\n for (let i = 0; i < list.length; ++i)\n if (list[i].type === type)\n return true;\n return false;\n}\nfunction findNonEmptyIndex(list) {\n for (let i = 0; i < list.length; ++i) {\n switch (list[i].type) {\n case 'space':\n case 'comment':\n case 'newline':\n break;\n default:\n return i;\n }\n }\n return -1;\n}\nfunction isFlowToken(token) {\n switch (token?.type) {\n case 'alias':\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar':\n case 'flow-collection':\n return true;\n default:\n return false;\n }\n}\nfunction getPrevProps(parent) {\n switch (parent.type) {\n case 'document':\n return parent.start;\n case 'block-map': {\n const it = parent.items[parent.items.length - 1];\n return it.sep ?? it.start;\n }\n case 'block-seq':\n return parent.items[parent.items.length - 1].start;\n /* istanbul ignore next should not happen */\n default:\n return [];\n }\n}\n/** Note: May modify input array */\nfunction getFirstKeyStartProps(prev) {\n if (prev.length === 0)\n return [];\n let i = prev.length;\n loop: while (--i >= 0) {\n switch (prev[i].type) {\n case 'doc-start':\n case 'explicit-key-ind':\n case 'map-value-ind':\n case 'seq-item-ind':\n case 'newline':\n break loop;\n }\n }\n while (prev[++i]?.type === 'space') {\n /* loop */\n }\n return prev.splice(i, prev.length);\n}\nfunction fixFlowSeqItems(fc) {\n if (fc.start.type === 'flow-seq-start') {\n for (const it of fc.items) {\n if (it.sep &&\n !it.value &&\n !includesToken(it.start, 'explicit-key-ind') &&\n !includesToken(it.sep, 'map-value-ind')) {\n if (it.key)\n it.value = it.key;\n delete it.key;\n if (isFlowToken(it.value)) {\n if (it.value.end)\n Array.prototype.push.apply(it.value.end, it.sep);\n else\n it.value.end = it.sep;\n }\n else\n Array.prototype.push.apply(it.start, it.sep);\n delete it.sep;\n }\n }\n }\n}\n/**\n * A YAML concrete syntax tree (CST) parser\n *\n * ```ts\n * const src: string = ...\n * for (const token of new Parser().parse(src)) {\n * // token: Token\n * }\n * ```\n *\n * To use the parser with a user-provided lexer:\n *\n * ```ts\n * function* parse(source: string, lexer: Lexer) {\n * const parser = new Parser()\n * for (const lexeme of lexer.lex(source))\n * yield* parser.next(lexeme)\n * yield* parser.end()\n * }\n *\n * const src: string = ...\n * const lexer = new Lexer()\n * for (const token of parse(src, lexer)) {\n * // token: Token\n * }\n * ```\n */\nclass Parser {\n /**\n * @param onNewLine - If defined, called separately with the start position of\n * each new line (in `parse()`, 
including the start of input).\n */\n constructor(onNewLine) {\n /** If true, space and sequence indicators count as indentation */\n this.atNewLine = true;\n /** If true, next token is a scalar value */\n this.atScalar = false;\n /** Current indentation level */\n this.indent = 0;\n /** Current offset since the start of parsing */\n this.offset = 0;\n /** On the same line with a block map key */\n this.onKeyLine = false;\n /** Top indicates the node that's currently being built */\n this.stack = [];\n /** The source of the current token, set in parse() */\n this.source = '';\n /** The type of the current token, set in parse() */\n this.type = '';\n // Must be defined after `next()`\n this.lexer = new lexer.Lexer();\n this.onNewLine = onNewLine;\n }\n /**\n * Parse `source` as a YAML stream.\n * If `incomplete`, a part of the last line may be left as a buffer for the next call.\n *\n * Errors are not thrown, but yielded as `{ type: 'error', message }` tokens.\n *\n * @returns A generator of tokens representing each directive, document, and other structure.\n */\n *parse(source, incomplete = false) {\n if (this.onNewLine && this.offset === 0)\n this.onNewLine(0);\n for (const lexeme of this.lexer.lex(source, incomplete))\n yield* this.next(lexeme);\n if (!incomplete)\n yield* this.end();\n }\n /**\n * Advance the parser by the `source` of one lexical token.\n */\n *next(source) {\n this.source = source;\n if (process.env.LOG_TOKENS)\n console.log('|', cst.prettyToken(source));\n if (this.atScalar) {\n this.atScalar = false;\n yield* this.step();\n this.offset += source.length;\n return;\n }\n const type = cst.tokenType(source);\n if (!type) {\n const message = `Not a YAML token: ${source}`;\n yield* this.pop({ type: 'error', offset: this.offset, message, source });\n this.offset += source.length;\n }\n else if (type === 'scalar') {\n this.atNewLine = false;\n this.atScalar = true;\n this.type = 'scalar';\n }\n else {\n this.type = type;\n yield* this.step();\n switch (type) {\n case 'newline':\n this.atNewLine = true;\n this.indent = 0;\n if (this.onNewLine)\n this.onNewLine(this.offset + source.length);\n break;\n case 'space':\n if (this.atNewLine && source[0] === ' ')\n this.indent += source.length;\n break;\n case 'explicit-key-ind':\n case 'map-value-ind':\n case 'seq-item-ind':\n if (this.atNewLine)\n this.indent += source.length;\n break;\n case 'doc-mode':\n case 'flow-error-end':\n return;\n default:\n this.atNewLine = false;\n }\n this.offset += source.length;\n }\n }\n /** Call at end of input to push out any remaining constructions */\n *end() {\n while (this.stack.length > 0)\n yield* this.pop();\n }\n get sourceToken() {\n const st = {\n type: this.type,\n offset: this.offset,\n indent: this.indent,\n source: this.source\n };\n return st;\n }\n *step() {\n const top = this.peek(1);\n if (this.type === 'doc-end' && (!top || top.type !== 'doc-end')) {\n while (this.stack.length > 0)\n yield* this.pop();\n this.stack.push({\n type: 'doc-end',\n offset: this.offset,\n source: this.source\n });\n return;\n }\n if (!top)\n return yield* this.stream();\n switch (top.type) {\n case 'document':\n return yield* this.document(top);\n case 'alias':\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar':\n return yield* this.scalar(top);\n case 'block-scalar':\n return yield* this.blockScalar(top);\n case 'block-map':\n return yield* this.blockMap(top);\n case 'block-seq':\n return yield* this.blockSequence(top);\n case 'flow-collection':\n return yield* 
this.flowCollection(top);\n case 'doc-end':\n return yield* this.documentEnd(top);\n }\n /* istanbul ignore next should not happen */\n yield* this.pop();\n }\n peek(n) {\n return this.stack[this.stack.length - n];\n }\n *pop(error) {\n const token = error ?? this.stack.pop();\n /* istanbul ignore if should not happen */\n if (!token) {\n const message = 'Tried to pop an empty stack';\n yield { type: 'error', offset: this.offset, source: '', message };\n }\n else if (this.stack.length === 0) {\n yield token;\n }\n else {\n const top = this.peek(1);\n if (token.type === 'block-scalar') {\n // Block scalars use their parent rather than header indent\n token.indent = 'indent' in top ? top.indent : 0;\n }\n else if (token.type === 'flow-collection' && top.type === 'document') {\n // Ignore all indent for top-level flow collections\n token.indent = 0;\n }\n if (token.type === 'flow-collection')\n fixFlowSeqItems(token);\n switch (top.type) {\n case 'document':\n top.value = token;\n break;\n case 'block-scalar':\n top.props.push(token); // error\n break;\n case 'block-map': {\n const it = top.items[top.items.length - 1];\n if (it.value) {\n top.items.push({ start: [], key: token, sep: [] });\n this.onKeyLine = true;\n return;\n }\n else if (it.sep) {\n it.value = token;\n }\n else {\n Object.assign(it, { key: token, sep: [] });\n this.onKeyLine = !includesToken(it.start, 'explicit-key-ind');\n return;\n }\n break;\n }\n case 'block-seq': {\n const it = top.items[top.items.length - 1];\n if (it.value)\n top.items.push({ start: [], value: token });\n else\n it.value = token;\n break;\n }\n case 'flow-collection': {\n const it = top.items[top.items.length - 1];\n if (!it || it.value)\n top.items.push({ start: [], key: token, sep: [] });\n else if (it.sep)\n it.value = token;\n else\n Object.assign(it, { key: token, sep: [] });\n return;\n }\n /* istanbul ignore next should not happen */\n default:\n yield* this.pop();\n yield* this.pop(token);\n }\n if ((top.type === 'document' ||\n top.type === 'block-map' ||\n top.type === 'block-seq') &&\n (token.type === 'block-map' || token.type === 'block-seq')) {\n const last = token.items[token.items.length - 1];\n if (last &&\n !last.sep &&\n !last.value &&\n last.start.length > 0 &&\n findNonEmptyIndex(last.start) === -1 &&\n (token.indent === 0 ||\n last.start.every(st => st.type !== 'comment' || st.indent < token.indent))) {\n if (top.type === 'document')\n top.end = last.start;\n else\n top.items.push({ start: last.start });\n token.items.splice(-1, 1);\n }\n }\n }\n }\n *stream() {\n switch (this.type) {\n case 'directive-line':\n yield { type: 'directive', offset: this.offset, source: this.source };\n return;\n case 'byte-order-mark':\n case 'space':\n case 'comment':\n case 'newline':\n yield this.sourceToken;\n return;\n case 'doc-mode':\n case 'doc-start': {\n const doc = {\n type: 'document',\n offset: this.offset,\n start: []\n };\n if (this.type === 'doc-start')\n doc.start.push(this.sourceToken);\n this.stack.push(doc);\n return;\n }\n }\n yield {\n type: 'error',\n offset: this.offset,\n message: `Unexpected ${this.type} token in YAML stream`,\n source: this.source\n };\n }\n *document(doc) {\n if (doc.value)\n return yield* this.lineEnd(doc);\n switch (this.type) {\n case 'doc-start': {\n if (findNonEmptyIndex(doc.start) !== -1) {\n yield* this.pop();\n yield* this.step();\n }\n else\n doc.start.push(this.sourceToken);\n return;\n }\n case 'anchor':\n case 'tag':\n case 'space':\n case 'comment':\n case 'newline':\n 
doc.start.push(this.sourceToken);\n return;\n }\n const bv = this.startBlockValue(doc);\n if (bv)\n this.stack.push(bv);\n else {\n yield {\n type: 'error',\n offset: this.offset,\n message: `Unexpected ${this.type} token in YAML document`,\n source: this.source\n };\n }\n }\n *scalar(scalar) {\n if (this.type === 'map-value-ind') {\n const prev = getPrevProps(this.peek(2));\n const start = getFirstKeyStartProps(prev);\n let sep;\n if (scalar.end) {\n sep = scalar.end;\n sep.push(this.sourceToken);\n delete scalar.end;\n }\n else\n sep = [this.sourceToken];\n const map = {\n type: 'block-map',\n offset: scalar.offset,\n indent: scalar.indent,\n items: [{ start, key: scalar, sep }]\n };\n this.onKeyLine = true;\n this.stack[this.stack.length - 1] = map;\n }\n else\n yield* this.lineEnd(scalar);\n }\n *blockScalar(scalar) {\n switch (this.type) {\n case 'space':\n case 'comment':\n case 'newline':\n scalar.props.push(this.sourceToken);\n return;\n case 'scalar':\n scalar.source = this.source;\n // block-scalar source includes trailing newline\n this.atNewLine = true;\n this.indent = 0;\n if (this.onNewLine) {\n let nl = this.source.indexOf('\\n') + 1;\n while (nl !== 0) {\n this.onNewLine(this.offset + nl);\n nl = this.source.indexOf('\\n', nl) + 1;\n }\n }\n yield* this.pop();\n break;\n /* istanbul ignore next should not happen */\n default:\n yield* this.pop();\n yield* this.step();\n }\n }\n *blockMap(map) {\n const it = map.items[map.items.length - 1];\n // it.sep is true-ish if pair already has key or : separator\n switch (this.type) {\n case 'newline':\n this.onKeyLine = false;\n if (it.value) {\n const end = 'end' in it.value ? it.value.end : undefined;\n const last = Array.isArray(end) ? end[end.length - 1] : undefined;\n if (last?.type === 'comment')\n end?.push(this.sourceToken);\n else\n map.items.push({ start: [this.sourceToken] });\n }\n else if (it.sep) {\n it.sep.push(this.sourceToken);\n }\n else {\n it.start.push(this.sourceToken);\n }\n return;\n case 'space':\n case 'comment':\n if (it.value) {\n map.items.push({ start: [this.sourceToken] });\n }\n else if (it.sep) {\n it.sep.push(this.sourceToken);\n }\n else {\n if (this.atIndentedComment(it.start, map.indent)) {\n const prev = map.items[map.items.length - 2];\n const end = prev?.value?.end;\n if (Array.isArray(end)) {\n Array.prototype.push.apply(end, it.start);\n end.push(this.sourceToken);\n map.items.pop();\n return;\n }\n }\n it.start.push(this.sourceToken);\n }\n return;\n }\n if (this.indent >= map.indent) {\n const atNextItem = !this.onKeyLine && this.indent === map.indent && it.sep;\n // For empty nodes, assign newline-separated not indented empty tokens to following node\n let start = [];\n if (atNextItem && it.sep && !it.value) {\n const nl = [];\n for (let i = 0; i < it.sep.length; ++i) {\n const st = it.sep[i];\n switch (st.type) {\n case 'newline':\n nl.push(i);\n break;\n case 'space':\n break;\n case 'comment':\n if (st.indent > map.indent)\n nl.length = 0;\n break;\n default:\n nl.length = 0;\n }\n }\n if (nl.length >= 2)\n start = it.sep.splice(nl[1]);\n }\n switch (this.type) {\n case 'anchor':\n case 'tag':\n if (atNextItem || it.value) {\n start.push(this.sourceToken);\n map.items.push({ start });\n this.onKeyLine = true;\n }\n else if (it.sep) {\n it.sep.push(this.sourceToken);\n }\n else {\n it.start.push(this.sourceToken);\n }\n return;\n case 'explicit-key-ind':\n if (!it.sep && !includesToken(it.start, 'explicit-key-ind')) {\n it.start.push(this.sourceToken);\n }\n else if (atNextItem || 
it.value) {\n start.push(this.sourceToken);\n map.items.push({ start });\n }\n else {\n this.stack.push({\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start: [this.sourceToken] }]\n });\n }\n this.onKeyLine = true;\n return;\n case 'map-value-ind':\n if (includesToken(it.start, 'explicit-key-ind')) {\n if (!it.sep) {\n if (includesToken(it.start, 'newline')) {\n Object.assign(it, { key: null, sep: [this.sourceToken] });\n }\n else {\n const start = getFirstKeyStartProps(it.start);\n this.stack.push({\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start, key: null, sep: [this.sourceToken] }]\n });\n }\n }\n else if (it.value) {\n map.items.push({ start: [], key: null, sep: [this.sourceToken] });\n }\n else if (includesToken(it.sep, 'map-value-ind')) {\n this.stack.push({\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start, key: null, sep: [this.sourceToken] }]\n });\n }\n else if (isFlowToken(it.key) &&\n !includesToken(it.sep, 'newline')) {\n const start = getFirstKeyStartProps(it.start);\n const key = it.key;\n const sep = it.sep;\n sep.push(this.sourceToken);\n // @ts-expect-error type guard is wrong here\n delete it.key, delete it.sep;\n this.stack.push({\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start, key, sep }]\n });\n }\n else if (start.length > 0) {\n // Not actually at next item\n it.sep = it.sep.concat(start, this.sourceToken);\n }\n else {\n it.sep.push(this.sourceToken);\n }\n }\n else {\n if (!it.sep) {\n Object.assign(it, { key: null, sep: [this.sourceToken] });\n }\n else if (it.value || atNextItem) {\n map.items.push({ start, key: null, sep: [this.sourceToken] });\n }\n else if (includesToken(it.sep, 'map-value-ind')) {\n this.stack.push({\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start: [], key: null, sep: [this.sourceToken] }]\n });\n }\n else {\n it.sep.push(this.sourceToken);\n }\n }\n this.onKeyLine = true;\n return;\n case 'alias':\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar': {\n const fs = this.flowScalar(this.type);\n if (atNextItem || it.value) {\n map.items.push({ start, key: fs, sep: [] });\n this.onKeyLine = true;\n }\n else if (it.sep) {\n this.stack.push(fs);\n }\n else {\n Object.assign(it, { key: fs, sep: [] });\n this.onKeyLine = true;\n }\n return;\n }\n default: {\n const bv = this.startBlockValue(map);\n if (bv) {\n if (atNextItem &&\n bv.type !== 'block-seq' &&\n includesToken(it.start, 'explicit-key-ind')) {\n map.items.push({ start });\n }\n this.stack.push(bv);\n return;\n }\n }\n }\n }\n yield* this.pop();\n yield* this.step();\n }\n *blockSequence(seq) {\n const it = seq.items[seq.items.length - 1];\n switch (this.type) {\n case 'newline':\n if (it.value) {\n const end = 'end' in it.value ? it.value.end : undefined;\n const last = Array.isArray(end) ? 
end[end.length - 1] : undefined;\n if (last?.type === 'comment')\n end?.push(this.sourceToken);\n else\n seq.items.push({ start: [this.sourceToken] });\n }\n else\n it.start.push(this.sourceToken);\n return;\n case 'space':\n case 'comment':\n if (it.value)\n seq.items.push({ start: [this.sourceToken] });\n else {\n if (this.atIndentedComment(it.start, seq.indent)) {\n const prev = seq.items[seq.items.length - 2];\n const end = prev?.value?.end;\n if (Array.isArray(end)) {\n Array.prototype.push.apply(end, it.start);\n end.push(this.sourceToken);\n seq.items.pop();\n return;\n }\n }\n it.start.push(this.sourceToken);\n }\n return;\n case 'anchor':\n case 'tag':\n if (it.value || this.indent <= seq.indent)\n break;\n it.start.push(this.sourceToken);\n return;\n case 'seq-item-ind':\n if (this.indent !== seq.indent)\n break;\n if (it.value || includesToken(it.start, 'seq-item-ind'))\n seq.items.push({ start: [this.sourceToken] });\n else\n it.start.push(this.sourceToken);\n return;\n }\n if (this.indent > seq.indent) {\n const bv = this.startBlockValue(seq);\n if (bv) {\n this.stack.push(bv);\n return;\n }\n }\n yield* this.pop();\n yield* this.step();\n }\n *flowCollection(fc) {\n const it = fc.items[fc.items.length - 1];\n if (this.type === 'flow-error-end') {\n let top;\n do {\n yield* this.pop();\n top = this.peek(1);\n } while (top && top.type === 'flow-collection');\n }\n else if (fc.end.length === 0) {\n switch (this.type) {\n case 'comma':\n case 'explicit-key-ind':\n if (!it || it.sep)\n fc.items.push({ start: [this.sourceToken] });\n else\n it.start.push(this.sourceToken);\n return;\n case 'map-value-ind':\n if (!it || it.value)\n fc.items.push({ start: [], key: null, sep: [this.sourceToken] });\n else if (it.sep)\n it.sep.push(this.sourceToken);\n else\n Object.assign(it, { key: null, sep: [this.sourceToken] });\n return;\n case 'space':\n case 'comment':\n case 'newline':\n case 'anchor':\n case 'tag':\n if (!it || it.value)\n fc.items.push({ start: [this.sourceToken] });\n else if (it.sep)\n it.sep.push(this.sourceToken);\n else\n it.start.push(this.sourceToken);\n return;\n case 'alias':\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar': {\n const fs = this.flowScalar(this.type);\n if (!it || it.value)\n fc.items.push({ start: [], key: fs, sep: [] });\n else if (it.sep)\n this.stack.push(fs);\n else\n Object.assign(it, { key: fs, sep: [] });\n return;\n }\n case 'flow-map-end':\n case 'flow-seq-end':\n fc.end.push(this.sourceToken);\n return;\n }\n const bv = this.startBlockValue(fc);\n /* istanbul ignore else should not happen */\n if (bv)\n this.stack.push(bv);\n else {\n yield* this.pop();\n yield* this.step();\n }\n }\n else {\n const parent = this.peek(2);\n if (parent.type === 'block-map' &&\n ((this.type === 'map-value-ind' && parent.indent === fc.indent) ||\n (this.type === 'newline' &&\n !parent.items[parent.items.length - 1].sep))) {\n yield* this.pop();\n yield* this.step();\n }\n else if (this.type === 'map-value-ind' &&\n parent.type !== 'flow-collection') {\n const prev = getPrevProps(parent);\n const start = getFirstKeyStartProps(prev);\n fixFlowSeqItems(fc);\n const sep = fc.end.splice(1, fc.end.length);\n sep.push(this.sourceToken);\n const map = {\n type: 'block-map',\n offset: fc.offset,\n indent: fc.indent,\n items: [{ start, key: fc, sep }]\n };\n this.onKeyLine = true;\n this.stack[this.stack.length - 1] = map;\n }\n else {\n yield* this.lineEnd(fc);\n }\n }\n }\n flowScalar(type) {\n if (this.onNewLine) {\n let nl = 
this.source.indexOf('\\n') + 1;\n while (nl !== 0) {\n this.onNewLine(this.offset + nl);\n nl = this.source.indexOf('\\n', nl) + 1;\n }\n }\n return {\n type,\n offset: this.offset,\n indent: this.indent,\n source: this.source\n };\n }\n startBlockValue(parent) {\n switch (this.type) {\n case 'alias':\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar':\n return this.flowScalar(this.type);\n case 'block-scalar-header':\n return {\n type: 'block-scalar',\n offset: this.offset,\n indent: this.indent,\n props: [this.sourceToken],\n source: ''\n };\n case 'flow-map-start':\n case 'flow-seq-start':\n return {\n type: 'flow-collection',\n offset: this.offset,\n indent: this.indent,\n start: this.sourceToken,\n items: [],\n end: []\n };\n case 'seq-item-ind':\n return {\n type: 'block-seq',\n offset: this.offset,\n indent: this.indent,\n items: [{ start: [this.sourceToken] }]\n };\n case 'explicit-key-ind': {\n this.onKeyLine = true;\n const prev = getPrevProps(parent);\n const start = getFirstKeyStartProps(prev);\n start.push(this.sourceToken);\n return {\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start }]\n };\n }\n case 'map-value-ind': {\n this.onKeyLine = true;\n const prev = getPrevProps(parent);\n const start = getFirstKeyStartProps(prev);\n return {\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start, key: null, sep: [this.sourceToken] }]\n };\n }\n }\n return null;\n }\n atIndentedComment(start, indent) {\n if (this.type !== 'comment')\n return false;\n if (this.indent <= indent)\n return false;\n return start.every(st => st.type === 'newline' || st.type === 'space');\n }\n *documentEnd(docEnd) {\n if (this.type !== 'doc-mode') {\n if (docEnd.end)\n docEnd.end.push(this.sourceToken);\n else\n docEnd.end = [this.sourceToken];\n if (this.type === 'newline')\n yield* this.pop();\n }\n }\n *lineEnd(token) {\n switch (this.type) {\n case 'comma':\n case 'doc-start':\n case 'doc-end':\n case 'flow-seq-end':\n case 'flow-map-end':\n case 'map-value-ind':\n yield* this.pop();\n yield* this.step();\n break;\n case 'newline':\n this.onKeyLine = false;\n // fallthrough\n case 'space':\n case 'comment':\n default:\n // all other values are errors\n if (token.end)\n token.end.push(this.sourceToken);\n else\n token.end = [this.sourceToken];\n if (this.type === 'newline')\n yield* this.pop();\n }\n }\n}\n\nexports.Parser = Parser;\n","'use strict';\n\nvar composer = require('./compose/composer.js');\nvar Document = require('./doc/Document.js');\nvar errors = require('./errors.js');\nvar log = require('./log.js');\nvar lineCounter = require('./parse/line-counter.js');\nvar parser = require('./parse/parser.js');\n\nfunction parseOptions(options) {\n const prettyErrors = options.prettyErrors !== false;\n const lineCounter$1 = options.lineCounter || (prettyErrors && new lineCounter.LineCounter()) || null;\n return { lineCounter: lineCounter$1, prettyErrors };\n}\n/**\n * Parse the input as a stream of YAML documents.\n *\n * Documents should be separated from each other by `...` or `---` marker lines.\n *\n * @returns If an empty `docs` array is returned, it will be of type\n * EmptyStream and contain additional stream information. 
In\n * TypeScript, you should use `'empty' in docs` as a type guard for it.\n */\nfunction parseAllDocuments(source, options = {}) {\n const { lineCounter, prettyErrors } = parseOptions(options);\n const parser$1 = new parser.Parser(lineCounter?.addNewLine);\n const composer$1 = new composer.Composer(options);\n const docs = Array.from(composer$1.compose(parser$1.parse(source)));\n if (prettyErrors && lineCounter)\n for (const doc of docs) {\n doc.errors.forEach(errors.prettifyError(source, lineCounter));\n doc.warnings.forEach(errors.prettifyError(source, lineCounter));\n }\n if (docs.length > 0)\n return docs;\n return Object.assign([], { empty: true }, composer$1.streamInfo());\n}\n/** Parse an input string into a single YAML.Document */\nfunction parseDocument(source, options = {}) {\n const { lineCounter, prettyErrors } = parseOptions(options);\n const parser$1 = new parser.Parser(lineCounter?.addNewLine);\n const composer$1 = new composer.Composer(options);\n // `doc` is always set by compose.end(true) at the very latest\n let doc = null;\n for (const _doc of composer$1.compose(parser$1.parse(source), true, source.length)) {\n if (!doc)\n doc = _doc;\n else if (doc.options.logLevel !== 'silent') {\n doc.errors.push(new errors.YAMLParseError(_doc.range.slice(0, 2), 'MULTIPLE_DOCS', 'Source contains multiple documents; please use YAML.parseAllDocuments()'));\n break;\n }\n }\n if (prettyErrors && lineCounter) {\n doc.errors.forEach(errors.prettifyError(source, lineCounter));\n doc.warnings.forEach(errors.prettifyError(source, lineCounter));\n }\n return doc;\n}\nfunction parse(src, reviver, options) {\n let _reviver = undefined;\n if (typeof reviver === 'function') {\n _reviver = reviver;\n }\n else if (options === undefined && reviver && typeof reviver === 'object') {\n options = reviver;\n }\n const doc = parseDocument(src, options);\n if (!doc)\n return null;\n doc.warnings.forEach(warning => log.warn(doc.options.logLevel, warning));\n if (doc.errors.length > 0) {\n if (doc.options.logLevel !== 'silent')\n throw doc.errors[0];\n else\n doc.errors = [];\n }\n return doc.toJS(Object.assign({ reviver: _reviver }, options));\n}\nfunction stringify(value, replacer, options) {\n let _replacer = null;\n if (typeof replacer === 'function' || Array.isArray(replacer)) {\n _replacer = replacer;\n }\n else if (options === undefined && replacer) {\n options = replacer;\n }\n if (typeof options === 'string')\n options = options.length;\n if (typeof options === 'number') {\n const indent = Math.round(options);\n options = indent < 1 ? undefined : indent > 8 ? { indent: 8 } : { indent };\n }\n if (value === undefined) {\n const { keepUndefined } = options ?? replacer ?? {};\n if (!keepUndefined)\n return undefined;\n }\n return new Document.Document(value, _replacer, options).toString(options);\n}\n\nexports.parse = parse;\nexports.parseAllDocuments = parseAllDocuments;\nexports.parseDocument = parseDocument;\nexports.stringify = stringify;\n","'use strict';\n\nvar identity = require('../nodes/identity.js');\nvar map = require('./common/map.js');\nvar seq = require('./common/seq.js');\nvar string = require('./common/string.js');\nvar tags = require('./tags.js');\n\nconst sortMapEntriesByKey = (a, b) => a.key < b.key ? -1 : a.key > b.key ? 1 : 0;\nclass Schema {\n constructor({ compat, customTags, merge, resolveKnownTags, schema, sortMapEntries, toStringDefaults }) {\n this.compat = Array.isArray(compat)\n ? tags.getTags(compat, 'compat')\n : compat\n ? 
tags.getTags(null, compat)\n : null;\n this.merge = !!merge;\n this.name = (typeof schema === 'string' && schema) || 'core';\n this.knownTags = resolveKnownTags ? tags.coreKnownTags : {};\n this.tags = tags.getTags(customTags, this.name);\n this.toStringOptions = toStringDefaults ?? null;\n Object.defineProperty(this, identity.MAP, { value: map.map });\n Object.defineProperty(this, identity.SCALAR, { value: string.string });\n Object.defineProperty(this, identity.SEQ, { value: seq.seq });\n // Used by createMap()\n this.sortMapEntries =\n typeof sortMapEntries === 'function'\n ? sortMapEntries\n : sortMapEntries === true\n ? sortMapEntriesByKey\n : null;\n }\n clone() {\n const copy = Object.create(Schema.prototype, Object.getOwnPropertyDescriptors(this));\n copy.tags = this.tags.slice();\n return copy;\n }\n}\n\nexports.Schema = Schema;\n","'use strict';\n\nvar identity = require('../../nodes/identity.js');\nvar YAMLMap = require('../../nodes/YAMLMap.js');\n\nconst map = {\n collection: 'map',\n default: true,\n nodeClass: YAMLMap.YAMLMap,\n tag: 'tag:yaml.org,2002:map',\n resolve(map, onError) {\n if (!identity.isMap(map))\n onError('Expected a mapping for this tag');\n return map;\n },\n createNode: (schema, obj, ctx) => YAMLMap.YAMLMap.from(schema, obj, ctx)\n};\n\nexports.map = map;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\n\nconst nullTag = {\n identify: value => value == null,\n createNode: () => new Scalar.Scalar(null),\n default: true,\n tag: 'tag:yaml.org,2002:null',\n test: /^(?:~|[Nn]ull|NULL)?$/,\n resolve: () => new Scalar.Scalar(null),\n stringify: ({ source }, ctx) => typeof source === 'string' && nullTag.test.test(source)\n ? source\n : ctx.options.nullStr\n};\n\nexports.nullTag = nullTag;\n","'use strict';\n\nvar identity = require('../../nodes/identity.js');\nvar YAMLSeq = require('../../nodes/YAMLSeq.js');\n\nconst seq = {\n collection: 'seq',\n default: true,\n nodeClass: YAMLSeq.YAMLSeq,\n tag: 'tag:yaml.org,2002:seq',\n resolve(seq, onError) {\n if (!identity.isSeq(seq))\n onError('Expected a sequence for this tag');\n return seq;\n },\n createNode: (schema, obj, ctx) => YAMLSeq.YAMLSeq.from(schema, obj, ctx)\n};\n\nexports.seq = seq;\n","'use strict';\n\nvar stringifyString = require('../../stringify/stringifyString.js');\n\nconst string = {\n identify: value => typeof value === 'string',\n default: true,\n tag: 'tag:yaml.org,2002:str',\n resolve: str => str,\n stringify(item, ctx, onComment, onChompKeep) {\n ctx = Object.assign({ actualString: true }, ctx);\n return stringifyString.stringifyString(item, ctx, onComment, onChompKeep);\n }\n};\n\nexports.string = string;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\n\nconst boolTag = {\n identify: value => typeof value === 'boolean',\n default: true,\n tag: 'tag:yaml.org,2002:bool',\n test: /^(?:[Tt]rue|TRUE|[Ff]alse|FALSE)$/,\n resolve: str => new Scalar.Scalar(str[0] === 't' || str[0] === 'T'),\n stringify({ source, value }, ctx) {\n if (source && boolTag.test.test(source)) {\n const sv = source[0] === 't' || source[0] === 'T';\n if (value === sv)\n return source;\n }\n return value ? 
ctx.options.trueStr : ctx.options.falseStr;\n }\n};\n\nexports.boolTag = boolTag;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\nvar stringifyNumber = require('../../stringify/stringifyNumber.js');\n\nconst floatNaN = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n test: /^(?:[-+]?\\.(?:inf|Inf|INF|nan|NaN|NAN))$/,\n resolve: str => str.slice(-3).toLowerCase() === 'nan'\n ? NaN\n : str[0] === '-'\n ? Number.NEGATIVE_INFINITY\n : Number.POSITIVE_INFINITY,\n stringify: stringifyNumber.stringifyNumber\n};\nconst floatExp = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n format: 'EXP',\n test: /^[-+]?(?:\\.[0-9]+|[0-9]+(?:\\.[0-9]*)?)[eE][-+]?[0-9]+$/,\n resolve: str => parseFloat(str),\n stringify(node) {\n const num = Number(node.value);\n return isFinite(num) ? num.toExponential() : stringifyNumber.stringifyNumber(node);\n }\n};\nconst float = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n test: /^[-+]?(?:\\.[0-9]+|[0-9]+\\.[0-9]*)$/,\n resolve(str) {\n const node = new Scalar.Scalar(parseFloat(str));\n const dot = str.indexOf('.');\n if (dot !== -1 && str[str.length - 1] === '0')\n node.minFractionDigits = str.length - dot - 1;\n return node;\n },\n stringify: stringifyNumber.stringifyNumber\n};\n\nexports.float = float;\nexports.floatExp = floatExp;\nexports.floatNaN = floatNaN;\n","'use strict';\n\nvar stringifyNumber = require('../../stringify/stringifyNumber.js');\n\nconst intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value);\nconst intResolve = (str, offset, radix, { intAsBigInt }) => (intAsBigInt ? BigInt(str) : parseInt(str.substring(offset), radix));\nfunction intStringify(node, radix, prefix) {\n const { value } = node;\n if (intIdentify(value) && value >= 0)\n return prefix + value.toString(radix);\n return stringifyNumber.stringifyNumber(node);\n}\nconst intOct = {\n identify: value => intIdentify(value) && value >= 0,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'OCT',\n test: /^0o[0-7]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 2, 8, opt),\n stringify: node => intStringify(node, 8, '0o')\n};\nconst int = {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n test: /^[-+]?[0-9]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt),\n stringify: stringifyNumber.stringifyNumber\n};\nconst intHex = {\n identify: value => intIdentify(value) && value >= 0,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'HEX',\n test: /^0x[0-9a-fA-F]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt),\n stringify: node => intStringify(node, 16, '0x')\n};\n\nexports.int = int;\nexports.intHex = intHex;\nexports.intOct = intOct;\n","'use strict';\n\nvar map = require('../common/map.js');\nvar _null = require('../common/null.js');\nvar seq = require('../common/seq.js');\nvar string = require('../common/string.js');\nvar bool = require('./bool.js');\nvar float = require('./float.js');\nvar int = require('./int.js');\n\nconst schema = [\n map.map,\n seq.seq,\n string.string,\n _null.nullTag,\n bool.boolTag,\n int.intOct,\n int.int,\n int.intHex,\n float.floatNaN,\n float.floatExp,\n float.float\n];\n\nexports.schema = schema;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\nvar map = require('../common/map.js');\nvar seq = require('../common/seq.js');\n\nfunction intIdentify(value) {\n 
return typeof value === 'bigint' || Number.isInteger(value);\n}\nconst stringifyJSON = ({ value }) => JSON.stringify(value);\nconst jsonScalars = [\n {\n identify: value => typeof value === 'string',\n default: true,\n tag: 'tag:yaml.org,2002:str',\n resolve: str => str,\n stringify: stringifyJSON\n },\n {\n identify: value => value == null,\n createNode: () => new Scalar.Scalar(null),\n default: true,\n tag: 'tag:yaml.org,2002:null',\n test: /^null$/,\n resolve: () => null,\n stringify: stringifyJSON\n },\n {\n identify: value => typeof value === 'boolean',\n default: true,\n tag: 'tag:yaml.org,2002:bool',\n test: /^true|false$/,\n resolve: str => str === 'true',\n stringify: stringifyJSON\n },\n {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n test: /^-?(?:0|[1-9][0-9]*)$/,\n resolve: (str, _onError, { intAsBigInt }) => intAsBigInt ? BigInt(str) : parseInt(str, 10),\n stringify: ({ value }) => intIdentify(value) ? value.toString() : JSON.stringify(value)\n },\n {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n test: /^-?(?:0|[1-9][0-9]*)(?:\\.[0-9]*)?(?:[eE][-+]?[0-9]+)?$/,\n resolve: str => parseFloat(str),\n stringify: stringifyJSON\n }\n];\nconst jsonError = {\n default: true,\n tag: '',\n test: /^/,\n resolve(str, onError) {\n onError(`Unresolved plain scalar ${JSON.stringify(str)}`);\n return str;\n }\n};\nconst schema = [map.map, seq.seq].concat(jsonScalars, jsonError);\n\nexports.schema = schema;\n","'use strict';\n\nvar map = require('./common/map.js');\nvar _null = require('./common/null.js');\nvar seq = require('./common/seq.js');\nvar string = require('./common/string.js');\nvar bool = require('./core/bool.js');\nvar float = require('./core/float.js');\nvar int = require('./core/int.js');\nvar schema = require('./core/schema.js');\nvar schema$1 = require('./json/schema.js');\nvar binary = require('./yaml-1.1/binary.js');\nvar omap = require('./yaml-1.1/omap.js');\nvar pairs = require('./yaml-1.1/pairs.js');\nvar schema$2 = require('./yaml-1.1/schema.js');\nvar set = require('./yaml-1.1/set.js');\nvar timestamp = require('./yaml-1.1/timestamp.js');\n\nconst schemas = new Map([\n ['core', schema.schema],\n ['failsafe', [map.map, seq.seq, string.string]],\n ['json', schema$1.schema],\n ['yaml11', schema$2.schema],\n ['yaml-1.1', schema$2.schema]\n]);\nconst tagsByName = {\n binary: binary.binary,\n bool: bool.boolTag,\n float: float.float,\n floatExp: float.floatExp,\n floatNaN: float.floatNaN,\n floatTime: timestamp.floatTime,\n int: int.int,\n intHex: int.intHex,\n intOct: int.intOct,\n intTime: timestamp.intTime,\n map: map.map,\n null: _null.nullTag,\n omap: omap.omap,\n pairs: pairs.pairs,\n seq: seq.seq,\n set: set.set,\n timestamp: timestamp.timestamp\n};\nconst coreKnownTags = {\n 'tag:yaml.org,2002:binary': binary.binary,\n 'tag:yaml.org,2002:omap': omap.omap,\n 'tag:yaml.org,2002:pairs': pairs.pairs,\n 'tag:yaml.org,2002:set': set.set,\n 'tag:yaml.org,2002:timestamp': timestamp.timestamp\n};\nfunction getTags(customTags, schemaName) {\n let tags = schemas.get(schemaName);\n if (!tags) {\n if (Array.isArray(customTags))\n tags = [];\n else {\n const keys = Array.from(schemas.keys())\n .filter(key => key !== 'yaml11')\n .map(key => JSON.stringify(key))\n .join(', ');\n throw new Error(`Unknown schema \"${schemaName}\"; use one of ${keys} or define customTags array`);\n }\n }\n if (Array.isArray(customTags)) {\n for (const tag of customTags)\n tags = tags.concat(tag);\n }\n else if (typeof 
customTags === 'function') {\n tags = customTags(tags.slice());\n }\n return tags.map(tag => {\n if (typeof tag !== 'string')\n return tag;\n const tagObj = tagsByName[tag];\n if (tagObj)\n return tagObj;\n const keys = Object.keys(tagsByName)\n .map(key => JSON.stringify(key))\n .join(', ');\n throw new Error(`Unknown custom tag \"${tag}\"; use one of ${keys}`);\n });\n}\n\nexports.coreKnownTags = coreKnownTags;\nexports.getTags = getTags;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\nvar stringifyString = require('../../stringify/stringifyString.js');\n\nconst binary = {\n identify: value => value instanceof Uint8Array,\n default: false,\n tag: 'tag:yaml.org,2002:binary',\n /**\n * Returns a Buffer in node and an Uint8Array in browsers\n *\n * To use the resulting buffer as an image, you'll want to do something like:\n *\n * const blob = new Blob([buffer], { type: 'image/jpeg' })\n * document.querySelector('#photo').src = URL.createObjectURL(blob)\n */\n resolve(src, onError) {\n if (typeof Buffer === 'function') {\n return Buffer.from(src, 'base64');\n }\n else if (typeof atob === 'function') {\n // On IE 11, atob() can't handle newlines\n const str = atob(src.replace(/[\\n\\r]/g, ''));\n const buffer = new Uint8Array(str.length);\n for (let i = 0; i < str.length; ++i)\n buffer[i] = str.charCodeAt(i);\n return buffer;\n }\n else {\n onError('This environment does not support reading binary tags; either Buffer or atob is required');\n return src;\n }\n },\n stringify({ comment, type, value }, ctx, onComment, onChompKeep) {\n const buf = value; // checked earlier by binary.identify()\n let str;\n if (typeof Buffer === 'function') {\n str =\n buf instanceof Buffer\n ? buf.toString('base64')\n : Buffer.from(buf.buffer).toString('base64');\n }\n else if (typeof btoa === 'function') {\n let s = '';\n for (let i = 0; i < buf.length; ++i)\n s += String.fromCharCode(buf[i]);\n str = btoa(s);\n }\n else {\n throw new Error('This environment does not support writing binary tags; either Buffer or btoa is required');\n }\n if (!type)\n type = Scalar.Scalar.BLOCK_LITERAL;\n if (type !== Scalar.Scalar.QUOTE_DOUBLE) {\n const lineWidth = Math.max(ctx.options.lineWidth - ctx.indent.length, ctx.options.minContentWidth);\n const n = Math.ceil(str.length / lineWidth);\n const lines = new Array(n);\n for (let i = 0, o = 0; i < n; ++i, o += lineWidth) {\n lines[i] = str.substr(o, lineWidth);\n }\n str = lines.join(type === Scalar.Scalar.BLOCK_LITERAL ? '\\n' : ' ');\n }\n return stringifyString.stringifyString({ comment, type, value: str }, ctx, onComment, onChompKeep);\n }\n};\n\nexports.binary = binary;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\n\nfunction boolStringify({ value, source }, ctx) {\n const boolObj = value ? trueTag : falseTag;\n if (source && boolObj.test.test(source))\n return source;\n return value ? 
ctx.options.trueStr : ctx.options.falseStr;\n}\nconst trueTag = {\n identify: value => value === true,\n default: true,\n tag: 'tag:yaml.org,2002:bool',\n test: /^(?:Y|y|[Yy]es|YES|[Tt]rue|TRUE|[Oo]n|ON)$/,\n resolve: () => new Scalar.Scalar(true),\n stringify: boolStringify\n};\nconst falseTag = {\n identify: value => value === false,\n default: true,\n tag: 'tag:yaml.org,2002:bool',\n test: /^(?:N|n|[Nn]o|NO|[Ff]alse|FALSE|[Oo]ff|OFF)$/i,\n resolve: () => new Scalar.Scalar(false),\n stringify: boolStringify\n};\n\nexports.falseTag = falseTag;\nexports.trueTag = trueTag;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\nvar stringifyNumber = require('../../stringify/stringifyNumber.js');\n\nconst floatNaN = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n test: /^[-+]?\\.(?:inf|Inf|INF|nan|NaN|NAN)$/,\n resolve: (str) => str.slice(-3).toLowerCase() === 'nan'\n ? NaN\n : str[0] === '-'\n ? Number.NEGATIVE_INFINITY\n : Number.POSITIVE_INFINITY,\n stringify: stringifyNumber.stringifyNumber\n};\nconst floatExp = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n format: 'EXP',\n test: /^[-+]?(?:[0-9][0-9_]*)?(?:\\.[0-9_]*)?[eE][-+]?[0-9]+$/,\n resolve: (str) => parseFloat(str.replace(/_/g, '')),\n stringify(node) {\n const num = Number(node.value);\n return isFinite(num) ? num.toExponential() : stringifyNumber.stringifyNumber(node);\n }\n};\nconst float = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n test: /^[-+]?(?:[0-9][0-9_]*)?\\.[0-9_]*$/,\n resolve(str) {\n const node = new Scalar.Scalar(parseFloat(str.replace(/_/g, '')));\n const dot = str.indexOf('.');\n if (dot !== -1) {\n const f = str.substring(dot + 1).replace(/_/g, '');\n if (f[f.length - 1] === '0')\n node.minFractionDigits = f.length;\n }\n return node;\n },\n stringify: stringifyNumber.stringifyNumber\n};\n\nexports.float = float;\nexports.floatExp = floatExp;\nexports.floatNaN = floatNaN;\n","'use strict';\n\nvar stringifyNumber = require('../../stringify/stringifyNumber.js');\n\nconst intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value);\nfunction intResolve(str, offset, radix, { intAsBigInt }) {\n const sign = str[0];\n if (sign === '-' || sign === '+')\n offset += 1;\n str = str.substring(offset).replace(/_/g, '');\n if (intAsBigInt) {\n switch (radix) {\n case 2:\n str = `0b${str}`;\n break;\n case 8:\n str = `0o${str}`;\n break;\n case 16:\n str = `0x${str}`;\n break;\n }\n const n = BigInt(str);\n return sign === '-' ? BigInt(-1) * n : n;\n }\n const n = parseInt(str, radix);\n return sign === '-' ? -1 * n : n;\n}\nfunction intStringify(node, radix, prefix) {\n const { value } = node;\n if (intIdentify(value)) {\n const str = value.toString(radix);\n return value < 0 ? 
'-' + prefix + str.substr(1) : prefix + str;\n }\n return stringifyNumber.stringifyNumber(node);\n}\nconst intBin = {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'BIN',\n test: /^[-+]?0b[0-1_]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 2, 2, opt),\n stringify: node => intStringify(node, 2, '0b')\n};\nconst intOct = {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'OCT',\n test: /^[-+]?0[0-7_]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 1, 8, opt),\n stringify: node => intStringify(node, 8, '0')\n};\nconst int = {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n test: /^[-+]?[0-9][0-9_]*$/,\n resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt),\n stringify: stringifyNumber.stringifyNumber\n};\nconst intHex = {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'HEX',\n test: /^[-+]?0x[0-9a-fA-F_]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt),\n stringify: node => intStringify(node, 16, '0x')\n};\n\nexports.int = int;\nexports.intBin = intBin;\nexports.intHex = intHex;\nexports.intOct = intOct;\n","'use strict';\n\nvar identity = require('../../nodes/identity.js');\nvar toJS = require('../../nodes/toJS.js');\nvar YAMLMap = require('../../nodes/YAMLMap.js');\nvar YAMLSeq = require('../../nodes/YAMLSeq.js');\nvar pairs = require('./pairs.js');\n\nclass YAMLOMap extends YAMLSeq.YAMLSeq {\n constructor() {\n super();\n this.add = YAMLMap.YAMLMap.prototype.add.bind(this);\n this.delete = YAMLMap.YAMLMap.prototype.delete.bind(this);\n this.get = YAMLMap.YAMLMap.prototype.get.bind(this);\n this.has = YAMLMap.YAMLMap.prototype.has.bind(this);\n this.set = YAMLMap.YAMLMap.prototype.set.bind(this);\n this.tag = YAMLOMap.tag;\n }\n /**\n * If `ctx` is given, the return type is actually `Map`,\n * but TypeScript won't allow widening the signature of a child method.\n */\n toJSON(_, ctx) {\n if (!ctx)\n return super.toJSON(_);\n const map = new Map();\n if (ctx?.onCreate)\n ctx.onCreate(map);\n for (const pair of this.items) {\n let key, value;\n if (identity.isPair(pair)) {\n key = toJS.toJS(pair.key, '', ctx);\n value = toJS.toJS(pair.value, key, ctx);\n }\n else {\n key = toJS.toJS(pair, '', ctx);\n }\n if (map.has(key))\n throw new Error('Ordered maps must not include duplicate keys');\n map.set(key, value);\n }\n return map;\n }\n static from(schema, iterable, ctx) {\n const pairs$1 = pairs.createPairs(schema, iterable, ctx);\n const omap = new this();\n omap.items = pairs$1.items;\n return omap;\n }\n}\nYAMLOMap.tag = 'tag:yaml.org,2002:omap';\nconst omap = {\n collection: 'seq',\n identify: value => value instanceof Map,\n nodeClass: YAMLOMap,\n default: false,\n tag: 'tag:yaml.org,2002:omap',\n resolve(seq, onError) {\n const pairs$1 = pairs.resolvePairs(seq, onError);\n const seenKeys = [];\n for (const { key } of pairs$1.items) {\n if (identity.isScalar(key)) {\n if (seenKeys.includes(key.value)) {\n onError(`Ordered maps must not include duplicate keys: ${key.value}`);\n }\n else {\n seenKeys.push(key.value);\n }\n }\n }\n return Object.assign(new YAMLOMap(), pairs$1);\n },\n createNode: (schema, iterable, ctx) => YAMLOMap.from(schema, iterable, ctx)\n};\n\nexports.YAMLOMap = YAMLOMap;\nexports.omap = omap;\n","'use strict';\n\nvar identity = require('../../nodes/identity.js');\nvar Pair = require('../../nodes/Pair.js');\nvar Scalar = require('../../nodes/Scalar.js');\nvar YAMLSeq = 
require('../../nodes/YAMLSeq.js');\n\nfunction resolvePairs(seq, onError) {\n if (identity.isSeq(seq)) {\n for (let i = 0; i < seq.items.length; ++i) {\n let item = seq.items[i];\n if (identity.isPair(item))\n continue;\n else if (identity.isMap(item)) {\n if (item.items.length > 1)\n onError('Each pair must have its own sequence indicator');\n const pair = item.items[0] || new Pair.Pair(new Scalar.Scalar(null));\n if (item.commentBefore)\n pair.key.commentBefore = pair.key.commentBefore\n ? `${item.commentBefore}\\n${pair.key.commentBefore}`\n : item.commentBefore;\n if (item.comment) {\n const cn = pair.value ?? pair.key;\n cn.comment = cn.comment\n ? `${item.comment}\\n${cn.comment}`\n : item.comment;\n }\n item = pair;\n }\n seq.items[i] = identity.isPair(item) ? item : new Pair.Pair(item);\n }\n }\n else\n onError('Expected a sequence for this tag');\n return seq;\n}\nfunction createPairs(schema, iterable, ctx) {\n const { replacer } = ctx;\n const pairs = new YAMLSeq.YAMLSeq(schema);\n pairs.tag = 'tag:yaml.org,2002:pairs';\n let i = 0;\n if (iterable && Symbol.iterator in Object(iterable))\n for (let it of iterable) {\n if (typeof replacer === 'function')\n it = replacer.call(iterable, String(i++), it);\n let key, value;\n if (Array.isArray(it)) {\n if (it.length === 2) {\n key = it[0];\n value = it[1];\n }\n else\n throw new TypeError(`Expected [key, value] tuple: ${it}`);\n }\n else if (it && it instanceof Object) {\n const keys = Object.keys(it);\n if (keys.length === 1) {\n key = keys[0];\n value = it[key];\n }\n else {\n throw new TypeError(`Expected tuple with one key, not ${keys.length} keys`);\n }\n }\n else {\n key = it;\n }\n pairs.items.push(Pair.createPair(key, value, ctx));\n }\n return pairs;\n}\nconst pairs = {\n collection: 'seq',\n default: false,\n tag: 'tag:yaml.org,2002:pairs',\n resolve: resolvePairs,\n createNode: createPairs\n};\n\nexports.createPairs = createPairs;\nexports.pairs = pairs;\nexports.resolvePairs = resolvePairs;\n","'use strict';\n\nvar map = require('../common/map.js');\nvar _null = require('../common/null.js');\nvar seq = require('../common/seq.js');\nvar string = require('../common/string.js');\nvar binary = require('./binary.js');\nvar bool = require('./bool.js');\nvar float = require('./float.js');\nvar int = require('./int.js');\nvar omap = require('./omap.js');\nvar pairs = require('./pairs.js');\nvar set = require('./set.js');\nvar timestamp = require('./timestamp.js');\n\nconst schema = [\n map.map,\n seq.seq,\n string.string,\n _null.nullTag,\n bool.trueTag,\n bool.falseTag,\n int.intBin,\n int.intOct,\n int.int,\n int.intHex,\n float.floatNaN,\n float.floatExp,\n float.float,\n binary.binary,\n omap.omap,\n pairs.pairs,\n set.set,\n timestamp.intTime,\n timestamp.floatTime,\n timestamp.timestamp\n];\n\nexports.schema = schema;\n","'use strict';\n\nvar identity = require('../../nodes/identity.js');\nvar Pair = require('../../nodes/Pair.js');\nvar YAMLMap = require('../../nodes/YAMLMap.js');\n\nclass YAMLSet extends YAMLMap.YAMLMap {\n constructor(schema) {\n super(schema);\n this.tag = YAMLSet.tag;\n }\n add(key) {\n let pair;\n if (identity.isPair(key))\n pair = key;\n else if (key &&\n typeof key === 'object' &&\n 'key' in key &&\n 'value' in key &&\n key.value === null)\n pair = new Pair.Pair(key.key, null);\n else\n pair = new Pair.Pair(key, null);\n const prev = YAMLMap.findPair(this.items, pair.key);\n if (!prev)\n this.items.push(pair);\n }\n /**\n * If `keepPair` is `true`, returns the Pair matching `key`.\n * Otherwise, 
returns the value of that Pair's key.\n */\n get(key, keepPair) {\n const pair = YAMLMap.findPair(this.items, key);\n return !keepPair && identity.isPair(pair)\n ? identity.isScalar(pair.key)\n ? pair.key.value\n : pair.key\n : pair;\n }\n set(key, value) {\n if (typeof value !== 'boolean')\n throw new Error(`Expected boolean value for set(key, value) in a YAML set, not ${typeof value}`);\n const prev = YAMLMap.findPair(this.items, key);\n if (prev && !value) {\n this.items.splice(this.items.indexOf(prev), 1);\n }\n else if (!prev && value) {\n this.items.push(new Pair.Pair(key));\n }\n }\n toJSON(_, ctx) {\n return super.toJSON(_, ctx, Set);\n }\n toString(ctx, onComment, onChompKeep) {\n if (!ctx)\n return JSON.stringify(this);\n if (this.hasAllNullValues(true))\n return super.toString(Object.assign({}, ctx, { allNullValues: true }), onComment, onChompKeep);\n else\n throw new Error('Set items must all have null values');\n }\n static from(schema, iterable, ctx) {\n const { replacer } = ctx;\n const set = new this(schema);\n if (iterable && Symbol.iterator in Object(iterable))\n for (let value of iterable) {\n if (typeof replacer === 'function')\n value = replacer.call(iterable, value, value);\n set.items.push(Pair.createPair(value, null, ctx));\n }\n return set;\n }\n}\nYAMLSet.tag = 'tag:yaml.org,2002:set';\nconst set = {\n collection: 'map',\n identify: value => value instanceof Set,\n nodeClass: YAMLSet,\n default: false,\n tag: 'tag:yaml.org,2002:set',\n createNode: (schema, iterable, ctx) => YAMLSet.from(schema, iterable, ctx),\n resolve(map, onError) {\n if (identity.isMap(map)) {\n if (map.hasAllNullValues(true))\n return Object.assign(new YAMLSet(), map);\n else\n onError('Set items must all have null values');\n }\n else\n onError('Expected a mapping for this tag');\n return map;\n }\n};\n\nexports.YAMLSet = YAMLSet;\nexports.set = set;\n","'use strict';\n\nvar stringifyNumber = require('../../stringify/stringifyNumber.js');\n\n/** Internal types handle bigint as number, because TS can't figure it out. */\nfunction parseSexagesimal(str, asBigInt) {\n const sign = str[0];\n const parts = sign === '-' || sign === '+' ? str.substring(1) : str;\n const num = (n) => asBigInt ? BigInt(n) : Number(n);\n const res = parts\n .replace(/_/g, '')\n .split(':')\n .reduce((res, p) => res * num(60) + num(p), num(0));\n return (sign === '-' ? 
num(-1) * res : res);\n}\n/**\n * hhhh:mm:ss.sss\n *\n * Internal types handle bigint as number, because TS can't figure it out.\n */\nfunction stringifySexagesimal(node) {\n let { value } = node;\n let num = (n) => n;\n if (typeof value === 'bigint')\n num = n => BigInt(n);\n else if (isNaN(value) || !isFinite(value))\n return stringifyNumber.stringifyNumber(node);\n let sign = '';\n if (value < 0) {\n sign = '-';\n value *= num(-1);\n }\n const _60 = num(60);\n const parts = [value % _60]; // seconds, including ms\n if (value < 60) {\n parts.unshift(0); // at least one : is required\n }\n else {\n value = (value - parts[0]) / _60;\n parts.unshift(value % _60); // minutes\n if (value >= 60) {\n value = (value - parts[0]) / _60;\n parts.unshift(value); // hours\n }\n }\n return (sign +\n parts\n .map(n => String(n).padStart(2, '0'))\n .join(':')\n .replace(/000000\\d*$/, '') // % 60 may introduce error\n );\n}\nconst intTime = {\n identify: value => typeof value === 'bigint' || Number.isInteger(value),\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'TIME',\n test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+$/,\n resolve: (str, _onError, { intAsBigInt }) => parseSexagesimal(str, intAsBigInt),\n stringify: stringifySexagesimal\n};\nconst floatTime = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n format: 'TIME',\n test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\\.[0-9_]*$/,\n resolve: str => parseSexagesimal(str, false),\n stringify: stringifySexagesimal\n};\nconst timestamp = {\n identify: value => value instanceof Date,\n default: true,\n tag: 'tag:yaml.org,2002:timestamp',\n // If the time zone is omitted, the timestamp is assumed to be specified in UTC. The time part\n // may be omitted altogether, resulting in a date format. In such a case, the time part is\n // assumed to be 00:00:00Z (start of day, UTC).\n test: RegExp('^([0-9]{4})-([0-9]{1,2})-([0-9]{1,2})' + // YYYY-Mm-Dd\n '(?:' + // time is optional\n '(?:t|T|[ \\\\t]+)' + // t | T | whitespace\n '([0-9]{1,2}):([0-9]{1,2}):([0-9]{1,2}(\\\\.[0-9]+)?)' + // Hh:Mm:Ss(.ss)?\n '(?:[ \\\\t]*(Z|[-+][012]?[0-9](?::[0-9]{2})?))?' + // Z | +5 | -03:30\n ')?$'),\n resolve(str) {\n const match = str.match(timestamp.test);\n if (!match)\n throw new Error('!!timestamp expects a date, starting with yyyy-mm-dd');\n const [, year, month, day, hour, minute, second] = match.map(Number);\n const millisec = match[7] ? Number((match[7] + '00').substr(1, 3)) : 0;\n let date = Date.UTC(year, month - 1, day, hour || 0, minute || 0, second || 0, millisec);\n const tz = match[8];\n if (tz && tz !== 'Z') {\n let d = parseSexagesimal(tz, false);\n if (Math.abs(d) < 30)\n d *= 60;\n date -= 60000 * d;\n }\n return new Date(date);\n },\n stringify: ({ value }) => value.toISOString().replace(/((T00:00)?:00)?\\.000Z$/, '')\n};\n\nexports.floatTime = floatTime;\nexports.intTime = intTime;\nexports.timestamp = timestamp;\n","'use strict';\n\nconst FOLD_FLOW = 'flow';\nconst FOLD_BLOCK = 'block';\nconst FOLD_QUOTED = 'quoted';\n/**\n * Tries to keep input at up to `lineWidth` characters, splitting only on spaces\n * not followed by newlines or spaces unless `mode` is `'quoted'`. 
Lines are\n * terminated with `\\n` and started with `indent`.\n */\nfunction foldFlowLines(text, indent, mode = 'flow', { indentAtStart, lineWidth = 80, minContentWidth = 20, onFold, onOverflow } = {}) {\n if (!lineWidth || lineWidth < 0)\n return text;\n const endStep = Math.max(1 + minContentWidth, 1 + lineWidth - indent.length);\n if (text.length <= endStep)\n return text;\n const folds = [];\n const escapedFolds = {};\n let end = lineWidth - indent.length;\n if (typeof indentAtStart === 'number') {\n if (indentAtStart > lineWidth - Math.max(2, minContentWidth))\n folds.push(0);\n else\n end = lineWidth - indentAtStart;\n }\n let split = undefined;\n let prev = undefined;\n let overflow = false;\n let i = -1;\n let escStart = -1;\n let escEnd = -1;\n if (mode === FOLD_BLOCK) {\n i = consumeMoreIndentedLines(text, i);\n if (i !== -1)\n end = i + endStep;\n }\n for (let ch; (ch = text[(i += 1)]);) {\n if (mode === FOLD_QUOTED && ch === '\\\\') {\n escStart = i;\n switch (text[i + 1]) {\n case 'x':\n i += 3;\n break;\n case 'u':\n i += 5;\n break;\n case 'U':\n i += 9;\n break;\n default:\n i += 1;\n }\n escEnd = i;\n }\n if (ch === '\\n') {\n if (mode === FOLD_BLOCK)\n i = consumeMoreIndentedLines(text, i);\n end = i + endStep;\n split = undefined;\n }\n else {\n if (ch === ' ' &&\n prev &&\n prev !== ' ' &&\n prev !== '\\n' &&\n prev !== '\\t') {\n // space surrounded by non-space can be replaced with newline + indent\n const next = text[i + 1];\n if (next && next !== ' ' && next !== '\\n' && next !== '\\t')\n split = i;\n }\n if (i >= end) {\n if (split) {\n folds.push(split);\n end = split + endStep;\n split = undefined;\n }\n else if (mode === FOLD_QUOTED) {\n // white-space collected at end may stretch past lineWidth\n while (prev === ' ' || prev === '\\t') {\n prev = ch;\n ch = text[(i += 1)];\n overflow = true;\n }\n // Account for newline escape, but don't break preceding escape\n const j = i > escEnd + 1 ? 
i - 2 : escStart - 1;\n // Bail out if lineWidth & minContentWidth are shorter than an escape string\n if (escapedFolds[j])\n return text;\n folds.push(j);\n escapedFolds[j] = true;\n end = j + endStep;\n split = undefined;\n }\n else {\n overflow = true;\n }\n }\n }\n prev = ch;\n }\n if (overflow && onOverflow)\n onOverflow();\n if (folds.length === 0)\n return text;\n if (onFold)\n onFold();\n let res = text.slice(0, folds[0]);\n for (let i = 0; i < folds.length; ++i) {\n const fold = folds[i];\n const end = folds[i + 1] || text.length;\n if (fold === 0)\n res = `\\n${indent}${text.slice(0, end)}`;\n else {\n if (mode === FOLD_QUOTED && escapedFolds[fold])\n res += `${text[fold]}\\\\`;\n res += `\\n${indent}${text.slice(fold + 1, end)}`;\n }\n }\n return res;\n}\n/**\n * Presumes `i + 1` is at the start of a line\n * @returns index of last newline in more-indented block\n */\nfunction consumeMoreIndentedLines(text, i) {\n let ch = text[i + 1];\n while (ch === ' ' || ch === '\\t') {\n do {\n ch = text[(i += 1)];\n } while (ch && ch !== '\\n');\n ch = text[i + 1];\n }\n return i;\n}\n\nexports.FOLD_BLOCK = FOLD_BLOCK;\nexports.FOLD_FLOW = FOLD_FLOW;\nexports.FOLD_QUOTED = FOLD_QUOTED;\nexports.foldFlowLines = foldFlowLines;\n","'use strict';\n\nvar anchors = require('../doc/anchors.js');\nvar identity = require('../nodes/identity.js');\nvar stringifyComment = require('./stringifyComment.js');\nvar stringifyString = require('./stringifyString.js');\n\nfunction createStringifyContext(doc, options) {\n const opt = Object.assign({\n blockQuote: true,\n commentString: stringifyComment.stringifyComment,\n defaultKeyType: null,\n defaultStringType: 'PLAIN',\n directives: null,\n doubleQuotedAsJSON: false,\n doubleQuotedMinMultiLineLength: 40,\n falseStr: 'false',\n flowCollectionPadding: true,\n indentSeq: true,\n lineWidth: 80,\n minContentWidth: 20,\n nullStr: 'null',\n simpleKeys: false,\n singleQuote: null,\n trueStr: 'true',\n verifyAliasOrder: true\n }, doc.schema.toStringOptions, options);\n let inFlow;\n switch (opt.collectionStyle) {\n case 'block':\n inFlow = false;\n break;\n case 'flow':\n inFlow = true;\n break;\n default:\n inFlow = null;\n }\n return {\n anchors: new Set(),\n doc,\n flowCollectionPadding: opt.flowCollectionPadding ? ' ' : '',\n indent: '',\n indentStep: typeof opt.indent === 'number' ? ' '.repeat(opt.indent) : ' ',\n inFlow,\n options: opt\n };\n}\nfunction getTagObject(tags, item) {\n if (item.tag) {\n const match = tags.filter(t => t.tag === item.tag);\n if (match.length > 0)\n return match.find(t => t.format === item.format) ?? match[0];\n }\n let tagObj = undefined;\n let obj;\n if (identity.isScalar(item)) {\n obj = item.value;\n const match = tags.filter(t => t.identify?.(obj));\n tagObj =\n match.find(t => t.format === item.format) ?? match.find(t => !t.format);\n }\n else {\n obj = item;\n tagObj = tags.find(t => t.nodeClass && obj instanceof t.nodeClass);\n }\n if (!tagObj) {\n const name = obj?.constructor?.name ?? typeof obj;\n throw new Error(`Tag not resolved for ${name} value`);\n }\n return tagObj;\n}\n// needs to be called before value stringifier to allow for circular anchor refs\nfunction stringifyProps(node, tagObj, { anchors: anchors$1, doc }) {\n if (!doc.directives)\n return '';\n const props = [];\n const anchor = (identity.isScalar(node) || identity.isCollection(node)) && node.anchor;\n if (anchor && anchors.anchorIsValid(anchor)) {\n anchors$1.add(anchor);\n props.push(`&${anchor}`);\n }\n const tag = node.tag ? node.tag : tagObj.default ? 
null : tagObj.tag;\n if (tag)\n props.push(doc.directives.tagString(tag));\n return props.join(' ');\n}\nfunction stringify(item, ctx, onComment, onChompKeep) {\n if (identity.isPair(item))\n return item.toString(ctx, onComment, onChompKeep);\n if (identity.isAlias(item)) {\n if (ctx.doc.directives)\n return item.toString(ctx);\n if (ctx.resolvedAliases?.has(item)) {\n throw new TypeError(`Cannot stringify circular structure without alias nodes`);\n }\n else {\n if (ctx.resolvedAliases)\n ctx.resolvedAliases.add(item);\n else\n ctx.resolvedAliases = new Set([item]);\n item = item.resolve(ctx.doc);\n }\n }\n let tagObj = undefined;\n const node = identity.isNode(item)\n ? item\n : ctx.doc.createNode(item, { onTagObj: o => (tagObj = o) });\n if (!tagObj)\n tagObj = getTagObject(ctx.doc.schema.tags, node);\n const props = stringifyProps(node, tagObj, ctx);\n if (props.length > 0)\n ctx.indentAtStart = (ctx.indentAtStart ?? 0) + props.length + 1;\n const str = typeof tagObj.stringify === 'function'\n ? tagObj.stringify(node, ctx, onComment, onChompKeep)\n : identity.isScalar(node)\n ? stringifyString.stringifyString(node, ctx, onComment, onChompKeep)\n : node.toString(ctx, onComment, onChompKeep);\n if (!props)\n return str;\n return identity.isScalar(node) || str[0] === '{' || str[0] === '['\n ? `${props} ${str}`\n : `${props}\\n${ctx.indent}${str}`;\n}\n\nexports.createStringifyContext = createStringifyContext;\nexports.stringify = stringify;\n","'use strict';\n\nvar Collection = require('../nodes/Collection.js');\nvar identity = require('../nodes/identity.js');\nvar stringify = require('./stringify.js');\nvar stringifyComment = require('./stringifyComment.js');\n\nfunction stringifyCollection(collection, ctx, options) {\n const flow = ctx.inFlow ?? collection.flow;\n const stringify = flow ? stringifyFlowCollection : stringifyBlockCollection;\n return stringify(collection, ctx, options);\n}\nfunction stringifyBlockCollection({ comment, items }, ctx, { blockItemPrefix, flowChars, itemIndent, onChompKeep, onComment }) {\n const { indent, options: { commentString } } = ctx;\n const itemCtx = Object.assign({}, ctx, { indent: itemIndent, type: null });\n let chompKeep = false; // flag for the preceding node's status\n const lines = [];\n for (let i = 0; i < items.length; ++i) {\n const item = items[i];\n let comment = null;\n if (identity.isNode(item)) {\n if (!chompKeep && item.spaceBefore)\n lines.push('');\n addCommentBefore(ctx, lines, item.commentBefore, chompKeep);\n if (item.comment)\n comment = item.comment;\n }\n else if (identity.isPair(item)) {\n const ik = identity.isNode(item.key) ? item.key : null;\n if (ik) {\n if (!chompKeep && ik.spaceBefore)\n lines.push('');\n addCommentBefore(ctx, lines, ik.commentBefore, chompKeep);\n }\n }\n chompKeep = false;\n let str = stringify.stringify(item, itemCtx, () => (comment = null), () => (chompKeep = true));\n if (comment)\n str += stringifyComment.lineComment(str, itemIndent, commentString(comment));\n if (chompKeep && comment)\n chompKeep = false;\n lines.push(blockItemPrefix + str);\n }\n let str;\n if (lines.length === 0) {\n str = flowChars.start + flowChars.end;\n }\n else {\n str = lines[0];\n for (let i = 1; i < lines.length; ++i) {\n const line = lines[i];\n str += line ? 
`\\n${indent}${line}` : '\\n';\n }\n }\n if (comment) {\n str += '\\n' + stringifyComment.indentComment(commentString(comment), indent);\n if (onComment)\n onComment();\n }\n else if (chompKeep && onChompKeep)\n onChompKeep();\n return str;\n}\nfunction stringifyFlowCollection({ comment, items }, ctx, { flowChars, itemIndent, onComment }) {\n const { indent, indentStep, flowCollectionPadding: fcPadding, options: { commentString } } = ctx;\n itemIndent += indentStep;\n const itemCtx = Object.assign({}, ctx, {\n indent: itemIndent,\n inFlow: true,\n type: null\n });\n let reqNewline = false;\n let linesAtValue = 0;\n const lines = [];\n for (let i = 0; i < items.length; ++i) {\n const item = items[i];\n let comment = null;\n if (identity.isNode(item)) {\n if (item.spaceBefore)\n lines.push('');\n addCommentBefore(ctx, lines, item.commentBefore, false);\n if (item.comment)\n comment = item.comment;\n }\n else if (identity.isPair(item)) {\n const ik = identity.isNode(item.key) ? item.key : null;\n if (ik) {\n if (ik.spaceBefore)\n lines.push('');\n addCommentBefore(ctx, lines, ik.commentBefore, false);\n if (ik.comment)\n reqNewline = true;\n }\n const iv = identity.isNode(item.value) ? item.value : null;\n if (iv) {\n if (iv.comment)\n comment = iv.comment;\n if (iv.commentBefore)\n reqNewline = true;\n }\n else if (item.value == null && ik?.comment) {\n comment = ik.comment;\n }\n }\n if (comment)\n reqNewline = true;\n let str = stringify.stringify(item, itemCtx, () => (comment = null));\n if (i < items.length - 1)\n str += ',';\n if (comment)\n str += stringifyComment.lineComment(str, itemIndent, commentString(comment));\n if (!reqNewline && (lines.length > linesAtValue || str.includes('\\n')))\n reqNewline = true;\n lines.push(str);\n linesAtValue = lines.length;\n }\n let str;\n const { start, end } = flowChars;\n if (lines.length === 0) {\n str = start + end;\n }\n else {\n if (!reqNewline) {\n const len = lines.reduce((sum, line) => sum + line.length + 2, 2);\n reqNewline = len > Collection.Collection.maxFlowStringSingleLineLength;\n }\n if (reqNewline) {\n str = start;\n for (const line of lines)\n str += line ? `\\n${indentStep}${indent}${line}` : '\\n';\n str += `\\n${indent}${end}`;\n }\n else {\n str = `${start}${fcPadding}${lines.join(' ')}${fcPadding}${end}`;\n }\n }\n if (comment) {\n str += stringifyComment.lineComment(str, indent, commentString(comment));\n if (onComment)\n onComment();\n }\n return str;\n}\nfunction addCommentBefore({ indent, options: { commentString } }, lines, comment, chompKeep) {\n if (comment && chompKeep)\n comment = comment.replace(/^\\n+/, '');\n if (comment) {\n const ic = stringifyComment.indentComment(commentString(comment), indent);\n lines.push(ic.trimStart()); // Avoid double indent on first line\n }\n}\n\nexports.stringifyCollection = stringifyCollection;\n","'use strict';\n\n/**\n * Stringifies a comment.\n *\n * Empty comment lines are left empty,\n * lines consisting of a single space are replaced by `#`,\n * and all other lines are prefixed with a `#`.\n */\nconst stringifyComment = (str) => str.replace(/^(?!$)(?: $)?/gm, '#');\nfunction indentComment(comment, indent) {\n if (/^\\n+$/.test(comment))\n return comment.substring(1);\n return indent ? comment.replace(/^(?! *$)/gm, indent) : comment;\n}\nconst lineComment = (str, indent, comment) => str.endsWith('\\n')\n ? indentComment(comment, indent)\n : comment.includes('\\n')\n ? '\\n' + indentComment(comment, indent)\n : (str.endsWith(' ') ? 
'' : ' ') + comment;\n\nexports.indentComment = indentComment;\nexports.lineComment = lineComment;\nexports.stringifyComment = stringifyComment;\n","'use strict';\n\nvar identity = require('../nodes/identity.js');\nvar stringify = require('./stringify.js');\nvar stringifyComment = require('./stringifyComment.js');\n\nfunction stringifyDocument(doc, options) {\n const lines = [];\n let hasDirectives = options.directives === true;\n if (options.directives !== false && doc.directives) {\n const dir = doc.directives.toString(doc);\n if (dir) {\n lines.push(dir);\n hasDirectives = true;\n }\n else if (doc.directives.docStart)\n hasDirectives = true;\n }\n if (hasDirectives)\n lines.push('---');\n const ctx = stringify.createStringifyContext(doc, options);\n const { commentString } = ctx.options;\n if (doc.commentBefore) {\n if (lines.length !== 1)\n lines.unshift('');\n const cs = commentString(doc.commentBefore);\n lines.unshift(stringifyComment.indentComment(cs, ''));\n }\n let chompKeep = false;\n let contentComment = null;\n if (doc.contents) {\n if (identity.isNode(doc.contents)) {\n if (doc.contents.spaceBefore && hasDirectives)\n lines.push('');\n if (doc.contents.commentBefore) {\n const cs = commentString(doc.contents.commentBefore);\n lines.push(stringifyComment.indentComment(cs, ''));\n }\n // top-level block scalars need to be indented if followed by a comment\n ctx.forceBlockIndent = !!doc.comment;\n contentComment = doc.contents.comment;\n }\n const onChompKeep = contentComment ? undefined : () => (chompKeep = true);\n let body = stringify.stringify(doc.contents, ctx, () => (contentComment = null), onChompKeep);\n if (contentComment)\n body += stringifyComment.lineComment(body, '', commentString(contentComment));\n if ((body[0] === '|' || body[0] === '>') &&\n lines[lines.length - 1] === '---') {\n // Top-level block scalars with a preceding doc marker ought to use the\n // same line for their header.\n lines[lines.length - 1] = `--- ${body}`;\n }\n else\n lines.push(body);\n }\n else {\n lines.push(stringify.stringify(doc.contents, ctx));\n }\n if (doc.directives?.docEnd) {\n if (doc.comment) {\n const cs = commentString(doc.comment);\n if (cs.includes('\\n')) {\n lines.push('...');\n lines.push(stringifyComment.indentComment(cs, ''));\n }\n else {\n lines.push(`... ${cs}`);\n }\n }\n else {\n lines.push('...');\n }\n }\n else {\n let dc = doc.comment;\n if (dc && chompKeep)\n dc = dc.replace(/^\\n+/, '');\n if (dc) {\n if ((!chompKeep || contentComment) && lines[lines.length - 1] !== '')\n lines.push('');\n lines.push(stringifyComment.indentComment(commentString(dc), ''));\n }\n }\n return lines.join('\\n') + '\\n';\n}\n\nexports.stringifyDocument = stringifyDocument;\n","'use strict';\n\nfunction stringifyNumber({ format, minFractionDigits, tag, value }) {\n if (typeof value === 'bigint')\n return String(value);\n const num = typeof value === 'number' ? value : Number(value);\n if (!isFinite(num))\n return isNaN(num) ? '.nan' : num < 0 ? 
'-.inf' : '.inf';\n let n = JSON.stringify(value);\n if (!format &&\n minFractionDigits &&\n (!tag || tag === 'tag:yaml.org,2002:float') &&\n /^\\d/.test(n)) {\n let i = n.indexOf('.');\n if (i < 0) {\n i = n.length;\n n += '.';\n }\n let d = minFractionDigits - (n.length - i - 1);\n while (d-- > 0)\n n += '0';\n }\n return n;\n}\n\nexports.stringifyNumber = stringifyNumber;\n","'use strict';\n\nvar identity = require('../nodes/identity.js');\nvar Scalar = require('../nodes/Scalar.js');\nvar stringify = require('./stringify.js');\nvar stringifyComment = require('./stringifyComment.js');\n\nfunction stringifyPair({ key, value }, ctx, onComment, onChompKeep) {\n const { allNullValues, doc, indent, indentStep, options: { commentString, indentSeq, simpleKeys } } = ctx;\n let keyComment = (identity.isNode(key) && key.comment) || null;\n if (simpleKeys) {\n if (keyComment) {\n throw new Error('With simple keys, key nodes cannot have comments');\n }\n if (identity.isCollection(key)) {\n const msg = 'With simple keys, collection cannot be used as a key value';\n throw new Error(msg);\n }\n }\n let explicitKey = !simpleKeys &&\n (!key ||\n (keyComment && value == null && !ctx.inFlow) ||\n identity.isCollection(key) ||\n (identity.isScalar(key)\n ? key.type === Scalar.Scalar.BLOCK_FOLDED || key.type === Scalar.Scalar.BLOCK_LITERAL\n : typeof key === 'object'));\n ctx = Object.assign({}, ctx, {\n allNullValues: false,\n implicitKey: !explicitKey && (simpleKeys || !allNullValues),\n indent: indent + indentStep\n });\n let keyCommentDone = false;\n let chompKeep = false;\n let str = stringify.stringify(key, ctx, () => (keyCommentDone = true), () => (chompKeep = true));\n if (!explicitKey && !ctx.inFlow && str.length > 1024) {\n if (simpleKeys)\n throw new Error('With simple keys, single line scalar must not span more than 1024 characters');\n explicitKey = true;\n }\n if (ctx.inFlow) {\n if (allNullValues || value == null) {\n if (keyCommentDone && onComment)\n onComment();\n return str === '' ? '?' : explicitKey ? `? ${str}` : str;\n }\n }\n else if ((allNullValues && !simpleKeys) || (value == null && explicitKey)) {\n str = `? ${str}`;\n if (keyComment && !keyCommentDone) {\n str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment));\n }\n else if (chompKeep && onChompKeep)\n onChompKeep();\n return str;\n }\n if (keyCommentDone)\n keyComment = null;\n if (explicitKey) {\n if (keyComment)\n str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment));\n str = `? 
${str}\\n${indent}:`;\n }\n else {\n str = `${str}:`;\n if (keyComment)\n str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment));\n }\n let vsb, vcb, valueComment;\n if (identity.isNode(value)) {\n vsb = !!value.spaceBefore;\n vcb = value.commentBefore;\n valueComment = value.comment;\n }\n else {\n vsb = false;\n vcb = null;\n valueComment = null;\n if (value && typeof value === 'object')\n value = doc.createNode(value);\n }\n ctx.implicitKey = false;\n if (!explicitKey && !keyComment && identity.isScalar(value))\n ctx.indentAtStart = str.length + 1;\n chompKeep = false;\n if (!indentSeq &&\n indentStep.length >= 2 &&\n !ctx.inFlow &&\n !explicitKey &&\n identity.isSeq(value) &&\n !value.flow &&\n !value.tag &&\n !value.anchor) {\n // If indentSeq === false, consider '- ' as part of indentation where possible\n ctx.indent = ctx.indent.substring(2);\n }\n let valueCommentDone = false;\n const valueStr = stringify.stringify(value, ctx, () => (valueCommentDone = true), () => (chompKeep = true));\n let ws = ' ';\n if (keyComment || vsb || vcb) {\n ws = vsb ? '\\n' : '';\n if (vcb) {\n const cs = commentString(vcb);\n ws += `\\n${stringifyComment.indentComment(cs, ctx.indent)}`;\n }\n if (valueStr === '' && !ctx.inFlow) {\n if (ws === '\\n')\n ws = '\\n\\n';\n }\n else {\n ws += `\\n${ctx.indent}`;\n }\n }\n else if (!explicitKey && identity.isCollection(value)) {\n const vs0 = valueStr[0];\n const nl0 = valueStr.indexOf('\\n');\n const hasNewline = nl0 !== -1;\n const flow = ctx.inFlow ?? value.flow ?? value.items.length === 0;\n if (hasNewline || !flow) {\n let hasPropsLine = false;\n if (hasNewline && (vs0 === '&' || vs0 === '!')) {\n let sp0 = valueStr.indexOf(' ');\n if (vs0 === '&' &&\n sp0 !== -1 &&\n sp0 < nl0 &&\n valueStr[sp0 + 1] === '!') {\n sp0 = valueStr.indexOf(' ', sp0 + 1);\n }\n if (sp0 === -1 || nl0 < sp0)\n hasPropsLine = true;\n }\n if (!hasPropsLine)\n ws = `\\n${ctx.indent}`;\n }\n }\n else if (valueStr === '' || valueStr[0] === '\\n') {\n ws = '';\n }\n str += ws + valueStr;\n if (ctx.inFlow) {\n if (valueCommentDone && onComment)\n onComment();\n }\n else if (valueComment && !valueCommentDone) {\n str += stringifyComment.lineComment(str, ctx.indent, commentString(valueComment));\n }\n else if (chompKeep && onChompKeep) {\n onChompKeep();\n }\n return str;\n}\n\nexports.stringifyPair = stringifyPair;\n","'use strict';\n\nvar Scalar = require('../nodes/Scalar.js');\nvar foldFlowLines = require('./foldFlowLines.js');\n\nconst getFoldOptions = (ctx, isBlock) => ({\n indentAtStart: isBlock ? 
ctx.indent.length : ctx.indentAtStart,\n lineWidth: ctx.options.lineWidth,\n minContentWidth: ctx.options.minContentWidth\n});\n// Also checks for lines starting with %, as parsing the output as YAML 1.1 will\n// presume that's starting a new document.\nconst containsDocumentMarker = (str) => /^(%|---|\\.\\.\\.)/m.test(str);\nfunction lineLengthOverLimit(str, lineWidth, indentLength) {\n if (!lineWidth || lineWidth < 0)\n return false;\n const limit = lineWidth - indentLength;\n const strLen = str.length;\n if (strLen <= limit)\n return false;\n for (let i = 0, start = 0; i < strLen; ++i) {\n if (str[i] === '\\n') {\n if (i - start > limit)\n return true;\n start = i + 1;\n if (strLen - start <= limit)\n return false;\n }\n }\n return true;\n}\nfunction doubleQuotedString(value, ctx) {\n const json = JSON.stringify(value);\n if (ctx.options.doubleQuotedAsJSON)\n return json;\n const { implicitKey } = ctx;\n const minMultiLineLength = ctx.options.doubleQuotedMinMultiLineLength;\n const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : '');\n let str = '';\n let start = 0;\n for (let i = 0, ch = json[i]; ch; ch = json[++i]) {\n if (ch === ' ' && json[i + 1] === '\\\\' && json[i + 2] === 'n') {\n // space before newline needs to be escaped to not be folded\n str += json.slice(start, i) + '\\\\ ';\n i += 1;\n start = i;\n ch = '\\\\';\n }\n if (ch === '\\\\')\n switch (json[i + 1]) {\n case 'u':\n {\n str += json.slice(start, i);\n const code = json.substr(i + 2, 4);\n switch (code) {\n case '0000':\n str += '\\\\0';\n break;\n case '0007':\n str += '\\\\a';\n break;\n case '000b':\n str += '\\\\v';\n break;\n case '001b':\n str += '\\\\e';\n break;\n case '0085':\n str += '\\\\N';\n break;\n case '00a0':\n str += '\\\\_';\n break;\n case '2028':\n str += '\\\\L';\n break;\n case '2029':\n str += '\\\\P';\n break;\n default:\n if (code.substr(0, 2) === '00')\n str += '\\\\x' + code.substr(2);\n else\n str += json.substr(i, 6);\n }\n i += 5;\n start = i + 1;\n }\n break;\n case 'n':\n if (implicitKey ||\n json[i + 2] === '\"' ||\n json.length < minMultiLineLength) {\n i += 1;\n }\n else {\n // folding will eat first newline\n str += json.slice(start, i) + '\\n\\n';\n while (json[i + 2] === '\\\\' &&\n json[i + 3] === 'n' &&\n json[i + 4] !== '\"') {\n str += '\\n';\n i += 2;\n }\n str += indent;\n // space after newline needs to be escaped to not be folded\n if (json[i + 2] === ' ')\n str += '\\\\';\n i += 1;\n start = i + 1;\n }\n break;\n default:\n i += 1;\n }\n }\n str = start ? str + json.slice(start) : json;\n return implicitKey\n ? str\n : foldFlowLines.foldFlowLines(str, indent, foldFlowLines.FOLD_QUOTED, getFoldOptions(ctx, false));\n}\nfunction singleQuotedString(value, ctx) {\n if (ctx.options.singleQuote === false ||\n (ctx.implicitKey && value.includes('\\n')) ||\n /[ \\t]\\n|\\n[ \\t]/.test(value) // single quoted string can't have leading or trailing whitespace around newline\n )\n return doubleQuotedString(value, ctx);\n const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : '');\n const res = \"'\" + value.replace(/'/g, \"''\").replace(/\\n+/g, `$&\\n${indent}`) + \"'\";\n return ctx.implicitKey\n ? 
res\n : foldFlowLines.foldFlowLines(res, indent, foldFlowLines.FOLD_FLOW, getFoldOptions(ctx, false));\n}\nfunction quotedString(value, ctx) {\n const { singleQuote } = ctx.options;\n let qs;\n if (singleQuote === false)\n qs = doubleQuotedString;\n else {\n const hasDouble = value.includes('\"');\n const hasSingle = value.includes(\"'\");\n if (hasDouble && !hasSingle)\n qs = singleQuotedString;\n else if (hasSingle && !hasDouble)\n qs = doubleQuotedString;\n else\n qs = singleQuote ? singleQuotedString : doubleQuotedString;\n }\n return qs(value, ctx);\n}\n// The negative lookbehind avoids a polynomial search,\n// but isn't supported yet on Safari: https://caniuse.com/js-regexp-lookbehind\nlet blockEndNewlines;\ntry {\n blockEndNewlines = new RegExp('(^|(?\\n';\n // determine chomping from whitespace at value end\n let chomp;\n let endStart;\n for (endStart = value.length; endStart > 0; --endStart) {\n const ch = value[endStart - 1];\n if (ch !== '\\n' && ch !== '\\t' && ch !== ' ')\n break;\n }\n let end = value.substring(endStart);\n const endNlPos = end.indexOf('\\n');\n if (endNlPos === -1) {\n chomp = '-'; // strip\n }\n else if (value === end || endNlPos !== end.length - 1) {\n chomp = '+'; // keep\n if (onChompKeep)\n onChompKeep();\n }\n else {\n chomp = ''; // clip\n }\n if (end) {\n value = value.slice(0, -end.length);\n if (end[end.length - 1] === '\\n')\n end = end.slice(0, -1);\n end = end.replace(blockEndNewlines, `$&${indent}`);\n }\n // determine indent indicator from whitespace at value start\n let startWithSpace = false;\n let startEnd;\n let startNlPos = -1;\n for (startEnd = 0; startEnd < value.length; ++startEnd) {\n const ch = value[startEnd];\n if (ch === ' ')\n startWithSpace = true;\n else if (ch === '\\n')\n startNlPos = startEnd;\n else\n break;\n }\n let start = value.substring(0, startNlPos < startEnd ? startNlPos + 1 : startEnd);\n if (start) {\n value = value.substring(start.length);\n start = start.replace(/\\n+/g, `$&${indent}`);\n }\n const indentSize = indent ? '2' : '1'; // root is at -1\n let header = (literal ? '|' : '>') + (startWithSpace ? indentSize : '') + chomp;\n if (comment) {\n header += ' ' + commentString(comment.replace(/ ?[\\r\\n]+/g, ' '));\n if (onComment)\n onComment();\n }\n if (literal) {\n value = value.replace(/\\n+/g, `$&${indent}`);\n return `${header}\\n${indent}${start}${value}${end}`;\n }\n value = value\n .replace(/\\n+/g, '\\n$&')\n .replace(/(?:^|\\n)([\\t ].*)(?:([\\n\\t ]*)\\n(?![\\n\\t ]))?/g, '$1$2') // more-indented lines aren't folded\n // ^ more-ind. ^ empty ^ capture next empty lines only at end of indent\n .replace(/\\n+/g, `$&${indent}`);\n const body = foldFlowLines.foldFlowLines(`${start}${value}${end}`, indent, foldFlowLines.FOLD_BLOCK, getFoldOptions(ctx, true));\n return `${header}\\n${indent}${body}`;\n}\nfunction plainString(item, ctx, onComment, onChompKeep) {\n const { type, value } = item;\n const { actualString, implicitKey, indent, indentStep, inFlow } = ctx;\n if ((implicitKey && value.includes('\\n')) ||\n (inFlow && /[[\\]{},]/.test(value))) {\n return quotedString(value, ctx);\n }\n if (!value ||\n /^[\\n\\t ,[\\]{}#&*!|>'\"%@`]|^[?-]$|^[?-][ \\t]|[\\n:][ \\t]|[ \\t]\\n|[\\n\\t ]#|[\\n\\t :]$/.test(value)) {\n // not allowed:\n // - empty string, '-' or '?'\n // - start with an indicator character (except [?:-]) or /[?-] /\n // - '\\n ', ': ' or ' \\n' anywhere\n // - '#' not preceded by a non-space char\n // - end with ' ' or ':'\n return implicitKey || inFlow || !value.includes('\\n')\n ? 
quotedString(value, ctx)\n : blockString(item, ctx, onComment, onChompKeep);\n }\n if (!implicitKey &&\n !inFlow &&\n type !== Scalar.Scalar.PLAIN &&\n value.includes('\\n')) {\n // Where allowed & type not set explicitly, prefer block style for multiline strings\n return blockString(item, ctx, onComment, onChompKeep);\n }\n if (containsDocumentMarker(value)) {\n if (indent === '') {\n ctx.forceBlockIndent = true;\n return blockString(item, ctx, onComment, onChompKeep);\n }\n else if (implicitKey && indent === indentStep) {\n return quotedString(value, ctx);\n }\n }\n const str = value.replace(/\\n+/g, `$&\\n${indent}`);\n // Verify that output will be parsed as a string, as e.g. plain numbers and\n // booleans get parsed with those types in v1.2 (e.g. '42', 'true' & '0.9e-3'),\n // and others in v1.1.\n if (actualString) {\n const test = (tag) => tag.default && tag.tag !== 'tag:yaml.org,2002:str' && tag.test?.test(str);\n const { compat, tags } = ctx.doc.schema;\n if (tags.some(test) || compat?.some(test))\n return quotedString(value, ctx);\n }\n return implicitKey\n ? str\n : foldFlowLines.foldFlowLines(str, indent, foldFlowLines.FOLD_FLOW, getFoldOptions(ctx, false));\n}\nfunction stringifyString(item, ctx, onComment, onChompKeep) {\n const { implicitKey, inFlow } = ctx;\n const ss = typeof item.value === 'string'\n ? item\n : Object.assign({}, item, { value: String(item.value) });\n let { type } = item;\n if (type !== Scalar.Scalar.QUOTE_DOUBLE) {\n // force double quotes on control characters & unpaired surrogates\n if (/[\\x00-\\x08\\x0b-\\x1f\\x7f-\\x9f\\u{D800}-\\u{DFFF}]/u.test(ss.value))\n type = Scalar.Scalar.QUOTE_DOUBLE;\n }\n const _stringify = (_type) => {\n switch (_type) {\n case Scalar.Scalar.BLOCK_FOLDED:\n case Scalar.Scalar.BLOCK_LITERAL:\n return implicitKey || inFlow\n ? quotedString(ss.value, ctx) // blocks are not valid inside flow containers\n : blockString(ss, ctx, onComment, onChompKeep);\n case Scalar.Scalar.QUOTE_DOUBLE:\n return doubleQuotedString(ss.value, ctx);\n case Scalar.Scalar.QUOTE_SINGLE:\n return singleQuotedString(ss.value, ctx);\n case Scalar.Scalar.PLAIN:\n return plainString(ss, ctx, onComment, onChompKeep);\n default:\n return null;\n }\n };\n let res = _stringify(type);\n if (res === null) {\n const { defaultKeyType, defaultStringType } = ctx.options;\n const t = (implicitKey && defaultKeyType) || defaultStringType;\n res = _stringify(t);\n if (res === null)\n throw new Error(`Unsupported default string type ${t}`);\n }\n return res;\n}\n\nexports.stringifyString = stringifyString;\n","'use strict';\n\nvar identity = require('./nodes/identity.js');\n\nconst BREAK = Symbol('break visit');\nconst SKIP = Symbol('skip children');\nconst REMOVE = Symbol('remove node');\n/**\n * Apply a visitor to an AST node or document.\n *\n * Walks through the tree (depth-first) starting from `node`, calling a\n * `visitor` function with three arguments:\n * - `key`: For sequence values and map `Pair`, the node's index in the\n * collection. 
Within a `Pair`, `'key'` or `'value'`, correspondingly.\n * `null` for the root node.\n * - `node`: The current node.\n * - `path`: The ancestry of the current node.\n *\n * The return value of the visitor may be used to control the traversal:\n * - `undefined` (default): Do nothing and continue\n * - `visit.SKIP`: Do not visit the children of this node, continue with next\n * sibling\n * - `visit.BREAK`: Terminate traversal completely\n * - `visit.REMOVE`: Remove the current node, then continue with the next one\n * - `Node`: Replace the current node, then continue by visiting it\n * - `number`: While iterating the items of a sequence or map, set the index\n * of the next step. This is useful especially if the index of the current\n * node has changed.\n *\n * If `visitor` is a single function, it will be called with all values\n * encountered in the tree, including e.g. `null` values. Alternatively,\n * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`,\n * `Alias` and `Scalar` node. To define the same visitor function for more than\n * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar)\n * and `Node` (alias, map, seq & scalar) targets. Of all these, only the most\n * specific defined one will be used for each node.\n */\nfunction visit(node, visitor) {\n const visitor_ = initVisitor(visitor);\n if (identity.isDocument(node)) {\n const cd = visit_(null, node.contents, visitor_, Object.freeze([node]));\n if (cd === REMOVE)\n node.contents = null;\n }\n else\n visit_(null, node, visitor_, Object.freeze([]));\n}\n// Without the `as symbol` casts, TS declares these in the `visit`\n// namespace using `var`, but then complains about that because\n// `unique symbol` must be `const`.\n/** Terminate visit traversal completely */\nvisit.BREAK = BREAK;\n/** Do not visit the children of the current node */\nvisit.SKIP = SKIP;\n/** Remove the current node */\nvisit.REMOVE = REMOVE;\nfunction visit_(key, node, visitor, path) {\n const ctrl = callVisitor(key, node, visitor, path);\n if (identity.isNode(ctrl) || identity.isPair(ctrl)) {\n replaceNode(key, path, ctrl);\n return visit_(key, ctrl, visitor, path);\n }\n if (typeof ctrl !== 'symbol') {\n if (identity.isCollection(node)) {\n path = Object.freeze(path.concat(node));\n for (let i = 0; i < node.items.length; ++i) {\n const ci = visit_(i, node.items[i], visitor, path);\n if (typeof ci === 'number')\n i = ci - 1;\n else if (ci === BREAK)\n return BREAK;\n else if (ci === REMOVE) {\n node.items.splice(i, 1);\n i -= 1;\n }\n }\n }\n else if (identity.isPair(node)) {\n path = Object.freeze(path.concat(node));\n const ck = visit_('key', node.key, visitor, path);\n if (ck === BREAK)\n return BREAK;\n else if (ck === REMOVE)\n node.key = null;\n const cv = visit_('value', node.value, visitor, path);\n if (cv === BREAK)\n return BREAK;\n else if (cv === REMOVE)\n node.value = null;\n }\n }\n return ctrl;\n}\n/**\n * Apply an async visitor to an AST node or document.\n *\n * Walks through the tree (depth-first) starting from `node`, calling a\n * `visitor` function with three arguments:\n * - `key`: For sequence values and map `Pair`, the node's index in the\n * collection. 
Within a `Pair`, `'key'` or `'value'`, correspondingly.\n * `null` for the root node.\n * - `node`: The current node.\n * - `path`: The ancestry of the current node.\n *\n * The return value of the visitor may be used to control the traversal:\n * - `Promise`: Must resolve to one of the following values\n * - `undefined` (default): Do nothing and continue\n * - `visit.SKIP`: Do not visit the children of this node, continue with next\n * sibling\n * - `visit.BREAK`: Terminate traversal completely\n * - `visit.REMOVE`: Remove the current node, then continue with the next one\n * - `Node`: Replace the current node, then continue by visiting it\n * - `number`: While iterating the items of a sequence or map, set the index\n * of the next step. This is useful especially if the index of the current\n * node has changed.\n *\n * If `visitor` is a single function, it will be called with all values\n * encountered in the tree, including e.g. `null` values. Alternatively,\n * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`,\n * `Alias` and `Scalar` node. To define the same visitor function for more than\n * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar)\n * and `Node` (alias, map, seq & scalar) targets. Of all these, only the most\n * specific defined one will be used for each node.\n */\nasync function visitAsync(node, visitor) {\n const visitor_ = initVisitor(visitor);\n if (identity.isDocument(node)) {\n const cd = await visitAsync_(null, node.contents, visitor_, Object.freeze([node]));\n if (cd === REMOVE)\n node.contents = null;\n }\n else\n await visitAsync_(null, node, visitor_, Object.freeze([]));\n}\n// Without the `as symbol` casts, TS declares these in the `visit`\n// namespace using `var`, but then complains about that because\n// `unique symbol` must be `const`.\n/** Terminate visit traversal completely */\nvisitAsync.BREAK = BREAK;\n/** Do not visit the children of the current node */\nvisitAsync.SKIP = SKIP;\n/** Remove the current node */\nvisitAsync.REMOVE = REMOVE;\nasync function visitAsync_(key, node, visitor, path) {\n const ctrl = await callVisitor(key, node, visitor, path);\n if (identity.isNode(ctrl) || identity.isPair(ctrl)) {\n replaceNode(key, path, ctrl);\n return visitAsync_(key, ctrl, visitor, path);\n }\n if (typeof ctrl !== 'symbol') {\n if (identity.isCollection(node)) {\n path = Object.freeze(path.concat(node));\n for (let i = 0; i < node.items.length; ++i) {\n const ci = await visitAsync_(i, node.items[i], visitor, path);\n if (typeof ci === 'number')\n i = ci - 1;\n else if (ci === BREAK)\n return BREAK;\n else if (ci === REMOVE) {\n node.items.splice(i, 1);\n i -= 1;\n }\n }\n }\n else if (identity.isPair(node)) {\n path = Object.freeze(path.concat(node));\n const ck = await visitAsync_('key', node.key, visitor, path);\n if (ck === BREAK)\n return BREAK;\n else if (ck === REMOVE)\n node.key = null;\n const cv = await visitAsync_('value', node.value, visitor, path);\n if (cv === BREAK)\n return BREAK;\n else if (cv === REMOVE)\n node.value = null;\n }\n }\n return ctrl;\n}\nfunction initVisitor(visitor) {\n if (typeof visitor === 'object' &&\n (visitor.Collection || visitor.Node || visitor.Value)) {\n return Object.assign({\n Alias: visitor.Node,\n Map: visitor.Node,\n Scalar: visitor.Node,\n Seq: visitor.Node\n }, visitor.Value && {\n Map: visitor.Value,\n Scalar: visitor.Value,\n Seq: visitor.Value\n }, visitor.Collection && {\n Map: visitor.Collection,\n Seq: visitor.Collection\n }, visitor);\n }\n return 
visitor;\n}\nfunction callVisitor(key, node, visitor, path) {\n if (typeof visitor === 'function')\n return visitor(key, node, path);\n if (identity.isMap(node))\n return visitor.Map?.(key, node, path);\n if (identity.isSeq(node))\n return visitor.Seq?.(key, node, path);\n if (identity.isPair(node))\n return visitor.Pair?.(key, node, path);\n if (identity.isScalar(node))\n return visitor.Scalar?.(key, node, path);\n if (identity.isAlias(node))\n return visitor.Alias?.(key, node, path);\n return undefined;\n}\nfunction replaceNode(key, path, node) {\n const parent = path[path.length - 1];\n if (identity.isCollection(parent)) {\n parent.items[key] = node;\n }\n else if (identity.isPair(parent)) {\n if (key === 'key')\n parent.key = node;\n else\n parent.value = node;\n }\n else if (identity.isDocument(parent)) {\n parent.contents = node;\n }\n else {\n const pt = identity.isAlias(parent) ? 'alias' : 'scalar';\n throw new Error(`Cannot replace node with ${pt} parent`);\n }\n}\n\nexports.visit = visit;\nexports.visitAsync = visitAsync;\n","// The module cache\nvar __webpack_module_cache__ = {};\n\n// The require function\nfunction __webpack_require__(moduleId) {\n\t// Check if module is in cache\n\tvar cachedModule = __webpack_module_cache__[moduleId];\n\tif (cachedModule !== undefined) {\n\t\treturn cachedModule.exports;\n\t}\n\t// Create a new module (and put it into the cache)\n\tvar module = __webpack_module_cache__[moduleId] = {\n\t\t// no module.id needed\n\t\t// no module.loaded needed\n\t\texports: {}\n\t};\n\n\t// Execute the module function\n\tvar threw = true;\n\ttry {\n\t\t__webpack_modules__[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\t\tthrew = false;\n\t} finally {\n\t\tif(threw) delete __webpack_module_cache__[moduleId];\n\t}\n\n\t// Return the exports of the module\n\treturn module.exports;\n}\n\n","\nif (typeof __webpack_require__ !== 'undefined') __webpack_require__.ab = __dirname + \"/\";","/**\n * The entrypoint for the action.\n */\nimport { run } from './main'\n\nrun()\n"],"names":[],"sourceRoot":""} \ No newline at end of file diff --git a/dist/licenses.txt b/dist/licenses.txt new file mode 100644 index 0000000..07dcdb1 --- /dev/null +++ b/dist/licenses.txt @@ -0,0 +1,90 @@ +@actions/core +MIT +The MIT License (MIT) + +Copyright 2019 GitHub + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +@actions/http-client +MIT +Actions Http Client for Node.js + +Copyright (c) GitHub, Inc. + +All rights reserved. 
+ +MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and +associated documentation files (the "Software"), to deal in the Software without restriction, +including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + +tunnel +MIT +The MIT License (MIT) + +Copyright (c) 2012 Koichi Kobayashi + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + + +uuid +MIT +The MIT License (MIT) + +Copyright (c) 2010-2020 Robert Kieffer and other contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ + +yaml +ISC +Copyright Eemeli Aro + +Permission to use, copy, modify, and/or distribute this software for any purpose +with or without fee is hereby granted, provided that the above copyright notice +and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF +THIS SOFTWARE. diff --git a/dist/sourcemap-register.js b/dist/sourcemap-register.js new file mode 100644 index 0000000..466141d --- /dev/null +++ b/dist/sourcemap-register.js @@ -0,0 +1 @@ +(()=>{var e={650:e=>{var r=Object.prototype.toString;var n=typeof Buffer.alloc==="function"&&typeof Buffer.allocUnsafe==="function"&&typeof Buffer.from==="function";function isArrayBuffer(e){return r.call(e).slice(8,-1)==="ArrayBuffer"}function fromArrayBuffer(e,r,t){r>>>=0;var o=e.byteLength-r;if(o<0){throw new RangeError("'offset' is out of bounds")}if(t===undefined){t=o}else{t>>>=0;if(t>o){throw new RangeError("'length' is out of bounds")}}return n?Buffer.from(e.slice(r,r+t)):new Buffer(new Uint8Array(e.slice(r,r+t)))}function fromString(e,r){if(typeof r!=="string"||r===""){r="utf8"}if(!Buffer.isEncoding(r)){throw new TypeError('"encoding" must be a valid string encoding')}return n?Buffer.from(e,r):new Buffer(e,r)}function bufferFrom(e,r,t){if(typeof e==="number"){throw new TypeError('"value" argument must not be a number')}if(isArrayBuffer(e)){return fromArrayBuffer(e,r,t)}if(typeof e==="string"){return fromString(e,r)}return n?Buffer.from(e):new Buffer(e)}e.exports=bufferFrom},274:(e,r,n)=>{var t=n(339);var o=Object.prototype.hasOwnProperty;var i=typeof Map!=="undefined";function ArraySet(){this._array=[];this._set=i?new Map:Object.create(null)}ArraySet.fromArray=function ArraySet_fromArray(e,r){var n=new ArraySet;for(var t=0,o=e.length;t=0){return r}}else{var n=t.toSetString(e);if(o.call(this._set,n)){return this._set[n]}}throw new Error('"'+e+'" is not in the set.')};ArraySet.prototype.at=function ArraySet_at(e){if(e>=0&&e{var t=n(190);var o=5;var i=1<>1;return r?-n:n}r.encode=function base64VLQ_encode(e){var r="";var n;var i=toVLQSigned(e);do{n=i&a;i>>>=o;if(i>0){n|=u}r+=t.encode(n)}while(i>0);return r};r.decode=function base64VLQ_decode(e,r,n){var i=e.length;var s=0;var l=0;var c,p;do{if(r>=i){throw new Error("Expected more digits in base 64 VLQ value.")}p=t.decode(e.charCodeAt(r++));if(p===-1){throw new Error("Invalid base64 digit: "+e.charAt(r-1))}c=!!(p&u);p&=a;s=s+(p<{var n="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".split("");r.encode=function(e){if(0<=e&&e{r.GREATEST_LOWER_BOUND=1;r.LEAST_UPPER_BOUND=2;function recursiveSearch(e,n,t,o,i,a){var u=Math.floor((n-e)/2)+e;var s=i(t,o[u],true);if(s===0){return u}else if(s>0){if(n-u>1){return recursiveSearch(u,n,t,o,i,a)}if(a==r.LEAST_UPPER_BOUND){return n1){return recursiveSearch(e,u,t,o,i,a)}if(a==r.LEAST_UPPER_BOUND){return u}else{return e<0?-1:e}}}r.search=function search(e,n,t,o){if(n.length===0){return-1}var i=recursiveSearch(-1,n.length,e,n,t,o||r.GREATEST_LOWER_BOUND);if(i<0){return-1}while(i-1>=0){if(t(n[i],n[i-1],true)!==0){break}--i}return i}},680:(e,r,n)=>{var t=n(339);function 
generatedPositionAfter(e,r){var n=e.generatedLine;var o=r.generatedLine;var i=e.generatedColumn;var a=r.generatedColumn;return o>n||o==n&&a>=i||t.compareByGeneratedPositionsInflated(e,r)<=0}function MappingList(){this._array=[];this._sorted=true;this._last={generatedLine:-1,generatedColumn:0}}MappingList.prototype.unsortedForEach=function MappingList_forEach(e,r){this._array.forEach(e,r)};MappingList.prototype.add=function MappingList_add(e){if(generatedPositionAfter(this._last,e)){this._last=e;this._array.push(e)}else{this._sorted=false;this._array.push(e)}};MappingList.prototype.toArray=function MappingList_toArray(){if(!this._sorted){this._array.sort(t.compareByGeneratedPositionsInflated);this._sorted=true}return this._array};r.H=MappingList},758:(e,r)=>{function swap(e,r,n){var t=e[r];e[r]=e[n];e[n]=t}function randomIntInRange(e,r){return Math.round(e+Math.random()*(r-e))}function doQuickSort(e,r,n,t){if(n{var t;var o=n(339);var i=n(345);var a=n(274).I;var u=n(449);var s=n(758).U;function SourceMapConsumer(e,r){var n=e;if(typeof e==="string"){n=o.parseSourceMapInput(e)}return n.sections!=null?new IndexedSourceMapConsumer(n,r):new BasicSourceMapConsumer(n,r)}SourceMapConsumer.fromSourceMap=function(e,r){return BasicSourceMapConsumer.fromSourceMap(e,r)};SourceMapConsumer.prototype._version=3;SourceMapConsumer.prototype.__generatedMappings=null;Object.defineProperty(SourceMapConsumer.prototype,"_generatedMappings",{configurable:true,enumerable:true,get:function(){if(!this.__generatedMappings){this._parseMappings(this._mappings,this.sourceRoot)}return this.__generatedMappings}});SourceMapConsumer.prototype.__originalMappings=null;Object.defineProperty(SourceMapConsumer.prototype,"_originalMappings",{configurable:true,enumerable:true,get:function(){if(!this.__originalMappings){this._parseMappings(this._mappings,this.sourceRoot)}return this.__originalMappings}});SourceMapConsumer.prototype._charIsMappingSeparator=function SourceMapConsumer_charIsMappingSeparator(e,r){var n=e.charAt(r);return n===";"||n===","};SourceMapConsumer.prototype._parseMappings=function SourceMapConsumer_parseMappings(e,r){throw new Error("Subclasses must implement _parseMappings")};SourceMapConsumer.GENERATED_ORDER=1;SourceMapConsumer.ORIGINAL_ORDER=2;SourceMapConsumer.GREATEST_LOWER_BOUND=1;SourceMapConsumer.LEAST_UPPER_BOUND=2;SourceMapConsumer.prototype.eachMapping=function SourceMapConsumer_eachMapping(e,r,n){var t=r||null;var i=n||SourceMapConsumer.GENERATED_ORDER;var a;switch(i){case SourceMapConsumer.GENERATED_ORDER:a=this._generatedMappings;break;case SourceMapConsumer.ORIGINAL_ORDER:a=this._originalMappings;break;default:throw new Error("Unknown order of iteration.")}var u=this.sourceRoot;a.map((function(e){var r=e.source===null?null:this._sources.at(e.source);r=o.computeSourceURL(u,r,this._sourceMapURL);return{source:r,generatedLine:e.generatedLine,generatedColumn:e.generatedColumn,originalLine:e.originalLine,originalColumn:e.originalColumn,name:e.name===null?null:this._names.at(e.name)}}),this).forEach(e,t)};SourceMapConsumer.prototype.allGeneratedPositionsFor=function SourceMapConsumer_allGeneratedPositionsFor(e){var r=o.getArg(e,"line");var n={source:o.getArg(e,"source"),originalLine:r,originalColumn:o.getArg(e,"column",0)};n.source=this._findSourceIndex(n.source);if(n.source<0){return[]}var t=[];var a=this._findMapping(n,this._originalMappings,"originalLine","originalColumn",o.compareByOriginalPositions,i.LEAST_UPPER_BOUND);if(a>=0){var u=this._originalMappings[a];if(e.column===undefined){var 
s=u.originalLine;while(u&&u.originalLine===s){t.push({line:o.getArg(u,"generatedLine",null),column:o.getArg(u,"generatedColumn",null),lastColumn:o.getArg(u,"lastGeneratedColumn",null)});u=this._originalMappings[++a]}}else{var l=u.originalColumn;while(u&&u.originalLine===r&&u.originalColumn==l){t.push({line:o.getArg(u,"generatedLine",null),column:o.getArg(u,"generatedColumn",null),lastColumn:o.getArg(u,"lastGeneratedColumn",null)});u=this._originalMappings[++a]}}}return t};r.SourceMapConsumer=SourceMapConsumer;function BasicSourceMapConsumer(e,r){var n=e;if(typeof e==="string"){n=o.parseSourceMapInput(e)}var t=o.getArg(n,"version");var i=o.getArg(n,"sources");var u=o.getArg(n,"names",[]);var s=o.getArg(n,"sourceRoot",null);var l=o.getArg(n,"sourcesContent",null);var c=o.getArg(n,"mappings");var p=o.getArg(n,"file",null);if(t!=this._version){throw new Error("Unsupported version: "+t)}if(s){s=o.normalize(s)}i=i.map(String).map(o.normalize).map((function(e){return s&&o.isAbsolute(s)&&o.isAbsolute(e)?o.relative(s,e):e}));this._names=a.fromArray(u.map(String),true);this._sources=a.fromArray(i,true);this._absoluteSources=this._sources.toArray().map((function(e){return o.computeSourceURL(s,e,r)}));this.sourceRoot=s;this.sourcesContent=l;this._mappings=c;this._sourceMapURL=r;this.file=p}BasicSourceMapConsumer.prototype=Object.create(SourceMapConsumer.prototype);BasicSourceMapConsumer.prototype.consumer=SourceMapConsumer;BasicSourceMapConsumer.prototype._findSourceIndex=function(e){var r=e;if(this.sourceRoot!=null){r=o.relative(this.sourceRoot,r)}if(this._sources.has(r)){return this._sources.indexOf(r)}var n;for(n=0;n1){v.source=l+_[1];l+=_[1];v.originalLine=i+_[2];i=v.originalLine;v.originalLine+=1;v.originalColumn=a+_[3];a=v.originalColumn;if(_.length>4){v.name=c+_[4];c+=_[4]}}m.push(v);if(typeof v.originalLine==="number"){d.push(v)}}}s(m,o.compareByGeneratedPositionsDeflated);this.__generatedMappings=m;s(d,o.compareByOriginalPositions);this.__originalMappings=d};BasicSourceMapConsumer.prototype._findMapping=function SourceMapConsumer_findMapping(e,r,n,t,o,a){if(e[n]<=0){throw new TypeError("Line must be greater than or equal to 1, got "+e[n])}if(e[t]<0){throw new TypeError("Column must be greater than or equal to 0, got "+e[t])}return i.search(e,r,o,a)};BasicSourceMapConsumer.prototype.computeColumnSpans=function SourceMapConsumer_computeColumnSpans(){for(var e=0;e=0){var t=this._generatedMappings[n];if(t.generatedLine===r.generatedLine){var i=o.getArg(t,"source",null);if(i!==null){i=this._sources.at(i);i=o.computeSourceURL(this.sourceRoot,i,this._sourceMapURL)}var a=o.getArg(t,"name",null);if(a!==null){a=this._names.at(a)}return{source:i,line:o.getArg(t,"originalLine",null),column:o.getArg(t,"originalColumn",null),name:a}}}return{source:null,line:null,column:null,name:null}};BasicSourceMapConsumer.prototype.hasContentsOfAllSources=function BasicSourceMapConsumer_hasContentsOfAllSources(){if(!this.sourcesContent){return false}return this.sourcesContent.length>=this._sources.size()&&!this.sourcesContent.some((function(e){return e==null}))};BasicSourceMapConsumer.prototype.sourceContentFor=function SourceMapConsumer_sourceContentFor(e,r){if(!this.sourcesContent){return null}var n=this._findSourceIndex(e);if(n>=0){return this.sourcesContent[n]}var t=e;if(this.sourceRoot!=null){t=o.relative(this.sourceRoot,t)}var i;if(this.sourceRoot!=null&&(i=o.urlParse(this.sourceRoot))){var a=t.replace(/^file:\/\//,"");if(i.scheme=="file"&&this._sources.has(a)){return 
this.sourcesContent[this._sources.indexOf(a)]}if((!i.path||i.path=="/")&&this._sources.has("/"+t)){return this.sourcesContent[this._sources.indexOf("/"+t)]}}if(r){return null}else{throw new Error('"'+t+'" is not in the SourceMap.')}};BasicSourceMapConsumer.prototype.generatedPositionFor=function SourceMapConsumer_generatedPositionFor(e){var r=o.getArg(e,"source");r=this._findSourceIndex(r);if(r<0){return{line:null,column:null,lastColumn:null}}var n={source:r,originalLine:o.getArg(e,"line"),originalColumn:o.getArg(e,"column")};var t=this._findMapping(n,this._originalMappings,"originalLine","originalColumn",o.compareByOriginalPositions,o.getArg(e,"bias",SourceMapConsumer.GREATEST_LOWER_BOUND));if(t>=0){var i=this._originalMappings[t];if(i.source===n.source){return{line:o.getArg(i,"generatedLine",null),column:o.getArg(i,"generatedColumn",null),lastColumn:o.getArg(i,"lastGeneratedColumn",null)}}}return{line:null,column:null,lastColumn:null}};t=BasicSourceMapConsumer;function IndexedSourceMapConsumer(e,r){var n=e;if(typeof e==="string"){n=o.parseSourceMapInput(e)}var t=o.getArg(n,"version");var i=o.getArg(n,"sections");if(t!=this._version){throw new Error("Unsupported version: "+t)}this._sources=new a;this._names=new a;var u={line:-1,column:0};this._sections=i.map((function(e){if(e.url){throw new Error("Support for url field in sections not implemented.")}var n=o.getArg(e,"offset");var t=o.getArg(n,"line");var i=o.getArg(n,"column");if(t{var t=n(449);var o=n(339);var i=n(274).I;var a=n(680).H;function SourceMapGenerator(e){if(!e){e={}}this._file=o.getArg(e,"file",null);this._sourceRoot=o.getArg(e,"sourceRoot",null);this._skipValidation=o.getArg(e,"skipValidation",false);this._sources=new i;this._names=new i;this._mappings=new a;this._sourcesContents=null}SourceMapGenerator.prototype._version=3;SourceMapGenerator.fromSourceMap=function SourceMapGenerator_fromSourceMap(e){var r=e.sourceRoot;var n=new SourceMapGenerator({file:e.file,sourceRoot:r});e.eachMapping((function(e){var t={generated:{line:e.generatedLine,column:e.generatedColumn}};if(e.source!=null){t.source=e.source;if(r!=null){t.source=o.relative(r,t.source)}t.original={line:e.originalLine,column:e.originalColumn};if(e.name!=null){t.name=e.name}}n.addMapping(t)}));e.sources.forEach((function(t){var i=t;if(r!==null){i=o.relative(r,t)}if(!n._sources.has(i)){n._sources.add(i)}var a=e.sourceContentFor(t);if(a!=null){n.setSourceContent(t,a)}}));return n};SourceMapGenerator.prototype.addMapping=function SourceMapGenerator_addMapping(e){var r=o.getArg(e,"generated");var n=o.getArg(e,"original",null);var t=o.getArg(e,"source",null);var i=o.getArg(e,"name",null);if(!this._skipValidation){this._validateMapping(r,n,t,i)}if(t!=null){t=String(t);if(!this._sources.has(t)){this._sources.add(t)}}if(i!=null){i=String(i);if(!this._names.has(i)){this._names.add(i)}}this._mappings.add({generatedLine:r.line,generatedColumn:r.column,originalLine:n!=null&&n.line,originalColumn:n!=null&&n.column,source:t,name:i})};SourceMapGenerator.prototype.setSourceContent=function SourceMapGenerator_setSourceContent(e,r){var n=e;if(this._sourceRoot!=null){n=o.relative(this._sourceRoot,n)}if(r!=null){if(!this._sourcesContents){this._sourcesContents=Object.create(null)}this._sourcesContents[o.toSetString(n)]=r}else if(this._sourcesContents){delete this._sourcesContents[o.toSetString(n)];if(Object.keys(this._sourcesContents).length===0){this._sourcesContents=null}}};SourceMapGenerator.prototype.applySourceMap=function SourceMapGenerator_applySourceMap(e,r,n){var 
t=r;if(r==null){if(e.file==null){throw new Error("SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, "+'or the source map\'s "file" property. Both were omitted.')}t=e.file}var a=this._sourceRoot;if(a!=null){t=o.relative(a,t)}var u=new i;var s=new i;this._mappings.unsortedForEach((function(r){if(r.source===t&&r.originalLine!=null){var i=e.originalPositionFor({line:r.originalLine,column:r.originalColumn});if(i.source!=null){r.source=i.source;if(n!=null){r.source=o.join(n,r.source)}if(a!=null){r.source=o.relative(a,r.source)}r.originalLine=i.line;r.originalColumn=i.column;if(i.name!=null){r.name=i.name}}}var l=r.source;if(l!=null&&!u.has(l)){u.add(l)}var c=r.name;if(c!=null&&!s.has(c)){s.add(c)}}),this);this._sources=u;this._names=s;e.sources.forEach((function(r){var t=e.sourceContentFor(r);if(t!=null){if(n!=null){r=o.join(n,r)}if(a!=null){r=o.relative(a,r)}this.setSourceContent(r,t)}}),this)};SourceMapGenerator.prototype._validateMapping=function SourceMapGenerator_validateMapping(e,r,n,t){if(r&&typeof r.line!=="number"&&typeof r.column!=="number"){throw new Error("original.line and original.column are not numbers -- you probably meant to omit "+"the original mapping entirely and only map the generated position. If so, pass "+"null for the original mapping instead of an object with empty or null values.")}if(e&&"line"in e&&"column"in e&&e.line>0&&e.column>=0&&!r&&!n&&!t){return}else if(e&&"line"in e&&"column"in e&&r&&"line"in r&&"column"in r&&e.line>0&&e.column>=0&&r.line>0&&r.column>=0&&n){return}else{throw new Error("Invalid mapping: "+JSON.stringify({generated:e,source:n,original:r,name:t}))}};SourceMapGenerator.prototype._serializeMappings=function SourceMapGenerator_serializeMappings(){var e=0;var r=1;var n=0;var i=0;var a=0;var u=0;var s="";var l;var c;var p;var f;var g=this._mappings.toArray();for(var h=0,d=g.length;h0){if(!o.compareByGeneratedPositionsInflated(c,g[h-1])){continue}l+=","}}l+=t.encode(c.generatedColumn-e);e=c.generatedColumn;if(c.source!=null){f=this._sources.indexOf(c.source);l+=t.encode(f-u);u=f;l+=t.encode(c.originalLine-1-i);i=c.originalLine-1;l+=t.encode(c.originalColumn-n);n=c.originalColumn;if(c.name!=null){p=this._names.indexOf(c.name);l+=t.encode(p-a);a=p}}s+=l}return s};SourceMapGenerator.prototype._generateSourcesContent=function SourceMapGenerator_generateSourcesContent(e,r){return e.map((function(e){if(!this._sourcesContents){return null}if(r!=null){e=o.relative(r,e)}var n=o.toSetString(e);return Object.prototype.hasOwnProperty.call(this._sourcesContents,n)?this._sourcesContents[n]:null}),this)};SourceMapGenerator.prototype.toJSON=function SourceMapGenerator_toJSON(){var e={version:this._version,sources:this._sources.toArray(),names:this._names.toArray(),mappings:this._serializeMappings()};if(this._file!=null){e.file=this._file}if(this._sourceRoot!=null){e.sourceRoot=this._sourceRoot}if(this._sourcesContents){e.sourcesContent=this._generateSourcesContent(e.sources,e.sourceRoot)}return e};SourceMapGenerator.prototype.toString=function SourceMapGenerator_toString(){return JSON.stringify(this.toJSON())};r.h=SourceMapGenerator},351:(e,r,n)=>{var t;var o=n(591).h;var i=n(339);var a=/(\r?\n)/;var u=10;var s="$$$isSourceNode$$$";function SourceNode(e,r,n,t,o){this.children=[];this.sourceContents={};this.line=e==null?null:e;this.column=r==null?null:r;this.source=n==null?null:n;this.name=o==null?null:o;this[s]=true;if(t!=null)this.add(t)}SourceNode.fromStringWithSourceMap=function SourceNode_fromStringWithSourceMap(e,r,n){var t=new 
SourceNode;var o=e.split(a);var u=0;var shiftNextLine=function(){var e=getNextLine();var r=getNextLine()||"";return e+r;function getNextLine(){return u=0;r--){this.prepend(e[r])}}else if(e[s]||typeof e==="string"){this.children.unshift(e)}else{throw new TypeError("Expected a SourceNode, string, or an array of SourceNodes and strings. Got "+e)}return this};SourceNode.prototype.walk=function SourceNode_walk(e){var r;for(var n=0,t=this.children.length;n0){r=[];for(n=0;n{function getArg(e,r,n){if(r in e){return e[r]}else if(arguments.length===3){return n}else{throw new Error('"'+r+'" is a required argument.')}}r.getArg=getArg;var n=/^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.-]*)(?::(\d+))?(.*)$/;var t=/^data:.+\,.+$/;function urlParse(e){var r=e.match(n);if(!r){return null}return{scheme:r[1],auth:r[2],host:r[3],port:r[4],path:r[5]}}r.urlParse=urlParse;function urlGenerate(e){var r="";if(e.scheme){r+=e.scheme+":"}r+="//";if(e.auth){r+=e.auth+"@"}if(e.host){r+=e.host}if(e.port){r+=":"+e.port}if(e.path){r+=e.path}return r}r.urlGenerate=urlGenerate;function normalize(e){var n=e;var t=urlParse(e);if(t){if(!t.path){return e}n=t.path}var o=r.isAbsolute(n);var i=n.split(/\/+/);for(var a,u=0,s=i.length-1;s>=0;s--){a=i[s];if(a==="."){i.splice(s,1)}else if(a===".."){u++}else if(u>0){if(a===""){i.splice(s+1,u);u=0}else{i.splice(s,2);u--}}}n=i.join("/");if(n===""){n=o?"/":"."}if(t){t.path=n;return urlGenerate(t)}return n}r.normalize=normalize;function join(e,r){if(e===""){e="."}if(r===""){r="."}var n=urlParse(r);var o=urlParse(e);if(o){e=o.path||"/"}if(n&&!n.scheme){if(o){n.scheme=o.scheme}return urlGenerate(n)}if(n||r.match(t)){return r}if(o&&!o.host&&!o.path){o.host=r;return urlGenerate(o)}var i=r.charAt(0)==="/"?r:normalize(e.replace(/\/+$/,"")+"/"+r);if(o){o.path=i;return urlGenerate(o)}return i}r.join=join;r.isAbsolute=function(e){return e.charAt(0)==="/"||n.test(e)};function relative(e,r){if(e===""){e="."}e=e.replace(/\/$/,"");var n=0;while(r.indexOf(e+"/")!==0){var t=e.lastIndexOf("/");if(t<0){return r}e=e.slice(0,t);if(e.match(/^([^\/]+:\/)?\/*$/)){return r}++n}return Array(n+1).join("../")+r.substr(e.length+1)}r.relative=relative;var o=function(){var e=Object.create(null);return!("__proto__"in e)}();function identity(e){return e}function toSetString(e){if(isProtoString(e)){return"$"+e}return e}r.toSetString=o?identity:toSetString;function fromSetString(e){if(isProtoString(e)){return e.slice(1)}return e}r.fromSetString=o?identity:fromSetString;function isProtoString(e){if(!e){return false}var r=e.length;if(r<9){return false}if(e.charCodeAt(r-1)!==95||e.charCodeAt(r-2)!==95||e.charCodeAt(r-3)!==111||e.charCodeAt(r-4)!==116||e.charCodeAt(r-5)!==111||e.charCodeAt(r-6)!==114||e.charCodeAt(r-7)!==112||e.charCodeAt(r-8)!==95||e.charCodeAt(r-9)!==95){return false}for(var n=r-10;n>=0;n--){if(e.charCodeAt(n)!==36){return false}}return true}function compareByOriginalPositions(e,r,n){var t=strcmp(e.source,r.source);if(t!==0){return t}t=e.originalLine-r.originalLine;if(t!==0){return t}t=e.originalColumn-r.originalColumn;if(t!==0||n){return t}t=e.generatedColumn-r.generatedColumn;if(t!==0){return t}t=e.generatedLine-r.generatedLine;if(t!==0){return t}return strcmp(e.name,r.name)}r.compareByOriginalPositions=compareByOriginalPositions;function compareByGeneratedPositionsDeflated(e,r,n){var t=e.generatedLine-r.generatedLine;if(t!==0){return t}t=e.generatedColumn-r.generatedColumn;if(t!==0||n){return t}t=strcmp(e.source,r.source);if(t!==0){return t}t=e.originalLine-r.originalLine;if(t!==0){return 
t}t=e.originalColumn-r.originalColumn;if(t!==0){return t}return strcmp(e.name,r.name)}r.compareByGeneratedPositionsDeflated=compareByGeneratedPositionsDeflated;function strcmp(e,r){if(e===r){return 0}if(e===null){return 1}if(r===null){return-1}if(e>r){return 1}return-1}function compareByGeneratedPositionsInflated(e,r){var n=e.generatedLine-r.generatedLine;if(n!==0){return n}n=e.generatedColumn-r.generatedColumn;if(n!==0){return n}n=strcmp(e.source,r.source);if(n!==0){return n}n=e.originalLine-r.originalLine;if(n!==0){return n}n=e.originalColumn-r.originalColumn;if(n!==0){return n}return strcmp(e.name,r.name)}r.compareByGeneratedPositionsInflated=compareByGeneratedPositionsInflated;function parseSourceMapInput(e){return JSON.parse(e.replace(/^\)]}'[^\n]*\n/,""))}r.parseSourceMapInput=parseSourceMapInput;function computeSourceURL(e,r,n){r=r||"";if(e){if(e[e.length-1]!=="/"&&r[0]!=="/"){e+="/"}r=e+r}if(n){var t=urlParse(n);if(!t){throw new Error("sourceMapURL could not be parsed")}if(t.path){var o=t.path.lastIndexOf("/");if(o>=0){t.path=t.path.substring(0,o+1)}}r=join(urlGenerate(t),r)}return normalize(r)}r.computeSourceURL=computeSourceURL},997:(e,r,n)=>{n(591).h;r.SourceMapConsumer=n(952).SourceMapConsumer;n(351)},284:(e,r,n)=>{e=n.nmd(e);var t=n(997).SourceMapConsumer;var o=n(17);var i;try{i=n(147);if(!i.existsSync||!i.readFileSync){i=null}}catch(e){}var a=n(650);function dynamicRequire(e,r){return e.require(r)}var u=false;var s=false;var l=false;var c="auto";var p={};var f={};var g=/^data:application\/json[^,]+base64,/;var h=[];var d=[];function isInBrowser(){if(c==="browser")return true;if(c==="node")return false;return typeof window!=="undefined"&&typeof XMLHttpRequest==="function"&&!(window.require&&window.module&&window.process&&window.process.type==="renderer")}function hasGlobalProcessEventEmitter(){return typeof process==="object"&&process!==null&&typeof process.on==="function"}function globalProcessVersion(){if(typeof process==="object"&&process!==null){return process.version}else{return""}}function globalProcessStderr(){if(typeof process==="object"&&process!==null){return process.stderr}}function globalProcessExit(e){if(typeof process==="object"&&process!==null&&typeof process.exit==="function"){return process.exit(e)}}function handlerExec(e){return function(r){for(var n=0;n"}var n=this.getLineNumber();if(n!=null){r+=":"+n;var t=this.getColumnNumber();if(t){r+=":"+t}}}var o="";var i=this.getFunctionName();var a=true;var u=this.isConstructor();var s=!(this.isToplevel()||u);if(s){var l=this.getTypeName();if(l==="[object Object]"){l="null"}var c=this.getMethodName();if(i){if(l&&i.indexOf(l)!=0){o+=l+"."}o+=i;if(c&&i.indexOf("."+c)!=i.length-c.length-1){o+=" [as "+c+"]"}}else{o+=l+"."+(c||"")}}else if(u){o+="new "+(i||"")}else if(i){o+=i}else{o+=r;a=false}if(a){o+=" ("+r+")"}return o}function cloneCallSite(e){var r={};Object.getOwnPropertyNames(Object.getPrototypeOf(e)).forEach((function(n){r[n]=/^(?:is|get)/.test(n)?function(){return e[n].call(e)}:e[n]}));r.toString=CallSiteToString;return r}function wrapCallSite(e,r){if(r===undefined){r={nextPosition:null,curPosition:null}}if(e.isNative()){r.curPosition=null;return e}var n=e.getFileName()||e.getScriptNameOrSourceURL();if(n){var t=e.getLineNumber();var o=e.getColumnNumber()-1;var i=/^v(10\.1[6-9]|10\.[2-9][0-9]|10\.[0-9]{3,}|1[2-9]\d*|[2-9]\d|\d{3,}|11\.11)/;var a=i.test(globalProcessVersion())?0:62;if(t===1&&o>a&&!isInBrowser()&&!e.isEval()){o-=a}var 
u=mapSourcePosition({source:n,line:t,column:o});r.curPosition=u;e=cloneCallSite(e);var s=e.getFunctionName;e.getFunctionName=function(){if(r.nextPosition==null){return s()}return r.nextPosition.name||s()};e.getFileName=function(){return u.source};e.getLineNumber=function(){return u.line};e.getColumnNumber=function(){return u.column+1};e.getScriptNameOrSourceURL=function(){return u.source};return e}var l=e.isEval()&&e.getEvalOrigin();if(l){l=mapEvalOrigin(l);e=cloneCallSite(e);e.getEvalOrigin=function(){return l};return e}return e}function prepareStackTrace(e,r){if(l){p={};f={}}var n=e.name||"Error";var t=e.message||"";var o=n+": "+t;var i={nextPosition:null,curPosition:null};var a=[];for(var u=r.length-1;u>=0;u--){a.push("\n at "+wrapCallSite(r[u],i));i.nextPosition=i.curPosition}i.curPosition=i.nextPosition=null;return o+a.reverse().join("")}function getErrorSource(e){var r=/\n at [^(]+ \((.*):(\d+):(\d+)\)/.exec(e.stack);if(r){var n=r[1];var t=+r[2];var o=+r[3];var a=p[n];if(!a&&i&&i.existsSync(n)){try{a=i.readFileSync(n,"utf8")}catch(e){a=""}}if(a){var u=a.split(/(?:\r\n|\r|\n)/)[t-1];if(u){return n+":"+t+"\n"+u+"\n"+new Array(o).join(" ")+"^"}}}return null}function printErrorAndExit(e){var r=getErrorSource(e);var n=globalProcessStderr();if(n&&n._handle&&n._handle.setBlocking){n._handle.setBlocking(true)}if(r){console.error();console.error(r)}console.error(e.stack);globalProcessExit(1)}function shimEmitUncaughtException(){var e=process.emit;process.emit=function(r){if(r==="uncaughtException"){var n=arguments[1]&&arguments[1].stack;var t=this.listeners(r).length>0;if(n&&!t){return printErrorAndExit(arguments[1])}}return e.apply(this,arguments)}}var S=h.slice(0);var _=d.slice(0);r.wrapCallSite=wrapCallSite;r.getErrorSource=getErrorSource;r.mapSourcePosition=mapSourcePosition;r.retrieveSourceMap=v;r.install=function(r){r=r||{};if(r.environment){c=r.environment;if(["node","browser","auto"].indexOf(c)===-1){throw new Error("environment "+c+" was unknown. 
Available options are {auto, browser, node}")}}if(r.retrieveFile){if(r.overrideRetrieveFile){h.length=0}h.unshift(r.retrieveFile)}if(r.retrieveSourceMap){if(r.overrideRetrieveSourceMap){d.length=0}d.unshift(r.retrieveSourceMap)}if(r.hookRequire&&!isInBrowser()){var n=dynamicRequire(e,"module");var t=n.prototype._compile;if(!t.__sourceMapSupport){n.prototype._compile=function(e,r){p[r]=e;f[r]=undefined;return t.call(this,e,r)};n.prototype._compile.__sourceMapSupport=true}}if(!l){l="emptyCacheBetweenOperations"in r?r.emptyCacheBetweenOperations:false}if(!u){u=true;Error.prepareStackTrace=prepareStackTrace}if(!s){var o="handleUncaughtExceptions"in r?r.handleUncaughtExceptions:true;try{var i=dynamicRequire(e,"worker_threads");if(i.isMainThread===false){o=false}}catch(e){}if(o&&hasGlobalProcessEventEmitter()){s=true;shimEmitUncaughtException()}}};r.resetRetrieveHandlers=function(){h.length=0;d.length=0;h=S.slice(0);d=_.slice(0);v=handlerExec(d);m=handlerExec(h)}},147:e=>{"use strict";e.exports=require("fs")},17:e=>{"use strict";e.exports=require("path")}};var r={};function __webpack_require__(n){var t=r[n];if(t!==undefined){return t.exports}var o=r[n]={id:n,loaded:false,exports:{}};var i=true;try{e[n](o,o.exports,__webpack_require__);i=false}finally{if(i)delete r[n]}o.loaded=true;return o.exports}(()=>{__webpack_require__.nmd=e=>{e.paths=[];if(!e.children)e.children=[];return e}})();if(typeof __webpack_require__!=="undefined")__webpack_require__.ab=__dirname+"/";var n={};(()=>{__webpack_require__(284).install()})();module.exports=n})(); \ No newline at end of file