From aa82f530a58dff42e906fe30e7669deac175cd61 Mon Sep 17 00:00:00 2001
From: Julien Catania
Date: Fri, 12 Jan 2024 10:00:41 +0100
Subject: [PATCH] update dist/index.js

---
 dist/index.js | 63223 ++++++++++++++++++++++++++++++++++--------------
 1 file changed, 45716 insertions(+), 17507 deletions(-)

diff --git a/dist/index.js b/dist/index.js
index 68a0d0b..47a1b8d 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -1,4 +1,3 @@
-require('./sourcemap-register.js');module.exports =
 /******/ (() => { // webpackBootstrap
 /******/ var __webpack_modules__ = ({

@@ -9,7 +8,11 @@ require('./sourcemap-register.js');module.exports =

 var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
     if (k2 === undefined) k2 = k;
-    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
 }) : (function(o, m, k, k2) {
     if (k2 === undefined) k2 = k;
     o[k2] = m[k];
@@ -26,6 +29,15 @@ var __importStar = (this && this.__importStar) || function (mod) {
     __setModuleDefault(result, mod);
     return result;
 };
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
 var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ?
mod : { "default": mod }; }; @@ -67,61 +79,62 @@ class ArtifactProvider { this.artifactNameMatch = (str) => str === this.artifact; this.getReportName = () => this.name; } - this.fileNameMatch = picomatch_1.default(pattern); + this.fileNameMatch = (0, picomatch_1.default)(pattern); } - async load() { - const result = {}; - const resp = await this.octokit.actions.listWorkflowRunArtifacts({ - ...github.context.repo, - run_id: this.runId - }); - if (resp.data.artifacts.length === 0) { - core.warning(`No artifacts found in run ${this.runId}`); - return {}; - } - const artifacts = resp.data.artifacts.filter(a => this.artifactNameMatch(a.name)); - if (artifacts.length === 0) { - core.warning(`No artifact matches ${this.artifact}`); - return {}; - } - for (const art of artifacts) { - const fileName = `${art.name}.zip`; - await github_utils_1.downloadArtifact(this.octokit, art.id, fileName, this.token); - core.startGroup(`Reading archive ${fileName}`); - try { - const reportName = this.getReportName(art.name); - core.info(`Report name: ${reportName}`); - const files = []; - const zip = new adm_zip_1.default(fileName); - for (const entry of zip.getEntries()) { - const file = entry.entryName; - if (entry.isDirectory) { - core.info(`Skipping ${file}: entry is a directory`); - continue; + load() { + return __awaiter(this, void 0, void 0, function* () { + const result = {}; + const resp = yield this.octokit.rest.actions.listWorkflowRunArtifacts(Object.assign(Object.assign({}, github.context.repo), { run_id: this.runId })); + if (resp.data.artifacts.length === 0) { + core.warning(`No artifacts found in run ${this.runId}`); + return {}; + } + const artifacts = resp.data.artifacts.filter(a => this.artifactNameMatch(a.name)); + if (artifacts.length === 0) { + core.warning(`No artifact matches ${this.artifact}`); + return {}; + } + for (const art of artifacts) { + const fileName = `${art.name}.zip`; + yield (0, github_utils_1.downloadArtifact)(this.octokit, art.id, fileName, this.token); + core.startGroup(`Reading archive ${fileName}`); + try { + const reportName = this.getReportName(art.name); + core.info(`Report name: ${reportName}`); + const files = []; + const zip = new adm_zip_1.default(fileName); + for (const entry of zip.getEntries()) { + const file = entry.entryName; + if (entry.isDirectory) { + core.info(`Skipping ${file}: entry is a directory`); + continue; + } + if (!this.fileNameMatch(file)) { + core.info(`Skipping ${file}: filename does not match pattern`); + continue; + } + const content = zip.readAsText(entry); + files.push({ file, content }); + core.info(`Read ${file}: ${content.length} chars`); } - if (!this.fileNameMatch(file)) { - core.info(`Skipping ${file}: filename does not match pattern`); - continue; + if (result[reportName]) { + result[reportName].push(...files); + } + else { + result[reportName] = files; } - const content = zip.readAsText(entry); - files.push({ file, content }); - core.info(`Read ${file}: ${content.length} chars`); - } - if (result[reportName]) { - result[reportName].push(...files); } - else { - result[reportName] = files; + finally { + core.endGroup(); } } - finally { - core.endGroup(); - } - } - return result; + return result; + }); } - async listTrackedFiles() { - return github_utils_1.listFiles(this.octokit, this.sha); + listTrackedFiles() { + return __awaiter(this, void 0, void 0, function* () { + return (0, github_utils_1.listFiles)(this.octokit, this.sha); + }); } } exports.ArtifactProvider = ArtifactProvider; @@ -136,7 +149,11 @@ exports.ArtifactProvider = 
ArtifactProvider; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -153,12 +170,21 @@ var __importStar = (this && this.__importStar) || function (mod) { __setModuleDefault(result, mod); return result; }; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.LocalFileProvider = void 0; -const fs = __importStar(__nccwpck_require__(5747)); +const fs = __importStar(__nccwpck_require__(7147)); const fast_glob_1 = __importDefault(__nccwpck_require__(3664)); const git_1 = __nccwpck_require__(9844); class LocalFileProvider { @@ -166,19 +192,23 @@ class LocalFileProvider { this.name = name; this.pattern = pattern; } - async load() { - const result = []; - for (const pat of this.pattern) { - const paths = await fast_glob_1.default(pat, { dot: true }); - for (const file of paths) { - const content = await fs.promises.readFile(file, { encoding: 'utf8' }); - result.push({ file, content }); + load() { + return __awaiter(this, void 0, void 0, function* () { + const result = []; + for (const pat of this.pattern) { + const paths = yield (0, fast_glob_1.default)(pat, { dot: true }); + for (const file of paths) { + const content = yield fs.promises.readFile(file, { encoding: 'utf8' }); + result.push({ file, content }); + } } - } - return { [this.name]: result }; + return { [this.name]: result }; + }); } - async listTrackedFiles() { - return git_1.listFiles(); + listTrackedFiles() { + return __awaiter(this, void 0, void 0, function* () { + return (0, git_1.listFiles)(); + }); } } exports.LocalFileProvider = LocalFileProvider; @@ -193,7 +223,11 @@ exports.LocalFileProvider = LocalFileProvider; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -210,6 +244,15 @@ var __importStar = (this && this.__importStar) || function (mod) { __setModuleDefault(result, mod); return result; }; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; Object.defineProperty(exports, "__esModule", ({ value: true })); const core = __importStar(__nccwpck_require__(2186)); const github = __importStar(__nccwpck_require__(5438)); @@ -222,16 +265,22 @@ const dotnet_trx_parser_1 = __nccwpck_require__(2664); const java_junit_parser_1 = __nccwpck_require__(676); const jest_junit_parser_1 = __nccwpck_require__(1113); const mocha_json_parser_1 = __nccwpck_require__(6043); +const swift_xunit_parser_1 = __nccwpck_require__(5366); const path_utils_1 = __nccwpck_require__(4070); const github_utils_1 = __nccwpck_require__(3522); -async function main() { - try { - const testReporter = new TestReporter(); - await testReporter.run(); - } - catch (error) { - core.setFailed(error.message); - } +function main() { + return __awaiter(this, void 0, void 0, function* () { + try { + const testReporter = new TestReporter(); + yield testReporter.run(); + } + catch (error) { + if (error instanceof Error) + core.setFailed(error); + else + core.setFailed(JSON.stringify(error)); + } + }); } class TestReporter { constructor() { @@ -244,10 +293,11 @@ class TestReporter { this.listTests = core.getInput('list-tests', { required: true }); this.maxAnnotations = parseInt(core.getInput('max-annotations', { required: true })); this.failOnError = core.getInput('fail-on-error', { required: true }) === 'true'; + this.failOnEmpty = core.getInput('fail-on-empty', { required: true }) === 'true'; this.workDirInput = core.getInput('working-directory', { required: false }); this.onlySummary = core.getInput('only-summary', { required: false }) === 'true'; this.token = core.getInput('token', { required: true }); - this.context = github_utils_1.getCheckRunContext(); + this.context = (0, github_utils_1.getCheckRunContext)(); this.octokit = github.getOctokit(this.token); if (this.listSuites !== 'all' && this.listSuites !== 'failed') { core.setFailed(`Input parameter 'list-suites' has invalid value`); @@ -262,112 +312,113 @@ class TestReporter { return; } } - async run() { - if (this.workDirInput) { - core.info(`Changing directory to '${this.workDirInput}'`); - process.chdir(this.workDirInput); - } - core.info(`Check runs will be created with SHA=${this.context.sha}`); - // Split path pattern by ',' and optionally convert all backslashes to forward slashes - // fast-glob (micromatch) always interprets backslashes as escape characters instead of directory separators - const pathsList = this.path.split(','); - const pattern = this.pathReplaceBackslashes ? 
pathsList.map(path_utils_1.normalizeFilePath) : pathsList; - const inputProvider = this.artifact - ? new artifact_provider_1.ArtifactProvider(this.octokit, this.artifact, this.name, pattern, this.context.sha, this.context.runId, this.token) - : new local_file_provider_1.LocalFileProvider(this.name, pattern); - const parseErrors = this.maxAnnotations > 0; - const trackedFiles = await inputProvider.listTrackedFiles(); - const workDir = this.artifact ? undefined : path_utils_1.normalizeDirPath(process.cwd(), true); - core.info(`Found ${trackedFiles.length} files tracked by GitHub`); - const options = { - workDir, - trackedFiles, - parseErrors - }; - core.info(`Using test report parser '${this.reporter}'`); - const parser = this.getParser(this.reporter, options); - const results = []; - const input = await inputProvider.load(); - for (const [reportName, files] of Object.entries(input)) { - try { - core.startGroup(`Creating test report ${reportName}`); - const tr = await this.createReport(parser, reportName, files); - results.push(...tr); - } - finally { - core.endGroup(); - } - } - const isFailed = results.some(tr => tr.result === 'failed'); - const conclusion = isFailed ? 'failure' : 'success'; - const passed = results.reduce((sum, tr) => sum + tr.passed, 0); - const failed = results.reduce((sum, tr) => sum + tr.failed, 0); - const skipped = results.reduce((sum, tr) => sum + tr.skipped, 0); - const time = results.reduce((sum, tr) => sum + tr.time, 0); - core.setOutput('conclusion', conclusion); - core.setOutput('passed', passed); - core.setOutput('failed', failed); - core.setOutput('skipped', skipped); - core.setOutput('time', time); - if (this.failOnError && isFailed) { - core.setFailed(`Failed test were found and 'fail-on-error' option is set to ${this.failOnError}`); - return; - } - if (results.length === 0) { - core.setFailed(`No test report files were found`); - return; - } - } - async createReport(parser, name, files) { - if (files.length === 0) { - core.warning(`No file matches path ${this.path}`); - return []; - } - const results = []; - for (const { file, content } of files) { - core.info(`Processing test results from ${file}`); - const tr = await parser.parse(file, content); - results.push(tr); - } - core.info(`Creating check run ${name}`); - const createResp = await this.octokit.checks.create({ - head_sha: this.context.sha, - name, - status: 'in_progress', - output: { - title: name, - summary: '' - }, - ...github.context.repo + run() { + return __awaiter(this, void 0, void 0, function* () { + if (this.workDirInput) { + core.info(`Changing directory to '${this.workDirInput}'`); + process.chdir(this.workDirInput); + } + core.info(`Check runs will be created with SHA=${this.context.sha}`); + // Split path pattern by ',' and optionally convert all backslashes to forward slashes + // fast-glob (micromatch) always interprets backslashes as escape characters instead of directory separators + const pathsList = this.path.split(','); + const pattern = this.pathReplaceBackslashes ? pathsList.map(path_utils_1.normalizeFilePath) : pathsList; + const inputProvider = this.artifact + ? new artifact_provider_1.ArtifactProvider(this.octokit, this.artifact, this.name, pattern, this.context.sha, this.context.runId, this.token) + : new local_file_provider_1.LocalFileProvider(this.name, pattern); + const parseErrors = this.maxAnnotations > 0; + const trackedFiles = parseErrors ? yield inputProvider.listTrackedFiles() : []; + const workDir = this.artifact ? 
undefined : (0, path_utils_1.normalizeDirPath)(process.cwd(), true); + if (parseErrors) + core.info(`Found ${trackedFiles.length} files tracked by GitHub`); + const options = { + workDir, + trackedFiles, + parseErrors + }; + core.info(`Using test report parser '${this.reporter}'`); + const parser = this.getParser(this.reporter, options); + const results = []; + const input = yield inputProvider.load(); + for (const [reportName, files] of Object.entries(input)) { + try { + core.startGroup(`Creating test report ${reportName}`); + const tr = yield this.createReport(parser, reportName, files); + results.push(...tr); + } + finally { + core.endGroup(); + } + } + const isFailed = results.some(tr => tr.result === 'failed'); + const conclusion = isFailed ? 'failure' : 'success'; + const passed = results.reduce((sum, tr) => sum + tr.passed, 0); + const failed = results.reduce((sum, tr) => sum + tr.failed, 0); + const skipped = results.reduce((sum, tr) => sum + tr.skipped, 0); + const time = results.reduce((sum, tr) => sum + tr.time, 0); + core.setOutput('conclusion', conclusion); + core.setOutput('passed', passed); + core.setOutput('failed', failed); + core.setOutput('skipped', skipped); + core.setOutput('time', time); + if (this.failOnError && isFailed) { + core.setFailed(`Failed test were found and 'fail-on-error' option is set to ${this.failOnError}`); + return; + } + if (results.length === 0 && this.failOnEmpty) { + core.setFailed(`No test report files were found`); + return; + } }); - core.info('Creating report summary'); - const { listSuites, listTests, onlySummary } = this; - const baseUrl = createResp.data.html_url; - const summary = get_report_1.getReport(results, { listSuites, listTests, baseUrl, onlySummary }); - core.info('Creating annotations'); - const annotations = get_annotations_1.getAnnotations(results, this.maxAnnotations); - const isFailed = results.some(tr => tr.result === 'failed'); - const conclusion = isFailed ? 
'failure' : 'success'; - const passed = results.reduce((sum, tr) => sum + tr.passed, 0); - const failed = results.reduce((sum, tr) => sum + tr.failed, 0); - const skipped = results.reduce((sum, tr) => sum + tr.skipped, 0); - const shortSummary = `${passed} passed, ${failed} failed and ${skipped} skipped `; - core.info(`Updating check run conclusion (${conclusion}) and output`); - const resp = await this.octokit.checks.update({ - check_run_id: createResp.data.id, - conclusion, - status: 'completed', - output: { - title: shortSummary, - summary, - annotations - }, - ...github.context.repo + } + createReport(parser, name, files) { + return __awaiter(this, void 0, void 0, function* () { + if (files.length === 0) { + core.warning(`No file matches path ${this.path}`); + return []; + } + core.info(`Processing test results for check run ${name}`); + const results = []; + for (const { file, content } of files) { + try { + const tr = yield parser.parse(file, content); + results.push(tr); + } + catch (error) { + core.error(`Processing test results from ${file} failed`); + throw error; + } + } + core.info(`Creating check run ${name}`); + const createResp = yield this.octokit.rest.checks.create(Object.assign({ head_sha: this.context.sha, name, status: 'in_progress', output: { + title: name, + summary: '' + } }, github.context.repo)); + core.info('Creating report summary'); + const { listSuites, listTests, onlySummary } = this; + const baseUrl = createResp.data.html_url; + const summary = (0, get_report_1.getReport)(results, { listSuites, listTests, baseUrl, onlySummary }); + core.info('Creating annotations'); + const annotations = (0, get_annotations_1.getAnnotations)(results, this.maxAnnotations); + const isFailed = this.failOnError && results.some(tr => tr.result === 'failed'); + const conclusion = isFailed ? 
'failure' : 'success'; + const passed = results.reduce((sum, tr) => sum + tr.passed, 0); + const failed = results.reduce((sum, tr) => sum + tr.failed, 0); + const skipped = results.reduce((sum, tr) => sum + tr.skipped, 0); + const shortSummary = `${passed} passed, ${failed} failed and ${skipped} skipped `; + core.info(`Updating check run conclusion (${conclusion}) and output`); + const resp = yield this.octokit.rest.checks.update(Object.assign({ check_run_id: createResp.data.id, conclusion, status: 'completed', output: { + title: shortSummary, + summary, + annotations + } }, github.context.repo)); + core.info(`Check run create response: ${resp.status}`); + core.info(`Check run URL: ${resp.data.url}`); + core.info(`Check run HTML: ${resp.data.html_url}`); + core.setOutput('url', resp.data.url); + core.setOutput('url_html', resp.data.html_url); + return results; }); - core.info(`Check run create response: ${resp.status}`); - core.info(`Check run URL: ${resp.data.url}`); - core.info(`Check run HTML: ${resp.data.html_url}`); - return results; } getParser(reporter, options) { switch (reporter) { @@ -383,6 +434,8 @@ class TestReporter { return new jest_junit_parser_1.JestJunitParser(options); case 'mocha-json': return new mocha_json_parser_1.MochaJsonParser(options); + case 'swift-xunit': + return new swift_xunit_parser_1.SwiftXunitParser(options); default: throw new Error(`Input variable 'reporter' is set to invalid value '${reporter}'`); } @@ -394,10 +447,19 @@ main(); /***/ }), /***/ 4528: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.DartJsonParser = void 0; const path_utils_1 = __nccwpck_require__(4070); @@ -451,10 +513,12 @@ class DartJsonParser { this.options = options; this.sdk = sdk; } - async parse(path, content) { - const tr = this.getTestRun(path, content); - const result = this.getTestRunResult(tr); - return Promise.resolve(result); + parse(path, content) { + return __awaiter(this, void 0, void 0, function* () { + const tr = this.getTestRun(path, content); + const result = this.getTestRunResult(tr); + return Promise.resolve(result); + }); } getTestRun(path, content) { const lines = content.split(/\n\r?/g); @@ -467,7 +531,8 @@ class DartJsonParser { return JSON.parse(str); } catch (e) { - const col = e.columnNumber !== undefined ? `:${e.columnNumber}` : ''; + const errWithCol = e; + const col = errWithCol.columnNumber !== undefined ? 
`:${errWithCol.columnNumber}` : ''; throw new Error(`Invalid JSON at ${path}:${i + 1}${col}\n\n${e}`); } }) @@ -477,29 +542,29 @@ class DartJsonParser { const suites = {}; const tests = {}; for (const evt of events) { - if (dart_json_types_1.isSuiteEvent(evt)) { + if ((0, dart_json_types_1.isSuiteEvent)(evt)) { suites[evt.suite.id] = new TestSuite(evt.suite); } - else if (dart_json_types_1.isGroupEvent(evt)) { + else if ((0, dart_json_types_1.isGroupEvent)(evt)) { suites[evt.group.suiteID].groups[evt.group.id] = new TestGroup(evt.group); } - else if (dart_json_types_1.isTestStartEvent(evt) && evt.test.url !== null) { + else if ((0, dart_json_types_1.isTestStartEvent)(evt) && evt.test.url !== null) { const test = new TestCase(evt); const suite = suites[evt.test.suiteID]; const group = suite.groups[evt.test.groupIDs[evt.test.groupIDs.length - 1]]; group.tests.push(test); tests[evt.test.id] = test; } - else if (dart_json_types_1.isTestDoneEvent(evt) && !evt.hidden && tests[evt.testID]) { + else if ((0, dart_json_types_1.isTestDoneEvent)(evt) && !evt.hidden && tests[evt.testID]) { tests[evt.testID].testDone = evt; } - else if (dart_json_types_1.isErrorEvent(evt) && tests[evt.testID]) { + else if ((0, dart_json_types_1.isErrorEvent)(evt) && tests[evt.testID]) { tests[evt.testID].error = evt; } - else if (dart_json_types_1.isMessageEvent(evt) && tests[evt.testID]) { + else if ((0, dart_json_types_1.isMessageEvent)(evt) && tests[evt.testID]) { tests[evt.testID].print.push(evt); } - else if (dart_json_types_1.isDoneEvent(evt)) { + else if ((0, dart_json_types_1.isDoneEvent)(evt)) { success = evt.success; totalTime = evt.time; } @@ -584,7 +649,7 @@ class DartJsonParser { const match = str.match(re); if (match !== null) { const [_, pathStr, lineStr] = match; - const path = path_utils_1.normalizeFilePath(this.getRelativePath(pathStr)); + const path = (0, path_utils_1.normalizeFilePath)(this.getRelativePath(pathStr)); if (trackedFiles.includes(path)) { const line = parseInt(lineStr); return { path, line }; @@ -597,7 +662,7 @@ class DartJsonParser { if (path.startsWith(prefix)) { path = path.substr(prefix.length); } - path = path_utils_1.normalizeFilePath(path); + path = (0, path_utils_1.normalizeFilePath)(path); const workDir = this.getWorkDir(path); if (workDir !== undefined && path.startsWith(workDir)) { path = path.substr(workDir.length); @@ -606,7 +671,7 @@ class DartJsonParser { } getWorkDir(path) { var _a, _b; - return ((_b = (_a = this.options.workDir) !== null && _a !== void 0 ? _a : this.assumedWorkDir) !== null && _b !== void 0 ? _b : (this.assumedWorkDir = path_utils_1.getBasePath(path, this.options.trackedFiles))); + return ((_b = (_a = this.options.workDir) !== null && _a !== void 0 ? _a : this.assumedWorkDir) !== null && _b !== void 0 ? _b : (this.assumedWorkDir = (0, path_utils_1.getBasePath)(path, this.options.trackedFiles))); } } exports.DartJsonParser = DartJsonParser; @@ -655,10 +720,19 @@ exports.isMessageEvent = isMessageEvent; /***/ }), /***/ 2664: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.DotnetTrxParser = void 0; const xml2js_1 = __nccwpck_require__(6189); @@ -693,20 +767,24 @@ class DotnetTrxParser { constructor(options) { this.options = options; } - async parse(path, content) { - const trx = await this.getTrxReport(path, content); - const tc = this.getTestClasses(trx); - const tr = this.getTestRunResult(path, trx, tc); - tr.sort(true); - return tr; + parse(path, content) { + return __awaiter(this, void 0, void 0, function* () { + const trx = yield this.getTrxReport(path, content); + const tc = this.getTestClasses(trx); + const tr = this.getTestRunResult(path, trx, tc); + tr.sort(true); + return tr; + }); } - async getTrxReport(path, content) { - try { - return (await xml2js_1.parseStringPromise(content)); - } - catch (e) { - throw new Error(`Invalid XML at ${path}\n\n${e}`); - } + getTrxReport(path, content) { + return __awaiter(this, void 0, void 0, function* () { + try { + return (yield (0, xml2js_1.parseStringPromise)(content)); + } + catch (e) { + throw new Error(`Invalid XML at ${path}\n\n${e}`); + } + }); } getTestClasses(trx) { if (trx.TestRun.TestDefinitions === undefined || trx.TestRun.Results === undefined) { @@ -732,7 +810,7 @@ class DotnetTrxParser { } const error = this.getErrorInfo(r.result); const durationAttr = r.result.$.duration; - const duration = durationAttr ? parse_utils_1.parseNetDuration(durationAttr) : 0; + const duration = durationAttr ? (0, parse_utils_1.parseNetDuration)(durationAttr) : 0; const resultTestName = r.result.$.testName; const testName = resultTestName.startsWith(className) && resultTestName[className.length] === '.' ? resultTestName.substr(className.length + 1) @@ -745,7 +823,7 @@ class DotnetTrxParser { } getTestRunResult(path, trx, testClasses) { const times = trx.TestRun.Times[0].$; - const totalTime = parse_utils_1.parseIsoDate(times.finish).getTime() - parse_utils_1.parseIsoDate(times.start).getTime(); + const totalTime = (0, parse_utils_1.parseIsoDate)(times.finish).getTime() - (0, parse_utils_1.parseIsoDate)(times.start).getTime(); const suites = testClasses.map(testClass => { const tests = testClass.tests.map(test => { const error = this.getError(test); @@ -800,7 +878,7 @@ class DotnetTrxParser { const match = str.match(re); if (match !== null) { const [_, fileStr, lineStr] = match; - const filePath = path_utils_1.normalizeFilePath(fileStr); + const filePath = (0, path_utils_1.normalizeFilePath)(fileStr); const workDir = this.getWorkDir(filePath); if (workDir) { const file = filePath.substr(workDir.length); @@ -814,7 +892,7 @@ class DotnetTrxParser { } getWorkDir(path) { var _a, _b; - return ((_b = (_a = this.options.workDir) !== null && _a !== void 0 ? _a : this.assumedWorkDir) !== null && _b !== void 0 ? _b : (this.assumedWorkDir = path_utils_1.getBasePath(path, this.options.trackedFiles))); + return ((_b = (_a = this.options.workDir) !== null && _a !== void 0 ? _a : this.assumedWorkDir) !== null && _b !== void 0 ? 
_b : (this.assumedWorkDir = (0, path_utils_1.getBasePath)(path, this.options.trackedFiles))); } } exports.DotnetTrxParser = DotnetTrxParser; @@ -829,7 +907,11 @@ exports.DotnetTrxParser = DotnetTrxParser; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -846,10 +928,20 @@ var __importStar = (this && this.__importStar) || function (mod) { __setModuleDefault(result, mod); return result; }; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.JavaJunitParser = void 0; -const path = __importStar(__nccwpck_require__(5622)); +const path = __importStar(__nccwpck_require__(1017)); const xml2js_1 = __nccwpck_require__(6189); +const java_stack_trace_element_parser_1 = __nccwpck_require__(5775); const path_utils_1 = __nccwpck_require__(4070); const test_results_1 = __nccwpck_require__(2768); class JavaJunitParser { @@ -861,38 +953,42 @@ class JavaJunitParser { for (const filePath of options.trackedFiles) { const fileName = path.basename(filePath); const files = (_a = this.trackedFiles[fileName]) !== null && _a !== void 0 ? 
_a : (this.trackedFiles[fileName] = []); - files.push(path_utils_1.normalizeFilePath(filePath)); + files.push((0, path_utils_1.normalizeFilePath)(filePath)); } } - async parse(filePath, content) { - const reportOrSuite = await this.getJunitReport(filePath, content); - const isReport = reportOrSuite.testsuites !== undefined; - // XML might contain: - // - multiple suites under root node - // - single as root node - let ju; - if (isReport) { - ju = reportOrSuite; - } - else { - // Make it behave the same way as if suite was inside root node - const suite = reportOrSuite.testsuite; - ju = { - testsuites: { - $: { time: suite.$.time }, - testsuite: [suite] - } - }; - } - return this.getTestRunResult(filePath, ju); + parse(filePath, content) { + return __awaiter(this, void 0, void 0, function* () { + const reportOrSuite = yield this.getJunitReport(filePath, content); + const isReport = reportOrSuite.testsuites !== undefined; + // XML might contain: + // - multiple suites under root node + // - single as root node + let ju; + if (isReport) { + ju = reportOrSuite; + } + else { + // Make it behave the same way as if suite was inside root node + const suite = reportOrSuite.testsuite; + ju = { + testsuites: { + $: { time: suite.$.time }, + testsuite: [suite] + } + }; + } + return this.getTestRunResult(filePath, ju); + }); } - async getJunitReport(filePath, content) { - try { - return await xml2js_1.parseStringPromise(content); - } - catch (e) { - throw new Error(`Invalid XML at ${filePath}\n\n${e}`); - } + getJunitReport(filePath, content) { + return __awaiter(this, void 0, void 0, function* () { + try { + return yield (0, xml2js_1.parseStringPromise)(content); + } + catch (e) { + throw new Error(`Invalid XML at ${filePath}\n\n${e}`); + } + }); } getTestRunResult(filePath, junit) { var _a; @@ -957,10 +1053,12 @@ class JavaJunitParser { const details = typeof failure === 'object' ? 
failure._ : failure; let filePath; let line; - const src = this.exceptionThrowSource(details); - if (src) { - filePath = src.filePath; - line = src.line; + if (details != null) { + const src = this.exceptionThrowSource(details); + if (src) { + filePath = src.filePath; + line = src.line; + } } return { path: filePath, @@ -971,11 +1069,10 @@ class JavaJunitParser { } exceptionThrowSource(stackTrace) { const lines = stackTrace.split(/\r?\n/); - const re = /^at (.*)\((.*):(\d+)\)$/; for (const str of lines) { - const match = str.match(re); - if (match !== null) { - const [_, tracePath, fileName, lineStr] = match; + const stackTraceElement = (0, java_stack_trace_element_parser_1.parseStackTraceElement)(str); + if (stackTraceElement) { + const { tracePath, fileName, lineStr } = stackTraceElement; const filePath = this.getFilePath(tracePath, fileName); if (filePath !== undefined) { const line = parseInt(lineStr); @@ -1028,13 +1125,65 @@ class JavaJunitParser { exports.JavaJunitParser = JavaJunitParser; +/***/ }), + +/***/ 5775: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.parseStackTraceElement = void 0; +// classloader and module name are optional: +// at //(:) +// https://github.com/eclipse-openj9/openj9/issues/11452#issuecomment-754946992 +const re = /^\s*at (\S+\/\S*\/)?(.*)\((.*):(\d+)\)$/; +function parseStackTraceElement(stackTraceLine) { + const match = stackTraceLine.match(re); + if (match !== null) { + const [_, maybeClassLoaderAndModuleNameAndVersion, tracePath, fileName, lineStr] = match; + const { classLoader, moduleNameAndVersion } = parseClassLoaderAndModule(maybeClassLoaderAndModuleNameAndVersion); + return { + classLoader, + moduleNameAndVersion, + tracePath, + fileName, + lineStr + }; + } + return undefined; +} +exports.parseStackTraceElement = parseStackTraceElement; +function parseClassLoaderAndModule(maybeClassLoaderAndModuleNameAndVersion) { + if (maybeClassLoaderAndModuleNameAndVersion) { + const res = maybeClassLoaderAndModuleNameAndVersion.split('/'); + const classLoader = res[0]; + let moduleNameAndVersion = res[1]; + if (moduleNameAndVersion === '') { + moduleNameAndVersion = undefined; + } + return { classLoader, moduleNameAndVersion }; + } + return { classLoader: undefined, moduleNameAndVersion: undefined }; +} + + /***/ }), /***/ 1113: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.JestJunitParser = void 0; const xml2js_1 = __nccwpck_require__(6189); @@ -1045,23 +1194,27 @@ class JestJunitParser { constructor(options) { this.options = options; } - async parse(path, content) { - const ju = await this.getJunitReport(path, content); - return this.getTestRunResult(path, ju); + parse(path, content) { + return __awaiter(this, void 0, void 0, function* () { + const ju = yield this.getJunitReport(path, content); + return this.getTestRunResult(path, ju); + }); } - async getJunitReport(path, content) { - try { - return (await xml2js_1.parseStringPromise(content)); - } - catch (e) { - throw new Error(`Invalid XML at ${path}\n\n${e}`); - } + getJunitReport(path, content) { + return __awaiter(this, void 0, void 0, function* () { + try { + return (yield (0, xml2js_1.parseStringPromise)(content)); + } + catch (e) { + throw new Error(`Invalid XML at ${path}\n\n${e}`); + } + }); } getTestRunResult(path, junit) { const suites = junit.testsuites.testsuite === undefined ? [] : junit.testsuites.testsuite.map(ts => { - const name = ts.$.name.trim(); + const name = this.escapeCharacters(ts.$.name.trim()); const time = parseFloat(ts.$.time) * 1000; const sr = new test_results_1.TestSuiteResult(name, this.getGroups(ts), time); return sr; @@ -1070,6 +1223,9 @@ class JestJunitParser { return new test_results_1.TestRunResult(path, suites, time); } getGroups(suite) { + if (!suite.testcase) { + return []; + } const groups = []; for (const tc of suite.testcase) { let grp = groups.find(g => g.describe === tc.$.classname); @@ -1104,7 +1260,7 @@ class JestJunitParser { const details = tc.failure[0]; let path; let line; - const src = node_utils_1.getExceptionSource(details, this.options.trackedFiles, file => this.getRelativePath(file)); + const src = (0, node_utils_1.getExceptionSource)(details, this.options.trackedFiles, file => this.getRelativePath(file)); if (src) { path = src.path; line = src.line; @@ -1116,7 +1272,7 @@ class JestJunitParser { }; } getRelativePath(path) { - path = path_utils_1.normalizeFilePath(path); + path = (0, path_utils_1.normalizeFilePath)(path); const workDir = this.getWorkDir(path); if (workDir !== undefined && path.startsWith(workDir)) { path = path.substr(workDir.length); @@ -1125,7 +1281,10 @@ class JestJunitParser { } getWorkDir(path) { var _a, _b; - return ((_b = (_a = this.options.workDir) !== null && _a !== void 0 ? _a : this.assumedWorkDir) !== null && _b !== void 0 ? _b : (this.assumedWorkDir = path_utils_1.getBasePath(path, this.options.trackedFiles))); + return ((_b = (_a = this.options.workDir) !== null && _a !== void 0 ? _a : this.assumedWorkDir) !== null && _b !== void 0 ? _b : (this.assumedWorkDir = (0, path_utils_1.getBasePath)(path, this.options.trackedFiles))); + } + escapeCharacters(s) { + return s.replace(/([<>])/g, '\\$1'); } } exports.JestJunitParser = JestJunitParser; @@ -1134,10 +1293,19 @@ exports.JestJunitParser = JestJunitParser; /***/ }), /***/ 6043: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.MochaJsonParser = void 0; const test_results_1 = __nccwpck_require__(2768); @@ -1147,11 +1315,13 @@ class MochaJsonParser { constructor(options) { this.options = options; } - async parse(path, content) { - const mocha = this.getMochaJson(path, content); - const result = this.getTestRunResult(path, mocha); - result.sort(true); - return Promise.resolve(result); + parse(path, content) { + return __awaiter(this, void 0, void 0, function* () { + const mocha = this.getMochaJson(path, content); + const result = this.getTestRunResult(path, mocha); + result.sort(true); + return Promise.resolve(result); + }); } getMochaJson(path, content) { try { @@ -1205,7 +1375,7 @@ class MochaJsonParser { } let path; let line; - const src = node_utils_1.getExceptionSource(details, this.options.trackedFiles, file => this.getRelativePath(file)); + const src = (0, node_utils_1.getExceptionSource)(details, this.options.trackedFiles, file => this.getRelativePath(file)); if (src) { path = src.path; line = src.line; @@ -1218,7 +1388,7 @@ class MochaJsonParser { }; } getRelativePath(path) { - path = path_utils_1.normalizeFilePath(path); + path = (0, path_utils_1.normalizeFilePath)(path); const workDir = this.getWorkDir(path); if (workDir !== undefined && path.startsWith(workDir)) { path = path.substr(workDir.length); @@ -1227,12 +1397,31 @@ class MochaJsonParser { } getWorkDir(path) { var _a, _b; - return ((_b = (_a = this.options.workDir) !== null && _a !== void 0 ? _a : this.assumedWorkDir) !== null && _b !== void 0 ? _b : (this.assumedWorkDir = path_utils_1.getBasePath(path, this.options.trackedFiles))); + return ((_b = (_a = this.options.workDir) !== null && _a !== void 0 ? _a : this.assumedWorkDir) !== null && _b !== void 0 ? _b : (this.assumedWorkDir = (0, path_utils_1.getBasePath)(path, this.options.trackedFiles))); } } exports.MochaJsonParser = MochaJsonParser; +/***/ }), + +/***/ 5366: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SwiftXunitParser = void 0; +const java_junit_parser_1 = __nccwpck_require__(676); +class SwiftXunitParser extends java_junit_parser_1.JavaJunitParser { + constructor(options) { + super(options); + this.options = options; + } +} +exports.SwiftXunitParser = SwiftXunitParser; + + /***/ }), /***/ 5867: @@ -1275,7 +1464,7 @@ function getAnnotations(results, maxCount) { suiteName: ts.name, testName: tg.name ? `${tg.name} ► ${tc.name}` : tc.name, details: err.details, - message: (_d = (_c = err.message) !== null && _c !== void 0 ? _c : parse_utils_1.getFirstNonEmptyLine(err.details)) !== null && _d !== void 0 ? _d : 'Test failed', + message: (_d = (_c = err.message) !== null && _c !== void 0 ? _c : (0, parse_utils_1.getFirstNonEmptyLine)(err.details)) !== null && _d !== void 0 ? 
_d : 'Test failed', path, line }); @@ -1290,7 +1479,7 @@ function getAnnotations(results, maxCount) { 'Failed test found in:', e.testRunPaths.map(p => ` ${p}`).join('\n'), 'Error:', - ident(markdown_utils_1.fixEol(e.message), ' ') + ident((0, markdown_utils_1.fixEol)(e.message), ' ') ].join('\n'); return enforceCheckRunLimits({ path: e.path, @@ -1298,7 +1487,7 @@ function getAnnotations(results, maxCount) { end_line: e.line, annotation_level: 'failure', title: `${e.suiteName} ► ${e.testName}`, - raw_details: markdown_utils_1.fixEol(e.details), + raw_details: (0, markdown_utils_1.fixEol)(e.details), message }); }); @@ -1306,10 +1495,10 @@ function getAnnotations(results, maxCount) { } exports.getAnnotations = getAnnotations; function enforceCheckRunLimits(err) { - err.title = markdown_utils_1.ellipsis(err.title || '', 255); - err.message = markdown_utils_1.ellipsis(err.message, 65535); + err.title = (0, markdown_utils_1.ellipsis)(err.title || '', 255); + err.message = (0, markdown_utils_1.ellipsis)(err.message, 65535); if (err.raw_details) { - err.raw_details = markdown_utils_1.ellipsis(err.raw_details, 65535); + err.raw_details = (0, markdown_utils_1.ellipsis)(err.raw_details, 65535); } return err; } @@ -1330,7 +1519,11 @@ function ident(text, prefix) { var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -1351,6 +1544,7 @@ Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getReport = void 0; const core = __importStar(__nccwpck_require__(2186)); const markdown_utils_1 = __nccwpck_require__(6482); +const node_utils_1 = __nccwpck_require__(5824); const parse_utils_1 = __nccwpck_require__(7811); const slugger_1 = __nccwpck_require__(3328); const MAX_REPORT_LENGTH = 65535; @@ -1363,7 +1557,7 @@ const defaultOptions = { function getReport(results, options = defaultOptions) { core.info('Generating check run summary'); applySort(results); - const opts = { ...options }; + const opts = Object.assign({}, options); let lines = renderReport(results, opts); let report = lines.join('\n'); if (getByteLength(report) <= MAX_REPORT_LENGTH) { @@ -1409,9 +1603,9 @@ function trimReport(lines) { return reportLines.join('\n'); } function applySort(results) { - results.sort((a, b) => a.path.localeCompare(b.path)); + results.sort((a, b) => a.path.localeCompare(b.path, node_utils_1.DEFAULT_LOCALE)); for (const res of results) { - res.suites.sort((a, b) => a.name.localeCompare(b.name)); + res.suites.sort((a, b) => a.name.localeCompare(b.name, node_utils_1.DEFAULT_LOCALE)); } } function getByteLength(text) { @@ -1458,16 +1652,16 @@ function getTestRunsReport(testRuns, options) { const sections = []; if (testRuns.length > 1 || options.onlySummary) { const tableData = testRuns.map((tr, runIndex) => { - const time = markdown_utils_1.formatTime(tr.time); + const time = (0, markdown_utils_1.formatTime)(tr.time); const name = tr.path; const addr = options.baseUrl + makeRunSlug(runIndex).link; - const nameLink = markdown_utils_1.link(name, addr); + const nameLink = (0, markdown_utils_1.link)(name, addr); const passed = 
tr.passed > 0 ? `${tr.passed}${markdown_utils_1.Icon.success}` : ''; const failed = tr.failed > 0 ? `${tr.failed}${markdown_utils_1.Icon.fail}` : ''; const skipped = tr.skipped > 0 ? `${tr.skipped}${markdown_utils_1.Icon.skip}` : ''; return [nameLink, passed, failed, skipped, time]; }); - const resultsTable = markdown_utils_1.table(['Report', 'Passed', 'Failed', 'Skipped', 'Time'], [markdown_utils_1.Align.Left, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right], ...tableData); + const resultsTable = (0, markdown_utils_1.table)(['Report', 'Passed', 'Failed', 'Skipped', 'Time'], [markdown_utils_1.Align.Left, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right], ...tableData); sections.push(resultsTable); } if (options.onlySummary === false) { @@ -1482,19 +1676,19 @@ function getSuitesReport(tr, runIndex, options) { const nameLink = `${tr.path}`; const icon = getResultIcon(tr.result); sections.push(`## ${icon}\xa0${nameLink}`); - const time = markdown_utils_1.formatTime(tr.time); + const time = (0, markdown_utils_1.formatTime)(tr.time); const headingLine2 = tr.tests > 0 ? `**${tr.tests}** tests were completed in **${time}** with **${tr.passed}** passed, **${tr.failed}** failed and **${tr.skipped}** skipped.` : 'No tests found'; sections.push(headingLine2); const suites = options.listSuites === 'failed' ? tr.failedSuites : tr.suites; if (suites.length > 0) { - const suitesTable = markdown_utils_1.table(['Test suite', 'Passed', 'Failed', 'Skipped', 'Time'], [markdown_utils_1.Align.Left, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right], ...suites.map((s, suiteIndex) => { - const tsTime = markdown_utils_1.formatTime(s.time); + const suitesTable = (0, markdown_utils_1.table)(['Test suite', 'Passed', 'Failed', 'Skipped', 'Time'], [markdown_utils_1.Align.Left, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right], ...suites.map((s, suiteIndex) => { + const tsTime = (0, markdown_utils_1.formatTime)(s.time); const tsName = s.name; const skipLink = options.listTests === 'none' || (options.listTests === 'failed' && s.result !== 'failed'); const tsAddr = options.baseUrl + makeSuiteSlug(runIndex, suiteIndex).link; - const tsNameLink = skipLink ? tsName : markdown_utils_1.link(tsName, tsAddr); + const tsNameLink = skipLink ? tsName : (0, markdown_utils_1.link)(tsName, tsAddr); const passed = s.passed > 0 ? `${s.passed}${markdown_utils_1.Icon.success}` : ''; const failed = s.failed > 0 ? `${s.failed}${markdown_utils_1.Icon.fail}` : ''; const skipped = s.skipped > 0 ? `${s.skipped}${markdown_utils_1.Icon.skip}` : ''; @@ -1535,7 +1729,7 @@ function getTestsReport(ts, runIndex, suiteIndex, options) { const result = getResultIcon(tc.result); sections.push(`${space}${result} ${tc.name}`); if (tc.error) { - const lines = (_c = ((_a = tc.error.message) !== null && _a !== void 0 ? _a : (_b = parse_utils_1.getFirstNonEmptyLine(tc.error.details)) === null || _b === void 0 ? void 0 : _b.trim())) === null || _c === void 0 ? void 0 : _c.split(/\r?\n/g).map(l => '\t' + l); + const lines = (_c = ((_a = tc.error.message) !== null && _a !== void 0 ? _a : (_b = (0, parse_utils_1.getFirstNonEmptyLine)(tc.error.details)) === null || _b === void 0 ? void 0 : _b.trim())) === null || _c === void 0 ? 
void 0 : _c.split(/\r?\n/g).map(l => '\t' + l); if (lines) { sections.push(...lines); } @@ -1547,11 +1741,11 @@ function getTestsReport(ts, runIndex, suiteIndex, options) { } function makeRunSlug(runIndex) { // use prefix to avoid slug conflicts after escaping the paths - return slugger_1.slug(`r${runIndex}`); + return (0, slugger_1.slug)(`r${runIndex}`); } function makeSuiteSlug(runIndex, suiteIndex) { // use prefix to avoid slug conflicts after escaping the paths - return slugger_1.slug(`r${runIndex}s${suiteIndex}`); + return (0, slugger_1.slug)(`r${runIndex}s${suiteIndex}`); } function getResultIcon(result) { switch (result) { @@ -1570,12 +1764,13 @@ function getResultIcon(result) { /***/ }), /***/ 2768: -/***/ ((__unused_webpack_module, exports) => { +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.TestCaseResult = exports.TestGroupResult = exports.TestSuiteResult = exports.TestRunResult = void 0; +const node_utils_1 = __nccwpck_require__(5824); class TestRunResult { constructor(path, suites, totalTime) { this.path = path; @@ -1605,7 +1800,7 @@ class TestRunResult { return this.suites.filter(s => s.result === 'failed'); } sort(deep) { - this.suites.sort((a, b) => a.name.localeCompare(b.name)); + this.suites.sort((a, b) => a.name.localeCompare(b.name, node_utils_1.DEFAULT_LOCALE)); if (deep) { for (const suite of this.suites) { suite.sort(deep); @@ -1643,7 +1838,7 @@ class TestSuiteResult { return this.groups.filter(grp => grp.result === 'failed'); } sort(deep) { - this.groups.sort((a, b) => { var _a, _b; return ((_a = a.name) !== null && _a !== void 0 ? _a : '').localeCompare((_b = b.name) !== null && _b !== void 0 ? _b : ''); }); + this.groups.sort((a, b) => { var _a, _b; return ((_a = a.name) !== null && _a !== void 0 ? _a : '').localeCompare((_b = b.name) !== null && _b !== void 0 ? _b : '', node_utils_1.DEFAULT_LOCALE); }); if (deep) { for (const grp of this.groups) { grp.sort(); @@ -1676,7 +1871,7 @@ class TestGroupResult { return this.tests.filter(tc => tc.result === 'failed'); } sort() { - this.tests.sort((a, b) => a.name.localeCompare(b.name)); + this.tests.sort((a, b) => a.name.localeCompare(b.name, node_utils_1.DEFAULT_LOCALE)); } } exports.TestGroupResult = TestGroupResult; @@ -1691,31 +1886,6 @@ class TestCaseResult { exports.TestCaseResult = TestCaseResult; -/***/ }), - -/***/ 6069: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const exec_1 = __nccwpck_require__(1514); -// Wraps original exec() function -// Returns exit code and whole stdout/stderr -async function exec(commandLine, args, options) { - options = options || {}; - let stdout = ''; - let stderr = ''; - options.listeners = { - stdout: (data) => (stdout += data.toString()), - stderr: (data) => (stderr += data.toString()) - }; - const code = await exec_1.exec(commandLine, args, options); - return { code, stdout, stderr }; -} -exports.default = exec; - - /***/ }), /***/ 9844: @@ -1725,7 +1895,11 @@ exports.default = exec; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -1742,24 +1916,32 @@ var __importStar = (this && this.__importStar) || function (mod) { __setModuleDefault(result, mod); return result; }; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.listFiles = void 0; const core = __importStar(__nccwpck_require__(2186)); -const exec_1 = __importDefault(__nccwpck_require__(6069)); -async function listFiles() { - core.startGroup('Listing all files tracked by git'); - let output = ''; - try { - output = (await exec_1.default('git', ['ls-files', '-z'])).stdout; - } - finally { - fixStdOutNullTermination(); - core.endGroup(); - } - return output.split('\u0000').filter(s => s.length > 0); +const exec_1 = __nccwpck_require__(1514); +function listFiles() { + return __awaiter(this, void 0, void 0, function* () { + core.startGroup('Listing all files tracked by git'); + let output = ''; + try { + output = (yield (0, exec_1.getExecOutput)('git', ['ls-files', '-z'])).stdout; + } + finally { + fixStdOutNullTermination(); + core.endGroup(); + } + return output.split('\u0000').filter(s => s.length > 0); + }); } exports.listFiles = listFiles; function fixStdOutNullTermination() { @@ -1779,7 +1961,11 @@ function fixStdOutNullTermination() { var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -1796,18 +1982,27 @@ var __importStar = (this && this.__importStar) || function (mod) { __setModuleDefault(result, mod); return result; }; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.listFiles = exports.downloadArtifact = exports.getCheckRunContext = void 0; -const fs_1 = __nccwpck_require__(5747); +const fs_1 = __nccwpck_require__(7147); const core = __importStar(__nccwpck_require__(2186)); const github = __importStar(__nccwpck_require__(5438)); -const stream = __importStar(__nccwpck_require__(2413)); -const util_1 = __nccwpck_require__(1669); +const stream = __importStar(__nccwpck_require__(2781)); +const util_1 = __nccwpck_require__(3837); const got_1 = __importDefault(__nccwpck_require__(3061)); -const asyncStream = util_1.promisify(stream.pipeline); +const asyncStream = (0, util_1.promisify)(stream.pipeline); function getCheckRunContext() { if (github.context.eventName === 'workflow_run') { core.info('Action was triggered by workflow_run: using SHA and RUN_ID from triggering workflow'); @@ -1829,91 +2024,83 @@ function getCheckRunContext() { return { sha: github.context.sha, runId }; } exports.getCheckRunContext = getCheckRunContext; -async function downloadArtifact(octokit, artifactId, fileName, token) { - core.startGroup(`Downloading artifact ${fileName}`); - try { - core.info(`Artifact ID: ${artifactId}`); - const req = octokit.actions.downloadArtifact.endpoint({ - ...github.context.repo, - artifact_id: artifactId, - archive_format: 'zip' - }); - const headers = { - Authorization: `Bearer ${token}` - }; - const resp = await got_1.default(req.url, { - headers, - followRedirect: false - }); - core.info(`Fetch artifact URL: ${resp.statusCode} ${resp.statusMessage}`); - if (resp.statusCode !== 302) { - throw new Error('Fetch artifact URL failed: received unexpected status code'); - } - const url = resp.headers.location; - if (url === undefined) { - const receivedHeaders = Object.keys(resp.headers); - core.info(`Received headers: ${receivedHeaders.join(', ')}`); - throw new Error('Location header was not found in API response'); - } - if (typeof url !== 'string') { - throw new Error(`Location header has unexpected value: ${url}`); - } - const downloadStream = got_1.default.stream(url, { headers }); - const fileWriterStream = fs_1.createWriteStream(fileName); - core.info(`Downloading ${url}`); - downloadStream.on('downloadProgress', ({ transferred }) => { - core.info(`Progress: ${transferred} B`); - }); - await asyncStream(downloadStream, fileWriterStream); - } - finally { - core.endGroup(); - } +function downloadArtifact(octokit, artifactId, fileName, token) { + return __awaiter(this, void 0, void 0, function* () { + core.startGroup(`Downloading artifact ${fileName}`); + try { + core.info(`Artifact ID: ${artifactId}`); + const req = octokit.rest.actions.downloadArtifact.endpoint(Object.assign(Object.assign({}, github.context.repo), { artifact_id: artifactId, archive_format: 'zip' })); + const headers = { + Authorization: `Bearer ${token}` + }; + const resp = yield (0, got_1.default)(req.url, { + headers, + followRedirect: false + }); + core.info(`Fetch artifact URL: ${resp.statusCode} ${resp.statusMessage}`); + if (resp.statusCode !== 302) { + throw new Error('Fetch artifact URL failed: received unexpected status code'); + } + const url = resp.headers.location; + if (url === undefined) { + const 
receivedHeaders = Object.keys(resp.headers); + core.info(`Received headers: ${receivedHeaders.join(', ')}`); + throw new Error('Location header was not found in API response'); + } + if (typeof url !== 'string') { + throw new Error(`Location header has unexpected value: ${url}`); + } + const downloadStream = got_1.default.stream(url, { headers }); + const fileWriterStream = (0, fs_1.createWriteStream)(fileName); + core.info(`Downloading ${url}`); + downloadStream.on('downloadProgress', ({ transferred }) => { + core.info(`Progress: ${transferred} B`); + }); + yield asyncStream(downloadStream, fileWriterStream); + } + finally { + core.endGroup(); + } + }); } exports.downloadArtifact = downloadArtifact; -async function listFiles(octokit, sha) { - core.startGroup('Fetching list of tracked files from GitHub'); - try { - const commit = await octokit.git.getCommit({ - commit_sha: sha, - ...github.context.repo - }); - const files = await listGitTree(octokit, commit.data.tree.sha, ''); - return files; - } - finally { - core.endGroup(); - } +function listFiles(octokit, sha) { + return __awaiter(this, void 0, void 0, function* () { + core.startGroup('Fetching list of tracked files from GitHub'); + try { + const commit = yield octokit.rest.git.getCommit(Object.assign({ commit_sha: sha }, github.context.repo)); + const files = yield listGitTree(octokit, commit.data.tree.sha, ''); + return files; + } + finally { + core.endGroup(); + } + }); } exports.listFiles = listFiles; -async function listGitTree(octokit, sha, path) { - const pathLog = path ? ` at ${path}` : ''; - core.info(`Fetching tree ${sha}${pathLog}`); - let truncated = false; - let tree = await octokit.git.getTree({ - recursive: 'true', - tree_sha: sha, - ...github.context.repo - }); - if (tree.data.truncated) { - truncated = true; - tree = await octokit.git.getTree({ - tree_sha: sha, - ...github.context.repo - }); - } - const result = []; - for (const tr of tree.data.tree) { - const file = `${path}${tr.path}`; - if (tr.type === 'blob') { - result.push(file); +function listGitTree(octokit, sha, path) { + return __awaiter(this, void 0, void 0, function* () { + const pathLog = path ? 
` at ${path}` : ''; + core.info(`Fetching tree ${sha}${pathLog}`); + let truncated = false; + let tree = yield octokit.rest.git.getTree(Object.assign({ recursive: 'true', tree_sha: sha }, github.context.repo)); + if (tree.data.truncated) { + truncated = true; + tree = yield octokit.rest.git.getTree(Object.assign({ tree_sha: sha }, github.context.repo)); } - else if (tr.type === 'tree' && truncated) { - const files = await listGitTree(octokit, tr.sha, `${file}/`); - result.push(...files); + const result = []; + for (const tr of tree.data.tree) { + const file = `${path}${tr.path}`; + if (tr.type === 'blob') { + result.push(file); + } + else if (tr.type === 'tree' && truncated) { + const files = yield listGitTree(octokit, tr.sha, `${file}/`); + result.push(...files); + } } - } - return result; + return result; + }); } @@ -1932,10 +2119,10 @@ var Align; Align["Center"] = ":---:"; Align["Right"] = "---:"; Align["None"] = "---"; -})(Align = exports.Align || (exports.Align = {})); +})(Align || (exports.Align = Align = {})); exports.Icon = { - skip: '✖️', - success: '✔️', + skip: '⚪', // ':white_circle:' + success: '✅', // ':white_check_mark:' fail: '❌' // ':x:' }; function link(title, address) { @@ -1982,8 +2169,9 @@ exports.formatTime = formatTime; "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getExceptionSource = void 0; +exports.getExceptionSource = exports.DEFAULT_LOCALE = void 0; const path_utils_1 = __nccwpck_require__(4070); +exports.DEFAULT_LOCALE = 'en-US'; function getExceptionSource(stackTrace, trackedFiles, getRelativePath) { const lines = stackTrace.split(/\r?\n/); const re = /\((.*):(\d+):\d+\)$/; @@ -1991,7 +2179,7 @@ function getExceptionSource(stackTrace, trackedFiles, getRelativePath) { const match = str.match(re); if (match !== null) { const [_, fileStr, lineStr] = match; - const filePath = path_utils_1.normalizeFilePath(fileStr); + const filePath = (0, path_utils_1.normalizeFilePath)(fileStr); if (filePath.startsWith('internal/') || filePath.includes('/node_modules/')) { continue; } @@ -2037,8 +2225,8 @@ function parseIsoDate(str) { } exports.parseIsoDate = parseIsoDate; function getFirstNonEmptyLine(stackTrace) { - const lines = stackTrace.split(/\r?\n/g); - return lines.find(str => !/^\s*$/.test(str)); + const lines = stackTrace === null || stackTrace === void 0 ? void 0 : stackTrace.split(/\r?\n/g); + return lines === null || lines === void 0 ? void 0 : lines.find(str => !/^\s*$/.test(str)); } exports.getFirstNonEmptyLine = getFirstNonEmptyLine; @@ -2121,15 +2309,28 @@ exports.slug = slug; "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); -const os = __importStar(__nccwpck_require__(2087)); +exports.issue = exports.issueCommand = void 0; +const os = __importStar(__nccwpck_require__(2037)); const utils_1 = __nccwpck_require__(5278); /** * Commands @@ -2207,6 +2408,25 @@ function escapeProperty(s) { "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { @@ -2216,19 +2436,14 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0; const command_1 = __nccwpck_require__(7351); const file_command_1 = __nccwpck_require__(717); const utils_1 = __nccwpck_require__(5278); -const os = __importStar(__nccwpck_require__(2087)); -const path = __importStar(__nccwpck_require__(5622)); +const os = __importStar(__nccwpck_require__(2037)); +const path = __importStar(__nccwpck_require__(1017)); +const oidc_utils_1 = __nccwpck_require__(8041); /** * The code to exit an action */ @@ -2257,13 +2472,9 @@ function exportVariable(name, val) { process.env[name] = convertedVal; const filePath = process.env['GITHUB_ENV'] || ''; if (filePath) { - const delimiter = '_GitHubActionsFileCommandDelimeter_'; - const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`; - file_command_1.issueCommand('ENV', commandValue); - } - else { - command_1.issueCommand('set-env', { name }, convertedVal); + return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val)); } + command_1.issueCommand('set-env', { name }, convertedVal); } exports.exportVariable = exportVariable; /** @@ -2281,7 +2492,7 @@ exports.setSecret = setSecret; function addPath(inputPath) { const filePath = process.env['GITHUB_PATH'] || ''; if (filePath) { - file_command_1.issueCommand('PATH', inputPath); + file_command_1.issueFileCommand('PATH', inputPath); } else { command_1.issueCommand('add-path', {}, inputPath); @@ -2290,7 +2501,9 @@ function addPath(inputPath) { } exports.addPath = addPath; /** - * Gets the value of an input. The value is also trimmed. + * Gets the value of an input. + * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed. + * Returns an empty string if the value is not defined. * * @param name name of the input to get * @param options optional. See InputOptions. @@ -2301,9 +2514,52 @@ function getInput(name, options) { if (options && options.required && !val) { throw new Error(`Input required and not supplied: ${name}`); } + if (options && options.trimWhitespace === false) { + return val; + } return val.trim(); } exports.getInput = getInput; +/** + * Gets the values of an multiline input. Each value is also trimmed. + * + * @param name name of the input to get + * @param options optional. See InputOptions. 
+ * @returns string[] + * + */ +function getMultilineInput(name, options) { + const inputs = getInput(name, options) + .split('\n') + .filter(x => x !== ''); + if (options && options.trimWhitespace === false) { + return inputs; + } + return inputs.map(input => input.trim()); +} +exports.getMultilineInput = getMultilineInput; +/** + * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification. + * Support boolean input list: `true | True | TRUE | false | False | FALSE` . + * The return value is also in boolean type. + * ref: https://yaml.org/spec/1.2/spec.html#id2804923 + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns boolean + */ +function getBooleanInput(name, options) { + const trueValue = ['true', 'True', 'TRUE']; + const falseValue = ['false', 'False', 'FALSE']; + const val = getInput(name, options); + if (trueValue.includes(val)) + return true; + if (falseValue.includes(val)) + return false; + throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` + + `Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); +} +exports.getBooleanInput = getBooleanInput; /** * Sets the value of an output. * @@ -2312,7 +2568,12 @@ exports.getInput = getInput; */ // eslint-disable-next-line @typescript-eslint/no-explicit-any function setOutput(name, value) { - command_1.issueCommand('set-output', { name }, value); + const filePath = process.env['GITHUB_OUTPUT'] || ''; + if (filePath) { + return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value)); + } + process.stdout.write(os.EOL); + command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value)); } exports.setOutput = setOutput; /** @@ -2358,19 +2619,30 @@ exports.debug = debug; /** * Adds an error issue * @param message error issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. */ -function error(message) { - command_1.issue('error', message instanceof Error ? message.toString() : message); +function error(message, properties = {}) { + command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); } exports.error = error; /** - * Adds an warning issue + * Adds a warning issue * @param message warning issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. */ -function warning(message) { - command_1.issue('warning', message instanceof Error ? message.toString() : message); +function warning(message, properties = {}) { + command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); } exports.warning = warning; +/** + * Adds a notice issue + * @param message notice issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +function notice(message, properties = {}) { + command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); +} +exports.notice = notice; /** * Writes info to log with console.log. 
* @param message info message @@ -2430,7 +2702,11 @@ exports.group = group; */ // eslint-disable-next-line @typescript-eslint/no-explicit-any function saveState(name, value) { - command_1.issueCommand('save-state', { name }, value); + const filePath = process.env['GITHUB_STATE'] || ''; + if (filePath) { + return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value)); + } + command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value)); } exports.saveState = saveState; /** @@ -2443,6 +2719,29 @@ function getState(name) { return process.env[`STATE_${name}`] || ''; } exports.getState = getState; +function getIDToken(aud) { + return __awaiter(this, void 0, void 0, function* () { + return yield oidc_utils_1.OidcClient.getIDToken(aud); + }); +} +exports.getIDToken = getIDToken; +/** + * Summary exports + */ +var summary_1 = __nccwpck_require__(1327); +Object.defineProperty(exports, "summary", ({ enumerable: true, get: function () { return summary_1.summary; } })); +/** + * @deprecated use core.summary + */ +var summary_2 = __nccwpck_require__(1327); +Object.defineProperty(exports, "markdownSummary", ({ enumerable: true, get: function () { return summary_2.markdownSummary; } })); +/** + * Path exports + */ +var path_utils_1 = __nccwpck_require__(2981); +Object.defineProperty(exports, "toPosixPath", ({ enumerable: true, get: function () { return path_utils_1.toPosixPath; } })); +Object.defineProperty(exports, "toWin32Path", ({ enumerable: true, get: function () { return path_utils_1.toWin32Path; } })); +Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: function () { return path_utils_1.toPlatformPath; } })); //# sourceMappingURL=core.js.map /***/ }), @@ -2453,20 +2752,34 @@ exports.getState = getState; "use strict"; // For internal use, subject to change. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.prepareKeyValueMessage = exports.issueFileCommand = void 0; // We use any as a valid input type /* eslint-disable @typescript-eslint/no-explicit-any */ -const fs = __importStar(__nccwpck_require__(5747)); -const os = __importStar(__nccwpck_require__(2087)); +const fs = __importStar(__nccwpck_require__(7147)); +const os = __importStar(__nccwpck_require__(2037)); +const uuid_1 = __nccwpck_require__(5840); const utils_1 = __nccwpck_require__(5278); -function issueCommand(command, message) { +function issueFileCommand(command, message) { const filePath = process.env[`GITHUB_${command}`]; if (!filePath) { throw new Error(`Unable to find environment variable for file command ${command}`); @@ -2478,11 +2791,465 @@ function issueCommand(command, message) { encoding: 'utf8' }); } -exports.issueCommand = issueCommand; +exports.issueFileCommand = issueFileCommand; +function prepareKeyValueMessage(key, value) { + const delimiter = `ghadelimiter_${uuid_1.v4()}`; + const convertedValue = utils_1.toCommandValue(value); + // These should realistically never happen, but just in case someone finds a + // way to exploit uuid generation let's not allow keys or values that contain + // the delimiter. + if (key.includes(delimiter)) { + throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`); + } + if (convertedValue.includes(delimiter)) { + throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`); + } + return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`; +} +exports.prepareKeyValueMessage = prepareKeyValueMessage; //# sourceMappingURL=file-command.js.map /***/ }), +/***/ 8041: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.OidcClient = void 0; +const http_client_1 = __nccwpck_require__(6255); +const auth_1 = __nccwpck_require__(5526); +const core_1 = __nccwpck_require__(2186); +class OidcClient { + static createHttpClient(allowRetry = true, maxRetry = 10) { + const requestOptions = { + allowRetries: allowRetry, + maxRetries: maxRetry + }; + return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions); + } + static getRequestToken() { + const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN']; + if (!token) { + throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable'); + } + return token; + } + static getIDTokenUrl() { + const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL']; + if (!runtimeUrl) { + throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable'); + } + return runtimeUrl; + } + static getCall(id_token_url) { + var _a; + return __awaiter(this, void 0, void 0, function* () { + const httpclient = OidcClient.createHttpClient(); + const res = yield httpclient + .getJson(id_token_url) + .catch(error => { + throw new Error(`Failed to get ID Token. \n + Error Code : ${error.statusCode}\n + Error Message: ${error.message}`); + }); + const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value; + if (!id_token) { + throw new Error('Response json body do not have ID Token field'); + } + return id_token; + }); + } + static getIDToken(audience) { + return __awaiter(this, void 0, void 0, function* () { + try { + // New ID Token is requested from action service + let id_token_url = OidcClient.getIDTokenUrl(); + if (audience) { + const encodedAudience = encodeURIComponent(audience); + id_token_url = `${id_token_url}&audience=${encodedAudience}`; + } + core_1.debug(`ID token url is ${id_token_url}`); + const id_token = yield OidcClient.getCall(id_token_url); + core_1.setSecret(id_token); + return id_token; + } + catch (error) { + throw new Error(`Error message: ${error.message}`); + } + }); + } +} +exports.OidcClient = OidcClient; +//# sourceMappingURL=oidc-utils.js.map + +/***/ }), + +/***/ 2981: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0; +const path = __importStar(__nccwpck_require__(1017)); +/** + * toPosixPath converts the given path to the posix form. On Windows, \\ will be + * replaced with /. + * + * @param pth. Path to transform. + * @return string Posix path. + */ +function toPosixPath(pth) { + return pth.replace(/[\\]/g, '/'); +} +exports.toPosixPath = toPosixPath; +/** + * toWin32Path converts the given path to the win32 form. On Linux, / will be + * replaced with \\. + * + * @param pth. Path to transform. + * @return string Win32 path. + */ +function toWin32Path(pth) { + return pth.replace(/[/]/g, '\\'); +} +exports.toWin32Path = toWin32Path; +/** + * toPlatformPath converts the given path to a platform-specific path. It does + * this by replacing instances of / and \ with the platform-specific path + * separator. + * + * @param pth The path to platformize. + * @return string The platform-specific path. + */ +function toPlatformPath(pth) { + return pth.replace(/[/\\]/g, path.sep); +} +exports.toPlatformPath = toPlatformPath; +//# sourceMappingURL=path-utils.js.map + +/***/ }), + +/***/ 1327: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0; +const os_1 = __nccwpck_require__(2037); +const fs_1 = __nccwpck_require__(7147); +const { access, appendFile, writeFile } = fs_1.promises; +exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY'; +exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary'; +class Summary { + constructor() { + this._buffer = ''; + } + /** + * Finds the summary file path from the environment, rejects if env var is not found or file does not exist + * Also checks r/w permissions. + * + * @returns step summary file path + */ + filePath() { + return __awaiter(this, void 0, void 0, function* () { + if (this._filePath) { + return this._filePath; + } + const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR]; + if (!pathFromEnv) { + throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. 
Check if your runtime environment supports job summaries.`); + } + try { + yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK); + } + catch (_a) { + throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`); + } + this._filePath = pathFromEnv; + return this._filePath; + }); + } + /** + * Wraps content in an HTML tag, adding any HTML attributes + * + * @param {string} tag HTML tag to wrap + * @param {string | null} content content within the tag + * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add + * + * @returns {string} content wrapped in HTML element + */ + wrap(tag, content, attrs = {}) { + const htmlAttrs = Object.entries(attrs) + .map(([key, value]) => ` ${key}="${value}"`) + .join(''); + if (!content) { + return `<${tag}${htmlAttrs}>`; + } + return `<${tag}${htmlAttrs}>${content}`; + } + /** + * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default. + * + * @param {SummaryWriteOptions} [options] (optional) options for write operation + * + * @returns {Promise} summary instance + */ + write(options) { + return __awaiter(this, void 0, void 0, function* () { + const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite); + const filePath = yield this.filePath(); + const writeFunc = overwrite ? writeFile : appendFile; + yield writeFunc(filePath, this._buffer, { encoding: 'utf8' }); + return this.emptyBuffer(); + }); + } + /** + * Clears the summary buffer and wipes the summary file + * + * @returns {Summary} summary instance + */ + clear() { + return __awaiter(this, void 0, void 0, function* () { + return this.emptyBuffer().write({ overwrite: true }); + }); + } + /** + * Returns the current summary buffer as a string + * + * @returns {string} string of summary buffer + */ + stringify() { + return this._buffer; + } + /** + * If the summary buffer is empty + * + * @returns {boolen} true if the buffer is empty + */ + isEmptyBuffer() { + return this._buffer.length === 0; + } + /** + * Resets the summary buffer without writing to summary file + * + * @returns {Summary} summary instance + */ + emptyBuffer() { + this._buffer = ''; + return this; + } + /** + * Adds raw text to the summary buffer + * + * @param {string} text content to add + * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false) + * + * @returns {Summary} summary instance + */ + addRaw(text, addEOL = false) { + this._buffer += text; + return addEOL ? this.addEOL() : this; + } + /** + * Adds the operating system-specific end-of-line marker to the buffer + * + * @returns {Summary} summary instance + */ + addEOL() { + return this.addRaw(os_1.EOL); + } + /** + * Adds an HTML codeblock to the summary buffer + * + * @param {string} code content to render within fenced code block + * @param {string} lang (optional) language to syntax highlight code + * + * @returns {Summary} summary instance + */ + addCodeBlock(code, lang) { + const attrs = Object.assign({}, (lang && { lang })); + const element = this.wrap('pre', this.wrap('code', code), attrs); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML list to the summary buffer + * + * @param {string[]} items list of items to render + * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false) + * + * @returns {Summary} summary instance + */ + addList(items, ordered = false) { + const tag = ordered ? 
'ol' : 'ul'; + const listItems = items.map(item => this.wrap('li', item)).join(''); + const element = this.wrap(tag, listItems); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML table to the summary buffer + * + * @param {SummaryTableCell[]} rows table rows + * + * @returns {Summary} summary instance + */ + addTable(rows) { + const tableBody = rows + .map(row => { + const cells = row + .map(cell => { + if (typeof cell === 'string') { + return this.wrap('td', cell); + } + const { header, data, colspan, rowspan } = cell; + const tag = header ? 'th' : 'td'; + const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan })); + return this.wrap(tag, data, attrs); + }) + .join(''); + return this.wrap('tr', cells); + }) + .join(''); + const element = this.wrap('table', tableBody); + return this.addRaw(element).addEOL(); + } + /** + * Adds a collapsable HTML details element to the summary buffer + * + * @param {string} label text for the closed state + * @param {string} content collapsable content + * + * @returns {Summary} summary instance + */ + addDetails(label, content) { + const element = this.wrap('details', this.wrap('summary', label) + content); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML image tag to the summary buffer + * + * @param {string} src path to the image you to embed + * @param {string} alt text description of the image + * @param {SummaryImageOptions} options (optional) addition image attributes + * + * @returns {Summary} summary instance + */ + addImage(src, alt, options) { + const { width, height } = options || {}; + const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height })); + const element = this.wrap('img', null, Object.assign({ src, alt }, attrs)); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML section heading element + * + * @param {string} text heading text + * @param {number | string} [level=1] (optional) the heading level, default: 1 + * + * @returns {Summary} summary instance + */ + addHeading(text, level) { + const tag = `h${level}`; + const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag) + ? tag + : 'h1'; + const element = this.wrap(allowedTag, text); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML thematic break (
<hr>) to the summary buffer + * + * @returns {Summary} summary instance + */ + addSeparator() { + const element = this.wrap('hr', null); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML line break (<br>
) to the summary buffer + * + * @returns {Summary} summary instance + */ + addBreak() { + const element = this.wrap('br', null); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML blockquote to the summary buffer + * + * @param {string} text quote text + * @param {string} cite (optional) citation url + * + * @returns {Summary} summary instance + */ + addQuote(text, cite) { + const attrs = Object.assign({}, (cite && { cite })); + const element = this.wrap('blockquote', text, attrs); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML anchor tag to the summary buffer + * + * @param {string} text link text/content + * @param {string} href hyperlink + * + * @returns {Summary} summary instance + */ + addLink(text, href) { + const element = this.wrap('a', text, { href }); + return this.addRaw(element).addEOL(); + } +} +const _summary = new Summary(); +/** + * @deprecated use `core.summary` + */ +exports.markdownSummary = _summary; +exports.summary = _summary; +//# sourceMappingURL=summary.js.map + +/***/ }), + /***/ 5278: /***/ ((__unused_webpack_module, exports) => { @@ -2491,6 +3258,7 @@ exports.issueCommand = issueCommand; // We use any as a valid input type /* eslint-disable @typescript-eslint/no-explicit-any */ Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toCommandProperties = exports.toCommandValue = void 0; /** * Sanitizes an input into a string so it can be passed into issueCommand safely * @param input input to sanitize into a string @@ -2505,6 +3273,26 @@ function toCommandValue(input) { return JSON.stringify(input); } exports.toCommandValue = toCommandValue; +/** + * + * @param annotationProperties + * @returns The command properties to send with the actual annotation command + * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646 + */ +function toCommandProperties(annotationProperties) { + if (!Object.keys(annotationProperties).length) { + return {}; + } + return { + title: annotationProperties.title, + file: annotationProperties.file, + line: annotationProperties.startLine, + endLine: annotationProperties.endLine, + col: annotationProperties.startColumn, + endColumn: annotationProperties.endColumn + }; +} +exports.toCommandProperties = toCommandProperties; //# sourceMappingURL=utils.js.map /***/ }), @@ -2514,6 +3302,25 @@ exports.toCommandValue = toCommandValue; "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { @@ -2523,14 +3330,9 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getExecOutput = exports.exec = void 0; +const string_decoder_1 = __nccwpck_require__(1576); const tr = __importStar(__nccwpck_require__(8159)); /** * Exec a command. @@ -2556,6 +3358,51 @@ function exec(commandLine, args, options) { }); } exports.exec = exec; +/** + * Exec a command and get the output. + * Output will be streamed to the live console. + * Returns promise with the exit code and collected stdout and stderr + * + * @param commandLine command to execute (can include additional args). Must be correctly escaped. + * @param args optional arguments for tool. Escaping is handled by the lib. + * @param options optional exec options. See ExecOptions + * @returns Promise exit code, stdout, and stderr + */ +function getExecOutput(commandLine, args, options) { + var _a, _b; + return __awaiter(this, void 0, void 0, function* () { + let stdout = ''; + let stderr = ''; + //Using string decoder covers the case where a mult-byte character is split + const stdoutDecoder = new string_decoder_1.StringDecoder('utf8'); + const stderrDecoder = new string_decoder_1.StringDecoder('utf8'); + const originalStdoutListener = (_a = options === null || options === void 0 ? void 0 : options.listeners) === null || _a === void 0 ? void 0 : _a.stdout; + const originalStdErrListener = (_b = options === null || options === void 0 ? void 0 : options.listeners) === null || _b === void 0 ? void 0 : _b.stderr; + const stdErrListener = (data) => { + stderr += stderrDecoder.write(data); + if (originalStdErrListener) { + originalStdErrListener(data); + } + }; + const stdOutListener = (data) => { + stdout += stdoutDecoder.write(data); + if (originalStdoutListener) { + originalStdoutListener(data); + } + }; + const listeners = Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.listeners), { stdout: stdOutListener, stderr: stdErrListener }); + const exitCode = yield exec(commandLine, args, Object.assign(Object.assign({}, options), { listeners })); + //flush any remaining characters + stdout += stdoutDecoder.end(); + stderr += stderrDecoder.end(); + return { + exitCode, + stdout, + stderr + }; + }); +} +exports.getExecOutput = getExecOutput; //# sourceMappingURL=exec.js.map /***/ }), @@ -2565,6 +3412,25 @@ exports.exec = exec; "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { @@ -2574,20 +3440,15 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; Object.defineProperty(exports, "__esModule", ({ value: true })); -const os = __importStar(__nccwpck_require__(2087)); -const events = __importStar(__nccwpck_require__(8614)); -const child = __importStar(__nccwpck_require__(3129)); -const path = __importStar(__nccwpck_require__(5622)); +exports.argStringToArray = exports.ToolRunner = void 0; +const os = __importStar(__nccwpck_require__(2037)); +const events = __importStar(__nccwpck_require__(2361)); +const child = __importStar(__nccwpck_require__(2081)); +const path = __importStar(__nccwpck_require__(1017)); const io = __importStar(__nccwpck_require__(7436)); const ioUtil = __importStar(__nccwpck_require__(1962)); +const timers_1 = __nccwpck_require__(9512); /* eslint-disable @typescript-eslint/unbound-method */ const IS_WINDOWS = process.platform === 'win32'; /* @@ -2657,11 +3518,12 @@ class ToolRunner extends events.EventEmitter { s = s.substring(n + os.EOL.length); n = s.indexOf(os.EOL); } - strBuffer = s; + return s; } catch (err) { // streaming lines to console is best effort. Don't fail a build. this._debug(`error processing line. 
Failed with error ${err}`); + return ''; } } _getSpawnFileName() { @@ -2943,7 +3805,7 @@ class ToolRunner extends events.EventEmitter { // if the tool is only a file name, then resolve it from the PATH // otherwise verify it exists (add extension on Windows if necessary) this.toolPath = yield io.which(this.toolPath, true); - return new Promise((resolve, reject) => { + return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { this._debug(`exec tool: ${this.toolPath}`); this._debug('arguments:'); for (const arg of this.args) { @@ -2957,9 +3819,12 @@ class ToolRunner extends events.EventEmitter { state.on('debug', (message) => { this._debug(message); }); + if (this.options.cwd && !(yield ioUtil.exists(this.options.cwd))) { + return reject(new Error(`The cwd: ${this.options.cwd} does not exist!`)); + } const fileName = this._getSpawnFileName(); const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName)); - const stdbuffer = ''; + let stdbuffer = ''; if (cp.stdout) { cp.stdout.on('data', (data) => { if (this.options.listeners && this.options.listeners.stdout) { @@ -2968,14 +3833,14 @@ class ToolRunner extends events.EventEmitter { if (!optionsNonNull.silent && optionsNonNull.outStream) { optionsNonNull.outStream.write(data); } - this._processLineBuffer(data, stdbuffer, (line) => { + stdbuffer = this._processLineBuffer(data, stdbuffer, (line) => { if (this.options.listeners && this.options.listeners.stdline) { this.options.listeners.stdline(line); } }); }); } - const errbuffer = ''; + let errbuffer = ''; if (cp.stderr) { cp.stderr.on('data', (data) => { state.processStderr = true; @@ -2990,7 +3855,7 @@ class ToolRunner extends events.EventEmitter { : optionsNonNull.outStream; s.write(data); } - this._processLineBuffer(data, errbuffer, (line) => { + errbuffer = this._processLineBuffer(data, errbuffer, (line) => { if (this.options.listeners && this.options.listeners.errline) { this.options.listeners.errline(line); } @@ -3037,7 +3902,7 @@ class ToolRunner extends events.EventEmitter { } cp.stdin.end(this.options.input); } - }); + })); }); } } @@ -3123,7 +3988,7 @@ class ExecState extends events.EventEmitter { this._setResult(); } else if (this.processExited) { - this.timeout = setTimeout(ExecState.HandleTimeout, this.delay, this); + this.timeout = timers_1.setTimeout(ExecState.HandleTimeout, this.delay, this); } } _debug(message) { @@ -3174,17 +4039,18 @@ class ExecState extends events.EventEmitter { Object.defineProperty(exports, "__esModule", ({ value: true })); exports.Context = void 0; -const fs_1 = __nccwpck_require__(5747); -const os_1 = __nccwpck_require__(2087); +const fs_1 = __nccwpck_require__(7147); +const os_1 = __nccwpck_require__(2037); class Context { /** * Hydrate the context from the environment */ constructor() { + var _a, _b, _c; this.payload = {}; if (process.env.GITHUB_EVENT_PATH) { - if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) { - this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' })); + if ((0, fs_1.existsSync)(process.env.GITHUB_EVENT_PATH)) { + this.payload = JSON.parse((0, fs_1.readFileSync)(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' })); } else { const path = process.env.GITHUB_EVENT_PATH; @@ -3200,6 +4066,10 @@ class Context { this.job = process.env.GITHUB_JOB; this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10); this.runId = parseInt(process.env.GITHUB_RUN_ID, 10); + this.apiUrl = (_a = process.env.GITHUB_API_URL) !== 
null && _a !== void 0 ? _a : `https://api.github.com`; + this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`; + this.graphqlUrl = + (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`; } get issue() { const payload = this.payload; @@ -3231,7 +4101,11 @@ exports.Context = Context; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -3244,7 +4118,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? ( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; @@ -3259,8 +4133,9 @@ exports.context = new Context.Context(); * @param token the repo PAT or GITHUB_TOKEN * @param options other options to set */ -function getOctokit(token, options) { - return new utils_1.GitHub(utils_1.getOctokitOptions(token, options)); +function getOctokit(token, options, ...additionalPlugins) { + const GitHubWithPlugins = utils_1.GitHub.plugin(...additionalPlugins); + return new GitHubWithPlugins((0, utils_1.getOctokitOptions)(token, options)); } exports.getOctokit = getOctokit; //# sourceMappingURL=github.js.map @@ -3274,7 +4149,11 @@ exports.getOctokit = getOctokit; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -3287,13 +4166,23 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? ( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0; -const httpClient = __importStar(__nccwpck_require__(9925)); +exports.getApiBaseUrl = exports.getProxyFetch = exports.getProxyAgentDispatcher = exports.getProxyAgent = exports.getAuthString = void 0; +const httpClient = __importStar(__nccwpck_require__(6255)); +const undici_1 = __nccwpck_require__(1773); function getAuthString(token, options) { if (!token && !options.auth) { throw new Error('Parameter token or opts.auth is required'); @@ -3309,6 +4198,19 @@ function getProxyAgent(destinationUrl) { return hc.getAgent(destinationUrl); } exports.getProxyAgent = getProxyAgent; +function getProxyAgentDispatcher(destinationUrl) { + const hc = new httpClient.HttpClient(); + return hc.getAgentDispatcher(destinationUrl); +} +exports.getProxyAgentDispatcher = getProxyAgentDispatcher; +function getProxyFetch(destinationUrl) { + const httpDispatcher = getProxyAgentDispatcher(destinationUrl); + const proxyFetch = (url, opts) => __awaiter(this, void 0, void 0, function* () { + return (0, undici_1.fetch)(url, Object.assign(Object.assign({}, opts), { dispatcher: httpDispatcher })); + }); + return proxyFetch; +} +exports.getProxyFetch = getProxyFetch; function getApiBaseUrl() { return process.env['GITHUB_API_URL'] || 'https://api.github.com'; } @@ -3324,7 +4226,11 @@ exports.getApiBaseUrl = getApiBaseUrl; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -3337,12 +4243,12 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getOctokitOptions = exports.GitHub = exports.context = void 0; +exports.getOctokitOptions = exports.GitHub = exports.defaults = exports.context = void 0; const Context = __importStar(__nccwpck_require__(4087)); const Utils = __importStar(__nccwpck_require__(7914)); // octokit + plugins @@ -3351,13 +4257,14 @@ const plugin_rest_endpoint_methods_1 = __nccwpck_require__(3044); const plugin_paginate_rest_1 = __nccwpck_require__(4193); exports.context = new Context.Context(); const baseUrl = Utils.getApiBaseUrl(); -const defaults = { +exports.defaults = { baseUrl, request: { - agent: Utils.getProxyAgent(baseUrl) + agent: Utils.getProxyAgent(baseUrl), + fetch: Utils.getProxyFetch(baseUrl) } }; -exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(defaults); +exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(exports.defaults); /** * Convience function to correctly format Octokit Options to pass into the constructor. * @@ -3378,16 +4285,139 @@ exports.getOctokitOptions = getOctokitOptions; /***/ }), -/***/ 9925: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 5526: +/***/ (function(__unused_webpack_module, exports) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0; +class BasicCredentialHandler { + constructor(username, password) { + this.username = username; + this.password = password; + } + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.BasicCredentialHandler = BasicCredentialHandler; +class BearerCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Bearer ${this.token}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.BearerCredentialHandler = BearerCredentialHandler; +class PersonalAccessTokenCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; +//# sourceMappingURL=auth.js.map + +/***/ }), + +/***/ 6255: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; +/* eslint-disable @typescript-eslint/no-explicit-any */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; Object.defineProperty(exports, "__esModule", ({ value: true })); -const http = __nccwpck_require__(8605); -const https = __nccwpck_require__(7211); -const pm = __nccwpck_require__(6443); -let tunnel; +exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0; +const http = __importStar(__nccwpck_require__(3685)); +const https = __importStar(__nccwpck_require__(5687)); +const pm = __importStar(__nccwpck_require__(9835)); +const tunnel = __importStar(__nccwpck_require__(4294)); +const undici_1 = __nccwpck_require__(1773); var HttpCodes; (function (HttpCodes) { HttpCodes[HttpCodes["OK"] = 200] = "OK"; @@ -3417,22 +4447,22 @@ var HttpCodes; HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; -})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {})); +})(HttpCodes || (exports.HttpCodes = HttpCodes = {})); var Headers; (function (Headers) { Headers["Accept"] = "accept"; Headers["ContentType"] = "content-type"; -})(Headers = exports.Headers || (exports.Headers = {})); +})(Headers || (exports.Headers = Headers = {})); var MediaTypes; (function (MediaTypes) { MediaTypes["ApplicationJson"] = "application/json"; -})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {})); +})(MediaTypes || (exports.MediaTypes = MediaTypes = {})); /** * Returns the proxy URL, depending upon the supplied url and proxy environment variables. * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com */ function getProxyUrl(serverUrl) { - let proxyUrl = pm.getProxyUrl(new URL(serverUrl)); + const proxyUrl = pm.getProxyUrl(new URL(serverUrl)); return proxyUrl ? 
proxyUrl.href : ''; } exports.getProxyUrl = getProxyUrl; @@ -3465,20 +4495,35 @@ class HttpClientResponse { this.message = message; } readBody() { - return new Promise(async (resolve, reject) => { - let output = Buffer.alloc(0); - this.message.on('data', (chunk) => { - output = Buffer.concat([output, chunk]); - }); - this.message.on('end', () => { - resolve(output.toString()); - }); + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + let output = Buffer.alloc(0); + this.message.on('data', (chunk) => { + output = Buffer.concat([output, chunk]); + }); + this.message.on('end', () => { + resolve(output.toString()); + }); + })); + }); + } + readBodyBuffer() { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + const chunks = []; + this.message.on('data', (chunk) => { + chunks.push(chunk); + }); + this.message.on('end', () => { + resolve(Buffer.concat(chunks)); + }); + })); }); } } exports.HttpClientResponse = HttpClientResponse; function isHttps(requestUrl) { - let parsedUrl = new URL(requestUrl); + const parsedUrl = new URL(requestUrl); return parsedUrl.protocol === 'https:'; } exports.isHttps = isHttps; @@ -3521,141 +4566,169 @@ class HttpClient { } } options(requestUrl, additionalHeaders) { - return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); + return __awaiter(this, void 0, void 0, function* () { + return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); + }); } get(requestUrl, additionalHeaders) { - return this.request('GET', requestUrl, null, additionalHeaders || {}); + return __awaiter(this, void 0, void 0, function* () { + return this.request('GET', requestUrl, null, additionalHeaders || {}); + }); } del(requestUrl, additionalHeaders) { - return this.request('DELETE', requestUrl, null, additionalHeaders || {}); + return __awaiter(this, void 0, void 0, function* () { + return this.request('DELETE', requestUrl, null, additionalHeaders || {}); + }); } post(requestUrl, data, additionalHeaders) { - return this.request('POST', requestUrl, data, additionalHeaders || {}); + return __awaiter(this, void 0, void 0, function* () { + return this.request('POST', requestUrl, data, additionalHeaders || {}); + }); } patch(requestUrl, data, additionalHeaders) { - return this.request('PATCH', requestUrl, data, additionalHeaders || {}); + return __awaiter(this, void 0, void 0, function* () { + return this.request('PATCH', requestUrl, data, additionalHeaders || {}); + }); } put(requestUrl, data, additionalHeaders) { - return this.request('PUT', requestUrl, data, additionalHeaders || {}); + return __awaiter(this, void 0, void 0, function* () { + return this.request('PUT', requestUrl, data, additionalHeaders || {}); + }); } head(requestUrl, additionalHeaders) { - return this.request('HEAD', requestUrl, null, additionalHeaders || {}); + return __awaiter(this, void 0, void 0, function* () { + return this.request('HEAD', requestUrl, null, additionalHeaders || {}); + }); } sendStream(verb, requestUrl, stream, additionalHeaders) { - return this.request(verb, requestUrl, stream, additionalHeaders); + return __awaiter(this, void 0, void 0, function* () { + return this.request(verb, requestUrl, stream, additionalHeaders); + }); } /** * Gets a typed object from an endpoint * Be aware that not found returns a null. 
Other errors (4xx, 5xx) reject the promise */ - async getJson(requestUrl, additionalHeaders = {}) { - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - let res = await this.get(requestUrl, additionalHeaders); - return this._processResponse(res, this.requestOptions); - } - async postJson(requestUrl, obj, additionalHeaders = {}) { - let data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - let res = await this.post(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - } - async putJson(requestUrl, obj, additionalHeaders = {}) { - let data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - let res = await this.put(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - } - async patchJson(requestUrl, obj, additionalHeaders = {}) { - let data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - let res = await this.patch(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); + getJson(requestUrl, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + const res = yield this.get(requestUrl, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + postJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.post(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + putJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.put(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + patchJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + 
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.patch(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); } /** * Makes a raw http request. * All other methods such as get, post, patch, and request ultimately call this. * Prefer get, del, post and patch */ - async request(verb, requestUrl, data, headers) { - if (this._disposed) { - throw new Error('Client has already been disposed.'); - } - let parsedUrl = new URL(requestUrl); - let info = this._prepareRequest(verb, parsedUrl, headers); - // Only perform retries on reads since writes may not be idempotent. - let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1 - ? this._maxRetries + 1 - : 1; - let numTries = 0; - let response; - while (numTries < maxTries) { - response = await this.requestRaw(info, data); - // Check if it's an authentication challenge - if (response && - response.message && - response.message.statusCode === HttpCodes.Unauthorized) { - let authenticationHandler; - for (let i = 0; i < this.handlers.length; i++) { - if (this.handlers[i].canHandleAuthentication(response)) { - authenticationHandler = this.handlers[i]; - break; + request(verb, requestUrl, data, headers) { + return __awaiter(this, void 0, void 0, function* () { + if (this._disposed) { + throw new Error('Client has already been disposed.'); + } + const parsedUrl = new URL(requestUrl); + let info = this._prepareRequest(verb, parsedUrl, headers); + // Only perform retries on reads since writes may not be idempotent. + const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb) + ? this._maxRetries + 1 + : 1; + let numTries = 0; + let response; + do { + response = yield this.requestRaw(info, data); + // Check if it's an authentication challenge + if (response && + response.message && + response.message.statusCode === HttpCodes.Unauthorized) { + let authenticationHandler; + for (const handler of this.handlers) { + if (handler.canHandleAuthentication(response)) { + authenticationHandler = handler; + break; + } + } + if (authenticationHandler) { + return authenticationHandler.handleAuthentication(this, info, data); + } + else { + // We have received an unauthorized response but have no handlers to handle it. + // Let the response return to the caller. + return response; } } - if (authenticationHandler) { - return authenticationHandler.handleAuthentication(this, info, data); + let redirectsRemaining = this._maxRedirects; + while (response.message.statusCode && + HttpRedirectCodes.includes(response.message.statusCode) && + this._allowRedirects && + redirectsRemaining > 0) { + const redirectUrl = response.message.headers['location']; + if (!redirectUrl) { + // if there's no location to redirect to, we won't + break; + } + const parsedRedirectUrl = new URL(redirectUrl); + if (parsedUrl.protocol === 'https:' && + parsedUrl.protocol !== parsedRedirectUrl.protocol && + !this._allowRedirectDowngrade) { + throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); + } + // we need to finish reading the response before reassigning response + // which will leak the open socket. 
+ yield response.readBody(); + // strip authorization header if redirected to a different hostname + if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { + for (const header in headers) { + // header names are case insensitive + if (header.toLowerCase() === 'authorization') { + delete headers[header]; + } + } + } + // let's make the request with the new redirectUrl + info = this._prepareRequest(verb, parsedRedirectUrl, headers); + response = yield this.requestRaw(info, data); + redirectsRemaining--; } - else { - // We have received an unauthorized response but have no handlers to handle it. - // Let the response return to the caller. + if (!response.message.statusCode || + !HttpResponseRetryCodes.includes(response.message.statusCode)) { + // If not a retry code, return immediately instead of retrying return response; } - } - let redirectsRemaining = this._maxRedirects; - while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 && - this._allowRedirects && - redirectsRemaining > 0) { - const redirectUrl = response.message.headers['location']; - if (!redirectUrl) { - // if there's no location to redirect to, we won't - break; - } - let parsedRedirectUrl = new URL(redirectUrl); - if (parsedUrl.protocol == 'https:' && - parsedUrl.protocol != parsedRedirectUrl.protocol && - !this._allowRedirectDowngrade) { - throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); + numTries += 1; + if (numTries < maxTries) { + yield response.readBody(); + yield this._performExponentialBackoff(numTries); } - // we need to finish reading the response before reassigning response - // which will leak the open socket. - await response.readBody(); - // strip authorization header if redirected to a different hostname - if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { - for (let header in headers) { - // header names are case insensitive - if (header.toLowerCase() === 'authorization') { - delete headers[header]; - } - } - } - // let's make the request with the new redirectUrl - info = this._prepareRequest(verb, parsedRedirectUrl, headers); - response = await this.requestRaw(info, data); - redirectsRemaining--; - } - if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) { - // If not a retry code, return immediately instead of retrying - return response; - } - numTries += 1; - if (numTries < maxTries) { - await response.readBody(); - await this._performExponentialBackoff(numTries); - } - } - return response; + } while (numTries < maxTries); + return response; + }); } /** * Needs to be called if keepAlive is set to true in request options. @@ -3672,14 +4745,22 @@ class HttpClient { * @param data */ requestRaw(info, data) { - return new Promise((resolve, reject) => { - let callbackForResult = function (err, res) { - if (err) { - reject(err); + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => { + function callbackForResult(err, res) { + if (err) { + reject(err); + } + else if (!res) { + // If `err` is not passed, then `res` must be passed. 
+ reject(new Error('Unknown error')); + } + else { + resolve(res); + } } - resolve(res); - }; - this.requestRawWithCallback(info, data, callbackForResult); + this.requestRawWithCallback(info, data, callbackForResult); + }); }); } /** @@ -3689,21 +4770,24 @@ class HttpClient { * @param onResult */ requestRawWithCallback(info, data, onResult) { - let socket; if (typeof data === 'string') { + if (!info.options.headers) { + info.options.headers = {}; + } info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); } let callbackCalled = false; - let handleResult = (err, res) => { + function handleResult(err, res) { if (!callbackCalled) { callbackCalled = true; onResult(err, res); } - }; - let req = info.httpModule.request(info.options, (msg) => { - let res = new HttpClientResponse(msg); - handleResult(null, res); + } + const req = info.httpModule.request(info.options, (msg) => { + const res = new HttpClientResponse(msg); + handleResult(undefined, res); }); + let socket; req.on('socket', sock => { socket = sock; }); @@ -3712,12 +4796,12 @@ class HttpClient { if (socket) { socket.end(); } - handleResult(new Error('Request timeout: ' + info.options.path), null); + handleResult(new Error(`Request timeout: ${info.options.path}`)); }); req.on('error', function (err) { // err has statusCode property // res should have headers - handleResult(err, null); + handleResult(err); }); if (data && typeof data === 'string') { req.write(data, 'utf8'); @@ -3738,9 +4822,18 @@ class HttpClient { * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com */ getAgent(serverUrl) { - let parsedUrl = new URL(serverUrl); + const parsedUrl = new URL(serverUrl); return this._getAgent(parsedUrl); } + getAgentDispatcher(serverUrl) { + const parsedUrl = new URL(serverUrl); + const proxyUrl = pm.getProxyUrl(parsedUrl); + const useProxy = proxyUrl && proxyUrl.hostname; + if (!useProxy) { + return; + } + return this._getProxyAgentDispatcher(parsedUrl, proxyUrl); + } _prepareRequest(method, requestUrl, headers) { const info = {}; info.parsedUrl = requestUrl; @@ -3762,21 +4855,19 @@ class HttpClient { info.options.agent = this._getAgent(info.parsedUrl); // gives handlers an opportunity to participate if (this.handlers) { - this.handlers.forEach(handler => { + for (const handler of this.handlers) { handler.prepareRequest(info.options); - }); + } } return info; } _mergeHeaders(headers) { - const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); if (this.requestOptions && this.requestOptions.headers) { - return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers)); + return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {})); } return lowercaseKeys(headers || {}); } _getExistingOrDefaultHeader(additionalHeaders, header, _default) { - const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); let clientHeader; if (this.requestOptions && this.requestOptions.headers) { clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; @@ -3785,8 +4876,8 @@ class HttpClient { } _getAgent(parsedUrl) { let agent; - let proxyUrl = pm.getProxyUrl(parsedUrl); - let useProxy = proxyUrl && proxyUrl.hostname; + const proxyUrl = pm.getProxyUrl(parsedUrl); + const useProxy = proxyUrl && proxyUrl.hostname; if (this._keepAlive && useProxy) { agent = this._proxyAgent; } @@ -3794,27 +4885,22 @@ class HttpClient { agent = this._agent; 
} // if agent is already assigned use that agent. - if (!!agent) { + if (agent) { return agent; } const usingSsl = parsedUrl.protocol === 'https:'; let maxSockets = 100; - if (!!this.requestOptions) { + if (this.requestOptions) { maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; } - if (useProxy) { - // If using proxy, need tunnel - if (!tunnel) { - tunnel = __nccwpck_require__(4294); - } + // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis. + if (proxyUrl && proxyUrl.hostname) { const agentOptions = { - maxSockets: maxSockets, + maxSockets, keepAlive: this._keepAlive, - proxy: { - proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`, - host: proxyUrl.hostname, - port: proxyUrl.port - } + proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && { + proxyAuth: `${proxyUrl.username}:${proxyUrl.password}` + })), { host: proxyUrl.hostname, port: proxyUrl.port }) }; let tunnelAgent; const overHttps = proxyUrl.protocol === 'https:'; @@ -3829,7 +4915,7 @@ class HttpClient { } // if reusing agent across request and tunneling agent isn't assigned create a new agent if (this._keepAlive && !agent) { - const options = { keepAlive: this._keepAlive, maxSockets: maxSockets }; + const options = { keepAlive: this._keepAlive, maxSockets }; agent = usingSsl ? new https.Agent(options) : new http.Agent(options); this._agent = agent; } @@ -3847,110 +4933,152 @@ class HttpClient { } return agent; } + _getProxyAgentDispatcher(parsedUrl, proxyUrl) { + let proxyAgent; + if (this._keepAlive) { + proxyAgent = this._proxyAgentDispatcher; + } + // if agent is already assigned use that agent. + if (proxyAgent) { + return proxyAgent; + } + const usingSsl = parsedUrl.protocol === 'https:'; + proxyAgent = new undici_1.ProxyAgent(Object.assign({ uri: proxyUrl.href, pipelining: !this._keepAlive ? 
0 : 1 }, ((proxyUrl.username || proxyUrl.password) && { + token: `${proxyUrl.username}:${proxyUrl.password}` + }))); + this._proxyAgentDispatcher = proxyAgent; + if (usingSsl && this._ignoreSslError) { + // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process + // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options + // we have to cast it to any and change it directly + proxyAgent.options = Object.assign(proxyAgent.options.requestTls || {}, { + rejectUnauthorized: false + }); + } + return proxyAgent; + } _performExponentialBackoff(retryNumber) { - retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); - const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); - return new Promise(resolve => setTimeout(() => resolve(), ms)); - } - static dateTimeDeserializer(key, value) { - if (typeof value === 'string') { - let a = new Date(value); - if (!isNaN(a.valueOf())) { - return a; - } - } - return value; - } - async _processResponse(res, options) { - return new Promise(async (resolve, reject) => { - const statusCode = res.message.statusCode; - const response = { - statusCode: statusCode, - result: null, - headers: {} - }; - // not found leads to null obj returned - if (statusCode == HttpCodes.NotFound) { - resolve(response); - } - let obj; - let contents; - // get the result from the body - try { - contents = await res.readBody(); - if (contents && contents.length > 0) { - if (options && options.deserializeDates) { - obj = JSON.parse(contents, HttpClient.dateTimeDeserializer); + return __awaiter(this, void 0, void 0, function* () { + retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); + const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); + return new Promise(resolve => setTimeout(() => resolve(), ms)); + }); + } + _processResponse(res, options) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { + const statusCode = res.message.statusCode || 0; + const response = { + statusCode, + result: null, + headers: {} + }; + // not found leads to null obj returned + if (statusCode === HttpCodes.NotFound) { + resolve(response); + } + // get the result from the body + function dateTimeDeserializer(key, value) { + if (typeof value === 'string') { + const a = new Date(value); + if (!isNaN(a.valueOf())) { + return a; + } } - else { - obj = JSON.parse(contents); + return value; + } + let obj; + let contents; + try { + contents = yield res.readBody(); + if (contents && contents.length > 0) { + if (options && options.deserializeDates) { + obj = JSON.parse(contents, dateTimeDeserializer); + } + else { + obj = JSON.parse(contents); + } + response.result = obj; } - response.result = obj; + response.headers = res.message.headers; } - response.headers = res.message.headers; - } - catch (err) { - // Invalid resource (contents not json); leaving result obj null - } - // note that 3xx redirects are handled by the http layer. - if (statusCode > 299) { - let msg; - // if exception/error in body, attempt to get better error - if (obj && obj.message) { - msg = obj.message; + catch (err) { + // Invalid resource (contents not json); leaving result obj null } - else if (contents && contents.length > 0) { - // it may be the case that the exception is in the body message as string - msg = contents; + // note that 3xx redirects are handled by the http layer. 
+ if (statusCode > 299) { + let msg; + // if exception/error in body, attempt to get better error + if (obj && obj.message) { + msg = obj.message; + } + else if (contents && contents.length > 0) { + // it may be the case that the exception is in the body message as string + msg = contents; + } + else { + msg = `Failed request: (${statusCode})`; + } + const err = new HttpClientError(msg, statusCode); + err.result = response.result; + reject(err); } else { - msg = 'Failed request: (' + statusCode + ')'; + resolve(response); } - let err = new HttpClientError(msg, statusCode); - err.result = response.result; - reject(err); - } - else { - resolve(response); - } + })); }); } } exports.HttpClient = HttpClient; - +const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); +//# sourceMappingURL=index.js.map /***/ }), -/***/ 6443: +/***/ 9835: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.checkBypass = exports.getProxyUrl = void 0; function getProxyUrl(reqUrl) { - let usingSsl = reqUrl.protocol === 'https:'; - let proxyUrl; + const usingSsl = reqUrl.protocol === 'https:'; if (checkBypass(reqUrl)) { - return proxyUrl; + return undefined; } - let proxyVar; - if (usingSsl) { - proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY']; + const proxyVar = (() => { + if (usingSsl) { + return process.env['https_proxy'] || process.env['HTTPS_PROXY']; + } + else { + return process.env['http_proxy'] || process.env['HTTP_PROXY']; + } + })(); + if (proxyVar) { + try { + return new URL(proxyVar); + } + catch (_a) { + if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://')) + return new URL(`http://${proxyVar}`); + } } else { - proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY']; - } - if (proxyVar) { - proxyUrl = new URL(proxyVar); + return undefined; } - return proxyUrl; } exports.getProxyUrl = getProxyUrl; function checkBypass(reqUrl) { if (!reqUrl.hostname) { return false; } - let noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; + const reqHost = reqUrl.hostname; + if (isLoopbackAddress(reqHost)) { + return true; + } + const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; if (!noProxy) { return false; } @@ -3966,23 +5094,34 @@ function checkBypass(reqUrl) { reqPort = 443; } // Format the request hostname and hostname with port - let upperReqHosts = [reqUrl.hostname.toUpperCase()]; + const upperReqHosts = [reqUrl.hostname.toUpperCase()]; if (typeof reqPort === 'number') { upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); } // Compare request host against noproxy - for (let upperNoProxyItem of noProxy + for (const upperNoProxyItem of noProxy .split(',') .map(x => x.trim().toUpperCase()) .filter(x => x)) { - if (upperReqHosts.some(x => x === upperNoProxyItem)) { + if (upperNoProxyItem === '*' || + upperReqHosts.some(x => x === upperNoProxyItem || + x.endsWith(`.${upperNoProxyItem}`) || + (upperNoProxyItem.startsWith('.') && + x.endsWith(`${upperNoProxyItem}`)))) { return true; } } return false; } exports.checkBypass = checkBypass; - +function isLoopbackAddress(host) { + const hostLower = host.toLowerCase(); + return (hostLower === 'localhost' || + hostLower.startsWith('127.') || + hostLower.startsWith('[::1]') || + hostLower.startsWith('[0:0:0:0:0:0:0:1]')); +} +//# sourceMappingURL=proxy.js.map /***/ }), @@ -3991,6 +5130,25 @@ exports.checkBypass = checkBypass; "use strict"; +var __createBinding 
= (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { @@ -4002,11 +5160,17 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }; var _a; Object.defineProperty(exports, "__esModule", ({ value: true })); -const assert_1 = __nccwpck_require__(2357); -const fs = __nccwpck_require__(5747); -const path = __nccwpck_require__(5622); -_a = fs.promises, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink; +exports.getCmdPath = exports.tryGetExecutablePath = exports.isRooted = exports.isDirectory = exports.exists = exports.READONLY = exports.UV_FS_O_EXLOCK = exports.IS_WINDOWS = exports.unlink = exports.symlink = exports.stat = exports.rmdir = exports.rm = exports.rename = exports.readlink = exports.readdir = exports.open = exports.mkdir = exports.lstat = exports.copyFile = exports.chmod = void 0; +const fs = __importStar(__nccwpck_require__(7147)); +const path = __importStar(__nccwpck_require__(1017)); +_a = fs.promises +// export const {open} = 'fs' +, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.open = _a.open, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rm = _a.rm, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink; +// export const {open} = 'fs' exports.IS_WINDOWS = process.platform === 'win32'; +// See https://github.com/nodejs/node/blob/d0153aee367422d0858105abec186da4dff0a0c5/deps/uv/include/uv/win.h#L691 +exports.UV_FS_O_EXLOCK = 0x10000000; +exports.READONLY = fs.constants.O_RDONLY; function exists(fsPath) { return __awaiter(this, void 0, void 0, function* () { try { @@ -4045,49 +5209,6 @@ function isRooted(p) { return p.startsWith('/'); } exports.isRooted = isRooted; -/** - * Recursively create a directory at `fsPath`. - * - * This implementation is optimistic, meaning it attempts to create the full - * path first, and backs up the path stack from there. 
- * - * @param fsPath The path to create - * @param maxDepth The maximum recursion depth - * @param depth The current recursion depth - */ -function mkdirP(fsPath, maxDepth = 1000, depth = 1) { - return __awaiter(this, void 0, void 0, function* () { - assert_1.ok(fsPath, 'a path argument must be provided'); - fsPath = path.resolve(fsPath); - if (depth >= maxDepth) - return exports.mkdir(fsPath); - try { - yield exports.mkdir(fsPath); - return; - } - catch (err) { - switch (err.code) { - case 'ENOENT': { - yield mkdirP(path.dirname(fsPath), maxDepth, depth + 1); - yield exports.mkdir(fsPath); - return; - } - default: { - let stats; - try { - stats = yield exports.stat(fsPath); - } - catch (err2) { - throw err; - } - if (!stats.isDirectory()) - throw err; - } - } - } - }); -} -exports.mkdirP = mkdirP; /** * Best effort attempt to determine whether a file exists and is executable. * @param filePath file path to check @@ -4184,6 +5305,12 @@ function isUnixExecutable(stats) { ((stats.mode & 8) > 0 && stats.gid === process.getgid()) || ((stats.mode & 64) > 0 && stats.uid === process.getuid())); } +// Get the path of cmd.exe in windows +function getCmdPath() { + var _a; + return (_a = process.env['COMSPEC']) !== null && _a !== void 0 ? _a : `cmd.exe`; +} +exports.getCmdPath = getCmdPath; //# sourceMappingURL=io-util.js.map /***/ }), @@ -4193,6 +5320,25 @@ function isUnixExecutable(stats) { "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { @@ -4203,11 +5349,10 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); -const childProcess = __nccwpck_require__(3129); -const path = __nccwpck_require__(5622); -const util_1 = __nccwpck_require__(1669); -const ioUtil = __nccwpck_require__(1962); -const exec = util_1.promisify(childProcess.exec); +exports.findInPath = exports.which = exports.mkdirP = exports.rmRF = exports.mv = exports.cp = void 0; +const assert_1 = __nccwpck_require__(9491); +const path = __importStar(__nccwpck_require__(1017)); +const ioUtil = __importStar(__nccwpck_require__(1962)); /** * Copies a file or folder. 
* Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js @@ -4218,14 +5363,14 @@ const exec = util_1.promisify(childProcess.exec); */ function cp(source, dest, options = {}) { return __awaiter(this, void 0, void 0, function* () { - const { force, recursive } = readCopyOptions(options); + const { force, recursive, copySourceDirectory } = readCopyOptions(options); const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null; // Dest is an existing file, but not forcing if (destStat && destStat.isFile() && !force) { return; } // If dest is an existing directory, should copy inside. - const newDest = destStat && destStat.isDirectory() + const newDest = destStat && destStat.isDirectory() && copySourceDirectory ? path.join(dest, path.basename(source)) : dest; if (!(yield ioUtil.exists(source))) { @@ -4288,51 +5433,23 @@ exports.mv = mv; function rmRF(inputPath) { return __awaiter(this, void 0, void 0, function* () { if (ioUtil.IS_WINDOWS) { - // Node doesn't provide a delete operation, only an unlink function. This means that if the file is being used by another - // program (e.g. antivirus), it won't be deleted. To address this, we shell out the work to rd/del. - try { - if (yield ioUtil.isDirectory(inputPath, true)) { - yield exec(`rd /s /q "${inputPath}"`); - } - else { - yield exec(`del /f /a "${inputPath}"`); - } - } - catch (err) { - // if you try to delete a file that doesn't exist, desired result is achieved - // other errors are valid - if (err.code !== 'ENOENT') - throw err; - } - // Shelling out fails to remove a symlink folder with missing source, this unlink catches that - try { - yield ioUtil.unlink(inputPath); - } - catch (err) { - // if you try to delete a file that doesn't exist, desired result is achieved - // other errors are valid - if (err.code !== 'ENOENT') - throw err; + // Check for invalid characters + // https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file + if (/[*"<>|]/.test(inputPath)) { + throw new Error('File path must not contain `*`, `"`, `<`, `>` or `|` on Windows'); } } - else { - let isDir = false; - try { - isDir = yield ioUtil.isDirectory(inputPath); - } - catch (err) { - // if you try to delete a file that doesn't exist, desired result is achieved - // other errors are valid - if (err.code !== 'ENOENT') - throw err; - return; - } - if (isDir) { - yield exec(`rm -rf "${inputPath}"`); - } - else { - yield ioUtil.unlink(inputPath); - } + try { + // note if path does not exist, error is silent + yield ioUtil.rm(inputPath, { + force: true, + maxRetries: 3, + recursive: true, + retryDelay: 300 + }); + } + catch (err) { + throw new Error(`File was unable to be removed ${err}`); } }); } @@ -4346,7 +5463,8 @@ exports.rmRF = rmRF; */ function mkdirP(fsPath) { return __awaiter(this, void 0, void 0, function* () { - yield ioUtil.mkdirP(fsPath); + assert_1.ok(fsPath, 'a path argument must be provided'); + yield ioUtil.mkdir(fsPath, { recursive: true }); }); } exports.mkdirP = mkdirP; @@ -4374,62 +5492,80 @@ function which(tool, check) { throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. 
Also check the file mode to verify the file is executable.`); } } + return result; } - try { - // build the list of extensions to try - const extensions = []; - if (ioUtil.IS_WINDOWS && process.env.PATHEXT) { - for (const extension of process.env.PATHEXT.split(path.delimiter)) { - if (extension) { - extensions.push(extension); - } + const matches = yield findInPath(tool); + if (matches && matches.length > 0) { + return matches[0]; + } + return ''; + }); +} +exports.which = which; +/** + * Returns a list of all occurrences of the given tool on the system path. + * + * @returns Promise the paths of the tool + */ +function findInPath(tool) { + return __awaiter(this, void 0, void 0, function* () { + if (!tool) { + throw new Error("parameter 'tool' is required"); + } + // build the list of extensions to try + const extensions = []; + if (ioUtil.IS_WINDOWS && process.env['PATHEXT']) { + for (const extension of process.env['PATHEXT'].split(path.delimiter)) { + if (extension) { + extensions.push(extension); } } - // if it's rooted, return it if exists. otherwise return empty. - if (ioUtil.isRooted(tool)) { - const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions); - if (filePath) { - return filePath; - } - return ''; - } - // if any path separators, return empty - if (tool.includes('/') || (ioUtil.IS_WINDOWS && tool.includes('\\'))) { - return ''; - } - // build the list of directories - // - // Note, technically "where" checks the current directory on Windows. From a toolkit perspective, - // it feels like we should not do this. Checking the current directory seems like more of a use - // case of a shell, and the which() function exposed by the toolkit should strive for consistency - // across platforms. - const directories = []; - if (process.env.PATH) { - for (const p of process.env.PATH.split(path.delimiter)) { - if (p) { - directories.push(p); - } - } + } + // if it's rooted, return it if exists. otherwise return empty. + if (ioUtil.isRooted(tool)) { + const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions); + if (filePath) { + return [filePath]; } - // return the first match - for (const directory of directories) { - const filePath = yield ioUtil.tryGetExecutablePath(directory + path.sep + tool, extensions); - if (filePath) { - return filePath; + return []; + } + // if any path separators, return empty + if (tool.includes(path.sep)) { + return []; + } + // build the list of directories + // + // Note, technically "where" checks the current directory on Windows. From a toolkit perspective, + // it feels like we should not do this. Checking the current directory seems like more of a use + // case of a shell, and the which() function exposed by the toolkit should strive for consistency + // across platforms. + const directories = []; + if (process.env.PATH) { + for (const p of process.env.PATH.split(path.delimiter)) { + if (p) { + directories.push(p); } } - return ''; } - catch (err) { - throw new Error(`which failed with message ${err.message}`); + // find all matches + const matches = []; + for (const directory of directories) { + const filePath = yield ioUtil.tryGetExecutablePath(path.join(directory, tool), extensions); + if (filePath) { + matches.push(filePath); + } } + return matches; }); } -exports.which = which; +exports.findInPath = findInPath; function readCopyOptions(options) { const force = options.force == null ? 
true : options.force; const recursive = Boolean(options.recursive); - return { force, recursive }; + const copySourceDirectory = options.copySourceDirectory == null + ? true + : Boolean(options.copySourceDirectory); + return { force, recursive, copySourceDirectory }; } function cpDirRecursive(sourceDir, destDir, currentDepth, force) { return __awaiter(this, void 0, void 0, function* () { @@ -4492,7 +5628,7 @@ function copyFile(srcFile, destFile, force) { Object.defineProperty(exports, "__esModule", ({ value: true })); exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0; -const fs = __nccwpck_require__(5747); +const fs = __nccwpck_require__(7147); exports.FILE_SYSTEM_ADAPTER = { lstat: fs.lstat, stat: fs.stat, @@ -4520,8 +5656,11 @@ exports.createFileSystemAdapter = createFileSystemAdapter; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.IS_SUPPORT_READDIR_WITH_FILE_TYPES = void 0; const NODE_PROCESS_VERSION_PARTS = process.versions.node.split('.'); -const MAJOR_VERSION = parseInt(NODE_PROCESS_VERSION_PARTS[0], 10); -const MINOR_VERSION = parseInt(NODE_PROCESS_VERSION_PARTS[1], 10); +if (NODE_PROCESS_VERSION_PARTS[0] === undefined || NODE_PROCESS_VERSION_PARTS[1] === undefined) { + throw new Error(`Unexpected behavior. The 'process.versions.node' variable has invalid value: ${process.versions.node}`); +} +const MAJOR_VERSION = Number.parseInt(NODE_PROCESS_VERSION_PARTS[0], 10); +const MINOR_VERSION = Number.parseInt(NODE_PROCESS_VERSION_PARTS[1], 10); const SUPPORTED_MAJOR_VERSION = 10; const SUPPORTED_MINOR_VERSION = 10; const IS_MATCHED_BY_MAJOR = MAJOR_VERSION > SUPPORTED_MAJOR_VERSION; @@ -4547,7 +5686,8 @@ const settings_1 = __nccwpck_require__(8662); exports.Settings = settings_1.default; function scandir(path, optionsOrSettingsOrCallback, callback) { if (typeof optionsOrSettingsOrCallback === 'function') { - return async.read(path, getSettings(), optionsOrSettingsOrCallback); + async.read(path, getSettings(), optionsOrSettingsOrCallback); + return; } async.read(path, getSettings(optionsOrSettingsOrCallback), callback); } @@ -4581,15 +5721,17 @@ const utils = __nccwpck_require__(6297); const common = __nccwpck_require__(3847); function read(directory, settings, callback) { if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) { - return readdirWithFileTypes(directory, settings, callback); + readdirWithFileTypes(directory, settings, callback); + return; } - return readdir(directory, settings, callback); + readdir(directory, settings, callback); } exports.read = read; function readdirWithFileTypes(directory, settings, callback) { settings.fs.readdir(directory, { withFileTypes: true }, (readdirError, dirents) => { if (readdirError !== null) { - return callFailureCallback(callback, readdirError); + callFailureCallback(callback, readdirError); + return; } const entries = dirents.map((dirent) => ({ dirent, @@ -4597,12 +5739,14 @@ function readdirWithFileTypes(directory, settings, callback) { path: common.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator) })); if (!settings.followSymbolicLinks) { - return callSuccessCallback(callback, entries); + callSuccessCallback(callback, entries); + return; } const tasks = entries.map((entry) => makeRplTaskEntry(entry, settings)); rpl(tasks, (rplError, rplEntries) => { if (rplError !== null) { - return callFailureCallback(callback, rplError); + callFailureCallback(callback, rplError); + return; } callSuccessCallback(callback, rplEntries); }); @@ -4612,46 +5756,54 @@ 
exports.readdirWithFileTypes = readdirWithFileTypes; function makeRplTaskEntry(entry, settings) { return (done) => { if (!entry.dirent.isSymbolicLink()) { - return done(null, entry); + done(null, entry); + return; } settings.fs.stat(entry.path, (statError, stats) => { if (statError !== null) { if (settings.throwErrorOnBrokenSymbolicLink) { - return done(statError); + done(statError); + return; } - return done(null, entry); + done(null, entry); + return; } entry.dirent = utils.fs.createDirentFromStats(entry.name, stats); - return done(null, entry); + done(null, entry); }); }; } function readdir(directory, settings, callback) { settings.fs.readdir(directory, (readdirError, names) => { if (readdirError !== null) { - return callFailureCallback(callback, readdirError); + callFailureCallback(callback, readdirError); + return; } - const filepaths = names.map((name) => common.joinPathSegments(directory, name, settings.pathSegmentSeparator)); - const tasks = filepaths.map((filepath) => { - return (done) => fsStat.stat(filepath, settings.fsStatSettings, done); + const tasks = names.map((name) => { + const path = common.joinPathSegments(directory, name, settings.pathSegmentSeparator); + return (done) => { + fsStat.stat(path, settings.fsStatSettings, (error, stats) => { + if (error !== null) { + done(error); + return; + } + const entry = { + name, + path, + dirent: utils.fs.createDirentFromStats(name, stats) + }; + if (settings.stats) { + entry.stats = stats; + } + done(null, entry); + }); + }; }); - rpl(tasks, (rplError, results) => { + rpl(tasks, (rplError, entries) => { if (rplError !== null) { - return callFailureCallback(callback, rplError); - } - const entries = []; - names.forEach((name, index) => { - const stats = results[index]; - const entry = { - name, - path: filepaths[index], - dirent: utils.fs.createDirentFromStats(name, stats) - }; - if (settings.stats) { - entry.stats = stats; - } - entries.push(entry); - }); + callFailureCallback(callback, rplError); + return; + } callSuccessCallback(callback, entries); }); }); @@ -4756,7 +5908,7 @@ exports.readdir = readdir; "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -const path = __nccwpck_require__(5622); +const path = __nccwpck_require__(1017); const fsStat = __nccwpck_require__(109); const fs = __nccwpck_require__(3803); class Settings { @@ -4777,7 +5929,7 @@ class Settings { return option !== null && option !== void 0 ? 
option : value; } } -exports.default = Settings; +exports["default"] = Settings; /***/ }), @@ -4829,7 +5981,7 @@ exports.fs = fs; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0; -const fs = __nccwpck_require__(5747); +const fs = __nccwpck_require__(7147); exports.FILE_SYSTEM_ADAPTER = { lstat: fs.lstat, stat: fs.stat, @@ -4860,7 +6012,8 @@ const settings_1 = __nccwpck_require__(2410); exports.Settings = settings_1.default; function stat(path, optionsOrSettingsOrCallback, callback) { if (typeof optionsOrSettingsOrCallback === 'function') { - return async.read(path, getSettings(), optionsOrSettingsOrCallback); + async.read(path, getSettings(), optionsOrSettingsOrCallback); + return; } async.read(path, getSettings(optionsOrSettingsOrCallback), callback); } @@ -4890,17 +6043,21 @@ exports.read = void 0; function read(path, settings, callback) { settings.fs.lstat(path, (lstatError, lstat) => { if (lstatError !== null) { - return callFailureCallback(callback, lstatError); + callFailureCallback(callback, lstatError); + return; } if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) { - return callSuccessCallback(callback, lstat); + callSuccessCallback(callback, lstat); + return; } settings.fs.stat(path, (statError, stat) => { if (statError !== null) { if (settings.throwErrorOnBrokenSymbolicLink) { - return callFailureCallback(callback, statError); + callFailureCallback(callback, statError); + return; } - return callSuccessCallback(callback, lstat); + callSuccessCallback(callback, lstat); + return; } if (settings.markSymbolicLink) { stat.isSymbolicLink = () => true; @@ -4970,7 +6127,7 @@ class Settings { return option !== null && option !== void 0 ? option : value; } } -exports.default = Settings; +exports["default"] = Settings; /***/ }), @@ -4989,7 +6146,8 @@ const settings_1 = __nccwpck_require__(141); exports.Settings = settings_1.default; function walk(directory, optionsOrSettingsOrCallback, callback) { if (typeof optionsOrSettingsOrCallback === 'function') { - return new async_1.default(directory, getSettings()).read(optionsOrSettingsOrCallback); + new async_1.default(directory, getSettings()).read(optionsOrSettingsOrCallback); + return; } new async_1.default(directory, getSettings(optionsOrSettingsOrCallback)).read(callback); } @@ -5028,22 +6186,22 @@ class AsyncProvider { this._root = _root; this._settings = _settings; this._reader = new async_1.default(this._root, this._settings); - this._storage = new Set(); + this._storage = []; } read(callback) { this._reader.onError((error) => { callFailureCallback(callback, error); }); this._reader.onEntry((entry) => { - this._storage.add(entry); + this._storage.push(entry); }); this._reader.onEnd(() => { - callSuccessCallback(callback, [...this._storage]); + callSuccessCallback(callback, this._storage); }); this._reader.read(); } } -exports.default = AsyncProvider; +exports["default"] = AsyncProvider; function callFailureCallback(callback, error) { callback(error); } @@ -5060,7 +6218,7 @@ function callSuccessCallback(callback, entries) { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -const stream_1 = __nccwpck_require__(2413); +const stream_1 = __nccwpck_require__(2781); const async_1 = __nccwpck_require__(5732); class StreamProvider { constructor(_root, _settings) { @@ -5091,7 +6249,7 @@ class StreamProvider { return this._stream; } } -exports.default = StreamProvider; +exports["default"] = StreamProvider; /***/ }), @@ 
-5113,7 +6271,7 @@ class SyncProvider { return this._reader.read(); } } -exports.default = SyncProvider; +exports["default"] = SyncProvider; /***/ }), @@ -5124,7 +6282,7 @@ exports.default = SyncProvider; "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -const events_1 = __nccwpck_require__(8614); +const events_1 = __nccwpck_require__(2361); const fsScandir = __nccwpck_require__(5667); const fastq = __nccwpck_require__(7340); const common = __nccwpck_require__(7988); @@ -5182,7 +6340,8 @@ class AsyncReader extends reader_1.default { _worker(item, done) { this._scandir(item.directory, this._settings.fsScandirSettings, (error, entries) => { if (error !== null) { - return done(error, undefined); + done(error, undefined); + return; } for (const entry of entries) { this._handleEntry(entry, item.base); @@ -5210,14 +6369,14 @@ class AsyncReader extends reader_1.default { this._emitEntry(entry); } if (entry.dirent.isDirectory() && common.isAppliedFilter(this._settings.deepFilter, entry)) { - this._pushToQueue(fullpath, entry.path); + this._pushToQueue(fullpath, base === undefined ? undefined : entry.path); } } _emitEntry(entry) { this._emitter.emit('entry', entry); } } -exports.default = AsyncReader; +exports["default"] = AsyncReader; /***/ }), @@ -5275,7 +6434,7 @@ class Reader { this._root = common.replacePathSegmentSeparator(_root, _settings.pathSegmentSeparator); } } -exports.default = Reader; +exports["default"] = Reader; /***/ }), @@ -5293,13 +6452,13 @@ class SyncReader extends reader_1.default { constructor() { super(...arguments); this._scandir = fsScandir.scandirSync; - this._storage = new Set(); + this._storage = []; this._queue = new Set(); } read() { this._pushToQueue(this._root, this._settings.basePath); this._handleQueue(); - return [...this._storage]; + return this._storage; } _pushToQueue(directory, base) { this._queue.add({ directory, base }); @@ -5335,14 +6494,14 @@ class SyncReader extends reader_1.default { this._pushToStorage(entry); } if (entry.dirent.isDirectory() && common.isAppliedFilter(this._settings.deepFilter, entry)) { - this._pushToQueue(fullpath, entry.path); + this._pushToQueue(fullpath, base === undefined ? undefined : entry.path); } } _pushToStorage(entry) { - this._storage.add(entry); + this._storage.push(entry); } } -exports.default = SyncReader; +exports["default"] = SyncReader; /***/ }), @@ -5353,13 +6512,13 @@ exports.default = SyncReader; "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -const path = __nccwpck_require__(5622); +const path = __nccwpck_require__(1017); const fsScandir = __nccwpck_require__(5667); class Settings { constructor(_options = {}) { this._options = _options; this.basePath = this._getValue(this._options.basePath, undefined); - this.concurrency = this._getValue(this._options.concurrency, Infinity); + this.concurrency = this._getValue(this._options.concurrency, Number.POSITIVE_INFINITY); this.deepFilter = this._getValue(this._options.deepFilter, null); this.entryFilter = this._getValue(this._options.entryFilter, null); this.errorFilter = this._getValue(this._options.errorFilter, null); @@ -5376,448 +6535,476 @@ class Settings { return option !== null && option !== void 0 ? 
option : value; } } -exports.default = Settings; +exports["default"] = Settings; /***/ }), /***/ 334: -/***/ ((__unused_webpack_module, exports) => { +/***/ ((module) => { "use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); -Object.defineProperty(exports, "__esModule", ({ value: true })); +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + createTokenAuth: () => createTokenAuth +}); +module.exports = __toCommonJS(dist_src_exports); +// pkg/dist-src/auth.js +var REGEX_IS_INSTALLATION_LEGACY = /^v1\./; +var REGEX_IS_INSTALLATION = /^ghs_/; +var REGEX_IS_USER_TO_SERVER = /^ghu_/; async function auth(token) { - const tokenType = token.split(/\./).length === 3 ? "app" : /^v\d+\./.test(token) ? "installation" : "oauth"; + const isApp = token.split(/\./).length === 3; + const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token); + const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token); + const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? 
"user-to-server" : "oauth"; return { type: "token", - token: token, + token, tokenType }; } -/** - * Prefix token for usage in the Authorization header - * - * @param token OAuth token or JSON Web Token - */ +// pkg/dist-src/with-authorization-prefix.js function withAuthorizationPrefix(token) { if (token.split(/\./).length === 3) { return `bearer ${token}`; } - return `token ${token}`; } +// pkg/dist-src/hook.js async function hook(token, request, route, parameters) { - const endpoint = request.endpoint.merge(route, parameters); + const endpoint = request.endpoint.merge( + route, + parameters + ); endpoint.headers.authorization = withAuthorizationPrefix(token); return request(endpoint); } -const createTokenAuth = function createTokenAuth(token) { +// pkg/dist-src/index.js +var createTokenAuth = function createTokenAuth2(token) { if (!token) { throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); } - if (typeof token !== "string") { - throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string"); + throw new Error( + "[@octokit/auth-token] Token passed to createTokenAuth is not a string" + ); } - token = token.replace(/^(token|bearer) +/i, ""); return Object.assign(auth.bind(null, token), { hook: hook.bind(null, token) }); }; - -exports.createTokenAuth = createTokenAuth; -//# sourceMappingURL=index.js.map +// Annotate the CommonJS export names for ESM import in node: +0 && (0); /***/ }), /***/ 6762: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -var universalUserAgent = __nccwpck_require__(5030); -var beforeAfterHook = __nccwpck_require__(3682); -var request = __nccwpck_require__(6234); -var graphql = __nccwpck_require__(8467); -var authToken = __nccwpck_require__(334); - -function _objectWithoutPropertiesLoose(source, excluded) { - if (source == null) return {}; - var target = {}; - var sourceKeys = Object.keys(source); - var key, i; - - for (i = 0; i < sourceKeys.length; i++) { - key = sourceKeys[i]; - if (excluded.indexOf(key) >= 0) continue; - target[key] = source[key]; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); - return target; -} - -function _objectWithoutProperties(source, excluded) { - if (source == null) return {}; - - var target = _objectWithoutPropertiesLoose(source, excluded); - - var key, i; - - if (Object.getOwnPropertySymbols) { - var sourceSymbolKeys = Object.getOwnPropertySymbols(source); - - for (i = 0; i < sourceSymbolKeys.length; i++) { - key = sourceSymbolKeys[i]; - if (excluded.indexOf(key) >= 0) continue; - if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue; - target[key] = source[key]; - } +// 
pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + Octokit: () => Octokit +}); +module.exports = __toCommonJS(dist_src_exports); +var import_universal_user_agent = __nccwpck_require__(5030); +var import_before_after_hook = __nccwpck_require__(3682); +var import_request = __nccwpck_require__(6234); +var import_graphql = __nccwpck_require__(8467); +var import_auth_token = __nccwpck_require__(334); + +// pkg/dist-src/version.js +var VERSION = "5.0.2"; + +// pkg/dist-src/index.js +var noop = () => { +}; +var consoleWarn = console.warn.bind(console); +var consoleError = console.error.bind(console); +var userAgentTrail = `octokit-core.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`; +var Octokit = class { + static { + this.VERSION = VERSION; + } + static defaults(defaults) { + const OctokitWithDefaults = class extends this { + constructor(...args) { + const options = args[0] || {}; + if (typeof defaults === "function") { + super(defaults(options)); + return; + } + super( + Object.assign( + {}, + defaults, + options, + options.userAgent && defaults.userAgent ? { + userAgent: `${options.userAgent} ${defaults.userAgent}` + } : null + ) + ); + } + }; + return OctokitWithDefaults; + } + static { + this.plugins = []; + } + /** + * Attach a plugin (or many) to your Octokit instance. + * + * @example + * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) + */ + static plugin(...newPlugins) { + const currentPlugins = this.plugins; + const NewOctokit = class extends this { + static { + this.plugins = currentPlugins.concat( + newPlugins.filter((plugin) => !currentPlugins.includes(plugin)) + ); + } + }; + return NewOctokit; } - - return target; -} - -const VERSION = "3.2.1"; - -class Octokit { constructor(options = {}) { - const hook = new beforeAfterHook.Collection(); + const hook = new import_before_after_hook.Collection(); const requestDefaults = { - baseUrl: request.request.endpoint.DEFAULTS.baseUrl, + baseUrl: import_request.request.endpoint.DEFAULTS.baseUrl, headers: {}, request: Object.assign({}, options.request, { + // @ts-ignore internal usage only, no need to type hook: hook.bind(null, "request") }), mediaType: { previews: [], format: "" } - }; // prepend default user agent with `options.userAgent` if set - - requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" "); - + }; + requestDefaults.headers["user-agent"] = options.userAgent ? `${options.userAgent} ${userAgentTrail}` : userAgentTrail; if (options.baseUrl) { requestDefaults.baseUrl = options.baseUrl; } - if (options.previews) { requestDefaults.mediaType.previews = options.previews; } - if (options.timeZone) { requestDefaults.headers["time-zone"] = options.timeZone; } - - this.request = request.request.defaults(requestDefaults); - this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults); - this.log = Object.assign({ - debug: () => {}, - info: () => {}, - warn: console.warn.bind(console), - error: console.error.bind(console) - }, options.log); - this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance - // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered. - // (2) If only `options.auth` is set, use the default token authentication strategy. - // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. 
Always pass own request as many strategies accept a custom request instance. - // TODO: type `options.auth` based on `options.authStrategy`. - + this.request = import_request.request.defaults(requestDefaults); + this.graphql = (0, import_graphql.withCustomRequest)(this.request).defaults(requestDefaults); + this.log = Object.assign( + { + debug: noop, + info: noop, + warn: consoleWarn, + error: consoleError + }, + options.log + ); + this.hook = hook; if (!options.authStrategy) { if (!options.auth) { - // (1) this.auth = async () => ({ type: "unauthenticated" }); } else { - // (2) - const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯ - + const auth = (0, import_auth_token.createTokenAuth)(options.auth); hook.wrap("request", auth.hook); this.auth = auth; } } else { - const { - authStrategy - } = options, - otherOptions = _objectWithoutProperties(options, ["authStrategy"]); - - const auth = authStrategy(Object.assign({ - request: this.request, - log: this.log, - // we pass the current octokit instance as well as its constructor options - // to allow for authentication strategies that return a new octokit instance - // that shares the same internal state as the current one. The original - // requirement for this was the "event-octokit" authentication strategy - // of https://github.com/probot/octokit-auth-probot. - octokit: this, - octokitOptions: otherOptions - }, options.auth)); // @ts-ignore ¯\_(ツ)_/¯ - + const { authStrategy, ...otherOptions } = options; + const auth = authStrategy( + Object.assign( + { + request: this.request, + log: this.log, + // we pass the current octokit instance as well as its constructor options + // to allow for authentication strategies that return a new octokit instance + // that shares the same internal state as the current one. The original + // requirement for this was the "event-octokit" authentication strategy + // of https://github.com/probot/octokit-auth-probot. + octokit: this, + octokitOptions: otherOptions + }, + options.auth + ) + ); hook.wrap("request", auth.hook); this.auth = auth; - } // apply plugins - // https://stackoverflow.com/a/16345172 - - + } const classConstructor = this.constructor; - classConstructor.plugins.forEach(plugin => { - Object.assign(this, plugin(this, options)); - }); - } - - static defaults(defaults) { - const OctokitWithDefaults = class extends this { - constructor(...args) { - const options = args[0] || {}; - - if (typeof defaults === "function") { - super(defaults(options)); - return; - } - - super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? { - userAgent: `${options.userAgent} ${defaults.userAgent}` - } : null)); - } - - }; - return OctokitWithDefaults; - } - /** - * Attach a plugin (or many) to your Octokit instance. - * - * @example - * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) 
- */ - - - static plugin(...newPlugins) { - var _a; - - const currentPlugins = this.plugins; - const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a); - return NewOctokit; + for (let i = 0; i < classConstructor.plugins.length; ++i) { + Object.assign(this, classConstructor.plugins[i](this, options)); + } } - -} -Octokit.VERSION = VERSION; -Octokit.plugins = []; - -exports.Octokit = Octokit; -//# sourceMappingURL=index.js.map +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); /***/ }), /***/ 9440: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); -Object.defineProperty(exports, "__esModule", ({ value: true })); +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + endpoint: () => endpoint +}); +module.exports = __toCommonJS(dist_src_exports); + +// pkg/dist-src/defaults.js +var import_universal_user_agent = __nccwpck_require__(5030); + +// pkg/dist-src/version.js +var VERSION = "9.0.4"; -var isPlainObject = __nccwpck_require__(558); -var universalUserAgent = __nccwpck_require__(5030); +// pkg/dist-src/defaults.js +var userAgent = `octokit-endpoint.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`; +var DEFAULTS = { + method: "GET", + baseUrl: "https://api.github.com", + headers: { + accept: "application/vnd.github.v3+json", + "user-agent": userAgent + }, + mediaType: { + format: "" + } +}; +// pkg/dist-src/util/lowercase-keys.js function lowercaseKeys(object) { if (!object) { return {}; } - return Object.keys(object).reduce((newObj, key) => { newObj[key.toLowerCase()] = object[key]; return newObj; }, {}); } +// pkg/dist-src/util/is-plain-object.js +function isPlainObject(value) { + if (typeof value !== "object" || value === null) + return false; + if (Object.prototype.toString.call(value) !== "[object Object]") + return false; + const proto = Object.getPrototypeOf(value); + if (proto === null) + return true; + const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor; + return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value); +} + +// pkg/dist-src/util/merge-deep.js function mergeDeep(defaults, options) { const result = Object.assign({}, defaults); - Object.keys(options).forEach(key => { - if (isPlainObject.isPlainObject(options[key])) { - if (!(key in defaults)) Object.assign(result, { - [key]: options[key] - });else result[key] = mergeDeep(defaults[key], options[key]); + Object.keys(options).forEach((key) => { + if (isPlainObject(options[key])) { + if 
(!(key in defaults)) + Object.assign(result, { [key]: options[key] }); + else + result[key] = mergeDeep(defaults[key], options[key]); } else { - Object.assign(result, { - [key]: options[key] - }); + Object.assign(result, { [key]: options[key] }); } }); return result; } +// pkg/dist-src/util/remove-undefined-properties.js function removeUndefinedProperties(obj) { for (const key in obj) { - if (obj[key] === undefined) { + if (obj[key] === void 0) { delete obj[key]; } } - return obj; } +// pkg/dist-src/merge.js function merge(defaults, route, options) { if (typeof route === "string") { let [method, url] = route.split(" "); - options = Object.assign(url ? { - method, - url - } : { - url: method - }, options); + options = Object.assign(url ? { method, url } : { url: method }, options); } else { options = Object.assign({}, route); - } // lowercase header names before merging with defaults to avoid duplicates - - - options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging - + } + options.headers = lowercaseKeys(options.headers); removeUndefinedProperties(options); removeUndefinedProperties(options.headers); - const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten - - if (defaults && defaults.mediaType.previews.length) { - mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews); + const mergedOptions = mergeDeep(defaults || {}, options); + if (options.url === "/graphql") { + if (defaults && defaults.mediaType.previews?.length) { + mergedOptions.mediaType.previews = defaults.mediaType.previews.filter( + (preview) => !mergedOptions.mediaType.previews.includes(preview) + ).concat(mergedOptions.mediaType.previews); + } + mergedOptions.mediaType.previews = (mergedOptions.mediaType.previews || []).map((preview) => preview.replace(/-preview/, "")); } - - mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, "")); return mergedOptions; } +// pkg/dist-src/util/add-query-parameters.js function addQueryParameters(url, parameters) { const separator = /\?/.test(url) ? 
"&" : "?"; const names = Object.keys(parameters); - if (names.length === 0) { return url; } - - return url + separator + names.map(name => { + return url + separator + names.map((name) => { if (name === "q") { return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); } - return `${name}=${encodeURIComponent(parameters[name])}`; }).join("&"); } -const urlVariableRegex = /\{[^}]+\}/g; - +// pkg/dist-src/util/extract-url-variable-names.js +var urlVariableRegex = /\{[^}]+\}/g; function removeNonChars(variableName) { return variableName.replace(/^\W+|\W+$/g, "").split(/,/); } - function extractUrlVariableNames(url) { const matches = url.match(urlVariableRegex); - if (!matches) { return []; } - return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); } +// pkg/dist-src/util/omit.js function omit(object, keysToOmit) { - return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => { - obj[key] = object[key]; - return obj; - }, {}); + const result = { __proto__: null }; + for (const key of Object.keys(object)) { + if (keysToOmit.indexOf(key) === -1) { + result[key] = object[key]; + } + } + return result; } -// Based on https://github.com/bramstein/url-template, licensed under BSD -// TODO: create separate package. -// -// Copyright (c) 2012-2014, Bram Stein -// All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// 1. Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// 2. Redistributions in binary form must reproduce the above copyright -// notice, this list of conditions and the following disclaimer in the -// documentation and/or other materials provided with the distribution. -// 3. The name of the author may not be used to endorse or promote products -// derived from this software without specific prior written permission. -// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED -// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF -// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO -// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, -// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY -// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, -// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -/* istanbul ignore file */ +// pkg/dist-src/util/url-template.js function encodeReserved(str) { - return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) { + return str.split(/(%[0-9A-Fa-f]{2})/g).map(function(part) { if (!/%[0-9A-Fa-f]/.test(part)) { part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); } - return part; }).join(""); } - function encodeUnreserved(str) { - return encodeURIComponent(str).replace(/[!'()*]/g, function (c) { + return encodeURIComponent(str).replace(/[!'()*]/g, function(c) { return "%" + c.charCodeAt(0).toString(16).toUpperCase(); }); } - function encodeValue(operator, value, key) { value = operator === "+" || operator === "#" ? 
encodeReserved(value) : encodeUnreserved(value); - if (key) { return encodeUnreserved(key) + "=" + value; } else { return value; } } - function isDefined(value) { - return value !== undefined && value !== null; + return value !== void 0 && value !== null; } - function isKeyOperator(operator) { return operator === ";" || operator === "&" || operator === "?"; } - function getValues(context, operator, key, modifier) { - var value = context[key], - result = []; - + var value = context[key], result = []; if (isDefined(value) && value !== "") { if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { value = value.toString(); - if (modifier && modifier !== "*") { value = value.substring(0, parseInt(modifier, 10)); } - - result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); + result.push( + encodeValue(operator, value, isKeyOperator(operator) ? key : "") + ); } else { if (modifier === "*") { if (Array.isArray(value)) { - value.filter(isDefined).forEach(function (value) { - result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); + value.filter(isDefined).forEach(function(value2) { + result.push( + encodeValue(operator, value2, isKeyOperator(operator) ? key : "") + ); }); } else { - Object.keys(value).forEach(function (k) { + Object.keys(value).forEach(function(k) { if (isDefined(value[k])) { result.push(encodeValue(operator, value[k], k)); } @@ -5825,20 +7012,18 @@ function getValues(context, operator, key, modifier) { } } else { const tmp = []; - if (Array.isArray(value)) { - value.filter(isDefined).forEach(function (value) { - tmp.push(encodeValue(operator, value)); + value.filter(isDefined).forEach(function(value2) { + tmp.push(encodeValue(operator, value2)); }); } else { - Object.keys(value).forEach(function (k) { + Object.keys(value).forEach(function(k) { if (isDefined(value[k])) { tmp.push(encodeUnreserved(k)); tmp.push(encodeValue(operator, value[k].toString())); } }); } - if (isKeyOperator(operator)) { result.push(encodeUnreserved(key) + "=" + tmp.join(",")); } else if (tmp.length !== 0) { @@ -5857,89 +7042,93 @@ function getValues(context, operator, key, modifier) { result.push(""); } } - return result; } - function parseUrl(template) { return { expand: expand.bind(null, template) }; } - function expand(template, context) { var operators = ["+", "#", ".", "/", ";", "?", "&"]; - return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) { - if (expression) { - let operator = ""; - const values = []; - - if (operators.indexOf(expression.charAt(0)) !== -1) { - operator = expression.charAt(0); - expression = expression.substr(1); - } - - expression.split(/,/g).forEach(function (variable) { - var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); - values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); - }); - - if (operator && operator !== "+") { - var separator = ","; - - if (operator === "?") { - separator = "&"; - } else if (operator !== "#") { - separator = operator; + template = template.replace( + /\{([^\{\}]+)\}|([^\{\}]+)/g, + function(_, expression, literal) { + if (expression) { + let operator = ""; + const values = []; + if (operators.indexOf(expression.charAt(0)) !== -1) { + operator = expression.charAt(0); + expression = expression.substr(1); + } + expression.split(/,/g).forEach(function(variable) { + var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); + values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); + }); + if (operator && operator 
!== "+") { + var separator = ","; + if (operator === "?") { + separator = "&"; + } else if (operator !== "#") { + separator = operator; + } + return (values.length !== 0 ? operator : "") + values.join(separator); + } else { + return values.join(","); } - - return (values.length !== 0 ? operator : "") + values.join(separator); } else { - return values.join(","); + return encodeReserved(literal); } - } else { - return encodeReserved(literal); } - }); + ); + if (template === "/") { + return template; + } else { + return template.replace(/\/$/, ""); + } } +// pkg/dist-src/parse.js function parse(options) { - // https://fetch.spec.whatwg.org/#methods - let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible - + let method = options.method.toUpperCase(); let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); let headers = Object.assign({}, options.headers); let body; - let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later - + let parameters = omit(options, [ + "method", + "baseUrl", + "url", + "headers", + "request", + "mediaType" + ]); const urlVariableNames = extractUrlVariableNames(url); url = parseUrl(url).expand(parameters); - if (!/^http/.test(url)) { url = options.baseUrl + url; } - - const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl"); + const omittedParameters = Object.keys(options).filter((option) => urlVariableNames.includes(option)).concat("baseUrl"); const remainingParameters = omit(parameters, omittedParameters); const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); - if (!isBinaryRequest) { if (options.mediaType.format) { - // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw - headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(","); + headers.accept = headers.accept.split(/,/).map( + (format) => format.replace( + /application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, + `application/vnd$1$2.${options.mediaType.format}` + ) + ).join(","); } - - if (options.mediaType.previews.length) { - const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; - headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => { - const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json"; - return `application/vnd.github.${preview}-preview${format}`; - }).join(","); + if (url.endsWith("/graphql")) { + if (options.mediaType.previews?.length) { + const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; + headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map((preview) => { + const format = options.mediaType.format ? 
`.${options.mediaType.format}` : "+json"; + return `application/vnd.github.${preview}-preview${format}`; + }).join(","); + } } - } // for GET/HEAD requests, set URL query parameters from remaining parameters - // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters - - + } if (["GET", "HEAD"].includes(method)) { url = addQueryParameters(url, remainingParameters); } else { @@ -5948,220 +7137,189 @@ function parse(options) { } else { if (Object.keys(remainingParameters).length) { body = remainingParameters; - } else { - headers["content-length"] = 0; } } - } // default content-type for JSON if body is set - - + } if (!headers["content-type"] && typeof body !== "undefined") { headers["content-type"] = "application/json; charset=utf-8"; - } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body. - // fetch does not allow to set `content-length` header, but we can set body to an empty string - - + } if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { body = ""; - } // Only return body/request keys if present - - - return Object.assign({ - method, - url, - headers - }, typeof body !== "undefined" ? { - body - } : null, options.request ? { - request: options.request - } : null); + } + return Object.assign( + { method, url, headers }, + typeof body !== "undefined" ? { body } : null, + options.request ? { request: options.request } : null + ); } +// pkg/dist-src/endpoint-with-defaults.js function endpointWithDefaults(defaults, route, options) { return parse(merge(defaults, route, options)); } +// pkg/dist-src/with-defaults.js function withDefaults(oldDefaults, newDefaults) { - const DEFAULTS = merge(oldDefaults, newDefaults); - const endpoint = endpointWithDefaults.bind(null, DEFAULTS); - return Object.assign(endpoint, { - DEFAULTS, - defaults: withDefaults.bind(null, DEFAULTS), - merge: merge.bind(null, DEFAULTS), + const DEFAULTS2 = merge(oldDefaults, newDefaults); + const endpoint2 = endpointWithDefaults.bind(null, DEFAULTS2); + return Object.assign(endpoint2, { + DEFAULTS: DEFAULTS2, + defaults: withDefaults.bind(null, DEFAULTS2), + merge: merge.bind(null, DEFAULTS2), parse }); } -const VERSION = "6.0.9"; - -const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url. -// So we use RequestParameters and add method as additional required property. - -const DEFAULTS = { - method: "GET", - baseUrl: "https://api.github.com", - headers: { - accept: "application/vnd.github.v3+json", - "user-agent": userAgent - }, - mediaType: { - format: "", - previews: [] - } -}; - -const endpoint = withDefaults(null, DEFAULTS); - -exports.endpoint = endpoint; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 558: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -/*! - * is-plain-object - * - * Copyright (c) 2014-2017, Jon Schlinkert. - * Released under the MIT License. 
- */ - -function isObject(o) { - return Object.prototype.toString.call(o) === '[object Object]'; -} - -function isPlainObject(o) { - var ctor,prot; - - if (isObject(o) === false) return false; - - // If has modified constructor - ctor = o.constructor; - if (ctor === undefined) return true; - - // If has modified prototype - prot = ctor.prototype; - if (isObject(prot) === false) return false; - - // If constructor does not have an Object-specific method - if (prot.hasOwnProperty('isPrototypeOf') === false) { - return false; - } - - // Most likely a plain Object - return true; -} - -exports.isPlainObject = isPlainObject; +// pkg/dist-src/index.js +var endpoint = withDefaults(null, DEFAULTS); +// Annotate the CommonJS export names for ESM import in node: +0 && (0); /***/ }), /***/ 8467: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -var request = __nccwpck_require__(6234); -var universalUserAgent = __nccwpck_require__(5030); - -const VERSION = "4.5.7"; - -class GraphqlError extends Error { - constructor(request, response) { - const message = response.data.errors[0].message; - super(message); - Object.assign(this, response.data); - Object.assign(this, { - headers: response.headers - }); - this.name = "GraphqlError"; - this.request = request; // Maintains proper stack trace (only available on V8) - - /* istanbul ignore next */ - +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + GraphqlResponseError: () => GraphqlResponseError, + graphql: () => graphql2, + withCustomRequest: () => withCustomRequest +}); +module.exports = __toCommonJS(dist_src_exports); +var import_request3 = __nccwpck_require__(6234); +var import_universal_user_agent = __nccwpck_require__(5030); + +// pkg/dist-src/version.js +var VERSION = "7.0.2"; + +// pkg/dist-src/with-defaults.js +var import_request2 = __nccwpck_require__(6234); + +// pkg/dist-src/graphql.js +var import_request = __nccwpck_require__(6234); + +// pkg/dist-src/error.js +function _buildMessageForResponseErrors(data) { + return `Request failed due to following response errors: +` + data.errors.map((e) => ` - ${e.message}`).join("\n"); +} +var GraphqlResponseError = class extends Error { + constructor(request2, headers, response) { + super(_buildMessageForResponseErrors(response)); + this.request = request2; + this.headers = headers; + this.response = response; + this.name = "GraphqlResponseError"; + this.errors = response.errors; + this.data = response.data; if (Error.captureStackTrace) { Error.captureStackTrace(this, this.constructor); } } +}; -} - -const NON_VARIABLE_OPTIONS = 
["method", "baseUrl", "url", "headers", "request", "query", "mediaType"]; -const GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; -function graphql(request, query, options) { - if (typeof query === "string" && options && "query" in options) { - return Promise.reject(new Error(`[@octokit/graphql] "query" cannot be used as variable name`)); +// pkg/dist-src/graphql.js +var NON_VARIABLE_OPTIONS = [ + "method", + "baseUrl", + "url", + "headers", + "request", + "query", + "mediaType" +]; +var FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"]; +var GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; +function graphql(request2, query, options) { + if (options) { + if (typeof query === "string" && "query" in options) { + return Promise.reject( + new Error(`[@octokit/graphql] "query" cannot be used as variable name`) + ); + } + for (const key in options) { + if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) + continue; + return Promise.reject( + new Error( + `[@octokit/graphql] "${key}" cannot be used as variable name` + ) + ); + } } - - const parsedOptions = typeof query === "string" ? Object.assign({ - query - }, options) : query; - const requestOptions = Object.keys(parsedOptions).reduce((result, key) => { + const parsedOptions = typeof query === "string" ? Object.assign({ query }, options) : query; + const requestOptions = Object.keys( + parsedOptions + ).reduce((result, key) => { if (NON_VARIABLE_OPTIONS.includes(key)) { result[key] = parsedOptions[key]; return result; } - if (!result.variables) { result.variables = {}; } - result.variables[key] = parsedOptions[key]; return result; - }, {}); // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix - // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451 - - const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl; - + }, {}); + const baseUrl = parsedOptions.baseUrl || request2.endpoint.DEFAULTS.baseUrl; if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); } - - return request(requestOptions).then(response => { + return request2(requestOptions).then((response) => { if (response.data.errors) { const headers = {}; - for (const key of Object.keys(response.headers)) { headers[key] = response.headers[key]; } - - throw new GraphqlError(requestOptions, { + throw new GraphqlResponseError( + requestOptions, headers, - data: response.data - }); + response.data + ); } - return response.data.data; }); } -function withDefaults(request$1, newDefaults) { - const newRequest = request$1.defaults(newDefaults); - +// pkg/dist-src/with-defaults.js +function withDefaults(request2, newDefaults) { + const newRequest = request2.defaults(newDefaults); const newApi = (query, options) => { return graphql(newRequest, query, options); }; - return Object.assign(newApi, { defaults: withDefaults.bind(null, newRequest), - endpoint: request.request.endpoint + endpoint: newRequest.endpoint }); } -const graphql$1 = withDefaults(request.request, { +// pkg/dist-src/index.js +var graphql2 = withDefaults(import_request3.request, { headers: { - "user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}` + "user-agent": `octokit-graphql.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}` }, method: "POST", url: "/graphql" @@ -6172,45 +7330,59 @@ function withCustomRequest(customRequest) { url: "/graphql" }); } - -exports.graphql = graphql$1; -exports.withCustomRequest = withCustomRequest; -//# sourceMappingURL=index.js.map +// Annotate the CommonJS 
export names for ESM import in node: +0 && (0); /***/ }), /***/ 4193: -/***/ ((__unused_webpack_module, exports) => { +/***/ ((module) => { "use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + composePaginateRest: () => composePaginateRest, + isPaginatingEndpoint: () => isPaginatingEndpoint, + paginateRest: () => paginateRest, + paginatingEndpoints: () => paginatingEndpoints +}); +module.exports = __toCommonJS(dist_src_exports); -Object.defineProperty(exports, "__esModule", ({ value: true })); - -const VERSION = "2.6.0"; +// pkg/dist-src/version.js +var VERSION = "9.1.5"; -/** - * Some “list” response that can be paginated have a different response structure - * - * They have a `total_count` key in the response (search also has `incomplete_results`, - * /installation/repositories also has `repository_selection`), as well as a key with - * the list of the items which name varies from endpoint to endpoint. - * - * Octokit normalizes these responses so that paginated results are always returned following - * the same structure. One challenge is that if the list response has only one page, no Link - * header is provided, so this header alone is not sufficient to check wether a response is - * paginated or not. - * - * We check if a "total_count" key is present in the response data, but also make sure that - * a "url" property is not, as the "Get the combined status for a specific ref" endpoint would - * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref - */ +// pkg/dist-src/normalize-paginated-list-response.js function normalizePaginatedListResponse(response) { + if (!response.data) { + return { + ...response, + data: [] + }; + } const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); - if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way - // to retrieve the same information. 
- + if (!responseNeedsNormalization) + return response; const incompleteResults = response.data.incomplete_results; const repositorySelection = response.data.repository_selection; const totalCount = response.data.total_count; @@ -6220,19 +7392,17 @@ function normalizePaginatedListResponse(response) { const namespaceKey = Object.keys(response.data)[0]; const data = response.data[namespaceKey]; response.data = data; - if (typeof incompleteResults !== "undefined") { response.data.incomplete_results = incompleteResults; } - if (typeof repositorySelection !== "undefined") { response.data.repository_selection = repositorySelection; } - response.data.total_count = totalCount; return response; } +// pkg/dist-src/iterator.js function iterator(octokit, route, parameters) { const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); const requestMethod = typeof route === "function" ? route : octokit.request; @@ -6242,68 +7412,315 @@ function iterator(octokit, route, parameters) { return { [Symbol.asyncIterator]: () => ({ async next() { - if (!url) return { - done: true - }; - const response = await requestMethod({ - method, - url, - headers - }); - const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format: - // '; rel="next", ; rel="last"' - // sets `url` to undefined if "next" URL is not present or `link` header is not set - - url = ((normalizedResponse.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1]; - return { - value: normalizedResponse - }; + if (!url) + return { done: true }; + try { + const response = await requestMethod({ method, url, headers }); + const normalizedResponse = normalizePaginatedListResponse(response); + url = ((normalizedResponse.headers.link || "").match( + /<([^>]+)>;\s*rel="next"/ + ) || [])[1]; + return { value: normalizedResponse }; + } catch (error) { + if (error.status !== 409) + throw error; + url = ""; + return { + value: { + status: 200, + headers: {}, + data: [] + } + }; + } } - }) }; } +// pkg/dist-src/paginate.js function paginate(octokit, route, parameters, mapFn) { if (typeof parameters === "function") { mapFn = parameters; - parameters = undefined; + parameters = void 0; } - - return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn); + return gather( + octokit, + [], + iterator(octokit, route, parameters)[Symbol.asyncIterator](), + mapFn + ); } - -function gather(octokit, results, iterator, mapFn) { - return iterator.next().then(result => { +function gather(octokit, results, iterator2, mapFn) { + return iterator2.next().then((result) => { if (result.done) { return results; } - let earlyExit = false; - function done() { earlyExit = true; } - - results = results.concat(mapFn ? mapFn(result.value, done) : result.value.data); - + results = results.concat( + mapFn ? 
mapFn(result.value, done) : result.value.data + ); if (earlyExit) { return results; } - - return gather(octokit, results, iterator, mapFn); + return gather(octokit, results, iterator2, mapFn); }); } -const composePaginateRest = Object.assign(paginate, { +// pkg/dist-src/compose-paginate.js +var composePaginateRest = Object.assign(paginate, { iterator }); -/** - * @param octokit Octokit instance - * @param options Options passed to Octokit constructor - */ +// pkg/dist-src/generated/paginating-endpoints.js +var paginatingEndpoints = [ + "GET /advisories", + "GET /app/hook/deliveries", + "GET /app/installation-requests", + "GET /app/installations", + "GET /assignments/{assignment_id}/accepted_assignments", + "GET /classrooms", + "GET /classrooms/{classroom_id}/assignments", + "GET /enterprises/{enterprise}/dependabot/alerts", + "GET /enterprises/{enterprise}/secret-scanning/alerts", + "GET /events", + "GET /gists", + "GET /gists/public", + "GET /gists/starred", + "GET /gists/{gist_id}/comments", + "GET /gists/{gist_id}/commits", + "GET /gists/{gist_id}/forks", + "GET /installation/repositories", + "GET /issues", + "GET /licenses", + "GET /marketplace_listing/plans", + "GET /marketplace_listing/plans/{plan_id}/accounts", + "GET /marketplace_listing/stubbed/plans", + "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", + "GET /networks/{owner}/{repo}/events", + "GET /notifications", + "GET /organizations", + "GET /orgs/{org}/actions/cache/usage-by-repository", + "GET /orgs/{org}/actions/permissions/repositories", + "GET /orgs/{org}/actions/runners", + "GET /orgs/{org}/actions/secrets", + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", + "GET /orgs/{org}/actions/variables", + "GET /orgs/{org}/actions/variables/{name}/repositories", + "GET /orgs/{org}/blocks", + "GET /orgs/{org}/code-scanning/alerts", + "GET /orgs/{org}/codespaces", + "GET /orgs/{org}/codespaces/secrets", + "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories", + "GET /orgs/{org}/copilot/billing/seats", + "GET /orgs/{org}/dependabot/alerts", + "GET /orgs/{org}/dependabot/secrets", + "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", + "GET /orgs/{org}/events", + "GET /orgs/{org}/failed_invitations", + "GET /orgs/{org}/hooks", + "GET /orgs/{org}/hooks/{hook_id}/deliveries", + "GET /orgs/{org}/installations", + "GET /orgs/{org}/invitations", + "GET /orgs/{org}/invitations/{invitation_id}/teams", + "GET /orgs/{org}/issues", + "GET /orgs/{org}/members", + "GET /orgs/{org}/members/{username}/codespaces", + "GET /orgs/{org}/migrations", + "GET /orgs/{org}/migrations/{migration_id}/repositories", + "GET /orgs/{org}/outside_collaborators", + "GET /orgs/{org}/packages", + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", + "GET /orgs/{org}/personal-access-token-requests", + "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories", + "GET /orgs/{org}/personal-access-tokens", + "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories", + "GET /orgs/{org}/projects", + "GET /orgs/{org}/properties/values", + "GET /orgs/{org}/public_members", + "GET /orgs/{org}/repos", + "GET /orgs/{org}/rulesets", + "GET /orgs/{org}/rulesets/rule-suites", + "GET /orgs/{org}/secret-scanning/alerts", + "GET /orgs/{org}/security-advisories", + "GET /orgs/{org}/teams", + "GET /orgs/{org}/teams/{team_slug}/discussions", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", + "GET 
/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", + "GET /orgs/{org}/teams/{team_slug}/invitations", + "GET /orgs/{org}/teams/{team_slug}/members", + "GET /orgs/{org}/teams/{team_slug}/projects", + "GET /orgs/{org}/teams/{team_slug}/repos", + "GET /orgs/{org}/teams/{team_slug}/teams", + "GET /projects/columns/{column_id}/cards", + "GET /projects/{project_id}/collaborators", + "GET /projects/{project_id}/columns", + "GET /repos/{owner}/{repo}/actions/artifacts", + "GET /repos/{owner}/{repo}/actions/caches", + "GET /repos/{owner}/{repo}/actions/organization-secrets", + "GET /repos/{owner}/{repo}/actions/organization-variables", + "GET /repos/{owner}/{repo}/actions/runners", + "GET /repos/{owner}/{repo}/actions/runs", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", + "GET /repos/{owner}/{repo}/actions/secrets", + "GET /repos/{owner}/{repo}/actions/variables", + "GET /repos/{owner}/{repo}/actions/workflows", + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", + "GET /repos/{owner}/{repo}/activity", + "GET /repos/{owner}/{repo}/assignees", + "GET /repos/{owner}/{repo}/branches", + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", + "GET /repos/{owner}/{repo}/code-scanning/alerts", + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + "GET /repos/{owner}/{repo}/code-scanning/analyses", + "GET /repos/{owner}/{repo}/codespaces", + "GET /repos/{owner}/{repo}/codespaces/devcontainers", + "GET /repos/{owner}/{repo}/codespaces/secrets", + "GET /repos/{owner}/{repo}/collaborators", + "GET /repos/{owner}/{repo}/comments", + "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/commits", + "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", + "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", + "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", + "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", + "GET /repos/{owner}/{repo}/commits/{ref}/status", + "GET /repos/{owner}/{repo}/commits/{ref}/statuses", + "GET /repos/{owner}/{repo}/contributors", + "GET /repos/{owner}/{repo}/dependabot/alerts", + "GET /repos/{owner}/{repo}/dependabot/secrets", + "GET /repos/{owner}/{repo}/deployments", + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", + "GET /repos/{owner}/{repo}/environments", + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies", + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps", + "GET /repos/{owner}/{repo}/events", + "GET /repos/{owner}/{repo}/forks", + "GET /repos/{owner}/{repo}/hooks", + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", + "GET /repos/{owner}/{repo}/invitations", + "GET /repos/{owner}/{repo}/issues", + "GET /repos/{owner}/{repo}/issues/comments", + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/issues/events", + "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", + "GET /repos/{owner}/{repo}/issues/{issue_number}/events", + "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", + "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", + "GET 
/repos/{owner}/{repo}/issues/{issue_number}/timeline", + "GET /repos/{owner}/{repo}/keys", + "GET /repos/{owner}/{repo}/labels", + "GET /repos/{owner}/{repo}/milestones", + "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", + "GET /repos/{owner}/{repo}/notifications", + "GET /repos/{owner}/{repo}/pages/builds", + "GET /repos/{owner}/{repo}/projects", + "GET /repos/{owner}/{repo}/pulls", + "GET /repos/{owner}/{repo}/pulls/comments", + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", + "GET /repos/{owner}/{repo}/releases", + "GET /repos/{owner}/{repo}/releases/{release_id}/assets", + "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", + "GET /repos/{owner}/{repo}/rules/branches/{branch}", + "GET /repos/{owner}/{repo}/rulesets", + "GET /repos/{owner}/{repo}/rulesets/rule-suites", + "GET /repos/{owner}/{repo}/secret-scanning/alerts", + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", + "GET /repos/{owner}/{repo}/security-advisories", + "GET /repos/{owner}/{repo}/stargazers", + "GET /repos/{owner}/{repo}/subscribers", + "GET /repos/{owner}/{repo}/tags", + "GET /repos/{owner}/{repo}/teams", + "GET /repos/{owner}/{repo}/topics", + "GET /repositories", + "GET /repositories/{repository_id}/environments/{environment_name}/secrets", + "GET /repositories/{repository_id}/environments/{environment_name}/variables", + "GET /search/code", + "GET /search/commits", + "GET /search/issues", + "GET /search/labels", + "GET /search/repositories", + "GET /search/topics", + "GET /search/users", + "GET /teams/{team_id}/discussions", + "GET /teams/{team_id}/discussions/{discussion_number}/comments", + "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", + "GET /teams/{team_id}/discussions/{discussion_number}/reactions", + "GET /teams/{team_id}/invitations", + "GET /teams/{team_id}/members", + "GET /teams/{team_id}/projects", + "GET /teams/{team_id}/repos", + "GET /teams/{team_id}/teams", + "GET /user/blocks", + "GET /user/codespaces", + "GET /user/codespaces/secrets", + "GET /user/emails", + "GET /user/followers", + "GET /user/following", + "GET /user/gpg_keys", + "GET /user/installations", + "GET /user/installations/{installation_id}/repositories", + "GET /user/issues", + "GET /user/keys", + "GET /user/marketplace_purchases", + "GET /user/marketplace_purchases/stubbed", + "GET /user/memberships/orgs", + "GET /user/migrations", + "GET /user/migrations/{migration_id}/repositories", + "GET /user/orgs", + "GET /user/packages", + "GET /user/packages/{package_type}/{package_name}/versions", + "GET /user/public_emails", + "GET /user/repos", + "GET /user/repository_invitations", + "GET /user/social_accounts", + "GET /user/ssh_signing_keys", + "GET /user/starred", + "GET /user/subscriptions", + "GET /user/teams", + "GET /users", + "GET /users/{username}/events", + "GET /users/{username}/events/orgs/{org}", + "GET /users/{username}/events/public", + "GET /users/{username}/followers", + "GET /users/{username}/following", + "GET /users/{username}/gists", + "GET /users/{username}/gpg_keys", + "GET /users/{username}/keys", + "GET /users/{username}/orgs", + "GET /users/{username}/packages", + "GET 
/users/{username}/projects", + "GET /users/{username}/received_events", + "GET /users/{username}/received_events/public", + "GET /users/{username}/repos", + "GET /users/{username}/social_accounts", + "GET /users/{username}/ssh_signing_keys", + "GET /users/{username}/starred", + "GET /users/{username}/subscriptions" +]; +// pkg/dist-src/paginating-endpoints.js +function isPaginatingEndpoint(arg) { + if (typeof arg === "string") { + return paginatingEndpoints.includes(arg); + } else { + return false; + } +} + +// pkg/dist-src/index.js function paginateRest(octokit) { return { paginate: Object.assign(paginate.bind(null, octokit), { @@ -6312,91 +7729,384 @@ function paginateRest(octokit) { }; } paginateRest.VERSION = VERSION; - -exports.composePaginateRest = composePaginateRest; -exports.paginateRest = paginateRest; -//# sourceMappingURL=index.js.map +// Annotate the CommonJS export names for ESM import in node: +0 && (0); /***/ }), /***/ 3044: -/***/ ((__unused_webpack_module, exports) => { +/***/ ((module) => { "use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); -Object.defineProperty(exports, "__esModule", ({ value: true })); +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + legacyRestEndpointMethods: () => legacyRestEndpointMethods, + restEndpointMethods: () => restEndpointMethods +}); +module.exports = __toCommonJS(dist_src_exports); + +// pkg/dist-src/version.js +var VERSION = "10.2.0"; -const Endpoints = { +// pkg/dist-src/generated/endpoints.js +var Endpoints = { actions: { - addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], - cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"], + addCustomLabelsToSelfHostedRunnerForOrg: [ + "POST /orgs/{org}/actions/runners/{runner_id}/labels" + ], + addCustomLabelsToSelfHostedRunnerForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" + ], + addSelectedRepoToOrgVariable: [ + "PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" + ], + approveWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve" + ], + cancelWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel" + ], + createEnvironmentVariable: [ + "POST /repositories/{repository_id}/environments/{environment_name}/variables" + ], + createOrUpdateEnvironmentSecret: [ + "PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], - createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - createRegistrationTokenForOrg: 
["POST /orgs/{org}/actions/runners/registration-token"], - createRegistrationTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/registration-token"], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}" + ], + createOrgVariable: ["POST /orgs/{org}/actions/variables"], + createRegistrationTokenForOrg: [ + "POST /orgs/{org}/actions/runners/registration-token" + ], + createRegistrationTokenForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/registration-token" + ], createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], - createRemoveTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/remove-token"], - createWorkflowDispatch: ["POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"], - deleteArtifact: ["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], + createRemoveTokenForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/remove-token" + ], + createRepoVariable: ["POST /repos/{owner}/{repo}/actions/variables"], + createWorkflowDispatch: [ + "POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches" + ], + deleteActionsCacheById: [ + "DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}" + ], + deleteActionsCacheByKey: [ + "DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}" + ], + deleteArtifact: [ + "DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}" + ], + deleteEnvironmentSecret: [ + "DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + deleteEnvironmentVariable: [ + "DELETE /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], - deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - deleteSelfHostedRunnerFromOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}"], - deleteSelfHostedRunnerFromRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"], + deleteOrgVariable: ["DELETE /orgs/{org}/actions/variables/{name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}" + ], + deleteRepoVariable: [ + "DELETE /repos/{owner}/{repo}/actions/variables/{name}" + ], + deleteSelfHostedRunnerFromOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}" + ], + deleteSelfHostedRunnerFromRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}" + ], deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], - deleteWorkflowRunLogs: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], - downloadArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"], - downloadJobLogsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"], - downloadWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], + deleteWorkflowRunLogs: [ + "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs" + ], + disableSelectedRepositoryGithubActionsOrganization: [ + "DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}" + ], + disableWorkflow: [ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable" + ], + downloadArtifact: [ + "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}" + ], + downloadJobLogsForWorkflowRun: [ + "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs" + ], + downloadWorkflowRunAttemptLogs: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs" + ], + 
downloadWorkflowRunLogs: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs" + ], + enableSelectedRepositoryGithubActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/repositories/{repository_id}" + ], + enableWorkflow: [ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable" + ], + forceCancelWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/force-cancel" + ], + generateRunnerJitconfigForOrg: [ + "POST /orgs/{org}/actions/runners/generate-jitconfig" + ], + generateRunnerJitconfigForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig" + ], + getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"], + getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"], + getActionsCacheUsageByRepoForOrg: [ + "GET /orgs/{org}/actions/cache/usage-by-repository" + ], + getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"], + getAllowedActionsOrganization: [ + "GET /orgs/{org}/actions/permissions/selected-actions" + ], + getAllowedActionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/selected-actions" + ], getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], + getEnvironmentPublicKey: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key" + ], + getEnvironmentSecret: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + getEnvironmentVariable: [ + "GET /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + getGithubActionsDefaultWorkflowPermissionsOrganization: [ + "GET /orgs/{org}/actions/permissions/workflow" + ], + getGithubActionsDefaultWorkflowPermissionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/workflow" + ], + getGithubActionsPermissionsOrganization: [ + "GET /orgs/{org}/actions/permissions" + ], + getGithubActionsPermissionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions" + ], getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], + getOrgVariable: ["GET /orgs/{org}/actions/variables/{name}"], + getPendingDeploymentsForRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" + ], + getRepoPermissions: [ + "GET /repos/{owner}/{repo}/actions/permissions", + {}, + { renamed: ["actions", "getGithubActionsPermissionsRepository"] } + ], getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], + getRepoVariable: ["GET /repos/{owner}/{repo}/actions/variables/{name}"], + getReviewsForRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals" + ], getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], - getSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}"], + getSelfHostedRunnerForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}" + ], getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], + getWorkflowAccessToRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/access" + ], getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], - getWorkflowRunUsage: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"], - getWorkflowUsage: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"], + getWorkflowRunAttempt: [ + "GET 
/repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}" + ], + getWorkflowRunUsage: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing" + ], + getWorkflowUsage: [ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing" + ], listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], - listJobsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"], + listEnvironmentSecrets: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets" + ], + listEnvironmentVariables: [ + "GET /repositories/{repository_id}/environments/{environment_name}/variables" + ], + listJobsForWorkflowRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs" + ], + listJobsForWorkflowRunAttempt: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs" + ], + listLabelsForSelfHostedRunnerForOrg: [ + "GET /orgs/{org}/actions/runners/{runner_id}/labels" + ], + listLabelsForSelfHostedRunnerForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], + listOrgVariables: ["GET /orgs/{org}/actions/variables"], + listRepoOrganizationSecrets: [ + "GET /repos/{owner}/{repo}/actions/organization-secrets" + ], + listRepoOrganizationVariables: [ + "GET /repos/{owner}/{repo}/actions/organization-variables" + ], listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], + listRepoVariables: ["GET /repos/{owner}/{repo}/actions/variables"], listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], - listRunnerApplicationsForRepo: ["GET /repos/{owner}/{repo}/actions/runners/downloads"], - listSelectedReposForOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"], + listRunnerApplicationsForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/downloads" + ], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories" + ], + listSelectedReposForOrgVariable: [ + "GET /orgs/{org}/actions/variables/{name}/repositories" + ], + listSelectedRepositoriesEnabledGithubActionsOrganization: [ + "GET /orgs/{org}/actions/permissions/repositories" + ], listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], - listWorkflowRunArtifacts: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"], - listWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"], + listWorkflowRunArtifacts: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts" + ], + listWorkflowRuns: [ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs" + ], listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], + reRunJobForWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun" + ], reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], - removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], - setSelectedReposForOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"] + reRunWorkflowFailedJobs: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs" + ], + removeAllCustomLabelsFromSelfHostedRunnerForOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels" + ], + removeAllCustomLabelsFromSelfHostedRunnerForRepo: [ + "DELETE 
/repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + removeCustomLabelFromSelfHostedRunnerForOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}" + ], + removeCustomLabelFromSelfHostedRunnerForRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" + ], + removeSelectedRepoFromOrgVariable: [ + "DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" + ], + reviewCustomGatesForRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule" + ], + reviewPendingDeploymentsForRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" + ], + setAllowedActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/selected-actions" + ], + setAllowedActionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/selected-actions" + ], + setCustomLabelsForSelfHostedRunnerForOrg: [ + "PUT /orgs/{org}/actions/runners/{runner_id}/labels" + ], + setCustomLabelsForSelfHostedRunnerForRepo: [ + "PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + setGithubActionsDefaultWorkflowPermissionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/workflow" + ], + setGithubActionsDefaultWorkflowPermissionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/workflow" + ], + setGithubActionsPermissionsOrganization: [ + "PUT /orgs/{org}/actions/permissions" + ], + setGithubActionsPermissionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions" + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories" + ], + setSelectedReposForOrgVariable: [ + "PUT /orgs/{org}/actions/variables/{name}/repositories" + ], + setSelectedRepositoriesEnabledGithubActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/repositories" + ], + setWorkflowAccessToRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/access" + ], + updateEnvironmentVariable: [ + "PATCH /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + updateOrgVariable: ["PATCH /orgs/{org}/actions/variables/{name}"], + updateRepoVariable: [ + "PATCH /repos/{owner}/{repo}/actions/variables/{name}" + ] }, activity: { checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], - deleteThreadSubscription: ["DELETE /notifications/threads/{thread_id}/subscription"], + deleteThreadSubscription: [ + "DELETE /notifications/threads/{thread_id}/subscription" + ], getFeeds: ["GET /feeds"], getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], getThread: ["GET /notifications/threads/{thread_id}"], - getThreadSubscriptionForAuthenticatedUser: ["GET /notifications/threads/{thread_id}/subscription"], + getThreadSubscriptionForAuthenticatedUser: [ + "GET /notifications/threads/{thread_id}/subscription" + ], listEventsForAuthenticatedUser: ["GET /users/{username}/events"], listNotificationsForAuthenticatedUser: ["GET /notifications"], - listOrgEventsForAuthenticatedUser: ["GET /users/{username}/events/orgs/{org}"], + listOrgEventsForAuthenticatedUser: [ + "GET /users/{username}/events/orgs/{org}" + ], listPublicEvents: ["GET /events"], listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], listPublicEventsForUser: ["GET /users/{username}/events/public"], listPublicOrgEvents: ["GET /orgs/{org}/events"], 
listReceivedEventsForUser: ["GET /users/{username}/received_events"], - listReceivedPublicEventsForUser: ["GET /users/{username}/received_events/public"], + listReceivedPublicEventsForUser: [ + "GET /users/{username}/received_events/public" + ], listRepoEvents: ["GET /repos/{owner}/{repo}/events"], - listRepoNotificationsForAuthenticatedUser: ["GET /repos/{owner}/{repo}/notifications"], + listRepoNotificationsForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/notifications" + ], listReposStarredByAuthenticatedUser: ["GET /user/starred"], listReposStarredByUser: ["GET /users/{username}/starred"], listReposWatchedByUser: ["GET /users/{username}/subscriptions"], @@ -6407,20 +8117,26 @@ const Endpoints = { markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], - setThreadSubscription: ["PUT /notifications/threads/{thread_id}/subscription"], + setThreadSubscription: [ + "PUT /notifications/threads/{thread_id}/subscription" + ], starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] }, apps: { - addRepoToInstallation: ["PUT /user/installations/{installation_id}/repositories/{repository_id}"], + addRepoToInstallation: [ + "PUT /user/installations/{installation_id}/repositories/{repository_id}", + {}, + { renamed: ["apps", "addRepoToInstallationForAuthenticatedUser"] } + ], + addRepoToInstallationForAuthenticatedUser: [ + "PUT /user/installations/{installation_id}/repositories/{repository_id}" + ], checkToken: ["POST /applications/{client_id}/token"], - createContentAttachment: ["POST /content_references/{content_reference_id}/attachments", { - mediaType: { - previews: ["corsair"] - } - }], createFromManifest: ["POST /app-manifests/{code}/conversions"], - createInstallationAccessToken: ["POST /app/installations/{installation_id}/access_tokens"], + createInstallationAccessToken: [ + "POST /app/installations/{installation_id}/access_tokens" + ], deleteAuthorization: ["DELETE /applications/{client_id}/grant"], deleteInstallation: ["DELETE /app/installations/{installation_id}"], deleteToken: ["DELETE /applications/{client_id}/token"], @@ -6429,121 +8145,323 @@ const Endpoints = { getInstallation: ["GET /app/installations/{installation_id}"], getOrgInstallation: ["GET /orgs/{org}/installation"], getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"], - getSubscriptionPlanForAccount: ["GET /marketplace_listing/accounts/{account_id}"], - getSubscriptionPlanForAccountStubbed: ["GET /marketplace_listing/stubbed/accounts/{account_id}"], + getSubscriptionPlanForAccount: [ + "GET /marketplace_listing/accounts/{account_id}" + ], + getSubscriptionPlanForAccountStubbed: [ + "GET /marketplace_listing/stubbed/accounts/{account_id}" + ], getUserInstallation: ["GET /users/{username}/installation"], + getWebhookConfigForApp: ["GET /app/hook/config"], + getWebhookDelivery: ["GET /app/hook/deliveries/{delivery_id}"], listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], - listAccountsForPlanStubbed: ["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"], - listInstallationReposForAuthenticatedUser: ["GET /user/installations/{installation_id}/repositories"], + listAccountsForPlanStubbed: [ + "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts" + ], + listInstallationReposForAuthenticatedUser: [ + "GET 
/user/installations/{installation_id}/repositories" + ], + listInstallationRequestsForAuthenticatedApp: [ + "GET /app/installation-requests" + ], listInstallations: ["GET /app/installations"], listInstallationsForAuthenticatedUser: ["GET /user/installations"], listPlans: ["GET /marketplace_listing/plans"], listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], listReposAccessibleToInstallation: ["GET /installation/repositories"], listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], - listSubscriptionsForAuthenticatedUserStubbed: ["GET /user/marketplace_purchases/stubbed"], - removeRepoFromInstallation: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}"], + listSubscriptionsForAuthenticatedUserStubbed: [ + "GET /user/marketplace_purchases/stubbed" + ], + listWebhookDeliveries: ["GET /app/hook/deliveries"], + redeliverWebhookDelivery: [ + "POST /app/hook/deliveries/{delivery_id}/attempts" + ], + removeRepoFromInstallation: [ + "DELETE /user/installations/{installation_id}/repositories/{repository_id}", + {}, + { renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"] } + ], + removeRepoFromInstallationForAuthenticatedUser: [ + "DELETE /user/installations/{installation_id}/repositories/{repository_id}" + ], resetToken: ["PATCH /applications/{client_id}/token"], revokeInstallationAccessToken: ["DELETE /installation/token"], + scopeToken: ["POST /applications/{client_id}/token/scoped"], suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], - unsuspendInstallation: ["DELETE /app/installations/{installation_id}/suspended"] + unsuspendInstallation: [ + "DELETE /app/installations/{installation_id}/suspended" + ], + updateWebhookConfigForApp: ["PATCH /app/hook/config"] }, billing: { getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"], - getGithubActionsBillingUser: ["GET /users/{username}/settings/billing/actions"], + getGithubActionsBillingUser: [ + "GET /users/{username}/settings/billing/actions" + ], getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"], - getGithubPackagesBillingUser: ["GET /users/{username}/settings/billing/packages"], - getSharedStorageBillingOrg: ["GET /orgs/{org}/settings/billing/shared-storage"], - getSharedStorageBillingUser: ["GET /users/{username}/settings/billing/shared-storage"] + getGithubPackagesBillingUser: [ + "GET /users/{username}/settings/billing/packages" + ], + getSharedStorageBillingOrg: [ + "GET /orgs/{org}/settings/billing/shared-storage" + ], + getSharedStorageBillingUser: [ + "GET /users/{username}/settings/billing/shared-storage" + ] }, checks: { - create: ["POST /repos/{owner}/{repo}/check-runs", { - mediaType: { - previews: ["antiope"] - } - }], - createSuite: ["POST /repos/{owner}/{repo}/check-suites", { - mediaType: { - previews: ["antiope"] - } - }], - get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}", { - mediaType: { - previews: ["antiope"] - } - }], - getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}", { - mediaType: { - previews: ["antiope"] - } - }], - listAnnotations: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", { - mediaType: { - previews: ["antiope"] - } - }], - listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs", { - mediaType: { - previews: ["antiope"] - } - }], - listForSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", { - mediaType: { - previews: ["antiope"] - } - }], - listSuitesForRef: ["GET 
/repos/{owner}/{repo}/commits/{ref}/check-suites", { - mediaType: { - previews: ["antiope"] - } - }], - rerequestSuite: ["POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest", { - mediaType: { - previews: ["antiope"] - } - }], - setSuitesPreferences: ["PATCH /repos/{owner}/{repo}/check-suites/preferences", { - mediaType: { - previews: ["antiope"] - } - }], - update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}", { - mediaType: { - previews: ["antiope"] - } - }] + create: ["POST /repos/{owner}/{repo}/check-runs"], + createSuite: ["POST /repos/{owner}/{repo}/check-suites"], + get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"], + getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"], + listAnnotations: [ + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations" + ], + listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"], + listForSuite: [ + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs" + ], + listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"], + rerequestRun: [ + "POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest" + ], + rerequestSuite: [ + "POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest" + ], + setSuitesPreferences: [ + "PATCH /repos/{owner}/{repo}/check-suites/preferences" + ], + update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"] }, codeScanning: { - getAlert: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", {}, { - renamedParameters: { - alert_id: "alert_number" - } - }], + deleteAnalysis: [ + "DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}" + ], + getAlert: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", + {}, + { renamedParameters: { alert_id: "alert_number" } } + ], + getAnalysis: [ + "GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}" + ], + getCodeqlDatabase: [ + "GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}" + ], + getDefaultSetup: ["GET /repos/{owner}/{repo}/code-scanning/default-setup"], + getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"], + listAlertInstances: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances" + ], + listAlertsForOrg: ["GET /orgs/{org}/code-scanning/alerts"], listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"], + listAlertsInstances: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + {}, + { renamed: ["codeScanning", "listAlertInstances"] } + ], + listCodeqlDatabases: [ + "GET /repos/{owner}/{repo}/code-scanning/codeql/databases" + ], listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"], - updateAlert: ["PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}"], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}" + ], + updateDefaultSetup: [ + "PATCH /repos/{owner}/{repo}/code-scanning/default-setup" + ], uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"] }, codesOfConduct: { - getAllCodesOfConduct: ["GET /codes_of_conduct", { - mediaType: { - previews: ["scarlet-witch"] - } - }], - getConductCode: ["GET /codes_of_conduct/{key}", { - mediaType: { - previews: ["scarlet-witch"] - } - }], - getForRepo: ["GET /repos/{owner}/{repo}/community/code_of_conduct", { - mediaType: { - previews: ["scarlet-witch"] - } - }] + getAllCodesOfConduct: ["GET /codes_of_conduct"], + getConductCode: ["GET /codes_of_conduct/{key}"] + }, + 
codespaces: { + addRepositoryForSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + checkPermissionsForDevcontainer: [ + "GET /repos/{owner}/{repo}/codespaces/permissions_check" + ], + codespaceMachinesForAuthenticatedUser: [ + "GET /user/codespaces/{codespace_name}/machines" + ], + createForAuthenticatedUser: ["POST /user/codespaces"], + createOrUpdateOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}" + ], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + createOrUpdateSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}" + ], + createWithPrForAuthenticatedUser: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces" + ], + createWithRepoForAuthenticatedUser: [ + "POST /repos/{owner}/{repo}/codespaces" + ], + deleteForAuthenticatedUser: ["DELETE /user/codespaces/{codespace_name}"], + deleteFromOrganization: [ + "DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}" + ], + deleteOrgSecret: ["DELETE /orgs/{org}/codespaces/secrets/{secret_name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + deleteSecretForAuthenticatedUser: [ + "DELETE /user/codespaces/secrets/{secret_name}" + ], + exportForAuthenticatedUser: [ + "POST /user/codespaces/{codespace_name}/exports" + ], + getCodespacesForUserInOrg: [ + "GET /orgs/{org}/members/{username}/codespaces" + ], + getExportDetailsForAuthenticatedUser: [ + "GET /user/codespaces/{codespace_name}/exports/{export_id}" + ], + getForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}"], + getOrgPublicKey: ["GET /orgs/{org}/codespaces/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/codespaces/secrets/{secret_name}"], + getPublicKeyForAuthenticatedUser: [ + "GET /user/codespaces/secrets/public-key" + ], + getRepoPublicKey: [ + "GET /repos/{owner}/{repo}/codespaces/secrets/public-key" + ], + getRepoSecret: [ + "GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + getSecretForAuthenticatedUser: [ + "GET /user/codespaces/secrets/{secret_name}" + ], + listDevcontainersInRepositoryForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/devcontainers" + ], + listForAuthenticatedUser: ["GET /user/codespaces"], + listInOrganization: [ + "GET /orgs/{org}/codespaces", + {}, + { renamedParameters: { org_id: "org" } } + ], + listInRepositoryForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces" + ], + listOrgSecrets: ["GET /orgs/{org}/codespaces/secrets"], + listRepoSecrets: ["GET /repos/{owner}/{repo}/codespaces/secrets"], + listRepositoriesForSecretForAuthenticatedUser: [ + "GET /user/codespaces/secrets/{secret_name}/repositories" + ], + listSecretsForAuthenticatedUser: ["GET /user/codespaces/secrets"], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories" + ], + preFlightWithRepoForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/new" + ], + publishForAuthenticatedUser: [ + "POST /user/codespaces/{codespace_name}/publish" + ], + removeRepositoryForSecretForAuthenticatedUser: [ + "DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + repoMachinesForAuthenticatedUser: [ + "GET 
/repos/{owner}/{repo}/codespaces/machines" + ], + setRepositoriesForSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}/repositories" + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories" + ], + startForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/start"], + stopForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/stop"], + stopInOrganization: [ + "POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop" + ], + updateForAuthenticatedUser: ["PATCH /user/codespaces/{codespace_name}"] + }, + copilot: { + addCopilotForBusinessSeatsForTeams: [ + "POST /orgs/{org}/copilot/billing/selected_teams" + ], + addCopilotForBusinessSeatsForUsers: [ + "POST /orgs/{org}/copilot/billing/selected_users" + ], + cancelCopilotSeatAssignmentForTeams: [ + "DELETE /orgs/{org}/copilot/billing/selected_teams" + ], + cancelCopilotSeatAssignmentForUsers: [ + "DELETE /orgs/{org}/copilot/billing/selected_users" + ], + getCopilotOrganizationDetails: ["GET /orgs/{org}/copilot/billing"], + getCopilotSeatDetailsForUser: [ + "GET /orgs/{org}/members/{username}/copilot" + ], + listCopilotSeats: ["GET /orgs/{org}/copilot/billing/seats"] }, - emojis: { - get: ["GET /emojis"] + dependabot: { + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" + ], + createOrUpdateOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}" + ], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + deleteOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + getAlert: ["GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}"], + getOrgPublicKey: ["GET /orgs/{org}/dependabot/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}"], + getRepoPublicKey: [ + "GET /repos/{owner}/{repo}/dependabot/secrets/public-key" + ], + getRepoSecret: [ + "GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + listAlertsForEnterprise: [ + "GET /enterprises/{enterprise}/dependabot/alerts" + ], + listAlertsForOrg: ["GET /orgs/{org}/dependabot/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/dependabot/alerts"], + listOrgSecrets: ["GET /orgs/{org}/dependabot/secrets"], + listRepoSecrets: ["GET /repos/{owner}/{repo}/dependabot/secrets"], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories" + ], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number}" + ] }, + dependencyGraph: { + createRepositorySnapshot: [ + "POST /repos/{owner}/{repo}/dependency-graph/snapshots" + ], + diffRange: [ + "GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}" + ], + exportSbom: ["GET /repos/{owner}/{repo}/dependency-graph/sbom"] + }, + emojis: { get: ["GET /emojis"] }, gists: { checkIsStarred: ["GET /gists/{gist_id}/star"], create: ["POST /gists"], @@ -6586,48 +8504,55 @@ const Endpoints = { getTemplate: ["GET /gitignore/templates/{name}"] }, interactions: { - getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits", { - mediaType: { - previews: ["sombra"] - } - }], - 
getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits", { - mediaType: { - previews: ["sombra"] - } - }], - removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits", { - mediaType: { - previews: ["sombra"] - } - }], - removeRestrictionsForRepo: ["DELETE /repos/{owner}/{repo}/interaction-limits", { - mediaType: { - previews: ["sombra"] - } - }], - setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits", { - mediaType: { - previews: ["sombra"] - } - }], - setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits", { - mediaType: { - previews: ["sombra"] - } - }] + getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"], + getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"], + getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"], + getRestrictionsForYourPublicRepos: [ + "GET /user/interaction-limits", + {}, + { renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] } + ], + removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"], + removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"], + removeRestrictionsForRepo: [ + "DELETE /repos/{owner}/{repo}/interaction-limits" + ], + removeRestrictionsForYourPublicRepos: [ + "DELETE /user/interaction-limits", + {}, + { renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"] } + ], + setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"], + setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"], + setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"], + setRestrictionsForYourPublicRepos: [ + "PUT /user/interaction-limits", + {}, + { renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] } + ] }, issues: { - addAssignees: ["POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"], + addAssignees: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/assignees" + ], addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], + checkUserCanBeAssignedToIssue: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}" + ], create: ["POST /repos/{owner}/{repo}/issues"], - createComment: ["POST /repos/{owner}/{repo}/issues/{issue_number}/comments"], + createComment: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/comments" + ], createLabel: ["POST /repos/{owner}/{repo}/labels"], createMilestone: ["POST /repos/{owner}/{repo}/milestones"], - deleteComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"], + deleteComment: [ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}" + ], deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], - deleteMilestone: ["DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"], + deleteMilestone: [ + "DELETE /repos/{owner}/{repo}/milestones/{milestone_number}" + ], get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], @@ -6639,28 +8564,38 @@ const Endpoints = { listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], - listEventsForTimeline: ["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", { - mediaType: { - previews: ["mockingbird"] - } - }], + listEventsForTimeline: [ + "GET 
/repos/{owner}/{repo}/issues/{issue_number}/timeline" + ], listForAuthenticatedUser: ["GET /user/issues"], listForOrg: ["GET /orgs/{org}/issues"], listForRepo: ["GET /repos/{owner}/{repo}/issues"], - listLabelsForMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"], + listLabelsForMilestone: [ + "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels" + ], listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], - listLabelsOnIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/labels"], + listLabelsOnIssue: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/labels" + ], listMilestones: ["GET /repos/{owner}/{repo}/milestones"], lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], - removeAllLabels: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"], - removeAssignees: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"], - removeLabel: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"], + removeAllLabels: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels" + ], + removeAssignees: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees" + ], + removeLabel: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}" + ], setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], - updateMilestone: ["PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"] + updateMilestone: [ + "PATCH /repos/{owner}/{repo}/milestones/{milestone_number}" + ] }, licenses: { get: ["GET /licenses/{license}"], @@ -6669,546 +8604,740 @@ const Endpoints = { }, markdown: { render: ["POST /markdown"], - renderRaw: ["POST /markdown/raw", { - headers: { - "content-type": "text/plain; charset=utf-8" - } - }] + renderRaw: [ + "POST /markdown/raw", + { headers: { "content-type": "text/plain; charset=utf-8" } } + ] }, meta: { - get: ["GET /meta"] + get: ["GET /meta"], + getAllVersions: ["GET /versions"], + getOctocat: ["GET /octocat"], + getZen: ["GET /zen"], + root: ["GET /"] }, migrations: { - cancelImport: ["DELETE /repos/{owner}/{repo}/import"], - deleteArchiveForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/archive", { - mediaType: { - previews: ["wyandotte"] - } - }], - deleteArchiveForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/archive", { - mediaType: { - previews: ["wyandotte"] - } - }], - downloadArchiveForOrg: ["GET /orgs/{org}/migrations/{migration_id}/archive", { - mediaType: { - previews: ["wyandotte"] - } - }], - getArchiveForAuthenticatedUser: ["GET /user/migrations/{migration_id}/archive", { - mediaType: { - previews: ["wyandotte"] - } - }], - getCommitAuthors: ["GET /repos/{owner}/{repo}/import/authors"], - getImportStatus: ["GET /repos/{owner}/{repo}/import"], - getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"], - getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}", { - mediaType: { - previews: ["wyandotte"] - } - }], - getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}", { - mediaType: { - previews: ["wyandotte"] - } - }], - listForAuthenticatedUser: ["GET /user/migrations", { - mediaType: { - previews: ["wyandotte"] - } - }], - listForOrg: ["GET /orgs/{org}/migrations", { - mediaType: { - previews: ["wyandotte"] - } - }], - 
listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories", { - mediaType: { - previews: ["wyandotte"] - } - }], - listReposForUser: ["GET /user/migrations/{migration_id}/repositories", { - mediaType: { - previews: ["wyandotte"] - } - }], - mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"], - setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"], + cancelImport: [ + "DELETE /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.cancelImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#cancel-an-import" + } + ], + deleteArchiveForAuthenticatedUser: [ + "DELETE /user/migrations/{migration_id}/archive" + ], + deleteArchiveForOrg: [ + "DELETE /orgs/{org}/migrations/{migration_id}/archive" + ], + downloadArchiveForOrg: [ + "GET /orgs/{org}/migrations/{migration_id}/archive" + ], + getArchiveForAuthenticatedUser: [ + "GET /user/migrations/{migration_id}/archive" + ], + getCommitAuthors: [ + "GET /repos/{owner}/{repo}/import/authors", + {}, + { + deprecated: "octokit.rest.migrations.getCommitAuthors() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-commit-authors" + } + ], + getImportStatus: [ + "GET /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.getImportStatus() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-an-import-status" + } + ], + getLargeFiles: [ + "GET /repos/{owner}/{repo}/import/large_files", + {}, + { + deprecated: "octokit.rest.migrations.getLargeFiles() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-large-files" + } + ], + getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}"], + getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}"], + listForAuthenticatedUser: ["GET /user/migrations"], + listForOrg: ["GET /orgs/{org}/migrations"], + listReposForAuthenticatedUser: [ + "GET /user/migrations/{migration_id}/repositories" + ], + listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories"], + listReposForUser: [ + "GET /user/migrations/{migration_id}/repositories", + {}, + { renamed: ["migrations", "listReposForAuthenticatedUser"] } + ], + mapCommitAuthor: [ + "PATCH /repos/{owner}/{repo}/import/authors/{author_id}", + {}, + { + deprecated: "octokit.rest.migrations.mapCommitAuthor() is deprecated, see https://docs.github.com/rest/migrations/source-imports#map-a-commit-author" + } + ], + setLfsPreference: [ + "PATCH /repos/{owner}/{repo}/import/lfs", + {}, + { + deprecated: "octokit.rest.migrations.setLfsPreference() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-git-lfs-preference" + } + ], startForAuthenticatedUser: ["POST /user/migrations"], startForOrg: ["POST /orgs/{org}/migrations"], - startImport: ["PUT /repos/{owner}/{repo}/import"], - unlockRepoForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock", { - mediaType: { - previews: ["wyandotte"] - } - }], - unlockRepoForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock", { - mediaType: { - previews: ["wyandotte"] - } - }], - updateImport: ["PATCH /repos/{owner}/{repo}/import"] + startImport: [ + "PUT /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.startImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#start-an-import" + } + ], + unlockRepoForAuthenticatedUser: [ + "DELETE 
/user/migrations/{migration_id}/repos/{repo_name}/lock" + ], + unlockRepoForOrg: [ + "DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock" + ], + updateImport: [ + "PATCH /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.updateImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-an-import" + } + ] }, orgs: { + addSecurityManagerTeam: [ + "PUT /orgs/{org}/security-managers/teams/{team_slug}" + ], blockUser: ["PUT /orgs/{org}/blocks/{username}"], + cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"], checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], - convertMemberToOutsideCollaborator: ["PUT /orgs/{org}/outside_collaborators/{username}"], + convertMemberToOutsideCollaborator: [ + "PUT /orgs/{org}/outside_collaborators/{username}" + ], createInvitation: ["POST /orgs/{org}/invitations"], + createOrUpdateCustomProperties: ["PATCH /orgs/{org}/properties/schema"], + createOrUpdateCustomPropertiesValuesForRepos: [ + "PATCH /orgs/{org}/properties/values" + ], + createOrUpdateCustomProperty: [ + "PUT /orgs/{org}/properties/schema/{custom_property_name}" + ], createWebhook: ["POST /orgs/{org}/hooks"], + delete: ["DELETE /orgs/{org}"], deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], + enableOrDisableSecurityProductOnAllOrgRepos: [ + "POST /orgs/{org}/{security_product}/{enablement}" + ], get: ["GET /orgs/{org}"], + getAllCustomProperties: ["GET /orgs/{org}/properties/schema"], + getCustomProperty: [ + "GET /orgs/{org}/properties/schema/{custom_property_name}" + ], getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], + getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"], + getWebhookDelivery: [ + "GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}" + ], list: ["GET /organizations"], listAppInstallations: ["GET /orgs/{org}/installations"], listBlockedUsers: ["GET /orgs/{org}/blocks"], + listCustomPropertiesValuesForRepos: ["GET /orgs/{org}/properties/values"], + listFailedInvitations: ["GET /orgs/{org}/failed_invitations"], listForAuthenticatedUser: ["GET /user/orgs"], listForUser: ["GET /users/{username}/orgs"], listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], listMembers: ["GET /orgs/{org}/members"], listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], + listPatGrantRepositories: [ + "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories" + ], + listPatGrantRequestRepositories: [ + "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories" + ], + listPatGrantRequests: ["GET /orgs/{org}/personal-access-token-requests"], + listPatGrants: ["GET /orgs/{org}/personal-access-tokens"], listPendingInvitations: ["GET /orgs/{org}/invitations"], listPublicMembers: ["GET /orgs/{org}/public_members"], + listSecurityManagerTeams: ["GET /orgs/{org}/security-managers"], + listWebhookDeliveries: ["GET /orgs/{org}/hooks/{hook_id}/deliveries"], listWebhooks: ["GET /orgs/{org}/hooks"], pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], + redeliverWebhookDelivery: [ + "POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" + ], + removeCustomProperty: [ + "DELETE 
/orgs/{org}/properties/schema/{custom_property_name}" + ], removeMember: ["DELETE /orgs/{org}/members/{username}"], removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], - removeOutsideCollaborator: ["DELETE /orgs/{org}/outside_collaborators/{username}"], - removePublicMembershipForAuthenticatedUser: ["DELETE /orgs/{org}/public_members/{username}"], + removeOutsideCollaborator: [ + "DELETE /orgs/{org}/outside_collaborators/{username}" + ], + removePublicMembershipForAuthenticatedUser: [ + "DELETE /orgs/{org}/public_members/{username}" + ], + removeSecurityManagerTeam: [ + "DELETE /orgs/{org}/security-managers/teams/{team_slug}" + ], + reviewPatGrantRequest: [ + "POST /orgs/{org}/personal-access-token-requests/{pat_request_id}" + ], + reviewPatGrantRequestsInBulk: [ + "POST /orgs/{org}/personal-access-token-requests" + ], setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], - setPublicMembershipForAuthenticatedUser: ["PUT /orgs/{org}/public_members/{username}"], + setPublicMembershipForAuthenticatedUser: [ + "PUT /orgs/{org}/public_members/{username}" + ], unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], update: ["PATCH /orgs/{org}"], - updateMembershipForAuthenticatedUser: ["PATCH /user/memberships/orgs/{org}"], - updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"] + updateMembershipForAuthenticatedUser: [ + "PATCH /user/memberships/orgs/{org}" + ], + updatePatAccess: ["POST /orgs/{org}/personal-access-tokens/{pat_id}"], + updatePatAccesses: ["POST /orgs/{org}/personal-access-tokens"], + updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"], + updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"] + }, + packages: { + deletePackageForAuthenticatedUser: [ + "DELETE /user/packages/{package_type}/{package_name}" + ], + deletePackageForOrg: [ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}" + ], + deletePackageForUser: [ + "DELETE /users/{username}/packages/{package_type}/{package_name}" + ], + deletePackageVersionForAuthenticatedUser: [ + "DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + deletePackageVersionForOrg: [ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + deletePackageVersionForUser: [ + "DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getAllPackageVersionsForAPackageOwnedByAnOrg: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", + {}, + { renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] } + ], + getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions", + {}, + { + renamed: [ + "packages", + "getAllPackageVersionsForPackageOwnedByAuthenticatedUser" + ] + } + ], + getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions" + ], + getAllPackageVersionsForPackageOwnedByOrg: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions" + ], + getAllPackageVersionsForPackageOwnedByUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}/versions" + ], + getPackageForAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}" + ], + getPackageForOrganization: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}" + ], + getPackageForUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}" + ], + getPackageVersionForAuthenticatedUser: [ + "GET 
/user/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getPackageVersionForOrganization: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getPackageVersionForUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + listDockerMigrationConflictingPackagesForAuthenticatedUser: [ + "GET /user/docker/conflicts" + ], + listDockerMigrationConflictingPackagesForOrganization: [ + "GET /orgs/{org}/docker/conflicts" + ], + listDockerMigrationConflictingPackagesForUser: [ + "GET /users/{username}/docker/conflicts" + ], + listPackagesForAuthenticatedUser: ["GET /user/packages"], + listPackagesForOrganization: ["GET /orgs/{org}/packages"], + listPackagesForUser: ["GET /users/{username}/packages"], + restorePackageForAuthenticatedUser: [ + "POST /user/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageForOrg: [ + "POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageForUser: [ + "POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageVersionForAuthenticatedUser: [ + "POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ], + restorePackageVersionForOrg: [ + "POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ], + restorePackageVersionForUser: [ + "POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ] }, projects: { - addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}", { - mediaType: { - previews: ["inertia"] - } - }], - createCard: ["POST /projects/columns/{column_id}/cards", { - mediaType: { - previews: ["inertia"] - } - }], - createColumn: ["POST /projects/{project_id}/columns", { - mediaType: { - previews: ["inertia"] - } - }], - createForAuthenticatedUser: ["POST /user/projects", { - mediaType: { - previews: ["inertia"] - } - }], - createForOrg: ["POST /orgs/{org}/projects", { - mediaType: { - previews: ["inertia"] - } - }], - createForRepo: ["POST /repos/{owner}/{repo}/projects", { - mediaType: { - previews: ["inertia"] - } - }], - delete: ["DELETE /projects/{project_id}", { - mediaType: { - previews: ["inertia"] - } - }], - deleteCard: ["DELETE /projects/columns/cards/{card_id}", { - mediaType: { - previews: ["inertia"] - } - }], - deleteColumn: ["DELETE /projects/columns/{column_id}", { - mediaType: { - previews: ["inertia"] - } - }], - get: ["GET /projects/{project_id}", { - mediaType: { - previews: ["inertia"] - } - }], - getCard: ["GET /projects/columns/cards/{card_id}", { - mediaType: { - previews: ["inertia"] - } - }], - getColumn: ["GET /projects/columns/{column_id}", { - mediaType: { - previews: ["inertia"] - } - }], - getPermissionForUser: ["GET /projects/{project_id}/collaborators/{username}/permission", { - mediaType: { - previews: ["inertia"] - } - }], - listCards: ["GET /projects/columns/{column_id}/cards", { - mediaType: { - previews: ["inertia"] - } - }], - listCollaborators: ["GET /projects/{project_id}/collaborators", { - mediaType: { - previews: ["inertia"] - } - }], - listColumns: ["GET /projects/{project_id}/columns", { - mediaType: { - previews: ["inertia"] - } - }], - listForOrg: ["GET /orgs/{org}/projects", { - mediaType: { - previews: ["inertia"] - } - }], - listForRepo: ["GET /repos/{owner}/{repo}/projects", { - mediaType: { - previews: ["inertia"] - } - }], - listForUser: ["GET 
/users/{username}/projects", { - mediaType: { - previews: ["inertia"] - } - }], - moveCard: ["POST /projects/columns/cards/{card_id}/moves", { - mediaType: { - previews: ["inertia"] - } - }], - moveColumn: ["POST /projects/columns/{column_id}/moves", { - mediaType: { - previews: ["inertia"] - } - }], - removeCollaborator: ["DELETE /projects/{project_id}/collaborators/{username}", { - mediaType: { - previews: ["inertia"] - } - }], - update: ["PATCH /projects/{project_id}", { - mediaType: { - previews: ["inertia"] - } - }], - updateCard: ["PATCH /projects/columns/cards/{card_id}", { - mediaType: { - previews: ["inertia"] - } - }], - updateColumn: ["PATCH /projects/columns/{column_id}", { - mediaType: { - previews: ["inertia"] - } - }] + addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}"], + createCard: ["POST /projects/columns/{column_id}/cards"], + createColumn: ["POST /projects/{project_id}/columns"], + createForAuthenticatedUser: ["POST /user/projects"], + createForOrg: ["POST /orgs/{org}/projects"], + createForRepo: ["POST /repos/{owner}/{repo}/projects"], + delete: ["DELETE /projects/{project_id}"], + deleteCard: ["DELETE /projects/columns/cards/{card_id}"], + deleteColumn: ["DELETE /projects/columns/{column_id}"], + get: ["GET /projects/{project_id}"], + getCard: ["GET /projects/columns/cards/{card_id}"], + getColumn: ["GET /projects/columns/{column_id}"], + getPermissionForUser: [ + "GET /projects/{project_id}/collaborators/{username}/permission" + ], + listCards: ["GET /projects/columns/{column_id}/cards"], + listCollaborators: ["GET /projects/{project_id}/collaborators"], + listColumns: ["GET /projects/{project_id}/columns"], + listForOrg: ["GET /orgs/{org}/projects"], + listForRepo: ["GET /repos/{owner}/{repo}/projects"], + listForUser: ["GET /users/{username}/projects"], + moveCard: ["POST /projects/columns/cards/{card_id}/moves"], + moveColumn: ["POST /projects/columns/{column_id}/moves"], + removeCollaborator: [ + "DELETE /projects/{project_id}/collaborators/{username}" + ], + update: ["PATCH /projects/{project_id}"], + updateCard: ["PATCH /projects/columns/cards/{card_id}"], + updateColumn: ["PATCH /projects/columns/{column_id}"] }, pulls: { checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], create: ["POST /repos/{owner}/{repo}/pulls"], - createReplyForReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies"], + createReplyForReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies" + ], createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], - createReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments"], - deletePendingReview: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], - deleteReviewComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}"], - dismissReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals"], + createReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments" + ], + deletePendingReview: [ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], + deleteReviewComment: [ + "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}" + ], + dismissReview: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals" + ], get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], - getReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], + getReview: [ + "GET 
/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], list: ["GET /repos/{owner}/{repo}/pulls"], - listCommentsForReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"], + listCommentsForReview: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments" + ], listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], - listRequestedReviewers: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], - listReviewComments: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"], + listRequestedReviewers: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + listReviewComments: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments" + ], listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], - removeRequestedReviewers: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], - requestReviewers: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], - submitReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"], + removeRequestedReviewers: [ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + requestReviewers: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + submitReview: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events" + ], update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], - updateBranch: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch", { - mediaType: { - previews: ["lydian"] - } - }], - updateReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], - updateReviewComment: ["PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"] - }, - rateLimit: { - get: ["GET /rate_limit"] + updateBranch: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch" + ], + updateReview: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], + updateReviewComment: [ + "PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}" + ] }, + rateLimit: { get: ["GET /rate_limit"] }, reactions: { - createForCommitComment: ["POST /repos/{owner}/{repo}/comments/{comment_id}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - createForIssue: ["POST /repos/{owner}/{repo}/issues/{issue_number}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - createForIssueComment: ["POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - createForPullRequestReviewComment: ["POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - createForTeamDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - createForTeamDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - deleteForCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}", { - mediaType: { - 
previews: ["squirrel-girl"] - } - }], - deleteForIssue: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - deleteForIssueComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - deleteForPullRequestComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - deleteForTeamDiscussion: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - deleteForTeamDiscussionComment: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - deleteLegacy: ["DELETE /reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] - } - }, { - deprecated: "octokit.reactions.deleteLegacy() is deprecated, see https://developer.github.com/v3/reactions/#delete-a-reaction-legacy" - }], - listForCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - listForIssueComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - listForPullRequestReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - listForTeamDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - listForTeamDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }] + createForCommitComment: [ + "POST /repos/{owner}/{repo}/comments/{comment_id}/reactions" + ], + createForIssue: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/reactions" + ], + createForIssueComment: [ + "POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" + ], + createForPullRequestReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" + ], + createForRelease: [ + "POST /repos/{owner}/{repo}/releases/{release_id}/reactions" + ], + createForTeamDiscussionCommentInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" + ], + createForTeamDiscussionInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" + ], + deleteForCommitComment: [ + "DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForIssue: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}" + ], + deleteForIssueComment: [ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForPullRequestComment: [ + "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForRelease: [ + "DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}" + ], + deleteForTeamDiscussion: [ + "DELETE 
/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}" + ], + deleteForTeamDiscussionComment: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}" + ], + listForCommitComment: [ + "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions" + ], + listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"], + listForIssueComment: [ + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" + ], + listForPullRequestReviewComment: [ + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" + ], + listForRelease: [ + "GET /repos/{owner}/{repo}/releases/{release_id}/reactions" + ], + listForTeamDiscussionCommentInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" + ], + listForTeamDiscussionInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" + ] }, repos: { - acceptInvitation: ["PATCH /user/repository_invitations/{invitation_id}"], - addAppAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps" - }], + acceptInvitation: [ + "PATCH /user/repository_invitations/{invitation_id}", + {}, + { renamed: ["repos", "acceptInvitationForAuthenticatedUser"] } + ], + acceptInvitationForAuthenticatedUser: [ + "PATCH /user/repository_invitations/{invitation_id}" + ], + addAppAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], - addStatusCheckContexts: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts" - }], - addTeamAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams" - }], - addUserAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users" - }], + addStatusCheckContexts: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + addTeamAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + addUserAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], + checkAutomatedSecurityFixes: [ + "GET /repos/{owner}/{repo}/automated-security-fixes" + ], checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], - checkVulnerabilityAlerts: ["GET /repos/{owner}/{repo}/vulnerability-alerts", { - mediaType: { - previews: ["dorian"] - } - }], + checkVulnerabilityAlerts: [ + "GET /repos/{owner}/{repo}/vulnerability-alerts" + ], + codeownersErrors: ["GET /repos/{owner}/{repo}/codeowners/errors"], compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], - createCommitComment: ["POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"], - createCommitSignatureProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { - mediaType: { - previews: ["zzzax"] - } - }], + compareCommitsWithBasehead: [ + "GET /repos/{owner}/{repo}/compare/{basehead}" + ], + createAutolink: ["POST /repos/{owner}/{repo}/autolinks"], + createCommitComment: [ + "POST 
/repos/{owner}/{repo}/commits/{commit_sha}/comments" + ], + createCommitSignatureProtection: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], createDeployKey: ["POST /repos/{owner}/{repo}/keys"], createDeployment: ["POST /repos/{owner}/{repo}/deployments"], - createDeploymentStatus: ["POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], + createDeploymentBranchPolicy: [ + "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" + ], + createDeploymentProtectionRule: [ + "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" + ], + createDeploymentStatus: [ + "POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" + ], createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], createForAuthenticatedUser: ["POST /user/repos"], createFork: ["POST /repos/{owner}/{repo}/forks"], createInOrg: ["POST /orgs/{org}/repos"], + createOrUpdateEnvironment: [ + "PUT /repos/{owner}/{repo}/environments/{environment_name}" + ], createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], - createPagesSite: ["POST /repos/{owner}/{repo}/pages", { - mediaType: { - previews: ["switcheroo"] - } - }], + createOrgRuleset: ["POST /orgs/{org}/rulesets"], + createPagesDeployment: ["POST /repos/{owner}/{repo}/pages/deployment"], + createPagesSite: ["POST /repos/{owner}/{repo}/pages"], createRelease: ["POST /repos/{owner}/{repo}/releases"], - createUsingTemplate: ["POST /repos/{template_owner}/{template_repo}/generate", { - mediaType: { - previews: ["baptiste"] - } - }], + createRepoRuleset: ["POST /repos/{owner}/{repo}/rulesets"], + createTagProtection: ["POST /repos/{owner}/{repo}/tags/protection"], + createUsingTemplate: [ + "POST /repos/{template_owner}/{template_repo}/generate" + ], createWebhook: ["POST /repos/{owner}/{repo}/hooks"], - declineInvitation: ["DELETE /user/repository_invitations/{invitation_id}"], + declineInvitation: [ + "DELETE /user/repository_invitations/{invitation_id}", + {}, + { renamed: ["repos", "declineInvitationForAuthenticatedUser"] } + ], + declineInvitationForAuthenticatedUser: [ + "DELETE /user/repository_invitations/{invitation_id}" + ], delete: ["DELETE /repos/{owner}/{repo}"], - deleteAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], - deleteAdminBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], - deleteBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection"], + deleteAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" + ], + deleteAdminBranchProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + deleteAnEnvironment: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}" + ], + deleteAutolink: ["DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}"], + deleteBranchProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection" + ], deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], - deleteCommitSignatureProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { - mediaType: { - previews: ["zzzax"] - } - }], + deleteCommitSignatureProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], deleteDeployKey: ["DELETE 
/repos/{owner}/{repo}/keys/{key_id}"], - deleteDeployment: ["DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"], + deleteDeployment: [ + "DELETE /repos/{owner}/{repo}/deployments/{deployment_id}" + ], + deleteDeploymentBranchPolicy: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], - deleteInvitation: ["DELETE /repos/{owner}/{repo}/invitations/{invitation_id}"], - deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages", { - mediaType: { - previews: ["switcheroo"] - } - }], - deletePullRequestReviewProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], + deleteInvitation: [ + "DELETE /repos/{owner}/{repo}/invitations/{invitation_id}" + ], + deleteOrgRuleset: ["DELETE /orgs/{org}/rulesets/{ruleset_id}"], + deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages"], + deletePullRequestReviewProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], - deleteReleaseAsset: ["DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"], + deleteReleaseAsset: [ + "DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}" + ], + deleteRepoRuleset: ["DELETE /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + deleteTagProtection: [ + "DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}" + ], deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], - disableAutomatedSecurityFixes: ["DELETE /repos/{owner}/{repo}/automated-security-fixes", { - mediaType: { - previews: ["london"] - } - }], - disableVulnerabilityAlerts: ["DELETE /repos/{owner}/{repo}/vulnerability-alerts", { - mediaType: { - previews: ["dorian"] - } - }], - downloadArchive: ["GET /repos/{owner}/{repo}/{archive_format}/{ref}"], - enableAutomatedSecurityFixes: ["PUT /repos/{owner}/{repo}/automated-security-fixes", { - mediaType: { - previews: ["london"] - } - }], - enableVulnerabilityAlerts: ["PUT /repos/{owner}/{repo}/vulnerability-alerts", { - mediaType: { - previews: ["dorian"] - } - }], + disableAutomatedSecurityFixes: [ + "DELETE /repos/{owner}/{repo}/automated-security-fixes" + ], + disableDeploymentProtectionRule: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" + ], + disablePrivateVulnerabilityReporting: [ + "DELETE /repos/{owner}/{repo}/private-vulnerability-reporting" + ], + disableVulnerabilityAlerts: [ + "DELETE /repos/{owner}/{repo}/vulnerability-alerts" + ], + downloadArchive: [ + "GET /repos/{owner}/{repo}/zipball/{ref}", + {}, + { renamed: ["repos", "downloadZipballArchive"] } + ], + downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"], + downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"], + enableAutomatedSecurityFixes: [ + "PUT /repos/{owner}/{repo}/automated-security-fixes" + ], + enablePrivateVulnerabilityReporting: [ + "PUT /repos/{owner}/{repo}/private-vulnerability-reporting" + ], + enableVulnerabilityAlerts: [ + "PUT /repos/{owner}/{repo}/vulnerability-alerts" + ], + generateReleaseNotes: [ + "POST /repos/{owner}/{repo}/releases/generate-notes" + ], get: ["GET /repos/{owner}/{repo}"], - getAccessRestrictions: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], - getAdminBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], - 
getAllStatusCheckContexts: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"], - getAllTopics: ["GET /repos/{owner}/{repo}/topics", { - mediaType: { - previews: ["mercy"] - } - }], - getAppsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"], + getAccessRestrictions: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" + ], + getAdminBranchProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + getAllDeploymentProtectionRules: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" + ], + getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"], + getAllStatusCheckContexts: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" + ], + getAllTopics: ["GET /repos/{owner}/{repo}/topics"], + getAppsWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" + ], + getAutolink: ["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"], getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], - getBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection"], + getBranchProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection" + ], + getBranchRules: ["GET /repos/{owner}/{repo}/rules/branches/{branch}"], getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], - getCollaboratorPermissionLevel: ["GET /repos/{owner}/{repo}/collaborators/{username}/permission"], + getCollaboratorPermissionLevel: [ + "GET /repos/{owner}/{repo}/collaborators/{username}/permission" + ], getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], - getCommitSignatureProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { - mediaType: { - previews: ["zzzax"] - } - }], - getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile", { - mediaType: { - previews: ["black-panther"] - } - }], + getCommitSignatureProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], + getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], + getCustomDeploymentProtectionRule: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" + ], + getCustomPropertiesValues: ["GET /repos/{owner}/{repo}/properties/values"], getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], - getDeploymentStatus: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}"], + getDeploymentBranchPolicy: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], + getDeploymentStatus: [ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}" + ], + getEnvironment: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}" + ], getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], 
getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], + getOrgRuleSuite: ["GET /orgs/{org}/rulesets/rule-suites/{rule_suite_id}"], + getOrgRuleSuites: ["GET /orgs/{org}/rulesets/rule-suites"], + getOrgRuleset: ["GET /orgs/{org}/rulesets/{ruleset_id}"], + getOrgRulesets: ["GET /orgs/{org}/rulesets"], getPages: ["GET /repos/{owner}/{repo}/pages"], getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], + getPagesHealthCheck: ["GET /repos/{owner}/{repo}/pages/health"], getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], - getPullRequestReviewProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], + getPullRequestReviewProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], getReadme: ["GET /repos/{owner}/{repo}/readme"], + getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"], getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], - getStatusChecksProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], - getTeamsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"], + getRepoRuleSuite: [ + "GET /repos/{owner}/{repo}/rulesets/rule-suites/{rule_suite_id}" + ], + getRepoRuleSuites: ["GET /repos/{owner}/{repo}/rulesets/rule-suites"], + getRepoRuleset: ["GET /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + getRepoRulesets: ["GET /repos/{owner}/{repo}/rulesets"], + getStatusChecksProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], + getTeamsWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" + ], getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], - getUsersWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"], + getUsersWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" + ], getViews: ["GET /repos/{owner}/{repo}/traffic/views"], getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], + getWebhookConfigForRepo: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/config" + ], + getWebhookDelivery: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}" + ], + listActivities: ["GET /repos/{owner}/{repo}/activity"], + listAutolinks: ["GET /repos/{owner}/{repo}/autolinks"], listBranches: ["GET /repos/{owner}/{repo}/branches"], - listBranchesForHeadCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head", { - mediaType: { - previews: ["groot"] - } - }], + listBranchesForHeadCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head" + ], listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], - listCommentsForCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"], + listCommentsForCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments" + ], listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], - listCommitStatusesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/statuses"], + listCommitStatusesForRef: [ + "GET 
/repos/{owner}/{repo}/commits/{ref}/statuses" + ], listCommits: ["GET /repos/{owner}/{repo}/commits"], listContributors: ["GET /repos/{owner}/{repo}/contributors"], + listCustomDeploymentRuleIntegrations: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps" + ], listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], - listDeploymentStatuses: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], + listDeploymentBranchPolicies: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" + ], + listDeploymentStatuses: [ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" + ], listDeployments: ["GET /repos/{owner}/{repo}/deployments"], listForAuthenticatedUser: ["GET /user/repos"], listForOrg: ["GET /orgs/{org}/repos"], @@ -7219,530 +9348,821 @@ const Endpoints = { listLanguages: ["GET /repos/{owner}/{repo}/languages"], listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], listPublic: ["GET /repositories"], - listPullRequestsAssociatedWithCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", { - mediaType: { - previews: ["groot"] - } - }], - listReleaseAssets: ["GET /repos/{owner}/{repo}/releases/{release_id}/assets"], + listPullRequestsAssociatedWithCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls" + ], + listReleaseAssets: [ + "GET /repos/{owner}/{repo}/releases/{release_id}/assets" + ], listReleases: ["GET /repos/{owner}/{repo}/releases"], + listTagProtection: ["GET /repos/{owner}/{repo}/tags/protection"], listTags: ["GET /repos/{owner}/{repo}/tags"], listTeams: ["GET /repos/{owner}/{repo}/teams"], + listWebhookDeliveries: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries" + ], listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], merge: ["POST /repos/{owner}/{repo}/merges"], + mergeUpstream: ["POST /repos/{owner}/{repo}/merge-upstream"], pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], - removeAppAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps" - }], - removeCollaborator: ["DELETE /repos/{owner}/{repo}/collaborators/{username}"], - removeStatusCheckContexts: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts" - }], - removeStatusCheckProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], - removeTeamAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams" - }], - removeUserAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users" - }], - replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics", { - mediaType: { - previews: ["mercy"] - } - }], + redeliverWebhookDelivery: [ + "POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" + ], + removeAppAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], + removeCollaborator: [ + "DELETE /repos/{owner}/{repo}/collaborators/{username}" + ], + removeStatusCheckContexts: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + removeStatusCheckProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], + removeTeamAccessRestrictions: [ + 
"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + removeUserAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], + renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"], + replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics"], requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], - setAdminBranchProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], - setAppAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps" - }], - setStatusCheckContexts: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts" - }], - setTeamAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams" - }], - setUserAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users" - }], + setAdminBranchProtection: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + setAppAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], + setStatusCheckContexts: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + setTeamAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + setUserAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], transfer: ["POST /repos/{owner}/{repo}/transfer"], update: ["PATCH /repos/{owner}/{repo}"], - updateBranchProtection: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection"], + updateBranchProtection: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection" + ], updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], + updateDeploymentBranchPolicy: [ + "PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], - updateInvitation: ["PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"], - updatePullRequestReviewProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], + updateInvitation: [ + "PATCH /repos/{owner}/{repo}/invitations/{invitation_id}" + ], + updateOrgRuleset: ["PUT /orgs/{org}/rulesets/{ruleset_id}"], + updatePullRequestReviewProtection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], - updateReleaseAsset: ["PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"], - updateStatusCheckPotection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], + updateReleaseAsset: [ + "PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}" + ], + updateRepoRuleset: ["PUT /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + updateStatusCheckPotection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", + {}, + { renamed: ["repos", 
"updateStatusCheckProtection"] } + ], + updateStatusCheckProtection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], - uploadReleaseAsset: ["POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", { - baseUrl: "https://uploads.github.com" - }] + updateWebhookConfigForRepo: [ + "PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config" + ], + uploadReleaseAsset: [ + "POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", + { baseUrl: "https://uploads.github.com" } + ] }, search: { code: ["GET /search/code"], - commits: ["GET /search/commits", { - mediaType: { - previews: ["cloak"] - } - }], + commits: ["GET /search/commits"], issuesAndPullRequests: ["GET /search/issues"], labels: ["GET /search/labels"], repos: ["GET /search/repositories"], - topics: ["GET /search/topics", { - mediaType: { - previews: ["mercy"] - } - }], + topics: ["GET /search/topics"], users: ["GET /search/users"] }, + secretScanning: { + getAlert: [ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" + ], + listAlertsForEnterprise: [ + "GET /enterprises/{enterprise}/secret-scanning/alerts" + ], + listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"], + listLocationsForAlert: [ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations" + ], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" + ] + }, + securityAdvisories: { + createPrivateVulnerabilityReport: [ + "POST /repos/{owner}/{repo}/security-advisories/reports" + ], + createRepositoryAdvisory: [ + "POST /repos/{owner}/{repo}/security-advisories" + ], + createRepositoryAdvisoryCveRequest: [ + "POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/cve" + ], + getGlobalAdvisory: ["GET /advisories/{ghsa_id}"], + getRepositoryAdvisory: [ + "GET /repos/{owner}/{repo}/security-advisories/{ghsa_id}" + ], + listGlobalAdvisories: ["GET /advisories"], + listOrgRepositoryAdvisories: ["GET /orgs/{org}/security-advisories"], + listRepositoryAdvisories: ["GET /repos/{owner}/{repo}/security-advisories"], + updateRepositoryAdvisory: [ + "PATCH /repos/{owner}/{repo}/security-advisories/{ghsa_id}" + ] + }, teams: { - addOrUpdateMembershipForUserInOrg: ["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"], - addOrUpdateProjectPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}", { - mediaType: { - previews: ["inertia"] - } - }], - addOrUpdateRepoPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], - checkPermissionsForProjectInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}", { - mediaType: { - previews: ["inertia"] - } - }], - checkPermissionsForRepoInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], + addOrUpdateMembershipForUserInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], + addOrUpdateProjectPermissionsInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + addOrUpdateRepoPermissionsInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], + checkPermissionsForProjectInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + checkPermissionsForRepoInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], create: ["POST /orgs/{org}/teams"], - createDiscussionCommentInOrg: ["POST 
/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], + createDiscussionCommentInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" + ], createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], - deleteDiscussionCommentInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], - deleteDiscussionInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], + deleteDiscussionCommentInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + deleteDiscussionInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], getByName: ["GET /orgs/{org}/teams/{team_slug}"], - getDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], - getDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], - getMembershipForUserInOrg: ["GET /orgs/{org}/teams/{team_slug}/memberships/{username}"], + getDiscussionCommentInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + getDiscussionInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], + getMembershipForUserInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], list: ["GET /orgs/{org}/teams"], listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], - listDiscussionCommentsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], + listDiscussionCommentsInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" + ], listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], listForAuthenticatedUser: ["GET /user/teams"], listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], - listPendingInvitationsInOrg: ["GET /orgs/{org}/teams/{team_slug}/invitations"], - listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects", { - mediaType: { - previews: ["inertia"] - } - }], + listPendingInvitationsInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/invitations" + ], + listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects"], listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], - removeMembershipForUserInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"], - removeProjectInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}"], - removeRepoInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], - updateDiscussionCommentInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], - updateDiscussionInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], + removeMembershipForUserInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], + removeProjectInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + removeRepoInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], + updateDiscussionCommentInOrg: [ + "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + updateDiscussionInOrg: [ + "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"] }, users: { - addEmailForAuthenticated: ["POST /user/emails"], + 
addEmailForAuthenticated: [ + "POST /user/emails", + {}, + { renamed: ["users", "addEmailForAuthenticatedUser"] } + ], + addEmailForAuthenticatedUser: ["POST /user/emails"], + addSocialAccountForAuthenticatedUser: ["POST /user/social_accounts"], block: ["PUT /user/blocks/{username}"], checkBlocked: ["GET /user/blocks/{username}"], checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], - createGpgKeyForAuthenticated: ["POST /user/gpg_keys"], - createPublicSshKeyForAuthenticated: ["POST /user/keys"], - deleteEmailForAuthenticated: ["DELETE /user/emails"], - deleteGpgKeyForAuthenticated: ["DELETE /user/gpg_keys/{gpg_key_id}"], - deletePublicSshKeyForAuthenticated: ["DELETE /user/keys/{key_id}"], + createGpgKeyForAuthenticated: [ + "POST /user/gpg_keys", + {}, + { renamed: ["users", "createGpgKeyForAuthenticatedUser"] } + ], + createGpgKeyForAuthenticatedUser: ["POST /user/gpg_keys"], + createPublicSshKeyForAuthenticated: [ + "POST /user/keys", + {}, + { renamed: ["users", "createPublicSshKeyForAuthenticatedUser"] } + ], + createPublicSshKeyForAuthenticatedUser: ["POST /user/keys"], + createSshSigningKeyForAuthenticatedUser: ["POST /user/ssh_signing_keys"], + deleteEmailForAuthenticated: [ + "DELETE /user/emails", + {}, + { renamed: ["users", "deleteEmailForAuthenticatedUser"] } + ], + deleteEmailForAuthenticatedUser: ["DELETE /user/emails"], + deleteGpgKeyForAuthenticated: [ + "DELETE /user/gpg_keys/{gpg_key_id}", + {}, + { renamed: ["users", "deleteGpgKeyForAuthenticatedUser"] } + ], + deleteGpgKeyForAuthenticatedUser: ["DELETE /user/gpg_keys/{gpg_key_id}"], + deletePublicSshKeyForAuthenticated: [ + "DELETE /user/keys/{key_id}", + {}, + { renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"] } + ], + deletePublicSshKeyForAuthenticatedUser: ["DELETE /user/keys/{key_id}"], + deleteSocialAccountForAuthenticatedUser: ["DELETE /user/social_accounts"], + deleteSshSigningKeyForAuthenticatedUser: [ + "DELETE /user/ssh_signing_keys/{ssh_signing_key_id}" + ], follow: ["PUT /user/following/{username}"], getAuthenticated: ["GET /user"], getByUsername: ["GET /users/{username}"], getContextForUser: ["GET /users/{username}/hovercard"], - getGpgKeyForAuthenticated: ["GET /user/gpg_keys/{gpg_key_id}"], - getPublicSshKeyForAuthenticated: ["GET /user/keys/{key_id}"], + getGpgKeyForAuthenticated: [ + "GET /user/gpg_keys/{gpg_key_id}", + {}, + { renamed: ["users", "getGpgKeyForAuthenticatedUser"] } + ], + getGpgKeyForAuthenticatedUser: ["GET /user/gpg_keys/{gpg_key_id}"], + getPublicSshKeyForAuthenticated: [ + "GET /user/keys/{key_id}", + {}, + { renamed: ["users", "getPublicSshKeyForAuthenticatedUser"] } + ], + getPublicSshKeyForAuthenticatedUser: ["GET /user/keys/{key_id}"], + getSshSigningKeyForAuthenticatedUser: [ + "GET /user/ssh_signing_keys/{ssh_signing_key_id}" + ], list: ["GET /users"], - listBlockedByAuthenticated: ["GET /user/blocks"], - listEmailsForAuthenticated: ["GET /user/emails"], - listFollowedByAuthenticated: ["GET /user/following"], + listBlockedByAuthenticated: [ + "GET /user/blocks", + {}, + { renamed: ["users", "listBlockedByAuthenticatedUser"] } + ], + listBlockedByAuthenticatedUser: ["GET /user/blocks"], + listEmailsForAuthenticated: [ + "GET /user/emails", + {}, + { renamed: ["users", "listEmailsForAuthenticatedUser"] } + ], + listEmailsForAuthenticatedUser: ["GET /user/emails"], + listFollowedByAuthenticated: [ + "GET /user/following", + {}, + { renamed: ["users", 
"listFollowedByAuthenticatedUser"] } + ], + listFollowedByAuthenticatedUser: ["GET /user/following"], listFollowersForAuthenticatedUser: ["GET /user/followers"], listFollowersForUser: ["GET /users/{username}/followers"], listFollowingForUser: ["GET /users/{username}/following"], - listGpgKeysForAuthenticated: ["GET /user/gpg_keys"], + listGpgKeysForAuthenticated: [ + "GET /user/gpg_keys", + {}, + { renamed: ["users", "listGpgKeysForAuthenticatedUser"] } + ], + listGpgKeysForAuthenticatedUser: ["GET /user/gpg_keys"], listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], - listPublicEmailsForAuthenticated: ["GET /user/public_emails"], + listPublicEmailsForAuthenticated: [ + "GET /user/public_emails", + {}, + { renamed: ["users", "listPublicEmailsForAuthenticatedUser"] } + ], + listPublicEmailsForAuthenticatedUser: ["GET /user/public_emails"], listPublicKeysForUser: ["GET /users/{username}/keys"], - listPublicSshKeysForAuthenticated: ["GET /user/keys"], - setPrimaryEmailVisibilityForAuthenticated: ["PATCH /user/email/visibility"], + listPublicSshKeysForAuthenticated: [ + "GET /user/keys", + {}, + { renamed: ["users", "listPublicSshKeysForAuthenticatedUser"] } + ], + listPublicSshKeysForAuthenticatedUser: ["GET /user/keys"], + listSocialAccountsForAuthenticatedUser: ["GET /user/social_accounts"], + listSocialAccountsForUser: ["GET /users/{username}/social_accounts"], + listSshSigningKeysForAuthenticatedUser: ["GET /user/ssh_signing_keys"], + listSshSigningKeysForUser: ["GET /users/{username}/ssh_signing_keys"], + setPrimaryEmailVisibilityForAuthenticated: [ + "PATCH /user/email/visibility", + {}, + { renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"] } + ], + setPrimaryEmailVisibilityForAuthenticatedUser: [ + "PATCH /user/email/visibility" + ], unblock: ["DELETE /user/blocks/{username}"], unfollow: ["DELETE /user/following/{username}"], updateAuthenticated: ["PATCH /user"] } }; - -const VERSION = "4.2.1"; - -function endpointsToMethods(octokit, endpointsMap) { - const newMethods = {}; - - for (const [scope, endpoints] of Object.entries(endpointsMap)) { - for (const [methodName, endpoint] of Object.entries(endpoints)) { - const [route, defaults, decorations] = endpoint; - const [method, url] = route.split(/ /); - const endpointDefaults = Object.assign({ +var endpoints_default = Endpoints; + +// pkg/dist-src/endpoints-to-methods.js +var endpointMethodsMap = /* @__PURE__ */ new Map(); +for (const [scope, endpoints] of Object.entries(endpoints_default)) { + for (const [methodName, endpoint] of Object.entries(endpoints)) { + const [route, defaults, decorations] = endpoint; + const [method, url] = route.split(/ /); + const endpointDefaults = Object.assign( + { method, url - }, defaults); - - if (!newMethods[scope]) { - newMethods[scope] = {}; - } - - const scopeMethods = newMethods[scope]; - - if (decorations) { - scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations); - continue; - } - - scopeMethods[methodName] = octokit.request.defaults(endpointDefaults); + }, + defaults + ); + if (!endpointMethodsMap.has(scope)) { + endpointMethodsMap.set(scope, /* @__PURE__ */ new Map()); + } + endpointMethodsMap.get(scope).set(methodName, { + scope, + methodName, + endpointDefaults, + decorations + }); + } +} +var handler = { + has({ scope }, methodName) { + return endpointMethodsMap.get(scope).has(methodName); + }, + getOwnPropertyDescriptor(target, methodName) { + return { + value: this.get(target, methodName), + // ensures method is in the cache + 
configurable: true, + writable: true, + enumerable: true + }; + }, + defineProperty(target, methodName, descriptor) { + Object.defineProperty(target.cache, methodName, descriptor); + return true; + }, + deleteProperty(target, methodName) { + delete target.cache[methodName]; + return true; + }, + ownKeys({ scope }) { + return [...endpointMethodsMap.get(scope).keys()]; + }, + set(target, methodName, value) { + return target.cache[methodName] = value; + }, + get({ octokit, scope, cache }, methodName) { + if (cache[methodName]) { + return cache[methodName]; + } + const method = endpointMethodsMap.get(scope).get(methodName); + if (!method) { + return void 0; + } + const { endpointDefaults, decorations } = method; + if (decorations) { + cache[methodName] = decorate( + octokit, + scope, + methodName, + endpointDefaults, + decorations + ); + } else { + cache[methodName] = octokit.request.defaults(endpointDefaults); } + return cache[methodName]; + } +}; +function endpointsToMethods(octokit) { + const newMethods = {}; + for (const scope of endpointMethodsMap.keys()) { + newMethods[scope] = new Proxy({ octokit, scope, cache: {} }, handler); } - return newMethods; } - function decorate(octokit, scope, methodName, defaults, decorations) { const requestWithDefaults = octokit.request.defaults(defaults); - /* istanbul ignore next */ - function withDecorations(...args) { - // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData` - + let options = requestWithDefaults.endpoint.merge(...args); if (decorations.mapToData) { options = Object.assign({}, options, { data: options[decorations.mapToData], - [decorations.mapToData]: undefined + [decorations.mapToData]: void 0 }); return requestWithDefaults(options); } - if (decorations.renamed) { const [newScope, newMethodName] = decorations.renamed; - octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`); + octokit.log.warn( + `octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()` + ); } - if (decorations.deprecated) { octokit.log.warn(decorations.deprecated); } - if (decorations.renamedParameters) { - // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - const options = requestWithDefaults.endpoint.merge(...args); - - for (const [name, alias] of Object.entries(decorations.renamedParameters)) { - if (name in options) { - octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead`); - - if (!(alias in options)) { - options[alias] = options[name]; + const options2 = requestWithDefaults.endpoint.merge(...args); + for (const [name, alias] of Object.entries( + decorations.renamedParameters + )) { + if (name in options2) { + octokit.log.warn( + `"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead` + ); + if (!(alias in options2)) { + options2[alias] = options2[name]; } - - delete options[name]; + delete options2[name]; } } - - return requestWithDefaults(options); - } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - - + return requestWithDefaults(options2); + } return requestWithDefaults(...args); } - return Object.assign(withDecorations, requestWithDefaults); } -/** - * This plugin is a 1:1 copy of internal @octokit/rest plugins. The primary - * goal is to rebuild @octokit/rest on top of @octokit/core. 
Once that is - * done, we will remove the registerEndpoints methods and return the methods - * directly as with the other plugins. At that point we will also remove the - * legacy workarounds and deprecations. - * - * See the plan at - * https://github.com/octokit/plugin-rest-endpoint-methods.js/pull/1 - */ - +// pkg/dist-src/index.js function restEndpointMethods(octokit) { - return endpointsToMethods(octokit, Endpoints); + const api = endpointsToMethods(octokit); + return { + rest: api + }; } restEndpointMethods.VERSION = VERSION; - -exports.restEndpointMethods = restEndpointMethods; -//# sourceMappingURL=index.js.map +function legacyRestEndpointMethods(octokit) { + const api = endpointsToMethods(octokit); + return { + ...api, + rest: api + }; +} +legacyRestEndpointMethods.VERSION = VERSION; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); /***/ }), /***/ 537: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } - -var deprecation = __nccwpck_require__(8932); -var once = _interopDefault(__nccwpck_require__(1223)); - -const logOnce = once(deprecation => console.warn(deprecation)); -/** - * Error with extra properties to help with debugging - */ - -class RequestError extends Error { +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? 
__defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + RequestError: () => RequestError +}); +module.exports = __toCommonJS(dist_src_exports); +var import_deprecation = __nccwpck_require__(8932); +var import_once = __toESM(__nccwpck_require__(1223)); +var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation)); +var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation)); +var RequestError = class extends Error { constructor(message, statusCode, options) { - super(message); // Maintains proper stack trace (only available on V8) - - /* istanbul ignore next */ - + super(message); if (Error.captureStackTrace) { Error.captureStackTrace(this, this.constructor); } - this.name = "HttpError"; this.status = statusCode; - Object.defineProperty(this, "code", { - get() { - logOnce(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); - return statusCode; - } - - }); - this.headers = options.headers || {}; // redact request credentials without mutating original request options - + let headers; + if ("headers" in options && typeof options.headers !== "undefined") { + headers = options.headers; + } + if ("response" in options) { + this.response = options.response; + headers = options.response.headers; + } const requestCopy = Object.assign({}, options.request); - if (options.request.headers.authorization) { requestCopy.headers = Object.assign({}, options.request.headers, { - authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]") + authorization: options.request.headers.authorization.replace( + / .*$/, + " [REDACTED]" + ) }); } - - requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit - // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications - .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended - // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header - .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); + requestCopy.url = requestCopy.url.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]").replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); this.request = requestCopy; + Object.defineProperty(this, "code", { + get() { + logOnceCode( + new import_deprecation.Deprecation( + "[@octokit/request-error] `error.code` is deprecated, use `error.status`." + ) + ); + return statusCode; + } + }); + Object.defineProperty(this, "headers", { + get() { + logOnceHeaders( + new import_deprecation.Deprecation( + "[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`." 
+ ) + ); + return headers || {}; + } + }); } - -} - -exports.RequestError = RequestError; -//# sourceMappingURL=index.js.map +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); /***/ }), /***/ 6234: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); -Object.defineProperty(exports, "__esModule", ({ value: true })); +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + request: () => request +}); +module.exports = __toCommonJS(dist_src_exports); +var import_endpoint = __nccwpck_require__(9440); +var import_universal_user_agent = __nccwpck_require__(5030); -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } +// pkg/dist-src/version.js +var VERSION = "8.1.6"; -var endpoint = __nccwpck_require__(9440); -var universalUserAgent = __nccwpck_require__(5030); -var isPlainObject = __nccwpck_require__(9062); -var nodeFetch = _interopDefault(__nccwpck_require__(467)); -var requestError = __nccwpck_require__(537); +// pkg/dist-src/is-plain-object.js +function isPlainObject(value) { + if (typeof value !== "object" || value === null) + return false; + if (Object.prototype.toString.call(value) !== "[object Object]") + return false; + const proto = Object.getPrototypeOf(value); + if (proto === null) + return true; + const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor; + return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value); +} -const VERSION = "5.4.10"; +// pkg/dist-src/fetch-wrapper.js +var import_request_error = __nccwpck_require__(537); +// pkg/dist-src/get-buffer-response.js function getBufferResponse(response) { return response.arrayBuffer(); } +// pkg/dist-src/fetch-wrapper.js function fetchWrapper(requestOptions) { - if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { + var _a, _b, _c; + const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console; + const parseSuccessResponseBody = ((_a = requestOptions.request) == null ? void 0 : _a.parseSuccessResponseBody) !== false; + if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { requestOptions.body = JSON.stringify(requestOptions.body); } - let headers = {}; let status; let url; - const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch; - return fetch(requestOptions.url, Object.assign({ + let { fetch } = globalThis; + if ((_b = requestOptions.request) == null ? 
void 0 : _b.fetch) { + fetch = requestOptions.request.fetch; + } + if (!fetch) { + throw new Error( + "fetch is not set. Please pass a fetch implementation as new Octokit({ request: { fetch }}). Learn more at https://github.com/octokit/octokit.js/#fetch-missing" + ); + } + return fetch(requestOptions.url, { method: requestOptions.method, body: requestOptions.body, headers: requestOptions.headers, - redirect: requestOptions.redirect - }, requestOptions.request)).then(response => { + signal: (_c = requestOptions.request) == null ? void 0 : _c.signal, + // duplex must be set if request.body is ReadableStream or Async Iterables. + // See https://fetch.spec.whatwg.org/#dom-requestinit-duplex. + ...requestOptions.body && { duplex: "half" } + }).then(async (response) => { url = response.url; status = response.status; - for (const keyAndValue of response.headers) { headers[keyAndValue[0]] = keyAndValue[1]; } - + if ("deprecation" in headers) { + const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/); + const deprecationLink = matches && matches.pop(); + log.warn( + `[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}` + ); + } if (status === 204 || status === 205) { return; - } // GitHub API returns 200 for HEAD requests - - + } if (requestOptions.method === "HEAD") { if (status < 400) { return; } - - throw new requestError.RequestError(response.statusText, status, { - headers, + throw new import_request_error.RequestError(response.statusText, status, { + response: { + url, + status, + headers, + data: void 0 + }, request: requestOptions }); } - if (status === 304) { - throw new requestError.RequestError("Not modified", status, { - headers, + throw new import_request_error.RequestError("Not modified", status, { + response: { + url, + status, + headers, + data: await getResponseData(response) + }, request: requestOptions }); } - if (status >= 400) { - return response.text().then(message => { - const error = new requestError.RequestError(message, status, { + const data = await getResponseData(response); + const error = new import_request_error.RequestError(toErrorMessage(data), status, { + response: { + url, + status, headers, - request: requestOptions - }); - - try { - let responseBody = JSON.parse(error.message); - Object.assign(error, responseBody); - let errors = responseBody.errors; // Assumption `errors` would always be in Array format - - error.message = error.message + ": " + errors.map(JSON.stringify).join(", "); - } catch (e) {// ignore, see octokit/rest.js#684 - } - - throw error; + data + }, + request: requestOptions }); + throw error; } - - const contentType = response.headers.get("content-type"); - - if (/application\/json/.test(contentType)) { - return response.json(); - } - - if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { - return response.text(); - } - - return getBufferResponse(response); - }).then(data => { + return parseSuccessResponseBody ? 
await getResponseData(response) : response.body; + }).then((data) => { return { status, url, headers, data }; - }).catch(error => { - if (error instanceof requestError.RequestError) { + }).catch((error) => { + if (error instanceof import_request_error.RequestError) throw error; + else if (error.name === "AbortError") + throw error; + let message = error.message; + if (error.name === "TypeError" && "cause" in error) { + if (error.cause instanceof Error) { + message = error.cause.message; + } else if (typeof error.cause === "string") { + message = error.cause; + } } - - throw new requestError.RequestError(error.message, 500, { - headers, + throw new import_request_error.RequestError(message, 500, { request: requestOptions }); }); } +async function getResponseData(response) { + const contentType = response.headers.get("content-type"); + if (/application\/json/.test(contentType)) { + return response.json().catch(() => response.text()).catch(() => ""); + } + if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { + return response.text(); + } + return getBufferResponse(response); +} +function toErrorMessage(data) { + if (typeof data === "string") + return data; + if ("message" in data) { + if (Array.isArray(data.errors)) { + return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}`; + } + return data.message; + } + return `Unknown error: ${JSON.stringify(data)}`; +} +// pkg/dist-src/with-defaults.js function withDefaults(oldEndpoint, newDefaults) { - const endpoint = oldEndpoint.defaults(newDefaults); - - const newApi = function (route, parameters) { - const endpointOptions = endpoint.merge(route, parameters); - + const endpoint2 = oldEndpoint.defaults(newDefaults); + const newApi = function(route, parameters) { + const endpointOptions = endpoint2.merge(route, parameters); if (!endpointOptions.request || !endpointOptions.request.hook) { - return fetchWrapper(endpoint.parse(endpointOptions)); + return fetchWrapper(endpoint2.parse(endpointOptions)); } - - const request = (route, parameters) => { - return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters))); + const request2 = (route2, parameters2) => { + return fetchWrapper( + endpoint2.parse(endpoint2.merge(route2, parameters2)) + ); }; - - Object.assign(request, { - endpoint, - defaults: withDefaults.bind(null, endpoint) + Object.assign(request2, { + endpoint: endpoint2, + defaults: withDefaults.bind(null, endpoint2) }); - return endpointOptions.request.hook(request, endpointOptions); + return endpointOptions.request.hook(request2, endpointOptions); }; - return Object.assign(newApi, { - endpoint, - defaults: withDefaults.bind(null, endpoint) + endpoint: endpoint2, + defaults: withDefaults.bind(null, endpoint2) }); } -const request = withDefaults(endpoint.endpoint, { +// pkg/dist-src/index.js +var request = withDefaults(import_endpoint.endpoint, { headers: { - "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}` + "user-agent": `octokit-request.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}` } }); - -exports.request = request; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 9062: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -/*! - * is-plain-object - * - * Copyright (c) 2014-2017, Jon Schlinkert. - * Released under the MIT License. 
- */ - -function isObject(o) { - return Object.prototype.toString.call(o) === '[object Object]'; -} - -function isPlainObject(o) { - var ctor,prot; - - if (isObject(o) === false) return false; - - // If has modified constructor - ctor = o.constructor; - if (ctor === undefined) return true; - - // If has modified prototype - prot = ctor.prototype; - if (isObject(prot) === false) return false; - - // If constructor does not have an Object-specific method - if (prot.hasOwnProperty('isPrototypeOf') === false) { - return false; - } - - // Most likely a plain Object - return true; -} - -exports.isPlainObject = isPlainObject; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); /***/ }), @@ -7782,6 +10202,7 @@ const objectTypeNames = [ 'Observable', 'Array', 'Buffer', + 'Blob', 'Object', 'RegExp', 'Date', @@ -7795,6 +10216,8 @@ const objectTypeNames = [ 'DataView', 'Promise', 'URL', + 'FormData', + 'URLSearchParams', 'HTMLElement', ...typedArrayTypeNames ]; @@ -7890,11 +10313,12 @@ is.array = (value, assertion) => { return value.every(assertion); }; is.buffer = (value) => { var _a, _b, _c, _d; return (_d = (_c = (_b = (_a = value) === null || _a === void 0 ? void 0 : _a.constructor) === null || _b === void 0 ? void 0 : _b.isBuffer) === null || _c === void 0 ? void 0 : _c.call(_b, value)) !== null && _d !== void 0 ? _d : false; }; +is.blob = (value) => isObjectOfType('Blob')(value); is.nullOrUndefined = (value) => is.null_(value) || is.undefined(value); is.object = (value) => !is.null_(value) && (typeof value === 'object' || is.function_(value)); is.iterable = (value) => { var _a; return is.function_((_a = value) === null || _a === void 0 ? void 0 : _a[Symbol.iterator]); }; is.asyncIterable = (value) => { var _a; return is.function_((_a = value) === null || _a === void 0 ? void 0 : _a[Symbol.asyncIterator]); }; -is.generator = (value) => is.iterable(value) && is.function_(value.next) && is.function_(value.throw); +is.generator = (value) => { var _a, _b; return is.iterable(value) && is.function_((_a = value) === null || _a === void 0 ? void 0 : _a.next) && is.function_((_b = value) === null || _b === void 0 ? void 0 : _b.throw); }; is.asyncGenerator = (value) => is.asyncIterable(value) && is.function_(value.next) && is.function_(value.throw); is.nativePromise = (value) => isObjectOfType('Promise')(value); const hasPromiseAPI = (value) => { @@ -7929,6 +10353,7 @@ is.bigUint64Array = isObjectOfType('BigUint64Array'); is.arrayBuffer = isObjectOfType('ArrayBuffer'); is.sharedArrayBuffer = isObjectOfType('SharedArrayBuffer'); is.dataView = isObjectOfType('DataView'); +is.enumCase = (value, targetEnum) => Object.values(targetEnum).includes(value); is.directInstanceOf = (instance, class_) => Object.getPrototypeOf(instance) === class_.prototype; is.urlInstance = (value) => isObjectOfType('URL')(value); is.urlString = (value) => { @@ -7943,7 +10368,6 @@ is.urlString = (value) => { return false; } }; -// TODO: Use the `not` operator with a type guard here when it's available. 
// Example: `is.truthy = (value: unknown): value is (not false | not 0 | not '' | not undefined | not null) => Boolean(value);` is.truthy = (value) => Boolean(value); // Example: `is.falsy = (value: unknown): value is (not true | 0 | '' | undefined | null) => Boolean(value);` @@ -7953,7 +10377,7 @@ is.primitive = (value) => is.null_(value) || isPrimitiveTypeName(typeof value); is.integer = (value) => Number.isInteger(value); is.safeInteger = (value) => Number.isSafeInteger(value); is.plainObject = (value) => { - // From: https://github.com/sindresorhus/is-plain-obj/blob/master/index.js + // From: https://github.com/sindresorhus/is-plain-obj/blob/main/index.js if (toString.call(value) !== '[object Object]') { return false; } @@ -8009,10 +10433,12 @@ is.oddInteger = isAbsoluteMod2(1); is.emptyArray = (value) => is.array(value) && value.length === 0; is.nonEmptyArray = (value) => is.array(value) && value.length > 0; is.emptyString = (value) => is.string(value) && value.length === 0; -// TODO: Use `not ''` when the `not` operator is available. -is.nonEmptyString = (value) => is.string(value) && value.length > 0; const isWhiteSpaceString = (value) => is.string(value) && !/\S/.test(value); is.emptyStringOrWhitespace = (value) => is.emptyString(value) || isWhiteSpaceString(value); +// TODO: Use `not ''` when the `not` operator is available. +is.nonEmptyString = (value) => is.string(value) && value.length > 0; +// TODO: Use `not ''` when the `not` operator is available. +is.nonEmptyStringAndNotWhitespace = (value) => is.string(value) && !is.emptyStringOrWhitespace(value); is.emptyObject = (value) => is.object(value) && !is.map(value) && !is.set(value) && Object.keys(value).length === 0; // TODO: Use `not` operator here to remove `Map` and `Set` from type guard: // - https://github.com/Microsoft/TypeScript/pull/29317 @@ -8021,6 +10447,10 @@ is.emptySet = (value) => is.set(value) && value.size === 0; is.nonEmptySet = (value) => is.set(value) && value.size > 0; is.emptyMap = (value) => is.map(value) && value.size === 0; is.nonEmptyMap = (value) => is.map(value) && value.size > 0; +// `PropertyKey` is any value that can be used as an object key (string, number, or symbol) +is.propertyKey = (value) => is.any([is.string, is.number, is.symbol], value); +is.formData = (value) => isObjectOfType('FormData')(value); +is.urlSearchParams = (value) => isObjectOfType('URLSearchParams')(value); const predicateOnArray = (method, predicate, values) => { if (!is.function_(predicate)) { throw new TypeError(`Invalid predicate: ${JSON.stringify(predicate)}`); @@ -8035,9 +10465,15 @@ is.any = (predicate, ...values) => { return predicates.some(singlePredicate => predicateOnArray(Array.prototype.some, singlePredicate, values)); }; is.all = (predicate, ...values) => predicateOnArray(Array.prototype.every, predicate, values); -const assertType = (condition, description, value) => { +const assertType = (condition, description, value, options = {}) => { if (!condition) { - throw new TypeError(`Expected value which is \`${description}\`, received value of type \`${is(value)}\`.`); + const { multipleValues } = options; + const valuesMessage = multipleValues ? 
+ `received values of types ${[ + ...new Set(value.map(singleValue => `\`${is(singleValue)}\``)) + ].join(', ')}` : + `received value of type \`${is(value)}\``; + throw new TypeError(`Expected value which is \`${description}\`, ${valuesMessage}.`); } }; exports.assert = { @@ -8061,6 +10497,7 @@ exports.assert = { } }, buffer: (value) => assertType(is.buffer(value), 'Buffer', value), + blob: (value) => assertType(is.blob(value), 'Blob', value), nullOrUndefined: (value) => assertType(is.nullOrUndefined(value), "null or undefined" /* nullOrUndefined */, value), object: (value) => assertType(is.object(value), 'Object', value), iterable: (value) => assertType(is.iterable(value), "Iterable" /* iterable */, value), @@ -8096,6 +10533,7 @@ exports.assert = { arrayBuffer: (value) => assertType(is.arrayBuffer(value), 'ArrayBuffer', value), sharedArrayBuffer: (value) => assertType(is.sharedArrayBuffer(value), 'SharedArrayBuffer', value), dataView: (value) => assertType(is.dataView(value), 'DataView', value), + enumCase: (value, targetEnum) => assertType(is.enumCase(value, targetEnum), 'EnumCase', value), urlInstance: (value) => assertType(is.urlInstance(value), 'URL', value), urlString: (value) => assertType(is.urlString(value), "string with a URL" /* urlString */, value), truthy: (value) => assertType(is.truthy(value), "truthy" /* truthy */, value), @@ -8114,14 +10552,18 @@ exports.assert = { emptyArray: (value) => assertType(is.emptyArray(value), "empty array" /* emptyArray */, value), nonEmptyArray: (value) => assertType(is.nonEmptyArray(value), "non-empty array" /* nonEmptyArray */, value), emptyString: (value) => assertType(is.emptyString(value), "empty string" /* emptyString */, value), - nonEmptyString: (value) => assertType(is.nonEmptyString(value), "non-empty string" /* nonEmptyString */, value), emptyStringOrWhitespace: (value) => assertType(is.emptyStringOrWhitespace(value), "empty string or whitespace" /* emptyStringOrWhitespace */, value), + nonEmptyString: (value) => assertType(is.nonEmptyString(value), "non-empty string" /* nonEmptyString */, value), + nonEmptyStringAndNotWhitespace: (value) => assertType(is.nonEmptyStringAndNotWhitespace(value), "non-empty string and not whitespace" /* nonEmptyStringAndNotWhitespace */, value), emptyObject: (value) => assertType(is.emptyObject(value), "empty object" /* emptyObject */, value), nonEmptyObject: (value) => assertType(is.nonEmptyObject(value), "non-empty object" /* nonEmptyObject */, value), emptySet: (value) => assertType(is.emptySet(value), "empty set" /* emptySet */, value), nonEmptySet: (value) => assertType(is.nonEmptySet(value), "non-empty set" /* nonEmptySet */, value), emptyMap: (value) => assertType(is.emptyMap(value), "empty map" /* emptyMap */, value), nonEmptyMap: (value) => assertType(is.nonEmptyMap(value), "non-empty map" /* nonEmptyMap */, value), + propertyKey: (value) => assertType(is.propertyKey(value), 'PropertyKey', value), + formData: (value) => assertType(is.formData(value), 'FormData', value), + urlSearchParams: (value) => assertType(is.urlSearchParams(value), 'URLSearchParams', value), // Numbers. 
evenInteger: (value) => assertType(is.evenInteger(value), "even integer" /* evenInteger */, value), oddInteger: (value) => assertType(is.oddInteger(value), "odd integer" /* oddInteger */, value), @@ -8129,8 +10571,10 @@ exports.assert = { directInstanceOf: (instance, class_) => assertType(is.directInstanceOf(instance, class_), "T" /* directInstanceOf */, instance), inRange: (value, range) => assertType(is.inRange(value, range), "in range" /* inRange */, value), // Variadic functions. - any: (predicate, ...values) => assertType(is.any(predicate, ...values), "predicate returns truthy for any value" /* any */, values), - all: (predicate, ...values) => assertType(is.all(predicate, ...values), "predicate returns truthy for all values" /* all */, values) + any: (predicate, ...values) => { + return assertType(is.any(predicate, ...values), "predicate returns truthy for any value" /* any */, values, { multipleValues: true }); + }, + all: (predicate, ...values) => assertType(is.all(predicate, ...values), "predicate returns truthy for all values" /* all */, values, { multipleValues: true }) }; // Some few keywords are reserved, but we'll populate them for Node.js users // See https://github.com/Microsoft/TypeScript/issues/2536 @@ -8156,10 +10600,10 @@ Object.defineProperties(exports.assert, { value: exports.assert.null_ } }); -exports.default = is; +exports["default"] = is; // For CommonJS default export support module.exports = is; -module.exports.default = is; +module.exports["default"] = is; module.exports.assert = exports.assert; @@ -8172,8 +10616,12 @@ module.exports.assert = exports.assert; Object.defineProperty(exports, "__esModule", ({ value: true })); const defer_to_connect_1 = __nccwpck_require__(6214); +const util_1 = __nccwpck_require__(3837); const nodejsMajorVersion = Number(process.versions.node.split('.')[0]); const timer = (request) => { + if (request.timings) { + return request.timings; + } const timings = { start: Date.now(), socket: undefined, @@ -8211,17 +10659,21 @@ const timer = (request) => { }; }; handleError(request); - request.prependOnceListener('abort', () => { + const onAbort = () => { timings.abort = Date.now(); // Let the `end` response event be responsible for setting the total phase, // unless the Node.js major version is >= 13. if (!timings.response || nodejsMajorVersion >= 13) { timings.phases.total = Date.now() - timings.start; } - }); + }; + request.prependOnceListener('abort', onAbort); const onSocket = (socket) => { timings.socket = Date.now(); timings.phases.wait = timings.socket - timings.start; + if (util_1.types.isProxy(socket)) { + return; + } const lookupListener = () => { timings.lookup = Date.now(); timings.phases.dns = timings.lookup - timings.socket; @@ -8254,7 +10706,7 @@ const timer = (request) => { const onUpload = () => { var _a; timings.upload = Date.now(); - timings.phases.request = timings.upload - (_a = timings.secureConnect, (_a !== null && _a !== void 0 ? _a : timings.connect)); + timings.phases.request = timings.upload - ((_a = timings.secureConnect) !== null && _a !== void 0 ? 
_a : timings.connect); }; const writableFinished = () => { if (typeof request.writableFinished === 'boolean') { @@ -8279,13 +10731,14 @@ const timer = (request) => { timings.phases.download = timings.end - timings.response; timings.phases.total = timings.end - timings.start; }); + response.prependOnceListener('aborted', onAbort); }); return timings; }; -exports.default = timer; +exports["default"] = timer; // For CommonJS default export support module.exports = timer; -module.exports.default = timer; +module.exports["default"] = timer; /***/ }), @@ -8293,292 +10746,311 @@ module.exports.default = timer; /***/ 6761: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var Utils = __nccwpck_require__(5182); -var fs = Utils.FileSystem.require(), - pth = __nccwpck_require__(5622); +const Utils = __nccwpck_require__(5182); +const pth = __nccwpck_require__(1017); +const ZipEntry = __nccwpck_require__(4057); +const ZipFile = __nccwpck_require__(7744); -fs.existsSync = fs.existsSync || pth.existsSync; +const get_Bool = (val, def) => (typeof val === "boolean" ? val : def); +const get_Str = (val, def) => (typeof val === "string" ? val : def); -var ZipEntry = __nccwpck_require__(4057), - ZipFile = __nccwpck_require__(7744); +const defaultOptions = { + // option "noSort" : if true it disables files sorting + noSort: false, + // read entries during load (initial loading may be slower) + readEntries: false, + // default method is none + method: Utils.Constants.NONE, + // file system + fs: null +}; -var isWin = /^win/.test(process.platform); +module.exports = function (/**String*/ input, /** object */ options) { + let inBuffer = null; -function canonical(p) { - var safeSuffix = pth.normalize(p).replace(/^(\.\.(\/|\\|$))+/, ''); - return pth.join('./', safeSuffix); -} + // create object based default options, allowing them to be overwritten + const opts = Object.assign(Object.create(null), defaultOptions); -module.exports = function (/**String*/input) { - var _zip = undefined, - _filename = ""; + // test input variable + if (input && "object" === typeof input) { + // if value is not buffer we accept it to be object with options + if (!(input instanceof Uint8Array)) { + Object.assign(opts, input); + input = opts.input ? 
opts.input : undefined; + if (opts.input) delete opts.input; + } - if (input && typeof input === "string") { // load zip file - if (fs.existsSync(input)) { - _filename = input; - _zip = new ZipFile(input, Utils.Constants.FILE); - } else { - throw new Error(Utils.Errors.INVALID_FILENAME); - } - } else if (input && Buffer.isBuffer(input)) { // load buffer - _zip = new ZipFile(input, Utils.Constants.BUFFER); - } else { // create new zip file - _zip = new ZipFile(null, Utils.Constants.NONE); - } + // if input is buffer + if (Buffer.isBuffer(input)) { + inBuffer = input; + opts.method = Utils.Constants.BUFFER; + input = undefined; + } + } - function sanitize(prefix, name) { - prefix = pth.resolve(pth.normalize(prefix)); - var parts = name.split('/'); - for (var i = 0, l = parts.length; i < l; i++) { - var path = pth.normalize(pth.join(prefix, parts.slice(i, l).join(pth.sep))); - if (path.indexOf(prefix) === 0) { - return path; - } - } - return pth.normalize(pth.join(prefix, pth.basename(name))); - } + // assign options + Object.assign(opts, options); - function getEntry(/**Object*/entry) { - if (entry && _zip) { - var item; - // If entry was given as a file name - if (typeof entry === "string") - item = _zip.getEntry(entry); - // if entry was given as a ZipEntry object - if (typeof entry === "object" && typeof entry.entryName !== "undefined" && typeof entry.header !== "undefined") - item = _zip.getEntry(entry.entryName); - - if (item) { - return item; - } - } - return null; - } + // instanciate utils filesystem + const filetools = new Utils(opts); + + // if input is file name we retrieve its content + if (input && "string" === typeof input) { + // load zip file + if (filetools.fs.existsSync(input)) { + opts.method = Utils.Constants.FILE; + opts.filename = input; + inBuffer = filetools.fs.readFileSync(input); + } else { + throw new Error(Utils.Errors.INVALID_FILENAME); + } + } + + // create variable + const _zip = new ZipFile(inBuffer, opts); + + const { canonical, sanitize } = Utils; + + function getEntry(/**Object*/ entry) { + if (entry && _zip) { + var item; + // If entry was given as a file name + if (typeof entry === "string") item = _zip.getEntry(entry); + // if entry was given as a ZipEntry object + if (typeof entry === "object" && typeof entry.entryName !== "undefined" && typeof entry.header !== "undefined") item = _zip.getEntry(entry.entryName); + + if (item) { + return item; + } + } + return null; + } - function fixPath(zipPath){ + function fixPath(zipPath) { + const { join, normalize, sep } = pth.posix; // convert windows file separators and normalize - zipPath = pth.posix.normalize(zipPath.split("\\").join("/")); - // cleanup, remove invalid folder names - var names = zipPath.split("/").filter((c) => c !== "" && c !== "." && c !== ".."); - // if we have name we return it - return names.length ? 
names.join("/") + "/" : ""; + return join(".", normalize(sep + zipPath.split("\\").join(sep) + sep)); } - return { - /** - * Extracts the given entry from the archive and returns the content as a Buffer object - * @param entry ZipEntry object or String with the full path of the entry - * - * @return Buffer or Null in case of error - */ - readFile: function (/**Object*/entry, /*String, Buffer*/pass) { - var item = getEntry(entry); - return item && item.getData(pass) || null; - }, + return { + /** + * Extracts the given entry from the archive and returns the content as a Buffer object + * @param entry ZipEntry object or String with the full path of the entry + * + * @return Buffer or Null in case of error + */ + readFile: function (/**Object*/ entry, /*String, Buffer*/ pass) { + var item = getEntry(entry); + return (item && item.getData(pass)) || null; + }, - /** - * Asynchronous readFile - * @param entry ZipEntry object or String with the full path of the entry - * @param callback - * - * @return Buffer or Null in case of error - */ - readFileAsync: function (/**Object*/entry, /**Function*/callback) { - var item = getEntry(entry); - if (item) { - item.getDataAsync(callback); - } else { - callback(null, "getEntry failed for:" + entry) - } - }, + /** + * Asynchronous readFile + * @param entry ZipEntry object or String with the full path of the entry + * @param callback + * + * @return Buffer or Null in case of error + */ + readFileAsync: function (/**Object*/ entry, /**Function*/ callback) { + var item = getEntry(entry); + if (item) { + item.getDataAsync(callback); + } else { + callback(null, "getEntry failed for:" + entry); + } + }, - /** - * Extracts the given entry from the archive and returns the content as plain text in the given encoding - * @param entry ZipEntry object or String with the full path of the entry - * @param encoding Optional. If no encoding is specified utf8 is used - * - * @return String - */ - readAsText: function (/**Object*/entry, /**String=*/encoding) { - var item = getEntry(entry); - if (item) { - var data = item.getData(); - if (data && data.length) { - return data.toString(encoding || "utf8"); - } - } - return ""; - }, + /** + * Extracts the given entry from the archive and returns the content as plain text in the given encoding + * @param entry ZipEntry object or String with the full path of the entry + * @param encoding Optional. If no encoding is specified utf8 is used + * + * @return String + */ + readAsText: function (/**Object*/ entry, /**String=*/ encoding) { + var item = getEntry(entry); + if (item) { + var data = item.getData(); + if (data && data.length) { + return data.toString(encoding || "utf8"); + } + } + return ""; + }, - /** - * Asynchronous readAsText - * @param entry ZipEntry object or String with the full path of the entry - * @param callback - * @param encoding Optional. If no encoding is specified utf8 is used - * - * @return String - */ - readAsTextAsync: function (/**Object*/entry, /**Function*/callback, /**String=*/encoding) { - var item = getEntry(entry); - if (item) { - item.getDataAsync(function (data, err) { - if (err) { - callback(data, err); - return; - } + /** + * Asynchronous readAsText + * @param entry ZipEntry object or String with the full path of the entry + * @param callback + * @param encoding Optional. 
If no encoding is specified utf8 is used + * + * @return String + */ + readAsTextAsync: function (/**Object*/ entry, /**Function*/ callback, /**String=*/ encoding) { + var item = getEntry(entry); + if (item) { + item.getDataAsync(function (data, err) { + if (err) { + callback(data, err); + return; + } - if (data && data.length) { - callback(data.toString(encoding || "utf8")); - } else { - callback(""); - } - }) - } else { - callback(""); - } - }, + if (data && data.length) { + callback(data.toString(encoding || "utf8")); + } else { + callback(""); + } + }); + } else { + callback(""); + } + }, - /** - * Remove the entry from the file or the entry and all it's nested directories and files if the given entry is a directory - * - * @param entry - */ - deleteFile: function (/**Object*/entry) { // @TODO: test deleteFile - var item = getEntry(entry); - if (item) { - _zip.deleteEntry(item.entryName); - } - }, + /** + * Remove the entry from the file or the entry and all it's nested directories and files if the given entry is a directory + * + * @param entry + */ + deleteFile: function (/**Object*/ entry) { + // @TODO: test deleteFile + var item = getEntry(entry); + if (item) { + _zip.deleteEntry(item.entryName); + } + }, - /** - * Adds a comment to the zip. The zip must be rewritten after adding the comment. - * - * @param comment - */ - addZipComment: function (/**String*/comment) { // @TODO: test addZipComment - _zip.comment = comment; - }, + /** + * Adds a comment to the zip. The zip must be rewritten after adding the comment. + * + * @param comment + */ + addZipComment: function (/**String*/ comment) { + // @TODO: test addZipComment + _zip.comment = comment; + }, - /** - * Returns the zip comment - * - * @return String - */ - getZipComment: function () { - return _zip.comment || ''; - }, + /** + * Returns the zip comment + * + * @return String + */ + getZipComment: function () { + return _zip.comment || ""; + }, - /** - * Adds a comment to a specified zipEntry. The zip must be rewritten after adding the comment - * The comment cannot exceed 65535 characters in length - * - * @param entry - * @param comment - */ - addZipEntryComment: function (/**Object*/entry, /**String*/comment) { - var item = getEntry(entry); - if (item) { - item.comment = comment; - } - }, + /** + * Adds a comment to a specified zipEntry. The zip must be rewritten after adding the comment + * The comment cannot exceed 65535 characters in length + * + * @param entry + * @param comment + */ + addZipEntryComment: function (/**Object*/ entry, /**String*/ comment) { + var item = getEntry(entry); + if (item) { + item.comment = comment; + } + }, - /** - * Returns the comment of the specified entry - * - * @param entry - * @return String - */ - getZipEntryComment: function (/**Object*/entry) { - var item = getEntry(entry); - if (item) { - return item.comment || ''; - } - return '' - }, + /** + * Returns the comment of the specified entry + * + * @param entry + * @return String + */ + getZipEntryComment: function (/**Object*/ entry) { + var item = getEntry(entry); + if (item) { + return item.comment || ""; + } + return ""; + }, - /** - * Updates the content of an existing entry inside the archive. The zip must be rewritten after updating the content - * - * @param entry - * @param content - */ - updateFile: function (/**Object*/entry, /**Buffer*/content) { - var item = getEntry(entry); - if (item) { - item.setData(content); - } - }, + /** + * Updates the content of an existing entry inside the archive. 
The zip must be rewritten after updating the content + * + * @param entry + * @param content + */ + updateFile: function (/**Object*/ entry, /**Buffer*/ content) { + var item = getEntry(entry); + if (item) { + item.setData(content); + } + }, - /** - * Adds a file from the disk to the archive - * - * @param localPath File to add to zip - * @param zipPath Optional path inside the zip - * @param zipName Optional name for the file - */ - addLocalFile: function (/**String*/localPath, /**String=*/zipPath, /**String=*/zipName, /**String*/comment) { - if (fs.existsSync(localPath)) { - // fix ZipPath - zipPath = (zipPath) ? fixPath(zipPath) : ""; - - // p - local file name - var p = localPath.split("\\").join("/").split("/").pop(); - - // add file name into zippath - zipPath += (zipName) ? zipName : p; - - // read file attributes - const _attr = fs.statSync(localPath); - - // add file into zip file - this.addFile(zipPath, fs.readFileSync(localPath), comment, _attr) - } else { - throw new Error(Utils.Errors.FILE_NOT_FOUND.replace("%s", localPath)); - } - }, + /** + * Adds a file from the disk to the archive + * + * @param localPath File to add to zip + * @param zipPath Optional path inside the zip + * @param zipName Optional name for the file + */ + addLocalFile: function (/**String*/ localPath, /**String=*/ zipPath, /**String=*/ zipName, /**String*/ comment) { + if (filetools.fs.existsSync(localPath)) { + // fix ZipPath + zipPath = zipPath ? fixPath(zipPath) : ""; + + // p - local file name + var p = localPath.split("\\").join("/").split("/").pop(); + + // add file name into zippath + zipPath += zipName ? zipName : p; + + // read file attributes + const _attr = filetools.fs.statSync(localPath); + + // add file into zip file + this.addFile(zipPath, filetools.fs.readFileSync(localPath), comment, _attr); + } else { + throw new Error(Utils.Errors.FILE_NOT_FOUND.replace("%s", localPath)); + } + }, - /** - * Adds a local directory and all its nested files and directories to the archive - * - * @param localPath - * @param zipPath optional path inside zip - * @param filter optional RegExp or Function if files match will - * be included. - */ - addLocalFolder: function (/**String*/localPath, /**String=*/zipPath, /**=RegExp|Function*/filter) { + /** + * Adds a local directory and all its nested files and directories to the archive + * + * @param localPath + * @param zipPath optional path inside zip + * @param filter optional RegExp or Function if files match will + * be included. + * @param {number | object} attr - number as unix file permissions, object as filesystem Stats object + */ + addLocalFolder: function (/**String*/ localPath, /**String=*/ zipPath, /**=RegExp|Function*/ filter, /**=number|object*/ attr) { // Prepare filter - if (filter instanceof RegExp) { // if filter is RegExp wrap it - filter = (function (rx){ + if (filter instanceof RegExp) { + // if filter is RegExp wrap it + filter = (function (rx) { return function (filename) { return rx.test(filename); - } + }; })(filter); - } else if ('function' !== typeof filter) { // if filter is not function we will replace it + } else if ("function" !== typeof filter) { + // if filter is not function we will replace it filter = function () { return true; }; } // fix ZipPath - zipPath = (zipPath) ? fixPath(zipPath) : ""; + zipPath = zipPath ? 
fixPath(zipPath) : ""; // normalize the path first localPath = pth.normalize(localPath); - if (fs.existsSync(localPath)) { - - var items = Utils.findFiles(localPath), - self = this; + if (filetools.fs.existsSync(localPath)) { + const items = filetools.findFiles(localPath); + const self = this; if (items.length) { items.forEach(function (filepath) { var p = pth.relative(localPath, filepath).split("\\").join("/"); //windows fix if (filter(p)) { - var stats = fs.statSync(filepath); + var stats = filetools.fs.statSync(filepath); if (stats.isFile()) { - self.addFile(zipPath + p, fs.readFileSync(filepath), "", stats); + self.addFile(zipPath + p, filetools.fs.readFileSync(filepath), "", attr ? attr : stats); } else { - self.addFile(zipPath + p + '/', Buffer.alloc(0), "", stats); + self.addFile(zipPath + p + "/", Buffer.alloc(0), "", attr ? attr : stats); } } }); @@ -8588,15 +11060,15 @@ module.exports = function (/**String*/input) { } }, - /** - * Asynchronous addLocalFile - * @param localPath - * @param callback - * @param zipPath optional path inside zip - * @param filter optional RegExp or Function if files match will - * be included. - */ - addLocalFolderAsync: function (/*String*/localPath, /*Function*/callback, /*String*/zipPath, /*RegExp|Function*/filter) { + /** + * Asynchronous addLocalFile + * @param localPath + * @param callback + * @param zipPath optional path inside zip + * @param filter optional RegExp or Function if files match will + * be included. + */ + addLocalFolderAsync: function (/*String*/ localPath, /*Function*/ callback, /*String*/ zipPath, /*RegExp|Function*/ filter) { if (filter instanceof RegExp) { filter = (function (rx) { return function (filename) { @@ -8616,13 +11088,13 @@ module.exports = function (/**String*/input) { localPath = pth.normalize(localPath); var self = this; - fs.open(localPath, 'r', function (err) { - if (err && err.code === 'ENOENT') { + filetools.fs.open(localPath, "r", function (err) { + if (err && err.code === "ENOENT") { callback(undefined, Utils.Errors.FILE_NOT_FOUND.replace("%s", localPath)); } else if (err) { callback(undefined, err); } else { - var items = Utils.findFiles(localPath); + var items = filetools.findFiles(localPath); var i = -1; var next = function () { @@ -8630,12 +11102,15 @@ module.exports = function (/**String*/input) { if (i < items.length) { var filepath = items[i]; var p = pth.relative(localPath, filepath).split("\\").join("/"); //windows fix - p = p.normalize('NFD').replace(/[\u0300-\u036f]/g, '').replace(/[^\x20-\x7E]/g, '') // accent fix + p = p + .normalize("NFD") + .replace(/[\u0300-\u036f]/g, "") + .replace(/[^\x20-\x7E]/g, ""); // accent fix if (filter(p)) { - fs.stat(filepath, function (er0, stats) { + filetools.fs.stat(filepath, function (er0, stats) { if (er0) callback(undefined, er0); if (stats.isFile()) { - fs.readFile(filepath, function (er1, data) { + filetools.fs.readFile(filepath, function (er1, data) { if (er1) { callback(undefined, er1); } else { @@ -8649,327 +11124,389 @@ module.exports = function (/**String*/input) { } }); } else { - next(); + process.nextTick(() => { + next(); + }); } - } else { callback(true, undefined); } - } + }; next(); } }); }, - addLocalFolderPromise: function (/*String*/ localPath, /* object */ options) { + /** + * + * @param {string} localPath - path where files will be extracted + * @param {object} props - optional properties + * @param {string} props.zipPath - optional path inside zip + * @param {regexp, function} props.filter - RegExp or Function if files match will be 
included. + */ + addLocalFolderPromise: function (/*String*/ localPath, /* object */ props) { return new Promise((resolve, reject) => { - const { filter, zipPath } = Object.assign({}, options); - this.addLocalFolderAsync(localPath, + const { filter, zipPath } = Object.assign({}, props); + this.addLocalFolderAsync( + localPath, (done, err) => { if (err) reject(err); if (done) resolve(this); - }, zipPath, filter + }, + zipPath, + filter ); }); }, - /** - * Allows you to create a entry (file or directory) in the zip file. - * If you want to create a directory the entryName must end in / and a null buffer should be provided. - * Comment and attributes are optional - * - * @param entryName - * @param content - * @param comment - * @param attr - */ - addFile: function (/**String*/entryName, /**Buffer*/content, /**String*/comment, /**Number*/attr) { - // prepare new entry - var entry = new ZipEntry(); - entry.entryName = entryName; - entry.comment = comment || ""; - - var isStat = ('object' === typeof attr) && (attr instanceof fs.Stats); - - // last modification time from file stats - if (isStat){ - entry.header.time = attr.mtime; - } + /** + * Allows you to create a entry (file or directory) in the zip file. + * If you want to create a directory the entryName must end in / and a null buffer should be provided. + * Comment and attributes are optional + * + * @param {string} entryName + * @param {Buffer | string} content - file content as buffer or utf8 coded string + * @param {string} comment - file comment + * @param {number | object} attr - number as unix file permissions, object as filesystem Stats object + */ + addFile: function (/**String*/ entryName, /**Buffer*/ content, /**String*/ comment, /**Number*/ attr) { + let entry = getEntry(entryName); + const update = entry != null; - // Set file attribute - var fileattr = (entry.isDirectory) ? 0x10 : 0; // (MS-DOS directory flag) + // prepare new entry + if (!update) { + entry = new ZipEntry(); + entry.entryName = entryName; + } + entry.comment = comment || ""; - // extended attributes field for Unix - if('win32' !== process.platform){ - // set file type either S_IFDIR / S_IFREG - var unix = (entry.isDirectory) ? 0x4000 : 0x8000; + const isStat = "object" === typeof attr && attr instanceof filetools.fs.Stats; - if (isStat) { // File attributes from file stats - unix |= (0xfff & attr.mode); - }else if ('number' === typeof attr){ // attr from given attr values - unix |= (0xfff & attr); - }else{ // Default values: - unix |= (entry.isDirectory) ? 0o755 : 0o644; // permissions (drwxr-xr-x) or (-r-wr--r--) - } + // last modification time from file stats + if (isStat) { + entry.header.time = attr.mtime; + } - fileattr = (fileattr | (unix << 16)) >>> 0; // add attributes - } + // Set file attribute + var fileattr = entry.isDirectory ? 0x10 : 0; // (MS-DOS directory flag) - entry.attr = fileattr; + // extended attributes field for Unix + // set file type either S_IFDIR / S_IFREG + let unix = entry.isDirectory ? 0x4000 : 0x8000; - entry.setData(content); - _zip.setEntry(entry); - }, + if (isStat) { + // File attributes from file stats + unix |= 0xfff & attr.mode; + } else if ("number" === typeof attr) { + // attr from given attr values + unix |= 0xfff & attr; + } else { + // Default values: + unix |= entry.isDirectory ? 
0o755 : 0o644; // permissions (drwxr-xr-x) or (-r-wr--r--) + } - /** - * Returns an array of ZipEntry objects representing the files and folders inside the archive - * - * @return Array - */ - getEntries: function () { - if (_zip) { - return _zip.entries; - } else { - return []; - } - }, + fileattr = (fileattr | (unix << 16)) >>> 0; // add attributes - /** - * Returns a ZipEntry object representing the file or folder specified by ``name``. - * - * @param name - * @return ZipEntry - */ - getEntry: function (/**String*/name) { - return getEntry(name); - }, + entry.attr = fileattr; - getEntryCount: function() { - return _zip.getEntryCount(); - }, + entry.setData(content); + if (!update) _zip.setEntry(entry); + }, - forEach: function(callback) { - return _zip.forEach(callback); - }, + /** + * Returns an array of ZipEntry objects representing the files and folders inside the archive + * + * @return Array + */ + getEntries: function () { + return _zip ? _zip.entries : []; + }, - /** - * Extracts the given entry to the given targetPath - * If the entry is a directory inside the archive, the entire directory and it's subdirectories will be extracted - * - * @param entry ZipEntry object or String with the full path of the entry - * @param targetPath Target folder where to write the file - * @param maintainEntryPath If maintainEntryPath is true and the entry is inside a folder, the entry folder - * will be created in targetPath as well. Default is TRUE - * @param overwrite If the file already exists at the target path, the file will be overwriten if this is true. - * Default is FALSE + /** + * Returns a ZipEntry object representing the file or folder specified by ``name``. + * + * @param name + * @return ZipEntry + */ + getEntry: function (/**String*/ name) { + return getEntry(name); + }, + + getEntryCount: function () { + return _zip.getEntryCount(); + }, + + forEach: function (callback) { + return _zip.forEach(callback); + }, + + /** + * Extracts the given entry to the given targetPath + * If the entry is a directory inside the archive, the entire directory and it's subdirectories will be extracted + * + * @param entry ZipEntry object or String with the full path of the entry + * @param targetPath Target folder where to write the file + * @param maintainEntryPath If maintainEntryPath is true and the entry is inside a folder, the entry folder + * will be created in targetPath as well. Default is TRUE + * @param overwrite If the file already exists at the target path, the file will be overwriten if this is true. + * Default is FALSE + * @param keepOriginalPermission The file will be set as the permission from the entry if this is true. + * Default is FALSE * @param outFileName String If set will override the filename of the extracted file (Only works if the entry is a file) - * - * @return Boolean - */ - extractEntryTo: function (/**Object*/entry, /**String*/targetPath, /**Boolean*/maintainEntryPath, /**Boolean*/overwrite, /**String**/outFileName) { - overwrite = overwrite || false; - maintainEntryPath = typeof maintainEntryPath === "undefined" ? 
true : maintainEntryPath; - - var item = getEntry(entry); - if (!item) { - throw new Error(Utils.Errors.NO_ENTRY); - } + * + * @return Boolean + */ + extractEntryTo: function ( + /**Object*/ entry, + /**String*/ targetPath, + /**Boolean*/ maintainEntryPath, + /**Boolean*/ overwrite, + /**Boolean*/ keepOriginalPermission, + /**String**/ outFileName + ) { + overwrite = get_Bool(overwrite, false); + keepOriginalPermission = get_Bool(keepOriginalPermission, false); + maintainEntryPath = get_Bool(maintainEntryPath, true); + outFileName = get_Str(outFileName, get_Str(keepOriginalPermission, undefined)); - var entryName = canonical(item.entryName); + var item = getEntry(entry); + if (!item) { + throw new Error(Utils.Errors.NO_ENTRY); + } - var target = sanitize(targetPath,outFileName && !item.isDirectory ? outFileName : (maintainEntryPath ? entryName : pth.basename(entryName))); + var entryName = canonical(item.entryName); - if (item.isDirectory) { - target = pth.resolve(target, ".."); - var children = _zip.getEntryChildren(item); - children.forEach(function (child) { - if (child.isDirectory) return; - var content = child.getData(); - if (!content) { - throw new Error(Utils.Errors.CANT_EXTRACT_FILE); - } - var name = canonical(child.entryName) - var childName = sanitize(targetPath, maintainEntryPath ? name : pth.basename(name)); - // The reverse operation for attr depend on method addFile() - var fileAttr = child.attr ? (((child.attr >>> 0) | 0) >> 16) & 0xfff : 0; - Utils.writeFileTo(childName, content, overwrite, fileAttr); - }); - return true; - } + var target = sanitize(targetPath, outFileName && !item.isDirectory ? outFileName : maintainEntryPath ? entryName : pth.basename(entryName)); + + if (item.isDirectory) { + var children = _zip.getEntryChildren(item); + children.forEach(function (child) { + if (child.isDirectory) return; + var content = child.getData(); + if (!content) { + throw new Error(Utils.Errors.CANT_EXTRACT_FILE); + } + var name = canonical(child.entryName); + var childName = sanitize(targetPath, maintainEntryPath ? name : pth.basename(name)); + // The reverse operation for attr depend on method addFile() + const fileAttr = keepOriginalPermission ? child.header.fileAttr : undefined; + filetools.writeFileTo(childName, content, overwrite, fileAttr); + }); + return true; + } - var content = item.getData(); - if (!content) throw new Error(Utils.Errors.CANT_EXTRACT_FILE); + var content = item.getData(); + if (!content) throw new Error(Utils.Errors.CANT_EXTRACT_FILE); - if (fs.existsSync(target) && !overwrite) { - throw new Error(Utils.Errors.CANT_OVERRIDE); - } - // The reverse operation for attr depend on method addFile() - var fileAttr = item.attr ? (((item.attr >>> 0) | 0) >> 16) & 0xfff : 0; - Utils.writeFileTo(target, content, overwrite, fileAttr); + if (filetools.fs.existsSync(target) && !overwrite) { + throw new Error(Utils.Errors.CANT_OVERRIDE); + } + // The reverse operation for attr depend on method addFile() + const fileAttr = keepOriginalPermission ? 
entry.header.fileAttr : undefined; + filetools.writeFileTo(target, content, overwrite, fileAttr); - return true; - }, + return true; + }, - /** - * Test the archive - * - */ - test: function (pass) { - if (!_zip) { - return false; - } + /** + * Test the archive + * + */ + test: function (pass) { + if (!_zip) { + return false; + } - for (var entry in _zip.entries) { - try { - if (entry.isDirectory) { - continue; - } - var content = _zip.entries[entry].getData(pass); - if (!content) { - return false; - } - } catch (err) { - return false; - } - } - return true; - }, + for (var entry in _zip.entries) { + try { + if (entry.isDirectory) { + continue; + } + var content = _zip.entries[entry].getData(pass); + if (!content) { + return false; + } + } catch (err) { + return false; + } + } + return true; + }, - /** - * Extracts the entire archive to the given location - * - * @param targetPath Target location - * @param overwrite If the file already exists at the target path, the file will be overwriten if this is true. - * Default is FALSE - */ - extractAllTo: function (/**String*/targetPath, /**Boolean*/overwrite, /*String, Buffer*/pass) { - overwrite = overwrite || false; - if (!_zip) { - throw new Error(Utils.Errors.NO_ZIP); - } - _zip.entries.forEach(function (entry) { - var entryName = sanitize(targetPath, canonical(entry.entryName.toString())); - if (entry.isDirectory) { - Utils.makeDir(entryName); - return; - } - var content = entry.getData(pass); - if (!content) { - throw new Error(Utils.Errors.CANT_EXTRACT_FILE); - } - // The reverse operation for attr depend on method addFile() - var fileAttr = entry.attr ? (((entry.attr >>> 0) | 0) >> 16) & 0xfff : 0; - Utils.writeFileTo(entryName, content, overwrite, fileAttr); - try { - fs.utimesSync(entryName, entry.header.time, entry.header.time) - } catch (err) { - throw new Error(Utils.Errors.CANT_EXTRACT_FILE); - } - }) - }, + /** + * Extracts the entire archive to the given location + * + * @param targetPath Target location + * @param overwrite If the file already exists at the target path, the file will be overwriten if this is true. + * Default is FALSE + * @param keepOriginalPermission The file will be set as the permission from the entry if this is true. + * Default is FALSE + */ + extractAllTo: function (/**String*/ targetPath, /**Boolean*/ overwrite, /**Boolean*/ keepOriginalPermission, /*String, Buffer*/ pass) { + overwrite = get_Bool(overwrite, false); + pass = get_Str(keepOriginalPermission, pass); + keepOriginalPermission = get_Bool(keepOriginalPermission, false); + if (!_zip) { + throw new Error(Utils.Errors.NO_ZIP); + } + _zip.entries.forEach(function (entry) { + var entryName = sanitize(targetPath, canonical(entry.entryName.toString())); + if (entry.isDirectory) { + filetools.makeDir(entryName); + return; + } + var content = entry.getData(pass); + if (!content) { + throw new Error(Utils.Errors.CANT_EXTRACT_FILE); + } + // The reverse operation for attr depend on method addFile() + const fileAttr = keepOriginalPermission ? entry.header.fileAttr : undefined; + filetools.writeFileTo(entryName, content, overwrite, fileAttr); + try { + filetools.fs.utimesSync(entryName, entry.header.time, entry.header.time); + } catch (err) { + throw new Error(Utils.Errors.CANT_EXTRACT_FILE); + } + }); + }, - /** - * Asynchronous extractAllTo - * - * @param targetPath Target location - * @param overwrite If the file already exists at the target path, the file will be overwriten if this is true. 
- * Default is FALSE - * @param callback - */ - extractAllToAsync: function (/**String*/targetPath, /**Boolean*/overwrite, /**Function*/callback) { - if (!callback) { - callback = function() {} - } - overwrite = overwrite || false; - if (!_zip) { - callback(new Error(Utils.Errors.NO_ZIP)); - return; - } + /** + * Asynchronous extractAllTo + * + * @param targetPath Target location + * @param overwrite If the file already exists at the target path, the file will be overwriten if this is true. + * Default is FALSE + * @param keepOriginalPermission The file will be set as the permission from the entry if this is true. + * Default is FALSE + * @param callback The callback will be executed when all entries are extracted successfully or any error is thrown. + */ + extractAllToAsync: function (/**String*/ targetPath, /**Boolean*/ overwrite, /**Boolean*/ keepOriginalPermission, /**Function*/ callback) { + overwrite = get_Bool(overwrite, false); + if (typeof keepOriginalPermission === "function" && !callback) callback = keepOriginalPermission; + keepOriginalPermission = get_Bool(keepOriginalPermission, false); + if (!callback) { + callback = function (err) { + throw new Error(err); + }; + } + if (!_zip) { + callback(new Error(Utils.Errors.NO_ZIP)); + return; + } - var entries = _zip.entries; - var i = entries.length; - entries.forEach(function (entry) { - if (i <= 0) return; // Had an error already + targetPath = pth.resolve(targetPath); + // convert entryName to + const getPath = (entry) => sanitize(targetPath, pth.normalize(canonical(entry.entryName.toString()))); + const getError = (msg, file) => new Error(msg + ': "' + file + '"'); - var entryName = pth.normalize(canonical(entry.entryName.toString())); + // separate directories from files + const dirEntries = []; + const fileEntries = new Set(); + _zip.entries.forEach((e) => { + if (e.isDirectory) { + dirEntries.push(e); + } else { + fileEntries.add(e); + } + }); - if (entry.isDirectory) { - Utils.makeDir(sanitize(targetPath, entryName)); - if (--i === 0) - callback(undefined); - return; - } - entry.getDataAsync(function (content, err) { - if (i <= 0) return; - if (err) { - callback(new Error(err)); - return; - } - if (!content) { - i = 0; - callback(new Error(Utils.Errors.CANT_EXTRACT_FILE)); - return; - } + // Create directory entries first synchronously + // this prevents race condition and assures folders are there before writing files + for (const entry of dirEntries) { + const dirPath = getPath(entry); + // The reverse operation for attr depend on method addFile() + const dirAttr = keepOriginalPermission ? entry.header.fileAttr : undefined; + try { + filetools.makeDir(dirPath); + if (dirAttr) filetools.fs.chmodSync(dirPath, dirAttr); + // in unix timestamp will change if files are later added to folder, but still + filetools.fs.utimesSync(dirPath, entry.header.time, entry.header.time); + } catch (er) { + callback(getError("Unable to create folder", dirPath)); + } + } - // The reverse operation for attr depend on method addFile() - var fileAttr = entry.attr ? 
(((entry.attr >>> 0) | 0) >> 16) & 0xfff : 0; - Utils.writeFileToAsync(sanitize(targetPath, entryName), content, overwrite, fileAttr, function (succ) { - try { - fs.utimesSync(pth.resolve(targetPath, entryName), entry.header.time, entry.header.time); - } catch (err) { - callback(new Error('Unable to set utimes')); - } - if (i <= 0) return; - if (!succ) { - i = 0; - callback(new Error('Unable to write')); - return; - } - if (--i === 0) - callback(undefined); - }); - }); - }) - }, + // callback wrapper, for some house keeping + const done = () => { + if (fileEntries.size === 0) { + callback(); + } + }; - /** - * Writes the newly created zip file to disk at the specified location or if a zip was opened and no ``targetFileName`` is provided, it will overwrite the opened zip - * - * @param targetFileName - * @param callback - */ - writeZip: function (/**String*/targetFileName, /**Function*/callback) { - if (arguments.length === 1) { - if (typeof targetFileName === "function") { - callback = targetFileName; - targetFileName = ""; - } - } + // Extract file entries asynchronously + for (const entry of fileEntries.values()) { + const entryName = pth.normalize(canonical(entry.entryName.toString())); + const filePath = sanitize(targetPath, entryName); + entry.getDataAsync(function (content, err_1) { + if (err_1) { + callback(new Error(err_1)); + return; + } + if (!content) { + callback(new Error(Utils.Errors.CANT_EXTRACT_FILE)); + } else { + // The reverse operation for attr depend on method addFile() + const fileAttr = keepOriginalPermission ? entry.header.fileAttr : undefined; + filetools.writeFileToAsync(filePath, content, overwrite, fileAttr, function (succ) { + if (!succ) { + callback(getError("Unable to write file", filePath)); + return; + } + filetools.fs.utimes(filePath, entry.header.time, entry.header.time, function (err_2) { + if (err_2) { + callback(getError("Unable to set times", filePath)); + return; + } + fileEntries.delete(entry); + // call the callback if it was last entry + done(); + }); + }); + } + }); + } + // call the callback if fileEntries was empty + done(); + }, - if (!targetFileName && _filename) { - targetFileName = _filename; - } - if (!targetFileName) return; + /** + * Writes the newly created zip file to disk at the specified location or if a zip was opened and no ``targetFileName`` is provided, it will overwrite the opened zip + * + * @param targetFileName + * @param callback + */ + writeZip: function (/**String*/ targetFileName, /**Function*/ callback) { + if (arguments.length === 1) { + if (typeof targetFileName === "function") { + callback = targetFileName; + targetFileName = ""; + } + } - var zipData = _zip.compressToBuffer(); - if (zipData) { - var ok = Utils.writeFileTo(targetFileName, zipData, true); - if (typeof callback === 'function') callback(!ok ? new Error("failed") : null, ""); - } - }, + if (!targetFileName && opts.filename) { + targetFileName = opts.filename; + } + if (!targetFileName) return; + + var zipData = _zip.compressToBuffer(); + if (zipData) { + var ok = filetools.writeFileTo(targetFileName, zipData, true); + if (typeof callback === "function") callback(!ok ? 
new Error("failed") : null, ""); + } + }, - writeZipPromise: function (/**String*/ targetFileName, /* object */ options) { - const { overwrite, perm } = Object.assign({ overwrite: true }, options); + writeZipPromise: function (/**String*/ targetFileName, /* object */ props) { + const { overwrite, perm } = Object.assign({ overwrite: true }, props); return new Promise((resolve, reject) => { // find file name - if (!targetFileName && _filename) targetFileName = _filename; + if (!targetFileName && opts.filename) targetFileName = opts.filename; if (!targetFileName) reject("ADM-ZIP: ZIP File Name Missing"); this.toBufferPromise().then((zipData) => { const ret = (done) => (done ? resolve(done) : reject("ADM-ZIP: Wasn't able to write zip file")); - Utils.writeFileToAsync(targetFileName, zipData, overwrite, perm, ret); + filetools.writeFileToAsync(targetFileName, zipData, overwrite, perm, ret); }, reject); }); }, @@ -8980,20 +11517,20 @@ module.exports = function (/**String*/input) { }); }, - /** - * Returns the content of the entire zip file as a Buffer object - * - * @return Buffer - */ - toBuffer: function (/**Function=*/onSuccess, /**Function=*/onFail, /**Function=*/onItemStart, /**Function=*/onItemEnd) { - this.valueOf = 2; - if (typeof onSuccess === "function") { - _zip.toAsyncBuffer(onSuccess, onFail, onItemStart, onItemEnd); - return null; - } - return _zip.compressToBuffer() - } - } + /** + * Returns the content of the entire zip file as a Buffer object + * + * @return Buffer + */ + toBuffer: function (/**Function=*/ onSuccess, /**Function=*/ onFail, /**Function=*/ onItemStart, /**Function=*/ onItemEnd) { + this.valueOf = 2; + if (typeof onSuccess === "function") { + _zip.toAsyncBuffer(onSuccess, onFail, onItemStart, onItemEnd); + return null; + } + return _zip.compressToBuffer(); + } + }; }; @@ -9007,8 +11544,8 @@ var Utils = __nccwpck_require__(5182), /* The central directory file header */ module.exports = function () { - var _verMade = 0x14, - _version = 0x0A, + var _verMade = 20, // v2.0 + _version = 10, // v1.0 _flags = 0, _method = 0, _time = 0, @@ -9017,51 +11554,61 @@ module.exports = function () { _size = 0, _fnameLen = 0, _extraLen = 0, - _comLen = 0, _diskStart = 0, _inattr = 0, _attr = 0, _offset = 0; - switch(process.platform){ - case 'win32': - _verMade |= 0x0A00; - case 'darwin': - _verMade |= 0x1300; - default: - _verMade |= 0x0300; - } + _verMade |= Utils.isWin ? 0x0a00 : 0x0300; + + // Set EFS flag since filename and comment fields are all by default encoded using UTF-8. 
+ // Without it file names may be corrupted for other apps when file names use unicode chars + _flags |= Constants.FLG_EFS; var _dataHeader = {}; function setTime(val) { val = new Date(val); - _time = (val.getFullYear() - 1980 & 0x7f) << 25 // b09-16 years from 1980 - | (val.getMonth() + 1) << 21 // b05-08 month - | val.getDate() << 16 // b00-04 hour - + _time = + (((val.getFullYear() - 1980) & 0x7f) << 25) | // b09-16 years from 1980 + ((val.getMonth() + 1) << 21) | // b05-08 month + (val.getDate() << 16) | // b00-04 hour // 2 bytes time - | val.getHours() << 11 // b11-15 hour - | val.getMinutes() << 5 // b05-10 minute - | val.getSeconds() >> 1; // b00-04 seconds divided by 2 + (val.getHours() << 11) | // b11-15 hour + (val.getMinutes() << 5) | // b05-10 minute + (val.getSeconds() >> 1); // b00-04 seconds divided by 2 } setTime(+new Date()); return { - get made () { return _verMade; }, - set made (val) { _verMade = val; }, + get made() { + return _verMade; + }, + set made(val) { + _verMade = val; + }, - get version () { return _version; }, - set version (val) { _version = val }, + get version() { + return _version; + }, + set version(val) { + _version = val; + }, - get flags () { return _flags }, - set flags (val) { _flags = val; }, + get flags() { + return _flags; + }, + set flags(val) { + _flags = val; + }, - get method () { return _method; }, - set method (val) { - switch (val){ + get method() { + return _method; + }, + set method(val) { + switch (val) { case Constants.STORED: this.version = 10; case Constants.DEFLATED: @@ -9069,66 +11616,107 @@ module.exports = function () { this.version = 20; } _method = val; - }, + }, - get time () { return new Date( - ((_time >> 25) & 0x7f) + 1980, - ((_time >> 21) & 0x0f) - 1, - (_time >> 16) & 0x1f, - (_time >> 11) & 0x1f, - (_time >> 5) & 0x3f, - (_time & 0x1f) << 1 - ); + get time() { + return new Date(((_time >> 25) & 0x7f) + 1980, ((_time >> 21) & 0x0f) - 1, (_time >> 16) & 0x1f, (_time >> 11) & 0x1f, (_time >> 5) & 0x3f, (_time & 0x1f) << 1); }, - set time (val) { + set time(val) { setTime(val); }, - get crc () { return _crc; }, - set crc (val) { _crc = val; }, + get crc() { + return _crc; + }, + set crc(val) { + _crc = Math.max(0, val) >>> 0; + }, - get compressedSize () { return _compressedSize; }, - set compressedSize (val) { _compressedSize = val; }, + get compressedSize() { + return _compressedSize; + }, + set compressedSize(val) { + _compressedSize = Math.max(0, val) >>> 0; + }, - get size () { return _size; }, - set size (val) { _size = val; }, + get size() { + return _size; + }, + set size(val) { + _size = Math.max(0, val) >>> 0; + }, - get fileNameLength () { return _fnameLen; }, - set fileNameLength (val) { _fnameLen = val; }, + get fileNameLength() { + return _fnameLen; + }, + set fileNameLength(val) { + _fnameLen = val; + }, - get extraLength () { return _extraLen }, - set extraLength (val) { _extraLen = val; }, + get extraLength() { + return _extraLen; + }, + set extraLength(val) { + _extraLen = val; + }, - get commentLength () { return _comLen }, - set commentLength (val) { _comLen = val }, + get commentLength() { + return _comLen; + }, + set commentLength(val) { + _comLen = val; + }, + + get diskNumStart() { + return _diskStart; + }, + set diskNumStart(val) { + _diskStart = Math.max(0, val) >>> 0; + }, - get diskNumStart () { return _diskStart }, - set diskNumStart (val) { _diskStart = val }, + get inAttr() { + return _inattr; + }, + set inAttr(val) { + _inattr = Math.max(0, val) >>> 0; + }, - get inAttr () { return _inattr }, 
- set inAttr (val) { _inattr = val }, + get attr() { + return _attr; + }, + set attr(val) { + _attr = Math.max(0, val) >>> 0; + }, - get attr () { return _attr }, - set attr (val) { _attr = val }, + // get Unix file permissions + get fileAttr() { + return _attr ? (((_attr >>> 0) | 0) >> 16) & 0xfff : 0; + }, - get offset () { return _offset }, - set offset (val) { _offset = val }, + get offset() { + return _offset; + }, + set offset(val) { + _offset = Math.max(0, val) >>> 0; + }, - get encripted () { return (_flags & 1) === 1 }, + get encripted() { + return (_flags & 1) === 1; + }, - get entryHeaderSize () { + get entryHeaderSize() { return Constants.CENHDR + _fnameLen + _extraLen + _comLen; }, - get realDataOffset () { + get realDataOffset() { return _offset + Constants.LOCHDR + _dataHeader.fnameLen + _dataHeader.extraLen; }, - get dataHeader () { + get dataHeader() { return _dataHeader; }, - loadDataHeaderFromBinary : function(/*Buffer*/input) { + loadDataHeaderFromBinary: function (/*Buffer*/ input) { var data = input.slice(_offset, _offset + Constants.LOCHDR); // 30 bytes and should start with "PK\003\004" if (data.readUInt32LE(0) !== Constants.LOCSIG) { @@ -9136,27 +11724,27 @@ module.exports = function () { } _dataHeader = { // version needed to extract - version : data.readUInt16LE(Constants.LOCVER), + version: data.readUInt16LE(Constants.LOCVER), // general purpose bit flag - flags : data.readUInt16LE(Constants.LOCFLG), + flags: data.readUInt16LE(Constants.LOCFLG), // compression method - method : data.readUInt16LE(Constants.LOCHOW), + method: data.readUInt16LE(Constants.LOCHOW), // modification time (2 bytes time, 2 bytes date) - time : data.readUInt32LE(Constants.LOCTIM), + time: data.readUInt32LE(Constants.LOCTIM), // uncompressed file crc-32 value - crc : data.readUInt32LE(Constants.LOCCRC), + crc: data.readUInt32LE(Constants.LOCCRC), // compressed size - compressedSize : data.readUInt32LE(Constants.LOCSIZ), + compressedSize: data.readUInt32LE(Constants.LOCSIZ), // uncompressed size - size : data.readUInt32LE(Constants.LOCLEN), + size: data.readUInt32LE(Constants.LOCLEN), // filename length - fnameLen : data.readUInt16LE(Constants.LOCNAM), + fnameLen: data.readUInt16LE(Constants.LOCNAM), // extra field length - extraLen : data.readUInt16LE(Constants.LOCEXT) - } + extraLen: data.readUInt16LE(Constants.LOCEXT) + }; }, - loadFromBinary : function(/*Buffer*/data) { + loadFromBinary: function (/*Buffer*/ data) { // data should be 46 bytes and start with "PK 01 02" if (data.length !== Constants.CENHDR || data.readUInt32LE(0) !== Constants.CENSIG) { throw new Error(Utils.Errors.INVALID_CEN); @@ -9193,7 +11781,7 @@ module.exports = function () { _offset = data.readUInt32LE(Constants.CENOFF); }, - dataHeaderToBinary : function() { + dataHeaderToBinary: function () { // LOC header size (30 bytes) var data = Buffer.alloc(Constants.LOCHDR); // "PK\003\004" @@ -9219,7 +11807,7 @@ module.exports = function () { return data; }, - entryHeaderToBinary : function() { + entryHeaderToBinary: function () { // CEN header size (46 bytes) var data = Buffer.alloc(Constants.CENHDR + _fnameLen + _extraLen + _comLen); // "PK\001\002" @@ -9259,27 +11847,35 @@ module.exports = function () { return data; }, - toString : function() { - return '{\n' + - '\t"made" : ' + _verMade + ",\n" + - '\t"version" : ' + _version + ",\n" + - '\t"flags" : ' + _flags + ",\n" + - '\t"method" : ' + Utils.methodToString(_method) + ",\n" + - '\t"time" : ' + this.time + ",\n" + - '\t"crc" : 0x' + _crc.toString(16).toUpperCase() + 
",\n" + - '\t"compressedSize" : ' + _compressedSize + " bytes,\n" + - '\t"size" : ' + _size + " bytes,\n" + - '\t"fileNameLength" : ' + _fnameLen + ",\n" + - '\t"extraLength" : ' + _extraLen + " bytes,\n" + - '\t"commentLength" : ' + _comLen + " bytes,\n" + - '\t"diskNumStart" : ' + _diskStart + ",\n" + - '\t"inAttr" : ' + _inattr + ",\n" + - '\t"attr" : ' + _attr + ",\n" + - '\t"offset" : ' + _offset + ",\n" + - '\t"entryHeaderSize" : ' + (Constants.CENHDR + _fnameLen + _extraLen + _comLen) + " bytes\n" + - '}'; + toJSON: function () { + const bytes = function (nr) { + return nr + " bytes"; + }; + + return { + made: _verMade, + version: _version, + flags: _flags, + method: Utils.methodToString(_method), + time: this.time, + crc: "0x" + _crc.toString(16).toUpperCase(), + compressedSize: bytes(_compressedSize), + size: bytes(_size), + fileNameLength: bytes(_fnameLen), + extraLength: bytes(_extraLen), + commentLength: bytes(_comLen), + diskNumStart: _diskStart, + inAttr: _inattr, + attr: _attr, + offset: _offset, + entryHeaderSize: bytes(Constants.CENHDR + _fnameLen + _extraLen + _comLen) + }; + }, + + toString: function () { + return JSON.stringify(this.toJSON(), null, "\t"); } - } + }; }; @@ -9289,12 +11885,12 @@ module.exports = function () { /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { exports.EntryHeader = __nccwpck_require__(9032); -exports.MainHeader = __nccwpck_require__(4408); +exports.MainHeader = __nccwpck_require__(8952); /***/ }), -/***/ 4408: +/***/ 8952: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var Utils = __nccwpck_require__(5182), @@ -9309,31 +11905,52 @@ module.exports = function () { _commentLength = 0; return { - get diskEntries () { return _volumeEntries }, - set diskEntries (/*Number*/val) { _volumeEntries = _totalEntries = val; }, + get diskEntries() { + return _volumeEntries; + }, + set diskEntries(/*Number*/ val) { + _volumeEntries = _totalEntries = val; + }, - get totalEntries () { return _totalEntries }, - set totalEntries (/*Number*/val) { _totalEntries = _volumeEntries = val; }, + get totalEntries() { + return _totalEntries; + }, + set totalEntries(/*Number*/ val) { + _totalEntries = _volumeEntries = val; + }, - get size () { return _size }, - set size (/*Number*/val) { _size = val; }, + get size() { + return _size; + }, + set size(/*Number*/ val) { + _size = val; + }, - get offset () { return _offset }, - set offset (/*Number*/val) { _offset = val; }, + get offset() { + return _offset; + }, + set offset(/*Number*/ val) { + _offset = val; + }, - get commentLength () { return _commentLength }, - set commentLength (/*Number*/val) { _commentLength = val; }, + get commentLength() { + return _commentLength; + }, + set commentLength(/*Number*/ val) { + _commentLength = val; + }, - get mainHeaderSize () { + get mainHeaderSize() { return Constants.ENDHDR + _commentLength; }, - loadFromBinary : function(/*Buffer*/data) { + loadFromBinary: function (/*Buffer*/ data) { // data should be 22 bytes and start with "PK 05 06" // or be 56+ bytes and start with "PK 06 06" for Zip64 - if ((data.length !== Constants.ENDHDR || data.readUInt32LE(0) !== Constants.ENDSIG) && - (data.length < Constants.ZIP64HDR || data.readUInt32LE(0) !== Constants.ZIP64SIG)) { - + if ( + (data.length !== Constants.ENDHDR || data.readUInt32LE(0) !== Constants.ENDSIG) && + (data.length < Constants.ZIP64HDR || data.readUInt32LE(0) !== Constants.ZIP64SIG) + ) { throw new Error(Utils.Errors.INVALID_END); } @@ -9354,17 +11971,16 @@ module.exports = function () 
{ // total number of entries _totalEntries = Utils.readBigUInt64LE(data, Constants.ZIP64TOT); // central directory size in bytes - _size = Utils.readBigUInt64LE(data, Constants.ZIP64SIZ); + _size = Utils.readBigUInt64LE(data, Constants.ZIP64SIZE); // offset of first CEN header _offset = Utils.readBigUInt64LE(data, Constants.ZIP64OFF); _commentLength = 0; } - }, - toBinary : function() { - var b = Buffer.alloc(Constants.ENDHDR + _commentLength); + toBinary: function () { + var b = Buffer.alloc(Constants.ENDHDR + _commentLength); // "PK 05 06" signature b.writeUInt32LE(Constants.ENDSIG, 0); b.writeUInt32LE(0, 4); @@ -9384,53 +12000,67 @@ module.exports = function () { return b; }, - toString : function() { - return '{\n' + - '\t"diskEntries" : ' + _volumeEntries + ",\n" + - '\t"totalEntries" : ' + _totalEntries + ",\n" + - '\t"size" : ' + _size + " bytes,\n" + - '\t"offset" : 0x' + _offset.toString(16).toUpperCase() + ",\n" + - '\t"commentLength" : 0x' + _commentLength + "\n" + - '}'; + toJSON: function () { + // creates 0x0000 style output + const offset = function (nr, len) { + let offs = nr.toString(16).toUpperCase(); + while (offs.length < len) offs = "0" + offs; + return "0x" + offs; + }; + + return { + diskEntries: _volumeEntries, + totalEntries: _totalEntries, + size: _size + " bytes", + offset: offset(_offset, 4), + commentLength: _commentLength + }; + }, + + toString: function () { + return JSON.stringify(this.toJSON(), null, "\t"); } - } + }; }; + // Misspelled /***/ }), /***/ 7686: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -module.exports = function (/*Buffer*/inbuf) { +module.exports = function (/*Buffer*/ inbuf) { + var zlib = __nccwpck_require__(9796); - var zlib = __nccwpck_require__(8761); - - var opts = {chunkSize: (parseInt(inbuf.length / 1024) + 1) * 1024}; - - return { - deflate: function () { - return zlib.deflateRawSync(inbuf, opts); - }, + var opts = { chunkSize: (parseInt(inbuf.length / 1024) + 1) * 1024 }; - deflateAsync: function (/*Function*/callback) { - var tmp = zlib.createDeflateRaw(opts), parts = [], total = 0; - tmp.on('data', function (data) { - parts.push(data); - total += data.length; - }); - tmp.on('end', function () { - var buf = Buffer.alloc(total), written = 0; - buf.fill(0); - for (var i = 0; i < parts.length; i++) { - var part = parts[i]; - part.copy(buf, written); - written += part.length; - } - callback && callback(buf); - }); - tmp.end(inbuf); - } - } + return { + deflate: function () { + return zlib.deflateRawSync(inbuf, opts); + }, + + deflateAsync: function (/*Function*/ callback) { + var tmp = zlib.createDeflateRaw(opts), + parts = [], + total = 0; + tmp.on("data", function (data) { + parts.push(data); + total += data.length; + }); + tmp.on("end", function () { + var buf = Buffer.alloc(total), + written = 0; + buf.fill(0); + for (var i = 0; i < parts.length; i++) { + var part = parts[i]; + part.copy(buf, written); + written += part.length; + } + callback && callback(buf); + }); + tmp.end(inbuf); + } + }; }; @@ -9443,124 +12073,221 @@ exports.Deflater = __nccwpck_require__(7686); exports.Inflater = __nccwpck_require__(2153); exports.ZipCrypto = __nccwpck_require__(3228); + /***/ }), /***/ 2153: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -module.exports = function (/*Buffer*/inbuf) { - - var zlib = __nccwpck_require__(8761); +module.exports = function (/*Buffer*/ inbuf) { + var zlib = __nccwpck_require__(9796); - return { - inflate: function () { - return zlib.inflateRawSync(inbuf); - }, + 
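// --- Illustrative sketch (not from the patch): the Deflater and Inflater wrappers in
// --- these modules are thin layers over Node's raw-deflate zlib APIs. A minimal roundtrip:
const zlib = require("zlib");

const original = Buffer.from("hello adm-zip", "utf8");
const compressed = zlib.deflateRawSync(original);   // what Deflater#deflate returns
const restored = zlib.inflateRawSync(compressed);   // what Inflater#inflate returns
console.log(restored.equals(original));             // -> true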
return { + inflate: function () { + return zlib.inflateRawSync(inbuf); + }, - inflateAsync: function (/*Function*/callback) { - var tmp = zlib.createInflateRaw(), parts = [], total = 0; - tmp.on('data', function (data) { - parts.push(data); - total += data.length; - }); - tmp.on('end', function () { - var buf = Buffer.alloc(total), written = 0; - buf.fill(0); - for (var i = 0; i < parts.length; i++) { - var part = parts[i]; - part.copy(buf, written); - written += part.length; - } - callback && callback(buf); - }); - tmp.end(inbuf); - } - } + inflateAsync: function (/*Function*/ callback) { + var tmp = zlib.createInflateRaw(), + parts = [], + total = 0; + tmp.on("data", function (data) { + parts.push(data); + total += data.length; + }); + tmp.on("end", function () { + var buf = Buffer.alloc(total), + written = 0; + buf.fill(0); + for (var i = 0; i < parts.length; i++) { + var part = parts[i]; + part.copy(buf, written); + written += part.length; + } + callback && callback(buf); + }); + tmp.end(inbuf); + } + }; }; /***/ }), /***/ 3228: -/***/ ((module) => { +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +// node crypt, we use it for generate salt +// eslint-disable-next-line node/no-unsupported-features/node-builtins +const { randomFillSync } = __nccwpck_require__(6113); // generate CRC32 lookup table -const crctable = (new Uint32Array(256)).map((t,crc)=>{ - for(let j=0;j<8;j++){ - if (0 !== (crc & 1)){ - crc = (crc >>> 1) ^ 0xEDB88320 - }else{ - crc >>>= 1 +const crctable = new Uint32Array(256).map((t, crc) => { + for (let j = 0; j < 8; j++) { + if (0 !== (crc & 1)) { + crc = (crc >>> 1) ^ 0xedb88320; + } else { + crc >>>= 1; } } - return crc>>>0; + return crc >>> 0; }); -function make_decrypter(/*Buffer*/pwd){ - // C-style uInt32 Multiply - const uMul = (a,b) => Math.imul(a, b) >>> 0; - // Initialize keys with default values - const keys = new Uint32Array([0x12345678, 0x23456789, 0x34567890]); - // crc32 byte update - const crc32update = (pCrc32, bval) => { - return crctable[(pCrc32 ^ bval) & 0xff] ^ (pCrc32 >>> 8); +// C-style uInt32 Multiply (discards higher bits, when JS multiply discards lower bits) +const uMul = (a, b) => Math.imul(a, b) >>> 0; + +// crc32 byte single update (actually same function is part of utils.crc32 function :) ) +const crc32update = (pCrc32, bval) => { + return crctable[(pCrc32 ^ bval) & 0xff] ^ (pCrc32 >>> 8); +}; + +// function for generating salt for encrytion header +const genSalt = () => { + if ("function" === typeof randomFillSync) { + return randomFillSync(Buffer.alloc(12)); + } else { + // fallback if function is not defined + return genSalt.node(); } - // update keys with byteValues - const updateKeys = (byteValue) => { - keys[0] = crc32update(keys[0], byteValue); - keys[1] += keys[0] & 0xff; - keys[1] = uMul(keys[1], 134775813) + 1; - keys[2] = crc32update(keys[2], keys[1] >>> 24); +}; + +// salt generation with node random function (mainly as fallback) +genSalt.node = () => { + const salt = Buffer.alloc(12); + const len = salt.length; + for (let i = 0; i < len; i++) salt[i] = (Math.random() * 256) & 0xff; + return salt; +}; + +// general config +const config = { + genSalt +}; + +// Class Initkeys handles same basic ops with keys +function Initkeys(pw) { + const pass = Buffer.isBuffer(pw) ? 
pw : Buffer.from(pw); + this.keys = new Uint32Array([0x12345678, 0x23456789, 0x34567890]); + for (let i = 0; i < pass.length; i++) { + this.updateKeys(pass[i]); } +} + +Initkeys.prototype.updateKeys = function (byteValue) { + const keys = this.keys; + keys[0] = crc32update(keys[0], byteValue); + keys[1] += keys[0] & 0xff; + keys[1] = uMul(keys[1], 134775813) + 1; + keys[2] = crc32update(keys[2], keys[1] >>> 24); + return byteValue; +}; +Initkeys.prototype.next = function () { + const k = (this.keys[2] | 2) >>> 0; // key + return (uMul(k, k ^ 1) >> 8) & 0xff; // decode +}; + +function make_decrypter(/*Buffer*/ pwd) { // 1. Stage initialize key - const pass = (Buffer.isBuffer(pwd)) ? pwd : Buffer.from(pwd); - for(let i=0; i< pass.length; i++){ - updateKeys(pass[i]); - } + const keys = new Initkeys(pwd); // return decrypter function - return function (/*Buffer*/data){ - if (!Buffer.isBuffer(data)){ - throw 'decrypter needs Buffer' - } + return function (/*Buffer*/ data) { // result - we create new Buffer for results const result = Buffer.alloc(data.length); let pos = 0; // process input data - for(let c of data){ - const k = (keys[2] | 2) >>> 0; // key - c ^= (uMul(k, k^1) >> 8) & 0xff; // decode - result[pos++] = c; // Save Value - updateKeys(c); // update keys with decoded byte + for (let c of data) { + //c ^= keys.next(); + //result[pos++] = c; // decode & Save Value + result[pos++] = keys.updateKeys(c ^ keys.next()); // update keys with decoded byte } return result; - } + }; } -function decrypt(/*Buffer*/ data, /*Object*/header, /*String, Buffer*/ pwd){ +function make_encrypter(/*Buffer*/ pwd) { + // 1. Stage initialize key + const keys = new Initkeys(pwd); + + // return encrypting function, result and pos is here so we dont have to merge buffers later + return function (/*Buffer*/ data, /*Buffer*/ result, /* Number */ pos = 0) { + // result - we create new Buffer for results + if (!result) result = Buffer.alloc(data.length); + // process input data + for (let c of data) { + const k = keys.next(); // save key byte + result[pos++] = c ^ k; // save val + keys.updateKeys(c); // update keys with decoded byte + } + return result; + }; +} + +function decrypt(/*Buffer*/ data, /*Object*/ header, /*String, Buffer*/ pwd) { if (!data || !Buffer.isBuffer(data) || data.length < 12) { return Buffer.alloc(0); } - - // We Initialize and generate decrypting function + + // 1. We Initialize and generate decrypting function const decrypter = make_decrypter(pwd); - // check - for testing password - const check = header.crc >>> 24; - // decrypt salt what is always 12 bytes and is a part of file content - const testbyte = decrypter(data.slice(0, 12))[11]; + // 2. decrypt salt what is always 12 bytes and is a part of file content + const salt = decrypter(data.slice(0, 12)); - // does password meet expectations - if (check !== testbyte){ - throw 'ADM-ZIP: Wrong Password'; + // 3. does password meet expectations + if (salt[11] !== header.crc >>> 24) { + throw "ADM-ZIP: Wrong Password"; } - // decode content + // 4. 
decode content return decrypter(data.slice(12)); } -module.exports = {decrypt}; +// lets add way to populate salt, NOT RECOMMENDED for production but maybe useful for testing general functionality +function _salter(data) { + if (Buffer.isBuffer(data) && data.length >= 12) { + // be aware - currently salting buffer data is modified + config.genSalt = function () { + return data.slice(0, 12); + }; + } else if (data === "node") { + // test salt generation with node random function + config.genSalt = genSalt.node; + } else { + // if value is not acceptable config gets reset. + config.genSalt = genSalt; + } +} + +function encrypt(/*Buffer*/ data, /*Object*/ header, /*String, Buffer*/ pwd, /*Boolean*/ oldlike = false) { + // 1. test data if data is not Buffer we make buffer from it + if (data == null) data = Buffer.alloc(0); + // if data is not buffer be make buffer from it + if (!Buffer.isBuffer(data)) data = Buffer.from(data.toString()); + + // 2. We Initialize and generate encrypting function + const encrypter = make_encrypter(pwd); + + // 3. generate salt (12-bytes of random data) + const salt = config.genSalt(); + salt[11] = (header.crc >>> 24) & 0xff; + + // old implementations (before PKZip 2.04g) used two byte check + if (oldlike) salt[10] = (header.crc >>> 16) & 0xff; + + // 4. create output + const result = Buffer.alloc(data.length + 12); + encrypter(salt, result); + + // finally encode content + return encrypter(data, result, 12); +} + +module.exports = { decrypt, encrypt, _salter }; /***/ }), @@ -9645,32 +12372,39 @@ module.exports = { REDUCED3 : 4, // reduced with compression factor 3 REDUCED4 : 5, // reduced with compression factor 4 IMPLODED : 6, // imploded - // 7 reserved + // 7 reserved for Tokenizing compression algorithm DEFLATED : 8, // deflated ENHANCED_DEFLATED: 9, // enhanced deflated PKWARE : 10,// PKWare DCL imploded - // 11 reserved + // 11 reserved by PKWARE BZIP2 : 12, // compressed using BZIP2 - // 13 reserved + // 13 reserved by PKWARE LZMA : 14, // LZMA - // 15-17 reserved + // 15-17 reserved by PKWARE IBM_TERSE : 18, // compressed using IBM TERSE - IBM_LZ77 : 19, //IBM LZ77 z + IBM_LZ77 : 19, // IBM LZ77 z + AES_ENCRYPT : 99, // WinZIP AES encryption method /* General purpose bit flag */ - FLG_ENC : 0, // encripted file - FLG_COMP1 : 1, // compression option - FLG_COMP2 : 2, // compression option - FLG_DESC : 4, // data descriptor - FLG_ENH : 8, // enhanced deflation - FLG_STR : 16, // strong encryption - FLG_LNG : 1024, // language encoding + // values can obtained with expression 2**bitnr + FLG_ENC : 1, // Bit 0: encrypted file + FLG_COMP1 : 2, // Bit 1, compression option + FLG_COMP2 : 4, // Bit 2, compression option + FLG_DESC : 8, // Bit 3, data descriptor + FLG_ENH : 16, // Bit 4, enhanced deflating + FLG_PATCH : 32, // Bit 5, indicates that the file is compressed patched data. + FLG_STR : 64, // Bit 6, strong encryption (patented) + // Bits 7-10: Currently unused. + FLG_EFS : 2048, // Bit 11: Language encoding flag (EFS) + // Bit 12: Reserved by PKWARE for enhanced compression. + // Bit 13: encrypted the Central Directory (patented). + // Bits 14-15: Reserved by PKWARE. 
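// --- Illustrative sketch (not part of the bundled source): a self-contained roundtrip
// --- through the legacy "ZipCrypto" keystream that Initkeys/make_encrypter/make_decrypter
// --- above implement. The 12-byte salt header and the CRC check byte are left out here;
// --- this only demonstrates the key schedule and the XOR stream itself.
const crcTbl = new Uint32Array(256).map((_, n) => {
    let c = n;
    for (let k = 0; k < 8; k++) c = c & 1 ? (c >>> 1) ^ 0xedb88320 : c >>> 1;
    return c >>> 0;
});
const crcUpd = (crc, b) => crcTbl[(crc ^ b) & 0xff] ^ (crc >>> 8);
const uMul32 = (a, b) => Math.imul(a, b) >>> 0;

function makeKeystream(password) {
    const keys = new Uint32Array([0x12345678, 0x23456789, 0x34567890]);
    const update = (byteValue) => {
        keys[0] = crcUpd(keys[0], byteValue);
        keys[1] += keys[0] & 0xff;
        keys[1] = uMul32(keys[1], 134775813) + 1;
        keys[2] = crcUpd(keys[2], keys[1] >>> 24);
    };
    for (const b of Buffer.from(password)) update(b);
    const next = () => {
        const k = (keys[2] | 2) >>> 0;
        return (uMul32(k, k ^ 1) >> 8) & 0xff;
    };
    return { next, update };
}

// Encryption and decryption are symmetric: XOR each byte with the keystream and feed
// the *plaintext* byte back into the key schedule on both sides.
function zipCrypt(buf, password, decrypting) {
    const ks = makeKeystream(password);
    const out = Buffer.alloc(buf.length);
    for (let i = 0; i < buf.length; i++) {
        const k = ks.next();
        out[i] = buf[i] ^ k;
        ks.update(decrypting ? out[i] : buf[i]);
    }
    return out;
}

const secret = Buffer.from("secret data");
const cipher = zipCrypt(secret, "password", false);
console.log(zipCrypt(cipher, "password", true).toString()); // -> "secret data"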
FLG_MSK : 4096, // mask header values /* Load type */ - FILE : 0, + FILE : 2, BUFFER : 1, - NONE : 2, + NONE : 0, /* 4.5 Extensible data fields */ EF_ID : 0, @@ -9712,66 +12446,65 @@ module.exports = { module.exports = { /* Header error messages */ - "INVALID_LOC" : "Invalid LOC header (bad signature)", - "INVALID_CEN" : "Invalid CEN header (bad signature)", - "INVALID_END" : "Invalid END header (bad signature)", + INVALID_LOC: "Invalid LOC header (bad signature)", + INVALID_CEN: "Invalid CEN header (bad signature)", + INVALID_END: "Invalid END header (bad signature)", /* ZipEntry error messages*/ - "NO_DATA" : "Nothing to decompress", - "BAD_CRC" : "CRC32 checksum failed", - "FILE_IN_THE_WAY" : "There is a file in the way: %s", - "UNKNOWN_METHOD" : "Invalid/unsupported compression method", + NO_DATA: "Nothing to decompress", + BAD_CRC: "CRC32 checksum failed", + FILE_IN_THE_WAY: "There is a file in the way: %s", + UNKNOWN_METHOD: "Invalid/unsupported compression method", /* Inflater error messages */ - "AVAIL_DATA" : "inflate::Available inflate data did not terminate", - "INVALID_DISTANCE" : "inflate::Invalid literal/length or distance code in fixed or dynamic block", - "TO_MANY_CODES" : "inflate::Dynamic block code description: too many length or distance codes", - "INVALID_REPEAT_LEN" : "inflate::Dynamic block code description: repeat more than specified lengths", - "INVALID_REPEAT_FIRST" : "inflate::Dynamic block code description: repeat lengths with no first length", - "INCOMPLETE_CODES" : "inflate::Dynamic block code description: code lengths codes incomplete", - "INVALID_DYN_DISTANCE": "inflate::Dynamic block code description: invalid distance code lengths", - "INVALID_CODES_LEN": "inflate::Dynamic block code description: invalid literal/length code lengths", - "INVALID_STORE_BLOCK" : "inflate::Stored block length did not match one's complement", - "INVALID_BLOCK_TYPE" : "inflate::Invalid block type (type == 3)", + AVAIL_DATA: "inflate::Available inflate data did not terminate", + INVALID_DISTANCE: "inflate::Invalid literal/length or distance code in fixed or dynamic block", + TO_MANY_CODES: "inflate::Dynamic block code description: too many length or distance codes", + INVALID_REPEAT_LEN: "inflate::Dynamic block code description: repeat more than specified lengths", + INVALID_REPEAT_FIRST: "inflate::Dynamic block code description: repeat lengths with no first length", + INCOMPLETE_CODES: "inflate::Dynamic block code description: code lengths codes incomplete", + INVALID_DYN_DISTANCE: "inflate::Dynamic block code description: invalid distance code lengths", + INVALID_CODES_LEN: "inflate::Dynamic block code description: invalid literal/length code lengths", + INVALID_STORE_BLOCK: "inflate::Stored block length did not match one's complement", + INVALID_BLOCK_TYPE: "inflate::Invalid block type (type == 3)", /* ADM-ZIP error messages */ - "CANT_EXTRACT_FILE" : "Could not extract the file", - "CANT_OVERRIDE" : "Target file already exists", - "NO_ZIP" : "No zip file was loaded", - "NO_ENTRY" : "Entry doesn't exist", - "DIRECTORY_CONTENT_ERROR" : "A directory cannot have content", - "FILE_NOT_FOUND" : "File not found: %s", - "NOT_IMPLEMENTED" : "Not implemented", - "INVALID_FILENAME" : "Invalid filename", - "INVALID_FORMAT" : "Invalid or unsupported zip format. 
No END header found" + CANT_EXTRACT_FILE: "Could not extract the file", + CANT_OVERRIDE: "Target file already exists", + NO_ZIP: "No zip file was loaded", + NO_ENTRY: "Entry doesn't exist", + DIRECTORY_CONTENT_ERROR: "A directory cannot have content", + FILE_NOT_FOUND: "File not found: %s", + NOT_IMPLEMENTED: "Not implemented", + INVALID_FILENAME: "Invalid filename", + INVALID_FORMAT: "Invalid or unsupported zip format. No END header found" }; + /***/ }), /***/ 8321: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var fs = __nccwpck_require__(2895).require(), - pth = __nccwpck_require__(5622); - -fs.existsSync = fs.existsSync || pth.existsSync; +const fs = (__nccwpck_require__(2895).require)(); +const pth = __nccwpck_require__(1017); -module.exports = function(/*String*/path) { +fs.existsSync = fs.existsSync || pth.existsSync; +module.exports = function (/*String*/ path) { var _path = path || "", - _permissions = 0, _obj = newAttr(), _stat = null; function newAttr() { return { - directory : false, - readonly : false, - hidden : false, - executable : false, - mtime : 0, - atime : 0 - } + directory: false, + readonly: false, + hidden: false, + executable: false, + mtime: 0, + atime: 0 + }; } if (_path && fs.existsSync(_path)) { @@ -9779,61 +12512,58 @@ module.exports = function(/*String*/path) { _obj.directory = _stat.isDirectory(); _obj.mtime = _stat.mtime; _obj.atime = _stat.atime; - _obj.executable = (0o111 & _stat.mode) != 0; // file is executable who ever har right not just owner - _obj.readonly = (0o200 & _stat.mode) == 0; // readonly if owner has no write right + _obj.executable = (0o111 & _stat.mode) !== 0; // file is executable who ever har right not just owner + _obj.readonly = (0o200 & _stat.mode) === 0; // readonly if owner has no write right _obj.hidden = pth.basename(_path)[0] === "."; } else { - console.warn("Invalid path: " + _path) + console.warn("Invalid path: " + _path); } return { - - get directory () { + get directory() { return _obj.directory; }, - get readOnly () { + get readOnly() { return _obj.readonly; }, - get hidden () { + get hidden() { return _obj.hidden; }, - get mtime () { + get mtime() { return _obj.mtime; }, - get atime () { - return _obj.atime; + get atime() { + return _obj.atime; }, - - get executable () { + get executable() { return _obj.executable; }, - decodeAttributes : function(val) { - - }, + decodeAttributes: function () {}, - encodeAttributes : function (val) { + encodeAttributes: function () {}, + toJSON: function () { + return { + path: _path, + isDirectory: _obj.directory, + isReadOnly: _obj.readonly, + isHidden: _obj.hidden, + isExecutable: _obj.executable, + mTime: _obj.mtime, + aTime: _obj.atime + }; }, - toString : function() { - return '{\n' + - '\t"path" : "' + _path + ",\n" + - '\t"isDirectory" : ' + _obj.directory + ",\n" + - '\t"isReadOnly" : ' + _obj.readonly + ",\n" + - '\t"isHidden" : ' + _obj.hidden + ",\n" + - '\t"isExecutable" : ' + _obj.executable + ",\n" + - '\t"mTime" : ' + _obj.mtime + "\n" + - '\t"aTime" : ' + _obj.atime + "\n" + - '}'; + toString: function () { + return JSON.stringify(this.toJSON(), null, "\t"); } - } - + }; }; @@ -9842,17 +12572,16 @@ module.exports = function(/*String*/path) { /***/ 2895: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -exports.require = function() { - var fs = __nccwpck_require__(5747); - if (process && process.versions && process.versions['electron']) { - try { - originalFs = __nccwpck_require__(2941); - if (Object.keys(originalFs).length > 0) { - 
fs = originalFs; - } - } catch (e) {} - } - return fs +exports.require = function () { + if (typeof process === "object" && process.versions && process.versions["electron"]) { + try { + const originalFs = __nccwpck_require__(2941); + if (Object.keys(originalFs).length > 0) { + return originalFs; + } + } catch (e) {} + } + return __nccwpck_require__(7147); }; @@ -9862,234 +12591,263 @@ exports.require = function() { /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = __nccwpck_require__(1291); -module.exports.FileSystem = __nccwpck_require__(2895); module.exports.Constants = __nccwpck_require__(4522); module.exports.Errors = __nccwpck_require__(1255); module.exports.FileAttr = __nccwpck_require__(8321); + /***/ }), /***/ 1291: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var fs = __nccwpck_require__(2895).require(), - pth = __nccwpck_require__(5622); +const fsystem = (__nccwpck_require__(2895).require)(); +const pth = __nccwpck_require__(1017); +const Constants = __nccwpck_require__(4522); +const Errors = __nccwpck_require__(1255); +const isWin = typeof process === "object" && "win32" === process.platform; -fs.existsSync = fs.existsSync || pth.existsSync; +const is_Obj = (obj) => obj && typeof obj === "object"; -module.exports = (function() { +// generate CRC32 lookup table +const crcTable = new Uint32Array(256).map((t, c) => { + for (let k = 0; k < 8; k++) { + if ((c & 1) !== 0) { + c = 0xedb88320 ^ (c >>> 1); + } else { + c >>>= 1; + } + } + return c >>> 0; +}); - var crcTable = [], - Constants = __nccwpck_require__(4522), - Errors = __nccwpck_require__(1255), +// UTILS functions - PATH_SEPARATOR = pth.sep; +function Utils(opts) { + this.sep = pth.sep; + this.fs = fsystem; + if (is_Obj(opts)) { + // custom filesystem + if (is_Obj(opts.fs) && typeof opts.fs.statSync === "function") { + this.fs = opts.fs; + } + } +} + +module.exports = Utils; + +// INSTANCED functions - function mkdirSync(/*String*/path) { - var resolvedPath = path.split(PATH_SEPARATOR)[0]; - path.split(PATH_SEPARATOR).forEach(function(name) { - if (!name || name.substr(-1,1) === ":") return; - resolvedPath += PATH_SEPARATOR + name; +Utils.prototype.makeDir = function (/*String*/ folder) { + const self = this; + + // Sync - make directories tree + function mkdirSync(/*String*/ fpath) { + let resolvedPath = fpath.split(self.sep)[0]; + fpath.split(self.sep).forEach(function (name) { + if (!name || name.substr(-1, 1) === ":") return; + resolvedPath += self.sep + name; var stat; try { - stat = fs.statSync(resolvedPath); + stat = self.fs.statSync(resolvedPath); } catch (e) { - fs.mkdirSync(resolvedPath); + self.fs.mkdirSync(resolvedPath); } - if (stat && stat.isFile()) - throw Errors.FILE_IN_THE_WAY.replace("%s", resolvedPath); + if (stat && stat.isFile()) throw Errors.FILE_IN_THE_WAY.replace("%s", resolvedPath); }); } - function findSync(/*String*/dir, /*RegExp*/pattern, /*Boolean*/recoursive) { - if (typeof pattern === 'boolean') { - recoursive = pattern; - pattern = undefined; - } - var files = []; - fs.readdirSync(dir).forEach(function(file) { - var path = pth.join(dir, file); - - if (fs.statSync(path).isDirectory() && recoursive) - files = files.concat(findSync(path, pattern, recoursive)); + mkdirSync(folder); +}; - if (!pattern || pattern.test(path)) { - files.push(pth.normalize(path) + (fs.statSync(path).isDirectory() ? 
PATH_SEPARATOR : "")); - } +Utils.prototype.writeFileTo = function (/*String*/ path, /*Buffer*/ content, /*Boolean*/ overwrite, /*Number*/ attr) { + const self = this; + if (self.fs.existsSync(path)) { + if (!overwrite) return false; // cannot overwrite - }); - return files; + var stat = self.fs.statSync(path); + if (stat.isDirectory()) { + return false; + } + } + var folder = pth.dirname(path); + if (!self.fs.existsSync(folder)) { + self.makeDir(folder); } - function readBigUInt64LE(/*Buffer*/buffer, /*int*/index) { - var slice = Buffer.from(buffer.slice(index, index + 8)); - slice.swap64(); + var fd; + try { + fd = self.fs.openSync(path, "w", 438); // 0666 + } catch (e) { + self.fs.chmodSync(path, 438); + fd = self.fs.openSync(path, "w", 438); + } + if (fd) { + try { + self.fs.writeSync(fd, content, 0, content.length, 0); + } finally { + self.fs.closeSync(fd); + } + } + self.fs.chmodSync(path, attr || 438); + return true; +}; - return parseInt(`0x${ slice.toString('hex') }`); +Utils.prototype.writeFileToAsync = function (/*String*/ path, /*Buffer*/ content, /*Boolean*/ overwrite, /*Number*/ attr, /*Function*/ callback) { + if (typeof attr === "function") { + callback = attr; + attr = undefined; } - return { - makeDir : function(/*String*/path) { - mkdirSync(path); - }, + const self = this; - crc32 : function(buf) { - if (typeof buf === 'string') { - buf = Buffer.alloc(buf.length, buf); - } - var b = Buffer.alloc(4); - if (!crcTable.length) { - for (var n = 0; n < 256; n++) { - var c = n; - for (var k = 8; --k >= 0;) // - if ((c & 1) !== 0) { c = 0xedb88320 ^ (c >>> 1); } else { c = c >>> 1; } - if (c < 0) { - b.writeInt32LE(c, 0); - c = b.readUInt32LE(0); - } - crcTable[n] = c; - } - } - var crc = 0, off = 0, len = buf.length, c1 = ~crc; - while(--len >= 0) c1 = crcTable[(c1 ^ buf[off++]) & 0xff] ^ (c1 >>> 8); - crc = ~c1; - b.writeInt32LE(crc & 0xffffffff, 0); - return b.readUInt32LE(0); - }, + self.fs.exists(path, function (exist) { + if (exist && !overwrite) return callback(false); - methodToString : function(/*Number*/method) { - switch (method) { - case Constants.STORED: - return 'STORED (' + method + ')'; - case Constants.DEFLATED: - return 'DEFLATED (' + method + ')'; - default: - return 'UNSUPPORTED (' + method + ')'; + self.fs.stat(path, function (err, stat) { + if (exist && stat.isDirectory()) { + return callback(false); } - }, + var folder = pth.dirname(path); + self.fs.exists(folder, function (exists) { + if (!exists) self.makeDir(folder); + + self.fs.open(path, "w", 438, function (err, fd) { + if (err) { + self.fs.chmod(path, 438, function () { + self.fs.open(path, "w", 438, function (err, fd) { + self.fs.write(fd, content, 0, content.length, 0, function () { + self.fs.close(fd, function () { + self.fs.chmod(path, attr || 438, function () { + callback(true); + }); + }); + }); + }); + }); + } else if (fd) { + self.fs.write(fd, content, 0, content.length, 0, function () { + self.fs.close(fd, function () { + self.fs.chmod(path, attr || 438, function () { + callback(true); + }); + }); + }); + } else { + self.fs.chmod(path, attr || 438, function () { + callback(true); + }); + } + }); + }); + }); + }); +}; - writeFileTo : function(/*String*/path, /*Buffer*/content, /*Boolean*/overwrite, /*Number*/attr) { - if (fs.existsSync(path)) { - if (!overwrite) - return false; // cannot overwrite +Utils.prototype.findFiles = function (/*String*/ path) { + const self = this; - var stat = fs.statSync(path); - if (stat.isDirectory()) { - return false; - } - } - var folder = pth.dirname(path); - 
if (!fs.existsSync(folder)) { - mkdirSync(folder); - } + function findSync(/*String*/ dir, /*RegExp*/ pattern, /*Boolean*/ recursive) { + if (typeof pattern === "boolean") { + recursive = pattern; + pattern = undefined; + } + let files = []; + self.fs.readdirSync(dir).forEach(function (file) { + var path = pth.join(dir, file); - var fd; - try { - fd = fs.openSync(path, 'w', 438); // 0666 - } catch(e) { - fs.chmodSync(path, 438); - fd = fs.openSync(path, 'w', 438); - } - if (fd) { - try { - fs.writeSync(fd, content, 0, content.length, 0); - } - catch (e){ - throw e; - } - finally { - fs.closeSync(fd); - } - } - fs.chmodSync(path, attr || 438); - return true; - }, + if (self.fs.statSync(path).isDirectory() && recursive) files = files.concat(findSync(path, pattern, recursive)); - writeFileToAsync : function(/*String*/path, /*Buffer*/content, /*Boolean*/overwrite, /*Number*/attr, /*Function*/callback) { - if(typeof attr === 'function') { - callback = attr; - attr = undefined; + if (!pattern || pattern.test(path)) { + files.push(pth.normalize(path) + (self.fs.statSync(path).isDirectory() ? self.sep : "")); } + }); + return files; + } - fs.exists(path, function(exists) { - if(exists && !overwrite) - return callback(false); + return findSync(path, undefined, true); +}; - fs.stat(path, function(err, stat) { - if(exists &&stat.isDirectory()) { - return callback(false); - } +Utils.prototype.getAttributes = function () {}; - var folder = pth.dirname(path); - fs.exists(folder, function(exists) { - if(!exists) - mkdirSync(folder); - - fs.open(path, 'w', 438, function(err, fd) { - if(err) { - fs.chmod(path, 438, function() { - fs.open(path, 'w', 438, function(err, fd) { - fs.write(fd, content, 0, content.length, 0, function() { - fs.close(fd, function() { - fs.chmod(path, attr || 438, function() { - callback(true); - }) - }); - }); - }); - }) - } else { - if(fd) { - fs.write(fd, content, 0, content.length, 0, function() { - fs.close(fd, function() { - fs.chmod(path, attr || 438, function() { - callback(true); - }) - }); - }); - } else { - fs.chmod(path, attr || 438, function() { - callback(true); - }) - } - } - }); - }) - }) - }) - }, +Utils.prototype.setAttributes = function () {}; - findFiles : function(/*String*/path) { - return findSync(path, true); - }, +// STATIC functions - getAttributes : function(/*String*/path) { +// crc32 single update (it is part of crc32) +Utils.crc32update = function (crc, byte) { + return crcTable[(crc ^ byte) & 0xff] ^ (crc >>> 8); +}; - }, +Utils.crc32 = function (buf) { + if (typeof buf === "string") { + buf = Buffer.from(buf, "utf8"); + } + // Generate crcTable + if (!crcTable.length) genCRCTable(); - setAttributes : function(/*String*/path) { + let len = buf.length; + let crc = ~0; + for (let off = 0; off < len; ) crc = Utils.crc32update(crc, buf[off++]); + // xor and cast as uint32 number + return ~crc >>> 0; +}; - }, +Utils.methodToString = function (/*Number*/ method) { + switch (method) { + case Constants.STORED: + return "STORED (" + method + ")"; + case Constants.DEFLATED: + return "DEFLATED (" + method + ")"; + default: + return "UNSUPPORTED (" + method + ")"; + } +}; - toBuffer : function(input) { - if (Buffer.isBuffer(input)) { - return input; - } else { - if (input.length === 0) { - return Buffer.alloc(0) - } - return Buffer.from(input, 'utf8'); - } - }, +// removes ".." 
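// --- Illustrative sketch (not from the patch): Utils.crc32 above is the table-driven
// --- form of the standard reflected CRC-32 (poly 0xEDB88320, init and final XOR with
// --- 0xFFFFFFFF). The bit-by-bit variant below computes the same thing and reproduces
// --- the well-known check value for "123456789".
function crc32(buf) {
    let crc = ~0;
    for (const byte of buf) {
        crc ^= byte;
        for (let k = 0; k < 8; k++) crc = crc & 1 ? (crc >>> 1) ^ 0xedb88320 : crc >>> 1;
    }
    return ~crc >>> 0;
}
console.log(crc32(Buffer.from("123456789")).toString(16)); // -> "cbf43926"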
style path elements +Utils.canonical = function (/*string*/ path) { + if (!path) return ""; + // trick normalize think path is absolute + var safeSuffix = pth.posix.normalize("/" + path.split("\\").join("/")); + return pth.join(".", safeSuffix); +}; - readBigUInt64LE, +// make abolute paths taking prefix as root folder +Utils.sanitize = function (/*string*/ prefix, /*string*/ name) { + prefix = pth.resolve(pth.normalize(prefix)); + var parts = name.split("/"); + for (var i = 0, l = parts.length; i < l; i++) { + var path = pth.normalize(pth.join(prefix, parts.slice(i, l).join(pth.sep))); + if (path.indexOf(prefix) === 0) { + return path; + } + } + return pth.normalize(pth.join(prefix, pth.basename(name))); +}; - Constants : Constants, - Errors : Errors +// converts buffer, Uint8Array, string types to buffer +Utils.toBuffer = function toBuffer(/*buffer, Uint8Array, string*/ input) { + if (Buffer.isBuffer(input)) { + return input; + } else if (input instanceof Uint8Array) { + return Buffer.from(input); + } else { + // expect string all other values are invalid and return empty buffer + return typeof input === "string" ? Buffer.from(input, "utf8") : Buffer.alloc(0); } -})(); +}; + +Utils.readBigUInt64LE = function (/*Buffer*/ buffer, /*int*/ index) { + var slice = Buffer.from(buffer.slice(index, index + 8)); + slice.swap64(); + + return parseInt(`0x${slice.toString("hex")}`); +}; + +Utils.isWin = isWin; // Do we have windows system +Utils.crcTable = crcTable; /***/ }), @@ -10102,8 +12860,7 @@ var Utils = __nccwpck_require__(5182), Constants = Utils.Constants, Methods = __nccwpck_require__(3928); -module.exports = function (/*Buffer*/input) { - +module.exports = function (/*Buffer*/ input) { var _entryHeader = new Headers.EntryHeader(), _entryName = Buffer.alloc(0), _comment = Buffer.alloc(0), @@ -10116,15 +12873,15 @@ module.exports = function (/*Buffer*/input) { return Buffer.alloc(0); } _entryHeader.loadDataHeaderFromBinary(input); - return input.slice(_entryHeader.realDataOffset, _entryHeader.realDataOffset + _entryHeader.compressedSize) + return input.slice(_entryHeader.realDataOffset, _entryHeader.realDataOffset + _entryHeader.compressedSize); } function crc32OK(data) { // if bit 3 (0x08) of the general-purpose flags field is set, then the CRC-32 and file sizes are not known when the header is written if ((_entryHeader.flags & 0x8) !== 0x8) { - if (Utils.crc32(data) !== _entryHeader.dataHeader.crc) { - return false; - } + if (Utils.crc32(data) !== _entryHeader.dataHeader.crc) { + return false; + } } else { // @TODO: load and check data descriptor header // The fields in the local header are filled with zero, and the CRC-32 and size are appended in a 12-byte structure @@ -10133,10 +12890,10 @@ module.exports = function (/*Buffer*/input) { return true; } - function decompress(/*Boolean*/async, /*Function*/callback, /*String, Buffer*/pass) { - if(typeof callback === 'undefined' && typeof async === 'string') { - pass=async; - async=void 0; + function decompress(/*Boolean*/ async, /*Function*/ callback, /*String, Buffer*/ pass) { + if (typeof callback === "undefined" && typeof async === "string") { + pass = async; + async = void 0; } if (_isDirectory) { if (async && callback) { @@ -10153,9 +12910,9 @@ module.exports = function (/*Buffer*/input) { return compressedData; } - if (_entryHeader.encripted){ - if ('string' !== typeof pass && !Buffer.isBuffer(pass)){ - throw new Error('ADM-ZIP: Incompatible password parameter'); + if (_entryHeader.encripted) { + if ("string" !== typeof pass && 
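// --- Illustrative sketch (not from the patch): what Utils.canonical above does to a
// --- hostile entry name (the usual "zip slip" guard), using only Node's path module.
// --- Printed separators assume a POSIX platform.
const pth = require("path");
const canonical = (p) => pth.join(".", pth.posix.normalize("/" + p.split("\\").join("/")));
console.log(canonical("../../etc/passwd"));  // -> "etc/passwd" (leading ".." segments dropped)
console.log(canonical("docs\\..\\secret"));  // -> "secret"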
!Buffer.isBuffer(pass)) { + throw new Error("ADM-ZIP: Incompatible password parameter"); } compressedData = Methods.ZipCrypto.decrypt(compressedData, _entryHeader, pass); } @@ -10166,30 +12923,33 @@ module.exports = function (/*Buffer*/input) { case Utils.Constants.STORED: compressedData.copy(data); if (!crc32OK(data)) { - if (async && callback) callback(data, Utils.Errors.BAD_CRC);//si added error + if (async && callback) callback(data, Utils.Errors.BAD_CRC); //si added error throw new Error(Utils.Errors.BAD_CRC); - } else {//si added otherwise did not seem to return data. + } else { + //si added otherwise did not seem to return data. if (async && callback) callback(data); return data; } case Utils.Constants.DEFLATED: var inflater = new Methods.Inflater(compressedData); if (!async) { - var result = inflater.inflate(data); + const result = inflater.inflate(data); result.copy(data, 0); if (!crc32OK(data)) { throw new Error(Utils.Errors.BAD_CRC + " " + _entryName.toString()); } return data; } else { - inflater.inflateAsync(function(result) { - result.copy(data, 0); - if (!crc32OK(data)) { - if (callback) callback(data, Utils.Errors.BAD_CRC); //si added error - } else { //si added otherwise did not seem to return data. - if (callback) callback(data); + inflater.inflateAsync(function (result) { + result.copy(result, 0); + if (callback) { + if (!crc32OK(result)) { + callback(result, Utils.Errors.BAD_CRC); //si added error + } else { + callback(result); + } } - }) + }); } break; default: @@ -10198,7 +12958,7 @@ module.exports = function (/*Buffer*/input) { } } - function compress(/*Boolean*/async, /*Function*/callback) { + function compress(/*Boolean*/ async, /*Function*/ callback) { if ((!uncompressedData || !uncompressedData.length) && Buffer.isBuffer(input)) { // no data set or the data wasn't changed to require recompression if (async && callback) callback(getCompressedDataFromZip()); @@ -10219,29 +12979,26 @@ module.exports = function (/*Buffer*/input) { return compressedData; default: case Utils.Constants.DEFLATED: - var deflater = new Methods.Deflater(uncompressedData); if (!async) { var deflated = deflater.deflate(); _entryHeader.compressedSize = deflated.length; return deflated; } else { - deflater.deflateAsync(function(data) { + deflater.deflateAsync(function (data) { compressedData = Buffer.alloc(data.length); _entryHeader.compressedSize = data.length; data.copy(compressedData); callback && callback(compressedData); - }) + }); } deflater = null; break; } + } else if (async && callback) { + callback(Buffer.alloc(0)); } else { - if (async && callback) { - callback(Buffer.alloc(0)); - } else { - return Buffer.alloc(0); - } + return Buffer.alloc(0); } } @@ -10252,14 +13009,14 @@ module.exports = function (/*Buffer*/input) { function parseExtra(data) { var offset = 0; var signature, size, part; - while(offset= Constants.EF_ZIP64_SCOMP) { + if (data.length >= Constants.EF_ZIP64_SCOMP) { size = readUInt64LE(data, Constants.EF_ZIP64_SUNCOMP); - if(_entryHeader.size === Constants.EF_ZIP64_OR_32) { + if (_entryHeader.size === Constants.EF_ZIP64_OR_32) { _entryHeader.size = size; } } - if(data.length >= Constants.EF_ZIP64_RHO) { + if (data.length >= Constants.EF_ZIP64_RHO) { compressedSize = readUInt64LE(data, Constants.EF_ZIP64_SCOMP); - if(_entryHeader.compressedSize === Constants.EF_ZIP64_OR_32) { + if (_entryHeader.compressedSize === Constants.EF_ZIP64_OR_32) { _entryHeader.compressedSize = compressedSize; } } - if(data.length >= Constants.EF_ZIP64_DSN) { + if (data.length >= 
Constants.EF_ZIP64_DSN) { offset = readUInt64LE(data, Constants.EF_ZIP64_RHO); - if(_entryHeader.offset === Constants.EF_ZIP64_OR_32) { + if (_entryHeader.offset === Constants.EF_ZIP64_OR_32) { _entryHeader.offset = offset; } } - if(data.length >= Constants.EF_ZIP64_DSN+4) { + if (data.length >= Constants.EF_ZIP64_DSN + 4) { diskNumStart = data.readUInt32LE(Constants.EF_ZIP64_DSN); - if(_entryHeader.diskNumStart === Constants.EF_ZIP64_OR_16) { + if (_entryHeader.diskNumStart === Constants.EF_ZIP64_OR_16) { _entryHeader.diskNumStart = diskNumStart; } } } - return { - get entryName () { return _entryName.toString(); }, - get rawEntryName() { return _entryName; }, - set entryName (val) { + get entryName() { + return _entryName.toString(); + }, + get rawEntryName() { + return _entryName; + }, + set entryName(val) { _entryName = Utils.toBuffer(val); var lastChar = _entryName[_entryName.length - 1]; - _isDirectory = (lastChar === 47) || (lastChar === 92); + _isDirectory = lastChar === 47 || lastChar === 92; _entryHeader.fileNameLength = _entryName.length; }, - get extra () { return _extra; }, - set extra (val) { + get extra() { + return _extra; + }, + set extra(val) { _extra = val; _entryHeader.extraLength = val.length; parseExtra(val); }, - get comment () { return _comment.toString(); }, - set comment (val) { + get comment() { + return _comment.toString(); + }, + set comment(val) { _comment = Utils.toBuffer(val); _entryHeader.commentLength = _comment.length; }, - get name () { var n = _entryName.toString(); return _isDirectory ? n.substr(n.length - 1).split("/").pop() : n.split("/").pop(); }, - get isDirectory () { return _isDirectory }, + get name() { + var n = _entryName.toString(); + return _isDirectory + ? n + .substr(n.length - 1) + .split("/") + .pop() + : n.split("/").pop(); + }, + get isDirectory() { + return _isDirectory; + }, - getCompressedData : function() { - return compress(false, null) + getCompressedData: function () { + return compress(false, null); }, - getCompressedDataAsync : function(/*Function*/callback) { - compress(true, callback) + getCompressedDataAsync: function (/*Function*/ callback) { + compress(true, callback); }, - setData : function(value) { + setData: function (value) { uncompressedData = Utils.toBuffer(value); if (!_isDirectory && uncompressedData.length) { _entryHeader.size = uncompressedData.length; _entryHeader.method = Utils.Constants.DEFLATED; _entryHeader.crc = Utils.crc32(value); _entryHeader.changed = true; - } else { // folders and blank files should be stored + } else { + // folders and blank files should be stored _entryHeader.method = Utils.Constants.STORED; } }, - getData : function(pass) { + getData: function (pass) { if (_entryHeader.changed) { return uncompressedData; } else { @@ -10350,7 +13125,7 @@ module.exports = function (/*Buffer*/input) { } }, - getDataAsync : function(/*Function*/callback, pass) { + getDataAsync: function (/*Function*/ callback, pass) { if (_entryHeader.changed) { callback(uncompressedData); } else { @@ -10358,10 +13133,14 @@ module.exports = function (/*Buffer*/input) { } }, - set attr(attr) { _entryHeader.attr = attr; }, - get attr() { return _entryHeader.attr; }, + set attr(attr) { + _entryHeader.attr = attr; + }, + get attr() { + return _entryHeader.attr; + }, - set header(/*Buffer*/data) { + set header(/*Buffer*/ data) { _entryHeader.loadFromBinary(data); }, @@ -10369,7 +13148,7 @@ module.exports = function (/*Buffer*/input) { return _entryHeader; }, - packHeader : function() { + packHeader: function () { // 1. 
create header (buffer) var header = _entryHeader.entryHeaderToBinary(); var addpos = Utils.Constants.CENHDR; @@ -10388,18 +13167,26 @@ module.exports = function (/*Buffer*/input) { return header; }, - toString : function() { - return '{\n' + - '\t"entryName" : "' + _entryName.toString() + "\",\n" + - '\t"name" : "' + (_isDirectory ? _entryName.toString().replace(/\/$/, '').split("/").pop() : _entryName.toString().split("/").pop()) + "\",\n" + - '\t"comment" : "' + _comment.toString() + "\",\n" + - '\t"isDirectory" : ' + _isDirectory + ",\n" + - '\t"header" : ' + _entryHeader.toString().replace(/\t/mg, "\t\t").replace(/}/mg, "\t}") + ",\n" + - '\t"compressedData" : <' + (input && input.length + " bytes buffer" || "null") + ">\n" + - '\t"data" : <' + (uncompressedData && uncompressedData.length + " bytes buffer" || "null") + ">\n" + - '}'; + toJSON: function () { + const bytes = function (nr) { + return "<" + ((nr && nr.length + " bytes buffer") || "null") + ">"; + }; + + return { + entryName: this.entryName, + name: this.name, + comment: this.comment, + isDirectory: this.isDirectory, + header: _entryHeader.toJSON(), + compressedData: bytes(input), + data: bytes(uncompressedData) + }; + }, + + toString: function () { + return JSON.stringify(this.toJSON(), null, "\t"); } - } + }; }; @@ -10408,413 +13195,389 @@ module.exports = function (/*Buffer*/input) { /***/ 7744: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var ZipEntry = __nccwpck_require__(4057), - Headers = __nccwpck_require__(4958), - Utils = __nccwpck_require__(5182); - -module.exports = function (/*String|Buffer*/input, /*Number*/inputType) { - var entryList = [], - entryTable = {}, - _comment = Buffer.alloc(0), - filename = "", - fs = Utils.FileSystem.require(), - inBuffer = null, - mainHeader = new Headers.MainHeader(), - loadedEntries = false; - - if (inputType === Utils.Constants.FILE) { - // is a filename - filename = input; - inBuffer = fs.readFileSync(filename); - readMainHeader(); - } else if (inputType === Utils.Constants.BUFFER) { - // is a memory buffer - inBuffer = input; - readMainHeader(); - } else { - // none. 
is a new file - loadedEntries = true; - } - - function iterateEntries(callback) { - const totalEntries = mainHeader.diskEntries; // total number of entries - let index = mainHeader.offset; // offset of first CEN header - - for (let i = 0; i < totalEntries; i++) { - let tmp = index; - const entry = new ZipEntry(inBuffer); - - entry.header = inBuffer.slice(tmp, tmp += Utils.Constants.CENHDR); - entry.entryName = inBuffer.slice(tmp, tmp += entry.header.fileNameLength); - - index += entry.header.entryHeaderSize; - - callback(entry); - } - } - - function readEntries() { - loadedEntries = true; - entryTable = {}; - entryList = new Array(mainHeader.diskEntries); // total number of entries - var index = mainHeader.offset; // offset of first CEN header - for (var i = 0; i < entryList.length; i++) { - - var tmp = index, - entry = new ZipEntry(inBuffer); - entry.header = inBuffer.slice(tmp, tmp += Utils.Constants.CENHDR); +const ZipEntry = __nccwpck_require__(4057); +const Headers = __nccwpck_require__(4958); +const Utils = __nccwpck_require__(5182); - entry.entryName = inBuffer.slice(tmp, tmp += entry.header.fileNameLength); +module.exports = function (/*Buffer|null*/ inBuffer, /** object */ options) { + var entryList = [], + entryTable = {}, + _comment = Buffer.alloc(0), + mainHeader = new Headers.MainHeader(), + loadedEntries = false; - if (entry.header.extraLength) { - entry.extra = inBuffer.slice(tmp, tmp += entry.header.extraLength); - } + // assign options + const opts = Object.assign(Object.create(null), options); - if (entry.header.commentLength) - entry.comment = inBuffer.slice(tmp, tmp + entry.header.commentLength); + const { noSort } = opts; - index += entry.header.entryHeaderSize; + if (inBuffer) { + // is a memory buffer + readMainHeader(opts.readEntries); + } else { + // none. 
is a new file + loadedEntries = true; + } - entryList[i] = entry; - entryTable[entry.entryName] = entry; - } - } + function iterateEntries(callback) { + const totalEntries = mainHeader.diskEntries; // total number of entries + let index = mainHeader.offset; // offset of first CEN header - function readMainHeader() { - var i = inBuffer.length - Utils.Constants.ENDHDR, // END header size - max = Math.max(0, i - 0xFFFF), // 0xFFFF is the max zip file comment length - n = max, - endStart = inBuffer.length, - endOffset = -1, // Start offset of the END header - commentEnd = 0; - - for (i; i >= n; i--) { - if (inBuffer[i] !== 0x50) continue; // quick check that the byte is 'P' - if (inBuffer.readUInt32LE(i) === Utils.Constants.ENDSIG) { // "PK\005\006" - endOffset = i; - commentEnd = i; - endStart = i + Utils.Constants.ENDHDR; - // We already found a regular signature, let's look just a bit further to check if there's any zip64 signature - n = i - Utils.Constants.END64HDR; - continue; - } + for (let i = 0; i < totalEntries; i++) { + let tmp = index; + const entry = new ZipEntry(inBuffer); - if (inBuffer.readUInt32LE(i) === Utils.Constants.END64SIG) { - // Found a zip64 signature, let's continue reading the whole zip64 record - n = max; - continue; - } + entry.header = inBuffer.slice(tmp, (tmp += Utils.Constants.CENHDR)); + entry.entryName = inBuffer.slice(tmp, (tmp += entry.header.fileNameLength)); - if (inBuffer.readUInt32LE(i) == Utils.Constants.ZIP64SIG) { - // Found the zip64 record, let's determine it's size - endOffset = i; - endStart = i + Utils.readBigUInt64LE(inBuffer, i + Utils.Constants.ZIP64SIZE) + Utils.Constants.ZIP64LEAD; - break; - } - } + index += entry.header.entryHeaderSize; - if (!~endOffset) - throw new Error(Utils.Errors.INVALID_FORMAT); + callback(entry); + } + } - mainHeader.loadFromBinary(inBuffer.slice(endOffset, endStart)); - if (mainHeader.commentLength) { - _comment = inBuffer.slice(commentEnd + Utils.Constants.ENDHDR); - } - // readEntries(); - } + function readEntries() { + loadedEntries = true; + entryTable = {}; + entryList = new Array(mainHeader.diskEntries); // total number of entries + var index = mainHeader.offset; // offset of first CEN header + for (var i = 0; i < entryList.length; i++) { + var tmp = index, + entry = new ZipEntry(inBuffer); + entry.header = inBuffer.slice(tmp, (tmp += Utils.Constants.CENHDR)); - return { - /** - * Returns an array of ZipEntry objects existent in the current opened archive - * @return Array - */ - get entries() { - if (!loadedEntries) { - readEntries(); - } - return entryList; - }, + entry.entryName = inBuffer.slice(tmp, (tmp += entry.header.fileNameLength)); - /** - * Archive comment - * @return {String} - */ - get comment() { - return _comment.toString(); - }, - set comment(val) { - _comment = Utils.toBuffer(val); - mainHeader.commentLength = _comment.length; - }, + if (entry.header.extraLength) { + entry.extra = inBuffer.slice(tmp, (tmp += entry.header.extraLength)); + } - getEntryCount: function() { - if (!loadedEntries) { - return mainHeader.diskEntries; - } + if (entry.header.commentLength) entry.comment = inBuffer.slice(tmp, tmp + entry.header.commentLength); - return entryList.length; - }, + index += entry.header.entryHeaderSize; - forEach: function(callback) { - if (!loadedEntries) { - iterateEntries(callback); - return; - } + entryList[i] = entry; + entryTable[entry.entryName] = entry; + } + } - entryList.forEach(callback); - }, + function readMainHeader(/*Boolean*/ readNow) { + var i = inBuffer.length - 
Utils.Constants.ENDHDR, // END header size + max = Math.max(0, i - 0xffff), // 0xFFFF is the max zip file comment length + n = max, + endStart = inBuffer.length, + endOffset = -1, // Start offset of the END header + commentEnd = 0; - /** - * Returns a reference to the entry with the given name or null if entry is inexistent - * - * @param entryName - * @return ZipEntry - */ - getEntry: function (/*String*/entryName) { - if (!loadedEntries) { - readEntries(); - } - return entryTable[entryName] || null; - }, + for (i; i >= n; i--) { + if (inBuffer[i] !== 0x50) continue; // quick check that the byte is 'P' + if (inBuffer.readUInt32LE(i) === Utils.Constants.ENDSIG) { + // "PK\005\006" + endOffset = i; + commentEnd = i; + endStart = i + Utils.Constants.ENDHDR; + // We already found a regular signature, let's look just a bit further to check if there's any zip64 signature + n = i - Utils.Constants.END64HDR; + continue; + } - /** - * Adds the given entry to the entry list - * - * @param entry - */ - setEntry: function (/*ZipEntry*/entry) { - if (!loadedEntries) { - readEntries(); - } - entryList.push(entry); - entryTable[entry.entryName] = entry; - mainHeader.totalEntries = entryList.length; - }, + if (inBuffer.readUInt32LE(i) === Utils.Constants.END64SIG) { + // Found a zip64 signature, let's continue reading the whole zip64 record + n = max; + continue; + } - /** - * Removes the entry with the given name from the entry list. - * - * If the entry is a directory, then all nested files and directories will be removed - * @param entryName - */ - deleteEntry: function (/*String*/entryName) { - if (!loadedEntries) { - readEntries(); - } - var entry = entryTable[entryName]; - if (entry && entry.isDirectory) { - var _self = this; - this.getEntryChildren(entry).forEach(function (child) { - if (child.entryName !== entryName) { - _self.deleteEntry(child.entryName) - } - }) - } - entryList.splice(entryList.indexOf(entry), 1); - delete(entryTable[entryName]); - mainHeader.totalEntries = entryList.length; - }, + if (inBuffer.readUInt32LE(i) === Utils.Constants.ZIP64SIG) { + // Found the zip64 record, let's determine it's size + endOffset = i; + endStart = i + Utils.readBigUInt64LE(inBuffer, i + Utils.Constants.ZIP64SIZE) + Utils.Constants.ZIP64LEAD; + break; + } + } - /** - * Iterates and returns all nested files and directories of the given entry - * - * @param entry - * @return Array - */ - getEntryChildren: function (/*ZipEntry*/entry) { - if (!loadedEntries) { - readEntries(); - } - if (entry.isDirectory) { - var list = [], - name = entry.entryName, - len = name.length; - - entryList.forEach(function (zipEntry) { - if (zipEntry.entryName.substr(0, len) === name) { - list.push(zipEntry); - } - }); - return list; - } - return [] - }, + if (!~endOffset) throw new Error(Utils.Errors.INVALID_FORMAT); - /** - * Returns the zip file - * - * @return Buffer - */ - compressToBuffer: function () { - if (!loadedEntries) { - readEntries(); - } - if (entryList.length > 1) { - entryList.sort(function (a, b) { - var nameA = a.entryName.toLowerCase(); - var nameB = b.entryName.toLowerCase(); - if (nameA < nameB) { - return -1 - } - if (nameA > nameB) { - return 1 - } - return 0; - }); - } + mainHeader.loadFromBinary(inBuffer.slice(endOffset, endStart)); + if (mainHeader.commentLength) { + _comment = inBuffer.slice(commentEnd + Utils.Constants.ENDHDR); + } + if (readNow) readEntries(); + } - var totalSize = 0, - dataBlock = [], - entryHeaders = [], - dindex = 0; - - mainHeader.size = 0; - mainHeader.offset = 0; - - 
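// --- Illustrative sketch (not from the patch): the backwards scan readMainHeader above
// --- performs to locate the end-of-central-directory record. 0x06054b50 is the
// --- "PK\x05\x06" signature; the record is 22 bytes plus an optional comment of up to
// --- 0xFFFF bytes, which bounds how far back the scan has to look.
function findEOCD(buf) {
    const ENDSIG = 0x06054b50, ENDHDR = 22;
    const stop = Math.max(0, buf.length - ENDHDR - 0xffff);
    for (let i = buf.length - ENDHDR; i >= stop; i--) {
        if (buf[i] === 0x50 && buf.readUInt32LE(i) === ENDSIG) return i;
    }
    return -1;
}

// A minimal empty archive is nothing but the 22-byte EOCD record:
const emptyZip = Buffer.alloc(22);
emptyZip.writeUInt32LE(0x06054b50, 0);
console.log(findEOCD(emptyZip)); // -> 0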
entryList.forEach(function (entry) { - // compress data and set local and entry header accordingly. Reason why is called first - var compressedData = entry.getCompressedData(); - // data header - entry.header.offset = dindex; - var dataHeader = entry.header.dataHeaderToBinary(); - var entryNameLen = entry.rawEntryName.length; - var extra = entry.extra.toString(); - var postHeader = Buffer.alloc(entryNameLen + extra.length); - entry.rawEntryName.copy(postHeader, 0); - postHeader.fill(extra, entryNameLen); - - var dataLength = dataHeader.length + postHeader.length + compressedData.length; - - dindex += dataLength; - - dataBlock.push(dataHeader); - dataBlock.push(postHeader); - dataBlock.push(compressedData); - - var entryHeader = entry.packHeader(); - entryHeaders.push(entryHeader); - mainHeader.size += entryHeader.length; - totalSize += (dataLength + entryHeader.length); - }); + function sortEntries() { + if (entryList.length > 1 && !noSort) { + entryList.sort((a, b) => a.entryName.toLowerCase().localeCompare(b.entryName.toLowerCase())); + } + } - totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length - // point to end of data and beginning of central directory first record - mainHeader.offset = dindex; + return { + /** + * Returns an array of ZipEntry objects existent in the current opened archive + * @return Array + */ + get entries() { + if (!loadedEntries) { + readEntries(); + } + return entryList; + }, - dindex = 0; - var outBuffer = Buffer.alloc(totalSize); - dataBlock.forEach(function (content) { - content.copy(outBuffer, dindex); // write data blocks - dindex += content.length; - }); - entryHeaders.forEach(function (content) { - content.copy(outBuffer, dindex); // write central directory entries - dindex += content.length; - }); + /** + * Archive comment + * @return {String} + */ + get comment() { + return _comment.toString(); + }, + set comment(val) { + _comment = Utils.toBuffer(val); + mainHeader.commentLength = _comment.length; + }, - var mh = mainHeader.toBinary(); - if (_comment) { - Buffer.from(_comment).copy(mh, Utils.Constants.ENDHDR); // add zip file comment - } + getEntryCount: function () { + if (!loadedEntries) { + return mainHeader.diskEntries; + } - mh.copy(outBuffer, dindex); // write main header + return entryList.length; + }, - return outBuffer - }, + forEach: function (callback) { + if (!loadedEntries) { + iterateEntries(callback); + return; + } - toAsyncBuffer: function (/*Function*/onSuccess, /*Function*/onFail, /*Function*/onItemStart, /*Function*/onItemEnd) { - if (!loadedEntries) { - readEntries(); - } - if (entryList.length > 1) { - entryList.sort(function (a, b) { - var nameA = a.entryName.toLowerCase(); - var nameB = b.entryName.toLowerCase(); - if (nameA > nameB) { - return -1 - } - if (nameA < nameB) { - return 1 - } - return 0; - }); - } + entryList.forEach(callback); + }, - var totalSize = 0, - dataBlock = [], - entryHeaders = [], - dindex = 0; - - mainHeader.size = 0; - mainHeader.offset = 0; - - var compress = function (entryList) { - var self = arguments.callee; - if (entryList.length) { - var entry = entryList.pop(); - var name = entry.entryName + entry.extra.toString(); - if (onItemStart) onItemStart(name); - entry.getCompressedDataAsync(function (compressedData) { - if (onItemEnd) onItemEnd(name); - - entry.header.offset = dindex; - // data header - var dataHeader = entry.header.dataHeaderToBinary(); - var postHeader; - try { - postHeader = Buffer.alloc(name.length, name); // using alloc will work on node 5.x+ - } 
catch(e){ - postHeader = new Buffer(name); // use deprecated method if alloc fails... - } - var dataLength = dataHeader.length + postHeader.length + compressedData.length; + /** + * Returns a reference to the entry with the given name or null if entry is inexistent + * + * @param entryName + * @return ZipEntry + */ + getEntry: function (/*String*/ entryName) { + if (!loadedEntries) { + readEntries(); + } + return entryTable[entryName] || null; + }, - dindex += dataLength; + /** + * Adds the given entry to the entry list + * + * @param entry + */ + setEntry: function (/*ZipEntry*/ entry) { + if (!loadedEntries) { + readEntries(); + } + entryList.push(entry); + entryTable[entry.entryName] = entry; + mainHeader.totalEntries = entryList.length; + }, - dataBlock.push(dataHeader); - dataBlock.push(postHeader); - dataBlock.push(compressedData); + /** + * Removes the entry with the given name from the entry list. + * + * If the entry is a directory, then all nested files and directories will be removed + * @param entryName + */ + deleteEntry: function (/*String*/ entryName) { + if (!loadedEntries) { + readEntries(); + } + var entry = entryTable[entryName]; + if (entry && entry.isDirectory) { + var _self = this; + this.getEntryChildren(entry).forEach(function (child) { + if (child.entryName !== entryName) { + _self.deleteEntry(child.entryName); + } + }); + } + entryList.splice(entryList.indexOf(entry), 1); + delete entryTable[entryName]; + mainHeader.totalEntries = entryList.length; + }, - var entryHeader = entry.packHeader(); - entryHeaders.push(entryHeader); - mainHeader.size += entryHeader.length; - totalSize += (dataLength + entryHeader.length); + /** + * Iterates and returns all nested files and directories of the given entry + * + * @param entry + * @return Array + */ + getEntryChildren: function (/*ZipEntry*/ entry) { + if (!loadedEntries) { + readEntries(); + } + if (entry && entry.isDirectory) { + const list = []; + const name = entry.entryName; + const len = name.length; + + entryList.forEach(function (zipEntry) { + if (zipEntry.entryName.substr(0, len) === name) { + list.push(zipEntry); + } + }); + return list; + } + return []; + }, - if (entryList.length) { - self(entryList); - } else { + /** + * Returns the zip file + * + * @return Buffer + */ + compressToBuffer: function () { + if (!loadedEntries) { + readEntries(); + } + sortEntries(); + + const dataBlock = []; + const entryHeaders = []; + let totalSize = 0; + let dindex = 0; + + mainHeader.size = 0; + mainHeader.offset = 0; + + for (const entry of entryList) { + // compress data and set local and entry header accordingly. Reason why is called first + const compressedData = entry.getCompressedData(); + // 1. construct data header + entry.header.offset = dindex; + const dataHeader = entry.header.dataHeaderToBinary(); + const entryNameLen = entry.rawEntryName.length; + // 1.2. postheader - data after data header + const postHeader = Buffer.alloc(entryNameLen + entry.extra.length); + entry.rawEntryName.copy(postHeader, 0); + postHeader.copy(entry.extra, entryNameLen); + + // 2. offsets + const dataLength = dataHeader.length + postHeader.length + compressedData.length; + dindex += dataLength; + + // 3. store values in sequence + dataBlock.push(dataHeader); + dataBlock.push(postHeader); + dataBlock.push(compressedData); + + // 4. construct entry header + const entryHeader = entry.packHeader(); + entryHeaders.push(entryHeader); + // 5. 
update main header + mainHeader.size += entryHeader.length; + totalSize += dataLength + entryHeader.length; + } + + totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length + // point to end of data and beginning of central directory first record + mainHeader.offset = dindex; + + dindex = 0; + const outBuffer = Buffer.alloc(totalSize); + // write data blocks + for (const content of dataBlock) { + content.copy(outBuffer, dindex); + dindex += content.length; + } + + // write central directory entries + for (const content of entryHeaders) { + content.copy(outBuffer, dindex); + dindex += content.length; + } + + // write main header + const mh = mainHeader.toBinary(); + if (_comment) { + _comment.copy(mh, Utils.Constants.ENDHDR); // add zip file comment + } + mh.copy(outBuffer, dindex); + + return outBuffer; + }, + toAsyncBuffer: function (/*Function*/ onSuccess, /*Function*/ onFail, /*Function*/ onItemStart, /*Function*/ onItemEnd) { + try { + if (!loadedEntries) { + readEntries(); + } + sortEntries(); + + const dataBlock = []; + const entryHeaders = []; + let totalSize = 0; + let dindex = 0; + + mainHeader.size = 0; + mainHeader.offset = 0; + + const compress2Buffer = function (entryLists) { + if (entryLists.length) { + const entry = entryLists.pop(); + const name = entry.entryName + entry.extra.toString(); + if (onItemStart) onItemStart(name); + entry.getCompressedDataAsync(function (compressedData) { + if (onItemEnd) onItemEnd(name); + + entry.header.offset = dindex; + // data header + const dataHeader = entry.header.dataHeaderToBinary(); + const postHeader = Buffer.alloc(name.length, name); + const dataLength = dataHeader.length + postHeader.length + compressedData.length; + + dindex += dataLength; + + dataBlock.push(dataHeader); + dataBlock.push(postHeader); + dataBlock.push(compressedData); + + const entryHeader = entry.packHeader(); + entryHeaders.push(entryHeader); + mainHeader.size += entryHeader.length; + totalSize += dataLength + entryHeader.length; + + compress2Buffer(entryLists); + }); + } else { + totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length + // point to end of data and beginning of central directory first record + mainHeader.offset = dindex; + + dindex = 0; + const outBuffer = Buffer.alloc(totalSize); + dataBlock.forEach(function (content) { + content.copy(outBuffer, dindex); // write data blocks + dindex += content.length; + }); + entryHeaders.forEach(function (content) { + content.copy(outBuffer, dindex); // write central directory entries + dindex += content.length; + }); - totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length - // point to end of data and beginning of central directory first record - mainHeader.offset = dindex; - - dindex = 0; - var outBuffer = Buffer.alloc(totalSize); - dataBlock.forEach(function (content) { - content.copy(outBuffer, dindex); // write data blocks - dindex += content.length; - }); - entryHeaders.forEach(function (content) { - content.copy(outBuffer, dindex); // write central directory entries - dindex += content.length; - }); - - var mh = mainHeader.toBinary(); - if (_comment) { - _comment.copy(mh, Utils.Constants.ENDHDR); // add zip file comment - } + const mh = mainHeader.toBinary(); + if (_comment) { + _comment.copy(mh, Utils.Constants.ENDHDR); // add zip file comment + } - mh.copy(outBuffer, dindex); // write main header + mh.copy(outBuffer, dindex); // write main header - onSuccess(outBuffer); - } - }); - } - }; + onSuccess(outBuffer); + } + }; - 
compress(entryList); - } - } + compress2Buffer(entryList); + } catch (e) { + onFail(e); + } + } + }; }; @@ -10823,63 +13586,67 @@ module.exports = function (/*String|Buffer*/input, /*Number*/inputType) { /***/ 3682: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var register = __nccwpck_require__(4670) -var addHook = __nccwpck_require__(5549) -var removeHook = __nccwpck_require__(6819) +var register = __nccwpck_require__(4670); +var addHook = __nccwpck_require__(5549); +var removeHook = __nccwpck_require__(6819); // bind with array of arguments: https://stackoverflow.com/a/21792913 -var bind = Function.bind -var bindable = bind.bind(bind) - -function bindApi (hook, state, name) { - var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state]) - hook.api = { remove: removeHookRef } - hook.remove = removeHookRef - - ;['before', 'error', 'after', 'wrap'].forEach(function (kind) { - var args = name ? [state, kind, name] : [state, kind] - hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args) - }) +var bind = Function.bind; +var bindable = bind.bind(bind); + +function bindApi(hook, state, name) { + var removeHookRef = bindable(removeHook, null).apply( + null, + name ? [state, name] : [state] + ); + hook.api = { remove: removeHookRef }; + hook.remove = removeHookRef; + ["before", "error", "after", "wrap"].forEach(function (kind) { + var args = name ? [state, kind, name] : [state, kind]; + hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args); + }); } -function HookSingular () { - var singularHookName = 'h' +function HookSingular() { + var singularHookName = "h"; var singularHookState = { - registry: {} - } - var singularHook = register.bind(null, singularHookState, singularHookName) - bindApi(singularHook, singularHookState, singularHookName) - return singularHook + registry: {}, + }; + var singularHook = register.bind(null, singularHookState, singularHookName); + bindApi(singularHook, singularHookState, singularHookName); + return singularHook; } -function HookCollection () { +function HookCollection() { var state = { - registry: {} - } + registry: {}, + }; - var hook = register.bind(null, state) - bindApi(hook, state) + var hook = register.bind(null, state); + bindApi(hook, state); - return hook + return hook; } -var collectionHookDeprecationMessageDisplayed = false -function Hook () { +var collectionHookDeprecationMessageDisplayed = false; +function Hook() { if (!collectionHookDeprecationMessageDisplayed) { - console.warn('[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4') - collectionHookDeprecationMessageDisplayed = true + console.warn( + '[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". 
Read more: https://git.io/upgrade-before-after-hook-to-1.4' + ); + collectionHookDeprecationMessageDisplayed = true; } - return HookCollection() + return HookCollection(); } -Hook.Singular = HookSingular.bind() -Hook.Collection = HookCollection.bind() +Hook.Singular = HookSingular.bind(); +Hook.Collection = HookCollection.bind(); -module.exports = Hook +module.exports = Hook; // expose constructors as a named property for TypeScript -module.exports.Hook = Hook -module.exports.Singular = Hook.Singular -module.exports.Collection = Hook.Collection +module.exports.Hook = Hook; +module.exports.Singular = Hook.Singular; +module.exports.Collection = Hook.Collection; /***/ }), @@ -10887,51 +13654,51 @@ module.exports.Collection = Hook.Collection /***/ 5549: /***/ ((module) => { -module.exports = addHook +module.exports = addHook; -function addHook (state, kind, name, hook) { - var orig = hook +function addHook(state, kind, name, hook) { + var orig = hook; if (!state.registry[name]) { - state.registry[name] = [] + state.registry[name] = []; } - if (kind === 'before') { + if (kind === "before") { hook = function (method, options) { return Promise.resolve() .then(orig.bind(null, options)) - .then(method.bind(null, options)) - } + .then(method.bind(null, options)); + }; } - if (kind === 'after') { + if (kind === "after") { hook = function (method, options) { - var result + var result; return Promise.resolve() .then(method.bind(null, options)) .then(function (result_) { - result = result_ - return orig(result, options) + result = result_; + return orig(result, options); }) .then(function () { - return result - }) - } + return result; + }); + }; } - if (kind === 'error') { + if (kind === "error") { hook = function (method, options) { return Promise.resolve() .then(method.bind(null, options)) .catch(function (error) { - return orig(error, options) - }) - } + return orig(error, options); + }); + }; } state.registry[name].push({ hook: hook, - orig: orig - }) + orig: orig, + }); } @@ -10940,33 +13707,32 @@ function addHook (state, kind, name, hook) { /***/ 4670: /***/ ((module) => { -module.exports = register +module.exports = register; -function register (state, name, method, options) { - if (typeof method !== 'function') { - throw new Error('method for before hook must be a function') +function register(state, name, method, options) { + if (typeof method !== "function") { + throw new Error("method for before hook must be a function"); } if (!options) { - options = {} + options = {}; } if (Array.isArray(name)) { return name.reverse().reduce(function (callback, name) { - return register.bind(null, state, name, callback, options) - }, method)() + return register.bind(null, state, name, callback, options); + }, method)(); } - return Promise.resolve() - .then(function () { - if (!state.registry[name]) { - return method(options) - } + return Promise.resolve().then(function () { + if (!state.registry[name]) { + return method(options); + } - return (state.registry[name]).reduce(function (method, registered) { - return registered.hook.bind(null, method, options) - }, method)() - }) + return state.registry[name].reduce(function (method, registered) { + return registered.hook.bind(null, method, options); + }, method)(); + }); } @@ -10975,5666 +13741,5180 @@ function register (state, name, method, options) { /***/ 6819: /***/ ((module) => { -module.exports = removeHook +module.exports = removeHook; -function removeHook (state, name, method) { +function removeHook(state, name, method) { if (!state.registry[name]) { - 
return + return; } var index = state.registry[name] - .map(function (registered) { return registered.orig }) - .indexOf(method) + .map(function (registered) { + return registered.orig; + }) + .indexOf(method); if (index === -1) { - return + return; } - state.registry[name].splice(index, 1) + state.registry[name].splice(index, 1); } /***/ }), -/***/ 2286: +/***/ 610: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -const { - V4MAPPED, - ADDRCONFIG, - ALL, - promises: { - Resolver: AsyncResolver - }, - lookup: dnsLookup -} = __nccwpck_require__(881); -const {promisify} = __nccwpck_require__(1669); -const os = __nccwpck_require__(2087); -const kCacheableLookupCreateConnection = Symbol('cacheableLookupCreateConnection'); -const kCacheableLookupInstance = Symbol('cacheableLookupInstance'); -const kExpires = Symbol('expires'); +const stringify = __nccwpck_require__(8750); +const compile = __nccwpck_require__(9434); +const expand = __nccwpck_require__(5873); +const parse = __nccwpck_require__(6477); -const supportsALL = typeof ALL === 'number'; +/** + * Expand the given pattern or create a regex-compatible string. + * + * ```js + * const braces = require('braces'); + * console.log(braces('{a,b,c}', { compile: true })); //=> ['(a|b|c)'] + * console.log(braces('{a,b,c}')); //=> ['a', 'b', 'c'] + * ``` + * @param {String} `str` + * @param {Object} `options` + * @return {String} + * @api public + */ -const verifyAgent = agent => { - if (!(agent && typeof agent.createConnection === 'function')) { - throw new Error('Expected an Agent instance as the first argument'); - } -}; +const braces = (input, options = {}) => { + let output = []; -const map4to6 = entries => { - for (const entry of entries) { - if (entry.family === 6) { - continue; - } + if (Array.isArray(input)) { + for (let pattern of input) { + let result = braces.create(pattern, options); + if (Array.isArray(result)) { + output.push(...result); + } else { + output.push(result); + } + } + } else { + output = [].concat(braces.create(input, options)); + } - entry.address = `::ffff:${entry.address}`; - entry.family = 6; - } + if (options && options.expand === true && options.nodupes === true) { + output = [...new Set(output)]; + } + return output; }; -const getIfaceInfo = () => { - let has4 = false; - let has6 = false; - - for (const device of Object.values(os.networkInterfaces())) { - for (const iface of device) { - if (iface.internal) { - continue; - } +/** + * Parse the given `str` with the given `options`. + * + * ```js + * // braces.parse(pattern, [, options]); + * const ast = braces.parse('a/{b,c}/d'); + * console.log(ast); + * ``` + * @param {String} pattern Brace pattern to parse + * @param {Object} options + * @return {Object} Returns an AST + * @api public + */ - if (iface.family === 'IPv6') { - has6 = true; - } else { - has4 = true; - } +braces.parse = (input, options = {}) => parse(input, options); - if (has4 && has6) { - return {has4, has6}; - } - } - } +/** + * Creates a braces string from an AST, or an AST node. + * + * ```js + * const braces = require('braces'); + * let ast = braces.parse('foo/{a,b}/bar'); + * console.log(stringify(ast.nodes[2])); //=> '{a,b}' + * ``` + * @param {String} `input` Brace pattern or AST. + * @param {Object} `options` + * @return {Array} Returns an array of expanded values. 
+ * @api public + */ - return {has4, has6}; +braces.stringify = (input, options = {}) => { + if (typeof input === 'string') { + return stringify(braces.parse(input, options), options); + } + return stringify(input, options); }; -const isIterable = map => { - return Symbol.iterator in map; +/** + * Compiles a brace pattern into a regex-compatible, optimized string. + * This method is called by the main [braces](#braces) function by default. + * + * ```js + * const braces = require('braces'); + * console.log(braces.compile('a/{b,c}/d')); + * //=> ['a/(b|c)/d'] + * ``` + * @param {String} `input` Brace pattern or AST. + * @param {Object} `options` + * @return {Array} Returns an array of expanded values. + * @api public + */ + +braces.compile = (input, options = {}) => { + if (typeof input === 'string') { + input = braces.parse(input, options); + } + return compile(input, options); }; -const ttl = {ttl: true}; -const all = {all: true}; +/** + * Expands a brace pattern into an array. This method is called by the + * main [braces](#braces) function when `options.expand` is true. Before + * using this method it's recommended that you read the [performance notes](#performance)) + * and advantages of using [.compile](#compile) instead. + * + * ```js + * const braces = require('braces'); + * console.log(braces.expand('a/{b,c}/d')); + * //=> ['a/b/d', 'a/c/d']; + * ``` + * @param {String} `pattern` Brace pattern + * @param {Object} `options` + * @return {Array} Returns an array of expanded values. + * @api public + */ -class CacheableLookup { - constructor({ - cache = new Map(), - maxTtl = Infinity, - fallbackDuration = 3600, - errorTtl = 0.15, - resolver = new AsyncResolver(), - lookup = dnsLookup - } = {}) { - this.maxTtl = maxTtl; - this.errorTtl = errorTtl; +braces.expand = (input, options = {}) => { + if (typeof input === 'string') { + input = braces.parse(input, options); + } - this._cache = cache; - this._resolver = resolver; - this._dnsLookup = promisify(lookup); + let result = expand(input, options); - if (this._resolver instanceof AsyncResolver) { - this._resolve4 = this._resolver.resolve4.bind(this._resolver); - this._resolve6 = this._resolver.resolve6.bind(this._resolver); - } else { - this._resolve4 = promisify(this._resolver.resolve4.bind(this._resolver)); - this._resolve6 = promisify(this._resolver.resolve6.bind(this._resolver)); - } + // filter out empty strings if specified + if (options.noempty === true) { + result = result.filter(Boolean); + } - this._iface = getIfaceInfo(); + // filter out duplicates if specified + if (options.nodupes === true) { + result = [...new Set(result)]; + } - this._pending = {}; - this._nextRemovalTime = false; - this._hostnamesToFallback = new Set(); + return result; +}; - if (fallbackDuration < 1) { - this._fallback = false; - } else { - this._fallback = true; +/** + * Processes a brace pattern and returns either an expanded array + * (if `options.expand` is true), a highly optimized regex-compatible string. + * This method is called by the main [braces](#braces) function. + * + * ```js + * const braces = require('braces'); + * console.log(braces.create('user-{200..300}/project-{a,b,c}-{1..10}')) + * //=> 'user-(20[0-9]|2[1-9][0-9]|300)/project-(a|b|c)-([1-9]|10)' + * ``` + * @param {String} `pattern` Brace pattern + * @param {Object} `options` + * @return {Array} Returns an array of expanded values. 
+ * @api public + */ - const interval = setInterval(() => { - this._hostnamesToFallback.clear(); - }, fallbackDuration * 1000); +braces.create = (input, options = {}) => { + if (input === '' || input.length < 3) { + return [input]; + } - /* istanbul ignore next: There is no `interval.unref()` when running inside an Electron renderer */ - if (interval.unref) { - interval.unref(); - } - } + return options.expand !== true + ? braces.compile(input, options) + : braces.expand(input, options); +}; - this.lookup = this.lookup.bind(this); - this.lookupAsync = this.lookupAsync.bind(this); - } +/** + * Expose "braces" + */ - set servers(servers) { - this.clear(); +module.exports = braces; - this._resolver.setServers(servers); - } - get servers() { - return this._resolver.getServers(); - } +/***/ }), - lookup(hostname, options, callback) { - if (typeof options === 'function') { - callback = options; - options = {}; - } else if (typeof options === 'number') { - options = { - family: options - }; - } +/***/ 9434: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (!callback) { - throw new Error('Callback must be a function.'); - } +"use strict"; - // eslint-disable-next-line promise/prefer-await-to-then - this.lookupAsync(hostname, options).then(result => { - if (options.all) { - callback(null, result); - } else { - callback(null, result.address, result.family, result.expires, result.ttl); - } - }, callback); - } - async lookupAsync(hostname, options = {}) { - if (typeof options === 'number') { - options = { - family: options - }; - } +const fill = __nccwpck_require__(6330); +const utils = __nccwpck_require__(5207); - let cached = await this.query(hostname); +const compile = (ast, options = {}) => { + let walk = (node, parent = {}) => { + let invalidBlock = utils.isInvalidBrace(parent); + let invalidNode = node.invalid === true && options.escapeInvalid === true; + let invalid = invalidBlock === true || invalidNode === true; + let prefix = options.escapeInvalid === true ? '\\' : ''; + let output = ''; - if (options.family === 6) { - const filtered = cached.filter(entry => entry.family === 6); + if (node.isOpen === true) { + return prefix + node.value; + } + if (node.isClose === true) { + return prefix + node.value; + } - if (options.hints & V4MAPPED) { - if ((supportsALL && options.hints & ALL) || filtered.length === 0) { - map4to6(cached); - } else { - cached = filtered; - } - } else { - cached = filtered; - } - } else if (options.family === 4) { - cached = cached.filter(entry => entry.family === 4); - } + if (node.type === 'open') { + return invalid ? (prefix + node.value) : '('; + } - if (options.hints & ADDRCONFIG) { - const {_iface} = this; - cached = cached.filter(entry => entry.family === 6 ? _iface.has6 : _iface.has4); - } + if (node.type === 'close') { + return invalid ? (prefix + node.value) : ')'; + } - if (cached.length === 0) { - const error = new Error(`cacheableLookup ENOTFOUND ${hostname}`); - error.code = 'ENOTFOUND'; - error.hostname = hostname; + if (node.type === 'comma') { + return node.prev.type === 'comma' ? '' : (invalid ? node.value : '|'); + } - throw error; - } + if (node.value) { + return node.value; + } - if (options.all) { - return cached; - } + if (node.nodes && node.ranges > 0) { + let args = utils.reduce(node.nodes); + let range = fill(...args, { ...options, wrap: false, toRegex: true }); - return cached[0]; - } + if (range.length !== 0) { + return args.length > 1 && range.length > 1 ? 
`(${range})` : range; + } + } - async query(hostname) { - let cached = await this._cache.get(hostname); + if (node.nodes) { + for (let child of node.nodes) { + output += walk(child, node); + } + } + return output; + }; - if (!cached) { - const pending = this._pending[hostname]; + return walk(ast); +}; - if (pending) { - cached = await pending; - } else { - const newPromise = this.queryAndCache(hostname); - this._pending[hostname] = newPromise; +module.exports = compile; - try { - cached = await newPromise; - } finally { - delete this._pending[hostname]; - } - } - } - cached = cached.map(entry => { - return {...entry}; - }); +/***/ }), - return cached; - } +/***/ 8774: +/***/ ((module) => { - async _resolve(hostname) { - const wrap = async promise => { - try { - return await promise; - } catch (error) { - if (error.code === 'ENODATA' || error.code === 'ENOTFOUND') { - return []; - } +"use strict"; - throw error; - } - }; - // ANY is unsafe as it doesn't trigger new queries in the underlying server. - const [A, AAAA] = await Promise.all([ - this._resolve4(hostname, ttl), - this._resolve6(hostname, ttl) - ].map(promise => wrap(promise))); +module.exports = { + MAX_LENGTH: 1024 * 64, - let aTtl = 0; - let aaaaTtl = 0; - let cacheTtl = 0; + // Digits + CHAR_0: '0', /* 0 */ + CHAR_9: '9', /* 9 */ - const now = Date.now(); + // Alphabet chars. + CHAR_UPPERCASE_A: 'A', /* A */ + CHAR_LOWERCASE_A: 'a', /* a */ + CHAR_UPPERCASE_Z: 'Z', /* Z */ + CHAR_LOWERCASE_Z: 'z', /* z */ - for (const entry of A) { - entry.family = 4; - entry.expires = now + (entry.ttl * 1000); + CHAR_LEFT_PARENTHESES: '(', /* ( */ + CHAR_RIGHT_PARENTHESES: ')', /* ) */ - aTtl = Math.max(aTtl, entry.ttl); - } + CHAR_ASTERISK: '*', /* * */ - for (const entry of AAAA) { - entry.family = 6; - entry.expires = now + (entry.ttl * 1000); + // Non-alphabetic chars. + CHAR_AMPERSAND: '&', /* & */ + CHAR_AT: '@', /* @ */ + CHAR_BACKSLASH: '\\', /* \ */ + CHAR_BACKTICK: '`', /* ` */ + CHAR_CARRIAGE_RETURN: '\r', /* \r */ + CHAR_CIRCUMFLEX_ACCENT: '^', /* ^ */ + CHAR_COLON: ':', /* : */ + CHAR_COMMA: ',', /* , */ + CHAR_DOLLAR: '$', /* . */ + CHAR_DOT: '.', /* . */ + CHAR_DOUBLE_QUOTE: '"', /* " */ + CHAR_EQUAL: '=', /* = */ + CHAR_EXCLAMATION_MARK: '!', /* ! */ + CHAR_FORM_FEED: '\f', /* \f */ + CHAR_FORWARD_SLASH: '/', /* / */ + CHAR_HASH: '#', /* # */ + CHAR_HYPHEN_MINUS: '-', /* - */ + CHAR_LEFT_ANGLE_BRACKET: '<', /* < */ + CHAR_LEFT_CURLY_BRACE: '{', /* { */ + CHAR_LEFT_SQUARE_BRACKET: '[', /* [ */ + CHAR_LINE_FEED: '\n', /* \n */ + CHAR_NO_BREAK_SPACE: '\u00A0', /* \u00A0 */ + CHAR_PERCENT: '%', /* % */ + CHAR_PLUS: '+', /* + */ + CHAR_QUESTION_MARK: '?', /* ? 
*/ + CHAR_RIGHT_ANGLE_BRACKET: '>', /* > */ + CHAR_RIGHT_CURLY_BRACE: '}', /* } */ + CHAR_RIGHT_SQUARE_BRACKET: ']', /* ] */ + CHAR_SEMICOLON: ';', /* ; */ + CHAR_SINGLE_QUOTE: '\'', /* ' */ + CHAR_SPACE: ' ', /* */ + CHAR_TAB: '\t', /* \t */ + CHAR_UNDERSCORE: '_', /* _ */ + CHAR_VERTICAL_LINE: '|', /* | */ + CHAR_ZERO_WIDTH_NOBREAK_SPACE: '\uFEFF' /* \uFEFF */ +}; - aaaaTtl = Math.max(aaaaTtl, entry.ttl); - } - if (A.length > 0) { - if (AAAA.length > 0) { - cacheTtl = Math.min(aTtl, aaaaTtl); - } else { - cacheTtl = aTtl; - } - } else { - cacheTtl = aaaaTtl; - } +/***/ }), - return { - entries: [ - ...A, - ...AAAA - ], - cacheTtl - }; - } +/***/ 5873: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - async _lookup(hostname) { - try { - const entries = await this._dnsLookup(hostname, { - all: true - }); +"use strict"; - return { - entries, - cacheTtl: 0 - }; - } catch (_) { - return { - entries: [], - cacheTtl: 0 - }; - } - } - async _set(hostname, data, cacheTtl) { - if (this.maxTtl > 0 && cacheTtl > 0) { - cacheTtl = Math.min(cacheTtl, this.maxTtl) * 1000; - data[kExpires] = Date.now() + cacheTtl; +const fill = __nccwpck_require__(6330); +const stringify = __nccwpck_require__(8750); +const utils = __nccwpck_require__(5207); - try { - await this._cache.set(hostname, data, cacheTtl); - } catch (error) { - this.lookupAsync = async () => { - const cacheError = new Error('Cache Error. Please recreate the CacheableLookup instance.'); - cacheError.cause = error; +const append = (queue = '', stash = '', enclose = false) => { + let result = []; - throw cacheError; - }; - } + queue = [].concat(queue); + stash = [].concat(stash); - if (isIterable(this._cache)) { - this._tick(cacheTtl); - } - } - } + if (!stash.length) return queue; + if (!queue.length) { + return enclose ? utils.flatten(stash).map(ele => `{${ele}}`) : stash; + } - async queryAndCache(hostname) { - if (this._hostnamesToFallback.has(hostname)) { - return this._dnsLookup(hostname, all); - } + for (let item of queue) { + if (Array.isArray(item)) { + for (let value of item) { + result.push(append(value, stash, enclose)); + } + } else { + for (let ele of stash) { + if (enclose === true && typeof ele === 'string') ele = `{${ele}}`; + result.push(Array.isArray(ele) ? append(item, ele, enclose) : (item + ele)); + } + } + } + return utils.flatten(result); +}; - let query = await this._resolve(hostname); +const expand = (ast, options = {}) => { + let rangeLimit = options.rangeLimit === void 0 ? 1000 : options.rangeLimit; - if (query.entries.length === 0 && this._fallback) { - query = await this._lookup(hostname); + let walk = (node, parent = {}) => { + node.queue = []; - if (query.entries.length !== 0) { - // Use `dns.lookup(...)` for that particular hostname - this._hostnamesToFallback.add(hostname); - } - } + let p = parent; + let q = parent.queue; - const cacheTtl = query.entries.length === 0 ? 
this.errorTtl : query.cacheTtl; - await this._set(hostname, query.entries, cacheTtl); + while (p.type !== 'brace' && p.type !== 'root' && p.parent) { + p = p.parent; + q = p.queue; + } - return query.entries; - } + if (node.invalid || node.dollar) { + q.push(append(q.pop(), stringify(node, options))); + return; + } - _tick(ms) { - const nextRemovalTime = this._nextRemovalTime; + if (node.type === 'brace' && node.invalid !== true && node.nodes.length === 2) { + q.push(append(q.pop(), ['{}'])); + return; + } - if (!nextRemovalTime || ms < nextRemovalTime) { - clearTimeout(this._removalTimeout); + if (node.nodes && node.ranges > 0) { + let args = utils.reduce(node.nodes); - this._nextRemovalTime = ms; + if (utils.exceedsLimit(...args, options.step, rangeLimit)) { + throw new RangeError('expanded array length exceeds range limit. Use options.rangeLimit to increase or disable the limit.'); + } - this._removalTimeout = setTimeout(() => { - this._nextRemovalTime = false; + let range = fill(...args, options); + if (range.length === 0) { + range = stringify(node, options); + } - let nextExpiry = Infinity; + q.push(append(q.pop(), range)); + node.nodes = []; + return; + } - const now = Date.now(); + let enclose = utils.encloseBrace(node); + let queue = node.queue; + let block = node; - for (const [hostname, entries] of this._cache) { - const expires = entries[kExpires]; + while (block.type !== 'brace' && block.type !== 'root' && block.parent) { + block = block.parent; + queue = block.queue; + } - if (now >= expires) { - this._cache.delete(hostname); - } else if (expires < nextExpiry) { - nextExpiry = expires; - } - } + for (let i = 0; i < node.nodes.length; i++) { + let child = node.nodes[i]; - if (nextExpiry !== Infinity) { - this._tick(nextExpiry - now); - } - }, ms); + if (child.type === 'comma' && node.type === 'brace') { + if (i === 1) queue.push(''); + queue.push(''); + continue; + } - /* istanbul ignore next: There is no `timeout.unref()` when running inside an Electron renderer */ - if (this._removalTimeout.unref) { - this._removalTimeout.unref(); - } - } - } + if (child.type === 'close') { + q.push(append(q.pop(), queue, enclose)); + continue; + } - install(agent) { - verifyAgent(agent); + if (child.value && child.type !== 'open') { + queue.push(append(queue.pop(), child.value)); + continue; + } - if (kCacheableLookupCreateConnection in agent) { - throw new Error('CacheableLookup has been already installed'); - } + if (child.nodes) { + walk(child, node); + } + } - agent[kCacheableLookupCreateConnection] = agent.createConnection; - agent[kCacheableLookupInstance] = this; + return queue; + }; - agent.createConnection = (options, callback) => { - if (!('lookup' in options)) { - options.lookup = this.lookup; - } + return utils.flatten(walk(ast)); +}; - return agent[kCacheableLookupCreateConnection](options, callback); - }; - } +module.exports = expand; - uninstall(agent) { - verifyAgent(agent); - if (agent[kCacheableLookupCreateConnection]) { - if (agent[kCacheableLookupInstance] !== this) { - throw new Error('The agent is not owned by this CacheableLookup instance'); - } +/***/ }), - agent.createConnection = agent[kCacheableLookupCreateConnection]; +/***/ 6477: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - delete agent[kCacheableLookupCreateConnection]; - delete agent[kCacheableLookupInstance]; - } - } +"use strict"; - updateInterfaceInfo() { - const {_iface} = this; - this._iface = getIfaceInfo(); +const stringify = __nccwpck_require__(8750); - if ((_iface.has4 && 
!this._iface.has4) || (_iface.has6 && !this._iface.has6)) { - this._cache.clear(); - } - } +/** + * Constants + */ - clear(hostname) { - if (hostname) { - this._cache.delete(hostname); - return; - } +const { + MAX_LENGTH, + CHAR_BACKSLASH, /* \ */ + CHAR_BACKTICK, /* ` */ + CHAR_COMMA, /* , */ + CHAR_DOT, /* . */ + CHAR_LEFT_PARENTHESES, /* ( */ + CHAR_RIGHT_PARENTHESES, /* ) */ + CHAR_LEFT_CURLY_BRACE, /* { */ + CHAR_RIGHT_CURLY_BRACE, /* } */ + CHAR_LEFT_SQUARE_BRACKET, /* [ */ + CHAR_RIGHT_SQUARE_BRACKET, /* ] */ + CHAR_DOUBLE_QUOTE, /* " */ + CHAR_SINGLE_QUOTE, /* ' */ + CHAR_NO_BREAK_SPACE, + CHAR_ZERO_WIDTH_NOBREAK_SPACE +} = __nccwpck_require__(8774); - this._cache.clear(); - } -} +/** + * parse + */ -module.exports = CacheableLookup; -module.exports.default = CacheableLookup; +const parse = (input, options = {}) => { + if (typeof input !== 'string') { + throw new TypeError('Expected a string'); + } + let opts = options || {}; + let max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH; + if (input.length > max) { + throw new SyntaxError(`Input length (${input.length}), exceeds max characters (${max})`); + } -/***/ }), + let ast = { type: 'root', input, nodes: [] }; + let stack = [ast]; + let block = ast; + let prev = ast; + let brackets = 0; + let length = input.length; + let index = 0; + let depth = 0; + let value; + let memo = {}; -/***/ 4340: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** + * Helpers + */ -"use strict"; + const advance = () => input[index++]; + const push = node => { + if (node.type === 'text' && prev.type === 'dot') { + prev.type = 'text'; + } -const {PassThrough: PassThroughStream} = __nccwpck_require__(2413); + if (prev && prev.type === 'text' && node.type === 'text') { + prev.value += node.value; + return; + } -module.exports = options => { - options = {...options}; + block.nodes.push(node); + node.parent = block; + node.prev = prev; + prev = node; + return node; + }; - const {array} = options; - let {encoding} = options; - const isBuffer = encoding === 'buffer'; - let objectMode = false; + push({ type: 'bos' }); - if (array) { - objectMode = !(encoding || isBuffer); - } else { - encoding = encoding || 'utf8'; - } + while (index < length) { + block = stack[stack.length - 1]; + value = advance(); - if (isBuffer) { - encoding = null; - } + /** + * Invalid chars + */ - const stream = new PassThroughStream({objectMode}); + if (value === CHAR_ZERO_WIDTH_NOBREAK_SPACE || value === CHAR_NO_BREAK_SPACE) { + continue; + } - if (encoding) { - stream.setEncoding(encoding); - } + /** + * Escaped chars + */ - let length = 0; - const chunks = []; + if (value === CHAR_BACKSLASH) { + push({ type: 'text', value: (options.keepEscaping ? value : '') + advance() }); + continue; + } - stream.on('data', chunk => { - chunks.push(chunk); + /** + * Right square bracket (literal): ']' + */ - if (objectMode) { - length = chunks.length; - } else { - length += chunk.length; - } - }); + if (value === CHAR_RIGHT_SQUARE_BRACKET) { + push({ type: 'text', value: '\\' + value }); + continue; + } - stream.getBufferedValue = () => { - if (array) { - return chunks; - } + /** + * Left square bracket: '[' + */ - return isBuffer ? 
Buffer.concat(chunks, length) : chunks.join(''); - }; + if (value === CHAR_LEFT_SQUARE_BRACKET) { + brackets++; - stream.getBufferedLength = () => length; + let closed = true; + let next; - return stream; -}; + while (index < length && (next = advance())) { + value += next; + if (next === CHAR_LEFT_SQUARE_BRACKET) { + brackets++; + continue; + } -/***/ }), + if (next === CHAR_BACKSLASH) { + value += advance(); + continue; + } -/***/ 7040: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (next === CHAR_RIGHT_SQUARE_BRACKET) { + brackets--; -"use strict"; + if (brackets === 0) { + break; + } + } + } -const {constants: BufferConstants} = __nccwpck_require__(4293); -const pump = __nccwpck_require__(8341); -const bufferStream = __nccwpck_require__(4340); + push({ type: 'text', value }); + continue; + } -class MaxBufferError extends Error { - constructor() { - super('maxBuffer exceeded'); - this.name = 'MaxBufferError'; - } -} + /** + * Parentheses + */ -async function getStream(inputStream, options) { - if (!inputStream) { - return Promise.reject(new Error('Expected a stream')); - } + if (value === CHAR_LEFT_PARENTHESES) { + block = push({ type: 'paren', nodes: [] }); + stack.push(block); + push({ type: 'text', value }); + continue; + } - options = { - maxBuffer: Infinity, - ...options - }; + if (value === CHAR_RIGHT_PARENTHESES) { + if (block.type !== 'paren') { + push({ type: 'text', value }); + continue; + } + block = stack.pop(); + push({ type: 'text', value }); + block = stack[stack.length - 1]; + continue; + } - const {maxBuffer} = options; + /** + * Quotes: '|"|` + */ - let stream; - await new Promise((resolve, reject) => { - const rejectPromise = error => { - // Don't retrieve an oversized buffer. - if (error && stream.getBufferedLength() <= BufferConstants.MAX_LENGTH) { - error.bufferedData = stream.getBufferedValue(); - } + if (value === CHAR_DOUBLE_QUOTE || value === CHAR_SINGLE_QUOTE || value === CHAR_BACKTICK) { + let open = value; + let next; - reject(error); - }; + if (options.keepQuotes !== true) { + value = ''; + } - stream = pump(inputStream, bufferStream(options), error => { - if (error) { - rejectPromise(error); - return; - } + while (index < length && (next = advance())) { + if (next === CHAR_BACKSLASH) { + value += next + advance(); + continue; + } - resolve(); - }); + if (next === open) { + if (options.keepQuotes === true) value += next; + break; + } - stream.on('data', () => { - if (stream.getBufferedLength() > maxBuffer) { - rejectPromise(new MaxBufferError()); - } - }); - }); + value += next; + } - return stream.getBufferedValue(); -} + push({ type: 'text', value }); + continue; + } -module.exports = getStream; -// TODO: Remove this for the next major release -module.exports.default = getStream; -module.exports.buffer = (stream, options) => getStream(stream, {...options, encoding: 'buffer'}); -module.exports.array = (stream, options) => getStream(stream, {...options, array: true}); -module.exports.MaxBufferError = MaxBufferError; + /** + * Left curly brace: '{' + */ + if (value === CHAR_LEFT_CURLY_BRACE) { + depth++; -/***/ }), + let dollar = prev.value && prev.value.slice(-1) === '$' || block.dollar === true; + let brace = { + type: 'brace', + open: true, + close: false, + dollar, + depth, + commas: 0, + ranges: 0, + nodes: [] + }; -/***/ 8116: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + block = push(brace); + stack.push(block); + push({ type: 'open', value }); + continue; + } -"use strict"; + /** + * Right curly 
brace: '}' + */ + if (value === CHAR_RIGHT_CURLY_BRACE) { + if (block.type !== 'brace') { + push({ type: 'text', value }); + continue; + } -const EventEmitter = __nccwpck_require__(8614); -const urlLib = __nccwpck_require__(8835); -const normalizeUrl = __nccwpck_require__(7952); -const getStream = __nccwpck_require__(7040); -const CachePolicy = __nccwpck_require__(1002); -const Response = __nccwpck_require__(9004); -const lowercaseKeys = __nccwpck_require__(9662); -const cloneResponse = __nccwpck_require__(1312); -const Keyv = __nccwpck_require__(1531); + let type = 'close'; + block = stack.pop(); + block.close = true; -class CacheableRequest { - constructor(request, cacheAdapter) { - if (typeof request !== 'function') { - throw new TypeError('Parameter `request` must be a function'); - } + push({ type, value }); + depth--; - this.cache = new Keyv({ - uri: typeof cacheAdapter === 'string' && cacheAdapter, - store: typeof cacheAdapter !== 'string' && cacheAdapter, - namespace: 'cacheable-request' - }); + block = stack[stack.length - 1]; + continue; + } - return this.createCacheableRequest(request); - } + /** + * Comma: ',' + */ - createCacheableRequest(request) { - return (opts, cb) => { - let url; - if (typeof opts === 'string') { - url = normalizeUrlObject(urlLib.parse(opts)); - opts = {}; - } else if (opts instanceof urlLib.URL) { - url = normalizeUrlObject(urlLib.parse(opts.toString())); - opts = {}; - } else { - const [pathname, ...searchParts] = (opts.path || '').split('?'); - const search = searchParts.length > 0 ? - `?${searchParts.join('?')}` : - ''; - url = normalizeUrlObject({ ...opts, pathname, search }); - } + if (value === CHAR_COMMA && depth > 0) { + if (block.ranges > 0) { + block.ranges = 0; + let open = block.nodes.shift(); + block.nodes = [open, { type: 'text', value: stringify(block) }]; + } - opts = { - headers: {}, - method: 'GET', - cache: true, - strictTtl: false, - automaticFailover: false, - ...opts, - ...urlObjectToRequestOptions(url) - }; - opts.headers = lowercaseKeys(opts.headers); + push({ type: 'comma', value }); + block.commas++; + continue; + } - const ee = new EventEmitter(); - const normalizedUrlString = normalizeUrl( - urlLib.format(url), - { - stripWWW: false, - removeTrailingSlash: false, - stripAuthentication: false - } - ); - const key = `${opts.method}:${normalizedUrlString}`; - let revalidate = false; - let madeRequest = false; + /** + * Dot: '.' 
+ */ - const makeRequest = opts => { - madeRequest = true; - let requestErrored = false; - let requestErrorCallback; + if (value === CHAR_DOT && depth > 0 && block.commas === 0) { + let siblings = block.nodes; - const requestErrorPromise = new Promise(resolve => { - requestErrorCallback = () => { - if (!requestErrored) { - requestErrored = true; - resolve(); - } - }; - }); + if (depth === 0 || siblings.length === 0) { + push({ type: 'text', value }); + continue; + } - const handler = response => { - if (revalidate && !opts.forceRefresh) { - response.status = response.statusCode; - const revalidatedPolicy = CachePolicy.fromObject(revalidate.cachePolicy).revalidatedPolicy(opts, response); - if (!revalidatedPolicy.modified) { - const headers = revalidatedPolicy.policy.responseHeaders(); - response = new Response(revalidate.statusCode, headers, revalidate.body, revalidate.url); - response.cachePolicy = revalidatedPolicy.policy; - response.fromCache = true; - } - } + if (prev.type === 'dot') { + block.range = []; + prev.value += value; + prev.type = 'range'; - if (!response.fromCache) { - response.cachePolicy = new CachePolicy(opts, response, opts); - response.fromCache = false; - } + if (block.nodes.length !== 3 && block.nodes.length !== 5) { + block.invalid = true; + block.ranges = 0; + prev.type = 'text'; + continue; + } - let clonedResponse; - if (opts.cache && response.cachePolicy.storable()) { - clonedResponse = cloneResponse(response); + block.ranges++; + block.args = []; + continue; + } - (async () => { - try { - const bodyPromise = getStream.buffer(response); + if (prev.type === 'range') { + siblings.pop(); - await Promise.race([ - requestErrorPromise, - new Promise(resolve => response.once('end', resolve)) - ]); + let before = siblings[siblings.length - 1]; + before.value += prev.value + value; + prev = before; + block.ranges--; + continue; + } - if (requestErrored) { - return; - } + push({ type: 'dot', value }); + continue; + } - const body = await bodyPromise; + /** + * Text + */ - const value = { - cachePolicy: response.cachePolicy.toObject(), - url: response.url, - statusCode: response.fromCache ? revalidate.statusCode : response.statusCode, - body - }; + push({ type: 'text', value }); + } - let ttl = opts.strictTtl ? response.cachePolicy.timeToLive() : undefined; - if (opts.maxTtl) { - ttl = ttl ? 
Math.min(ttl, opts.maxTtl) : opts.maxTtl; - } + // Mark imbalanced braces and brackets as invalid + do { + block = stack.pop(); - await this.cache.set(key, value, ttl); - } catch (error) { - ee.emit('error', new CacheableRequest.CacheError(error)); - } - })(); - } else if (opts.cache && revalidate) { - (async () => { - try { - await this.cache.delete(key); - } catch (error) { - ee.emit('error', new CacheableRequest.CacheError(error)); - } - })(); - } + if (block.type !== 'root') { + block.nodes.forEach(node => { + if (!node.nodes) { + if (node.type === 'open') node.isOpen = true; + if (node.type === 'close') node.isClose = true; + if (!node.nodes) node.type = 'text'; + node.invalid = true; + } + }); - ee.emit('response', clonedResponse || response); - if (typeof cb === 'function') { - cb(clonedResponse || response); - } - }; + // get the location of the block on parent.nodes (block's siblings) + let parent = stack[stack.length - 1]; + let index = parent.nodes.indexOf(block); + // replace the (invalid) block with it's nodes + parent.nodes.splice(index, 1, ...block.nodes); + } + } while (stack.length > 0); - try { - const req = request(opts, handler); - req.once('error', requestErrorCallback); - req.once('abort', requestErrorCallback); - ee.emit('request', req); - } catch (error) { - ee.emit('error', new CacheableRequest.RequestError(error)); - } - }; + push({ type: 'eos' }); + return ast; +}; - (async () => { - const get = async opts => { - await Promise.resolve(); +module.exports = parse; - const cacheEntry = opts.cache ? await this.cache.get(key) : undefined; - if (typeof cacheEntry === 'undefined') { - return makeRequest(opts); - } - const policy = CachePolicy.fromObject(cacheEntry.cachePolicy); - if (policy.satisfiesWithoutRevalidation(opts) && !opts.forceRefresh) { - const headers = policy.responseHeaders(); - const response = new Response(cacheEntry.statusCode, headers, cacheEntry.body, cacheEntry.url); - response.cachePolicy = policy; - response.fromCache = true; +/***/ }), - ee.emit('response', response); - if (typeof cb === 'function') { - cb(response); - } - } else { - revalidate = cacheEntry; - opts.headers = policy.revalidationHeaders(opts); - makeRequest(opts); - } - }; +/***/ 8750: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - const errorHandler = error => ee.emit('error', new CacheableRequest.CacheError(error)); - this.cache.once('error', errorHandler); - ee.on('response', () => this.cache.removeListener('error', errorHandler)); +"use strict"; - try { - await get(opts); - } catch (error) { - if (opts.automaticFailover && !madeRequest) { - makeRequest(opts); - } - ee.emit('error', new CacheableRequest.CacheError(error)); - } - })(); +const utils = __nccwpck_require__(5207); - return ee; - }; - } -} +module.exports = (ast, options = {}) => { + let stringify = (node, parent = {}) => { + let invalidBlock = options.escapeInvalid && utils.isInvalidBrace(parent); + let invalidNode = node.invalid === true && options.escapeInvalid === true; + let output = ''; -function urlObjectToRequestOptions(url) { - const options = { ...url }; - options.path = `${url.pathname || '/'}${url.search || ''}`; - delete options.pathname; - delete options.search; - return options; -} + if (node.value) { + if ((invalidBlock || invalidNode) && utils.isOpenOrClose(node)) { + return '\\' + node.value; + } + return node.value; + } -function normalizeUrlObject(url) { - // If url was parsed by url.parse or new URL: - // - hostname will be set - // - host will be hostname[:port] - // - 
port will be set if it was explicit in the parsed string - // Otherwise, url was from request options: - // - hostname or host may be set - // - host shall not have port encoded - return { - protocol: url.protocol, - auth: url.auth, - hostname: url.hostname || url.host || 'localhost', - port: url.port, - pathname: url.pathname, - search: url.search - }; -} + if (node.value) { + return node.value; + } -CacheableRequest.RequestError = class extends Error { - constructor(error) { - super(error.message); - this.name = 'RequestError'; - Object.assign(this, error); - } -}; + if (node.nodes) { + for (let child of node.nodes) { + output += stringify(child); + } + } + return output; + }; -CacheableRequest.CacheError = class extends Error { - constructor(error) { - super(error.message); - this.name = 'CacheError'; - Object.assign(this, error); - } + return stringify(ast); }; -module.exports = CacheableRequest; /***/ }), -/***/ 1312: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 5207: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -const PassThrough = __nccwpck_require__(2413).PassThrough; -const mimicResponse = __nccwpck_require__(2610); - -const cloneResponse = response => { - if (!(response && response.pipe)) { - throw new TypeError('Parameter `response` must be a response stream.'); - } - - const clone = new PassThrough(); - mimicResponse(response, clone); - - return response.pipe(clone); +exports.isInteger = num => { + if (typeof num === 'number') { + return Number.isInteger(num); + } + if (typeof num === 'string' && num.trim() !== '') { + return Number.isInteger(Number(num)); + } + return false; }; -module.exports = cloneResponse; - - -/***/ }), - -/***/ 2391: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/** + * Find a node of the given type + */ -"use strict"; +exports.find = (node, type) => node.nodes.find(node => node.type === type); -const {Transform, PassThrough} = __nccwpck_require__(2413); -const zlib = __nccwpck_require__(8761); -const mimicResponse = __nccwpck_require__(3877); +/** + * Find a node of the given type + */ -module.exports = response => { - const contentEncoding = (response.headers['content-encoding'] || '').toLowerCase(); +exports.exceedsLimit = (min, max, step = 1, limit) => { + if (limit === false) return false; + if (!exports.isInteger(min) || !exports.isInteger(max)) return false; + return ((Number(max) - Number(min)) / Number(step)) >= limit; +}; - if (!['gzip', 'deflate', 'br'].includes(contentEncoding)) { - return response; - } +/** + * Escape the given node with '\\' before node.value + */ - // TODO: Remove this when targeting Node.js 12. 
- const isBrotli = contentEncoding === 'br'; - if (isBrotli && typeof zlib.createBrotliDecompress !== 'function') { - response.destroy(new Error('Brotli is not supported on Node.js < 12')); - return response; - } +exports.escapeNode = (block, n = 0, type) => { + let node = block.nodes[n]; + if (!node) return; - let isEmpty = true; + if ((type && node.type === type) || node.type === 'open' || node.type === 'close') { + if (node.escaped !== true) { + node.value = '\\' + node.value; + node.escaped = true; + } + } +}; - const checker = new Transform({ - transform(data, _encoding, callback) { - isEmpty = false; +/** + * Returns true if the given brace node should be enclosed in literal braces + */ - callback(null, data); - }, +exports.encloseBrace = node => { + if (node.type !== 'brace') return false; + if ((node.commas >> 0 + node.ranges >> 0) === 0) { + node.invalid = true; + return true; + } + return false; +}; - flush(callback) { - callback(); - } - }); +/** + * Returns true if a brace node is invalid. + */ - const finalStream = new PassThrough({ - autoDestroy: false, - destroy(error, callback) { - response.destroy(); +exports.isInvalidBrace = block => { + if (block.type !== 'brace') return false; + if (block.invalid === true || block.dollar) return true; + if ((block.commas >> 0 + block.ranges >> 0) === 0) { + block.invalid = true; + return true; + } + if (block.open !== true || block.close !== true) { + block.invalid = true; + return true; + } + return false; +}; - callback(error); - } - }); +/** + * Returns true if a node is an open or close node + */ - const decompressStream = isBrotli ? zlib.createBrotliDecompress() : zlib.createUnzip(); +exports.isOpenOrClose = node => { + if (node.type === 'open' || node.type === 'close') { + return true; + } + return node.open === true || node.close === true; +}; - decompressStream.once('error', error => { - if (isEmpty && !response.readable) { - finalStream.end(); - return; - } +/** + * Reduce an array of text nodes. + */ - finalStream.destroy(error); - }); +exports.reduce = nodes => nodes.reduce((acc, node) => { + if (node.type === 'text') acc.push(node.value); + if (node.type === 'range') node.type = 'text'; + return acc; +}, []); - mimicResponse(response, finalStream); - response.pipe(checker).pipe(decompressStream).pipe(finalStream); +/** + * Flatten an array + */ - return finalStream; +exports.flatten = (...args) => { + const result = []; + const flat = arr => { + for (let i = 0; i < arr.length; i++) { + let ele = arr[i]; + Array.isArray(ele) ? 
flat(ele, result) : ele !== void 0 && result.push(ele); + } + return result; + }; + flat(args); + return result; }; /***/ }), -/***/ 3877: -/***/ ((module) => { +/***/ 8367: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; +const { + V4MAPPED, + ADDRCONFIG, + ALL, + promises: { + Resolver: AsyncResolver + }, + lookup: dnsLookup +} = __nccwpck_require__(9523); +const {promisify} = __nccwpck_require__(3837); +const os = __nccwpck_require__(2037); -// We define these manually to ensure they're always copied -// even if they would move up the prototype chain -// https://nodejs.org/api/http.html#http_class_http_incomingmessage -const knownProperties = [ - 'aborted', - 'complete', - 'headers', - 'httpVersion', - 'httpVersionMinor', - 'httpVersionMajor', - 'method', - 'rawHeaders', - 'rawTrailers', - 'setTimeout', - 'socket', - 'statusCode', - 'statusMessage', - 'trailers', - 'url' -]; - -module.exports = (fromStream, toStream) => { - if (toStream._readableState.autoDestroy) { - throw new Error('The second stream must have the `autoDestroy` option set to `false`'); - } +const kCacheableLookupCreateConnection = Symbol('cacheableLookupCreateConnection'); +const kCacheableLookupInstance = Symbol('cacheableLookupInstance'); +const kExpires = Symbol('expires'); - const fromProperties = new Set(Object.keys(fromStream).concat(knownProperties)); +const supportsALL = typeof ALL === 'number'; - const properties = {}; +const verifyAgent = agent => { + if (!(agent && typeof agent.createConnection === 'function')) { + throw new Error('Expected an Agent instance as the first argument'); + } +}; - for (const property of fromProperties) { - // Don't overwrite existing properties. - if (property in toStream) { +const map4to6 = entries => { + for (const entry of entries) { + if (entry.family === 6) { continue; } - properties[property] = { - get() { - const value = fromStream[property]; - const isFunction = typeof value === 'function'; - - return isFunction ? 
value.bind(fromStream) : value; - }, - set(value) { - fromStream[property] = value; - }, - enumerable: true, - configurable: false - }; + entry.address = `::ffff:${entry.address}`; + entry.family = 6; } +}; - Object.defineProperties(toStream, properties); - - fromStream.once('aborted', () => { - toStream.destroy(); +const getIfaceInfo = () => { + let has4 = false; + let has6 = false; - toStream.emit('aborted'); - }); + for (const device of Object.values(os.networkInterfaces())) { + for (const iface of device) { + if (iface.internal) { + continue; + } - fromStream.once('close', () => { - if (fromStream.complete) { - if (toStream.readable) { - toStream.once('end', () => { - toStream.emit('close'); - }); + if (iface.family === 'IPv6') { + has6 = true; } else { - toStream.emit('close'); + has4 = true; + } + + if (has4 && has6) { + return {has4, has6}; } - } else { - toStream.emit('close'); } - }); + } - return toStream; + return {has4, has6}; }; +const isIterable = map => { + return Symbol.iterator in map; +}; -/***/ }), +const ttl = {ttl: true}; +const all = {all: true}; -/***/ 6214: -/***/ ((module, exports) => { +class CacheableLookup { + constructor({ + cache = new Map(), + maxTtl = Infinity, + fallbackDuration = 3600, + errorTtl = 0.15, + resolver = new AsyncResolver(), + lookup = dnsLookup + } = {}) { + this.maxTtl = maxTtl; + this.errorTtl = errorTtl; -"use strict"; + this._cache = cache; + this._resolver = resolver; + this._dnsLookup = promisify(lookup); -Object.defineProperty(exports, "__esModule", ({ value: true })); -function isTLSSocket(socket) { - return socket.encrypted; -} -const deferToConnect = (socket, fn) => { - let listeners; - if (typeof fn === 'function') { - const connect = fn; - listeners = { connect }; - } - else { - listeners = fn; - } - const hasConnectListener = typeof listeners.connect === 'function'; - const hasSecureConnectListener = typeof listeners.secureConnect === 'function'; - const hasCloseListener = typeof listeners.close === 'function'; - const onConnect = () => { - if (hasConnectListener) { - listeners.connect(); - } - if (isTLSSocket(socket) && hasSecureConnectListener) { - if (socket.authorized) { - listeners.secureConnect(); - } - else if (!socket.authorizationError) { - socket.once('secureConnect', listeners.secureConnect); - } - } - if (hasCloseListener) { - socket.once('close', listeners.close); - } - }; - if (socket.writable && !socket.connecting) { - onConnect(); - } - else if (socket.connecting) { - socket.once('connect', onConnect); - } - else if (socket.destroyed && hasCloseListener) { - listeners.close(socket._hadError); - } -}; -exports.default = deferToConnect; -// For CommonJS default export support -module.exports = deferToConnect; -module.exports.default = deferToConnect; + if (this._resolver instanceof AsyncResolver) { + this._resolve4 = this._resolver.resolve4.bind(this._resolver); + this._resolve6 = this._resolver.resolve6.bind(this._resolver); + } else { + this._resolve4 = promisify(this._resolver.resolve4.bind(this._resolver)); + this._resolve6 = promisify(this._resolver.resolve6.bind(this._resolver)); + } + this._iface = getIfaceInfo(); -/***/ }), + this._pending = {}; + this._nextRemovalTime = false; + this._hostnamesToFallback = new Set(); -/***/ 8932: -/***/ ((__unused_webpack_module, exports) => { + if (fallbackDuration < 1) { + this._fallback = false; + } else { + this._fallback = true; -"use strict"; + const interval = setInterval(() => { + this._hostnamesToFallback.clear(); + }, fallbackDuration * 1000); + /* istanbul ignore 
next: There is no `interval.unref()` when running inside an Electron renderer */ + if (interval.unref) { + interval.unref(); + } + } -Object.defineProperty(exports, "__esModule", ({ value: true })); + this.lookup = this.lookup.bind(this); + this.lookupAsync = this.lookupAsync.bind(this); + } -class Deprecation extends Error { - constructor(message) { - super(message); // Maintains proper stack trace (only available on V8) + set servers(servers) { + this.clear(); - /* istanbul ignore next */ + this._resolver.setServers(servers); + } - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } + get servers() { + return this._resolver.getServers(); + } - this.name = 'Deprecation'; - } + lookup(hostname, options, callback) { + if (typeof options === 'function') { + callback = options; + options = {}; + } else if (typeof options === 'number') { + options = { + family: options + }; + } -} + if (!callback) { + throw new Error('Callback must be a function.'); + } -exports.Deprecation = Deprecation; + // eslint-disable-next-line promise/prefer-await-to-then + this.lookupAsync(hostname, options).then(result => { + if (options.all) { + callback(null, result); + } else { + callback(null, result.address, result.family, result.expires, result.ttl); + } + }, callback); + } + async lookupAsync(hostname, options = {}) { + if (typeof options === 'number') { + options = { + family: options + }; + } -/***/ }), + let cached = await this.query(hostname); -/***/ 1205: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (options.family === 6) { + const filtered = cached.filter(entry => entry.family === 6); -var once = __nccwpck_require__(1223); + if (options.hints & V4MAPPED) { + if ((supportsALL && options.hints & ALL) || filtered.length === 0) { + map4to6(cached); + } else { + cached = filtered; + } + } else { + cached = filtered; + } + } else if (options.family === 4) { + cached = cached.filter(entry => entry.family === 4); + } -var noop = function() {}; + if (options.hints & ADDRCONFIG) { + const {_iface} = this; + cached = cached.filter(entry => entry.family === 6 ? 
_iface.has6 : _iface.has4); + } -var isRequest = function(stream) { - return stream.setHeader && typeof stream.abort === 'function'; -}; + if (cached.length === 0) { + const error = new Error(`cacheableLookup ENOTFOUND ${hostname}`); + error.code = 'ENOTFOUND'; + error.hostname = hostname; -var isChildProcess = function(stream) { - return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3 -}; + throw error; + } -var eos = function(stream, opts, callback) { - if (typeof opts === 'function') return eos(stream, null, opts); - if (!opts) opts = {}; + if (options.all) { + return cached; + } - callback = once(callback || noop); + return cached[0]; + } - var ws = stream._writableState; - var rs = stream._readableState; - var readable = opts.readable || (opts.readable !== false && stream.readable); - var writable = opts.writable || (opts.writable !== false && stream.writable); - var cancelled = false; + async query(hostname) { + let cached = await this._cache.get(hostname); - var onlegacyfinish = function() { - if (!stream.writable) onfinish(); - }; + if (!cached) { + const pending = this._pending[hostname]; - var onfinish = function() { - writable = false; - if (!readable) callback.call(stream); - }; + if (pending) { + cached = await pending; + } else { + const newPromise = this.queryAndCache(hostname); + this._pending[hostname] = newPromise; - var onend = function() { - readable = false; - if (!writable) callback.call(stream); - }; + try { + cached = await newPromise; + } finally { + delete this._pending[hostname]; + } + } + } - var onexit = function(exitCode) { - callback.call(stream, exitCode ? new Error('exited with error code: ' + exitCode) : null); - }; + cached = cached.map(entry => { + return {...entry}; + }); - var onerror = function(err) { - callback.call(stream, err); - }; + return cached; + } - var onclose = function() { - process.nextTick(onclosenexttick); - }; + async _resolve(hostname) { + const wrap = async promise => { + try { + return await promise; + } catch (error) { + if (error.code === 'ENODATA' || error.code === 'ENOTFOUND') { + return []; + } - var onclosenexttick = function() { - if (cancelled) return; - if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close')); - if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close')); - }; + throw error; + } + }; - var onrequest = function() { - stream.req.on('finish', onfinish); - }; + // ANY is unsafe as it doesn't trigger new queries in the underlying server. 
+ const [A, AAAA] = await Promise.all([ + this._resolve4(hostname, ttl), + this._resolve6(hostname, ttl) + ].map(promise => wrap(promise))); - if (isRequest(stream)) { - stream.on('complete', onfinish); - stream.on('abort', onclose); - if (stream.req) onrequest(); - else stream.on('request', onrequest); - } else if (writable && !ws) { // legacy streams - stream.on('end', onlegacyfinish); - stream.on('close', onlegacyfinish); - } + let aTtl = 0; + let aaaaTtl = 0; + let cacheTtl = 0; - if (isChildProcess(stream)) stream.on('exit', onexit); + const now = Date.now(); - stream.on('end', onend); - stream.on('finish', onfinish); - if (opts.error !== false) stream.on('error', onerror); - stream.on('close', onclose); + for (const entry of A) { + entry.family = 4; + entry.expires = now + (entry.ttl * 1000); - return function() { - cancelled = true; - stream.removeListener('complete', onfinish); - stream.removeListener('abort', onclose); - stream.removeListener('request', onrequest); - if (stream.req) stream.req.removeListener('finish', onfinish); - stream.removeListener('end', onlegacyfinish); - stream.removeListener('close', onlegacyfinish); - stream.removeListener('finish', onfinish); - stream.removeListener('exit', onexit); - stream.removeListener('end', onend); - stream.removeListener('error', onerror); - stream.removeListener('close', onclose); - }; -}; + aTtl = Math.max(aTtl, entry.ttl); + } -module.exports = eos; + for (const entry of AAAA) { + entry.family = 6; + entry.expires = now + (entry.ttl * 1000); + aaaaTtl = Math.max(aaaaTtl, entry.ttl); + } -/***/ }), + if (A.length > 0) { + if (AAAA.length > 0) { + cacheTtl = Math.min(aTtl, aaaaTtl); + } else { + cacheTtl = aTtl; + } + } else { + cacheTtl = aaaaTtl; + } -/***/ 5582: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + return { + entries: [ + ...A, + ...AAAA + ], + cacheTtl + }; + } -"use strict"; + async _lookup(hostname) { + try { + const entries = await this._dnsLookup(hostname, { + all: true + }); + return { + entries, + cacheTtl: 0 + }; + } catch (_) { + return { + entries: [], + cacheTtl: 0 + }; + } + } -const stringify = __nccwpck_require__(4810); -const compile = __nccwpck_require__(7123); -const expand = __nccwpck_require__(6944); -const parse = __nccwpck_require__(9889); + async _set(hostname, data, cacheTtl) { + if (this.maxTtl > 0 && cacheTtl > 0) { + cacheTtl = Math.min(cacheTtl, this.maxTtl) * 1000; + data[kExpires] = Date.now() + cacheTtl; -/** - * Expand the given pattern or create a regex-compatible string. - * - * ```js - * const braces = require('braces'); - * console.log(braces('{a,b,c}', { compile: true })); //=> ['(a|b|c)'] - * console.log(braces('{a,b,c}')); //=> ['a', 'b', 'c'] - * ``` - * @param {String} `str` - * @param {Object} `options` - * @return {String} - * @api public - */ + try { + await this._cache.set(hostname, data, cacheTtl); + } catch (error) { + this.lookupAsync = async () => { + const cacheError = new Error('Cache Error. 
Please recreate the CacheableLookup instance.'); + cacheError.cause = error; -const braces = (input, options = {}) => { - let output = []; + throw cacheError; + }; + } - if (Array.isArray(input)) { - for (let pattern of input) { - let result = braces.create(pattern, options); - if (Array.isArray(result)) { - output.push(...result); - } else { - output.push(result); - } - } - } else { - output = [].concat(braces.create(input, options)); - } + if (isIterable(this._cache)) { + this._tick(cacheTtl); + } + } + } - if (options && options.expand === true && options.nodupes === true) { - output = [...new Set(output)]; - } - return output; -}; + async queryAndCache(hostname) { + if (this._hostnamesToFallback.has(hostname)) { + return this._dnsLookup(hostname, all); + } -/** - * Parse the given `str` with the given `options`. - * - * ```js - * // braces.parse(pattern, [, options]); - * const ast = braces.parse('a/{b,c}/d'); - * console.log(ast); - * ``` - * @param {String} pattern Brace pattern to parse - * @param {Object} options - * @return {Object} Returns an AST - * @api public - */ + let query = await this._resolve(hostname); -braces.parse = (input, options = {}) => parse(input, options); + if (query.entries.length === 0 && this._fallback) { + query = await this._lookup(hostname); -/** - * Creates a braces string from an AST, or an AST node. - * - * ```js - * const braces = require('braces'); - * let ast = braces.parse('foo/{a,b}/bar'); - * console.log(stringify(ast.nodes[2])); //=> '{a,b}' - * ``` - * @param {String} `input` Brace pattern or AST. - * @param {Object} `options` - * @return {Array} Returns an array of expanded values. - * @api public - */ + if (query.entries.length !== 0) { + // Use `dns.lookup(...)` for that particular hostname + this._hostnamesToFallback.add(hostname); + } + } -braces.stringify = (input, options = {}) => { - if (typeof input === 'string') { - return stringify(braces.parse(input, options), options); - } - return stringify(input, options); -}; + const cacheTtl = query.entries.length === 0 ? this.errorTtl : query.cacheTtl; + await this._set(hostname, query.entries, cacheTtl); -/** - * Compiles a brace pattern into a regex-compatible, optimized string. - * This method is called by the main [braces](#braces) function by default. - * - * ```js - * const braces = require('braces'); - * console.log(braces.compile('a/{b,c}/d')); - * //=> ['a/(b|c)/d'] - * ``` - * @param {String} `input` Brace pattern or AST. - * @param {Object} `options` - * @return {Array} Returns an array of expanded values. - * @api public - */ + return query.entries; + } -braces.compile = (input, options = {}) => { - if (typeof input === 'string') { - input = braces.parse(input, options); - } - return compile(input, options); -}; + _tick(ms) { + const nextRemovalTime = this._nextRemovalTime; -/** - * Expands a brace pattern into an array. This method is called by the - * main [braces](#braces) function when `options.expand` is true. Before - * using this method it's recommended that you read the [performance notes](#performance)) - * and advantages of using [.compile](#compile) instead. - * - * ```js - * const braces = require('braces'); - * console.log(braces.expand('a/{b,c}/d')); - * //=> ['a/b/d', 'a/c/d']; - * ``` - * @param {String} `pattern` Brace pattern - * @param {Object} `options` - * @return {Array} Returns an array of expanded values. 
- * @api public - */ + if (!nextRemovalTime || ms < nextRemovalTime) { + clearTimeout(this._removalTimeout); -braces.expand = (input, options = {}) => { - if (typeof input === 'string') { - input = braces.parse(input, options); - } + this._nextRemovalTime = ms; - let result = expand(input, options); + this._removalTimeout = setTimeout(() => { + this._nextRemovalTime = false; - // filter out empty strings if specified - if (options.noempty === true) { - result = result.filter(Boolean); - } + let nextExpiry = Infinity; - // filter out duplicates if specified - if (options.nodupes === true) { - result = [...new Set(result)]; - } + const now = Date.now(); - return result; -}; + for (const [hostname, entries] of this._cache) { + const expires = entries[kExpires]; -/** - * Processes a brace pattern and returns either an expanded array - * (if `options.expand` is true), a highly optimized regex-compatible string. - * This method is called by the main [braces](#braces) function. - * - * ```js - * const braces = require('braces'); - * console.log(braces.create('user-{200..300}/project-{a,b,c}-{1..10}')) - * //=> 'user-(20[0-9]|2[1-9][0-9]|300)/project-(a|b|c)-([1-9]|10)' - * ``` - * @param {String} `pattern` Brace pattern - * @param {Object} `options` - * @return {Array} Returns an array of expanded values. - * @api public - */ + if (now >= expires) { + this._cache.delete(hostname); + } else if (expires < nextExpiry) { + nextExpiry = expires; + } + } -braces.create = (input, options = {}) => { - if (input === '' || input.length < 3) { - return [input]; - } + if (nextExpiry !== Infinity) { + this._tick(nextExpiry - now); + } + }, ms); - return options.expand !== true - ? braces.compile(input, options) - : braces.expand(input, options); -}; + /* istanbul ignore next: There is no `timeout.unref()` when running inside an Electron renderer */ + if (this._removalTimeout.unref) { + this._removalTimeout.unref(); + } + } + } -/** - * Expose "braces" - */ + install(agent) { + verifyAgent(agent); -module.exports = braces; + if (kCacheableLookupCreateConnection in agent) { + throw new Error('CacheableLookup has been already installed'); + } + agent[kCacheableLookupCreateConnection] = agent.createConnection; + agent[kCacheableLookupInstance] = this; -/***/ }), + agent.createConnection = (options, callback) => { + if (!('lookup' in options)) { + options.lookup = this.lookup; + } -/***/ 7123: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + return agent[kCacheableLookupCreateConnection](options, callback); + }; + } -"use strict"; + uninstall(agent) { + verifyAgent(agent); + if (agent[kCacheableLookupCreateConnection]) { + if (agent[kCacheableLookupInstance] !== this) { + throw new Error('The agent is not owned by this CacheableLookup instance'); + } -const fill = __nccwpck_require__(791); -const utils = __nccwpck_require__(7691); + agent.createConnection = agent[kCacheableLookupCreateConnection]; -const compile = (ast, options = {}) => { - let walk = (node, parent = {}) => { - let invalidBlock = utils.isInvalidBrace(parent); - let invalidNode = node.invalid === true && options.escapeInvalid === true; - let invalid = invalidBlock === true || invalidNode === true; - let prefix = options.escapeInvalid === true ? 
'\\' : ''; - let output = ''; + delete agent[kCacheableLookupCreateConnection]; + delete agent[kCacheableLookupInstance]; + } + } - if (node.isOpen === true) { - return prefix + node.value; - } - if (node.isClose === true) { - return prefix + node.value; - } + updateInterfaceInfo() { + const {_iface} = this; - if (node.type === 'open') { - return invalid ? (prefix + node.value) : '('; - } + this._iface = getIfaceInfo(); - if (node.type === 'close') { - return invalid ? (prefix + node.value) : ')'; - } + if ((_iface.has4 && !this._iface.has4) || (_iface.has6 && !this._iface.has6)) { + this._cache.clear(); + } + } - if (node.type === 'comma') { - return node.prev.type === 'comma' ? '' : (invalid ? node.value : '|'); - } + clear(hostname) { + if (hostname) { + this._cache.delete(hostname); + return; + } - if (node.value) { - return node.value; - } + this._cache.clear(); + } +} - if (node.nodes && node.ranges > 0) { - let args = utils.reduce(node.nodes); - let range = fill(...args, { ...options, wrap: false, toRegex: true }); +module.exports = CacheableLookup; +module.exports["default"] = CacheableLookup; - if (range.length !== 0) { - return args.length > 1 && range.length > 1 ? `(${range})` : range; - } - } - if (node.nodes) { - for (let child of node.nodes) { - output += walk(child, node); - } - } - return output; - }; +/***/ }), - return walk(ast); -}; +/***/ 4340: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -module.exports = compile; +"use strict"; +const {PassThrough: PassThroughStream} = __nccwpck_require__(2781); -/***/ }), +module.exports = options => { + options = {...options}; -/***/ 5412: -/***/ ((module) => { + const {array} = options; + let {encoding} = options; + const isBuffer = encoding === 'buffer'; + let objectMode = false; -"use strict"; + if (array) { + objectMode = !(encoding || isBuffer); + } else { + encoding = encoding || 'utf8'; + } + if (isBuffer) { + encoding = null; + } -module.exports = { - MAX_LENGTH: 1024 * 64, + const stream = new PassThroughStream({objectMode}); - // Digits - CHAR_0: '0', /* 0 */ - CHAR_9: '9', /* 9 */ + if (encoding) { + stream.setEncoding(encoding); + } - // Alphabet chars. - CHAR_UPPERCASE_A: 'A', /* A */ - CHAR_LOWERCASE_A: 'a', /* a */ - CHAR_UPPERCASE_Z: 'Z', /* Z */ - CHAR_LOWERCASE_Z: 'z', /* z */ + let length = 0; + const chunks = []; - CHAR_LEFT_PARENTHESES: '(', /* ( */ - CHAR_RIGHT_PARENTHESES: ')', /* ) */ + stream.on('data', chunk => { + chunks.push(chunk); - CHAR_ASTERISK: '*', /* * */ + if (objectMode) { + length = chunks.length; + } else { + length += chunk.length; + } + }); - // Non-alphabetic chars. - CHAR_AMPERSAND: '&', /* & */ - CHAR_AT: '@', /* @ */ - CHAR_BACKSLASH: '\\', /* \ */ - CHAR_BACKTICK: '`', /* ` */ - CHAR_CARRIAGE_RETURN: '\r', /* \r */ - CHAR_CIRCUMFLEX_ACCENT: '^', /* ^ */ - CHAR_COLON: ':', /* : */ - CHAR_COMMA: ',', /* , */ - CHAR_DOLLAR: '$', /* . */ - CHAR_DOT: '.', /* . */ - CHAR_DOUBLE_QUOTE: '"', /* " */ - CHAR_EQUAL: '=', /* = */ - CHAR_EXCLAMATION_MARK: '!', /* ! */ - CHAR_FORM_FEED: '\f', /* \f */ - CHAR_FORWARD_SLASH: '/', /* / */ - CHAR_HASH: '#', /* # */ - CHAR_HYPHEN_MINUS: '-', /* - */ - CHAR_LEFT_ANGLE_BRACKET: '<', /* < */ - CHAR_LEFT_CURLY_BRACE: '{', /* { */ - CHAR_LEFT_SQUARE_BRACKET: '[', /* [ */ - CHAR_LINE_FEED: '\n', /* \n */ - CHAR_NO_BREAK_SPACE: '\u00A0', /* \u00A0 */ - CHAR_PERCENT: '%', /* % */ - CHAR_PLUS: '+', /* + */ - CHAR_QUESTION_MARK: '?', /* ? 
*/ - CHAR_RIGHT_ANGLE_BRACKET: '>', /* > */ - CHAR_RIGHT_CURLY_BRACE: '}', /* } */ - CHAR_RIGHT_SQUARE_BRACKET: ']', /* ] */ - CHAR_SEMICOLON: ';', /* ; */ - CHAR_SINGLE_QUOTE: '\'', /* ' */ - CHAR_SPACE: ' ', /* */ - CHAR_TAB: '\t', /* \t */ - CHAR_UNDERSCORE: '_', /* _ */ - CHAR_VERTICAL_LINE: '|', /* | */ - CHAR_ZERO_WIDTH_NOBREAK_SPACE: '\uFEFF' /* \uFEFF */ + stream.getBufferedValue = () => { + if (array) { + return chunks; + } + + return isBuffer ? Buffer.concat(chunks, length) : chunks.join(''); + }; + + stream.getBufferedLength = () => length; + + return stream; }; /***/ }), -/***/ 6944: +/***/ 7040: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; +const {constants: BufferConstants} = __nccwpck_require__(4300); +const pump = __nccwpck_require__(8341); +const bufferStream = __nccwpck_require__(4340); -const fill = __nccwpck_require__(791); -const stringify = __nccwpck_require__(4810); -const utils = __nccwpck_require__(7691); +class MaxBufferError extends Error { + constructor() { + super('maxBuffer exceeded'); + this.name = 'MaxBufferError'; + } +} -const append = (queue = '', stash = '', enclose = false) => { - let result = []; +async function getStream(inputStream, options) { + if (!inputStream) { + return Promise.reject(new Error('Expected a stream')); + } - queue = [].concat(queue); - stash = [].concat(stash); + options = { + maxBuffer: Infinity, + ...options + }; - if (!stash.length) return queue; - if (!queue.length) { - return enclose ? utils.flatten(stash).map(ele => `{${ele}}`) : stash; - } + const {maxBuffer} = options; - for (let item of queue) { - if (Array.isArray(item)) { - for (let value of item) { - result.push(append(value, stash, enclose)); - } - } else { - for (let ele of stash) { - if (enclose === true && typeof ele === 'string') ele = `{${ele}}`; - result.push(Array.isArray(ele) ? append(item, ele, enclose) : (item + ele)); - } - } - } - return utils.flatten(result); -}; + let stream; + await new Promise((resolve, reject) => { + const rejectPromise = error => { + // Don't retrieve an oversized buffer. + if (error && stream.getBufferedLength() <= BufferConstants.MAX_LENGTH) { + error.bufferedData = stream.getBufferedValue(); + } -const expand = (ast, options = {}) => { - let rangeLimit = options.rangeLimit === void 0 ? 
1000 : options.rangeLimit; + reject(error); + }; - let walk = (node, parent = {}) => { - node.queue = []; + stream = pump(inputStream, bufferStream(options), error => { + if (error) { + rejectPromise(error); + return; + } - let p = parent; - let q = parent.queue; + resolve(); + }); - while (p.type !== 'brace' && p.type !== 'root' && p.parent) { - p = p.parent; - q = p.queue; - } + stream.on('data', () => { + if (stream.getBufferedLength() > maxBuffer) { + rejectPromise(new MaxBufferError()); + } + }); + }); - if (node.invalid || node.dollar) { - q.push(append(q.pop(), stringify(node, options))); - return; - } + return stream.getBufferedValue(); +} - if (node.type === 'brace' && node.invalid !== true && node.nodes.length === 2) { - q.push(append(q.pop(), ['{}'])); - return; - } +module.exports = getStream; +// TODO: Remove this for the next major release +module.exports["default"] = getStream; +module.exports.buffer = (stream, options) => getStream(stream, {...options, encoding: 'buffer'}); +module.exports.array = (stream, options) => getStream(stream, {...options, array: true}); +module.exports.MaxBufferError = MaxBufferError; - if (node.nodes && node.ranges > 0) { - let args = utils.reduce(node.nodes); - if (utils.exceedsLimit(...args, options.step, rangeLimit)) { - throw new RangeError('expanded array length exceeds range limit. Use options.rangeLimit to increase or disable the limit.'); - } +/***/ }), - let range = fill(...args, options); - if (range.length === 0) { - range = stringify(node, options); - } +/***/ 8116: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - q.push(append(q.pop(), range)); - node.nodes = []; - return; - } +"use strict"; - let enclose = utils.encloseBrace(node); - let queue = node.queue; - let block = node; - while (block.type !== 'brace' && block.type !== 'root' && block.parent) { - block = block.parent; - queue = block.queue; - } +const EventEmitter = __nccwpck_require__(2361); +const urlLib = __nccwpck_require__(7310); +const normalizeUrl = __nccwpck_require__(7952); +const getStream = __nccwpck_require__(7040); +const CachePolicy = __nccwpck_require__(1002); +const Response = __nccwpck_require__(9004); +const lowercaseKeys = __nccwpck_require__(9662); +const cloneResponse = __nccwpck_require__(1312); +const Keyv = __nccwpck_require__(1531); - for (let i = 0; i < node.nodes.length; i++) { - let child = node.nodes[i]; +class CacheableRequest { + constructor(request, cacheAdapter) { + if (typeof request !== 'function') { + throw new TypeError('Parameter `request` must be a function'); + } - if (child.type === 'comma' && node.type === 'brace') { - if (i === 1) queue.push(''); - queue.push(''); - continue; - } + this.cache = new Keyv({ + uri: typeof cacheAdapter === 'string' && cacheAdapter, + store: typeof cacheAdapter !== 'string' && cacheAdapter, + namespace: 'cacheable-request' + }); - if (child.type === 'close') { - q.push(append(q.pop(), queue, enclose)); - continue; - } + return this.createCacheableRequest(request); + } - if (child.value && child.type !== 'open') { - queue.push(append(queue.pop(), child.value)); - continue; - } + createCacheableRequest(request) { + return (opts, cb) => { + let url; + if (typeof opts === 'string') { + url = normalizeUrlObject(urlLib.parse(opts)); + opts = {}; + } else if (opts instanceof urlLib.URL) { + url = normalizeUrlObject(urlLib.parse(opts.toString())); + opts = {}; + } else { + const [pathname, ...searchParts] = (opts.path || '').split('?'); + const search = searchParts.length > 0 ? 
+ `?${searchParts.join('?')}` : + ''; + url = normalizeUrlObject({ ...opts, pathname, search }); + } - if (child.nodes) { - walk(child, node); - } - } + opts = { + headers: {}, + method: 'GET', + cache: true, + strictTtl: false, + automaticFailover: false, + ...opts, + ...urlObjectToRequestOptions(url) + }; + opts.headers = lowercaseKeys(opts.headers); - return queue; - }; + const ee = new EventEmitter(); + const normalizedUrlString = normalizeUrl( + urlLib.format(url), + { + stripWWW: false, + removeTrailingSlash: false, + stripAuthentication: false + } + ); + const key = `${opts.method}:${normalizedUrlString}`; + let revalidate = false; + let madeRequest = false; - return utils.flatten(walk(ast)); -}; + const makeRequest = opts => { + madeRequest = true; + let requestErrored = false; + let requestErrorCallback; -module.exports = expand; + const requestErrorPromise = new Promise(resolve => { + requestErrorCallback = () => { + if (!requestErrored) { + requestErrored = true; + resolve(); + } + }; + }); + const handler = response => { + if (revalidate && !opts.forceRefresh) { + response.status = response.statusCode; + const revalidatedPolicy = CachePolicy.fromObject(revalidate.cachePolicy).revalidatedPolicy(opts, response); + if (!revalidatedPolicy.modified) { + const headers = revalidatedPolicy.policy.responseHeaders(); + response = new Response(revalidate.statusCode, headers, revalidate.body, revalidate.url); + response.cachePolicy = revalidatedPolicy.policy; + response.fromCache = true; + } + } -/***/ }), + if (!response.fromCache) { + response.cachePolicy = new CachePolicy(opts, response, opts); + response.fromCache = false; + } -/***/ 9889: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + let clonedResponse; + if (opts.cache && response.cachePolicy.storable()) { + clonedResponse = cloneResponse(response); -"use strict"; + (async () => { + try { + const bodyPromise = getStream.buffer(response); + await Promise.race([ + requestErrorPromise, + new Promise(resolve => response.once('end', resolve)) + ]); -const stringify = __nccwpck_require__(4810); + if (requestErrored) { + return; + } -/** - * Constants - */ + const body = await bodyPromise; -const { - MAX_LENGTH, - CHAR_BACKSLASH, /* \ */ - CHAR_BACKTICK, /* ` */ - CHAR_COMMA, /* , */ - CHAR_DOT, /* . */ - CHAR_LEFT_PARENTHESES, /* ( */ - CHAR_RIGHT_PARENTHESES, /* ) */ - CHAR_LEFT_CURLY_BRACE, /* { */ - CHAR_RIGHT_CURLY_BRACE, /* } */ - CHAR_LEFT_SQUARE_BRACKET, /* [ */ - CHAR_RIGHT_SQUARE_BRACKET, /* ] */ - CHAR_DOUBLE_QUOTE, /* " */ - CHAR_SINGLE_QUOTE, /* ' */ - CHAR_NO_BREAK_SPACE, - CHAR_ZERO_WIDTH_NOBREAK_SPACE -} = __nccwpck_require__(5412); + const value = { + cachePolicy: response.cachePolicy.toObject(), + url: response.url, + statusCode: response.fromCache ? revalidate.statusCode : response.statusCode, + body + }; -/** - * parse - */ + let ttl = opts.strictTtl ? response.cachePolicy.timeToLive() : undefined; + if (opts.maxTtl) { + ttl = ttl ? Math.min(ttl, opts.maxTtl) : opts.maxTtl; + } -const parse = (input, options = {}) => { - if (typeof input !== 'string') { - throw new TypeError('Expected a string'); - } + await this.cache.set(key, value, ttl); + } catch (error) { + ee.emit('error', new CacheableRequest.CacheError(error)); + } + })(); + } else if (opts.cache && revalidate) { + (async () => { + try { + await this.cache.delete(key); + } catch (error) { + ee.emit('error', new CacheableRequest.CacheError(error)); + } + })(); + } - let opts = options || {}; - let max = typeof opts.maxLength === 'number' ? 
Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH; - if (input.length > max) { - throw new SyntaxError(`Input length (${input.length}), exceeds max characters (${max})`); - } + ee.emit('response', clonedResponse || response); + if (typeof cb === 'function') { + cb(clonedResponse || response); + } + }; - let ast = { type: 'root', input, nodes: [] }; - let stack = [ast]; - let block = ast; - let prev = ast; - let brackets = 0; - let length = input.length; - let index = 0; - let depth = 0; - let value; - let memo = {}; + try { + const req = request(opts, handler); + req.once('error', requestErrorCallback); + req.once('abort', requestErrorCallback); + ee.emit('request', req); + } catch (error) { + ee.emit('error', new CacheableRequest.RequestError(error)); + } + }; - /** - * Helpers - */ + (async () => { + const get = async opts => { + await Promise.resolve(); - const advance = () => input[index++]; - const push = node => { - if (node.type === 'text' && prev.type === 'dot') { - prev.type = 'text'; - } + const cacheEntry = opts.cache ? await this.cache.get(key) : undefined; + if (typeof cacheEntry === 'undefined') { + return makeRequest(opts); + } - if (prev && prev.type === 'text' && node.type === 'text') { - prev.value += node.value; - return; - } + const policy = CachePolicy.fromObject(cacheEntry.cachePolicy); + if (policy.satisfiesWithoutRevalidation(opts) && !opts.forceRefresh) { + const headers = policy.responseHeaders(); + const response = new Response(cacheEntry.statusCode, headers, cacheEntry.body, cacheEntry.url); + response.cachePolicy = policy; + response.fromCache = true; - block.nodes.push(node); - node.parent = block; - node.prev = prev; - prev = node; - return node; - }; + ee.emit('response', response); + if (typeof cb === 'function') { + cb(response); + } + } else { + revalidate = cacheEntry; + opts.headers = policy.revalidationHeaders(opts); + makeRequest(opts); + } + }; - push({ type: 'bos' }); + const errorHandler = error => ee.emit('error', new CacheableRequest.CacheError(error)); + this.cache.once('error', errorHandler); + ee.on('response', () => this.cache.removeListener('error', errorHandler)); - while (index < length) { - block = stack[stack.length - 1]; - value = advance(); + try { + await get(opts); + } catch (error) { + if (opts.automaticFailover && !madeRequest) { + makeRequest(opts); + } - /** - * Invalid chars - */ + ee.emit('error', new CacheableRequest.CacheError(error)); + } + })(); - if (value === CHAR_ZERO_WIDTH_NOBREAK_SPACE || value === CHAR_NO_BREAK_SPACE) { - continue; - } + return ee; + }; + } +} - /** - * Escaped chars - */ +function urlObjectToRequestOptions(url) { + const options = { ...url }; + options.path = `${url.pathname || '/'}${url.search || ''}`; + delete options.pathname; + delete options.search; + return options; +} - if (value === CHAR_BACKSLASH) { - push({ type: 'text', value: (options.keepEscaping ? 
value : '') + advance() }); - continue; - } +function normalizeUrlObject(url) { + // If url was parsed by url.parse or new URL: + // - hostname will be set + // - host will be hostname[:port] + // - port will be set if it was explicit in the parsed string + // Otherwise, url was from request options: + // - hostname or host may be set + // - host shall not have port encoded + return { + protocol: url.protocol, + auth: url.auth, + hostname: url.hostname || url.host || 'localhost', + port: url.port, + pathname: url.pathname, + search: url.search + }; +} - /** - * Right square bracket (literal): ']' - */ +CacheableRequest.RequestError = class extends Error { + constructor(error) { + super(error.message); + this.name = 'RequestError'; + Object.assign(this, error); + } +}; - if (value === CHAR_RIGHT_SQUARE_BRACKET) { - push({ type: 'text', value: '\\' + value }); - continue; - } +CacheableRequest.CacheError = class extends Error { + constructor(error) { + super(error.message); + this.name = 'CacheError'; + Object.assign(this, error); + } +}; - /** - * Left square bracket: '[' - */ +module.exports = CacheableRequest; - if (value === CHAR_LEFT_SQUARE_BRACKET) { - brackets++; - let closed = true; - let next; +/***/ }), - while (index < length && (next = advance())) { - value += next; +/***/ 1312: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (next === CHAR_LEFT_SQUARE_BRACKET) { - brackets++; - continue; - } +"use strict"; - if (next === CHAR_BACKSLASH) { - value += advance(); - continue; - } - if (next === CHAR_RIGHT_SQUARE_BRACKET) { - brackets--; +const PassThrough = (__nccwpck_require__(2781).PassThrough); +const mimicResponse = __nccwpck_require__(2610); - if (brackets === 0) { - break; - } - } - } +const cloneResponse = response => { + if (!(response && response.pipe)) { + throw new TypeError('Parameter `response` must be a response stream.'); + } - push({ type: 'text', value }); - continue; - } + const clone = new PassThrough(); + mimicResponse(response, clone); - /** - * Parentheses - */ + return response.pipe(clone); +}; - if (value === CHAR_LEFT_PARENTHESES) { - block = push({ type: 'paren', nodes: [] }); - stack.push(block); - push({ type: 'text', value }); - continue; - } +module.exports = cloneResponse; - if (value === CHAR_RIGHT_PARENTHESES) { - if (block.type !== 'paren') { - push({ type: 'text', value }); - continue; - } - block = stack.pop(); - push({ type: 'text', value }); - block = stack[stack.length - 1]; - continue; - } - /** - * Quotes: '|"|` - */ +/***/ }), - if (value === CHAR_DOUBLE_QUOTE || value === CHAR_SINGLE_QUOTE || value === CHAR_BACKTICK) { - let open = value; - let next; +/***/ 2391: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (options.keepQuotes !== true) { - value = ''; - } +"use strict"; - while (index < length && (next = advance())) { - if (next === CHAR_BACKSLASH) { - value += next + advance(); - continue; - } +const {Transform, PassThrough} = __nccwpck_require__(2781); +const zlib = __nccwpck_require__(9796); +const mimicResponse = __nccwpck_require__(3877); - if (next === open) { - if (options.keepQuotes === true) value += next; - break; - } +module.exports = response => { + const contentEncoding = (response.headers['content-encoding'] || '').toLowerCase(); - value += next; - } + if (!['gzip', 'deflate', 'br'].includes(contentEncoding)) { + return response; + } - push({ type: 'text', value }); - continue; - } + // TODO: Remove this when targeting Node.js 12. 
+ const isBrotli = contentEncoding === 'br'; + if (isBrotli && typeof zlib.createBrotliDecompress !== 'function') { + response.destroy(new Error('Brotli is not supported on Node.js < 12')); + return response; + } - /** - * Left curly brace: '{' - */ + let isEmpty = true; - if (value === CHAR_LEFT_CURLY_BRACE) { - depth++; + const checker = new Transform({ + transform(data, _encoding, callback) { + isEmpty = false; - let dollar = prev.value && prev.value.slice(-1) === '$' || block.dollar === true; - let brace = { - type: 'brace', - open: true, - close: false, - dollar, - depth, - commas: 0, - ranges: 0, - nodes: [] - }; + callback(null, data); + }, - block = push(brace); - stack.push(block); - push({ type: 'open', value }); - continue; - } + flush(callback) { + callback(); + } + }); - /** - * Right curly brace: '}' - */ + const finalStream = new PassThrough({ + autoDestroy: false, + destroy(error, callback) { + response.destroy(); - if (value === CHAR_RIGHT_CURLY_BRACE) { - if (block.type !== 'brace') { - push({ type: 'text', value }); - continue; - } + callback(error); + } + }); - let type = 'close'; - block = stack.pop(); - block.close = true; + const decompressStream = isBrotli ? zlib.createBrotliDecompress() : zlib.createUnzip(); - push({ type, value }); - depth--; + decompressStream.once('error', error => { + if (isEmpty && !response.readable) { + finalStream.end(); + return; + } - block = stack[stack.length - 1]; - continue; - } + finalStream.destroy(error); + }); - /** - * Comma: ',' - */ + mimicResponse(response, finalStream); + response.pipe(checker).pipe(decompressStream).pipe(finalStream); - if (value === CHAR_COMMA && depth > 0) { - if (block.ranges > 0) { - block.ranges = 0; - let open = block.nodes.shift(); - block.nodes = [open, { type: 'text', value: stringify(block) }]; - } + return finalStream; +}; - push({ type: 'comma', value }); - block.commas++; - continue; - } - /** - * Dot: '.' - */ +/***/ }), - if (value === CHAR_DOT && depth > 0 && block.commas === 0) { - let siblings = block.nodes; +/***/ 3877: +/***/ ((module) => { - if (depth === 0 || siblings.length === 0) { - push({ type: 'text', value }); - continue; - } +"use strict"; - if (prev.type === 'dot') { - block.range = []; - prev.value += value; - prev.type = 'range'; - if (block.nodes.length !== 3 && block.nodes.length !== 5) { - block.invalid = true; - block.ranges = 0; - prev.type = 'text'; - continue; - } +// We define these manually to ensure they're always copied +// even if they would move up the prototype chain +// https://nodejs.org/api/http.html#http_class_http_incomingmessage +const knownProperties = [ + 'aborted', + 'complete', + 'headers', + 'httpVersion', + 'httpVersionMinor', + 'httpVersionMajor', + 'method', + 'rawHeaders', + 'rawTrailers', + 'setTimeout', + 'socket', + 'statusCode', + 'statusMessage', + 'trailers', + 'url' +]; - block.ranges++; - block.args = []; - continue; - } +module.exports = (fromStream, toStream) => { + if (toStream._readableState.autoDestroy) { + throw new Error('The second stream must have the `autoDestroy` option set to `false`'); + } - if (prev.type === 'range') { - siblings.pop(); + const fromProperties = new Set(Object.keys(fromStream).concat(knownProperties)); - let before = siblings[siblings.length - 1]; - before.value += prev.value + value; - prev = before; - block.ranges--; - continue; - } + const properties = {}; - push({ type: 'dot', value }); - continue; - } + for (const property of fromProperties) { + // Don't overwrite existing properties. 
+ if (property in toStream) { + continue; + } - /** - * Text - */ + properties[property] = { + get() { + const value = fromStream[property]; + const isFunction = typeof value === 'function'; - push({ type: 'text', value }); - } + return isFunction ? value.bind(fromStream) : value; + }, + set(value) { + fromStream[property] = value; + }, + enumerable: true, + configurable: false + }; + } - // Mark imbalanced braces and brackets as invalid - do { - block = stack.pop(); + Object.defineProperties(toStream, properties); - if (block.type !== 'root') { - block.nodes.forEach(node => { - if (!node.nodes) { - if (node.type === 'open') node.isOpen = true; - if (node.type === 'close') node.isClose = true; - if (!node.nodes) node.type = 'text'; - node.invalid = true; - } - }); + fromStream.once('aborted', () => { + toStream.destroy(); - // get the location of the block on parent.nodes (block's siblings) - let parent = stack[stack.length - 1]; - let index = parent.nodes.indexOf(block); - // replace the (invalid) block with it's nodes - parent.nodes.splice(index, 1, ...block.nodes); - } - } while (stack.length > 0); + toStream.emit('aborted'); + }); - push({ type: 'eos' }); - return ast; -}; + fromStream.once('close', () => { + if (fromStream.complete) { + if (toStream.readable) { + toStream.once('end', () => { + toStream.emit('close'); + }); + } else { + toStream.emit('close'); + } + } else { + toStream.emit('close'); + } + }); -module.exports = parse; + return toStream; +}; /***/ }), -/***/ 4810: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 6214: +/***/ ((module, exports) => { "use strict"; - -const utils = __nccwpck_require__(7691); - -module.exports = (ast, options = {}) => { - let stringify = (node, parent = {}) => { - let invalidBlock = options.escapeInvalid && utils.isInvalidBrace(parent); - let invalidNode = node.invalid === true && options.escapeInvalid === true; - let output = ''; - - if (node.value) { - if ((invalidBlock || invalidNode) && utils.isOpenOrClose(node)) { - return '\\' + node.value; - } - return node.value; +Object.defineProperty(exports, "__esModule", ({ value: true })); +function isTLSSocket(socket) { + return socket.encrypted; +} +const deferToConnect = (socket, fn) => { + let listeners; + if (typeof fn === 'function') { + const connect = fn; + listeners = { connect }; } - - if (node.value) { - return node.value; + else { + listeners = fn; } - - if (node.nodes) { - for (let child of node.nodes) { - output += stringify(child); - } + const hasConnectListener = typeof listeners.connect === 'function'; + const hasSecureConnectListener = typeof listeners.secureConnect === 'function'; + const hasCloseListener = typeof listeners.close === 'function'; + const onConnect = () => { + if (hasConnectListener) { + listeners.connect(); + } + if (isTLSSocket(socket) && hasSecureConnectListener) { + if (socket.authorized) { + listeners.secureConnect(); + } + else if (!socket.authorizationError) { + socket.once('secureConnect', listeners.secureConnect); + } + } + if (hasCloseListener) { + socket.once('close', listeners.close); + } + }; + if (socket.writable && !socket.connecting) { + onConnect(); + } + else if (socket.connecting) { + socket.once('connect', onConnect); + } + else if (socket.destroyed && hasCloseListener) { + listeners.close(socket._hadError); } - return output; - }; - - return stringify(ast); }; - +exports["default"] = deferToConnect; +// For CommonJS default export support +module.exports = deferToConnect; +module.exports["default"] = 
deferToConnect; /***/ }), -/***/ 7691: +/***/ 8932: /***/ ((__unused_webpack_module, exports) => { "use strict"; -exports.isInteger = num => { - if (typeof num === 'number') { - return Number.isInteger(num); - } - if (typeof num === 'string' && num.trim() !== '') { - return Number.isInteger(Number(num)); - } - return false; -}; - -/** - * Find a node of the given type - */ - -exports.find = (node, type) => node.nodes.find(node => node.type === type); - -/** - * Find a node of the given type - */ - -exports.exceedsLimit = (min, max, step = 1, limit) => { - if (limit === false) return false; - if (!exports.isInteger(min) || !exports.isInteger(max)) return false; - return ((Number(max) - Number(min)) / Number(step)) >= limit; -}; +Object.defineProperty(exports, "__esModule", ({ value: true })); -/** - * Escape the given node with '\\' before node.value - */ +class Deprecation extends Error { + constructor(message) { + super(message); // Maintains proper stack trace (only available on V8) -exports.escapeNode = (block, n = 0, type) => { - let node = block.nodes[n]; - if (!node) return; + /* istanbul ignore next */ - if ((type && node.type === type) || node.type === 'open' || node.type === 'close') { - if (node.escaped !== true) { - node.value = '\\' + node.value; - node.escaped = true; + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); } - } -}; - -/** - * Returns true if the given brace node should be enclosed in literal braces - */ - -exports.encloseBrace = node => { - if (node.type !== 'brace') return false; - if ((node.commas >> 0 + node.ranges >> 0) === 0) { - node.invalid = true; - return true; - } - return false; -}; - -/** - * Returns true if a brace node is invalid. - */ - -exports.isInvalidBrace = block => { - if (block.type !== 'brace') return false; - if (block.invalid === true || block.dollar) return true; - if ((block.commas >> 0 + block.ranges >> 0) === 0) { - block.invalid = true; - return true; - } - if (block.open !== true || block.close !== true) { - block.invalid = true; - return true; - } - return false; -}; - -/** - * Returns true if a node is an open or close node - */ -exports.isOpenOrClose = node => { - if (node.type === 'open' || node.type === 'close') { - return true; + this.name = 'Deprecation'; } - return node.open === true || node.close === true; -}; - -/** - * Reduce an array of text nodes. - */ - -exports.reduce = nodes => nodes.reduce((acc, node) => { - if (node.type === 'text') acc.push(node.value); - if (node.type === 'range') node.type = 'text'; - return acc; -}, []); -/** - * Flatten an array - */ +} -exports.flatten = (...args) => { - const result = []; - const flat = arr => { - for (let i = 0; i < arr.length; i++) { - let ele = arr[i]; - Array.isArray(ele) ? flat(ele, result) : ele !== void 0 && result.push(ele); - } - return result; - }; - flat(args); - return result; -}; +exports.Deprecation = Deprecation; /***/ }), -/***/ 791: +/***/ 1205: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; -/*! - * fill-range - * - * Copyright (c) 2014-present, Jon Schlinkert. - * Licensed under the MIT License. - */ - - - -const util = __nccwpck_require__(1669); -const toRegexRange = __nccwpck_require__(6867); - -const isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val); - -const transform = toNumber => { - return value => toNumber === true ? 
Number(value) : String(value); -}; - -const isValidValue = value => { - return typeof value === 'number' || (typeof value === 'string' && value !== ''); -}; - -const isNumber = num => Number.isInteger(+num); +var once = __nccwpck_require__(1223); -const zeros = input => { - let value = `${input}`; - let index = -1; - if (value[0] === '-') value = value.slice(1); - if (value === '0') return false; - while (value[++index] === '0'); - return index > 0; -}; +var noop = function() {}; -const stringify = (start, end, options) => { - if (typeof start === 'string' || typeof end === 'string') { - return true; - } - return options.stringify === true; +var isRequest = function(stream) { + return stream.setHeader && typeof stream.abort === 'function'; }; -const pad = (input, maxLength, toNumber) => { - if (maxLength > 0) { - let dash = input[0] === '-' ? '-' : ''; - if (dash) input = input.slice(1); - input = (dash + input.padStart(dash ? maxLength - 1 : maxLength, '0')); - } - if (toNumber === false) { - return String(input); - } - return input; +var isChildProcess = function(stream) { + return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3 }; -const toMaxLen = (input, maxLength) => { - let negative = input[0] === '-' ? '-' : ''; - if (negative) { - input = input.slice(1); - maxLength--; - } - while (input.length < maxLength) input = '0' + input; - return negative ? ('-' + input) : input; -}; +var eos = function(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; -const toSequence = (parts, options) => { - parts.negatives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0); - parts.positives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0); + callback = once(callback || noop); - let prefix = options.capture ? '' : '?:'; - let positives = ''; - let negatives = ''; - let result; + var ws = stream._writableState; + var rs = stream._readableState; + var readable = opts.readable || (opts.readable !== false && stream.readable); + var writable = opts.writable || (opts.writable !== false && stream.writable); + var cancelled = false; - if (parts.positives.length) { - positives = parts.positives.join('|'); - } + var onlegacyfinish = function() { + if (!stream.writable) onfinish(); + }; - if (parts.negatives.length) { - negatives = `-(${prefix}${parts.negatives.join('|')})`; - } + var onfinish = function() { + writable = false; + if (!readable) callback.call(stream); + }; - if (positives && negatives) { - result = `${positives}|${negatives}`; - } else { - result = positives || negatives; - } + var onend = function() { + readable = false; + if (!writable) callback.call(stream); + }; - if (options.wrap) { - return `(${prefix}${result})`; - } + var onexit = function(exitCode) { + callback.call(stream, exitCode ? 
new Error('exited with error code: ' + exitCode) : null); + }; - return result; -}; + var onerror = function(err) { + callback.call(stream, err); + }; -const toRange = (a, b, isNumbers, options) => { - if (isNumbers) { - return toRegexRange(a, b, { wrap: false, ...options }); - } + var onclose = function() { + process.nextTick(onclosenexttick); + }; - let start = String.fromCharCode(a); - if (a === b) return start; + var onclosenexttick = function() { + if (cancelled) return; + if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close')); + if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close')); + }; - let stop = String.fromCharCode(b); - return `[${start}-${stop}]`; -}; + var onrequest = function() { + stream.req.on('finish', onfinish); + }; -const toRegex = (start, end, options) => { - if (Array.isArray(start)) { - let wrap = options.wrap === true; - let prefix = options.capture ? '' : '?:'; - return wrap ? `(${prefix}${start.join('|')})` : start.join('|'); - } - return toRegexRange(start, end, options); -}; + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest(); + else stream.on('request', onrequest); + } else if (writable && !ws) { // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } -const rangeError = (...args) => { - return new RangeError('Invalid range arguments: ' + util.inspect(...args)); -}; + if (isChildProcess(stream)) stream.on('exit', onexit); -const invalidRange = (start, end, options) => { - if (options.strictRanges === true) throw rangeError([start, end]); - return []; -}; + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); -const invalidStep = (step, options) => { - if (options.strictRanges === true) { - throw new TypeError(`Expected step "${step}" to be a number`); - } - return []; + return function() { + cancelled = true; + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('exit', onexit); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; }; -const fillNumbers = (start, end, step = 1, options = {}) => { - let a = Number(start); - let b = Number(end); - - if (!Number.isInteger(a) || !Number.isInteger(b)) { - if (options.strictRanges === true) throw rangeError([start, end]); - return []; - } +module.exports = eos; - // fix negative zero - if (a === 0) a = 0; - if (b === 0) b = 0; - let descending = a > b; - let startString = String(start); - let endString = String(end); - let stepString = String(step); - step = Math.max(Math.abs(step), 1); +/***/ }), - let padded = zeros(startString) || zeros(endString) || zeros(stepString); - let maxLen = padded ? 
Math.max(startString.length, endString.length, stepString.length) : 0; - let toNumber = padded === false && stringify(start, end, options) === false; - let format = options.transform || transform(toNumber); +/***/ 4460: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (options.toRegex && step === 1) { - return toRange(toMaxLen(start, maxLen), toMaxLen(end, maxLen), true, options); - } +"use strict"; - let parts = { negatives: [], positives: [] }; - let push = num => parts[num < 0 ? 'negatives' : 'positives'].push(Math.abs(num)); - let range = []; - let index = 0; - while (descending ? a >= b : a <= b) { - if (options.toRegex === true && step > 1) { - push(a); - } else { - range.push(pad(format(a, index), maxLen, toNumber)); - } - a = descending ? a - step : a + step; - index++; - } +var isGlob = __nccwpck_require__(4466); +var pathPosixDirname = (__nccwpck_require__(1017).posix.dirname); +var isWin32 = (__nccwpck_require__(2037).platform)() === 'win32'; - if (options.toRegex === true) { - return step > 1 - ? toSequence(parts, options) - : toRegex(range, null, { wrap: false, ...options }); - } +var slash = '/'; +var backslash = /\\/g; +var enclosure = /[\{\[].*[\}\]]$/; +var globby = /(^|[^\\])([\{\[]|\([^\)]+$)/; +var escaped = /\\([\!\*\?\|\[\]\(\)\{\}])/g; - return range; -}; +/** + * @param {string} str + * @param {Object} opts + * @param {boolean} [opts.flipBackslashes=true] + * @returns {string} + */ +module.exports = function globParent(str, opts) { + var options = Object.assign({ flipBackslashes: true }, opts); -const fillLetters = (start, end, step = 1, options = {}) => { - if ((!isNumber(start) && start.length > 1) || (!isNumber(end) && end.length > 1)) { - return invalidRange(start, end, options); + // flip windows path separators + if (options.flipBackslashes && isWin32 && str.indexOf(slash) < 0) { + str = str.replace(backslash, slash); } - - let format = options.transform || (val => String.fromCharCode(val)); - let a = `${start}`.charCodeAt(0); - let b = `${end}`.charCodeAt(0); - - let descending = a > b; - let min = Math.min(a, b); - let max = Math.max(a, b); - - if (options.toRegex && step === 1) { - return toRange(min, max, false, options); + // special case for strings ending in enclosure containing path separator + if (enclosure.test(str)) { + str += slash; } - let range = []; - let index = 0; - - while (descending ? a >= b : a <= b) { - range.push(format(a, index)); - a = descending ? 
a - step : a + step; - index++; - } + // preserves full path in case of trailing path separator + str += 'a'; - if (options.toRegex === true) { - return toRegex(range, null, { wrap: false, options }); - } + // remove path parts that are globby + do { + str = pathPosixDirname(str); + } while (isGlob(str) || globby.test(str)); - return range; + // remove escape chars and return result + return str.replace(escaped, '$1'); }; -const fill = (start, end, step, options = {}) => { - if (end == null && isValidValue(start)) { - return [start]; - } - - if (!isValidValue(start) || !isValidValue(end)) { - return invalidRange(start, end, options); - } - - if (typeof step === 'function') { - return fill(start, end, 1, { transform: step }); - } - - if (isObject(step)) { - return fill(start, end, 0, step); - } - - let opts = { ...options }; - if (opts.capture === true) opts.wrap = true; - step = step || opts.step || 1; - if (!isNumber(step)) { - if (step != null && !isObject(step)) return invalidStep(step, opts); - return fill(start, end, 1, step); - } +/***/ }), - if (isNumber(start) && isNumber(end)) { - return fillNumbers(start, end, step, opts); - } +/***/ 3664: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - return fillLetters(start, end, Math.max(Math.abs(step), 1), opts); -}; +"use strict"; -module.exports = fill; +const taskManager = __nccwpck_require__(2708); +const async_1 = __nccwpck_require__(5679); +const stream_1 = __nccwpck_require__(4630); +const sync_1 = __nccwpck_require__(2405); +const settings_1 = __nccwpck_require__(952); +const utils = __nccwpck_require__(8223); +async function FastGlob(source, options) { + assertPatternsInput(source); + const works = getWorks(source, async_1.default, options); + const result = await Promise.all(works); + return utils.array.flatten(result); +} +// https://github.com/typescript-eslint/typescript-eslint/issues/60 +// eslint-disable-next-line no-redeclare +(function (FastGlob) { + FastGlob.glob = FastGlob; + FastGlob.globSync = sync; + FastGlob.globStream = stream; + FastGlob.async = FastGlob; + function sync(source, options) { + assertPatternsInput(source); + const works = getWorks(source, sync_1.default, options); + return utils.array.flatten(works); + } + FastGlob.sync = sync; + function stream(source, options) { + assertPatternsInput(source); + const works = getWorks(source, stream_1.default, options); + /** + * The stream returned by the provider cannot work with an asynchronous iterator. + * To support asynchronous iterators, regardless of the number of tasks, we always multiplex streams. + * This affects performance (+25%). I don't see best solution right now. 
+ */ + return utils.stream.merge(works); + } + FastGlob.stream = stream; + function generateTasks(source, options) { + assertPatternsInput(source); + const patterns = [].concat(source); + const settings = new settings_1.default(options); + return taskManager.generate(patterns, settings); + } + FastGlob.generateTasks = generateTasks; + function isDynamicPattern(source, options) { + assertPatternsInput(source); + const settings = new settings_1.default(options); + return utils.pattern.isDynamicPattern(source, settings); + } + FastGlob.isDynamicPattern = isDynamicPattern; + function escapePath(source) { + assertPatternsInput(source); + return utils.path.escape(source); + } + FastGlob.escapePath = escapePath; + function convertPathToPattern(source) { + assertPatternsInput(source); + return utils.path.convertPathToPattern(source); + } + FastGlob.convertPathToPattern = convertPathToPattern; + let posix; + (function (posix) { + function escapePath(source) { + assertPatternsInput(source); + return utils.path.escapePosixPath(source); + } + posix.escapePath = escapePath; + function convertPathToPattern(source) { + assertPatternsInput(source); + return utils.path.convertPosixPathToPattern(source); + } + posix.convertPathToPattern = convertPathToPattern; + })(posix = FastGlob.posix || (FastGlob.posix = {})); + let win32; + (function (win32) { + function escapePath(source) { + assertPatternsInput(source); + return utils.path.escapeWindowsPath(source); + } + win32.escapePath = escapePath; + function convertPathToPattern(source) { + assertPatternsInput(source); + return utils.path.convertWindowsPathToPattern(source); + } + win32.convertPathToPattern = convertPathToPattern; + })(win32 = FastGlob.win32 || (FastGlob.win32 = {})); +})(FastGlob || (FastGlob = {})); +function getWorks(source, _Provider, options) { + const patterns = [].concat(source); + const settings = new settings_1.default(options); + const tasks = taskManager.generate(patterns, settings); + const provider = new _Provider(settings); + return tasks.map(provider.read, provider); +} +function assertPatternsInput(input) { + const source = [].concat(input); + const isValidSource = source.every((item) => utils.string.isString(item) && !utils.string.isEmpty(item)); + if (!isValidSource) { + throw new TypeError('Patterns must be a string (non empty) or an array of strings'); + } +} +module.exports = FastGlob; /***/ }), -/***/ 2840: -/***/ ((module) => { +/***/ 2708: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -/*! - * is-number - * - * Copyright (c) 2014-present, Jon Schlinkert. - * Released under the MIT License. - */ - - -module.exports = function(num) { - if (typeof num === 'number') { - return num - num === 0; - } - if (typeof num === 'string' && num.trim() !== '') { - return Number.isFinite ? 
Number.isFinite(+num) : isFinite(+num); - } - return false; -}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.convertPatternGroupToTask = exports.convertPatternGroupsToTasks = exports.groupPatternsByBaseDirectory = exports.getNegativePatternsAsPositive = exports.getPositivePatterns = exports.convertPatternsToTasks = exports.generate = void 0; +const utils = __nccwpck_require__(8223); +function generate(input, settings) { + const patterns = processPatterns(input, settings); + const ignore = processPatterns(settings.ignore, settings); + const positivePatterns = getPositivePatterns(patterns); + const negativePatterns = getNegativePatternsAsPositive(patterns, ignore); + const staticPatterns = positivePatterns.filter((pattern) => utils.pattern.isStaticPattern(pattern, settings)); + const dynamicPatterns = positivePatterns.filter((pattern) => utils.pattern.isDynamicPattern(pattern, settings)); + const staticTasks = convertPatternsToTasks(staticPatterns, negativePatterns, /* dynamic */ false); + const dynamicTasks = convertPatternsToTasks(dynamicPatterns, negativePatterns, /* dynamic */ true); + return staticTasks.concat(dynamicTasks); +} +exports.generate = generate; +function processPatterns(input, settings) { + let patterns = input; + /** + * The original pattern like `{,*,**,a/*}` can lead to problems checking the depth when matching entry + * and some problems with the micromatch package (see fast-glob issues: #365, #394). + * + * To solve this problem, we expand all patterns containing brace expansion. This can lead to a slight slowdown + * in matching in the case of a large set of patterns after expansion. + */ + if (settings.braceExpansion) { + patterns = utils.pattern.expandPatternsWithBraceExpansion(patterns); + } + /** + * If the `baseNameMatch` option is enabled, we must add globstar to patterns, so that they can be used + * at any nesting level. + * + * We do this here, because otherwise we have to complicate the filtering logic. For example, we need to change + * the pattern in the filter before creating a regular expression. There is no need to change the patterns + * in the application. Only on the input. + */ + if (settings.baseNameMatch) { + patterns = patterns.map((pattern) => pattern.includes('/') ? pattern : `**/${pattern}`); + } + /** + * This method also removes duplicate slashes that may have been in the pattern or formed as a result of expansion. + */ + return patterns.map((pattern) => utils.pattern.removeDuplicateSlashes(pattern)); +} +/** + * Returns tasks grouped by basic pattern directories. + * + * Patterns that can be found inside (`./`) and outside (`../`) the current directory are handled separately. + * This is necessary because directory traversal starts at the base directory and goes deeper. 
+ */ +function convertPatternsToTasks(positive, negative, dynamic) { + const tasks = []; + const patternsOutsideCurrentDirectory = utils.pattern.getPatternsOutsideCurrentDirectory(positive); + const patternsInsideCurrentDirectory = utils.pattern.getPatternsInsideCurrentDirectory(positive); + const outsideCurrentDirectoryGroup = groupPatternsByBaseDirectory(patternsOutsideCurrentDirectory); + const insideCurrentDirectoryGroup = groupPatternsByBaseDirectory(patternsInsideCurrentDirectory); + tasks.push(...convertPatternGroupsToTasks(outsideCurrentDirectoryGroup, negative, dynamic)); + /* + * For the sake of reducing future accesses to the file system, we merge all tasks within the current directory + * into a global task, if at least one pattern refers to the root (`.`). In this case, the global task covers the rest. + */ + if ('.' in insideCurrentDirectoryGroup) { + tasks.push(convertPatternGroupToTask('.', patternsInsideCurrentDirectory, negative, dynamic)); + } + else { + tasks.push(...convertPatternGroupsToTasks(insideCurrentDirectoryGroup, negative, dynamic)); + } + return tasks; +} +exports.convertPatternsToTasks = convertPatternsToTasks; +function getPositivePatterns(patterns) { + return utils.pattern.getPositivePatterns(patterns); +} +exports.getPositivePatterns = getPositivePatterns; +function getNegativePatternsAsPositive(patterns, ignore) { + const negative = utils.pattern.getNegativePatterns(patterns).concat(ignore); + const positive = negative.map(utils.pattern.convertToPositivePattern); + return positive; +} +exports.getNegativePatternsAsPositive = getNegativePatternsAsPositive; +function groupPatternsByBaseDirectory(patterns) { + const group = {}; + return patterns.reduce((collection, pattern) => { + const base = utils.pattern.getBaseDirectory(pattern); + if (base in collection) { + collection[base].push(pattern); + } + else { + collection[base] = [pattern]; + } + return collection; + }, group); +} +exports.groupPatternsByBaseDirectory = groupPatternsByBaseDirectory; +function convertPatternGroupsToTasks(positive, negative, dynamic) { + return Object.keys(positive).map((base) => { + return convertPatternGroupToTask(base, positive[base], negative, dynamic); + }); +} +exports.convertPatternGroupsToTasks = convertPatternGroupsToTasks; +function convertPatternGroupToTask(base, positive, negative, dynamic) { + return { + dynamic, + positive, + negative, + base, + patterns: [].concat(positive, negative.map(utils.pattern.convertToNegativePattern)) + }; +} +exports.convertPatternGroupToTask = convertPatternGroupToTask; /***/ }), -/***/ 3913: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 5679: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const async_1 = __nccwpck_require__(7747); +const provider_1 = __nccwpck_require__(257); +class ProviderAsync extends provider_1.default { + constructor() { + super(...arguments); + this._reader = new async_1.default(this._settings); + } + async read(task) { + const root = this._getRootDirectory(task); + const options = this._getReaderOptions(task); + const entries = await this.api(root, task, options); + return entries.map((entry) => options.transform(entry)); + } + api(root, task, options) { + if (task.dynamic) { + return this._reader.dynamic(root, options); + } + return this._reader.static(task.patterns, options); + } +} +exports["default"] = ProviderAsync; -const util = __nccwpck_require__(1669); -const braces = 
__nccwpck_require__(5582); -const picomatch = __nccwpck_require__(8569); -const utils = __nccwpck_require__(479); -const isEmptyString = val => typeof val === 'string' && (val === '' || val === './'); -/** - * Returns an array of strings that match one or more glob patterns. - * - * ```js - * const mm = require('micromatch'); - * // mm(list, patterns[, options]); - * - * console.log(mm(['a.js', 'a.txt'], ['*.js'])); - * //=> [ 'a.js' ] - * ``` - * @param {String|Array} list List of strings to match. - * @param {String|Array} patterns One or more glob patterns to use for matching. - * @param {Object} options See available [options](#options) - * @return {Array} Returns an array of matches - * @summary false - * @api public - */ +/***/ }), -const micromatch = (list, patterns, options) => { - patterns = [].concat(patterns); - list = [].concat(list); +/***/ 6983: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - let omit = new Set(); - let keep = new Set(); - let items = new Set(); - let negatives = 0; +"use strict"; - let onResult = state => { - items.add(state.output); - if (options && options.onResult) { - options.onResult(state); +Object.defineProperty(exports, "__esModule", ({ value: true })); +const utils = __nccwpck_require__(8223); +const partial_1 = __nccwpck_require__(5295); +class DeepFilter { + constructor(_settings, _micromatchOptions) { + this._settings = _settings; + this._micromatchOptions = _micromatchOptions; } - }; - - for (let i = 0; i < patterns.length; i++) { - let isMatch = picomatch(String(patterns[i]), { ...options, onResult }, true); - let negated = isMatch.state.negated || isMatch.state.negatedExtglob; - if (negated) negatives++; + getFilter(basePath, positive, negative) { + const matcher = this._getMatcher(positive); + const negativeRe = this._getNegativePatternsRe(negative); + return (entry) => this._filter(basePath, entry, matcher, negativeRe); + } + _getMatcher(patterns) { + return new partial_1.default(patterns, this._settings, this._micromatchOptions); + } + _getNegativePatternsRe(patterns) { + const affectDepthOfReadingPatterns = patterns.filter(utils.pattern.isAffectDepthOfReadingPattern); + return utils.pattern.convertPatternsToRe(affectDepthOfReadingPatterns, this._micromatchOptions); + } + _filter(basePath, entry, matcher, negativeRe) { + if (this._isSkippedByDeep(basePath, entry.path)) { + return false; + } + if (this._isSkippedSymbolicLink(entry)) { + return false; + } + const filepath = utils.path.removeLeadingDotSegment(entry.path); + if (this._isSkippedByPositivePatterns(filepath, matcher)) { + return false; + } + return this._isSkippedByNegativePatterns(filepath, negativeRe); + } + _isSkippedByDeep(basePath, entryPath) { + /** + * Avoid unnecessary depth calculations when it doesn't matter. 
+ */ + if (this._settings.deep === Infinity) { + return false; + } + return this._getEntryLevel(basePath, entryPath) >= this._settings.deep; + } + _getEntryLevel(basePath, entryPath) { + const entryPathDepth = entryPath.split('/').length; + if (basePath === '') { + return entryPathDepth; + } + const basePathDepth = basePath.split('/').length; + return entryPathDepth - basePathDepth; + } + _isSkippedSymbolicLink(entry) { + return !this._settings.followSymbolicLinks && entry.dirent.isSymbolicLink(); + } + _isSkippedByPositivePatterns(entryPath, matcher) { + return !this._settings.baseNameMatch && !matcher.match(entryPath); + } + _isSkippedByNegativePatterns(entryPath, patternsRe) { + return !utils.pattern.matchAny(entryPath, patternsRe); + } +} +exports["default"] = DeepFilter; - for (let item of list) { - let matched = isMatch(item, true); - let match = negated ? !matched.isMatch : matched.isMatch; - if (!match) continue; +/***/ }), - if (negated) { - omit.add(matched.output); - } else { - omit.delete(matched.output); - keep.add(matched.output); - } - } - } +/***/ 1343: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - let result = negatives === patterns.length ? [...items] : [...keep]; - let matches = result.filter(item => !omit.has(item)); +"use strict"; - if (options && matches.length === 0) { - if (options.failglob === true) { - throw new Error(`No matches found for "${patterns.join(', ')}"`); +Object.defineProperty(exports, "__esModule", ({ value: true })); +const utils = __nccwpck_require__(8223); +class EntryFilter { + constructor(_settings, _micromatchOptions) { + this._settings = _settings; + this._micromatchOptions = _micromatchOptions; + this.index = new Map(); } - - if (options.nonull === true || options.nullglob === true) { - return options.unescape ? 
patterns.map(p => p.replace(/\\/g, '')) : patterns; + getFilter(positive, negative) { + const positiveRe = utils.pattern.convertPatternsToRe(positive, this._micromatchOptions); + const negativeRe = utils.pattern.convertPatternsToRe(negative, Object.assign(Object.assign({}, this._micromatchOptions), { dot: true })); + return (entry) => this._filter(entry, positiveRe, negativeRe); } - } + _filter(entry, positiveRe, negativeRe) { + const filepath = utils.path.removeLeadingDotSegment(entry.path); + if (this._settings.unique && this._isDuplicateEntry(filepath)) { + return false; + } + if (this._onlyFileFilter(entry) || this._onlyDirectoryFilter(entry)) { + return false; + } + if (this._isSkippedByAbsoluteNegativePatterns(filepath, negativeRe)) { + return false; + } + const isDirectory = entry.dirent.isDirectory(); + const isMatched = this._isMatchToPatterns(filepath, positiveRe, isDirectory) && !this._isMatchToPatterns(filepath, negativeRe, isDirectory); + if (this._settings.unique && isMatched) { + this._createIndexRecord(filepath); + } + return isMatched; + } + _isDuplicateEntry(filepath) { + return this.index.has(filepath); + } + _createIndexRecord(filepath) { + this.index.set(filepath, undefined); + } + _onlyFileFilter(entry) { + return this._settings.onlyFiles && !entry.dirent.isFile(); + } + _onlyDirectoryFilter(entry) { + return this._settings.onlyDirectories && !entry.dirent.isDirectory(); + } + _isSkippedByAbsoluteNegativePatterns(entryPath, patternsRe) { + if (!this._settings.absolute) { + return false; + } + const fullpath = utils.path.makeAbsolute(this._settings.cwd, entryPath); + return utils.pattern.matchAny(fullpath, patternsRe); + } + _isMatchToPatterns(filepath, patternsRe, isDirectory) { + // Trying to match files and directories by patterns. + const isMatched = utils.pattern.matchAny(filepath, patternsRe); + // A pattern with a trailling slash can be used for directory matching. + // To apply such pattern, we need to add a tralling slash to the path. + if (!isMatched && isDirectory) { + return utils.pattern.matchAny(filepath + '/', patternsRe); + } + return isMatched; + } +} +exports["default"] = EntryFilter; - return matches; -}; -/** - * Backwards compatibility - */ +/***/ }), -micromatch.match = micromatch; +/***/ 6654: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -/** - * Returns a matcher function from the given glob `pattern` and `options`. - * The returned function takes a string to match as its only argument and returns - * true if the string is a match. - * - * ```js - * const mm = require('micromatch'); - * // mm.matcher(pattern[, options]); - * - * const isMatch = mm.matcher('*.!(*a)'); - * console.log(isMatch('a.a')); //=> false - * console.log(isMatch('a.b')); //=> true - * ``` - * @param {String} `pattern` Glob pattern - * @param {Object} `options` - * @return {Function} Returns a matcher function. - * @api public - */ +"use strict"; -micromatch.matcher = (pattern, options) => picomatch(pattern, options); +Object.defineProperty(exports, "__esModule", ({ value: true })); +const utils = __nccwpck_require__(8223); +class ErrorFilter { + constructor(_settings) { + this._settings = _settings; + } + getFilter() { + return (error) => this._isNonFatalError(error); + } + _isNonFatalError(error) { + return utils.errno.isEnoentCodeError(error) || this._settings.suppressErrors; + } +} +exports["default"] = ErrorFilter; -/** - * Returns true if **any** of the given glob `patterns` match the specified `string`. 
- * - * ```js - * const mm = require('micromatch'); - * // mm.isMatch(string, patterns[, options]); - * - * console.log(mm.isMatch('a.a', ['b.*', '*.a'])); //=> true - * console.log(mm.isMatch('a.a', 'b.*')); //=> false - * ``` - * @param {String} str The string to test. - * @param {String|Array} patterns One or more glob patterns to use for matching. - * @param {Object} [options] See available [options](#options). - * @return {Boolean} Returns true if any patterns match `str` - * @api public - */ -micromatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str); +/***/ }), -/** - * Backwards compatibility - */ +/***/ 2576: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -micromatch.any = micromatch.isMatch; +"use strict"; -/** - * Returns a list of strings that _**do not match any**_ of the given `patterns`. - * - * ```js - * const mm = require('micromatch'); - * // mm.not(list, patterns[, options]); - * - * console.log(mm.not(['a.a', 'b.b', 'c.c'], '*.a')); - * //=> ['b.b', 'c.c'] - * ``` - * @param {Array} `list` Array of strings to match. - * @param {String|Array} `patterns` One or more glob pattern to use for matching. - * @param {Object} `options` See available [options](#options) for changing how matches are performed - * @return {Array} Returns an array of strings that **do not match** the given patterns. - * @api public - */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +const utils = __nccwpck_require__(8223); +class Matcher { + constructor(_patterns, _settings, _micromatchOptions) { + this._patterns = _patterns; + this._settings = _settings; + this._micromatchOptions = _micromatchOptions; + this._storage = []; + this._fillStorage(); + } + _fillStorage() { + for (const pattern of this._patterns) { + const segments = this._getPatternSegments(pattern); + const sections = this._splitSegmentsIntoSections(segments); + this._storage.push({ + complete: sections.length <= 1, + pattern, + segments, + sections + }); + } + } + _getPatternSegments(pattern) { + const parts = utils.pattern.getPatternParts(pattern, this._micromatchOptions); + return parts.map((part) => { + const dynamic = utils.pattern.isDynamicPattern(part, this._settings); + if (!dynamic) { + return { + dynamic: false, + pattern: part + }; + } + return { + dynamic: true, + pattern: part, + patternRe: utils.pattern.makeRe(part, this._micromatchOptions) + }; + }); + } + _splitSegmentsIntoSections(segments) { + return utils.array.splitWhen(segments, (segment) => segment.dynamic && utils.pattern.hasGlobStar(segment.pattern)); + } +} +exports["default"] = Matcher; -micromatch.not = (list, patterns, options = {}) => { - patterns = [].concat(patterns).map(String); - let result = new Set(); - let items = []; - let onResult = state => { - if (options.onResult) options.onResult(state); - items.push(state.output); - }; +/***/ }), - let matches = micromatch(list, patterns, { ...options, onResult }); +/***/ 5295: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - for (let item of items) { - if (!matches.includes(item)) { - result.add(item); +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const matcher_1 = __nccwpck_require__(2576); +class PartialMatcher extends matcher_1.default { + match(filepath) { + const parts = filepath.split('/'); + const levels = parts.length; + const patterns = this._storage.filter((info) => !info.complete || info.segments.length > levels); + for (const pattern of patterns) { + const section = 
pattern.sections[0]; + /** + * In this case, the pattern has a globstar and we must read all directories unconditionally, + * but only if the level has reached the end of the first group. + * + * fixtures/{a,b}/** + * ^ true/false ^ always true + */ + if (!pattern.complete && levels > section.length) { + return true; + } + const match = parts.every((part, index) => { + const segment = pattern.segments[index]; + if (segment.dynamic && segment.patternRe.test(part)) { + return true; + } + if (!segment.dynamic && segment.pattern === part) { + return true; + } + return false; + }); + if (match) { + return true; + } + } + return false; } - } - return [...result]; -}; +} +exports["default"] = PartialMatcher; -/** - * Returns true if the given `string` contains the given pattern. Similar - * to [.isMatch](#isMatch) but the pattern can match any part of the string. - * - * ```js - * var mm = require('micromatch'); - * // mm.contains(string, pattern[, options]); - * - * console.log(mm.contains('aa/bb/cc', '*b')); - * //=> true - * console.log(mm.contains('aa/bb/cc', '*d')); - * //=> false - * ``` - * @param {String} `str` The string to match. - * @param {String|Array} `patterns` Glob pattern to use for matching. - * @param {Object} `options` See available [options](#options) for changing how matches are performed - * @return {Boolean} Returns true if the patter matches any part of `str`. - * @api public - */ -micromatch.contains = (str, pattern, options) => { - if (typeof str !== 'string') { - throw new TypeError(`Expected a string: "${util.inspect(str)}"`); - } +/***/ }), - if (Array.isArray(pattern)) { - return pattern.some(p => micromatch.contains(str, p, options)); - } +/***/ 257: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - if (typeof pattern === 'string') { - if (isEmptyString(str) || isEmptyString(pattern)) { - return false; - } +"use strict"; - if (str.includes(pattern) || (str.startsWith('./') && str.slice(2).includes(pattern))) { - return true; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const path = __nccwpck_require__(1017); +const deep_1 = __nccwpck_require__(6983); +const entry_1 = __nccwpck_require__(1343); +const error_1 = __nccwpck_require__(6654); +const entry_2 = __nccwpck_require__(4029); +class Provider { + constructor(_settings) { + this._settings = _settings; + this.errorFilter = new error_1.default(this._settings); + this.entryFilter = new entry_1.default(this._settings, this._getMicromatchOptions()); + this.deepFilter = new deep_1.default(this._settings, this._getMicromatchOptions()); + this.entryTransformer = new entry_2.default(this._settings); } - } + _getRootDirectory(task) { + return path.resolve(this._settings.cwd, task.base); + } + _getReaderOptions(task) { + const basePath = task.base === '.' ? 
'' : task.base; + return { + basePath, + pathSegmentSeparator: '/', + concurrency: this._settings.concurrency, + deepFilter: this.deepFilter.getFilter(basePath, task.positive, task.negative), + entryFilter: this.entryFilter.getFilter(task.positive, task.negative), + errorFilter: this.errorFilter.getFilter(), + followSymbolicLinks: this._settings.followSymbolicLinks, + fs: this._settings.fs, + stats: this._settings.stats, + throwErrorOnBrokenSymbolicLink: this._settings.throwErrorOnBrokenSymbolicLink, + transform: this.entryTransformer.getTransformer() + }; + } + _getMicromatchOptions() { + return { + dot: this._settings.dot, + matchBase: this._settings.baseNameMatch, + nobrace: !this._settings.braceExpansion, + nocase: !this._settings.caseSensitiveMatch, + noext: !this._settings.extglob, + noglobstar: !this._settings.globstar, + posix: true, + strictSlashes: false + }; + } +} +exports["default"] = Provider; - return micromatch.isMatch(str, pattern, { ...options, contains: true }); -}; - -/** - * Filter the keys of the given object with the given `glob` pattern - * and `options`. Does not attempt to match nested keys. If you need this feature, - * use [glob-object][] instead. - * - * ```js - * const mm = require('micromatch'); - * // mm.matchKeys(object, patterns[, options]); - * - * const obj = { aa: 'a', ab: 'b', ac: 'c' }; - * console.log(mm.matchKeys(obj, '*b')); - * //=> { ab: 'b' } - * ``` - * @param {Object} `object` The object with keys to filter. - * @param {String|Array} `patterns` One or more glob patterns to use for matching. - * @param {Object} `options` See available [options](#options) for changing how matches are performed - * @return {Object} Returns an object with only keys that match the given patterns. - * @api public - */ -micromatch.matchKeys = (obj, patterns, options) => { - if (!utils.isObject(obj)) { - throw new TypeError('Expected the first argument to be an object'); - } - let keys = micromatch(Object.keys(obj), patterns, options); - let res = {}; - for (let key of keys) res[key] = obj[key]; - return res; -}; +/***/ }), -/** - * Returns true if some of the strings in the given `list` match any of the given glob `patterns`. - * - * ```js - * const mm = require('micromatch'); - * // mm.some(list, patterns[, options]); - * - * console.log(mm.some(['foo.js', 'bar.js'], ['*.js', '!foo.js'])); - * // true - * console.log(mm.some(['foo.js'], ['*.js', '!foo.js'])); - * // false - * ``` - * @param {String|Array} `list` The string or array of strings to test. Returns as soon as the first match is found. - * @param {String|Array} `patterns` One or more glob patterns to use for matching. 
- * @param {Object} `options` See available [options](#options) for changing how matches are performed - * @return {Boolean} Returns true if any patterns match `str` - * @api public - */ +/***/ 4630: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -micromatch.some = (list, patterns, options) => { - let items = [].concat(list); +"use strict"; - for (let pattern of [].concat(patterns)) { - let isMatch = picomatch(String(pattern), options); - if (items.some(item => isMatch(item))) { - return true; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const stream_1 = __nccwpck_require__(2781); +const stream_2 = __nccwpck_require__(2083); +const provider_1 = __nccwpck_require__(257); +class ProviderStream extends provider_1.default { + constructor() { + super(...arguments); + this._reader = new stream_2.default(this._settings); } - } - return false; -}; + read(task) { + const root = this._getRootDirectory(task); + const options = this._getReaderOptions(task); + const source = this.api(root, task, options); + const destination = new stream_1.Readable({ objectMode: true, read: () => { } }); + source + .once('error', (error) => destination.emit('error', error)) + .on('data', (entry) => destination.emit('data', options.transform(entry))) + .once('end', () => destination.emit('end')); + destination + .once('close', () => source.destroy()); + return destination; + } + api(root, task, options) { + if (task.dynamic) { + return this._reader.dynamic(root, options); + } + return this._reader.static(task.patterns, options); + } +} +exports["default"] = ProviderStream; -/** - * Returns true if every string in the given `list` matches - * any of the given glob `patterns`. - * - * ```js - * const mm = require('micromatch'); - * // mm.every(list, patterns[, options]); - * - * console.log(mm.every('foo.js', ['foo.js'])); - * // true - * console.log(mm.every(['foo.js', 'bar.js'], ['*.js'])); - * // true - * console.log(mm.every(['foo.js', 'bar.js'], ['*.js', '!foo.js'])); - * // false - * console.log(mm.every(['foo.js'], ['*.js', '!foo.js'])); - * // false - * ``` - * @param {String|Array} `list` The string or array of strings to test. - * @param {String|Array} `patterns` One or more glob patterns to use for matching. - * @param {Object} `options` See available [options](#options) for changing how matches are performed - * @return {Boolean} Returns true if any patterns match `str` - * @api public - */ -micromatch.every = (list, patterns, options) => { - let items = [].concat(list); +/***/ }), - for (let pattern of [].concat(patterns)) { - let isMatch = picomatch(String(pattern), options); - if (!items.every(item => isMatch(item))) { - return false; - } - } - return true; -}; +/***/ 2405: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -/** - * Returns true if **all** of the given `patterns` match - * the specified string. - * - * ```js - * const mm = require('micromatch'); - * // mm.all(string, patterns[, options]); - * - * console.log(mm.all('foo.js', ['foo.js'])); - * // true - * - * console.log(mm.all('foo.js', ['*.js', '!foo.js'])); - * // false - * - * console.log(mm.all('foo.js', ['*.js', 'foo.js'])); - * // true - * - * console.log(mm.all('foo.js', ['*.js', 'f*', '*o*', '*o.js'])); - * // true - * ``` - * @param {String|Array} `str` The string to test. - * @param {String|Array} `patterns` One or more glob patterns to use for matching. 
- * @param {Object} `options` See available [options](#options) for changing how matches are performed - * @return {Boolean} Returns true if any patterns match `str` - * @api public - */ +"use strict"; -micromatch.all = (str, patterns, options) => { - if (typeof str !== 'string') { - throw new TypeError(`Expected a string: "${util.inspect(str)}"`); - } +Object.defineProperty(exports, "__esModule", ({ value: true })); +const sync_1 = __nccwpck_require__(8821); +const provider_1 = __nccwpck_require__(257); +class ProviderSync extends provider_1.default { + constructor() { + super(...arguments); + this._reader = new sync_1.default(this._settings); + } + read(task) { + const root = this._getRootDirectory(task); + const options = this._getReaderOptions(task); + const entries = this.api(root, task, options); + return entries.map(options.transform); + } + api(root, task, options) { + if (task.dynamic) { + return this._reader.dynamic(root, options); + } + return this._reader.static(task.patterns, options); + } +} +exports["default"] = ProviderSync; - return [].concat(patterns).every(p => picomatch(p, options)(str)); -}; -/** - * Returns an array of matches captured by `pattern` in `string, or `null` if the pattern did not match. - * - * ```js - * const mm = require('micromatch'); - * // mm.capture(pattern, string[, options]); - * - * console.log(mm.capture('test/*.js', 'test/foo.js')); - * //=> ['foo'] - * console.log(mm.capture('test/*.js', 'foo/bar.css')); - * //=> null - * ``` - * @param {String} `glob` Glob pattern to use for matching. - * @param {String} `input` String to match - * @param {Object} `options` See available [options](#options) for changing how matches are performed - * @return {Boolean} Returns an array of captures if the input matches the glob pattern, otherwise `null`. - * @api public - */ +/***/ }), -micromatch.capture = (glob, input, options) => { - let posix = utils.isWindows(options); - let regex = picomatch.makeRe(String(glob), { ...options, capture: true }); - let match = regex.exec(posix ? utils.toPosixSlashes(input) : input); +/***/ 4029: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - if (match) { - return match.slice(1).map(v => v === void 0 ? '' : v); - } -}; +"use strict"; -/** - * Create a regular expression from the given glob `pattern`. - * - * ```js - * const mm = require('micromatch'); - * // mm.makeRe(pattern[, options]); - * - * console.log(mm.makeRe('*.js')); - * //=> /^(?:(\.[\\\/])?(?!\.)(?=.)[^\/]*?\.js)$/ - * ``` - * @param {String} `pattern` A glob pattern to convert to regex. - * @param {Object} `options` - * @return {RegExp} Returns a regex created from the given pattern. 
- * @api public - */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +const utils = __nccwpck_require__(8223); +class EntryTransformer { + constructor(_settings) { + this._settings = _settings; + } + getTransformer() { + return (entry) => this._transform(entry); + } + _transform(entry) { + let filepath = entry.path; + if (this._settings.absolute) { + filepath = utils.path.makeAbsolute(this._settings.cwd, filepath); + filepath = utils.path.unixify(filepath); + } + if (this._settings.markDirectories && entry.dirent.isDirectory()) { + filepath += '/'; + } + if (!this._settings.objectMode) { + return filepath; + } + return Object.assign(Object.assign({}, entry), { path: filepath }); + } +} +exports["default"] = EntryTransformer; -micromatch.makeRe = (...args) => picomatch.makeRe(...args); -/** - * Scan a glob pattern to separate the pattern into segments. Used - * by the [split](#split) method. - * - * ```js - * const mm = require('micromatch'); - * const state = mm.scan(pattern[, options]); - * ``` - * @param {String} `pattern` - * @param {Object} `options` - * @return {Object} Returns an object with - * @api public - */ +/***/ }), -micromatch.scan = (...args) => picomatch.scan(...args); +/***/ 7747: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -/** - * Parse a glob pattern to create the source string for a regular - * expression. - * - * ```js - * const mm = require('micromatch'); - * const state = mm(pattern[, options]); - * ``` - * @param {String} `glob` - * @param {Object} `options` - * @return {Object} Returns an object with useful properties and output to be used as regex source string. - * @api public - */ +"use strict"; -micromatch.parse = (patterns, options) => { - let res = []; - for (let pattern of [].concat(patterns || [])) { - for (let str of braces(String(pattern), options)) { - res.push(picomatch.parse(str, options)); +Object.defineProperty(exports, "__esModule", ({ value: true })); +const fsWalk = __nccwpck_require__(6026); +const reader_1 = __nccwpck_require__(5582); +const stream_1 = __nccwpck_require__(2083); +class ReaderAsync extends reader_1.default { + constructor() { + super(...arguments); + this._walkAsync = fsWalk.walk; + this._readerStream = new stream_1.default(this._settings); } - } - return res; -}; - -/** - * Process the given brace `pattern`. - * - * ```js - * const { braces } = require('micromatch'); - * console.log(braces('foo/{a,b,c}/bar')); - * //=> [ 'foo/(a|b|c)/bar' ] - * - * console.log(braces('foo/{a,b,c}/bar', { expand: true })); - * //=> [ 'foo/a/bar', 'foo/b/bar', 'foo/c/bar' ] - * ``` - * @param {String} `pattern` String with brace pattern to process. - * @param {Object} `options` Any [options](#options) to change how expansion is performed. See the [braces][] library for all available options. - * @return {Array} - * @api public - */ + dynamic(root, options) { + return new Promise((resolve, reject) => { + this._walkAsync(root, options, (error, entries) => { + if (error === null) { + resolve(entries); + } + else { + reject(error); + } + }); + }); + } + async static(patterns, options) { + const entries = []; + const stream = this._readerStream.static(patterns, options); + // After #235, replace it with an asynchronous iterator. 
+ return new Promise((resolve, reject) => { + stream.once('error', reject); + stream.on('data', (entry) => entries.push(entry)); + stream.once('end', () => resolve(entries)); + }); + } +} +exports["default"] = ReaderAsync; -micromatch.braces = (pattern, options) => { - if (typeof pattern !== 'string') throw new TypeError('Expected a string'); - if ((options && options.nobrace === true) || !/\{.*\}/.test(pattern)) { - return [pattern]; - } - return braces(pattern, options); -}; -/** - * Expand braces - */ +/***/ }), -micromatch.braceExpand = (pattern, options) => { - if (typeof pattern !== 'string') throw new TypeError('Expected a string'); - return micromatch.braces(pattern, { ...options, expand: true }); -}; +/***/ 5582: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -/** - * Expose micromatch - */ +"use strict"; -module.exports = micromatch; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const path = __nccwpck_require__(1017); +const fsStat = __nccwpck_require__(109); +const utils = __nccwpck_require__(8223); +class Reader { + constructor(_settings) { + this._settings = _settings; + this._fsStatSettings = new fsStat.Settings({ + followSymbolicLink: this._settings.followSymbolicLinks, + fs: this._settings.fs, + throwErrorOnBrokenSymbolicLink: this._settings.followSymbolicLinks + }); + } + _getFullEntryPath(filepath) { + return path.resolve(this._settings.cwd, filepath); + } + _makeEntry(stats, pattern) { + const entry = { + name: pattern, + path: pattern, + dirent: utils.fs.createDirentFromStats(pattern, stats) + }; + if (this._settings.stats) { + entry.stats = stats; + } + return entry; + } + _isFatalError(error) { + return !utils.errno.isEnoentCodeError(error) && !this._settings.suppressErrors; + } +} +exports["default"] = Reader; /***/ }), -/***/ 6867: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 2083: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -/*! - * to-regex-range - * - * Copyright (c) 2015-present, Jon Schlinkert. - * Released under the MIT License. - */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +const stream_1 = __nccwpck_require__(2781); +const fsStat = __nccwpck_require__(109); +const fsWalk = __nccwpck_require__(6026); +const reader_1 = __nccwpck_require__(5582); +class ReaderStream extends reader_1.default { + constructor() { + super(...arguments); + this._walkStream = fsWalk.walkStream; + this._stat = fsStat.stat; + } + dynamic(root, options) { + return this._walkStream(root, options); + } + static(patterns, options) { + const filepaths = patterns.map(this._getFullEntryPath, this); + const stream = new stream_1.PassThrough({ objectMode: true }); + stream._write = (index, _enc, done) => { + return this._getEntry(filepaths[index], patterns[index], options) + .then((entry) => { + if (entry !== null && options.entryFilter(entry)) { + stream.push(entry); + } + if (index === filepaths.length - 1) { + stream.end(); + } + done(); + }) + .catch(done); + }; + for (let i = 0; i < filepaths.length; i++) { + stream.write(i); + } + return stream; + } + _getEntry(filepath, pattern, options) { + return this._getStat(filepath) + .then((stats) => this._makeEntry(stats, pattern)) + .catch((error) => { + if (options.errorFilter(error)) { + return null; + } + throw error; + }); + } + _getStat(filepath) { + return new Promise((resolve, reject) => { + this._stat(filepath, this._fsStatSettings, (error, stats) => { + return error === null ? 
resolve(stats) : reject(error); + }); + }); + } +} +exports["default"] = ReaderStream; -const isNumber = __nccwpck_require__(2840); +/***/ }), -const toRegexRange = (min, max, options) => { - if (isNumber(min) === false) { - throw new TypeError('toRegexRange: expected the first argument to be a number'); - } +/***/ 8821: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - if (max === void 0 || min === max) { - return String(min); - } +"use strict"; - if (isNumber(max) === false) { - throw new TypeError('toRegexRange: expected the second argument to be a number.'); - } +Object.defineProperty(exports, "__esModule", ({ value: true })); +const fsStat = __nccwpck_require__(109); +const fsWalk = __nccwpck_require__(6026); +const reader_1 = __nccwpck_require__(5582); +class ReaderSync extends reader_1.default { + constructor() { + super(...arguments); + this._walkSync = fsWalk.walkSync; + this._statSync = fsStat.statSync; + } + dynamic(root, options) { + return this._walkSync(root, options); + } + static(patterns, options) { + const entries = []; + for (const pattern of patterns) { + const filepath = this._getFullEntryPath(pattern); + const entry = this._getEntry(filepath, pattern, options); + if (entry === null || !options.entryFilter(entry)) { + continue; + } + entries.push(entry); + } + return entries; + } + _getEntry(filepath, pattern, options) { + try { + const stats = this._getStat(filepath); + return this._makeEntry(stats, pattern); + } + catch (error) { + if (options.errorFilter(error)) { + return null; + } + throw error; + } + } + _getStat(filepath) { + return this._statSync(filepath, this._fsStatSettings); + } +} +exports["default"] = ReaderSync; - let opts = { relaxZeros: true, ...options }; - if (typeof opts.strictZeros === 'boolean') { - opts.relaxZeros = opts.strictZeros === false; - } - let relax = String(opts.relaxZeros); - let shorthand = String(opts.shorthand); - let capture = String(opts.capture); - let wrap = String(opts.wrap); - let cacheKey = min + ':' + max + '=' + relax + shorthand + capture + wrap; +/***/ }), - if (toRegexRange.cache.hasOwnProperty(cacheKey)) { - return toRegexRange.cache[cacheKey].result; - } +/***/ 952: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - let a = Math.min(min, max); - let b = Math.max(min, max); +"use strict"; - if (Math.abs(a - b) === 1) { - let result = min + '|' + max; - if (opts.capture) { - return `(${result})`; - } - if (opts.wrap === false) { - return result; - } - return `(?:${result})`; - } - - let isPadded = hasPadding(min) || hasPadding(max); - let state = { min, max, a, b }; - let positives = []; - let negatives = []; - - if (isPadded) { - state.isPadded = isPadded; - state.maxLen = String(state.max).length; - } - - if (a < 0) { - let newMin = b < 0 ? Math.abs(b) : 1; - negatives = splitToPatterns(newMin, Math.abs(a), state, opts); - a = state.a = 0; - } +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DEFAULT_FILE_SYSTEM_ADAPTER = void 0; +const fs = __nccwpck_require__(7147); +const os = __nccwpck_require__(2037); +/** + * The `os.cpus` method can return zero. We expect the number of cores to be greater than zero. 
+ * https://github.com/nodejs/node/blob/7faeddf23a98c53896f8b574a6e66589e8fb1eb8/lib/os.js#L106-L107 + */ +const CPU_COUNT = Math.max(os.cpus().length, 1); +exports.DEFAULT_FILE_SYSTEM_ADAPTER = { + lstat: fs.lstat, + lstatSync: fs.lstatSync, + stat: fs.stat, + statSync: fs.statSync, + readdir: fs.readdir, + readdirSync: fs.readdirSync +}; +class Settings { + constructor(_options = {}) { + this._options = _options; + this.absolute = this._getValue(this._options.absolute, false); + this.baseNameMatch = this._getValue(this._options.baseNameMatch, false); + this.braceExpansion = this._getValue(this._options.braceExpansion, true); + this.caseSensitiveMatch = this._getValue(this._options.caseSensitiveMatch, true); + this.concurrency = this._getValue(this._options.concurrency, CPU_COUNT); + this.cwd = this._getValue(this._options.cwd, process.cwd()); + this.deep = this._getValue(this._options.deep, Infinity); + this.dot = this._getValue(this._options.dot, false); + this.extglob = this._getValue(this._options.extglob, true); + this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, true); + this.fs = this._getFileSystemMethods(this._options.fs); + this.globstar = this._getValue(this._options.globstar, true); + this.ignore = this._getValue(this._options.ignore, []); + this.markDirectories = this._getValue(this._options.markDirectories, false); + this.objectMode = this._getValue(this._options.objectMode, false); + this.onlyDirectories = this._getValue(this._options.onlyDirectories, false); + this.onlyFiles = this._getValue(this._options.onlyFiles, true); + this.stats = this._getValue(this._options.stats, false); + this.suppressErrors = this._getValue(this._options.suppressErrors, false); + this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, false); + this.unique = this._getValue(this._options.unique, true); + if (this.onlyDirectories) { + this.onlyFiles = false; + } + if (this.stats) { + this.objectMode = true; + } + // Remove the cast to the array in the next major (#404). + this.ignore = [].concat(this.ignore); + } + _getValue(option, value) { + return option === undefined ? 
value : option; + } + _getFileSystemMethods(methods = {}) { + return Object.assign(Object.assign({}, exports.DEFAULT_FILE_SYSTEM_ADAPTER), methods); + } +} +exports["default"] = Settings; - if (b >= 0) { - positives = splitToPatterns(a, b, state, opts); - } - state.negatives = negatives; - state.positives = positives; - state.result = collatePatterns(negatives, positives, opts); +/***/ }), - if (opts.capture === true) { - state.result = `(${state.result})`; - } else if (opts.wrap !== false && (positives.length + negatives.length) > 1) { - state.result = `(?:${state.result})`; - } +/***/ 5325: +/***/ ((__unused_webpack_module, exports) => { - toRegexRange.cache[cacheKey] = state; - return state.result; -}; +"use strict"; -function collatePatterns(neg, pos, options) { - let onlyNegative = filterPatterns(neg, pos, '-', false, options) || []; - let onlyPositive = filterPatterns(pos, neg, '', false, options) || []; - let intersected = filterPatterns(neg, pos, '-?', true, options) || []; - let subpatterns = onlyNegative.concat(intersected).concat(onlyPositive); - return subpatterns.join('|'); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.splitWhen = exports.flatten = void 0; +function flatten(items) { + return items.reduce((collection, item) => [].concat(collection, item), []); } +exports.flatten = flatten; +function splitWhen(items, predicate) { + const result = [[]]; + let groupIndex = 0; + for (const item of items) { + if (predicate(item)) { + groupIndex++; + result[groupIndex] = []; + } + else { + result[groupIndex].push(item); + } + } + return result; +} +exports.splitWhen = splitWhen; -function splitToRanges(min, max) { - let nines = 1; - let zeros = 1; - - let stop = countNines(min, nines); - let stops = new Set([max]); - while (min <= stop && stop <= max) { - stops.add(stop); - nines += 1; - stop = countNines(min, nines); - } +/***/ }), - stop = countZeros(max + 1, zeros) - 1; +/***/ 1230: +/***/ ((__unused_webpack_module, exports) => { - while (min < stop && stop <= max) { - stops.add(stop); - zeros += 1; - stop = countZeros(max + 1, zeros) - 1; - } +"use strict"; - stops = [...stops]; - stops.sort(compare); - return stops; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isEnoentCodeError = void 0; +function isEnoentCodeError(error) { + return error.code === 'ENOENT'; } +exports.isEnoentCodeError = isEnoentCodeError; -/** - * Convert a range to a regex pattern - * @param {Number} `start` - * @param {Number} `stop` - * @return {String} - */ - -function rangeToPattern(start, stop, options) { - if (start === stop) { - return { pattern: start, count: [], digits: 0 }; - } - - let zipped = zip(start, stop); - let digits = zipped.length; - let pattern = ''; - let count = 0; - for (let i = 0; i < digits; i++) { - let [startDigit, stopDigit] = zipped[i]; +/***/ }), - if (startDigit === stopDigit) { - pattern += startDigit; +/***/ 7543: +/***/ ((__unused_webpack_module, exports) => { - } else if (startDigit !== '0' || stopDigit !== '9') { - pattern += toCharacterClass(startDigit, stopDigit, options); +"use strict"; - } else { - count++; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createDirentFromStats = void 0; +class DirentFromStats { + constructor(name, stats) { + this.name = name; + this.isBlockDevice = stats.isBlockDevice.bind(stats); + this.isCharacterDevice = stats.isCharacterDevice.bind(stats); + this.isDirectory = stats.isDirectory.bind(stats); + this.isFIFO = stats.isFIFO.bind(stats); + this.isFile = 
stats.isFile.bind(stats); + this.isSocket = stats.isSocket.bind(stats); + this.isSymbolicLink = stats.isSymbolicLink.bind(stats); } - } - - if (count) { - pattern += options.shorthand === true ? '\\d' : '[0-9]'; - } - - return { pattern, count: [count], digits }; } +function createDirentFromStats(name, stats) { + return new DirentFromStats(name, stats); +} +exports.createDirentFromStats = createDirentFromStats; -function splitToPatterns(min, max, tok, options) { - let ranges = splitToRanges(min, max); - let tokens = []; - let start = min; - let prev; - - for (let i = 0; i < ranges.length; i++) { - let max = ranges[i]; - let obj = rangeToPattern(String(start), String(max), options); - let zeros = ''; - - if (!tok.isPadded && prev && prev.pattern === obj.pattern) { - if (prev.count.length > 1) { - prev.count.pop(); - } - prev.count.push(obj.count[0]); - prev.string = prev.pattern + toQuantifier(prev.count); - start = max + 1; - continue; - } +/***/ }), - if (tok.isPadded) { - zeros = padZeros(max, tok, options); - } +/***/ 8223: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - obj.string = zeros + obj.pattern + toQuantifier(obj.count); - tokens.push(obj); - start = max + 1; - prev = obj; - } +"use strict"; - return tokens; -} +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.string = exports.stream = exports.pattern = exports.path = exports.fs = exports.errno = exports.array = void 0; +const array = __nccwpck_require__(5325); +exports.array = array; +const errno = __nccwpck_require__(1230); +exports.errno = errno; +const fs = __nccwpck_require__(7543); +exports.fs = fs; +const path = __nccwpck_require__(3873); +exports.path = path; +const pattern = __nccwpck_require__(1221); +exports.pattern = pattern; +const stream = __nccwpck_require__(8382); +exports.stream = stream; +const string = __nccwpck_require__(2203); +exports.string = string; -function filterPatterns(arr, comparison, prefix, intersection, options) { - let result = []; - for (let ele of arr) { - let { string } = ele; +/***/ }), - // only push if _both_ are negative... - if (!intersection && !contains(comparison, 'string', string)) { - result.push(prefix + string); - } +/***/ 3873: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // or _both_ are positive - if (intersection && contains(comparison, 'string', string)) { - result.push(prefix + string); - } - } - return result; -} +"use strict"; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.convertPosixPathToPattern = exports.convertWindowsPathToPattern = exports.convertPathToPattern = exports.escapePosixPath = exports.escapeWindowsPath = exports.escape = exports.removeLeadingDotSegment = exports.makeAbsolute = exports.unixify = void 0; +const os = __nccwpck_require__(2037); +const path = __nccwpck_require__(1017); +const IS_WINDOWS_PLATFORM = os.platform() === 'win32'; +const LEADING_DOT_SEGMENT_CHARACTERS_COUNT = 2; // ./ or .\\ /** - * Zip strings + * All non-escaped special characters. + * Posix: ()*?[]{|}, !+@ before (, ! at the beginning, \\ before non-special characters. + * Windows: (){}[], !+@ before (, ! at the beginning. */ - -function zip(a, b) { - let arr = []; - for (let i = 0; i < a.length; i++) arr.push([a[i], b[i]]); - return arr; -} - -function compare(a, b) { - return a > b ? 1 : b > a ? 
-1 : 0; -} - -function contains(arr, key, val) { - return arr.some(ele => ele[key] === val); +const POSIX_UNESCAPED_GLOB_SYMBOLS_RE = /(\\?)([()*?[\]{|}]|^!|[!+@](?=\()|\\(?![!()*+?@[\]{|}]))/g; +const WINDOWS_UNESCAPED_GLOB_SYMBOLS_RE = /(\\?)([()[\]{}]|^!|[!+@](?=\())/g; +/** + * The device path (\\.\ or \\?\). + * https://learn.microsoft.com/en-us/dotnet/standard/io/file-path-formats#dos-device-paths + */ +const DOS_DEVICE_PATH_RE = /^\\\\([.?])/; +/** + * All backslashes except those escaping special characters. + * Windows: !()+@{} + * https://learn.microsoft.com/en-us/windows/win32/fileio/naming-a-file#naming-conventions + */ +const WINDOWS_BACKSLASHES_RE = /\\(?![!()+@[\]{}])/g; +/** + * Designed to work only with simple paths: `dir\\file`. + */ +function unixify(filepath) { + return filepath.replace(/\\/g, '/'); } - -function countNines(min, len) { - return Number(String(min).slice(0, -len) + '9'.repeat(len)); +exports.unixify = unixify; +function makeAbsolute(cwd, filepath) { + return path.resolve(cwd, filepath); } - -function countZeros(integer, zeros) { - return integer - (integer % Math.pow(10, zeros)); +exports.makeAbsolute = makeAbsolute; +function removeLeadingDotSegment(entry) { + // We do not use `startsWith` because this is 10x slower than current implementation for some cases. + // eslint-disable-next-line @typescript-eslint/prefer-string-starts-ends-with + if (entry.charAt(0) === '.') { + const secondCharactery = entry.charAt(1); + if (secondCharactery === '/' || secondCharactery === '\\') { + return entry.slice(LEADING_DOT_SEGMENT_CHARACTERS_COUNT); + } + } + return entry; } - -function toQuantifier(digits) { - let [start = 0, stop = ''] = digits; - if (stop || start > 1) { - return `{${start + (stop ? ',' + stop : '')}}`; - } - return ''; +exports.removeLeadingDotSegment = removeLeadingDotSegment; +exports.escape = IS_WINDOWS_PLATFORM ? escapeWindowsPath : escapePosixPath; +function escapeWindowsPath(pattern) { + return pattern.replace(WINDOWS_UNESCAPED_GLOB_SYMBOLS_RE, '\\$2'); } - -function toCharacterClass(a, b, options) { - return `[${a}${(b - a === 1) ? '' : '-'}${b}]`; +exports.escapeWindowsPath = escapeWindowsPath; +function escapePosixPath(pattern) { + return pattern.replace(POSIX_UNESCAPED_GLOB_SYMBOLS_RE, '\\$2'); } - -function hasPadding(str) { - return /^-?(0+)\d/.test(str); +exports.escapePosixPath = escapePosixPath; +exports.convertPathToPattern = IS_WINDOWS_PLATFORM ? convertWindowsPathToPattern : convertPosixPathToPattern; +function convertWindowsPathToPattern(filepath) { + return escapeWindowsPath(filepath) + .replace(DOS_DEVICE_PATH_RE, '//$1') + .replace(WINDOWS_BACKSLASHES_RE, '/'); } - -function padZeros(value, tok, options) { - if (!tok.isPadded) { - return value; - } - - let diff = Math.abs(tok.maxLen - String(value).length); - let relax = options.relaxZeros !== false; - - switch (diff) { - case 0: - return ''; - case 1: - return relax ? '0?' : '0'; - case 2: - return relax ? '0{0,2}' : '00'; - default: { - return relax ? 
`0{0,${diff}}` : `0{${diff}}`; - } - } +exports.convertWindowsPathToPattern = convertWindowsPathToPattern; +function convertPosixPathToPattern(filepath) { + return escapePosixPath(filepath); } - -/** - * Cache - */ - -toRegexRange.cache = {}; -toRegexRange.clearCache = () => (toRegexRange.cache = {}); - -/** - * Expose `toRegexRange` - */ - -module.exports = toRegexRange; +exports.convertPosixPathToPattern = convertPosixPathToPattern; /***/ }), -/***/ 3664: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 1221: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -const taskManager = __nccwpck_require__(2708); -const async_1 = __nccwpck_require__(5679); -const stream_1 = __nccwpck_require__(4630); -const sync_1 = __nccwpck_require__(2405); -const settings_1 = __nccwpck_require__(952); -const utils = __nccwpck_require__(5444); -async function FastGlob(source, options) { - assertPatternsInput(source); - const works = getWorks(source, async_1.default, options); - const result = await Promise.all(works); - return utils.array.flatten(result); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.removeDuplicateSlashes = exports.matchAny = exports.convertPatternsToRe = exports.makeRe = exports.getPatternParts = exports.expandBraceExpansion = exports.expandPatternsWithBraceExpansion = exports.isAffectDepthOfReadingPattern = exports.endsWithSlashGlobStar = exports.hasGlobStar = exports.getBaseDirectory = exports.isPatternRelatedToParentDirectory = exports.getPatternsOutsideCurrentDirectory = exports.getPatternsInsideCurrentDirectory = exports.getPositivePatterns = exports.getNegativePatterns = exports.isPositivePattern = exports.isNegativePattern = exports.convertToNegativePattern = exports.convertToPositivePattern = exports.isDynamicPattern = exports.isStaticPattern = void 0; +const path = __nccwpck_require__(1017); +const globParent = __nccwpck_require__(4460); +const micromatch = __nccwpck_require__(6228); +const GLOBSTAR = '**'; +const ESCAPE_SYMBOL = '\\'; +const COMMON_GLOB_SYMBOLS_RE = /[*?]|^!/; +const REGEX_CHARACTER_CLASS_SYMBOLS_RE = /\[[^[]*]/; +const REGEX_GROUP_SYMBOLS_RE = /(?:^|[^!*+?@])\([^(]*\|[^|]*\)/; +const GLOB_EXTENSION_SYMBOLS_RE = /[!*+?@]\([^(]*\)/; +const BRACE_EXPANSION_SEPARATORS_RE = /,|\.\./; +/** + * Matches a sequence of two or more consecutive slashes, excluding the first two slashes at the beginning of the string. + * The latter is due to the presence of the device path at the beginning of the UNC path. + */ +const DOUBLE_SLASH_RE = /(?!^)\/{2,}/g; +function isStaticPattern(pattern, options = {}) { + return !isDynamicPattern(pattern, options); } -// https://github.com/typescript-eslint/typescript-eslint/issues/60 -// eslint-disable-next-line no-redeclare -(function (FastGlob) { - function sync(source, options) { - assertPatternsInput(source); - const works = getWorks(source, sync_1.default, options); - return utils.array.flatten(works); +exports.isStaticPattern = isStaticPattern; +function isDynamicPattern(pattern, options = {}) { + /** + * A special case with an empty string is necessary for matching patterns that start with a forward slash. + * An empty string cannot be a dynamic pattern. + * For example, the pattern `/lib/*` will be spread into parts: '', 'lib', '*'. 
+ */ + if (pattern === '') { + return false; } - FastGlob.sync = sync; - function stream(source, options) { - assertPatternsInput(source); - const works = getWorks(source, stream_1.default, options); - /** - * The stream returned by the provider cannot work with an asynchronous iterator. - * To support asynchronous iterators, regardless of the number of tasks, we always multiplex streams. - * This affects performance (+25%). I don't see best solution right now. - */ - return utils.stream.merge(works); + /** + * When the `caseSensitiveMatch` option is disabled, all patterns must be marked as dynamic, because we cannot check + * filepath directly (without read directory). + */ + if (options.caseSensitiveMatch === false || pattern.includes(ESCAPE_SYMBOL)) { + return true; } - FastGlob.stream = stream; - function generateTasks(source, options) { - assertPatternsInput(source); - const patterns = [].concat(source); - const settings = new settings_1.default(options); - return taskManager.generate(patterns, settings); + if (COMMON_GLOB_SYMBOLS_RE.test(pattern) || REGEX_CHARACTER_CLASS_SYMBOLS_RE.test(pattern) || REGEX_GROUP_SYMBOLS_RE.test(pattern)) { + return true; } - FastGlob.generateTasks = generateTasks; - function isDynamicPattern(source, options) { - assertPatternsInput(source); - const settings = new settings_1.default(options); - return utils.pattern.isDynamicPattern(source, settings); + if (options.extglob !== false && GLOB_EXTENSION_SYMBOLS_RE.test(pattern)) { + return true; } - FastGlob.isDynamicPattern = isDynamicPattern; - function escapePath(source) { - assertPatternsInput(source); - return utils.path.escape(source); + if (options.braceExpansion !== false && hasBraceExpansion(pattern)) { + return true; } - FastGlob.escapePath = escapePath; -})(FastGlob || (FastGlob = {})); -function getWorks(source, _Provider, options) { - const patterns = [].concat(source); - const settings = new settings_1.default(options); - const tasks = taskManager.generate(patterns, settings); - const provider = new _Provider(settings); - return tasks.map(provider.read, provider); + return false; } -function assertPatternsInput(input) { - const source = [].concat(input); - const isValidSource = source.every((item) => utils.string.isString(item) && !utils.string.isEmpty(item)); - if (!isValidSource) { - throw new TypeError('Patterns must be a string (non empty) or an array of strings'); +exports.isDynamicPattern = isDynamicPattern; +function hasBraceExpansion(pattern) { + const openingBraceIndex = pattern.indexOf('{'); + if (openingBraceIndex === -1) { + return false; + } + const closingBraceIndex = pattern.indexOf('}', openingBraceIndex + 1); + if (closingBraceIndex === -1) { + return false; } + const braceContent = pattern.slice(openingBraceIndex, closingBraceIndex); + return BRACE_EXPANSION_SEPARATORS_RE.test(braceContent); } -module.exports = FastGlob; - - -/***/ }), - -/***/ 2708: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.convertPatternGroupToTask = exports.convertPatternGroupsToTasks = exports.groupPatternsByBaseDirectory = exports.getNegativePatternsAsPositive = exports.getPositivePatterns = exports.convertPatternsToTasks = exports.generate = void 0; -const utils = __nccwpck_require__(5444); -function generate(patterns, settings) { - const positivePatterns = getPositivePatterns(patterns); - const negativePatterns = getNegativePatternsAsPositive(patterns, settings.ignore); - const 
staticPatterns = positivePatterns.filter((pattern) => utils.pattern.isStaticPattern(pattern, settings)); - const dynamicPatterns = positivePatterns.filter((pattern) => utils.pattern.isDynamicPattern(pattern, settings)); - const staticTasks = convertPatternsToTasks(staticPatterns, negativePatterns, /* dynamic */ false); - const dynamicTasks = convertPatternsToTasks(dynamicPatterns, negativePatterns, /* dynamic */ true); - return staticTasks.concat(dynamicTasks); +function convertToPositivePattern(pattern) { + return isNegativePattern(pattern) ? pattern.slice(1) : pattern; } -exports.generate = generate; -function convertPatternsToTasks(positive, negative, dynamic) { - const positivePatternsGroup = groupPatternsByBaseDirectory(positive); - // When we have a global group – there is no reason to divide the patterns into independent tasks. - // In this case, the global task covers the rest. - if ('.' in positivePatternsGroup) { - const task = convertPatternGroupToTask('.', positive, negative, dynamic); - return [task]; - } - return convertPatternGroupsToTasks(positivePatternsGroup, negative, dynamic); +exports.convertToPositivePattern = convertToPositivePattern; +function convertToNegativePattern(pattern) { + return '!' + pattern; } -exports.convertPatternsToTasks = convertPatternsToTasks; +exports.convertToNegativePattern = convertToNegativePattern; +function isNegativePattern(pattern) { + return pattern.startsWith('!') && pattern[1] !== '('; +} +exports.isNegativePattern = isNegativePattern; +function isPositivePattern(pattern) { + return !isNegativePattern(pattern); +} +exports.isPositivePattern = isPositivePattern; +function getNegativePatterns(patterns) { + return patterns.filter(isNegativePattern); +} +exports.getNegativePatterns = getNegativePatterns; function getPositivePatterns(patterns) { - return utils.pattern.getPositivePatterns(patterns); + return patterns.filter(isPositivePattern); } exports.getPositivePatterns = getPositivePatterns; -function getNegativePatternsAsPositive(patterns, ignore) { - const negative = utils.pattern.getNegativePatterns(patterns).concat(ignore); - const positive = negative.map(utils.pattern.convertToPositivePattern); - return positive; +/** + * Returns patterns that can be applied inside the current directory. + * + * @example + * // ['./*', '*', 'a/*'] + * getPatternsInsideCurrentDirectory(['./*', '*', 'a/*', '../*', './../*']) + */ +function getPatternsInsideCurrentDirectory(patterns) { + return patterns.filter((pattern) => !isPatternRelatedToParentDirectory(pattern)); } -exports.getNegativePatternsAsPositive = getNegativePatternsAsPositive; -function groupPatternsByBaseDirectory(patterns) { - const group = {}; - return patterns.reduce((collection, pattern) => { - const base = utils.pattern.getBaseDirectory(pattern); - if (base in collection) { - collection[base].push(pattern); - } - else { - collection[base] = [pattern]; - } - return collection; - }, group); +exports.getPatternsInsideCurrentDirectory = getPatternsInsideCurrentDirectory; +/** + * Returns patterns to be expanded relative to (outside) the current directory. 
+ * + * @example + * // ['../*', './../*'] + * getPatternsInsideCurrentDirectory(['./*', '*', 'a/*', '../*', './../*']) + */ +function getPatternsOutsideCurrentDirectory(patterns) { + return patterns.filter(isPatternRelatedToParentDirectory); } -exports.groupPatternsByBaseDirectory = groupPatternsByBaseDirectory; -function convertPatternGroupsToTasks(positive, negative, dynamic) { - return Object.keys(positive).map((base) => { - return convertPatternGroupToTask(base, positive[base], negative, dynamic); - }); +exports.getPatternsOutsideCurrentDirectory = getPatternsOutsideCurrentDirectory; +function isPatternRelatedToParentDirectory(pattern) { + return pattern.startsWith('..') || pattern.startsWith('./..'); } -exports.convertPatternGroupsToTasks = convertPatternGroupsToTasks; -function convertPatternGroupToTask(base, positive, negative, dynamic) { - return { - dynamic, - positive, - negative, - base, - patterns: [].concat(positive, negative.map(utils.pattern.convertToNegativePattern)) - }; +exports.isPatternRelatedToParentDirectory = isPatternRelatedToParentDirectory; +function getBaseDirectory(pattern) { + return globParent(pattern, { flipBackslashes: false }); } -exports.convertPatternGroupToTask = convertPatternGroupToTask; +exports.getBaseDirectory = getBaseDirectory; +function hasGlobStar(pattern) { + return pattern.includes(GLOBSTAR); +} +exports.hasGlobStar = hasGlobStar; +function endsWithSlashGlobStar(pattern) { + return pattern.endsWith('/' + GLOBSTAR); +} +exports.endsWithSlashGlobStar = endsWithSlashGlobStar; +function isAffectDepthOfReadingPattern(pattern) { + const basename = path.basename(pattern); + return endsWithSlashGlobStar(pattern) || isStaticPattern(basename); +} +exports.isAffectDepthOfReadingPattern = isAffectDepthOfReadingPattern; +function expandPatternsWithBraceExpansion(patterns) { + return patterns.reduce((collection, pattern) => { + return collection.concat(expandBraceExpansion(pattern)); + }, []); +} +exports.expandPatternsWithBraceExpansion = expandPatternsWithBraceExpansion; +function expandBraceExpansion(pattern) { + const patterns = micromatch.braces(pattern, { expand: true, nodupes: true, keepEscaping: true }); + /** + * Sort the patterns by length so that the same depth patterns are processed side by side. + * `a/{b,}/{c,}/*` – `['a///*', 'a/b//*', 'a//c/*', 'a/b/c/*']` + */ + patterns.sort((a, b) => a.length - b.length); + /** + * Micromatch can return an empty string in the case of patterns like `{a,}`. + */ + return patterns.filter((pattern) => pattern !== ''); +} +exports.expandBraceExpansion = expandBraceExpansion; +function getPatternParts(pattern, options) { + let { parts } = micromatch.scan(pattern, Object.assign(Object.assign({}, options), { parts: true })); + /** + * The scan method returns an empty array in some cases. + * See micromatch/picomatch#58 for more details. + */ + if (parts.length === 0) { + parts = [pattern]; + } + /** + * The scan method does not return an empty part for the pattern with a forward slash. + * This is another part of micromatch/picomatch#58. 
+ */ + if (parts[0].startsWith('/')) { + parts[0] = parts[0].slice(1); + parts.unshift(''); + } + return parts; +} +exports.getPatternParts = getPatternParts; +function makeRe(pattern, options) { + return micromatch.makeRe(pattern, options); +} +exports.makeRe = makeRe; +function convertPatternsToRe(patterns, options) { + return patterns.map((pattern) => makeRe(pattern, options)); +} +exports.convertPatternsToRe = convertPatternsToRe; +function matchAny(entry, patternsRe) { + return patternsRe.some((patternRe) => patternRe.test(entry)); +} +exports.matchAny = matchAny; +/** + * This package only works with forward slashes as a path separator. + * Because of this, we cannot use the standard `path.normalize` method, because on Windows platform it will use of backslashes. + */ +function removeDuplicateSlashes(pattern) { + return pattern.replace(DOUBLE_SLASH_RE, '/'); +} +exports.removeDuplicateSlashes = removeDuplicateSlashes; /***/ }), -/***/ 5679: +/***/ 8382: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -const stream_1 = __nccwpck_require__(2083); -const provider_1 = __nccwpck_require__(257); -class ProviderAsync extends provider_1.default { - constructor() { - super(...arguments); - this._reader = new stream_1.default(this._settings); - } - read(task) { - const root = this._getRootDirectory(task); - const options = this._getReaderOptions(task); - const entries = []; - return new Promise((resolve, reject) => { - const stream = this.api(root, task, options); - stream.once('error', reject); - stream.on('data', (entry) => entries.push(options.transform(entry))); - stream.once('end', () => resolve(entries)); - }); - } - api(root, task, options) { - if (task.dynamic) { - return this._reader.dynamic(root, options); - } - return this._reader.static(task.patterns, options); - } +exports.merge = void 0; +const merge2 = __nccwpck_require__(2578); +function merge(streams) { + const mergedStream = merge2(streams); + streams.forEach((stream) => { + stream.once('error', (error) => mergedStream.emit('error', error)); + }); + mergedStream.once('close', () => propagateCloseEventToSources(streams)); + mergedStream.once('end', () => propagateCloseEventToSources(streams)); + return mergedStream; +} +exports.merge = merge; +function propagateCloseEventToSources(streams) { + streams.forEach((stream) => stream.emit('close')); } -exports.default = ProviderAsync; /***/ }), -/***/ 6983: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 2203: +/***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -const utils = __nccwpck_require__(5444); -const partial_1 = __nccwpck_require__(5295); -class DeepFilter { - constructor(_settings, _micromatchOptions) { - this._settings = _settings; - this._micromatchOptions = _micromatchOptions; - } - getFilter(basePath, positive, negative) { - const matcher = this._getMatcher(positive); - const negativeRe = this._getNegativePatternsRe(negative); - return (entry) => this._filter(basePath, entry, matcher, negativeRe); - } - _getMatcher(patterns) { - return new partial_1.default(patterns, this._settings, this._micromatchOptions); - } - _getNegativePatternsRe(patterns) { - const affectDepthOfReadingPatterns = patterns.filter(utils.pattern.isAffectDepthOfReadingPattern); - return utils.pattern.convertPatternsToRe(affectDepthOfReadingPatterns, this._micromatchOptions); - } - _filter(basePath, 
entry, matcher, negativeRe) { - if (this._isSkippedByDeep(basePath, entry.path)) { - return false; - } - if (this._isSkippedSymbolicLink(entry)) { - return false; - } - const filepath = utils.path.removeLeadingDotSegment(entry.path); - if (this._isSkippedByPositivePatterns(filepath, matcher)) { - return false; - } - return this._isSkippedByNegativePatterns(filepath, negativeRe); - } - _isSkippedByDeep(basePath, entryPath) { - /** - * Avoid unnecessary depth calculations when it doesn't matter. - */ - if (this._settings.deep === Infinity) { - return false; - } - return this._getEntryLevel(basePath, entryPath) >= this._settings.deep; - } - _getEntryLevel(basePath, entryPath) { - const entryPathDepth = entryPath.split('/').length; - if (basePath === '') { - return entryPathDepth; - } - const basePathDepth = basePath.split('/').length; - return entryPathDepth - basePathDepth; - } - _isSkippedSymbolicLink(entry) { - return !this._settings.followSymbolicLinks && entry.dirent.isSymbolicLink(); - } - _isSkippedByPositivePatterns(entryPath, matcher) { - return !this._settings.baseNameMatch && !matcher.match(entryPath); - } - _isSkippedByNegativePatterns(entryPath, patternsRe) { - return !utils.pattern.matchAny(entryPath, patternsRe); - } +exports.isEmpty = exports.isString = void 0; +function isString(input) { + return typeof input === 'string'; +} +exports.isString = isString; +function isEmpty(input) { + return input === ''; } -exports.default = DeepFilter; +exports.isEmpty = isEmpty; /***/ }), -/***/ 1343: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 7340: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const utils = __nccwpck_require__(5444); -class EntryFilter { - constructor(_settings, _micromatchOptions) { - this._settings = _settings; - this._micromatchOptions = _micromatchOptions; - this.index = new Map(); - } - getFilter(positive, negative) { - const positiveRe = utils.pattern.convertPatternsToRe(positive, this._micromatchOptions); - const negativeRe = utils.pattern.convertPatternsToRe(negative, this._micromatchOptions); - return (entry) => this._filter(entry, positiveRe, negativeRe); + +/* eslint-disable no-var */ + +var reusify = __nccwpck_require__(2113) + +function fastqueue (context, worker, concurrency) { + if (typeof context === 'function') { + concurrency = worker + worker = context + context = null + } + + if (concurrency < 1) { + throw new Error('fastqueue concurrency must be greater than 1') + } + + var cache = reusify(Task) + var queueHead = null + var queueTail = null + var _running = 0 + var errorHandler = null + + var self = { + push: push, + drain: noop, + saturated: noop, + pause: pause, + paused: false, + concurrency: concurrency, + running: running, + resume: resume, + idle: idle, + length: length, + getQueue: getQueue, + unshift: unshift, + empty: noop, + kill: kill, + killAndDrain: killAndDrain, + error: error + } + + return self + + function running () { + return _running + } + + function pause () { + self.paused = true + } + + function length () { + var current = queueHead + var counter = 0 + + while (current) { + current = current.next + counter++ } - _filter(entry, positiveRe, negativeRe) { - if (this._settings.unique && this._isDuplicateEntry(entry)) { - return false; - } - if (this._onlyFileFilter(entry) || this._onlyDirectoryFilter(entry)) { - return false; - } - if (this._isSkippedByAbsoluteNegativePatterns(entry.path, 
negativeRe)) { - return false; - } - const filepath = this._settings.baseNameMatch ? entry.name : entry.path; - const isMatched = this._isMatchToPatterns(filepath, positiveRe) && !this._isMatchToPatterns(entry.path, negativeRe); - if (this._settings.unique && isMatched) { - this._createIndexRecord(entry); - } - return isMatched; + + return counter + } + + function getQueue () { + var current = queueHead + var tasks = [] + + while (current) { + tasks.push(current.value) + current = current.next } - _isDuplicateEntry(entry) { - return this.index.has(entry.path); + + return tasks + } + + function resume () { + if (!self.paused) return + self.paused = false + for (var i = 0; i < self.concurrency; i++) { + _running++ + release() } - _createIndexRecord(entry) { - this.index.set(entry.path, undefined); + } + + function idle () { + return _running === 0 && self.length() === 0 + } + + function push (value, done) { + var current = cache.get() + + current.context = context + current.release = release + current.value = value + current.callback = done || noop + current.errorHandler = errorHandler + + if (_running === self.concurrency || self.paused) { + if (queueTail) { + queueTail.next = current + queueTail = current + } else { + queueHead = current + queueTail = current + self.saturated() + } + } else { + _running++ + worker.call(context, current.value, current.worked) } - _onlyFileFilter(entry) { - return this._settings.onlyFiles && !entry.dirent.isFile(); + } + + function unshift (value, done) { + var current = cache.get() + + current.context = context + current.release = release + current.value = value + current.callback = done || noop + + if (_running === self.concurrency || self.paused) { + if (queueHead) { + current.next = queueHead + queueHead = current + } else { + queueHead = current + queueTail = current + self.saturated() + } + } else { + _running++ + worker.call(context, current.value, current.worked) } - _onlyDirectoryFilter(entry) { - return this._settings.onlyDirectories && !entry.dirent.isDirectory(); + } + + function release (holder) { + if (holder) { + cache.release(holder) } - _isSkippedByAbsoluteNegativePatterns(entryPath, patternsRe) { - if (!this._settings.absolute) { - return false; + var next = queueHead + if (next) { + if (!self.paused) { + if (queueTail === queueHead) { + queueTail = null } - const fullpath = utils.path.makeAbsolute(this._settings.cwd, entryPath); - return utils.pattern.matchAny(fullpath, patternsRe); - } - _isMatchToPatterns(entryPath, patternsRe) { - const filepath = utils.path.removeLeadingDotSegment(entryPath); - return utils.pattern.matchAny(filepath, patternsRe); + queueHead = next.next + next.next = null + worker.call(context, next.value, next.worked) + if (queueTail === null) { + self.empty() + } + } else { + _running-- + } + } else if (--_running === 0) { + self.drain() } -} -exports.default = EntryFilter; + } + function kill () { + queueHead = null + queueTail = null + self.drain = noop + } -/***/ }), + function killAndDrain () { + queueHead = null + queueTail = null + self.drain() + self.drain = noop + } -/***/ 6654: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + function error (handler) { + errorHandler = handler + } +} -"use strict"; +function noop () {} -Object.defineProperty(exports, "__esModule", ({ value: true })); -const utils = __nccwpck_require__(5444); -class ErrorFilter { - constructor(_settings) { - this._settings = _settings; - } - getFilter() { - return (error) => this._isNonFatalError(error); - } - 
_isNonFatalError(error) { - return utils.errno.isEnoentCodeError(error) || this._settings.suppressErrors; +function Task () { + this.value = null + this.callback = noop + this.next = null + this.release = noop + this.context = null + this.errorHandler = null + + var self = this + + this.worked = function worked (err, result) { + var callback = self.callback + var errorHandler = self.errorHandler + var val = self.value + self.value = null + self.callback = noop + if (self.errorHandler) { + errorHandler(err, val) } + callback.call(self.context, err, result) + self.release(self) + } } -exports.default = ErrorFilter; +function queueAsPromised (context, worker, concurrency) { + if (typeof context === 'function') { + concurrency = worker + worker = context + context = null + } -/***/ }), + function asyncWrapper (arg, cb) { + worker.call(this, arg) + .then(function (res) { + cb(null, res) + }, cb) + } -/***/ 2576: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + var queue = fastqueue(context, asyncWrapper, concurrency) -"use strict"; + var pushCb = queue.push + var unshiftCb = queue.unshift -Object.defineProperty(exports, "__esModule", ({ value: true })); -const utils = __nccwpck_require__(5444); -class Matcher { - constructor(_patterns, _settings, _micromatchOptions) { - this._patterns = _patterns; - this._settings = _settings; - this._micromatchOptions = _micromatchOptions; - this._storage = []; - this._fillStorage(); - } - _fillStorage() { - /** - * The original pattern may include `{,*,**,a/*}`, which will lead to problems with matching (unresolved level). - * So, before expand patterns with brace expansion into separated patterns. - */ - const patterns = utils.pattern.expandPatternsWithBraceExpansion(this._patterns); - for (const pattern of patterns) { - const segments = this._getPatternSegments(pattern); - const sections = this._splitSegmentsIntoSections(segments); - this._storage.push({ - complete: sections.length <= 1, - pattern, - segments, - sections - }); + queue.push = push + queue.unshift = unshift + queue.drained = drained + + return queue + + function push (value) { + var p = new Promise(function (resolve, reject) { + pushCb(value, function (err, result) { + if (err) { + reject(err) + return } + resolve(result) + }) + }) + + // Let's fork the promise chain to + // make the error bubble up to the user but + // not lead to a unhandledRejection + p.catch(noop) + + return p + } + + function unshift (value) { + var p = new Promise(function (resolve, reject) { + unshiftCb(value, function (err, result) { + if (err) { + reject(err) + return + } + resolve(result) + }) + }) + + // Let's fork the promise chain to + // make the error bubble up to the user but + // not lead to a unhandledRejection + p.catch(noop) + + return p + } + + function drained () { + if (queue.idle()) { + return new Promise(function (resolve) { + resolve() + }) } - _getPatternSegments(pattern) { - const parts = utils.pattern.getPatternParts(pattern, this._micromatchOptions); - return parts.map((part) => { - const dynamic = utils.pattern.isDynamicPattern(part, this._settings); - if (!dynamic) { - return { - dynamic: false, - pattern: part - }; - } - return { - dynamic: true, - pattern: part, - patternRe: utils.pattern.makeRe(part, this._micromatchOptions) - }; - }); - } - _splitSegmentsIntoSections(segments) { - return utils.array.splitWhen(segments, (segment) => segment.dynamic && utils.pattern.hasGlobStar(segment.pattern)); - } + + var previousDrain = queue.drain + + var p = new Promise(function 
(resolve) { + queue.drain = function () { + previousDrain() + resolve() + } + }) + + return p + } } -exports.default = Matcher; + +module.exports = fastqueue +module.exports.promise = queueAsPromised /***/ }), -/***/ 5295: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 6330: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; +/*! + * fill-range + * + * Copyright (c) 2014-present, Jon Schlinkert. + * Licensed under the MIT License. + */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -const matcher_1 = __nccwpck_require__(2576); -class PartialMatcher extends matcher_1.default { - match(filepath) { - const parts = filepath.split('/'); - const levels = parts.length; - const patterns = this._storage.filter((info) => !info.complete || info.segments.length > levels); - for (const pattern of patterns) { - const section = pattern.sections[0]; - /** - * In this case, the pattern has a globstar and we must read all directories unconditionally, - * but only if the level has reached the end of the first group. - * - * fixtures/{a,b}/** - * ^ true/false ^ always true - */ - if (!pattern.complete && levels > section.length) { - return true; - } - const match = parts.every((part, index) => { - const segment = pattern.segments[index]; - if (segment.dynamic && segment.patternRe.test(part)) { - return true; - } - if (!segment.dynamic && segment.pattern === part) { - return true; - } - return false; - }); - if (match) { - return true; - } - } - return false; - } -} -exports.default = PartialMatcher; -/***/ }), +const util = __nccwpck_require__(3837); +const toRegexRange = __nccwpck_require__(1861); -/***/ 257: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +const isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val); -"use strict"; +const transform = toNumber => { + return value => toNumber === true ? Number(value) : String(value); +}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const path = __nccwpck_require__(5622); -const deep_1 = __nccwpck_require__(6983); -const entry_1 = __nccwpck_require__(1343); -const error_1 = __nccwpck_require__(6654); -const entry_2 = __nccwpck_require__(4029); -class Provider { - constructor(_settings) { - this._settings = _settings; - this.errorFilter = new error_1.default(this._settings); - this.entryFilter = new entry_1.default(this._settings, this._getMicromatchOptions()); - this.deepFilter = new deep_1.default(this._settings, this._getMicromatchOptions()); - this.entryTransformer = new entry_2.default(this._settings); - } - _getRootDirectory(task) { - return path.resolve(this._settings.cwd, task.base); - } - _getReaderOptions(task) { - const basePath = task.base === '.' ? 
'' : task.base; - return { - basePath, - pathSegmentSeparator: '/', - concurrency: this._settings.concurrency, - deepFilter: this.deepFilter.getFilter(basePath, task.positive, task.negative), - entryFilter: this.entryFilter.getFilter(task.positive, task.negative), - errorFilter: this.errorFilter.getFilter(), - followSymbolicLinks: this._settings.followSymbolicLinks, - fs: this._settings.fs, - stats: this._settings.stats, - throwErrorOnBrokenSymbolicLink: this._settings.throwErrorOnBrokenSymbolicLink, - transform: this.entryTransformer.getTransformer() - }; - } - _getMicromatchOptions() { - return { - dot: this._settings.dot, - matchBase: this._settings.baseNameMatch, - nobrace: !this._settings.braceExpansion, - nocase: !this._settings.caseSensitiveMatch, - noext: !this._settings.extglob, - noglobstar: !this._settings.globstar, - posix: true, - strictSlashes: false - }; - } -} -exports.default = Provider; +const isValidValue = value => { + return typeof value === 'number' || (typeof value === 'string' && value !== ''); +}; +const isNumber = num => Number.isInteger(+num); -/***/ }), +const zeros = input => { + let value = `${input}`; + let index = -1; + if (value[0] === '-') value = value.slice(1); + if (value === '0') return false; + while (value[++index] === '0'); + return index > 0; +}; -/***/ 4630: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +const stringify = (start, end, options) => { + if (typeof start === 'string' || typeof end === 'string') { + return true; + } + return options.stringify === true; +}; -"use strict"; +const pad = (input, maxLength, toNumber) => { + if (maxLength > 0) { + let dash = input[0] === '-' ? '-' : ''; + if (dash) input = input.slice(1); + input = (dash + input.padStart(dash ? maxLength - 1 : maxLength, '0')); + } + if (toNumber === false) { + return String(input); + } + return input; +}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const stream_1 = __nccwpck_require__(2413); -const stream_2 = __nccwpck_require__(2083); -const provider_1 = __nccwpck_require__(257); -class ProviderStream extends provider_1.default { - constructor() { - super(...arguments); - this._reader = new stream_2.default(this._settings); - } - read(task) { - const root = this._getRootDirectory(task); - const options = this._getReaderOptions(task); - const source = this.api(root, task, options); - const destination = new stream_1.Readable({ objectMode: true, read: () => { } }); - source - .once('error', (error) => destination.emit('error', error)) - .on('data', (entry) => destination.emit('data', options.transform(entry))) - .once('end', () => destination.emit('end')); - destination - .once('close', () => source.destroy()); - return destination; - } - api(root, task, options) { - if (task.dynamic) { - return this._reader.dynamic(root, options); - } - return this._reader.static(task.patterns, options); - } -} -exports.default = ProviderStream; +const toMaxLen = (input, maxLength) => { + let negative = input[0] === '-' ? '-' : ''; + if (negative) { + input = input.slice(1); + maxLength--; + } + while (input.length < maxLength) input = '0' + input; + return negative ? ('-' + input) : input; +}; +const toSequence = (parts, options) => { + parts.negatives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0); + parts.positives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0); -/***/ }), + let prefix = options.capture ? 
'' : '?:'; + let positives = ''; + let negatives = ''; + let result; -/***/ 2405: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + if (parts.positives.length) { + positives = parts.positives.join('|'); + } -"use strict"; + if (parts.negatives.length) { + negatives = `-(${prefix}${parts.negatives.join('|')})`; + } -Object.defineProperty(exports, "__esModule", ({ value: true })); -const sync_1 = __nccwpck_require__(8821); -const provider_1 = __nccwpck_require__(257); -class ProviderSync extends provider_1.default { - constructor() { - super(...arguments); - this._reader = new sync_1.default(this._settings); - } - read(task) { - const root = this._getRootDirectory(task); - const options = this._getReaderOptions(task); - const entries = this.api(root, task, options); - return entries.map(options.transform); - } - api(root, task, options) { - if (task.dynamic) { - return this._reader.dynamic(root, options); - } - return this._reader.static(task.patterns, options); - } -} -exports.default = ProviderSync; + if (positives && negatives) { + result = `${positives}|${negatives}`; + } else { + result = positives || negatives; + } + if (options.wrap) { + return `(${prefix}${result})`; + } -/***/ }), + return result; +}; -/***/ 4029: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +const toRange = (a, b, isNumbers, options) => { + if (isNumbers) { + return toRegexRange(a, b, { wrap: false, ...options }); + } -"use strict"; + let start = String.fromCharCode(a); + if (a === b) return start; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const utils = __nccwpck_require__(5444); -class EntryTransformer { - constructor(_settings) { - this._settings = _settings; - } - getTransformer() { - return (entry) => this._transform(entry); - } - _transform(entry) { - let filepath = entry.path; - if (this._settings.absolute) { - filepath = utils.path.makeAbsolute(this._settings.cwd, filepath); - filepath = utils.path.unixify(filepath); - } - if (this._settings.markDirectories && entry.dirent.isDirectory()) { - filepath += '/'; - } - if (!this._settings.objectMode) { - return filepath; - } - return Object.assign(Object.assign({}, entry), { path: filepath }); - } -} -exports.default = EntryTransformer; + let stop = String.fromCharCode(b); + return `[${start}-${stop}]`; +}; +const toRegex = (start, end, options) => { + if (Array.isArray(start)) { + let wrap = options.wrap === true; + let prefix = options.capture ? '' : '?:'; + return wrap ? 
`(${prefix}${start.join('|')})` : start.join('|'); + } + return toRegexRange(start, end, options); +}; -/***/ }), +const rangeError = (...args) => { + return new RangeError('Invalid range arguments: ' + util.inspect(...args)); +}; -/***/ 8062: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +const invalidRange = (start, end, options) => { + if (options.strictRanges === true) throw rangeError([start, end]); + return []; +}; -"use strict"; +const invalidStep = (step, options) => { + if (options.strictRanges === true) { + throw new TypeError(`Expected step "${step}" to be a number`); + } + return []; +}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const path = __nccwpck_require__(5622); -const fsStat = __nccwpck_require__(109); -const utils = __nccwpck_require__(5444); -class Reader { - constructor(_settings) { - this._settings = _settings; - this._fsStatSettings = new fsStat.Settings({ - followSymbolicLink: this._settings.followSymbolicLinks, - fs: this._settings.fs, - throwErrorOnBrokenSymbolicLink: this._settings.followSymbolicLinks - }); - } - _getFullEntryPath(filepath) { - return path.resolve(this._settings.cwd, filepath); - } - _makeEntry(stats, pattern) { - const entry = { - name: pattern, - path: pattern, - dirent: utils.fs.createDirentFromStats(pattern, stats) - }; - if (this._settings.stats) { - entry.stats = stats; - } - return entry; - } - _isFatalError(error) { - return !utils.errno.isEnoentCodeError(error) && !this._settings.suppressErrors; +const fillNumbers = (start, end, step = 1, options = {}) => { + let a = Number(start); + let b = Number(end); + + if (!Number.isInteger(a) || !Number.isInteger(b)) { + if (options.strictRanges === true) throw rangeError([start, end]); + return []; + } + + // fix negative zero + if (a === 0) a = 0; + if (b === 0) b = 0; + + let descending = a > b; + let startString = String(start); + let endString = String(end); + let stepString = String(step); + step = Math.max(Math.abs(step), 1); + + let padded = zeros(startString) || zeros(endString) || zeros(stepString); + let maxLen = padded ? Math.max(startString.length, endString.length, stepString.length) : 0; + let toNumber = padded === false && stringify(start, end, options) === false; + let format = options.transform || transform(toNumber); + + if (options.toRegex && step === 1) { + return toRange(toMaxLen(start, maxLen), toMaxLen(end, maxLen), true, options); + } + + let parts = { negatives: [], positives: [] }; + let push = num => parts[num < 0 ? 'negatives' : 'positives'].push(Math.abs(num)); + let range = []; + let index = 0; + + while (descending ? a >= b : a <= b) { + if (options.toRegex === true && step > 1) { + push(a); + } else { + range.push(pad(format(a, index), maxLen, toNumber)); } -} -exports.default = Reader; + a = descending ? a - step : a + step; + index++; + } + + if (options.toRegex === true) { + return step > 1 + ? 
toSequence(parts, options) + : toRegex(range, null, { wrap: false, ...options }); + } + + return range; +}; + +const fillLetters = (start, end, step = 1, options = {}) => { + if ((!isNumber(start) && start.length > 1) || (!isNumber(end) && end.length > 1)) { + return invalidRange(start, end, options); + } + + + let format = options.transform || (val => String.fromCharCode(val)); + let a = `${start}`.charCodeAt(0); + let b = `${end}`.charCodeAt(0); + + let descending = a > b; + let min = Math.min(a, b); + let max = Math.max(a, b); + + if (options.toRegex && step === 1) { + return toRange(min, max, false, options); + } + + let range = []; + let index = 0; + + while (descending ? a >= b : a <= b) { + range.push(format(a, index)); + a = descending ? a - step : a + step; + index++; + } + + if (options.toRegex === true) { + return toRegex(range, null, { wrap: false, options }); + } + + return range; +}; + +const fill = (start, end, step, options = {}) => { + if (end == null && isValidValue(start)) { + return [start]; + } + + if (!isValidValue(start) || !isValidValue(end)) { + return invalidRange(start, end, options); + } + + if (typeof step === 'function') { + return fill(start, end, 1, { transform: step }); + } + + if (isObject(step)) { + return fill(start, end, 0, step); + } + + let opts = { ...options }; + if (opts.capture === true) opts.wrap = true; + step = step || opts.step || 1; + + if (!isNumber(step)) { + if (step != null && !isObject(step)) return invalidStep(step, opts); + return fill(start, end, 1, step); + } + + if (isNumber(start) && isNumber(end)) { + return fillNumbers(start, end, step, opts); + } + + return fillLetters(start, end, Math.max(Math.abs(step), 1), opts); +}; + +module.exports = fill; /***/ }), -/***/ 2083: +/***/ 6457: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -const stream_1 = __nccwpck_require__(2413); -const fsStat = __nccwpck_require__(109); -const fsWalk = __nccwpck_require__(6026); -const reader_1 = __nccwpck_require__(8062); -class ReaderStream extends reader_1.default { - constructor() { - super(...arguments); - this._walkStream = fsWalk.walkStream; - this._stat = fsStat.stat; - } - dynamic(root, options) { - return this._walkStream(root, options); - } - static(patterns, options) { - const filepaths = patterns.map(this._getFullEntryPath, this); - const stream = new stream_1.PassThrough({ objectMode: true }); - stream._write = (index, _enc, done) => { - return this._getEntry(filepaths[index], patterns[index], options) - .then((entry) => { - if (entry !== null && options.entryFilter(entry)) { - stream.push(entry); - } - if (index === filepaths.length - 1) { - stream.end(); +const types_1 = __nccwpck_require__(4597); +function createRejection(error, ...beforeErrorGroups) { + const promise = (async () => { + if (error instanceof types_1.RequestError) { + try { + for (const hooks of beforeErrorGroups) { + if (hooks) { + for (const hook of hooks) { + // eslint-disable-next-line no-await-in-loop + error = await hook(error); + } + } } - done(); - }) - .catch(done); - }; - for (let i = 0; i < filepaths.length; i++) { - stream.write(i); - } - return stream; - } - _getEntry(filepath, pattern, options) { - return this._getStat(filepath) - .then((stats) => this._makeEntry(stats, pattern)) - .catch((error) => { - if (options.errorFilter(error)) { - return null; } - throw error; - }); - } - _getStat(filepath) { - return new Promise((resolve, reject) => { - 
this._stat(filepath, this._fsStatSettings, (error, stats) => { - return error === null ? resolve(stats) : reject(error); - }); - }); - } + catch (error_) { + error = error_; + } + } + throw error; + })(); + const returnPromise = () => promise; + promise.json = returnPromise; + promise.text = returnPromise; + promise.buffer = returnPromise; + promise.on = returnPromise; + return promise; } -exports.default = ReaderStream; +exports["default"] = createRejection; /***/ }), -/***/ 8821: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 6056: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; Object.defineProperty(exports, "__esModule", ({ value: true })); -const fsStat = __nccwpck_require__(109); -const fsWalk = __nccwpck_require__(6026); -const reader_1 = __nccwpck_require__(8062); -class ReaderSync extends reader_1.default { - constructor() { - super(...arguments); - this._walkSync = fsWalk.walkSync; - this._statSync = fsStat.statSync; - } - dynamic(root, options) { - return this._walkSync(root, options); - } - static(patterns, options) { - const entries = []; - for (const pattern of patterns) { - const filepath = this._getFullEntryPath(pattern); - const entry = this._getEntry(filepath, pattern, options); - if (entry === null || !options.entryFilter(entry)) { - continue; - } - entries.push(entry); - } - return entries; - } - _getEntry(filepath, pattern, options) { - try { - const stats = this._getStat(filepath); - return this._makeEntry(stats, pattern); - } - catch (error) { - if (options.errorFilter(error)) { - return null; - } - throw error; +const events_1 = __nccwpck_require__(2361); +const is_1 = __nccwpck_require__(7678); +const PCancelable = __nccwpck_require__(9072); +const types_1 = __nccwpck_require__(4597); +const parse_body_1 = __nccwpck_require__(8220); +const core_1 = __nccwpck_require__(94); +const proxy_events_1 = __nccwpck_require__(3021); +const get_buffer_1 = __nccwpck_require__(4500); +const is_response_ok_1 = __nccwpck_require__(9298); +const proxiedRequestEvents = [ + 'request', + 'response', + 'redirect', + 'uploadProgress', + 'downloadProgress' +]; +function asPromise(normalizedOptions) { + let globalRequest; + let globalResponse; + const emitter = new events_1.EventEmitter(); + const promise = new PCancelable((resolve, reject, onCancel) => { + const makeRequest = (retryCount) => { + const request = new core_1.default(undefined, normalizedOptions); + request.retryCount = retryCount; + request._noPipe = true; + onCancel(() => request.destroy()); + onCancel.shouldReject = false; + onCancel(() => reject(new types_1.CancelError(request))); + globalRequest = request; + request.once('response', async (response) => { + var _a; + response.retryCount = retryCount; + if (response.request.aborted) { + // Canceled while downloading - will throw a `CancelError` or `TimeoutError` error + return; + } + // Download body + let rawBody; + try { + rawBody = await get_buffer_1.default(request); + response.rawBody = rawBody; + } + 
catch (_b) { + // The same error is caught below. + // See request.once('error') + return; + } + if (request._isAboutToError) { + return; + } + // Parse body + const contentEncoding = ((_a = response.headers['content-encoding']) !== null && _a !== void 0 ? _a : '').toLowerCase(); + const isCompressed = ['gzip', 'deflate', 'br'].includes(contentEncoding); + const { options } = request; + if (isCompressed && !options.decompress) { + response.body = rawBody; + } + else { + try { + response.body = parse_body_1.default(response, options.responseType, options.parseJson, options.encoding); + } + catch (error) { + // Fallback to `utf8` + response.body = rawBody.toString(); + if (is_response_ok_1.isResponseOk(response)) { + request._beforeError(error); + return; + } + } + } + try { + for (const [index, hook] of options.hooks.afterResponse.entries()) { + // @ts-expect-error TS doesn't notice that CancelableRequest is a Promise + // eslint-disable-next-line no-await-in-loop + response = await hook(response, async (updatedOptions) => { + const typedOptions = core_1.default.normalizeArguments(undefined, { + ...updatedOptions, + retry: { + calculateDelay: () => 0 + }, + throwHttpErrors: false, + resolveBodyOnly: false + }, options); + // Remove any further hooks for that request, because we'll call them anyway. + // The loop continues. We don't want duplicates (asPromise recursion). + typedOptions.hooks.afterResponse = typedOptions.hooks.afterResponse.slice(0, index); + for (const hook of typedOptions.hooks.beforeRetry) { + // eslint-disable-next-line no-await-in-loop + await hook(typedOptions); + } + const promise = asPromise(typedOptions); + onCancel(() => { + promise.catch(() => { }); + promise.cancel(); + }); + return promise; + }); + } + } + catch (error) { + request._beforeError(new types_1.RequestError(error.message, error, request)); + return; + } + globalResponse = response; + if (!is_response_ok_1.isResponseOk(response)) { + request._beforeError(new types_1.HTTPError(response)); + return; + } + request.destroy(); + resolve(request.options.resolveBodyOnly ? response.body : response); + }); + const onError = (error) => { + if (promise.isCanceled) { + return; + } + const { options } = request; + if (error instanceof types_1.HTTPError && !options.throwHttpErrors) { + const { response } = error; + resolve(request.options.resolveBodyOnly ? response.body : response); + return; + } + reject(error); + }; + request.once('error', onError); + const previousBody = request.options.body; + request.once('retry', (newRetryCount, error) => { + var _a, _b; + if (previousBody === ((_a = error.request) === null || _a === void 0 ? void 0 : _a.options.body) && is_1.default.nodeStream((_b = error.request) === null || _b === void 0 ? 
void 0 : _b.options.body)) { + onError(error); + return; + } + makeRequest(newRetryCount); + }); + proxy_events_1.default(request, emitter, proxiedRequestEvents); + }; + makeRequest(0); + }); + promise.on = (event, fn) => { + emitter.on(event, fn); + return promise; + }; + const shortcut = (responseType) => { + const newPromise = (async () => { + // Wait until downloading has ended + await promise; + const { options } = globalResponse.request; + return parse_body_1.default(globalResponse, responseType, options.parseJson, options.encoding); + })(); + Object.defineProperties(newPromise, Object.getOwnPropertyDescriptors(promise)); + return newPromise; + }; + promise.json = () => { + const { headers } = globalRequest.options; + if (!globalRequest.writableFinished && headers.accept === undefined) { + headers.accept = 'application/json'; } - } - _getStat(filepath) { - return this._statSync(filepath, this._fsStatSettings); - } + return shortcut('json'); + }; + promise.buffer = () => shortcut('buffer'); + promise.text = () => shortcut('text'); + return promise; } -exports.default = ReaderSync; +exports["default"] = asPromise; +__exportStar(__nccwpck_require__(4597), exports); /***/ }), -/***/ 952: +/***/ 1048: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.DEFAULT_FILE_SYSTEM_ADAPTER = void 0; -const fs = __nccwpck_require__(5747); -const os = __nccwpck_require__(2087); -/** - * The `os.cpus` method can return zero. We expect the number of cores to be greater than zero. - * https://github.com/nodejs/node/blob/7faeddf23a98c53896f8b574a6e66589e8fb1eb8/lib/os.js#L106-L107 - */ -const CPU_COUNT = Math.max(os.cpus().length, 1); -exports.DEFAULT_FILE_SYSTEM_ADAPTER = { - lstat: fs.lstat, - lstatSync: fs.lstatSync, - stat: fs.stat, - statSync: fs.statSync, - readdir: fs.readdir, - readdirSync: fs.readdirSync -}; -class Settings { - constructor(_options = {}) { - this._options = _options; - this.absolute = this._getValue(this._options.absolute, false); - this.baseNameMatch = this._getValue(this._options.baseNameMatch, false); - this.braceExpansion = this._getValue(this._options.braceExpansion, true); - this.caseSensitiveMatch = this._getValue(this._options.caseSensitiveMatch, true); - this.concurrency = this._getValue(this._options.concurrency, CPU_COUNT); - this.cwd = this._getValue(this._options.cwd, process.cwd()); - this.deep = this._getValue(this._options.deep, Infinity); - this.dot = this._getValue(this._options.dot, false); - this.extglob = this._getValue(this._options.extglob, true); - this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, true); - this.fs = this._getFileSystemMethods(this._options.fs); - this.globstar = this._getValue(this._options.globstar, true); - this.ignore = this._getValue(this._options.ignore, []); - this.markDirectories = this._getValue(this._options.markDirectories, false); - this.objectMode = this._getValue(this._options.objectMode, false); - this.onlyDirectories = this._getValue(this._options.onlyDirectories, false); - this.onlyFiles = this._getValue(this._options.onlyFiles, true); - this.stats = this._getValue(this._options.stats, false); - this.suppressErrors = this._getValue(this._options.suppressErrors, false); - this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, false); - this.unique = this._getValue(this._options.unique, true); - if (this.onlyDirectories) { - this.onlyFiles = false; 
+const is_1 = __nccwpck_require__(7678); +const normalizeArguments = (options, defaults) => { + if (is_1.default.null_(options.encoding)) { + throw new TypeError('To get a Buffer, set `options.responseType` to `buffer` instead'); + } + is_1.assert.any([is_1.default.string, is_1.default.undefined], options.encoding); + is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.resolveBodyOnly); + is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.methodRewriting); + is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.isStream); + is_1.assert.any([is_1.default.string, is_1.default.undefined], options.responseType); + // `options.responseType` + if (options.responseType === undefined) { + options.responseType = 'text'; + } + // `options.retry` + const { retry } = options; + if (defaults) { + options.retry = { ...defaults.retry }; + } + else { + options.retry = { + calculateDelay: retryObject => retryObject.computedValue, + limit: 0, + methods: [], + statusCodes: [], + errorCodes: [], + maxRetryAfter: undefined + }; + } + if (is_1.default.object(retry)) { + options.retry = { + ...options.retry, + ...retry + }; + options.retry.methods = [...new Set(options.retry.methods.map(method => method.toUpperCase()))]; + options.retry.statusCodes = [...new Set(options.retry.statusCodes)]; + options.retry.errorCodes = [...new Set(options.retry.errorCodes)]; + } + else if (is_1.default.number(retry)) { + options.retry.limit = retry; + } + if (is_1.default.undefined(options.retry.maxRetryAfter)) { + options.retry.maxRetryAfter = Math.min( + // TypeScript is not smart enough to handle `.filter(x => is.number(x))`. + // eslint-disable-next-line unicorn/no-fn-reference-in-iterator + ...[options.timeout.request, options.timeout.connect].filter(is_1.default.number)); + } + // `options.pagination` + if (is_1.default.object(options.pagination)) { + if (defaults) { + options.pagination = { + ...defaults.pagination, + ...options.pagination + }; } - if (this.stats) { - this.objectMode = true; + const { pagination } = options; + if (!is_1.default.function_(pagination.transform)) { + throw new Error('`options.pagination.transform` must be implemented'); + } + if (!is_1.default.function_(pagination.shouldContinue)) { + throw new Error('`options.pagination.shouldContinue` must be implemented'); + } + if (!is_1.default.function_(pagination.filter)) { + throw new TypeError('`options.pagination.filter` must be implemented'); + } + if (!is_1.default.function_(pagination.paginate)) { + throw new Error('`options.pagination.paginate` must be implemented'); } } - _getValue(option, value) { - return option === undefined ? 
value : option; - } - _getFileSystemMethods(methods = {}) { - return Object.assign(Object.assign({}, exports.DEFAULT_FILE_SYSTEM_ADAPTER), methods); + // JSON mode + if (options.responseType === 'json' && options.headers.accept === undefined) { + options.headers.accept = 'application/json'; } -} -exports.default = Settings; + return options; +}; +exports["default"] = normalizeArguments; /***/ }), -/***/ 5325: -/***/ ((__unused_webpack_module, exports) => { +/***/ 8220: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.splitWhen = exports.flatten = void 0; -function flatten(items) { - return items.reduce((collection, item) => [].concat(collection, item), []); -} -exports.flatten = flatten; -function splitWhen(items, predicate) { - const result = [[]]; - let groupIndex = 0; - for (const item of items) { - if (predicate(item)) { - groupIndex++; - result[groupIndex] = []; +const types_1 = __nccwpck_require__(4597); +const parseBody = (response, responseType, parseJson, encoding) => { + const { rawBody } = response; + try { + if (responseType === 'text') { + return rawBody.toString(encoding); } - else { - result[groupIndex].push(item); + if (responseType === 'json') { + return rawBody.length === 0 ? '' : parseJson(rawBody.toString()); } + if (responseType === 'buffer') { + return rawBody; + } + throw new types_1.ParseError({ + message: `Unknown body type '${responseType}'`, + name: 'Error' + }, response); } - return result; -} -exports.splitWhen = splitWhen; - - -/***/ }), - -/***/ 1230: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.isEnoentCodeError = void 0; -function isEnoentCodeError(error) { - return error.code === 'ENOENT'; -} -exports.isEnoentCodeError = isEnoentCodeError; + catch (error) { + throw new types_1.ParseError(error, response); + } +}; +exports["default"] = parseBody; /***/ }), -/***/ 7543: -/***/ ((__unused_webpack_module, exports) => { +/***/ 4597: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.createDirentFromStats = void 0; -class DirentFromStats { - constructor(name, stats) { - this.name = name; - this.isBlockDevice = stats.isBlockDevice.bind(stats); - this.isCharacterDevice = stats.isCharacterDevice.bind(stats); - this.isDirectory = stats.isDirectory.bind(stats); - this.isFIFO = stats.isFIFO.bind(stats); - this.isFile = stats.isFile.bind(stats); - this.isSocket = stats.isSocket.bind(stats); - this.isSymbolicLink = stats.isSymbolicLink.bind(stats); +exports.CancelError = exports.ParseError = void 0; +const core_1 = __nccwpck_require__(94); +/** +An error to be thrown when server response code is 2xx, and parsing body fails. +Includes a `response` property. 
+*/ +class ParseError extends core_1.RequestError { + constructor(error, response) { + const { options } = response.request; + super(`${error.message} in "${options.url.toString()}"`, error, response.request); + this.name = 'ParseError'; + this.code = this.code === 'ERR_GOT_REQUEST_ERROR' ? 'ERR_BODY_PARSE_FAILURE' : this.code; } } -function createDirentFromStats(name, stats) { - return new DirentFromStats(name, stats); +exports.ParseError = ParseError; +/** +An error to be thrown when the request is aborted with `.cancel()`. +*/ +class CancelError extends core_1.RequestError { + constructor(request) { + super('Promise was canceled', {}, request); + this.name = 'CancelError'; + this.code = 'ERR_CANCELED'; + } + get isCanceled() { + return true; + } } -exports.createDirentFromStats = createDirentFromStats; - - -/***/ }), - -/***/ 5444: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.string = exports.stream = exports.pattern = exports.path = exports.fs = exports.errno = exports.array = void 0; -const array = __nccwpck_require__(5325); -exports.array = array; -const errno = __nccwpck_require__(1230); -exports.errno = errno; -const fs = __nccwpck_require__(7543); -exports.fs = fs; -const path = __nccwpck_require__(3873); -exports.path = path; -const pattern = __nccwpck_require__(1221); -exports.pattern = pattern; -const stream = __nccwpck_require__(8382); -exports.stream = stream; -const string = __nccwpck_require__(2203); -exports.string = string; +exports.CancelError = CancelError; +__exportStar(__nccwpck_require__(94), exports); /***/ }), -/***/ 3873: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 3462: +/***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.removeLeadingDotSegment = exports.escape = exports.makeAbsolute = exports.unixify = void 0; -const path = __nccwpck_require__(5622); -const LEADING_DOT_SEGMENT_CHARACTERS_COUNT = 2; // ./ or .\\ -const UNESCAPED_GLOB_SYMBOLS_RE = /(\\?)([()*?[\]{|}]|^!|[!+@](?=\())/g; -/** - * Designed to work only with simple paths: `dir\\file`. - */ -function unixify(filepath) { - return filepath.replace(/\\/g, '/'); -} -exports.unixify = unixify; -function makeAbsolute(cwd, filepath) { - return path.resolve(cwd, filepath); -} -exports.makeAbsolute = makeAbsolute; -function escape(pattern) { - return pattern.replace(UNESCAPED_GLOB_SYMBOLS_RE, '\\$2'); -} -exports.escape = escape; -function removeLeadingDotSegment(entry) { - // We do not use `startsWith` because this is 10x slower than current implementation for some cases. 
- // eslint-disable-next-line @typescript-eslint/prefer-string-starts-ends-with - if (entry.charAt(0) === '.') { - const secondCharactery = entry.charAt(1); - if (secondCharactery === '/' || secondCharactery === '\\') { - return entry.slice(LEADING_DOT_SEGMENT_CHARACTERS_COUNT); +exports.retryAfterStatusCodes = void 0; +exports.retryAfterStatusCodes = new Set([413, 429, 503]); +const calculateRetryDelay = ({ attemptCount, retryOptions, error, retryAfter }) => { + if (attemptCount > retryOptions.limit) { + return 0; + } + const hasMethod = retryOptions.methods.includes(error.options.method); + const hasErrorCode = retryOptions.errorCodes.includes(error.code); + const hasStatusCode = error.response && retryOptions.statusCodes.includes(error.response.statusCode); + if (!hasMethod || (!hasErrorCode && !hasStatusCode)) { + return 0; + } + if (error.response) { + if (retryAfter) { + if (retryOptions.maxRetryAfter === undefined || retryAfter > retryOptions.maxRetryAfter) { + return 0; + } + return retryAfter; + } + if (error.response.statusCode === 413) { + return 0; } } - return entry; -} -exports.removeLeadingDotSegment = removeLeadingDotSegment; + const noise = Math.random() * 100; + return ((2 ** (attemptCount - 1)) * 1000) + noise; +}; +exports["default"] = calculateRetryDelay; /***/ }), -/***/ 1221: +/***/ 94: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.matchAny = exports.convertPatternsToRe = exports.makeRe = exports.getPatternParts = exports.expandBraceExpansion = exports.expandPatternsWithBraceExpansion = exports.isAffectDepthOfReadingPattern = exports.endsWithSlashGlobStar = exports.hasGlobStar = exports.getBaseDirectory = exports.getPositivePatterns = exports.getNegativePatterns = exports.isPositivePattern = exports.isNegativePattern = exports.convertToNegativePattern = exports.convertToPositivePattern = exports.isDynamicPattern = exports.isStaticPattern = void 0; -const path = __nccwpck_require__(5622); -const globParent = __nccwpck_require__(4655); -const micromatch = __nccwpck_require__(3913); -const picomatch = __nccwpck_require__(8569); -const GLOBSTAR = '**'; -const ESCAPE_SYMBOL = '\\'; -const COMMON_GLOB_SYMBOLS_RE = /[*?]|^!/; -const REGEX_CHARACTER_CLASS_SYMBOLS_RE = /\[.*]/; -const REGEX_GROUP_SYMBOLS_RE = /(?:^|[^!*+?@])\(.*\|.*\)/; -const GLOB_EXTENSION_SYMBOLS_RE = /[!*+?@]\(.*\)/; -const BRACE_EXPANSIONS_SYMBOLS_RE = /{.*(?:,|\.\.).*}/; -function isStaticPattern(pattern, options = {}) { - return !isDynamicPattern(pattern, options); -} -exports.isStaticPattern = isStaticPattern; -function isDynamicPattern(pattern, options = {}) { - /** - * A special case with an empty string is necessary for matching patterns that start with a forward slash. - * An empty string cannot be a dynamic pattern. - * For example, the pattern `/lib/*` will be spread into parts: '', 'lib', '*'. - */ - if (pattern === '') { - return false; - } - /** - * When the `caseSensitiveMatch` option is disabled, all patterns must be marked as dynamic, because we cannot check - * filepath directly (without read directory). 
- */ - if (options.caseSensitiveMatch === false || pattern.includes(ESCAPE_SYMBOL)) { - return true; - } - if (COMMON_GLOB_SYMBOLS_RE.test(pattern) || REGEX_CHARACTER_CLASS_SYMBOLS_RE.test(pattern) || REGEX_GROUP_SYMBOLS_RE.test(pattern)) { - return true; - } - if (options.extglob !== false && GLOB_EXTENSION_SYMBOLS_RE.test(pattern)) { - return true; - } - if (options.braceExpansion !== false && BRACE_EXPANSIONS_SYMBOLS_RE.test(pattern)) { - return true; +exports.UnsupportedProtocolError = exports.ReadError = exports.TimeoutError = exports.UploadError = exports.CacheError = exports.HTTPError = exports.MaxRedirectsError = exports.RequestError = exports.setNonEnumerableProperties = exports.knownHookEvents = exports.withoutBody = exports.kIsNormalizedAlready = void 0; +const util_1 = __nccwpck_require__(3837); +const stream_1 = __nccwpck_require__(2781); +const fs_1 = __nccwpck_require__(7147); +const url_1 = __nccwpck_require__(7310); +const http = __nccwpck_require__(3685); +const http_1 = __nccwpck_require__(3685); +const https = __nccwpck_require__(5687); +const http_timer_1 = __nccwpck_require__(8097); +const cacheable_lookup_1 = __nccwpck_require__(8367); +const CacheableRequest = __nccwpck_require__(8116); +const decompressResponse = __nccwpck_require__(2391); +// @ts-expect-error Missing types +const http2wrapper = __nccwpck_require__(4645); +const lowercaseKeys = __nccwpck_require__(9662); +const is_1 = __nccwpck_require__(7678); +const get_body_size_1 = __nccwpck_require__(4564); +const is_form_data_1 = __nccwpck_require__(40); +const proxy_events_1 = __nccwpck_require__(3021); +const timed_out_1 = __nccwpck_require__(2454); +const url_to_options_1 = __nccwpck_require__(8026); +const options_to_url_1 = __nccwpck_require__(9219); +const weakable_map_1 = __nccwpck_require__(7288); +const get_buffer_1 = __nccwpck_require__(4500); +const dns_ip_version_1 = __nccwpck_require__(4993); +const is_response_ok_1 = __nccwpck_require__(9298); +const deprecation_warning_1 = __nccwpck_require__(397); +const normalize_arguments_1 = __nccwpck_require__(1048); +const calculate_retry_delay_1 = __nccwpck_require__(3462); +let globalDnsCache; +const kRequest = Symbol('request'); +const kResponse = Symbol('response'); +const kResponseSize = Symbol('responseSize'); +const kDownloadedSize = Symbol('downloadedSize'); +const kBodySize = Symbol('bodySize'); +const kUploadedSize = Symbol('uploadedSize'); +const kServerResponsesPiped = Symbol('serverResponsesPiped'); +const kUnproxyEvents = Symbol('unproxyEvents'); +const kIsFromCache = Symbol('isFromCache'); +const kCancelTimeouts = Symbol('cancelTimeouts'); +const kStartedReading = Symbol('startedReading'); +const kStopReading = Symbol('stopReading'); +const kTriggerRead = Symbol('triggerRead'); +const kBody = Symbol('body'); +const kJobs = Symbol('jobs'); +const kOriginalResponse = Symbol('originalResponse'); +const kRetryTimeout = Symbol('retryTimeout'); +exports.kIsNormalizedAlready = Symbol('isNormalizedAlready'); +const supportsBrotli = is_1.default.string(process.versions.brotli); +exports.withoutBody = new Set(['GET', 'HEAD']); +exports.knownHookEvents = [ + 'init', + 'beforeRequest', + 'beforeRedirect', + 'beforeError', + 'beforeRetry', + // Promise-Only + 'afterResponse' +]; +function validateSearchParameters(searchParameters) { + // eslint-disable-next-line guard-for-in + for (const key in searchParameters) { + const value = searchParameters[key]; + if (!is_1.default.string(value) && !is_1.default.number(value) && !is_1.default.boolean(value) && 
!is_1.default.null_(value) && !is_1.default.undefined(value)) { + throw new TypeError(`The \`searchParams\` value '${String(value)}' must be a string, number, boolean or null`); + } } - return false; -} -exports.isDynamicPattern = isDynamicPattern; -function convertToPositivePattern(pattern) { - return isNegativePattern(pattern) ? pattern.slice(1) : pattern; -} -exports.convertToPositivePattern = convertToPositivePattern; -function convertToNegativePattern(pattern) { - return '!' + pattern; -} -exports.convertToNegativePattern = convertToNegativePattern; -function isNegativePattern(pattern) { - return pattern.startsWith('!') && pattern[1] !== '('; -} -exports.isNegativePattern = isNegativePattern; -function isPositivePattern(pattern) { - return !isNegativePattern(pattern); -} -exports.isPositivePattern = isPositivePattern; -function getNegativePatterns(patterns) { - return patterns.filter(isNegativePattern); -} -exports.getNegativePatterns = getNegativePatterns; -function getPositivePatterns(patterns) { - return patterns.filter(isPositivePattern); -} -exports.getPositivePatterns = getPositivePatterns; -function getBaseDirectory(pattern) { - return globParent(pattern, { flipBackslashes: false }); -} -exports.getBaseDirectory = getBaseDirectory; -function hasGlobStar(pattern) { - return pattern.includes(GLOBSTAR); -} -exports.hasGlobStar = hasGlobStar; -function endsWithSlashGlobStar(pattern) { - return pattern.endsWith('/' + GLOBSTAR); -} -exports.endsWithSlashGlobStar = endsWithSlashGlobStar; -function isAffectDepthOfReadingPattern(pattern) { - const basename = path.basename(pattern); - return endsWithSlashGlobStar(pattern) || isStaticPattern(basename); } -exports.isAffectDepthOfReadingPattern = isAffectDepthOfReadingPattern; -function expandPatternsWithBraceExpansion(patterns) { - return patterns.reduce((collection, pattern) => { - return collection.concat(expandBraceExpansion(pattern)); - }, []); +function isClientRequest(clientRequest) { + return is_1.default.object(clientRequest) && !('statusCode' in clientRequest); } -exports.expandPatternsWithBraceExpansion = expandPatternsWithBraceExpansion; -function expandBraceExpansion(pattern) { - return micromatch.braces(pattern, { - expand: true, - nodupes: true +const cacheableStore = new weakable_map_1.default(); +const waitForOpenFile = async (file) => new Promise((resolve, reject) => { + const onError = (error) => { + reject(error); + }; + // Node.js 12 has incomplete types + if (!file.pending) { + resolve(); + } + file.once('error', onError); + file.once('ready', () => { + file.off('error', onError); + resolve(); }); -} -exports.expandBraceExpansion = expandBraceExpansion; -function getPatternParts(pattern, options) { - let { parts } = picomatch.scan(pattern, Object.assign(Object.assign({}, options), { parts: true })); - /** - * The scan method returns an empty array in some cases. - * See micromatch/picomatch#58 for more details. 
- */ - if (parts.length === 0) { - parts = [pattern]; +}); +const redirectCodes = new Set([300, 301, 302, 303, 304, 307, 308]); +const nonEnumerableProperties = [ + 'context', + 'body', + 'json', + 'form' +]; +exports.setNonEnumerableProperties = (sources, to) => { + // Non enumerable properties shall not be merged + const properties = {}; + for (const source of sources) { + if (!source) { + continue; + } + for (const name of nonEnumerableProperties) { + if (!(name in source)) { + continue; + } + properties[name] = { + writable: true, + configurable: true, + enumerable: false, + // @ts-expect-error TS doesn't see the check above + value: source[name] + }; + } } - /** - * The scan method does not return an empty part for the pattern with a forward slash. - * This is another part of micromatch/picomatch#58. - */ - if (parts[0].startsWith('/')) { - parts[0] = parts[0].slice(1); - parts.unshift(''); + Object.defineProperties(to, properties); +}; +/** +An error to be thrown when a request fails. +Contains a `code` property with error class code, like `ECONNREFUSED`. +*/ +class RequestError extends Error { + constructor(message, error, self) { + var _a, _b; + super(message); + Error.captureStackTrace(this, this.constructor); + this.name = 'RequestError'; + this.code = (_a = error.code) !== null && _a !== void 0 ? _a : 'ERR_GOT_REQUEST_ERROR'; + if (self instanceof Request) { + Object.defineProperty(this, 'request', { + enumerable: false, + value: self + }); + Object.defineProperty(this, 'response', { + enumerable: false, + value: self[kResponse] + }); + Object.defineProperty(this, 'options', { + // This fails because of TS 3.7.2 useDefineForClassFields + // Ref: https://github.com/microsoft/TypeScript/issues/34972 + enumerable: false, + value: self.options + }); + } + else { + Object.defineProperty(this, 'options', { + // This fails because of TS 3.7.2 useDefineForClassFields + // Ref: https://github.com/microsoft/TypeScript/issues/34972 + enumerable: false, + value: self + }); + } + this.timings = (_b = this.request) === null || _b === void 0 ? void 0 : _b.timings; + // Recover the original stacktrace + if (is_1.default.string(error.stack) && is_1.default.string(this.stack)) { + const indexOfMessage = this.stack.indexOf(this.message) + this.message.length; + const thisStackTrace = this.stack.slice(indexOfMessage).split('\n').reverse(); + const errorStackTrace = error.stack.slice(error.stack.indexOf(error.message) + error.message.length).split('\n').reverse(); + // Remove duplicated traces + while (errorStackTrace.length !== 0 && errorStackTrace[0] === thisStackTrace[0]) { + thisStackTrace.shift(); + } + this.stack = `${this.stack.slice(0, indexOfMessage)}${thisStackTrace.reverse().join('\n')}${errorStackTrace.reverse().join('\n')}`; + } } - return parts; } -exports.getPatternParts = getPatternParts; -function makeRe(pattern, options) { - return micromatch.makeRe(pattern, options); +exports.RequestError = RequestError; +/** +An error to be thrown when the server redirects you more than ten times. +Includes a `response` property. +*/ +class MaxRedirectsError extends RequestError { + constructor(request) { + super(`Redirected ${request.options.maxRedirects} times. 
Aborting.`, {}, request); + this.name = 'MaxRedirectsError'; + this.code = 'ERR_TOO_MANY_REDIRECTS'; + } } -exports.makeRe = makeRe; -function convertPatternsToRe(patterns, options) { - return patterns.map((pattern) => makeRe(pattern, options)); +exports.MaxRedirectsError = MaxRedirectsError; +/** +An error to be thrown when the server response code is not 2xx nor 3xx if `options.followRedirect` is `true`, but always except for 304. +Includes a `response` property. +*/ +class HTTPError extends RequestError { + constructor(response) { + super(`Response code ${response.statusCode} (${response.statusMessage})`, {}, response.request); + this.name = 'HTTPError'; + this.code = 'ERR_NON_2XX_3XX_RESPONSE'; + } } -exports.convertPatternsToRe = convertPatternsToRe; -function matchAny(entry, patternsRe) { - return patternsRe.some((patternRe) => patternRe.test(entry)); +exports.HTTPError = HTTPError; +/** +An error to be thrown when a cache method fails. +For example, if the database goes down or there's a filesystem error. +*/ +class CacheError extends RequestError { + constructor(error, request) { + super(error.message, error, request); + this.name = 'CacheError'; + this.code = this.code === 'ERR_GOT_REQUEST_ERROR' ? 'ERR_CACHE_ACCESS' : this.code; + } } -exports.matchAny = matchAny; - - -/***/ }), - -/***/ 8382: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.merge = void 0; -const merge2 = __nccwpck_require__(2578); -function merge(streams) { - const mergedStream = merge2(streams); - streams.forEach((stream) => { - stream.once('error', (error) => mergedStream.emit('error', error)); - }); - mergedStream.once('close', () => propagateCloseEventToSources(streams)); - mergedStream.once('end', () => propagateCloseEventToSources(streams)); - return mergedStream; +exports.CacheError = CacheError; +/** +An error to be thrown when the request body is a stream and an error occurs while reading from that stream. +*/ +class UploadError extends RequestError { + constructor(error, request) { + super(error.message, error, request); + this.name = 'UploadError'; + this.code = this.code === 'ERR_GOT_REQUEST_ERROR' ? 'ERR_UPLOAD' : this.code; + } } -exports.merge = merge; -function propagateCloseEventToSources(streams) { - streams.forEach((stream) => stream.emit('close')); +exports.UploadError = UploadError; +/** +An error to be thrown when the request is aborted due to a timeout. +Includes an `event` and `timings` property. +*/ +class TimeoutError extends RequestError { + constructor(error, timings, request) { + super(error.message, error, request); + this.name = 'TimeoutError'; + this.event = error.event; + this.timings = timings; + } } - - -/***/ }), - -/***/ 2203: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.isEmpty = exports.isString = void 0; -function isString(input) { - return typeof input === 'string'; +exports.TimeoutError = TimeoutError; +/** +An error to be thrown when reading from response stream fails. +*/ +class ReadError extends RequestError { + constructor(error, request) { + super(error.message, error, request); + this.name = 'ReadError'; + this.code = this.code === 'ERR_GOT_REQUEST_ERROR' ? 
'ERR_READING_RESPONSE_STREAM' : this.code; + } } -exports.isString = isString; -function isEmpty(input) { - return input === ''; +exports.ReadError = ReadError; +/** +An error to be thrown when given an unsupported protocol. +*/ +class UnsupportedProtocolError extends RequestError { + constructor(options) { + super(`Unsupported protocol "${options.url.protocol}"`, {}, options); + this.name = 'UnsupportedProtocolError'; + this.code = 'ERR_UNSUPPORTED_PROTOCOL'; + } } -exports.isEmpty = isEmpty; - - -/***/ }), - -/***/ 7340: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -var reusify = __nccwpck_require__(2113) - -function fastqueue (context, worker, concurrency) { - if (typeof context === 'function') { - concurrency = worker - worker = context - context = null - } - - if (concurrency < 1) { - throw new Error('fastqueue concurrency must be greater than 1') - } - - var cache = reusify(Task) - var queueHead = null - var queueTail = null - var _running = 0 - var errorHandler = null - - var self = { - push: push, - drain: noop, - saturated: noop, - pause: pause, - paused: false, - concurrency: concurrency, - running: running, - resume: resume, - idle: idle, - length: length, - getQueue: getQueue, - unshift: unshift, - empty: noop, - kill: kill, - killAndDrain: killAndDrain, - error: error - } - - return self - - function running () { - return _running - } - - function pause () { - self.paused = true - } - - function length () { - var current = queueHead - var counter = 0 - - while (current) { - current = current.next - counter++ +exports.UnsupportedProtocolError = UnsupportedProtocolError; +const proxiedRequestEvents = [ + 'socket', + 'connect', + 'continue', + 'information', + 'upgrade', + 'timeout' +]; +class Request extends stream_1.Duplex { + constructor(url, options = {}, defaults) { + super({ + // This must be false, to enable throwing after destroy + // It is used for retry logic in Promise API + autoDestroy: false, + // It needs to be zero because we're just proxying the data to another stream + highWaterMark: 0 + }); + this[kDownloadedSize] = 0; + this[kUploadedSize] = 0; + this.requestInitialized = false; + this[kServerResponsesPiped] = new Set(); + this.redirects = []; + this[kStopReading] = false; + this[kTriggerRead] = false; + this[kJobs] = []; + this.retryCount = 0; + // TODO: Remove this when targeting Node.js >= 12 + this._progressCallbacks = []; + const unlockWrite = () => this._unlockWrite(); + const lockWrite = () => this._lockWrite(); + this.on('pipe', (source) => { + source.prependListener('data', unlockWrite); + source.on('data', lockWrite); + source.prependListener('end', unlockWrite); + source.on('end', lockWrite); + }); + this.on('unpipe', (source) => { + source.off('data', unlockWrite); + source.off('data', lockWrite); + source.off('end', unlockWrite); + source.off('end', lockWrite); + }); + this.on('pipe', source => { + if (source instanceof http_1.IncomingMessage) { + this.options.headers = { + ...source.headers, + ...this.options.headers + }; + } + }); + const { json, body, form } = options; + if (json || body || form) { + this._lockWrite(); + } + if (exports.kIsNormalizedAlready in options) { + this.options = options; + } + else { + try { + // @ts-expect-error Common TypeScript bug saying that `this.constructor` is not accessible + this.options = this.constructor.normalizeArguments(url, options, defaults); + } + catch (error) { + // TODO: Move this to `_destroy()` + if (is_1.default.nodeStream(options.body)) { + 
options.body.destroy(); + } + this.destroy(error); + return; + } + } + (async () => { + var _a; + try { + if (this.options.body instanceof fs_1.ReadStream) { + await waitForOpenFile(this.options.body); + } + const { url: normalizedURL } = this.options; + if (!normalizedURL) { + throw new TypeError('Missing `url` property'); + } + this.requestUrl = normalizedURL.toString(); + decodeURI(this.requestUrl); + await this._finalizeBody(); + await this._makeRequest(); + if (this.destroyed) { + (_a = this[kRequest]) === null || _a === void 0 ? void 0 : _a.destroy(); + return; + } + // Queued writes etc. + for (const job of this[kJobs]) { + job(); + } + // Prevent memory leak + this[kJobs].length = 0; + this.requestInitialized = true; + } + catch (error) { + if (error instanceof RequestError) { + this._beforeError(error); + return; + } + // This is a workaround for https://github.com/nodejs/node/issues/33335 + if (!this.destroyed) { + this.destroy(error); + } + } + })(); } - - return counter - } - - function getQueue () { - var current = queueHead - var tasks = [] - - while (current) { - tasks.push(current.value) - current = current.next - } - - return tasks - } - - function resume () { - if (!self.paused) return - self.paused = false - for (var i = 0; i < self.concurrency; i++) { - _running++ - release() - } - } - - function idle () { - return _running === 0 && self.length() === 0 - } - - function push (value, done) { - var current = cache.get() - - current.context = context - current.release = release - current.value = value - current.callback = done || noop - current.errorHandler = errorHandler - - if (_running === self.concurrency || self.paused) { - if (queueTail) { - queueTail.next = current - queueTail = current - } else { - queueHead = current - queueTail = current - self.saturated() - } - } else { - _running++ - worker.call(context, current.value, current.worked) - } - } - - function unshift (value, done) { - var current = cache.get() - - current.context = context - current.release = release - current.value = value - current.callback = done || noop - - if (_running === self.concurrency || self.paused) { - if (queueHead) { - current.next = queueHead - queueHead = current - } else { - queueHead = current - queueTail = current - self.saturated() - } - } else { - _running++ - worker.call(context, current.value, current.worked) - } - } - - function release (holder) { - if (holder) { - cache.release(holder) - } - var next = queueHead - if (next) { - if (!self.paused) { - if (queueTail === queueHead) { - queueTail = null - } - queueHead = next.next - next.next = null - worker.call(context, next.value, next.worked) - if (queueTail === null) { - self.empty() + static normalizeArguments(url, options, defaults) { + var _a, _b, _c, _d, _e; + const rawOptions = options; + if (is_1.default.object(url) && !is_1.default.urlInstance(url)) { + options = { ...defaults, ...url, ...options }; } - } else { - _running-- - } - } else if (--_running === 0) { - self.drain() - } - } - - function kill () { - queueHead = null - queueTail = null - self.drain = noop - } - - function killAndDrain () { - queueHead = null - queueTail = null - self.drain() - self.drain = noop - } - - function error (handler) { - errorHandler = handler - } -} - -function noop () {} - -function Task () { - this.value = null - this.callback = noop - this.next = null - this.release = noop - this.context = null - this.errorHandler = null - - var self = this - - this.worked = function worked (err, result) { - var callback = self.callback - var 
errorHandler = self.errorHandler - var val = self.value - self.value = null - self.callback = noop - if (self.errorHandler) { - errorHandler(err, val) - } - callback.call(self.context, err, result) - self.release(self) - } -} - -module.exports = fastqueue - - -/***/ }), - -/***/ 4655: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -var isGlob = __nccwpck_require__(4466); -var pathPosixDirname = __nccwpck_require__(5622).posix.dirname; -var isWin32 = __nccwpck_require__(2087).platform() === 'win32'; - -var slash = '/'; -var backslash = /\\/g; -var enclosure = /[\{\[].*[\/]*.*[\}\]]$/; -var globby = /(^|[^\\])([\{\[]|\([^\)]+$)/; -var escaped = /\\([\!\*\?\|\[\]\(\)\{\}])/g; - -/** - * @param {string} str - * @param {Object} opts - * @param {boolean} [opts.flipBackslashes=true] - */ -module.exports = function globParent(str, opts) { - var options = Object.assign({ flipBackslashes: true }, opts); - - // flip windows path separators - if (options.flipBackslashes && isWin32 && str.indexOf(slash) < 0) { - str = str.replace(backslash, slash); - } - - // special case for strings ending in enclosure containing path separator - if (enclosure.test(str)) { - str += slash; - } - - // preserves full path in case of trailing path separator - str += 'a'; - - // remove path parts that are globby - do { - str = pathPosixDirname(str); - } while (isGlob(str) || globby.test(str)); - - // remove escape chars and return result - return str.replace(escaped, '$1'); -}; - - -/***/ }), - -/***/ 6457: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const types_1 = __nccwpck_require__(4597); -function createRejection(error, ...beforeErrorGroups) { - const promise = (async () => { - if (error instanceof types_1.RequestError) { - try { - for (const hooks of beforeErrorGroups) { - if (hooks) { - for (const hook of hooks) { - // eslint-disable-next-line no-await-in-loop - error = await hook(error); - } - } - } + else { + if (url && options && options.url !== undefined) { + throw new TypeError('The `url` option is mutually exclusive with the `input` argument'); } - catch (error_) { - error = error_; + options = { ...defaults, ...options }; + if (url !== undefined) { + options.url = url; + } + if (is_1.default.urlInstance(options.url)) { + options.url = new url_1.URL(options.url.toString()); } } - throw error; - })(); - const returnPromise = () => promise; - promise.json = returnPromise; - promise.text = returnPromise; - promise.buffer = returnPromise; - promise.on = returnPromise; - return promise; -} -exports.default = createRejection; - - -/***/ }), - -/***/ 6056: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __exportStar = (this && this.__exportStar) || function(m, exports) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const events_1 = __nccwpck_require__(8614); -const is_1 = __nccwpck_require__(7678); -const PCancelable = __nccwpck_require__(9072); -const types_1 = __nccwpck_require__(4597); -const parse_body_1 = __nccwpck_require__(8220); -const core_1 = __nccwpck_require__(94); -const proxy_events_1 = __nccwpck_require__(3021); -const get_buffer_1 = __nccwpck_require__(4500); -const is_response_ok_1 = __nccwpck_require__(9298); -const proxiedRequestEvents = [ - 'request', - 'response', - 'redirect', - 'uploadProgress', - 'downloadProgress' -]; -function asPromise(normalizedOptions) { - let globalRequest; - let globalResponse; - const emitter = new events_1.EventEmitter(); - const promise = new PCancelable((resolve, reject, onCancel) => { - const makeRequest = (retryCount) => { - const request = new core_1.default(undefined, normalizedOptions); - request.retryCount = retryCount; - request._noPipe = true; - onCancel(() => request.destroy()); - onCancel.shouldReject = false; - onCancel(() => reject(new types_1.CancelError(request))); - globalRequest = request; - request.once('response', async (response) => { - var _a; - response.retryCount = retryCount; - if (response.request.aborted) { - // Canceled while downloading - will throw a `CancelError` or `TimeoutError` error - return; - } - // Download body - let rawBody; - try { - rawBody = await get_buffer_1.default(request); - response.rawBody = rawBody; - } - catch (_b) { - // The same error is caught below. - // See request.once('error') - return; - } - if (request._isAboutToError) { - return; - } - // Parse body - const contentEncoding = ((_a = response.headers['content-encoding']) !== null && _a !== void 0 ? _a : '').toLowerCase(); - const isCompressed = ['gzip', 'deflate', 'br'].includes(contentEncoding); - const { options } = request; - if (isCompressed && !options.decompress) { - response.body = rawBody; + // TODO: Deprecate URL options in Got 12. 
+ // Support extend-specific options + if (options.cache === false) { + options.cache = undefined; + } + if (options.dnsCache === false) { + options.dnsCache = undefined; + } + // Nice type assertions + is_1.assert.any([is_1.default.string, is_1.default.undefined], options.method); + is_1.assert.any([is_1.default.object, is_1.default.undefined], options.headers); + is_1.assert.any([is_1.default.string, is_1.default.urlInstance, is_1.default.undefined], options.prefixUrl); + is_1.assert.any([is_1.default.object, is_1.default.undefined], options.cookieJar); + is_1.assert.any([is_1.default.object, is_1.default.string, is_1.default.undefined], options.searchParams); + is_1.assert.any([is_1.default.object, is_1.default.string, is_1.default.undefined], options.cache); + is_1.assert.any([is_1.default.object, is_1.default.number, is_1.default.undefined], options.timeout); + is_1.assert.any([is_1.default.object, is_1.default.undefined], options.context); + is_1.assert.any([is_1.default.object, is_1.default.undefined], options.hooks); + is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.decompress); + is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.ignoreInvalidCookies); + is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.followRedirect); + is_1.assert.any([is_1.default.number, is_1.default.undefined], options.maxRedirects); + is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.throwHttpErrors); + is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.http2); + is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.allowGetBody); + is_1.assert.any([is_1.default.string, is_1.default.undefined], options.localAddress); + is_1.assert.any([dns_ip_version_1.isDnsLookupIpVersion, is_1.default.undefined], options.dnsLookupIpVersion); + is_1.assert.any([is_1.default.object, is_1.default.undefined], options.https); + is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.rejectUnauthorized); + if (options.https) { + is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.https.rejectUnauthorized); + is_1.assert.any([is_1.default.function_, is_1.default.undefined], options.https.checkServerIdentity); + is_1.assert.any([is_1.default.string, is_1.default.object, is_1.default.array, is_1.default.undefined], options.https.certificateAuthority); + is_1.assert.any([is_1.default.string, is_1.default.object, is_1.default.array, is_1.default.undefined], options.https.key); + is_1.assert.any([is_1.default.string, is_1.default.object, is_1.default.array, is_1.default.undefined], options.https.certificate); + is_1.assert.any([is_1.default.string, is_1.default.undefined], options.https.passphrase); + is_1.assert.any([is_1.default.string, is_1.default.buffer, is_1.default.array, is_1.default.undefined], options.https.pfx); + } + is_1.assert.any([is_1.default.object, is_1.default.undefined], options.cacheOptions); + // `options.method` + if (is_1.default.string(options.method)) { + options.method = options.method.toUpperCase(); + } + else { + options.method = 'GET'; + } + // `options.headers` + if (options.headers === (defaults === null || defaults === void 0 ? void 0 : defaults.headers)) { + options.headers = { ...options.headers }; + } + else { + options.headers = lowercaseKeys({ ...(defaults === null || defaults === void 0 ? 
void 0 : defaults.headers), ...options.headers }); + } + // Disallow legacy `url.Url` + if ('slashes' in options) { + throw new TypeError('The legacy `url.Url` has been deprecated. Use `URL` instead.'); + } + // `options.auth` + if ('auth' in options) { + throw new TypeError('Parameter `auth` is deprecated. Use `username` / `password` instead.'); + } + // `options.searchParams` + if ('searchParams' in options) { + if (options.searchParams && options.searchParams !== (defaults === null || defaults === void 0 ? void 0 : defaults.searchParams)) { + let searchParameters; + if (is_1.default.string(options.searchParams) || (options.searchParams instanceof url_1.URLSearchParams)) { + searchParameters = new url_1.URLSearchParams(options.searchParams); } else { - try { - response.body = parse_body_1.default(response, options.responseType, options.parseJson, options.encoding); - } - catch (error) { - // Fallback to `utf8` - response.body = rawBody.toString(); - if (is_response_ok_1.isResponseOk(response)) { - request._beforeError(error); - return; + validateSearchParameters(options.searchParams); + searchParameters = new url_1.URLSearchParams(); + // eslint-disable-next-line guard-for-in + for (const key in options.searchParams) { + const value = options.searchParams[key]; + if (value === null) { + searchParameters.append(key, ''); + } + else if (value !== undefined) { + searchParameters.append(key, value); } } } - try { - for (const [index, hook] of options.hooks.afterResponse.entries()) { - // @ts-expect-error TS doesn't notice that CancelableRequest is a Promise - // eslint-disable-next-line no-await-in-loop - response = await hook(response, async (updatedOptions) => { - const typedOptions = core_1.default.normalizeArguments(undefined, { - ...updatedOptions, - retry: { - calculateDelay: () => 0 - }, - throwHttpErrors: false, - resolveBodyOnly: false - }, options); - // Remove any further hooks for that request, because we'll call them anyway. - // The loop continues. We don't want duplicates (asPromise recursion). - typedOptions.hooks.afterResponse = typedOptions.hooks.afterResponse.slice(0, index); - for (const hook of typedOptions.hooks.beforeRetry) { - // eslint-disable-next-line no-await-in-loop - await hook(typedOptions); - } - const promise = asPromise(typedOptions); - onCancel(() => { - promise.catch(() => { }); - promise.cancel(); - }); - return promise; - }); + // `normalizeArguments()` is also used to merge options + (_a = defaults === null || defaults === void 0 ? void 0 : defaults.searchParams) === null || _a === void 0 ? void 0 : _a.forEach((value, key) => { + // Only use default if one isn't already defined + if (!searchParameters.has(key)) { + searchParameters.append(key, value); } - } - catch (error) { - request._beforeError(new types_1.RequestError(error.message, error, request)); - return; - } - if (!is_response_ok_1.isResponseOk(response)) { - request._beforeError(new types_1.HTTPError(response)); - return; - } - globalResponse = response; - resolve(request.options.resolveBodyOnly ? response.body : response); - }); - const onError = (error) => { - if (promise.isCanceled) { - return; - } - const { options } = request; - if (error instanceof types_1.HTTPError && !options.throwHttpErrors) { - const { response } = error; - resolve(request.options.resolveBodyOnly ? 
response.body : response); - return; - } - reject(error); - }; - request.once('error', onError); - const previousBody = request.options.body; - request.once('retry', (newRetryCount, error) => { - var _a, _b; - if (previousBody === ((_a = error.request) === null || _a === void 0 ? void 0 : _a.options.body) && is_1.default.nodeStream((_b = error.request) === null || _b === void 0 ? void 0 : _b.options.body)) { - onError(error); - return; - } - makeRequest(newRetryCount); - }); - proxy_events_1.default(request, emitter, proxiedRequestEvents); - }; - makeRequest(0); - }); - promise.on = (event, fn) => { - emitter.on(event, fn); - return promise; - }; - const shortcut = (responseType) => { - const newPromise = (async () => { - // Wait until downloading has ended - await promise; - const { options } = globalResponse.request; - return parse_body_1.default(globalResponse, responseType, options.parseJson, options.encoding); - })(); - Object.defineProperties(newPromise, Object.getOwnPropertyDescriptors(promise)); - return newPromise; - }; - promise.json = () => { - const { headers } = globalRequest.options; - if (!globalRequest.writableFinished && headers.accept === undefined) { - headers.accept = 'application/json'; + }); + options.searchParams = searchParameters; + } } - return shortcut('json'); - }; - promise.buffer = () => shortcut('buffer'); - promise.text = () => shortcut('text'); - return promise; -} -exports.default = asPromise; -__exportStar(__nccwpck_require__(4597), exports); - - -/***/ }), - -/***/ 1048: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const is_1 = __nccwpck_require__(7678); -const normalizeArguments = (options, defaults) => { - if (is_1.default.null_(options.encoding)) { - throw new TypeError('To get a Buffer, set `options.responseType` to `buffer` instead'); - } - is_1.assert.any([is_1.default.string, is_1.default.undefined], options.encoding); - is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.resolveBodyOnly); - is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.methodRewriting); - is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.isStream); - is_1.assert.any([is_1.default.string, is_1.default.undefined], options.responseType); - // `options.responseType` - if (options.responseType === undefined) { - options.responseType = 'text'; - } - // `options.retry` - const { retry } = options; - if (defaults) { - options.retry = { ...defaults.retry }; - } - else { - options.retry = { - calculateDelay: retryObject => retryObject.computedValue, - limit: 0, - methods: [], - statusCodes: [], - errorCodes: [], - maxRetryAfter: undefined - }; - } - if (is_1.default.object(retry)) { - options.retry = { - ...options.retry, - ...retry - }; - options.retry.methods = [...new Set(options.retry.methods.map(method => method.toUpperCase()))]; - options.retry.statusCodes = [...new Set(options.retry.statusCodes)]; - options.retry.errorCodes = [...new Set(options.retry.errorCodes)]; - } - else if (is_1.default.number(retry)) { - options.retry.limit = retry; - } - if (is_1.default.undefined(options.retry.maxRetryAfter)) { - options.retry.maxRetryAfter = Math.min( - // TypeScript is not smart enough to handle `.filter(x => is.number(x))`. 
- // eslint-disable-next-line unicorn/no-fn-reference-in-iterator - ...[options.timeout.request, options.timeout.connect].filter(is_1.default.number)); - } - // `options.pagination` - if (is_1.default.object(options.pagination)) { - if (defaults) { - options.pagination = { - ...defaults.pagination, - ...options.pagination - }; - } - const { pagination } = options; - if (!is_1.default.function_(pagination.transform)) { - throw new Error('`options.pagination.transform` must be implemented'); - } - if (!is_1.default.function_(pagination.shouldContinue)) { - throw new Error('`options.pagination.shouldContinue` must be implemented'); + // `options.username` & `options.password` + options.username = (_b = options.username) !== null && _b !== void 0 ? _b : ''; + options.password = (_c = options.password) !== null && _c !== void 0 ? _c : ''; + // `options.prefixUrl` & `options.url` + if (is_1.default.undefined(options.prefixUrl)) { + options.prefixUrl = (_d = defaults === null || defaults === void 0 ? void 0 : defaults.prefixUrl) !== null && _d !== void 0 ? _d : ''; } - if (!is_1.default.function_(pagination.filter)) { - throw new TypeError('`options.pagination.filter` must be implemented'); + else { + options.prefixUrl = options.prefixUrl.toString(); + if (options.prefixUrl !== '' && !options.prefixUrl.endsWith('/')) { + options.prefixUrl += '/'; + } } - if (!is_1.default.function_(pagination.paginate)) { - throw new Error('`options.pagination.paginate` must be implemented'); + if (is_1.default.string(options.url)) { + if (options.url.startsWith('/')) { + throw new Error('`input` must not start with a slash when using `prefixUrl`'); + } + options.url = options_to_url_1.default(options.prefixUrl + options.url, options); } - } - // JSON mode - if (options.responseType === 'json' && options.headers.accept === undefined) { - options.headers.accept = 'application/json'; - } - return options; -}; -exports.default = normalizeArguments; - - -/***/ }), - -/***/ 8220: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const types_1 = __nccwpck_require__(4597); -const parseBody = (response, responseType, parseJson, encoding) => { - const { rawBody } = response; - try { - if (responseType === 'text') { - return rawBody.toString(encoding); + else if ((is_1.default.undefined(options.url) && options.prefixUrl !== '') || options.protocol) { + options.url = options_to_url_1.default(options.prefixUrl, options); } - if (responseType === 'json') { - return rawBody.length === 0 ? 
'' : parseJson(rawBody.toString()); + if (options.url) { + if ('port' in options) { + delete options.port; + } + // Make it possible to change `options.prefixUrl` + let { prefixUrl } = options; + Object.defineProperty(options, 'prefixUrl', { + set: (value) => { + const url = options.url; + if (!url.href.startsWith(value)) { + throw new Error(`Cannot change \`prefixUrl\` from ${prefixUrl} to ${value}: ${url.href}`); + } + options.url = new url_1.URL(value + url.href.slice(prefixUrl.length)); + prefixUrl = value; + }, + get: () => prefixUrl + }); + // Support UNIX sockets + let { protocol } = options.url; + if (protocol === 'unix:') { + protocol = 'http:'; + options.url = new url_1.URL(`http://unix${options.url.pathname}${options.url.search}`); + } + // Set search params + if (options.searchParams) { + // eslint-disable-next-line @typescript-eslint/no-base-to-string + options.url.search = options.searchParams.toString(); + } + // Protocol check + if (protocol !== 'http:' && protocol !== 'https:') { + throw new UnsupportedProtocolError(options); + } + // Update `username` + if (options.username === '') { + options.username = options.url.username; + } + else { + options.url.username = options.username; + } + // Update `password` + if (options.password === '') { + options.password = options.url.password; + } + else { + options.url.password = options.password; + } } - if (responseType === 'buffer') { - return rawBody; + // `options.cookieJar` + const { cookieJar } = options; + if (cookieJar) { + let { setCookie, getCookieString } = cookieJar; + is_1.assert.function_(setCookie); + is_1.assert.function_(getCookieString); + /* istanbul ignore next: Horrible `tough-cookie` v3 check */ + if (setCookie.length === 4 && getCookieString.length === 0) { + setCookie = util_1.promisify(setCookie.bind(options.cookieJar)); + getCookieString = util_1.promisify(getCookieString.bind(options.cookieJar)); + options.cookieJar = { + setCookie, + getCookieString: getCookieString + }; + } } - throw new types_1.ParseError({ - message: `Unknown body type '${responseType}'`, - name: 'Error' - }, response); - } - catch (error) { - throw new types_1.ParseError(error, response); - } -}; -exports.default = parseBody; - - -/***/ }), - -/***/ 4597: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __exportStar = (this && this.__exportStar) || function(m, exports) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.CancelError = exports.ParseError = void 0; -const core_1 = __nccwpck_require__(94); -/** -An error to be thrown when server response code is 2xx, and parsing body fails. -Includes a `response` property. -*/ -class ParseError extends core_1.RequestError { - constructor(error, response) { - const { options } = response.request; - super(`${error.message} in "${options.url.toString()}"`, error, response.request); - this.name = 'ParseError'; - } -} -exports.ParseError = ParseError; -/** -An error to be thrown when the request is aborted with `.cancel()`. 
-*/ -class CancelError extends core_1.RequestError { - constructor(request) { - super('Promise was canceled', {}, request); - this.name = 'CancelError'; - } - get isCanceled() { - return true; - } -} -exports.CancelError = CancelError; -__exportStar(__nccwpck_require__(94), exports); - - -/***/ }), - -/***/ 3462: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.retryAfterStatusCodes = void 0; -exports.retryAfterStatusCodes = new Set([413, 429, 503]); -const calculateRetryDelay = ({ attemptCount, retryOptions, error, retryAfter }) => { - if (attemptCount > retryOptions.limit) { - return 0; - } - const hasMethod = retryOptions.methods.includes(error.options.method); - const hasErrorCode = retryOptions.errorCodes.includes(error.code); - const hasStatusCode = error.response && retryOptions.statusCodes.includes(error.response.statusCode); - if (!hasMethod || (!hasErrorCode && !hasStatusCode)) { - return 0; - } - if (error.response) { - if (retryAfter) { - if (retryOptions.maxRetryAfter === undefined || retryAfter > retryOptions.maxRetryAfter) { - return 0; + // `options.cache` + const { cache } = options; + if (cache) { + if (!cacheableStore.has(cache)) { + cacheableStore.set(cache, new CacheableRequest(((requestOptions, handler) => { + const result = requestOptions[kRequest](requestOptions, handler); + // TODO: remove this when `cacheable-request` supports async request functions. + if (is_1.default.promise(result)) { + // @ts-expect-error + // We only need to implement the error handler in order to support HTTP2 caching. + // The result will be a promise anyway. + result.once = (event, handler) => { + if (event === 'error') { + result.catch(handler); + } + else if (event === 'abort') { + // The empty catch is needed here in case when + // it rejects before it's `await`ed in `_makeRequest`. 
+ (async () => { + try { + const request = (await result); + request.once('abort', handler); + } + catch (_a) { } + })(); + } + else { + /* istanbul ignore next: safety check */ + throw new Error(`Unknown HTTP2 promise event: ${event}`); + } + return result; + }; + } + return result; + }), cache)); } - return retryAfter; } - if (error.response.statusCode === 413) { - return 0; + // `options.cacheOptions` + options.cacheOptions = { ...options.cacheOptions }; + // `options.dnsCache` + if (options.dnsCache === true) { + if (!globalDnsCache) { + globalDnsCache = new cacheable_lookup_1.default(); + } + options.dnsCache = globalDnsCache; } - } - const noise = Math.random() * 100; - return ((2 ** (attemptCount - 1)) * 1000) + noise; -}; -exports.default = calculateRetryDelay; - - -/***/ }), - -/***/ 94: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.UnsupportedProtocolError = exports.ReadError = exports.TimeoutError = exports.UploadError = exports.CacheError = exports.HTTPError = exports.MaxRedirectsError = exports.RequestError = exports.setNonEnumerableProperties = exports.knownHookEvents = exports.withoutBody = exports.kIsNormalizedAlready = void 0; -const util_1 = __nccwpck_require__(1669); -const stream_1 = __nccwpck_require__(2413); -const fs_1 = __nccwpck_require__(5747); -const url_1 = __nccwpck_require__(8835); -const http = __nccwpck_require__(8605); -const http_1 = __nccwpck_require__(8605); -const https = __nccwpck_require__(7211); -const http_timer_1 = __nccwpck_require__(8097); -const cacheable_lookup_1 = __nccwpck_require__(2286); -const CacheableRequest = __nccwpck_require__(8116); -const decompressResponse = __nccwpck_require__(2391); -// @ts-expect-error Missing types -const http2wrapper = __nccwpck_require__(4645); -const lowercaseKeys = __nccwpck_require__(9662); -const is_1 = __nccwpck_require__(7678); -const get_body_size_1 = __nccwpck_require__(4564); -const is_form_data_1 = __nccwpck_require__(40); -const proxy_events_1 = __nccwpck_require__(3021); -const timed_out_1 = __nccwpck_require__(2454); -const url_to_options_1 = __nccwpck_require__(8026); -const options_to_url_1 = __nccwpck_require__(9219); -const weakable_map_1 = __nccwpck_require__(7288); -const get_buffer_1 = __nccwpck_require__(4500); -const dns_ip_version_1 = __nccwpck_require__(4993); -const is_response_ok_1 = __nccwpck_require__(9298); -const deprecation_warning_1 = __nccwpck_require__(397); -const normalize_arguments_1 = __nccwpck_require__(1048); -const calculate_retry_delay_1 = __nccwpck_require__(3462); -let globalDnsCache; -const kRequest = Symbol('request'); -const kResponse = Symbol('response'); -const kResponseSize = Symbol('responseSize'); -const kDownloadedSize = Symbol('downloadedSize'); -const kBodySize = Symbol('bodySize'); -const kUploadedSize = Symbol('uploadedSize'); -const kServerResponsesPiped = Symbol('serverResponsesPiped'); -const kUnproxyEvents = Symbol('unproxyEvents'); -const kIsFromCache = Symbol('isFromCache'); -const kCancelTimeouts = Symbol('cancelTimeouts'); -const kStartedReading = Symbol('startedReading'); -const kStopReading = Symbol('stopReading'); -const kTriggerRead = Symbol('triggerRead'); -const kBody = Symbol('body'); -const kJobs = Symbol('jobs'); -const kOriginalResponse = Symbol('originalResponse'); -const kRetryTimeout = Symbol('retryTimeout'); -exports.kIsNormalizedAlready = Symbol('isNormalizedAlready'); -const supportsBrotli = 
is_1.default.string(process.versions.brotli); -exports.withoutBody = new Set(['GET', 'HEAD']); -exports.knownHookEvents = [ - 'init', - 'beforeRequest', - 'beforeRedirect', - 'beforeError', - 'beforeRetry', - // Promise-Only - 'afterResponse' -]; -function validateSearchParameters(searchParameters) { - // eslint-disable-next-line guard-for-in - for (const key in searchParameters) { - const value = searchParameters[key]; - if (!is_1.default.string(value) && !is_1.default.number(value) && !is_1.default.boolean(value) && !is_1.default.null_(value) && !is_1.default.undefined(value)) { - throw new TypeError(`The \`searchParams\` value '${String(value)}' must be a string, number, boolean or null`); + else if (!is_1.default.undefined(options.dnsCache) && !options.dnsCache.lookup) { + throw new TypeError(`Parameter \`dnsCache\` must be a CacheableLookup instance or a boolean, got ${is_1.default(options.dnsCache)}`); } - } -} -function isClientRequest(clientRequest) { - return is_1.default.object(clientRequest) && !('statusCode' in clientRequest); -} -const cacheableStore = new weakable_map_1.default(); -const waitForOpenFile = async (file) => new Promise((resolve, reject) => { - const onError = (error) => { - reject(error); - }; - // Node.js 12 has incomplete types - if (!file.pending) { - resolve(); - } - file.once('error', onError); - file.once('ready', () => { - file.off('error', onError); - resolve(); - }); -}); -const redirectCodes = new Set([300, 301, 302, 303, 304, 307, 308]); -const nonEnumerableProperties = [ - 'context', - 'body', - 'json', - 'form' -]; -exports.setNonEnumerableProperties = (sources, to) => { - // Non enumerable properties shall not be merged - const properties = {}; - for (const source of sources) { - if (!source) { - continue; + // `options.timeout` + if (is_1.default.number(options.timeout)) { + options.timeout = { request: options.timeout }; } - for (const name of nonEnumerableProperties) { - if (!(name in source)) { - continue; - } - properties[name] = { - writable: true, - configurable: true, - enumerable: false, - // @ts-expect-error TS doesn't see the check above - value: source[name] + else if (defaults && options.timeout !== defaults.timeout) { + options.timeout = { + ...defaults.timeout, + ...options.timeout }; } - } - Object.defineProperties(to, properties); -}; -/** -An error to be thrown when a request fails. -Contains a `code` property with error class code, like `ECONNREFUSED`. -*/ -class RequestError extends Error { - constructor(message, error, self) { - var _a; - super(message); - Error.captureStackTrace(this, this.constructor); - this.name = 'RequestError'; - this.code = error.code; - if (self instanceof Request) { - Object.defineProperty(this, 'request', { - enumerable: false, - value: self - }); - Object.defineProperty(this, 'response', { - enumerable: false, - value: self[kResponse] - }); - Object.defineProperty(this, 'options', { - // This fails because of TS 3.7.2 useDefineForClassFields - // Ref: https://github.com/microsoft/TypeScript/issues/34972 - enumerable: false, - value: self.options - }); - } else { - Object.defineProperty(this, 'options', { - // This fails because of TS 3.7.2 useDefineForClassFields - // Ref: https://github.com/microsoft/TypeScript/issues/34972 - enumerable: false, - value: self - }); + options.timeout = { ...options.timeout }; } - this.timings = (_a = this.request) === null || _a === void 0 ? 
void 0 : _a.timings; - // Recover the original stacktrace - if (is_1.default.string(error.stack) && is_1.default.string(this.stack)) { - const indexOfMessage = this.stack.indexOf(this.message) + this.message.length; - const thisStackTrace = this.stack.slice(indexOfMessage).split('\n').reverse(); - const errorStackTrace = error.stack.slice(error.stack.indexOf(error.message) + error.message.length).split('\n').reverse(); - // Remove duplicated traces - while (errorStackTrace.length !== 0 && errorStackTrace[0] === thisStackTrace[0]) { - thisStackTrace.shift(); - } - this.stack = `${this.stack.slice(0, indexOfMessage)}${thisStackTrace.reverse().join('\n')}${errorStackTrace.reverse().join('\n')}`; - } - } -} -exports.RequestError = RequestError; -/** -An error to be thrown when the server redirects you more than ten times. -Includes a `response` property. -*/ -class MaxRedirectsError extends RequestError { - constructor(request) { - super(`Redirected ${request.options.maxRedirects} times. Aborting.`, {}, request); - this.name = 'MaxRedirectsError'; - } -} -exports.MaxRedirectsError = MaxRedirectsError; -/** -An error to be thrown when the server response code is not 2xx nor 3xx if `options.followRedirect` is `true`, but always except for 304. -Includes a `response` property. -*/ -class HTTPError extends RequestError { - constructor(response) { - super(`Response code ${response.statusCode} (${response.statusMessage})`, {}, response.request); - this.name = 'HTTPError'; - } -} -exports.HTTPError = HTTPError; -/** -An error to be thrown when a cache method fails. -For example, if the database goes down or there's a filesystem error. -*/ -class CacheError extends RequestError { - constructor(error, request) { - super(error.message, error, request); - this.name = 'CacheError'; - } -} -exports.CacheError = CacheError; -/** -An error to be thrown when the request body is a stream and an error occurs while reading from that stream. -*/ -class UploadError extends RequestError { - constructor(error, request) { - super(error.message, error, request); - this.name = 'UploadError'; - } -} -exports.UploadError = UploadError; -/** -An error to be thrown when the request is aborted due to a timeout. -Includes an `event` and `timings` property. -*/ -class TimeoutError extends RequestError { - constructor(error, timings, request) { - super(error.message, error, request); - this.name = 'TimeoutError'; - this.event = error.event; - this.timings = timings; - } -} -exports.TimeoutError = TimeoutError; -/** -An error to be thrown when reading from response stream fails. -*/ -class ReadError extends RequestError { - constructor(error, request) { - super(error.message, error, request); - this.name = 'ReadError'; - } -} -exports.ReadError = ReadError; -/** -An error to be thrown when given an unsupported protocol. 
-*/ -class UnsupportedProtocolError extends RequestError { - constructor(options) { - super(`Unsupported protocol "${options.url.protocol}"`, {}, options); - this.name = 'UnsupportedProtocolError'; - } -} -exports.UnsupportedProtocolError = UnsupportedProtocolError; -const proxiedRequestEvents = [ - 'socket', - 'connect', - 'continue', - 'information', - 'upgrade', - 'timeout' -]; -class Request extends stream_1.Duplex { - constructor(url, options = {}, defaults) { - super({ - // This must be false, to enable throwing after destroy - // It is used for retry logic in Promise API - autoDestroy: false, - // It needs to be zero because we're just proxying the data to another stream - highWaterMark: 0 - }); - this[kDownloadedSize] = 0; - this[kUploadedSize] = 0; - this.requestInitialized = false; - this[kServerResponsesPiped] = new Set(); - this.redirects = []; - this[kStopReading] = false; - this[kTriggerRead] = false; - this[kJobs] = []; - this.retryCount = 0; - // TODO: Remove this when targeting Node.js >= 12 - this._progressCallbacks = []; - const unlockWrite = () => this._unlockWrite(); - const lockWrite = () => this._lockWrite(); - this.on('pipe', (source) => { - source.prependListener('data', unlockWrite); - source.on('data', lockWrite); - source.prependListener('end', unlockWrite); - source.on('end', lockWrite); - }); - this.on('unpipe', (source) => { - source.off('data', unlockWrite); - source.off('data', lockWrite); - source.off('end', unlockWrite); - source.off('end', lockWrite); - }); - this.on('pipe', source => { - if (source instanceof http_1.IncomingMessage) { - this.options.headers = { - ...source.headers, - ...this.options.headers - }; - } - }); - const { json, body, form } = options; - if (json || body || form) { - this._lockWrite(); - } - if (exports.kIsNormalizedAlready in options) { - this.options = options; - } - else { - try { - // @ts-expect-error Common TypeScript bug saying that `this.constructor` is not accessible - this.options = this.constructor.normalizeArguments(url, options, defaults); - } - catch (error) { - // TODO: Move this to `_destroy()` - if (is_1.default.nodeStream(options.body)) { - options.body.destroy(); - } - this.destroy(error); - return; - } - } - (async () => { - var _a; - try { - if (this.options.body instanceof fs_1.ReadStream) { - await waitForOpenFile(this.options.body); - } - const { url: normalizedURL } = this.options; - if (!normalizedURL) { - throw new TypeError('Missing `url` property'); - } - this.requestUrl = normalizedURL.toString(); - decodeURI(this.requestUrl); - await this._finalizeBody(); - await this._makeRequest(); - if (this.destroyed) { - (_a = this[kRequest]) === null || _a === void 0 ? void 0 : _a.destroy(); - return; - } - // Queued writes etc. 
- for (const job of this[kJobs]) { - job(); - } - // Prevent memory leak - this[kJobs].length = 0; - this.requestInitialized = true; - } - catch (error) { - if (error instanceof RequestError) { - this._beforeError(error); - return; - } - // This is a workaround for https://github.com/nodejs/node/issues/33335 - if (!this.destroyed) { - this.destroy(error); - } - } - })(); - } - static normalizeArguments(url, options, defaults) { - var _a, _b, _c, _d, _e; - const rawOptions = options; - if (is_1.default.object(url) && !is_1.default.urlInstance(url)) { - options = { ...defaults, ...url, ...options }; - } - else { - if (url && options && options.url !== undefined) { - throw new TypeError('The `url` option is mutually exclusive with the `input` argument'); - } - options = { ...defaults, ...options }; - if (url !== undefined) { - options.url = url; - } - if (is_1.default.urlInstance(options.url)) { - options.url = new url_1.URL(options.url.toString()); - } - } - // TODO: Deprecate URL options in Got 12. - // Support extend-specific options - if (options.cache === false) { - options.cache = undefined; - } - if (options.dnsCache === false) { - options.dnsCache = undefined; - } - // Nice type assertions - is_1.assert.any([is_1.default.string, is_1.default.undefined], options.method); - is_1.assert.any([is_1.default.object, is_1.default.undefined], options.headers); - is_1.assert.any([is_1.default.string, is_1.default.urlInstance, is_1.default.undefined], options.prefixUrl); - is_1.assert.any([is_1.default.object, is_1.default.undefined], options.cookieJar); - is_1.assert.any([is_1.default.object, is_1.default.string, is_1.default.undefined], options.searchParams); - is_1.assert.any([is_1.default.object, is_1.default.string, is_1.default.undefined], options.cache); - is_1.assert.any([is_1.default.object, is_1.default.number, is_1.default.undefined], options.timeout); - is_1.assert.any([is_1.default.object, is_1.default.undefined], options.context); - is_1.assert.any([is_1.default.object, is_1.default.undefined], options.hooks); - is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.decompress); - is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.ignoreInvalidCookies); - is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.followRedirect); - is_1.assert.any([is_1.default.number, is_1.default.undefined], options.maxRedirects); - is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.throwHttpErrors); - is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.http2); - is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.allowGetBody); - is_1.assert.any([is_1.default.string, is_1.default.undefined], options.localAddress); - is_1.assert.any([dns_ip_version_1.isDnsLookupIpVersion, is_1.default.undefined], options.dnsLookupIpVersion); - is_1.assert.any([is_1.default.object, is_1.default.undefined], options.https); - is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.rejectUnauthorized); - if (options.https) { - is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.https.rejectUnauthorized); - is_1.assert.any([is_1.default.function_, is_1.default.undefined], options.https.checkServerIdentity); - is_1.assert.any([is_1.default.string, is_1.default.object, is_1.default.array, is_1.default.undefined], options.https.certificateAuthority); - is_1.assert.any([is_1.default.string, is_1.default.object, is_1.default.array, is_1.default.undefined], options.https.key); - 
is_1.assert.any([is_1.default.string, is_1.default.object, is_1.default.array, is_1.default.undefined], options.https.certificate); - is_1.assert.any([is_1.default.string, is_1.default.undefined], options.https.passphrase); - is_1.assert.any([is_1.default.string, is_1.default.buffer, is_1.default.array, is_1.default.undefined], options.https.pfx); - } - is_1.assert.any([is_1.default.object, is_1.default.undefined], options.cacheOptions); - // `options.method` - if (is_1.default.string(options.method)) { - options.method = options.method.toUpperCase(); - } - else { - options.method = 'GET'; - } - // `options.headers` - if (options.headers === (defaults === null || defaults === void 0 ? void 0 : defaults.headers)) { - options.headers = { ...options.headers }; - } - else { - options.headers = lowercaseKeys({ ...(defaults === null || defaults === void 0 ? void 0 : defaults.headers), ...options.headers }); - } - // Disallow legacy `url.Url` - if ('slashes' in options) { - throw new TypeError('The legacy `url.Url` has been deprecated. Use `URL` instead.'); - } - // `options.auth` - if ('auth' in options) { - throw new TypeError('Parameter `auth` is deprecated. Use `username` / `password` instead.'); - } - // `options.searchParams` - if ('searchParams' in options) { - if (options.searchParams && options.searchParams !== (defaults === null || defaults === void 0 ? void 0 : defaults.searchParams)) { - let searchParameters; - if (is_1.default.string(options.searchParams) || (options.searchParams instanceof url_1.URLSearchParams)) { - searchParameters = new url_1.URLSearchParams(options.searchParams); - } - else { - validateSearchParameters(options.searchParams); - searchParameters = new url_1.URLSearchParams(); - // eslint-disable-next-line guard-for-in - for (const key in options.searchParams) { - const value = options.searchParams[key]; - if (value === null) { - searchParameters.append(key, ''); - } - else if (value !== undefined) { - searchParameters.append(key, value); - } - } - } - // `normalizeArguments()` is also used to merge options - (_a = defaults === null || defaults === void 0 ? void 0 : defaults.searchParams) === null || _a === void 0 ? void 0 : _a.forEach((value, key) => { - // Only use default if one isn't already defined - if (!searchParameters.has(key)) { - searchParameters.append(key, value); - } - }); - options.searchParams = searchParameters; - } - } - // `options.username` & `options.password` - options.username = (_b = options.username) !== null && _b !== void 0 ? _b : ''; - options.password = (_c = options.password) !== null && _c !== void 0 ? _c : ''; - // `options.prefixUrl` & `options.url` - if (is_1.default.undefined(options.prefixUrl)) { - options.prefixUrl = (_d = defaults === null || defaults === void 0 ? void 0 : defaults.prefixUrl) !== null && _d !== void 0 ? 
_d : ''; - } - else { - options.prefixUrl = options.prefixUrl.toString(); - if (options.prefixUrl !== '' && !options.prefixUrl.endsWith('/')) { - options.prefixUrl += '/'; - } - } - if (is_1.default.string(options.url)) { - if (options.url.startsWith('/')) { - throw new Error('`input` must not start with a slash when using `prefixUrl`'); - } - options.url = options_to_url_1.default(options.prefixUrl + options.url, options); - } - else if ((is_1.default.undefined(options.url) && options.prefixUrl !== '') || options.protocol) { - options.url = options_to_url_1.default(options.prefixUrl, options); - } - if (options.url) { - if ('port' in options) { - delete options.port; - } - // Make it possible to change `options.prefixUrl` - let { prefixUrl } = options; - Object.defineProperty(options, 'prefixUrl', { - set: (value) => { - const url = options.url; - if (!url.href.startsWith(value)) { - throw new Error(`Cannot change \`prefixUrl\` from ${prefixUrl} to ${value}: ${url.href}`); - } - options.url = new url_1.URL(value + url.href.slice(prefixUrl.length)); - prefixUrl = value; - }, - get: () => prefixUrl - }); - // Support UNIX sockets - let { protocol } = options.url; - if (protocol === 'unix:') { - protocol = 'http:'; - options.url = new url_1.URL(`http://unix${options.url.pathname}${options.url.search}`); - } - // Set search params - if (options.searchParams) { - // eslint-disable-next-line @typescript-eslint/no-base-to-string - options.url.search = options.searchParams.toString(); - } - // Protocol check - if (protocol !== 'http:' && protocol !== 'https:') { - throw new UnsupportedProtocolError(options); - } - // Update `username` - if (options.username === '') { - options.username = options.url.username; - } - else { - options.url.username = options.username; - } - // Update `password` - if (options.password === '') { - options.password = options.url.password; - } - else { - options.url.password = options.password; - } - } - // `options.cookieJar` - const { cookieJar } = options; - if (cookieJar) { - let { setCookie, getCookieString } = cookieJar; - is_1.assert.function_(setCookie); - is_1.assert.function_(getCookieString); - /* istanbul ignore next: Horrible `tough-cookie` v3 check */ - if (setCookie.length === 4 && getCookieString.length === 0) { - setCookie = util_1.promisify(setCookie.bind(options.cookieJar)); - getCookieString = util_1.promisify(getCookieString.bind(options.cookieJar)); - options.cookieJar = { - setCookie, - getCookieString: getCookieString - }; - } - } - // `options.cache` - const { cache } = options; - if (cache) { - if (!cacheableStore.has(cache)) { - cacheableStore.set(cache, new CacheableRequest(((requestOptions, handler) => { - const result = requestOptions[kRequest](requestOptions, handler); - // TODO: remove this when `cacheable-request` supports async request functions. - if (is_1.default.promise(result)) { - // @ts-expect-error - // We only need to implement the error handler in order to support HTTP2 caching. - // The result will be a promise anyway. - result.once = (event, handler) => { - if (event === 'error') { - result.catch(handler); - } - else if (event === 'abort') { - // The empty catch is needed here in case when - // it rejects before it's `await`ed in `_makeRequest`. 
- (async () => { - try { - const request = (await result); - request.once('abort', handler); - } - catch (_a) { } - })(); - } - else { - /* istanbul ignore next: safety check */ - throw new Error(`Unknown HTTP2 promise event: ${event}`); - } - return result; - }; - } - return result; - }), cache)); - } - } - // `options.cacheOptions` - options.cacheOptions = { ...options.cacheOptions }; - // `options.dnsCache` - if (options.dnsCache === true) { - if (!globalDnsCache) { - globalDnsCache = new cacheable_lookup_1.default(); - } - options.dnsCache = globalDnsCache; - } - else if (!is_1.default.undefined(options.dnsCache) && !options.dnsCache.lookup) { - throw new TypeError(`Parameter \`dnsCache\` must be a CacheableLookup instance or a boolean, got ${is_1.default(options.dnsCache)}`); - } - // `options.timeout` - if (is_1.default.number(options.timeout)) { - options.timeout = { request: options.timeout }; - } - else if (defaults && options.timeout !== defaults.timeout) { - options.timeout = { - ...defaults.timeout, - ...options.timeout - }; - } - else { - options.timeout = { ...options.timeout }; - } - // `options.context` - if (!options.context) { - options.context = {}; - } - // `options.hooks` - const areHooksDefault = options.hooks === (defaults === null || defaults === void 0 ? void 0 : defaults.hooks); - options.hooks = { ...options.hooks }; - for (const event of exports.knownHookEvents) { - if (event in options.hooks) { - if (is_1.default.array(options.hooks[event])) { - // See https://github.com/microsoft/TypeScript/issues/31445#issuecomment-576929044 - options.hooks[event] = [...options.hooks[event]]; - } - else { - throw new TypeError(`Parameter \`${event}\` must be an Array, got ${is_1.default(options.hooks[event])}`); - } - } - else { - options.hooks[event] = []; + // `options.context` + if (!options.context) { + options.context = {}; + } + // `options.hooks` + const areHooksDefault = options.hooks === (defaults === null || defaults === void 0 ? void 0 : defaults.hooks); + options.hooks = { ...options.hooks }; + for (const event of exports.knownHookEvents) { + if (event in options.hooks) { + if (is_1.default.array(options.hooks[event])) { + // See https://github.com/microsoft/TypeScript/issues/31445#issuecomment-576929044 + options.hooks[event] = [...options.hooks[event]]; + } + else { + throw new TypeError(`Parameter \`${event}\` must be an Array, got ${is_1.default(options.hooks[event])}`); + } + } + else { + options.hooks[event] = []; } } if (defaults && !areHooksDefault) { @@ -16869,6 +19149,14 @@ class Request extends stream_1.Duplex { const redirectUrl = new url_1.URL(redirectBuffer, url); const redirectString = redirectUrl.toString(); decodeURI(redirectString); + // eslint-disable-next-line no-inner-declarations + function isUnixSocketURL(url) { + return url.protocol === 'unix:' || url.hostname === 'unix'; + } + if (!isUnixSocketURL(url) && isUnixSocketURL(redirectUrl)) { + this._beforeError(new RequestError('Cannot redirect to UNIX socket', {}, this)); + return; + } // Redirecting to a different site, clear sensitive data. 
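// Editorial sketch (not part of the upstream diff): how the UNIX-socket guard
// added just above classifies URLs. `got` encodes UNIX sockets as
// `http://unix<path>`, so either the `unix:` protocol or the `unix` hostname
// marks a socket URL:
//   isUnixSocketURL(new URL('https://example.com'))                      // false
//   isUnixSocketURL(new URL('http://unix:/var/run/app.sock:/endpoint'))  // true
// A redirect from a non-socket URL to a socket URL is therefore rejected with
// the `RequestError('Cannot redirect to UNIX socket', ...)` shown above.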
if (redirectUrl.hostname !== url.hostname || redirectUrl.port !== url.port) { if ('host' in options.headers) { @@ -16938,10546 +19226,34741 @@ class Request extends stream_1.Duplex { destination.statusCode = statusCode; } } - async _onResponse(response) { - try { - await this._onResponseBase(response); - } - catch (error) { - /* istanbul ignore next: better safe than sorry */ - this._beforeError(error); - } + async _onResponse(response) { + try { + await this._onResponseBase(response); + } + catch (error) { + /* istanbul ignore next: better safe than sorry */ + this._beforeError(error); + } + } + _onRequest(request) { + const { options } = this; + const { timeout, url } = options; + http_timer_1.default(request); + this[kCancelTimeouts] = timed_out_1.default(request, timeout, url); + const responseEventName = options.cache ? 'cacheableResponse' : 'response'; + request.once(responseEventName, (response) => { + void this._onResponse(response); + }); + request.once('error', (error) => { + var _a; + // Force clean-up, because some packages (e.g. nock) don't do this. + request.destroy(); + // Node.js <= 12.18.2 mistakenly emits the response `end` first. + (_a = request.res) === null || _a === void 0 ? void 0 : _a.removeAllListeners('end'); + error = error instanceof timed_out_1.TimeoutError ? new TimeoutError(error, this.timings, this) : new RequestError(error.message, error, this); + this._beforeError(error); + }); + this[kUnproxyEvents] = proxy_events_1.default(request, this, proxiedRequestEvents); + this[kRequest] = request; + this.emit('uploadProgress', this.uploadProgress); + // Send body + const body = this[kBody]; + const currentRequest = this.redirects.length === 0 ? this : request; + if (is_1.default.nodeStream(body)) { + body.pipe(currentRequest); + body.once('error', (error) => { + this._beforeError(new UploadError(error, this)); + }); + } + else { + this._unlockWrite(); + if (!is_1.default.undefined(body)) { + this._writeRequest(body, undefined, () => { }); + currentRequest.end(); + this._lockWrite(); + } + else if (this._cannotHaveBody || this._noPipe) { + currentRequest.end(); + this._lockWrite(); + } + } + this.emit('request', request); + } + async _createCacheableRequest(url, options) { + return new Promise((resolve, reject) => { + // TODO: Remove `utils/url-to-options.ts` when `cacheable-request` is fixed + Object.assign(options, url_to_options_1.default(url)); + // `http-cache-semantics` checks this + // TODO: Fix this ignore. 
+ // @ts-expect-error + delete options.url; + let request; + // This is ugly + const cacheRequest = cacheableStore.get(options.cache)(options, async (response) => { + // TODO: Fix `cacheable-response` + response._readableState.autoDestroy = false; + if (request) { + (await request).emit('cacheableResponse', response); + } + resolve(response); + }); + // Restore options + options.url = url; + cacheRequest.once('error', reject); + cacheRequest.once('request', async (requestOrPromise) => { + request = requestOrPromise; + resolve(request); + }); + }); + } + async _makeRequest() { + var _a, _b, _c, _d, _e; + const { options } = this; + const { headers } = options; + for (const key in headers) { + if (is_1.default.undefined(headers[key])) { + // eslint-disable-next-line @typescript-eslint/no-dynamic-delete + delete headers[key]; + } + else if (is_1.default.null_(headers[key])) { + throw new TypeError(`Use \`undefined\` instead of \`null\` to delete the \`${key}\` header`); + } + } + if (options.decompress && is_1.default.undefined(headers['accept-encoding'])) { + headers['accept-encoding'] = supportsBrotli ? 'gzip, deflate, br' : 'gzip, deflate'; + } + // Set cookies + if (options.cookieJar) { + const cookieString = await options.cookieJar.getCookieString(options.url.toString()); + if (is_1.default.nonEmptyString(cookieString)) { + options.headers.cookie = cookieString; + } + } + for (const hook of options.hooks.beforeRequest) { + // eslint-disable-next-line no-await-in-loop + const result = await hook(options); + if (!is_1.default.undefined(result)) { + // @ts-expect-error Skip the type mismatch to support abstract responses + options.request = () => result; + break; + } + } + if (options.body && this[kBody] !== options.body) { + this[kBody] = options.body; + } + const { agent, request, timeout, url } = options; + if (options.dnsCache && !('lookup' in options)) { + options.lookup = options.dnsCache.lookup; + } + // UNIX sockets + if (url.hostname === 'unix') { + const matches = /(?.+?):(?.+)/.exec(`${url.pathname}${url.search}`); + if (matches === null || matches === void 0 ? void 0 : matches.groups) { + const { socketPath, path } = matches.groups; + Object.assign(options, { + socketPath, + path, + host: '' + }); + } + } + const isHttps = url.protocol === 'https:'; + // Fallback function + let fallbackFn; + if (options.http2) { + fallbackFn = http2wrapper.auto; + } + else { + fallbackFn = isHttps ? https.request : http.request; + } + const realFn = (_a = options.request) !== null && _a !== void 0 ? _a : fallbackFn; + // Cache support + const fn = options.cache ? this._createCacheableRequest : realFn; + // Pass an agent directly when HTTP2 is disabled + if (agent && !options.http2) { + options.agent = agent[isHttps ? 'https' : 'http']; + } + // Prepare plain HTTP request options + options[kRequest] = realFn; + delete options.request; + // TODO: Fix this ignore. + // @ts-expect-error + delete options.timeout; + const requestOptions = options; + requestOptions.shared = (_b = options.cacheOptions) === null || _b === void 0 ? void 0 : _b.shared; + requestOptions.cacheHeuristic = (_c = options.cacheOptions) === null || _c === void 0 ? void 0 : _c.cacheHeuristic; + requestOptions.immutableMinTimeToLive = (_d = options.cacheOptions) === null || _d === void 0 ? void 0 : _d.immutableMinTimeToLive; + requestOptions.ignoreCargoCult = (_e = options.cacheOptions) === null || _e === void 0 ? 
void 0 : _e.ignoreCargoCult; + // If `dnsLookupIpVersion` is not present do not override `family` + if (options.dnsLookupIpVersion !== undefined) { + try { + requestOptions.family = dns_ip_version_1.dnsLookupIpVersionToFamily(options.dnsLookupIpVersion); + } + catch (_f) { + throw new Error('Invalid `dnsLookupIpVersion` option value'); + } + } + // HTTPS options remapping + if (options.https) { + if ('rejectUnauthorized' in options.https) { + requestOptions.rejectUnauthorized = options.https.rejectUnauthorized; + } + if (options.https.checkServerIdentity) { + requestOptions.checkServerIdentity = options.https.checkServerIdentity; + } + if (options.https.certificateAuthority) { + requestOptions.ca = options.https.certificateAuthority; + } + if (options.https.certificate) { + requestOptions.cert = options.https.certificate; + } + if (options.https.key) { + requestOptions.key = options.https.key; + } + if (options.https.passphrase) { + requestOptions.passphrase = options.https.passphrase; + } + if (options.https.pfx) { + requestOptions.pfx = options.https.pfx; + } + } + try { + let requestOrResponse = await fn(url, requestOptions); + if (is_1.default.undefined(requestOrResponse)) { + requestOrResponse = fallbackFn(url, requestOptions); + } + // Restore options + options.request = request; + options.timeout = timeout; + options.agent = agent; + // HTTPS options restore + if (options.https) { + if ('rejectUnauthorized' in options.https) { + delete requestOptions.rejectUnauthorized; + } + if (options.https.checkServerIdentity) { + // @ts-expect-error - This one will be removed when we remove the alias. + delete requestOptions.checkServerIdentity; + } + if (options.https.certificateAuthority) { + delete requestOptions.ca; + } + if (options.https.certificate) { + delete requestOptions.cert; + } + if (options.https.key) { + delete requestOptions.key; + } + if (options.https.passphrase) { + delete requestOptions.passphrase; + } + if (options.https.pfx) { + delete requestOptions.pfx; + } + } + if (isClientRequest(requestOrResponse)) { + this._onRequest(requestOrResponse); + // Emit the response after the stream has been ended + } + else if (this.writable) { + this.once('finish', () => { + void this._onResponse(requestOrResponse); + }); + this._unlockWrite(); + this.end(); + this._lockWrite(); + } + else { + void this._onResponse(requestOrResponse); + } + } + catch (error) { + if (error instanceof CacheableRequest.CacheError) { + throw new CacheError(error, this); + } + throw new RequestError(error.message, error, this); + } + } + async _error(error) { + try { + for (const hook of this.options.hooks.beforeError) { + // eslint-disable-next-line no-await-in-loop + error = await hook(error); + } + } + catch (error_) { + error = new RequestError(error_.message, error_, this); + } + this.destroy(error); + } + _beforeError(error) { + if (this[kStopReading]) { + return; + } + const { options } = this; + const retryCount = this.retryCount + 1; + this[kStopReading] = true; + if (!(error instanceof RequestError)) { + error = new RequestError(error.message, error, this); + } + const typedError = error; + const { response } = typedError; + void (async () => { + if (response && !response.body) { + response.setEncoding(this._readableState.encoding); + try { + response.rawBody = await get_buffer_1.default(response); + response.body = response.rawBody.toString(); + } + catch (_a) { } + } + if (this.listenerCount('retry') !== 0) { + let backoff; + try { + let retryAfter; + if (response && 'retry-after' in 
response.headers) { + retryAfter = Number(response.headers['retry-after']); + if (Number.isNaN(retryAfter)) { + retryAfter = Date.parse(response.headers['retry-after']) - Date.now(); + if (retryAfter <= 0) { + retryAfter = 1; + } + } + else { + retryAfter *= 1000; + } + } + backoff = await options.retry.calculateDelay({ + attemptCount: retryCount, + retryOptions: options.retry, + error: typedError, + retryAfter, + computedValue: calculate_retry_delay_1.default({ + attemptCount: retryCount, + retryOptions: options.retry, + error: typedError, + retryAfter, + computedValue: 0 + }) + }); + } + catch (error_) { + void this._error(new RequestError(error_.message, error_, this)); + return; + } + if (backoff) { + const retry = async () => { + try { + for (const hook of this.options.hooks.beforeRetry) { + // eslint-disable-next-line no-await-in-loop + await hook(this.options, typedError, retryCount); + } + } + catch (error_) { + void this._error(new RequestError(error_.message, error, this)); + return; + } + // Something forced us to abort the retry + if (this.destroyed) { + return; + } + this.destroy(); + this.emit('retry', retryCount, error); + }; + this[kRetryTimeout] = setTimeout(retry, backoff); + return; + } + } + void this._error(typedError); + })(); + } + _read() { + this[kTriggerRead] = true; + const response = this[kResponse]; + if (response && !this[kStopReading]) { + // We cannot put this in the `if` above + // because `.read()` also triggers the `end` event + if (response.readableLength) { + this[kTriggerRead] = false; + } + let data; + while ((data = response.read()) !== null) { + this[kDownloadedSize] += data.length; + this[kStartedReading] = true; + const progress = this.downloadProgress; + if (progress.percent < 1) { + this.emit('downloadProgress', progress); + } + this.push(data); + } + } + } + // Node.js 12 has incorrect types, so the encoding must be a string + _write(chunk, encoding, callback) { + const write = () => { + this._writeRequest(chunk, encoding, callback); + }; + if (this.requestInitialized) { + write(); + } + else { + this[kJobs].push(write); + } + } + _writeRequest(chunk, encoding, callback) { + if (this[kRequest].destroyed) { + // Probably the `ClientRequest` instance will throw + return; + } + this._progressCallbacks.push(() => { + this[kUploadedSize] += Buffer.byteLength(chunk, encoding); + const progress = this.uploadProgress; + if (progress.percent < 1) { + this.emit('uploadProgress', progress); + } + }); + // TODO: What happens if it's from cache? Then this[kRequest] won't be defined. + this[kRequest].write(chunk, encoding, (error) => { + if (!error && this._progressCallbacks.length > 0) { + this._progressCallbacks.shift()(); + } + callback(error); + }); + } + _final(callback) { + const endRequest = () => { + // FIX: Node.js 10 calls the write callback AFTER the end callback! + while (this._progressCallbacks.length !== 0) { + this._progressCallbacks.shift()(); + } + // We need to check if `this[kRequest]` is present, + // because it isn't when we use cache. 
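// Editorial sketch (not part of the upstream diff): the Retry-After handling in
// `_beforeError` earlier in this class normalizes the header value before it is
// passed to `options.retry.calculateDelay`:
//   'retry-after: 2'                             => 2 * 1000 = 2000 ms
//   'retry-after: Wed, 21 Oct 2015 07:28:00 GMT' => Date.parse(value) - Date.now() ms,
//                                                   forced to 1 ms if that is <= 0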
+ if (!(kRequest in this)) { + callback(); + return; + } + if (this[kRequest].destroyed) { + callback(); + return; + } + this[kRequest].end((error) => { + if (!error) { + this[kBodySize] = this[kUploadedSize]; + this.emit('uploadProgress', this.uploadProgress); + this[kRequest].emit('upload-complete'); + } + callback(error); + }); + }; + if (this.requestInitialized) { + endRequest(); + } + else { + this[kJobs].push(endRequest); + } + } + _destroy(error, callback) { + var _a; + this[kStopReading] = true; + // Prevent further retries + clearTimeout(this[kRetryTimeout]); + if (kRequest in this) { + this[kCancelTimeouts](); + // TODO: Remove the next `if` when these get fixed: + // - https://github.com/nodejs/node/issues/32851 + if (!((_a = this[kResponse]) === null || _a === void 0 ? void 0 : _a.complete)) { + this[kRequest].destroy(); + } + } + if (error !== null && !is_1.default.undefined(error) && !(error instanceof RequestError)) { + error = new RequestError(error.message, error, this); + } + callback(error); + } + get _isAboutToError() { + return this[kStopReading]; + } + /** + The remote IP address. + */ + get ip() { + var _a; + return (_a = this.socket) === null || _a === void 0 ? void 0 : _a.remoteAddress; + } + /** + Indicates whether the request has been aborted or not. + */ + get aborted() { + var _a, _b, _c; + return ((_b = (_a = this[kRequest]) === null || _a === void 0 ? void 0 : _a.destroyed) !== null && _b !== void 0 ? _b : this.destroyed) && !((_c = this[kOriginalResponse]) === null || _c === void 0 ? void 0 : _c.complete); + } + get socket() { + var _a, _b; + return (_b = (_a = this[kRequest]) === null || _a === void 0 ? void 0 : _a.socket) !== null && _b !== void 0 ? _b : undefined; + } + /** + Progress event for downloading (receiving a response). + */ + get downloadProgress() { + let percent; + if (this[kResponseSize]) { + percent = this[kDownloadedSize] / this[kResponseSize]; + } + else if (this[kResponseSize] === this[kDownloadedSize]) { + percent = 1; + } + else { + percent = 0; + } + return { + percent, + transferred: this[kDownloadedSize], + total: this[kResponseSize] + }; + } + /** + Progress event for uploading (sending a request). + */ + get uploadProgress() { + let percent; + if (this[kBodySize]) { + percent = this[kUploadedSize] / this[kBodySize]; + } + else if (this[kBodySize] === this[kUploadedSize]) { + percent = 1; + } + else { + percent = 0; + } + return { + percent, + transferred: this[kUploadedSize], + total: this[kBodySize] + }; + } + /** + The object contains the following properties: + + - `start` - Time when the request started. + - `socket` - Time when a socket was assigned to the request. + - `lookup` - Time when the DNS lookup finished. + - `connect` - Time when the socket successfully connected. + - `secureConnect` - Time when the socket securely connected. + - `upload` - Time when the request finished uploading. + - `response` - Time when the request fired `response` event. + - `end` - Time when the response fired `end` event. + - `error` - Time when the request fired `error` event. + - `abort` - Time when the request fired `abort` event. 
+ - `phases` + - `wait` - `timings.socket - timings.start` + - `dns` - `timings.lookup - timings.socket` + - `tcp` - `timings.connect - timings.lookup` + - `tls` - `timings.secureConnect - timings.connect` + - `request` - `timings.upload - (timings.secureConnect || timings.connect)` + - `firstByte` - `timings.response - timings.upload` + - `download` - `timings.end - timings.response` + - `total` - `(timings.end || timings.error || timings.abort) - timings.start` + + If something has not been measured yet, it will be `undefined`. + + __Note__: The time is a `number` representing the milliseconds elapsed since the UNIX epoch. + */ + get timings() { + var _a; + return (_a = this[kRequest]) === null || _a === void 0 ? void 0 : _a.timings; + } + /** + Whether the response was retrieved from the cache. + */ + get isFromCache() { + return this[kIsFromCache]; + } + pipe(destination, options) { + if (this[kStartedReading]) { + throw new Error('Failed to pipe. The response has been emitted already.'); + } + if (destination instanceof http_1.ServerResponse) { + this[kServerResponsesPiped].add(destination); + } + return super.pipe(destination, options); + } + unpipe(destination) { + if (destination instanceof http_1.ServerResponse) { + this[kServerResponsesPiped].delete(destination); + } + super.unpipe(destination); + return this; + } +} +exports["default"] = Request; + + +/***/ }), + +/***/ 4993: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.dnsLookupIpVersionToFamily = exports.isDnsLookupIpVersion = void 0; +const conversionTable = { + auto: 0, + ipv4: 4, + ipv6: 6 +}; +exports.isDnsLookupIpVersion = (value) => { + return value in conversionTable; +}; +exports.dnsLookupIpVersionToFamily = (dnsLookupIpVersion) => { + if (exports.isDnsLookupIpVersion(dnsLookupIpVersion)) { + return conversionTable[dnsLookupIpVersion]; + } + throw new Error('Invalid DNS lookup IP version'); +}; + + +/***/ }), + +/***/ 4564: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const fs_1 = __nccwpck_require__(7147); +const util_1 = __nccwpck_require__(3837); +const is_1 = __nccwpck_require__(7678); +const is_form_data_1 = __nccwpck_require__(40); +const statAsync = util_1.promisify(fs_1.stat); +exports["default"] = async (body, headers) => { + if (headers && 'content-length' in headers) { + return Number(headers['content-length']); + } + if (!body) { + return 0; + } + if (is_1.default.string(body)) { + return Buffer.byteLength(body); + } + if (is_1.default.buffer(body)) { + return body.length; + } + if (is_form_data_1.default(body)) { + return util_1.promisify(body.getLength.bind(body))(); + } + if (body instanceof fs_1.ReadStream) { + const { size } = await statAsync(body.path); + if (size === 0) { + return undefined; + } + return size; + } + return undefined; +}; + + +/***/ }), + +/***/ 4500: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +// TODO: Update https://github.com/sindresorhus/get-stream +const getBuffer = async (stream) => { + const chunks = []; + let length = 0; + for await (const chunk of stream) { + chunks.push(chunk); + length += Buffer.byteLength(chunk); + } + if (Buffer.isBuffer(chunks[0])) { + return Buffer.concat(chunks, length); + } + return Buffer.from(chunks.join('')); +}; +exports["default"] = getBuffer; + + +/***/ 
}), + +/***/ 40: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const is_1 = __nccwpck_require__(7678); +exports["default"] = (body) => is_1.default.nodeStream(body) && is_1.default.function_(body.getBoundary); + + +/***/ }), + +/***/ 9298: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isResponseOk = void 0; +exports.isResponseOk = (response) => { + const { statusCode } = response; + const limitStatusCode = response.request.options.followRedirect ? 299 : 399; + return (statusCode >= 200 && statusCode <= limitStatusCode) || statusCode === 304; +}; + + +/***/ }), + +/***/ 9219: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +/* istanbul ignore file: deprecated */ +const url_1 = __nccwpck_require__(7310); +const keys = [ + 'protocol', + 'host', + 'hostname', + 'port', + 'pathname', + 'search' +]; +exports["default"] = (origin, options) => { + var _a, _b; + if (options.path) { + if (options.pathname) { + throw new TypeError('Parameters `path` and `pathname` are mutually exclusive.'); + } + if (options.search) { + throw new TypeError('Parameters `path` and `search` are mutually exclusive.'); + } + if (options.searchParams) { + throw new TypeError('Parameters `path` and `searchParams` are mutually exclusive.'); + } + } + if (options.search && options.searchParams) { + throw new TypeError('Parameters `search` and `searchParams` are mutually exclusive.'); + } + if (!origin) { + if (!options.protocol) { + throw new TypeError('No URL protocol specified'); + } + origin = `${options.protocol}//${(_b = (_a = options.hostname) !== null && _a !== void 0 ? _a : options.host) !== null && _b !== void 0 ? 
_b : ''}`; + } + const url = new url_1.URL(origin); + if (options.path) { + const searchIndex = options.path.indexOf('?'); + if (searchIndex === -1) { + options.pathname = options.path; + } + else { + options.pathname = options.path.slice(0, searchIndex); + options.search = options.path.slice(searchIndex + 1); + } + delete options.path; + } + for (const key of keys) { + if (options[key]) { + url[key] = options[key].toString(); + } + } + return url; +}; + + +/***/ }), + +/***/ 3021: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +function default_1(from, to, events) { + const fns = {}; + for (const event of events) { + fns[event] = (...args) => { + to.emit(event, ...args); + }; + from.on(event, fns[event]); + } + return () => { + for (const event of events) { + from.off(event, fns[event]); + } + }; +} +exports["default"] = default_1; + + +/***/ }), + +/***/ 2454: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.TimeoutError = void 0; +const net = __nccwpck_require__(1808); +const unhandle_1 = __nccwpck_require__(1593); +const reentry = Symbol('reentry'); +const noop = () => { }; +class TimeoutError extends Error { + constructor(threshold, event) { + super(`Timeout awaiting '${event}' for ${threshold}ms`); + this.event = event; + this.name = 'TimeoutError'; + this.code = 'ETIMEDOUT'; + } +} +exports.TimeoutError = TimeoutError; +exports["default"] = (request, delays, options) => { + if (reentry in request) { + return noop; + } + request[reentry] = true; + const cancelers = []; + const { once, unhandleAll } = unhandle_1.default(); + const addTimeout = (delay, callback, event) => { + var _a; + const timeout = setTimeout(callback, delay, delay, event); + (_a = timeout.unref) === null || _a === void 0 ? void 0 : _a.call(timeout); + const cancel = () => { + clearTimeout(timeout); + }; + cancelers.push(cancel); + return cancel; + }; + const { host, hostname } = options; + const timeoutHandler = (delay, event) => { + request.destroy(new TimeoutError(delay, event)); + }; + const cancelTimeouts = () => { + for (const cancel of cancelers) { + cancel(); + } + unhandleAll(); + }; + request.once('error', error => { + cancelTimeouts(); + // Save original behavior + /* istanbul ignore next */ + if (request.listenerCount('error') === 0) { + throw error; + } + }); + request.once('close', cancelTimeouts); + once(request, 'response', (response) => { + once(response, 'end', cancelTimeouts); + }); + if (typeof delays.request !== 'undefined') { + addTimeout(delays.request, timeoutHandler, 'request'); + } + if (typeof delays.socket !== 'undefined') { + const socketTimeoutHandler = () => { + timeoutHandler(delays.socket, 'socket'); + }; + request.setTimeout(delays.socket, socketTimeoutHandler); + // `request.setTimeout(0)` causes a memory leak. + // We can just remove the listener and forget about the timer - it's unreffed. + // See https://github.com/sindresorhus/got/issues/690 + cancelers.push(() => { + request.removeListener('timeout', socketTimeoutHandler); + }); + } + once(request, 'socket', (socket) => { + var _a; + const { socketPath } = request; + /* istanbul ignore next: hard to test */ + if (socket.connecting) { + const hasPath = Boolean(socketPath !== null && socketPath !== void 0 ? socketPath : net.isIP((_a = hostname !== null && hostname !== void 0 ? hostname : host) !== null && _a !== void 0 ? 
_a : '') !== 0); + if (typeof delays.lookup !== 'undefined' && !hasPath && typeof socket.address().address === 'undefined') { + const cancelTimeout = addTimeout(delays.lookup, timeoutHandler, 'lookup'); + once(socket, 'lookup', cancelTimeout); + } + if (typeof delays.connect !== 'undefined') { + const timeConnect = () => addTimeout(delays.connect, timeoutHandler, 'connect'); + if (hasPath) { + once(socket, 'connect', timeConnect()); + } + else { + once(socket, 'lookup', (error) => { + if (error === null) { + once(socket, 'connect', timeConnect()); + } + }); + } + } + if (typeof delays.secureConnect !== 'undefined' && options.protocol === 'https:') { + once(socket, 'connect', () => { + const cancelTimeout = addTimeout(delays.secureConnect, timeoutHandler, 'secureConnect'); + once(socket, 'secureConnect', cancelTimeout); + }); + } + } + if (typeof delays.send !== 'undefined') { + const timeRequest = () => addTimeout(delays.send, timeoutHandler, 'send'); + /* istanbul ignore next: hard to test */ + if (socket.connecting) { + once(socket, 'connect', () => { + once(request, 'upload-complete', timeRequest()); + }); + } + else { + once(request, 'upload-complete', timeRequest()); + } + } + }); + if (typeof delays.response !== 'undefined') { + once(request, 'upload-complete', () => { + const cancelTimeout = addTimeout(delays.response, timeoutHandler, 'response'); + once(request, 'response', cancelTimeout); + }); + } + return cancelTimeouts; +}; + + +/***/ }), + +/***/ 1593: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +// When attaching listeners, it's very easy to forget about them. +// Especially if you do error handling and set timeouts. +// So instead of checking if it's proper to throw an error on every timeout ever, +// use this simple tool which will remove all listeners you have attached. +exports["default"] = () => { + const handlers = []; + return { + once(origin, event, fn) { + origin.once(event, fn); + handlers.push({ origin, event, fn }); + }, + unhandleAll() { + for (const handler of handlers) { + const { origin, event, fn } = handler; + origin.removeListener(event, fn); + } + handlers.length = 0; + } + }; +}; + + +/***/ }), + +/***/ 8026: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const is_1 = __nccwpck_require__(7678); +exports["default"] = (url) => { + // Cast to URL + url = url; + const options = { + protocol: url.protocol, + hostname: is_1.default.string(url.hostname) && url.hostname.startsWith('[') ? 
url.hostname.slice(1, -1) : url.hostname, + host: url.host, + hash: url.hash, + search: url.search, + pathname: url.pathname, + href: url.href, + path: `${url.pathname || ''}${url.search || ''}` + }; + if (is_1.default.string(url.port) && url.port.length > 0) { + options.port = Number(url.port); + } + if (url.username || url.password) { + options.auth = `${url.username || ''}:${url.password || ''}`; + } + return options; +}; + + +/***/ }), + +/***/ 7288: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +class WeakableMap { + constructor() { + this.weakMap = new WeakMap(); + this.map = new Map(); + } + set(key, value) { + if (typeof key === 'object') { + this.weakMap.set(key, value); + } + else { + this.map.set(key, value); + } + } + get(key) { + if (typeof key === 'object') { + return this.weakMap.get(key); + } + return this.map.get(key); + } + has(key) { + if (typeof key === 'object') { + return this.weakMap.has(key); + } + return this.map.has(key); + } +} +exports["default"] = WeakableMap; + + +/***/ }), + +/***/ 4337: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.defaultHandler = void 0; +const is_1 = __nccwpck_require__(7678); +const as_promise_1 = __nccwpck_require__(6056); +const create_rejection_1 = __nccwpck_require__(6457); +const core_1 = __nccwpck_require__(94); +const deep_freeze_1 = __nccwpck_require__(285); +const errors = { + RequestError: as_promise_1.RequestError, + CacheError: as_promise_1.CacheError, + ReadError: as_promise_1.ReadError, + HTTPError: as_promise_1.HTTPError, + MaxRedirectsError: as_promise_1.MaxRedirectsError, + TimeoutError: as_promise_1.TimeoutError, + ParseError: as_promise_1.ParseError, + CancelError: as_promise_1.CancelError, + UnsupportedProtocolError: as_promise_1.UnsupportedProtocolError, + UploadError: as_promise_1.UploadError +}; +// The `delay` package weighs 10KB (!) +const delay = async (ms) => new Promise(resolve => { + setTimeout(resolve, ms); +}); +const { normalizeArguments } = core_1.default; +const mergeOptions = (...sources) => { + let mergedOptions; + for (const source of sources) { + mergedOptions = normalizeArguments(undefined, source, mergedOptions); + } + return mergedOptions; +}; +const getPromiseOrStream = (options) => options.isStream ? 
new core_1.default(undefined, options) : as_promise_1.default(options); +const isGotInstance = (value) => ('defaults' in value && 'options' in value.defaults); +const aliases = [ + 'get', + 'post', + 'put', + 'patch', + 'head', + 'delete' +]; +exports.defaultHandler = (options, next) => next(options); +const callInitHooks = (hooks, options) => { + if (hooks) { + for (const hook of hooks) { + hook(options); + } + } +}; +const create = (defaults) => { + // Proxy properties from next handlers + defaults._rawHandlers = defaults.handlers; + defaults.handlers = defaults.handlers.map(fn => ((options, next) => { + // This will be assigned by assigning result + let root; + const result = fn(options, newOptions => { + root = next(newOptions); + return root; + }); + if (result !== root && !options.isStream && root) { + const typedResult = result; + const { then: promiseThen, catch: promiseCatch, finally: promiseFianlly } = typedResult; + Object.setPrototypeOf(typedResult, Object.getPrototypeOf(root)); + Object.defineProperties(typedResult, Object.getOwnPropertyDescriptors(root)); + // These should point to the new promise + // eslint-disable-next-line promise/prefer-await-to-then + typedResult.then = promiseThen; + typedResult.catch = promiseCatch; + typedResult.finally = promiseFianlly; + } + return result; + })); + // Got interface + const got = ((url, options = {}, _defaults) => { + var _a, _b; + let iteration = 0; + const iterateHandlers = (newOptions) => { + return defaults.handlers[iteration++](newOptions, iteration === defaults.handlers.length ? getPromiseOrStream : iterateHandlers); + }; + // TODO: Remove this in Got 12. + if (is_1.default.plainObject(url)) { + const mergedOptions = { + ...url, + ...options + }; + core_1.setNonEnumerableProperties([url, options], mergedOptions); + options = mergedOptions; + url = undefined; + } + try { + // Call `init` hooks + let initHookError; + try { + callInitHooks(defaults.options.hooks.init, options); + callInitHooks((_a = options.hooks) === null || _a === void 0 ? void 0 : _a.init, options); + } + catch (error) { + initHookError = error; + } + // Normalize options & call handlers + const normalizedOptions = normalizeArguments(url, options, _defaults !== null && _defaults !== void 0 ? _defaults : defaults.options); + normalizedOptions[core_1.kIsNormalizedAlready] = true; + if (initHookError) { + throw new as_promise_1.RequestError(initHookError.message, initHookError, normalizedOptions); + } + return iterateHandlers(normalizedOptions); + } + catch (error) { + if (options.isStream) { + throw error; + } + else { + return create_rejection_1.default(error, defaults.options.hooks.beforeError, (_b = options.hooks) === null || _b === void 0 ? 
void 0 : _b.beforeError); + } + } + }); + got.extend = (...instancesOrOptions) => { + const optionsArray = [defaults.options]; + let handlers = [...defaults._rawHandlers]; + let isMutableDefaults; + for (const value of instancesOrOptions) { + if (isGotInstance(value)) { + optionsArray.push(value.defaults.options); + handlers.push(...value.defaults._rawHandlers); + isMutableDefaults = value.defaults.mutableDefaults; + } + else { + optionsArray.push(value); + if ('handlers' in value) { + handlers.push(...value.handlers); + } + isMutableDefaults = value.mutableDefaults; + } + } + handlers = handlers.filter(handler => handler !== exports.defaultHandler); + if (handlers.length === 0) { + handlers.push(exports.defaultHandler); + } + return create({ + options: mergeOptions(...optionsArray), + handlers, + mutableDefaults: Boolean(isMutableDefaults) + }); + }; + // Pagination + const paginateEach = (async function* (url, options) { + // TODO: Remove this `@ts-expect-error` when upgrading to TypeScript 4. + // Error: Argument of type 'Merge> | undefined' is not assignable to parameter of type 'Options | undefined'. + // @ts-expect-error + let normalizedOptions = normalizeArguments(url, options, defaults.options); + normalizedOptions.resolveBodyOnly = false; + const pagination = normalizedOptions.pagination; + if (!is_1.default.object(pagination)) { + throw new TypeError('`options.pagination` must be implemented'); + } + const all = []; + let { countLimit } = pagination; + let numberOfRequests = 0; + while (numberOfRequests < pagination.requestLimit) { + if (numberOfRequests !== 0) { + // eslint-disable-next-line no-await-in-loop + await delay(pagination.backoff); + } + // @ts-expect-error FIXME! + // TODO: Throw when result is not an instance of Response + // eslint-disable-next-line no-await-in-loop + const result = (await got(undefined, undefined, normalizedOptions)); + // eslint-disable-next-line no-await-in-loop + const parsed = await pagination.transform(result); + const current = []; + for (const item of parsed) { + if (pagination.filter(item, all, current)) { + if (!pagination.shouldContinue(item, all, current)) { + return; + } + yield item; + if (pagination.stackAllItems) { + all.push(item); + } + current.push(item); + if (--countLimit <= 0) { + return; + } + } + } + const optionsToMerge = pagination.paginate(result, all, current); + if (optionsToMerge === false) { + return; + } + if (optionsToMerge === result.request.options) { + normalizedOptions = result.request.options; + } + else if (optionsToMerge !== undefined) { + normalizedOptions = normalizeArguments(undefined, optionsToMerge, normalizedOptions); + } + numberOfRequests++; + } + }); + got.paginate = paginateEach; + got.paginate.all = (async (url, options) => { + const results = []; + for await (const item of paginateEach(url, options)) { + results.push(item); + } + return results; + }); + // For those who like very descriptive names + got.paginate.each = paginateEach; + // Stream API + got.stream = ((url, options) => got(url, { ...options, isStream: true })); + // Shortcuts + for (const method of aliases) { + got[method] = ((url, options) => got(url, { ...options, method })); + got.stream[method] = ((url, options) => { + return got(url, { ...options, method, isStream: true }); + }); + } + Object.assign(got, errors); + Object.defineProperty(got, 'defaults', { + value: defaults.mutableDefaults ? 
defaults : deep_freeze_1.default(defaults), + writable: defaults.mutableDefaults, + configurable: defaults.mutableDefaults, + enumerable: true + }); + got.mergeOptions = mergeOptions; + return got; +}; +exports["default"] = create; +__exportStar(__nccwpck_require__(2613), exports); + + +/***/ }), + +/***/ 3061: +/***/ (function(module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const url_1 = __nccwpck_require__(7310); +const create_1 = __nccwpck_require__(4337); +const defaults = { + options: { + method: 'GET', + retry: { + limit: 2, + methods: [ + 'GET', + 'PUT', + 'HEAD', + 'DELETE', + 'OPTIONS', + 'TRACE' + ], + statusCodes: [ + 408, + 413, + 429, + 500, + 502, + 503, + 504, + 521, + 522, + 524 + ], + errorCodes: [ + 'ETIMEDOUT', + 'ECONNRESET', + 'EADDRINUSE', + 'ECONNREFUSED', + 'EPIPE', + 'ENOTFOUND', + 'ENETUNREACH', + 'EAI_AGAIN' + ], + maxRetryAfter: undefined, + calculateDelay: ({ computedValue }) => computedValue + }, + timeout: {}, + headers: { + 'user-agent': 'got (https://github.com/sindresorhus/got)' + }, + hooks: { + init: [], + beforeRequest: [], + beforeRedirect: [], + beforeRetry: [], + beforeError: [], + afterResponse: [] + }, + cache: undefined, + dnsCache: undefined, + decompress: true, + throwHttpErrors: true, + followRedirect: true, + isStream: false, + responseType: 'text', + resolveBodyOnly: false, + maxRedirects: 10, + prefixUrl: '', + methodRewriting: true, + ignoreInvalidCookies: false, + context: {}, + // TODO: Set this to `true` when Got 12 gets released + http2: false, + allowGetBody: false, + https: undefined, + pagination: { + transform: (response) => { + if (response.request.options.responseType === 'json') { + return response.body; + } + return JSON.parse(response.body); + }, + paginate: response => { + if (!Reflect.has(response.headers, 'link')) { + return false; + } + const items = response.headers.link.split(','); + let next; + for (const item of items) { + const parsed = item.split(';'); + if (parsed[1].includes('next')) { + next = parsed[0].trimStart().trim(); + next = next.slice(1, -1); + break; + } + } + if (next) { + const options = { + url: new url_1.URL(next) + }; + return options; + } + return false; + }, + filter: () => true, + shouldContinue: () => true, + countLimit: Infinity, + backoff: 0, + requestLimit: 10000, + stackAllItems: true + }, + parseJson: (text) => JSON.parse(text), + stringifyJson: (object) => JSON.stringify(object), + cacheOptions: {} + }, + handlers: [create_1.defaultHandler], + mutableDefaults: false +}; +const got = create_1.default(defaults); +exports["default"] = got; +// For CommonJS default export support +module.exports = got; +module.exports["default"] = got; +module.exports.__esModule = true; // Workaround for TS issue: https://github.com/sindresorhus/got/pull/1267 +__exportStar(__nccwpck_require__(4337), exports); +__exportStar(__nccwpck_require__(6056), exports); + + +/***/ }), + +/***/ 2613: +/***/ ((__unused_webpack_module, exports) => { + 
+"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 285: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const is_1 = __nccwpck_require__(7678); +function deepFreeze(object) { + for (const value of Object.values(object)) { + if (is_1.default.plainObject(value) || is_1.default.array(value)) { + deepFreeze(value); + } + } + return Object.freeze(object); +} +exports["default"] = deepFreeze; + + +/***/ }), + +/***/ 397: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const alreadyWarned = new Set(); +exports["default"] = (message) => { + if (alreadyWarned.has(message)) { + return; + } + alreadyWarned.add(message); + // @ts-expect-error Missing types. + process.emitWarning(`Got: ${message}`, { + type: 'DeprecationWarning' + }); +}; + + +/***/ }), + +/***/ 1002: +/***/ ((module) => { + +"use strict"; + +// rfc7231 6.1 +const statusCodeCacheableByDefault = new Set([ + 200, + 203, + 204, + 206, + 300, + 301, + 308, + 404, + 405, + 410, + 414, + 501, +]); + +// This implementation does not understand partial responses (206) +const understoodStatuses = new Set([ + 200, + 203, + 204, + 300, + 301, + 302, + 303, + 307, + 308, + 404, + 405, + 410, + 414, + 501, +]); + +const errorStatusCodes = new Set([ + 500, + 502, + 503, + 504, +]); + +const hopByHopHeaders = { + date: true, // included, because we add Age update Date + connection: true, + 'keep-alive': true, + 'proxy-authenticate': true, + 'proxy-authorization': true, + te: true, + trailer: true, + 'transfer-encoding': true, + upgrade: true, +}; + +const excludedFromRevalidationUpdate = { + // Since the old body is reused, it doesn't make sense to change properties of the body + 'content-length': true, + 'content-encoding': true, + 'transfer-encoding': true, + 'content-range': true, +}; + +function toNumberOrZero(s) { + const n = parseInt(s, 10); + return isFinite(n) ? n : 0; +} + +// RFC 5861 +function isErrorResponse(response) { + // consider undefined response as faulty + if(!response) { + return true + } + return errorStatusCodes.has(response.status); +} + +function parseCacheControl(header) { + const cc = {}; + if (!header) return cc; + + // TODO: When there is more than one value present for a given directive (e.g., two Expires header fields, multiple Cache-Control: max-age directives), + // the directive's value is considered invalid. Caches are encouraged to consider responses that have invalid freshness information to be stale + const parts = header.trim().split(/,/); + for (const part of parts) { + const [k, v] = part.split(/=/, 2); + cc[k.trim()] = v === undefined ? true : v.trim().replace(/^"|"$/g, ''); + } + + return cc; +} + +function formatCacheControl(cc) { + let parts = []; + for (const k in cc) { + const v = cc[k]; + parts.push(v === true ? 
k : k + '=' + v); + } + if (!parts.length) { + return undefined; + } + return parts.join(', '); +} + +module.exports = class CachePolicy { + constructor( + req, + res, + { + shared, + cacheHeuristic, + immutableMinTimeToLive, + ignoreCargoCult, + _fromObject, + } = {} + ) { + if (_fromObject) { + this._fromObject(_fromObject); + return; + } + + if (!res || !res.headers) { + throw Error('Response headers missing'); + } + this._assertRequestHasHeaders(req); + + this._responseTime = this.now(); + this._isShared = shared !== false; + this._cacheHeuristic = + undefined !== cacheHeuristic ? cacheHeuristic : 0.1; // 10% matches IE + this._immutableMinTtl = + undefined !== immutableMinTimeToLive + ? immutableMinTimeToLive + : 24 * 3600 * 1000; + + this._status = 'status' in res ? res.status : 200; + this._resHeaders = res.headers; + this._rescc = parseCacheControl(res.headers['cache-control']); + this._method = 'method' in req ? req.method : 'GET'; + this._url = req.url; + this._host = req.headers.host; + this._noAuthorization = !req.headers.authorization; + this._reqHeaders = res.headers.vary ? req.headers : null; // Don't keep all request headers if they won't be used + this._reqcc = parseCacheControl(req.headers['cache-control']); + + // Assume that if someone uses legacy, non-standard uncecessary options they don't understand caching, + // so there's no point stricly adhering to the blindly copy&pasted directives. + if ( + ignoreCargoCult && + 'pre-check' in this._rescc && + 'post-check' in this._rescc + ) { + delete this._rescc['pre-check']; + delete this._rescc['post-check']; + delete this._rescc['no-cache']; + delete this._rescc['no-store']; + delete this._rescc['must-revalidate']; + this._resHeaders = Object.assign({}, this._resHeaders, { + 'cache-control': formatCacheControl(this._rescc), + }); + delete this._resHeaders.expires; + delete this._resHeaders.pragma; + } + + // When the Cache-Control header field is not present in a request, caches MUST consider the no-cache request pragma-directive + // as having the same effect as if "Cache-Control: no-cache" were present (see Section 5.2.1). + if ( + res.headers['cache-control'] == null && + /no-cache/.test(res.headers.pragma) + ) { + this._rescc['no-cache'] = true; + } + } + + now() { + return Date.now(); + } + + storable() { + // The "no-store" request directive indicates that a cache MUST NOT store any part of either this request or any response to it. 
+ return !!( + !this._reqcc['no-store'] && + // A cache MUST NOT store a response to any request, unless: + // The request method is understood by the cache and defined as being cacheable, and + ('GET' === this._method || + 'HEAD' === this._method || + ('POST' === this._method && this._hasExplicitExpiration())) && + // the response status code is understood by the cache, and + understoodStatuses.has(this._status) && + // the "no-store" cache directive does not appear in request or response header fields, and + !this._rescc['no-store'] && + // the "private" response directive does not appear in the response, if the cache is shared, and + (!this._isShared || !this._rescc.private) && + // the Authorization header field does not appear in the request, if the cache is shared, + (!this._isShared || + this._noAuthorization || + this._allowsStoringAuthenticated()) && + // the response either: + // contains an Expires header field, or + (this._resHeaders.expires || + // contains a max-age response directive, or + // contains a s-maxage response directive and the cache is shared, or + // contains a public response directive. + this._rescc['max-age'] || + (this._isShared && this._rescc['s-maxage']) || + this._rescc.public || + // has a status code that is defined as cacheable by default + statusCodeCacheableByDefault.has(this._status)) + ); + } + + _hasExplicitExpiration() { + // 4.2.1 Calculating Freshness Lifetime + return ( + (this._isShared && this._rescc['s-maxage']) || + this._rescc['max-age'] || + this._resHeaders.expires + ); + } + + _assertRequestHasHeaders(req) { + if (!req || !req.headers) { + throw Error('Request headers missing'); + } + } + + satisfiesWithoutRevalidation(req) { + this._assertRequestHasHeaders(req); + + // When presented with a request, a cache MUST NOT reuse a stored response, unless: + // the presented request does not contain the no-cache pragma (Section 5.4), nor the no-cache cache directive, + // unless the stored response is successfully validated (Section 4.3), and + const requestCC = parseCacheControl(req.headers['cache-control']); + if (requestCC['no-cache'] || /no-cache/.test(req.headers.pragma)) { + return false; + } + + if (requestCC['max-age'] && this.age() > requestCC['max-age']) { + return false; + } + + if ( + requestCC['min-fresh'] && + this.timeToLive() < 1000 * requestCC['min-fresh'] + ) { + return false; + } + + // the stored response is either: + // fresh, or allowed to be served stale + if (this.stale()) { + const allowsStale = + requestCC['max-stale'] && + !this._rescc['must-revalidate'] && + (true === requestCC['max-stale'] || + requestCC['max-stale'] > this.age() - this.maxAge()); + if (!allowsStale) { + return false; + } + } + + return this._requestMatches(req, false); + } + + _requestMatches(req, allowHeadMethod) { + // The presented effective request URI and that of the stored response match, and + return ( + (!this._url || this._url === req.url) && + this._host === req.headers.host && + // the request method associated with the stored response allows it to be used for the presented request, and + (!req.method || + this._method === req.method || + (allowHeadMethod && 'HEAD' === req.method)) && + // selecting header fields nominated by the stored response (if any) match those presented, and + this._varyMatches(req) + ); + } + + _allowsStoringAuthenticated() { + // following Cache-Control response directives (Section 5.2.2) have such an effect: must-revalidate, public, and s-maxage. 
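// Editorial sketch (not part of the upstream diff): typical use of this bundled
// CachePolicy class (http-cache-semantics). Header names are assumed to be
// lower-cased, matching how the class reads `headers['cache-control']`:
//   const req = { method: 'GET', url: '/data', headers: { host: 'example.com' } };
//   const res = { status: 200, headers: { 'cache-control': 'public, max-age=300' } };
//   const policy = new CachePolicy(req, res);
//   policy.storable();                        // true (cacheable method, status and directives)
//   policy.satisfiesWithoutRevalidation(req); // true while the response is still fresh
//   policy.timeToLive();                      // roughly 300000 ms right after the response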
+ return ( + this._rescc['must-revalidate'] || + this._rescc.public || + this._rescc['s-maxage'] + ); + } + + _varyMatches(req) { + if (!this._resHeaders.vary) { + return true; + } + + // A Vary header field-value of "*" always fails to match + if (this._resHeaders.vary === '*') { + return false; + } + + const fields = this._resHeaders.vary + .trim() + .toLowerCase() + .split(/\s*,\s*/); + for (const name of fields) { + if (req.headers[name] !== this._reqHeaders[name]) return false; + } + return true; + } + + _copyWithoutHopByHopHeaders(inHeaders) { + const headers = {}; + for (const name in inHeaders) { + if (hopByHopHeaders[name]) continue; + headers[name] = inHeaders[name]; + } + // 9.1. Connection + if (inHeaders.connection) { + const tokens = inHeaders.connection.trim().split(/\s*,\s*/); + for (const name of tokens) { + delete headers[name]; + } + } + if (headers.warning) { + const warnings = headers.warning.split(/,/).filter(warning => { + return !/^\s*1[0-9][0-9]/.test(warning); + }); + if (!warnings.length) { + delete headers.warning; + } else { + headers.warning = warnings.join(',').trim(); + } + } + return headers; + } + + responseHeaders() { + const headers = this._copyWithoutHopByHopHeaders(this._resHeaders); + const age = this.age(); + + // A cache SHOULD generate 113 warning if it heuristically chose a freshness + // lifetime greater than 24 hours and the response's age is greater than 24 hours. + if ( + age > 3600 * 24 && + !this._hasExplicitExpiration() && + this.maxAge() > 3600 * 24 + ) { + headers.warning = + (headers.warning ? `${headers.warning}, ` : '') + + '113 - "rfc7234 5.5.4"'; + } + headers.age = `${Math.round(age)}`; + headers.date = new Date(this.now()).toUTCString(); + return headers; + } + + /** + * Value of the Date response header or current time if Date was invalid + * @return timestamp + */ + date() { + const serverDate = Date.parse(this._resHeaders.date); + if (isFinite(serverDate)) { + return serverDate; + } + return this._responseTime; + } + + /** + * Value of the Age header, in seconds, updated for the current time. + * May be fractional. + * + * @return Number + */ + age() { + let age = this._ageValue(); + + const residentTime = (this.now() - this._responseTime) / 1000; + return age + residentTime; + } + + _ageValue() { + return toNumberOrZero(this._resHeaders.age); + } + + /** + * Value of applicable max-age (or heuristic equivalent) in seconds. This counts since response's `Date`. + * + * For an up-to-date value, see `timeToLive()`. + * + * @return Number + */ + maxAge() { + if (!this.storable() || this._rescc['no-cache']) { + return 0; + } + + // Shared responses with cookies are cacheable according to the RFC, but IMHO it'd be unwise to do so by default + // so this implementation requires explicit opt-in via public header + if ( + this._isShared && + (this._resHeaders['set-cookie'] && + !this._rescc.public && + !this._rescc.immutable) + ) { + return 0; + } + + if (this._resHeaders.vary === '*') { + return 0; + } + + if (this._isShared) { + if (this._rescc['proxy-revalidate']) { + return 0; + } + // if a response includes the s-maxage directive, a shared cache recipient MUST ignore the Expires field. + if (this._rescc['s-maxage']) { + return toNumberOrZero(this._rescc['s-maxage']); + } + } + + // If a response includes a Cache-Control field with the max-age directive, a recipient MUST ignore the Expires field. + if (this._rescc['max-age']) { + return toNumberOrZero(this._rescc['max-age']); + } + + const defaultMinTtl = this._rescc.immutable ? 
this._immutableMinTtl : 0; + + const serverDate = this.date(); + if (this._resHeaders.expires) { + const expires = Date.parse(this._resHeaders.expires); + // A cache recipient MUST interpret invalid date formats, especially the value "0", as representing a time in the past (i.e., "already expired"). + if (Number.isNaN(expires) || expires < serverDate) { + return 0; + } + return Math.max(defaultMinTtl, (expires - serverDate) / 1000); + } + + if (this._resHeaders['last-modified']) { + const lastModified = Date.parse(this._resHeaders['last-modified']); + if (isFinite(lastModified) && serverDate > lastModified) { + return Math.max( + defaultMinTtl, + ((serverDate - lastModified) / 1000) * this._cacheHeuristic + ); + } + } + + return defaultMinTtl; + } + + timeToLive() { + const age = this.maxAge() - this.age(); + const staleIfErrorAge = age + toNumberOrZero(this._rescc['stale-if-error']); + const staleWhileRevalidateAge = age + toNumberOrZero(this._rescc['stale-while-revalidate']); + return Math.max(0, age, staleIfErrorAge, staleWhileRevalidateAge) * 1000; + } + + stale() { + return this.maxAge() <= this.age(); + } + + _useStaleIfError() { + return this.maxAge() + toNumberOrZero(this._rescc['stale-if-error']) > this.age(); + } + + useStaleWhileRevalidate() { + return this.maxAge() + toNumberOrZero(this._rescc['stale-while-revalidate']) > this.age(); + } + + static fromObject(obj) { + return new this(undefined, undefined, { _fromObject: obj }); + } + + _fromObject(obj) { + if (this._responseTime) throw Error('Reinitialized'); + if (!obj || obj.v !== 1) throw Error('Invalid serialization'); + + this._responseTime = obj.t; + this._isShared = obj.sh; + this._cacheHeuristic = obj.ch; + this._immutableMinTtl = + obj.imm !== undefined ? obj.imm : 24 * 3600 * 1000; + this._status = obj.st; + this._resHeaders = obj.resh; + this._rescc = obj.rescc; + this._method = obj.m; + this._url = obj.u; + this._host = obj.h; + this._noAuthorization = obj.a; + this._reqHeaders = obj.reqh; + this._reqcc = obj.reqcc; + } + + toObject() { + return { + v: 1, + t: this._responseTime, + sh: this._isShared, + ch: this._cacheHeuristic, + imm: this._immutableMinTtl, + st: this._status, + resh: this._resHeaders, + rescc: this._rescc, + m: this._method, + u: this._url, + h: this._host, + a: this._noAuthorization, + reqh: this._reqHeaders, + reqcc: this._reqcc, + }; + } + + /** + * Headers for sending to the origin server to revalidate stale response. + * Allows server to return 304 to allow reuse of the previous response. + * + * Hop by hop headers are always stripped. + * Revalidation headers may be added or removed, depending on request. + */ + revalidationHeaders(incomingReq) { + this._assertRequestHasHeaders(incomingReq); + const headers = this._copyWithoutHopByHopHeaders(incomingReq.headers); + + // This implementation does not understand range requests + delete headers['if-range']; + + if (!this._requestMatches(incomingReq, true) || !this.storable()) { + // revalidation allowed via HEAD + // not for the same resource, or wasn't allowed to be cached anyway + delete headers['if-none-match']; + delete headers['if-modified-since']; + return headers; + } + + /* MUST send that entity-tag in any cache validation request (using If-Match or If-None-Match) if an entity-tag has been provided by the origin server. */ + if (this._resHeaders.etag) { + headers['if-none-match'] = headers['if-none-match'] + ? 
`${headers['if-none-match']}, ${this._resHeaders.etag}` + : this._resHeaders.etag; + } + + // Clients MAY issue simple (non-subrange) GET requests with either weak validators or strong validators. Clients MUST NOT use weak validators in other forms of request. + const forbidsWeakValidators = + headers['accept-ranges'] || + headers['if-match'] || + headers['if-unmodified-since'] || + (this._method && this._method != 'GET'); + + /* SHOULD send the Last-Modified value in non-subrange cache validation requests (using If-Modified-Since) if only a Last-Modified value has been provided by the origin server. + Note: This implementation does not understand partial responses (206) */ + if (forbidsWeakValidators) { + delete headers['if-modified-since']; + + if (headers['if-none-match']) { + const etags = headers['if-none-match'] + .split(/,/) + .filter(etag => { + return !/^\s*W\//.test(etag); + }); + if (!etags.length) { + delete headers['if-none-match']; + } else { + headers['if-none-match'] = etags.join(',').trim(); + } + } + } else if ( + this._resHeaders['last-modified'] && + !headers['if-modified-since'] + ) { + headers['if-modified-since'] = this._resHeaders['last-modified']; + } + + return headers; + } + + /** + * Creates new CachePolicy with information combined from the previews response, + * and the new revalidation response. + * + * Returns {policy, modified} where modified is a boolean indicating + * whether the response body has been modified, and old cached body can't be used. + * + * @return {Object} {policy: CachePolicy, modified: Boolean} + */ + revalidatedPolicy(request, response) { + this._assertRequestHasHeaders(request); + if(this._useStaleIfError() && isErrorResponse(response)) { // I consider the revalidation request unsuccessful + return { + modified: false, + matches: false, + policy: this, + }; + } + if (!response || !response.headers) { + throw Error('Response headers missing'); + } + + // These aren't going to be supported exactly, since one CachePolicy object + // doesn't know about all the other cached objects. + let matches = false; + if (response.status !== undefined && response.status != 304) { + matches = false; + } else if ( + response.headers.etag && + !/^\s*W\//.test(response.headers.etag) + ) { + // "All of the stored responses with the same strong validator are selected. + // If none of the stored responses contain the same strong validator, + // then the cache MUST NOT use the new response to update any stored responses." + matches = + this._resHeaders.etag && + this._resHeaders.etag.replace(/^\s*W\//, '') === + response.headers.etag; + } else if (this._resHeaders.etag && response.headers.etag) { + // "If the new response contains a weak validator and that validator corresponds + // to one of the cache's stored responses, + // then the most recent of those matching stored responses is selected for update." + matches = + this._resHeaders.etag.replace(/^\s*W\//, '') === + response.headers.etag.replace(/^\s*W\//, ''); + } else if (this._resHeaders['last-modified']) { + matches = + this._resHeaders['last-modified'] === + response.headers['last-modified']; + } else { + // If the new response does not include any form of validator (such as in the case where + // a client generates an If-Modified-Since request from a source other than the Last-Modified + // response header field), and there is only one stored response, and that stored response also + // lacks a validator, then that stored response is selected for update. 
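Once a stored response has gone stale, revalidationHeaders() above turns the incoming request into a conditional one. A small sketch under the same standalone-package assumption; `send()` is a hypothetical transport helper, not part of the bundle:

```js
// `policy` and `request` as in the earlier sketch; send() is a hypothetical
// function that performs the HTTP request and resolves with {status, headers}.
async function revalidate(policy, request, send) {
  const conditionalHeaders = policy.revalidationHeaders(request);
  // Hop-by-hop headers are stripped; If-None-Match / If-Modified-Since are
  // added when the stored response carried an ETag or Last-Modified value.
  return send({ ...request, headers: conditionalHeaders });
}
```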
+ if ( + !this._resHeaders.etag && + !this._resHeaders['last-modified'] && + !response.headers.etag && + !response.headers['last-modified'] + ) { + matches = true; + } + } + + if (!matches) { + return { + policy: new this.constructor(request, response), + // Client receiving 304 without body, even if it's invalid/mismatched has no option + // but to reuse a cached body. We don't have a good way to tell clients to do + // error recovery in such case. + modified: response.status != 304, + matches: false, + }; + } + + // use other header fields provided in the 304 (Not Modified) response to replace all instances + // of the corresponding header fields in the stored response. + const headers = {}; + for (const k in this._resHeaders) { + headers[k] = + k in response.headers && !excludedFromRevalidationUpdate[k] + ? response.headers[k] + : this._resHeaders[k]; + } + + const newResponse = Object.assign({}, response, { + status: this._status, + method: this._method, + headers, + }); + return { + policy: new this.constructor(request, newResponse, { + shared: this._isShared, + cacheHeuristic: this._cacheHeuristic, + immutableMinTimeToLive: this._immutableMinTtl, + }), + modified: false, + matches: true, + }; + } +}; + + +/***/ }), + +/***/ 9898: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +const EventEmitter = __nccwpck_require__(2361); +const tls = __nccwpck_require__(4404); +const http2 = __nccwpck_require__(5158); +const QuickLRU = __nccwpck_require__(9273); + +const kCurrentStreamsCount = Symbol('currentStreamsCount'); +const kRequest = Symbol('request'); +const kOriginSet = Symbol('cachedOriginSet'); +const kGracefullyClosing = Symbol('gracefullyClosing'); + +const nameKeys = [ + // `http2.connect()` options + 'maxDeflateDynamicTableSize', + 'maxSessionMemory', + 'maxHeaderListPairs', + 'maxOutstandingPings', + 'maxReservedRemoteStreams', + 'maxSendHeaderBlockLength', + 'paddingStrategy', + + // `tls.connect()` options + 'localAddress', + 'path', + 'rejectUnauthorized', + 'minDHSize', + + // `tls.createSecureContext()` options + 'ca', + 'cert', + 'clientCertEngine', + 'ciphers', + 'key', + 'pfx', + 'servername', + 'minVersion', + 'maxVersion', + 'secureProtocol', + 'crl', + 'honorCipherOrder', + 'ecdhCurve', + 'dhparam', + 'secureOptions', + 'sessionIdContext' +]; + +const getSortedIndex = (array, value, compare) => { + let low = 0; + let high = array.length; + + while (low < high) { + const mid = (low + high) >>> 1; + + /* istanbul ignore next */ + if (compare(array[mid], value)) { + // This never gets called because we use descending sort. Better to have this anyway. + low = mid + 1; + } else { + high = mid; + } + } + + return low; +}; + +const compareSessions = (a, b) => { + return a.remoteSettings.maxConcurrentStreams > b.remoteSettings.maxConcurrentStreams; +}; + +// See https://tools.ietf.org/html/rfc8336 +const closeCoveredSessions = (where, session) => { + // Clients SHOULD NOT emit new requests on any connection whose Origin + // Set is a proper subset of another connection's Origin Set, and they + // SHOULD close it once all outstanding requests are satisfied. + for (const coveredSession of where) { + if ( + // The set is a proper subset when its length is less than the other set. + coveredSession[kOriginSet].length < session[kOriginSet].length && + + // And the other set includes all elements of the subset. 
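Before moving on from the CachePolicy module above: toObject()/fromObject() let a policy be persisted as a flat object next to its cached body, and revalidatedPolicy() folds a 304 Not Modified back into it. A sketch continuing the earlier example, with the extra assumption that the stored response also carried the strong validator `etag: '"abc123"'`:

```js
// Persist a JSON-friendly snapshot of the policy alongside the cached body.
const snapshot = JSON.stringify(policy.toObject());

// Restore it later, e.g. after reading it back from disk or Redis.
const restored = CachePolicy.fromObject(JSON.parse(snapshot));

// Merge a 304 Not Modified received during revalidation. Because the stored
// response is assumed to carry the same strong ETag, `matches` is true and the
// refreshed headers replace the stored ones.
const { policy: updated, modified } = restored.revalidatedPolicy(request, {
  status: 304,
  headers: { etag: '"abc123"', date: new Date().toUTCString() },
});
// modified === false: keep serving the previously cached body, now with
// updated.responseHeaders().
```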
+ coveredSession[kOriginSet].every(origin => session[kOriginSet].includes(origin)) && + + // Makes sure that the session can handle all requests from the covered session. + coveredSession[kCurrentStreamsCount] + session[kCurrentStreamsCount] <= session.remoteSettings.maxConcurrentStreams + ) { + // This allows pending requests to finish and prevents making new requests. + gracefullyClose(coveredSession); + } + } +}; + +// This is basically inverted `closeCoveredSessions(...)`. +const closeSessionIfCovered = (where, coveredSession) => { + for (const session of where) { + if ( + coveredSession[kOriginSet].length < session[kOriginSet].length && + coveredSession[kOriginSet].every(origin => session[kOriginSet].includes(origin)) && + coveredSession[kCurrentStreamsCount] + session[kCurrentStreamsCount] <= session.remoteSettings.maxConcurrentStreams + ) { + gracefullyClose(coveredSession); + } + } +}; + +const getSessions = ({agent, isFree}) => { + const result = {}; + + // eslint-disable-next-line guard-for-in + for (const normalizedOptions in agent.sessions) { + const sessions = agent.sessions[normalizedOptions]; + + const filtered = sessions.filter(session => { + const result = session[Agent.kCurrentStreamsCount] < session.remoteSettings.maxConcurrentStreams; + + return isFree ? result : !result; + }); + + if (filtered.length !== 0) { + result[normalizedOptions] = filtered; + } + } + + return result; +}; + +const gracefullyClose = session => { + session[kGracefullyClosing] = true; + + if (session[kCurrentStreamsCount] === 0) { + session.close(); + } +}; + +class Agent extends EventEmitter { + constructor({timeout = 60000, maxSessions = Infinity, maxFreeSessions = 10, maxCachedTlsSessions = 100} = {}) { + super(); + + // A session is considered busy when its current streams count + // is equal to or greater than the `maxConcurrentStreams` value. + + // A session is considered free when its current streams count + // is less than the `maxConcurrentStreams` value. + + // SESSIONS[NORMALIZED_OPTIONS] = []; + this.sessions = {}; + + // The queue for creating new sessions. It looks like this: + // QUEUE[NORMALIZED_OPTIONS][NORMALIZED_ORIGIN] = ENTRY_FUNCTION + // + // The entry function has `listeners`, `completed` and `destroyed` properties. + // `listeners` is an array of objects containing `resolve` and `reject` functions. + // `completed` is a boolean. It's set to true after ENTRY_FUNCTION is executed. + // `destroyed` is a boolean. If it's set to true, the session will be destroyed if hasn't connected yet. + this.queue = {}; + + // Each session will use this timeout value. + this.timeout = timeout; + + // Max sessions in total + this.maxSessions = maxSessions; + + // Max free sessions in total + // TODO: decreasing `maxFreeSessions` should close some sessions + this.maxFreeSessions = maxFreeSessions; + + this._freeSessionsCount = 0; + this._sessionsCount = 0; + + // We don't support push streams by default. + this.settings = { + enablePush: false + }; + + // Reusing TLS sessions increases performance. 
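closeCoveredSessions() and closeSessionIfCovered() above apply the RFC 8336 guidance that a connection whose Origin Set is a proper subset of another connection's should be drained once the larger one can absorb its traffic. The same test on plain data, with invented origin sets and counts:

```js
// Two hypothetical sessions reduced to the fields the check reads.
const small = { originSet: ['https://api.example.com'], streams: 1 };
const big = {
  originSet: ['https://api.example.com', 'https://img.example.com'],
  streams: 2,
  maxConcurrentStreams: 100,
};

const isProperSubset =
  small.originSet.length < big.originSet.length &&
  small.originSet.every(origin => big.originSet.includes(origin));

const bigCanAbsorbSmall = small.streams + big.streams <= big.maxConcurrentStreams;

if (isProperSubset && bigCanAbsorbSmall) {
  // The agent would gracefullyClose(small): let its in-flight streams finish,
  // refuse new ones, and route future requests over `big`.
}
```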
+ this.tlsSessionCache = new QuickLRU({maxSize: maxCachedTlsSessions}); + } + + static normalizeOrigin(url, servername) { + if (typeof url === 'string') { + url = new URL(url); + } + + if (servername && url.hostname !== servername) { + url.hostname = servername; + } + + return url.origin; + } + + normalizeOptions(options) { + let normalized = ''; + + if (options) { + for (const key of nameKeys) { + if (options[key]) { + normalized += `:${options[key]}`; + } + } + } + + return normalized; + } + + _tryToCreateNewSession(normalizedOptions, normalizedOrigin) { + if (!(normalizedOptions in this.queue) || !(normalizedOrigin in this.queue[normalizedOptions])) { + return; + } + + const item = this.queue[normalizedOptions][normalizedOrigin]; + + // The entry function can be run only once. + // BUG: The session may be never created when: + // - the first condition is false AND + // - this function is never called with the same arguments in the future. + if (this._sessionsCount < this.maxSessions && !item.completed) { + item.completed = true; + + item(); + } + } + + getSession(origin, options, listeners) { + return new Promise((resolve, reject) => { + if (Array.isArray(listeners)) { + listeners = [...listeners]; + + // Resolve the current promise ASAP, we're just moving the listeners. + // They will be executed at a different time. + resolve(); + } else { + listeners = [{resolve, reject}]; + } + + const normalizedOptions = this.normalizeOptions(options); + const normalizedOrigin = Agent.normalizeOrigin(origin, options && options.servername); + + if (normalizedOrigin === undefined) { + for (const {reject} of listeners) { + reject(new TypeError('The `origin` argument needs to be a string or an URL object')); + } + + return; + } + + if (normalizedOptions in this.sessions) { + const sessions = this.sessions[normalizedOptions]; + + let maxConcurrentStreams = -1; + let currentStreamsCount = -1; + let optimalSession; + + // We could just do this.sessions[normalizedOptions].find(...) but that isn't optimal. + // Additionally, we are looking for session which has biggest current pending streams count. + for (const session of sessions) { + const sessionMaxConcurrentStreams = session.remoteSettings.maxConcurrentStreams; + + if (sessionMaxConcurrentStreams < maxConcurrentStreams) { + break; + } + + if (session[kOriginSet].includes(normalizedOrigin)) { + const sessionCurrentStreamsCount = session[kCurrentStreamsCount]; + + if ( + sessionCurrentStreamsCount >= sessionMaxConcurrentStreams || + session[kGracefullyClosing] || + // Unfortunately the `close` event isn't called immediately, + // so `session.destroyed` is `true`, but `session.closed` is `false`. + session.destroyed + ) { + continue; + } + + // We only need set this once. + if (!optimalSession) { + maxConcurrentStreams = sessionMaxConcurrentStreams; + } + + // We're looking for the session which has biggest current pending stream count, + // in order to minimalize the amount of active sessions. 
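For callers, the Agent above is configured once and then asked for streams; its request() convenience method appears further below. A usage sketch assuming the standalone `http2-wrapper` package, with an illustrative URL and option values:

```js
const { Agent } = require('http2-wrapper');

const agent = new Agent({
  timeout: 30000,      // destroy sessions after 30 s of inactivity
  maxFreeSessions: 5,  // keep at most five idle sessions in total
});

(async () => {
  // request() resolves with a raw HTTP/2 stream once a session is available.
  const stream = await agent.request(
    'https://example.com',
    {},                                  // per-session connect options (see nameKeys above)
    { ':method': 'GET', ':path': '/' }   // request headers
  );
  stream.end();
  stream.on('response', headers => console.log(headers[':status']));
})();
```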
+ if (sessionCurrentStreamsCount > currentStreamsCount) { + optimalSession = session; + currentStreamsCount = sessionCurrentStreamsCount; + } + } + } + + if (optimalSession) { + /* istanbul ignore next: safety check */ + if (listeners.length !== 1) { + for (const {reject} of listeners) { + const error = new Error( + `Expected the length of listeners to be 1, got ${listeners.length}.\n` + + 'Please report this to https://github.com/szmarczak/http2-wrapper/' + ); + + reject(error); + } + + return; + } + + listeners[0].resolve(optimalSession); + return; + } + } + + if (normalizedOptions in this.queue) { + if (normalizedOrigin in this.queue[normalizedOptions]) { + // There's already an item in the queue, just attach ourselves to it. + this.queue[normalizedOptions][normalizedOrigin].listeners.push(...listeners); + + // This shouldn't be executed here. + // See the comment inside _tryToCreateNewSession. + this._tryToCreateNewSession(normalizedOptions, normalizedOrigin); + return; + } + } else { + this.queue[normalizedOptions] = {}; + } + + // The entry must be removed from the queue IMMEDIATELY when: + // 1. the session connects successfully, + // 2. an error occurs. + const removeFromQueue = () => { + // Our entry can be replaced. We cannot remove the new one. + if (normalizedOptions in this.queue && this.queue[normalizedOptions][normalizedOrigin] === entry) { + delete this.queue[normalizedOptions][normalizedOrigin]; + + if (Object.keys(this.queue[normalizedOptions]).length === 0) { + delete this.queue[normalizedOptions]; + } + } + }; + + // The main logic is here + const entry = () => { + const name = `${normalizedOrigin}:${normalizedOptions}`; + let receivedSettings = false; + + try { + const session = http2.connect(origin, { + createConnection: this.createConnection, + settings: this.settings, + session: this.tlsSessionCache.get(name), + ...options + }); + session[kCurrentStreamsCount] = 0; + session[kGracefullyClosing] = false; + + const isFree = () => session[kCurrentStreamsCount] < session.remoteSettings.maxConcurrentStreams; + let wasFree = true; + + session.socket.once('session', tlsSession => { + this.tlsSessionCache.set(name, tlsSession); + }); + + session.once('error', error => { + // Listeners are empty when the session successfully connected. + for (const {reject} of listeners) { + reject(error); + } + + // The connection got broken, purge the cache. + this.tlsSessionCache.delete(name); + }); + + session.setTimeout(this.timeout, () => { + // Terminates all streams owned by this session. + // TODO: Maybe the streams should have a "Session timed out" error? + session.destroy(); + }); + + session.once('close', () => { + if (receivedSettings) { + // 1. If it wasn't free then no need to decrease because + // it has been decreased already in session.request(). + // 2. `stream.once('close')` won't increment the count + // because the session is already closed. + if (wasFree) { + this._freeSessionsCount--; + } + + this._sessionsCount--; + + // This cannot be moved to the stream logic, + // because there may be a session that hadn't made a single request. 
+ const where = this.sessions[normalizedOptions]; + where.splice(where.indexOf(session), 1); + + if (where.length === 0) { + delete this.sessions[normalizedOptions]; + } + } else { + // Broken connection + const error = new Error('Session closed without receiving a SETTINGS frame'); + error.code = 'HTTP2WRAPPER_NOSETTINGS'; + + for (const {reject} of listeners) { + reject(error); + } + + removeFromQueue(); + } + + // There may be another session awaiting. + this._tryToCreateNewSession(normalizedOptions, normalizedOrigin); + }); + + // Iterates over the queue and processes listeners. + const processListeners = () => { + if (!(normalizedOptions in this.queue) || !isFree()) { + return; + } + + for (const origin of session[kOriginSet]) { + if (origin in this.queue[normalizedOptions]) { + const {listeners} = this.queue[normalizedOptions][origin]; + + // Prevents session overloading. + while (listeners.length !== 0 && isFree()) { + // We assume `resolve(...)` calls `request(...)` *directly*, + // otherwise the session will get overloaded. + listeners.shift().resolve(session); + } + + const where = this.queue[normalizedOptions]; + if (where[origin].listeners.length === 0) { + delete where[origin]; + + if (Object.keys(where).length === 0) { + delete this.queue[normalizedOptions]; + break; + } + } + + // We're no longer free, no point in continuing. + if (!isFree()) { + break; + } + } + } + }; + + // The Origin Set cannot shrink. No need to check if it suddenly became covered by another one. + session.on('origin', () => { + session[kOriginSet] = session.originSet; + + if (!isFree()) { + // The session is full. + return; + } + + processListeners(); + + // Close covered sessions (if possible). + closeCoveredSessions(this.sessions[normalizedOptions], session); + }); + + session.once('remoteSettings', () => { + // Fix Node.js bug preventing the process from exiting + session.ref(); + session.unref(); + + this._sessionsCount++; + + // The Agent could have been destroyed already. + if (entry.destroyed) { + const error = new Error('Agent has been destroyed'); + + for (const listener of listeners) { + listener.reject(error); + } + + session.destroy(); + return; + } + + session[kOriginSet] = session.originSet; + + { + const where = this.sessions; + + if (normalizedOptions in where) { + const sessions = where[normalizedOptions]; + sessions.splice(getSortedIndex(sessions, session, compareSessions), 0, session); + } else { + where[normalizedOptions] = [session]; + } + } + + this._freeSessionsCount += 1; + receivedSettings = true; + + this.emit('session', session); + + processListeners(); + removeFromQueue(); + + // TODO: Close last recently used (or least used?) session + if (session[kCurrentStreamsCount] === 0 && this._freeSessionsCount > this.maxFreeSessions) { + session.close(); + } + + // Check if we haven't managed to execute all listeners. + if (listeners.length !== 0) { + // Request for a new session with predefined listeners. + this.getSession(normalizedOrigin, options, listeners); + listeners.length = 0; + } + + // `session.remoteSettings.maxConcurrentStreams` might get increased + session.on('remoteSettings', () => { + processListeners(); + + // In case the Origin Set changes + closeCoveredSessions(this.sessions[normalizedOptions], session); + }); + }); + + // Shim `session.request()` in order to catch all streams + session[kRequest] = session.request; + session.request = (headers, streamOptions) => { + if (session[kGracefullyClosing]) { + throw new Error('The session is gracefully closing. 
No new streams are allowed.'); + } + + const stream = session[kRequest](headers, streamOptions); + + // The process won't exit until the session is closed or all requests are gone. + session.ref(); + + ++session[kCurrentStreamsCount]; + + if (session[kCurrentStreamsCount] === session.remoteSettings.maxConcurrentStreams) { + this._freeSessionsCount--; + } + + stream.once('close', () => { + wasFree = isFree(); + + --session[kCurrentStreamsCount]; + + if (!session.destroyed && !session.closed) { + closeSessionIfCovered(this.sessions[normalizedOptions], session); + + if (isFree() && !session.closed) { + if (!wasFree) { + this._freeSessionsCount++; + + wasFree = true; + } + + const isEmpty = session[kCurrentStreamsCount] === 0; + + if (isEmpty) { + session.unref(); + } + + if ( + isEmpty && + ( + this._freeSessionsCount > this.maxFreeSessions || + session[kGracefullyClosing] + ) + ) { + session.close(); + } else { + closeCoveredSessions(this.sessions[normalizedOptions], session); + processListeners(); + } + } + } + }); + + return stream; + }; + } catch (error) { + for (const listener of listeners) { + listener.reject(error); + } + + removeFromQueue(); + } + }; + + entry.listeners = listeners; + entry.completed = false; + entry.destroyed = false; + + this.queue[normalizedOptions][normalizedOrigin] = entry; + this._tryToCreateNewSession(normalizedOptions, normalizedOrigin); + }); + } + + request(origin, options, headers, streamOptions) { + return new Promise((resolve, reject) => { + this.getSession(origin, options, [{ + reject, + resolve: session => { + try { + resolve(session.request(headers, streamOptions)); + } catch (error) { + reject(error); + } + } + }]); + }); + } + + createConnection(origin, options) { + return Agent.connect(origin, options); + } + + static connect(origin, options) { + options.ALPNProtocols = ['h2']; + + const port = origin.port || 443; + const host = origin.hostname || origin.host; + + if (typeof options.servername === 'undefined') { + options.servername = host; + } + + return tls.connect(port, host, options); + } + + closeFreeSessions() { + for (const sessions of Object.values(this.sessions)) { + for (const session of sessions) { + if (session[kCurrentStreamsCount] === 0) { + session.close(); + } + } + } + } + + destroy(reason) { + for (const sessions of Object.values(this.sessions)) { + for (const session of sessions) { + session.destroy(reason); + } + } + + for (const entriesOfAuthority of Object.values(this.queue)) { + for (const entry of Object.values(entriesOfAuthority)) { + entry.destroyed = true; + } + } + + // New requests should NOT attach to destroyed sessions + this.queue = {}; + } + + get freeSessions() { + return getSessions({agent: this, isFree: true}); + } + + get busySessions() { + return getSessions({agent: this, isFree: false}); + } +} + +Agent.kCurrentStreamsCount = kCurrentStreamsCount; +Agent.kGracefullyClosing = kGracefullyClosing; + +module.exports = { + Agent, + globalAgent: new Agent() +}; + + +/***/ }), + +/***/ 7167: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +const http = __nccwpck_require__(3685); +const https = __nccwpck_require__(5687); +const resolveALPN = __nccwpck_require__(6624); +const QuickLRU = __nccwpck_require__(9273); +const Http2ClientRequest = __nccwpck_require__(9632); +const calculateServerName = __nccwpck_require__(1982); +const urlToOptions = __nccwpck_require__(2686); + +const cache = new QuickLRU({maxSize: 100}); +const queue = new Map(); + +const installSocket = (agent, 
socket, options) => { + socket._httpMessage = {shouldKeepAlive: true}; + + const onFree = () => { + agent.emit('free', socket, options); + }; + + socket.on('free', onFree); + + const onClose = () => { + agent.removeSocket(socket, options); + }; + + socket.on('close', onClose); + + const onRemove = () => { + agent.removeSocket(socket, options); + socket.off('close', onClose); + socket.off('free', onFree); + socket.off('agentRemove', onRemove); + }; + + socket.on('agentRemove', onRemove); + + agent.emit('free', socket, options); +}; + +const resolveProtocol = async options => { + const name = `${options.host}:${options.port}:${options.ALPNProtocols.sort()}`; + + if (!cache.has(name)) { + if (queue.has(name)) { + const result = await queue.get(name); + return result.alpnProtocol; + } + + const {path, agent} = options; + options.path = options.socketPath; + + const resultPromise = resolveALPN(options); + queue.set(name, resultPromise); + + try { + const {socket, alpnProtocol} = await resultPromise; + cache.set(name, alpnProtocol); + + options.path = path; + + if (alpnProtocol === 'h2') { + // https://github.com/nodejs/node/issues/33343 + socket.destroy(); + } else { + const {globalAgent} = https; + const defaultCreateConnection = https.Agent.prototype.createConnection; + + if (agent) { + if (agent.createConnection === defaultCreateConnection) { + installSocket(agent, socket, options); + } else { + socket.destroy(); + } + } else if (globalAgent.createConnection === defaultCreateConnection) { + installSocket(globalAgent, socket, options); + } else { + socket.destroy(); + } + } + + queue.delete(name); + + return alpnProtocol; + } catch (error) { + queue.delete(name); + + throw error; + } + } + + return cache.get(name); +}; + +module.exports = async (input, options, callback) => { + if (typeof input === 'string' || input instanceof URL) { + input = urlToOptions(new URL(input)); + } + + if (typeof options === 'function') { + callback = options; + options = undefined; + } + + options = { + ALPNProtocols: ['h2', 'http/1.1'], + ...input, + ...options, + resolveSocket: true + }; + + if (!Array.isArray(options.ALPNProtocols) || options.ALPNProtocols.length === 0) { + throw new Error('The `ALPNProtocols` option must be an Array with at least one entry'); + } + + options.protocol = options.protocol || 'https:'; + const isHttps = options.protocol === 'https:'; + + options.host = options.hostname || options.host || 'localhost'; + options.session = options.tlsSession; + options.servername = options.servername || calculateServerName(options); + options.port = options.port || (isHttps ? 443 : 80); + options._defaultAgent = isHttps ? https.globalAgent : http.globalAgent; + + const agents = options.agent; + + if (agents) { + if (agents.addRequest) { + throw new Error('The `options.agent` object can contain only `http`, `https` or `http2` properties'); + } + + options.agent = agents[isHttps ? 
'https' : 'http']; + } + + if (isHttps) { + const protocol = await resolveProtocol(options); + + if (protocol === 'h2') { + if (agents) { + options.agent = agents.http2; + } + + return new Http2ClientRequest(options, callback); + } + } + + return http.request(options, callback); +}; + +module.exports.protocolCache = cache; + + +/***/ }), + +/***/ 9632: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +const http2 = __nccwpck_require__(5158); +const {Writable} = __nccwpck_require__(2781); +const {Agent, globalAgent} = __nccwpck_require__(9898); +const IncomingMessage = __nccwpck_require__(2575); +const urlToOptions = __nccwpck_require__(2686); +const proxyEvents = __nccwpck_require__(1818); +const isRequestPseudoHeader = __nccwpck_require__(1199); +const { + ERR_INVALID_ARG_TYPE, + ERR_INVALID_PROTOCOL, + ERR_HTTP_HEADERS_SENT, + ERR_INVALID_HTTP_TOKEN, + ERR_HTTP_INVALID_HEADER_VALUE, + ERR_INVALID_CHAR +} = __nccwpck_require__(7087); + +const { + HTTP2_HEADER_STATUS, + HTTP2_HEADER_METHOD, + HTTP2_HEADER_PATH, + HTTP2_METHOD_CONNECT +} = http2.constants; + +const kHeaders = Symbol('headers'); +const kOrigin = Symbol('origin'); +const kSession = Symbol('session'); +const kOptions = Symbol('options'); +const kFlushedHeaders = Symbol('flushedHeaders'); +const kJobs = Symbol('jobs'); + +const isValidHttpToken = /^[\^`\-\w!#$%&*+.|~]+$/; +const isInvalidHeaderValue = /[^\t\u0020-\u007E\u0080-\u00FF]/; + +class ClientRequest extends Writable { + constructor(input, options, callback) { + super({ + autoDestroy: false + }); + + const hasInput = typeof input === 'string' || input instanceof URL; + if (hasInput) { + input = urlToOptions(input instanceof URL ? input : new URL(input)); + } + + if (typeof options === 'function' || options === undefined) { + // (options, callback) + callback = options; + options = hasInput ? input : {...input}; + } else { + // (input, options, callback) + options = {...input, ...options}; + } + + if (options.h2session) { + this[kSession] = options.h2session; + } else if (options.agent === false) { + this.agent = new Agent({maxFreeSessions: 0}); + } else if (typeof options.agent === 'undefined' || options.agent === null) { + if (typeof options.createConnection === 'function') { + // This is a workaround - we don't have to create the session on our own. + this.agent = new Agent({maxFreeSessions: 0}); + this.agent.createConnection = options.createConnection; + } else { + this.agent = globalAgent; + } + } else if (typeof options.agent.request === 'function') { + this.agent = options.agent; + } else { + throw new ERR_INVALID_ARG_TYPE('options.agent', ['Agent-like Object', 'undefined', 'false'], options.agent); + } + + if (options.protocol && options.protocol !== 'https:') { + throw new ERR_INVALID_PROTOCOL(options.protocol, 'https:'); + } + + const port = options.port || options.defaultPort || (this.agent && this.agent.defaultPort) || 443; + const host = options.hostname || options.host || 'localhost'; + + // Don't enforce the origin via options. It may be changed in an Agent. 
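The module that ends just above (webpack id 7167, exported as `auto` by this bundle's http2-wrapper copy) negotiates ALPN once per host, caches the result in `protocolCache`, and hands back either an HTTP/1.1 or an HTTP/2 request object. A usage sketch with an illustrative URL, assuming the standalone package:

```js
const http2wrapper = require('http2-wrapper');

(async () => {
  const request = await http2wrapper.auto('https://example.com', {
    method: 'GET',
  }, response => {
    // '2.0' when ALPN selected h2, '1.1' when it fell back to https.request().
    console.log('negotiated HTTP version:', response.httpVersion);
    response.resume();
  });

  request.once('error', console.error);
  request.end();
})();
```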
+ delete options.hostname; + delete options.host; + delete options.port; + + const {timeout} = options; + options.timeout = undefined; + + this[kHeaders] = Object.create(null); + this[kJobs] = []; + + this.socket = null; + this.connection = null; + + this.method = options.method || 'GET'; + this.path = options.path; + + this.res = null; + this.aborted = false; + this.reusedSocket = false; + + if (options.headers) { + for (const [header, value] of Object.entries(options.headers)) { + this.setHeader(header, value); + } + } + + if (options.auth && !('authorization' in this[kHeaders])) { + this[kHeaders].authorization = 'Basic ' + Buffer.from(options.auth).toString('base64'); + } + + options.session = options.tlsSession; + options.path = options.socketPath; + + this[kOptions] = options; + + // Clients that generate HTTP/2 requests directly SHOULD use the :authority pseudo-header field instead of the Host header field. + if (port === 443) { + this[kOrigin] = `https://${host}`; + + if (!(':authority' in this[kHeaders])) { + this[kHeaders][':authority'] = host; + } + } else { + this[kOrigin] = `https://${host}:${port}`; + + if (!(':authority' in this[kHeaders])) { + this[kHeaders][':authority'] = `${host}:${port}`; + } + } + + if (timeout) { + this.setTimeout(timeout); + } + + if (callback) { + this.once('response', callback); + } + + this[kFlushedHeaders] = false; + } + + get method() { + return this[kHeaders][HTTP2_HEADER_METHOD]; + } + + set method(value) { + if (value) { + this[kHeaders][HTTP2_HEADER_METHOD] = value.toUpperCase(); + } + } + + get path() { + return this[kHeaders][HTTP2_HEADER_PATH]; + } + + set path(value) { + if (value) { + this[kHeaders][HTTP2_HEADER_PATH] = value; + } + } + + get _mustNotHaveABody() { + return this.method === 'GET' || this.method === 'HEAD' || this.method === 'DELETE'; + } + + _write(chunk, encoding, callback) { + // https://github.com/nodejs/node/blob/654df09ae0c5e17d1b52a900a545f0664d8c7627/lib/internal/http2/util.js#L148-L156 + if (this._mustNotHaveABody) { + callback(new Error('The GET, HEAD and DELETE methods must NOT have a body')); + /* istanbul ignore next: Node.js 12 throws directly */ + return; + } + + this.flushHeaders(); + + const callWrite = () => this._request.write(chunk, encoding, callback); + if (this._request) { + callWrite(); + } else { + this[kJobs].push(callWrite); + } + } + + _final(callback) { + if (this.destroyed) { + return; + } + + this.flushHeaders(); + + const callEnd = () => { + // For GET, HEAD and DELETE + if (this._mustNotHaveABody) { + callback(); + return; + } + + this._request.end(callback); + }; + + if (this._request) { + callEnd(); + } else { + this[kJobs].push(callEnd); + } + } + + abort() { + if (this.res && this.res.complete) { + return; + } + + if (!this.aborted) { + process.nextTick(() => this.emit('abort')); + } + + this.aborted = true; + + this.destroy(); + } + + _destroy(error, callback) { + if (this.res) { + this.res._dump(); + } + + if (this._request) { + this._request.destroy(); + } + + callback(error); + } + + async flushHeaders() { + if (this[kFlushedHeaders] || this.destroyed) { + return; + } + + this[kFlushedHeaders] = true; + + const isConnectMethod = this.method === HTTP2_METHOD_CONNECT; + + // The real magic is here + const onStream = stream => { + this._request = stream; + + if (this.destroyed) { + stream.destroy(); + return; + } + + // Forwards `timeout`, `continue`, `close` and `error` events to this instance. 
+ if (!isConnectMethod) { + proxyEvents(stream, this, ['timeout', 'continue', 'close', 'error']); + } + + // Wait for the `finish` event. We don't want to emit the `response` event + // before `request.end()` is called. + const waitForEnd = fn => { + return (...args) => { + if (!this.writable && !this.destroyed) { + fn(...args); + } else { + this.once('finish', () => { + fn(...args); + }); + } + }; + }; + + // This event tells we are ready to listen for the data. + stream.once('response', waitForEnd((headers, flags, rawHeaders) => { + // If we were to emit raw request stream, it would be as fast as the native approach. + // Note that wrapping the raw stream in a Proxy instance won't improve the performance (already tested it). + const response = new IncomingMessage(this.socket, stream.readableHighWaterMark); + this.res = response; + + response.req = this; + response.statusCode = headers[HTTP2_HEADER_STATUS]; + response.headers = headers; + response.rawHeaders = rawHeaders; + + response.once('end', () => { + if (this.aborted) { + response.aborted = true; + response.emit('aborted'); + } else { + response.complete = true; + + // Has no effect, just be consistent with the Node.js behavior + response.socket = null; + response.connection = null; + } + }); + + if (isConnectMethod) { + response.upgrade = true; + + // The HTTP1 API says the socket is detached here, + // but we can't do that so we pass the original HTTP2 request. + if (this.emit('connect', response, stream, Buffer.alloc(0))) { + this.emit('close'); + } else { + // No listeners attached, destroy the original request. + stream.destroy(); + } + } else { + // Forwards data + stream.on('data', chunk => { + if (!response._dumped && !response.push(chunk)) { + stream.pause(); + } + }); + + stream.once('end', () => { + response.push(null); + }); + + if (!this.emit('response', response)) { + // No listeners attached, dump the response. + response._dump(); + } + } + })); + + // Emits `information` event + stream.once('headers', waitForEnd( + headers => this.emit('information', {statusCode: headers[HTTP2_HEADER_STATUS]}) + )); + + stream.once('trailers', waitForEnd((trailers, flags, rawTrailers) => { + const {res} = this; + + // Assigns trailers to the response object. 
+ res.trailers = trailers; + res.rawTrailers = rawTrailers; + })); + + const {socket} = stream.session; + this.socket = socket; + this.connection = socket; + + for (const job of this[kJobs]) { + job(); + } + + this.emit('socket', this.socket); + }; + + // Makes a HTTP2 request + if (this[kSession]) { + try { + onStream(this[kSession].request(this[kHeaders])); + } catch (error) { + this.emit('error', error); + } + } else { + this.reusedSocket = true; + + try { + onStream(await this.agent.request(this[kOrigin], this[kOptions], this[kHeaders])); + } catch (error) { + this.emit('error', error); + } + } + } + + getHeader(name) { + if (typeof name !== 'string') { + throw new ERR_INVALID_ARG_TYPE('name', 'string', name); + } + + return this[kHeaders][name.toLowerCase()]; + } + + get headersSent() { + return this[kFlushedHeaders]; + } + + removeHeader(name) { + if (typeof name !== 'string') { + throw new ERR_INVALID_ARG_TYPE('name', 'string', name); + } + + if (this.headersSent) { + throw new ERR_HTTP_HEADERS_SENT('remove'); + } + + delete this[kHeaders][name.toLowerCase()]; + } + + setHeader(name, value) { + if (this.headersSent) { + throw new ERR_HTTP_HEADERS_SENT('set'); + } + + if (typeof name !== 'string' || (!isValidHttpToken.test(name) && !isRequestPseudoHeader(name))) { + throw new ERR_INVALID_HTTP_TOKEN('Header name', name); + } + + if (typeof value === 'undefined') { + throw new ERR_HTTP_INVALID_HEADER_VALUE(value, name); + } + + if (isInvalidHeaderValue.test(value)) { + throw new ERR_INVALID_CHAR('header content', name); + } + + this[kHeaders][name.toLowerCase()] = value; + } + + setNoDelay() { + // HTTP2 sockets cannot be malformed, do nothing. + } + + setSocketKeepAlive() { + // HTTP2 sockets cannot be malformed, do nothing. + } + + setTimeout(ms, callback) { + const applyTimeout = () => this._request.setTimeout(ms, callback); + + if (this._request) { + applyTimeout(); + } else { + this[kJobs].push(applyTimeout); + } + + return this; + } + + get maxHeadersCount() { + if (!this.destroyed && this._request) { + return this._request.session.localSettings.maxHeaderListSize; + } + + return undefined; + } + + set maxHeadersCount(_value) { + // Updating HTTP2 settings would affect all requests, do nothing. 
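The ClientRequest class above deliberately mirrors the core http.ClientRequest surface (setHeader()/getHeader(), flushHeaders(), 'response' and 'error' events) on top of an HTTP/2 stream, which is what lets callers swap it in transparently. A sketch using the request() helper exported a little further below; the URL, path, and body are illustrative:

```js
const http2wrapper = require('http2-wrapper');

const request = http2wrapper.request('https://example.com', {
  method: 'POST',
  path: '/upload',
}, response => {
  console.log(response.statusCode, response.headers['content-type']);
  response.resume();
});

// Header names and values are validated the same way core http does.
request.setHeader('content-type', 'application/json');
request.once('error', console.error);
request.end(JSON.stringify({ hello: 'world' }));
```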
+ } +} + +module.exports = ClientRequest; + + +/***/ }), + +/***/ 2575: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +const {Readable} = __nccwpck_require__(2781); + +class IncomingMessage extends Readable { + constructor(socket, highWaterMark) { + super({ + highWaterMark, + autoDestroy: false + }); + + this.statusCode = null; + this.statusMessage = ''; + this.httpVersion = '2.0'; + this.httpVersionMajor = 2; + this.httpVersionMinor = 0; + this.headers = {}; + this.trailers = {}; + this.req = null; + + this.aborted = false; + this.complete = false; + this.upgrade = null; + + this.rawHeaders = []; + this.rawTrailers = []; + + this.socket = socket; + this.connection = socket; + + this._dumped = false; + } + + _destroy(error) { + this.req._request.destroy(error); + } + + setTimeout(ms, callback) { + this.req.setTimeout(ms, callback); + return this; + } + + _dump() { + if (!this._dumped) { + this._dumped = true; + + this.removeAllListeners('data'); + this.resume(); + } + } + + _read() { + if (this.req) { + this.req._request.resume(); + } + } +} + +module.exports = IncomingMessage; + + +/***/ }), + +/***/ 4645: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +const http2 = __nccwpck_require__(5158); +const agent = __nccwpck_require__(9898); +const ClientRequest = __nccwpck_require__(9632); +const IncomingMessage = __nccwpck_require__(2575); +const auto = __nccwpck_require__(7167); + +const request = (url, options, callback) => { + return new ClientRequest(url, options, callback); +}; + +const get = (url, options, callback) => { + // eslint-disable-next-line unicorn/prevent-abbreviations + const req = new ClientRequest(url, options, callback); + req.end(); + + return req; +}; + +module.exports = { + ...http2, + ClientRequest, + IncomingMessage, + ...agent, + request, + get, + auto +}; + + +/***/ }), + +/***/ 1982: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +const net = __nccwpck_require__(1808); +/* istanbul ignore file: https://github.com/nodejs/node/blob/v13.0.1/lib/_http_agent.js */ + +module.exports = options => { + let servername = options.host; + const hostHeader = options.headers && options.headers.host; + + if (hostHeader) { + if (hostHeader.startsWith('[')) { + const index = hostHeader.indexOf(']'); + if (index === -1) { + servername = hostHeader; + } else { + servername = hostHeader.slice(1, -1); + } + } else { + servername = hostHeader.split(':', 1)[0]; + } + } + + if (net.isIP(servername)) { + return ''; + } + + return servername; +}; + + +/***/ }), + +/***/ 7087: +/***/ ((module) => { + +"use strict"; + +/* istanbul ignore file: https://github.com/nodejs/node/blob/master/lib/internal/errors.js */ + +const makeError = (Base, key, getMessage) => { + module.exports[key] = class NodeError extends Base { + constructor(...args) { + super(typeof getMessage === 'string' ? getMessage : getMessage(args)); + this.name = `${super.name} [${key}]`; + this.code = key; + } + }; +}; + +makeError(TypeError, 'ERR_INVALID_ARG_TYPE', args => { + const type = args[0].includes('.') ? 'property' : 'argument'; + + let valid = args[1]; + const isManyTypes = Array.isArray(valid); + + if (isManyTypes) { + valid = `${valid.slice(0, -1).join(', ')} or ${valid.slice(-1)}`; + } + + return `The "${args[0]}" ${type} must be ${isManyTypes ? 'one of' : 'of'} type ${valid}. 
Received ${typeof args[2]}`; +}); + +makeError(TypeError, 'ERR_INVALID_PROTOCOL', args => { + return `Protocol "${args[0]}" not supported. Expected "${args[1]}"`; +}); + +makeError(Error, 'ERR_HTTP_HEADERS_SENT', args => { + return `Cannot ${args[0]} headers after they are sent to the client`; +}); + +makeError(TypeError, 'ERR_INVALID_HTTP_TOKEN', args => { + return `${args[0]} must be a valid HTTP token [${args[1]}]`; +}); + +makeError(TypeError, 'ERR_HTTP_INVALID_HEADER_VALUE', args => { + return `Invalid value "${args[0]} for header "${args[1]}"`; +}); + +makeError(TypeError, 'ERR_INVALID_CHAR', args => { + return `Invalid character in ${args[0]} [${args[1]}]`; +}); + + +/***/ }), + +/***/ 1199: +/***/ ((module) => { + +"use strict"; + + +module.exports = header => { + switch (header) { + case ':method': + case ':scheme': + case ':authority': + case ':path': + return true; + default: + return false; + } +}; + + +/***/ }), + +/***/ 1818: +/***/ ((module) => { + +"use strict"; + + +module.exports = (from, to, events) => { + for (const event of events) { + from.on(event, (...args) => to.emit(event, ...args)); + } +}; + + +/***/ }), + +/***/ 2686: +/***/ ((module) => { + +"use strict"; + +/* istanbul ignore file: https://github.com/nodejs/node/blob/a91293d4d9ab403046ab5eb022332e4e3d249bd3/lib/internal/url.js#L1257 */ + +module.exports = url => { + const options = { + protocol: url.protocol, + hostname: typeof url.hostname === 'string' && url.hostname.startsWith('[') ? url.hostname.slice(1, -1) : url.hostname, + host: url.host, + hash: url.hash, + search: url.search, + pathname: url.pathname, + href: url.href, + path: `${url.pathname || ''}${url.search || ''}` + }; + + if (typeof url.port === 'string' && url.port.length !== 0) { + options.port = Number(url.port); + } + + if (url.username || url.password) { + options.auth = `${url.username || ''}:${url.password || ''}`; + } + + return options; +}; + + +/***/ }), + +/***/ 6435: +/***/ ((module) => { + +/*! + * is-extglob + * + * Copyright (c) 2014-2016, Jon Schlinkert. + * Licensed under the MIT License. + */ + +module.exports = function isExtglob(str) { + if (typeof str !== 'string' || str === '') { + return false; + } + + var match; + while ((match = /(\\).|([@?!+*]\(.*\))/g.exec(str))) { + if (match[2]) return true; + str = str.slice(match.index + match[0].length); + } + + return false; +}; + + +/***/ }), + +/***/ 4466: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/*! + * is-glob + * + * Copyright (c) 2014-2017, Jon Schlinkert. + * Released under the MIT License. + */ + +var isExtglob = __nccwpck_require__(6435); +var chars = { '{': '}', '(': ')', '[': ']'}; +var strictCheck = function(str) { + if (str[0] === '!') { + return true; + } + var index = 0; + var pipeIndex = -2; + var closeSquareIndex = -2; + var closeCurlyIndex = -2; + var closeParenIndex = -2; + var backSlashIndex = -2; + while (index < str.length) { + if (str[index] === '*') { + return true; + } + + if (str[index + 1] === '?' 
&& /[\].+)]/.test(str[index])) { + return true; + } + + if (closeSquareIndex !== -1 && str[index] === '[' && str[index + 1] !== ']') { + if (closeSquareIndex < index) { + closeSquareIndex = str.indexOf(']', index); + } + if (closeSquareIndex > index) { + if (backSlashIndex === -1 || backSlashIndex > closeSquareIndex) { + return true; + } + backSlashIndex = str.indexOf('\\', index); + if (backSlashIndex === -1 || backSlashIndex > closeSquareIndex) { + return true; + } + } + } + + if (closeCurlyIndex !== -1 && str[index] === '{' && str[index + 1] !== '}') { + closeCurlyIndex = str.indexOf('}', index); + if (closeCurlyIndex > index) { + backSlashIndex = str.indexOf('\\', index); + if (backSlashIndex === -1 || backSlashIndex > closeCurlyIndex) { + return true; + } + } + } + + if (closeParenIndex !== -1 && str[index] === '(' && str[index + 1] === '?' && /[:!=]/.test(str[index + 2]) && str[index + 3] !== ')') { + closeParenIndex = str.indexOf(')', index); + if (closeParenIndex > index) { + backSlashIndex = str.indexOf('\\', index); + if (backSlashIndex === -1 || backSlashIndex > closeParenIndex) { + return true; + } + } + } + + if (pipeIndex !== -1 && str[index] === '(' && str[index + 1] !== '|') { + if (pipeIndex < index) { + pipeIndex = str.indexOf('|', index); + } + if (pipeIndex !== -1 && str[pipeIndex + 1] !== ')') { + closeParenIndex = str.indexOf(')', pipeIndex); + if (closeParenIndex > pipeIndex) { + backSlashIndex = str.indexOf('\\', pipeIndex); + if (backSlashIndex === -1 || backSlashIndex > closeParenIndex) { + return true; + } + } + } + } + + if (str[index] === '\\') { + var open = str[index + 1]; + index += 2; + var close = chars[open]; + + if (close) { + var n = str.indexOf(close, index); + if (n !== -1) { + index = n + 1; + } + } + + if (str[index] === '!') { + return true; + } + } else { + index++; + } + } + return false; +}; + +var relaxedCheck = function(str) { + if (str[0] === '!') { + return true; + } + var index = 0; + while (index < str.length) { + if (/[*?{}()[\]]/.test(str[index])) { + return true; + } + + if (str[index] === '\\') { + var open = str[index + 1]; + index += 2; + var close = chars[open]; + + if (close) { + var n = str.indexOf(close, index); + if (n !== -1) { + index = n + 1; + } + } + + if (str[index] === '!') { + return true; + } + } else { + index++; + } + } + return false; +}; + +module.exports = function isGlob(str, options) { + if (typeof str !== 'string' || str === '') { + return false; + } + + if (isExtglob(str)) { + return true; + } + + var check = strictCheck; + + // optionally relax check + if (options && options.strict === false) { + check = relaxedCheck; + } + + return check(str); +}; + + +/***/ }), + +/***/ 5680: +/***/ ((module) => { + +"use strict"; +/*! + * is-number + * + * Copyright (c) 2014-present, Jon Schlinkert. + * Released under the MIT License. + */ + + + +module.exports = function(num) { + if (typeof num === 'number') { + return num - num === 0; + } + if (typeof num === 'string' && num.trim() !== '') { + return Number.isFinite ? Number.isFinite(+num) : isFinite(+num); + } + return false; +}; + + +/***/ }), + +/***/ 2820: +/***/ ((__unused_webpack_module, exports) => { + +//TODO: handle reviver/dehydrate function like normal +//and handle indentation, like normal. +//if anyone needs this... please send pull request. 
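The small predicates above, is-extglob, is-glob and is-number, are the building blocks the bundle's glob matching relies on. Their behaviour in brief, assuming the standalone packages:

```js
const isGlob = require('is-glob');
const isNumber = require('is-number');

isGlob('reports/**/*.xml');   // true:  contains glob magic characters
isGlob('reports/junit.xml');  // false: a literal path
isGlob('!(foo|bar).js');      // true:  extglob syntax counts as well

isNumber('42');     // true:  finite numeric string
isNumber('7.5e2');  // true
isNumber('');       // false: empty or whitespace-only strings are rejected
isNumber(NaN);      // false: NaN - NaN !== 0
```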
+ +exports.stringify = function stringify (o) { + if('undefined' == typeof o) return o + + if(o && Buffer.isBuffer(o)) + return JSON.stringify(':base64:' + o.toString('base64')) + + if(o && o.toJSON) + o = o.toJSON() + + if(o && 'object' === typeof o) { + var s = '' + var array = Array.isArray(o) + s = array ? '[' : '{' + var first = true + + for(var k in o) { + var ignore = 'function' == typeof o[k] || (!array && 'undefined' === typeof o[k]) + if(Object.hasOwnProperty.call(o, k) && !ignore) { + if(!first) + s += ',' + first = false + if (array) { + if(o[k] == undefined) + s += 'null' + else + s += stringify(o[k]) + } else if (o[k] !== void(0)) { + s += stringify(k) + ':' + stringify(o[k]) + } + } + } + + s += array ? ']' : '}' + + return s + } else if ('string' === typeof o) { + return JSON.stringify(/^:/.test(o) ? ':' + o : o) + } else if ('undefined' === typeof o) { + return 'null'; + } else + return JSON.stringify(o) +} + +exports.parse = function (s) { + return JSON.parse(s, function (key, value) { + if('string' === typeof value) { + if(/^:base64:/.test(value)) + return Buffer.from(value.substring(8), 'base64') + else + return /^:/.test(value) ? value.substring(1) : value + } + return value + }) +} + + +/***/ }), + +/***/ 1531: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const EventEmitter = __nccwpck_require__(2361); +const JSONB = __nccwpck_require__(2820); + +const loadStore = options => { + const adapters = { + redis: '@keyv/redis', + rediss: '@keyv/redis', + mongodb: '@keyv/mongo', + mongo: '@keyv/mongo', + sqlite: '@keyv/sqlite', + postgresql: '@keyv/postgres', + postgres: '@keyv/postgres', + mysql: '@keyv/mysql', + etcd: '@keyv/etcd', + offline: '@keyv/offline', + tiered: '@keyv/tiered', + }; + if (options.adapter || options.uri) { + const adapter = options.adapter || /^[^:+]*/.exec(options.uri)[0]; + return new (require(adapters[adapter]))(options); + } + + return new Map(); +}; + +const iterableAdapters = [ + 'sqlite', + 'postgres', + 'mysql', + 'mongo', + 'redis', + 'tiered', +]; + +class Keyv extends EventEmitter { + constructor(uri, {emitErrors = true, ...options} = {}) { + super(); + this.opts = { + namespace: 'keyv', + serialize: JSONB.stringify, + deserialize: JSONB.parse, + ...((typeof uri === 'string') ? {uri} : uri), + ...options, + }; + + if (!this.opts.store) { + const adapterOptions = {...this.opts}; + this.opts.store = loadStore(adapterOptions); + } + + if (this.opts.compression) { + const compression = this.opts.compression; + this.opts.serialize = compression.serialize.bind(compression); + this.opts.deserialize = compression.deserialize.bind(compression); + } + + if (typeof this.opts.store.on === 'function' && emitErrors) { + this.opts.store.on('error', error => this.emit('error', error)); + } + + this.opts.store.namespace = this.opts.namespace; + + const generateIterator = iterator => async function * () { + for await (const [key, raw] of typeof iterator === 'function' + ? 
iterator(this.opts.store.namespace) + : iterator) { + const data = await this.opts.deserialize(raw); + if (this.opts.store.namespace && !key.includes(this.opts.store.namespace)) { + continue; + } + + if (typeof data.expires === 'number' && Date.now() > data.expires) { + this.delete(key); + continue; + } + + yield [this._getKeyUnprefix(key), data.value]; + } + }; + + // Attach iterators + if (typeof this.opts.store[Symbol.iterator] === 'function' && this.opts.store instanceof Map) { + this.iterator = generateIterator(this.opts.store); + } else if (typeof this.opts.store.iterator === 'function' && this.opts.store.opts + && this._checkIterableAdaptar()) { + this.iterator = generateIterator(this.opts.store.iterator.bind(this.opts.store)); + } + } + + _checkIterableAdaptar() { + return iterableAdapters.includes(this.opts.store.opts.dialect) + || iterableAdapters.findIndex(element => this.opts.store.opts.url.includes(element)) >= 0; + } + + _getKeyPrefix(key) { + return `${this.opts.namespace}:${key}`; + } + + _getKeyPrefixArray(keys) { + return keys.map(key => `${this.opts.namespace}:${key}`); + } + + _getKeyUnprefix(key) { + return key + .split(':') + .splice(1) + .join(':'); + } + + get(key, options) { + const {store} = this.opts; + const isArray = Array.isArray(key); + const keyPrefixed = isArray ? this._getKeyPrefixArray(key) : this._getKeyPrefix(key); + if (isArray && store.getMany === undefined) { + const promises = []; + for (const key of keyPrefixed) { + promises.push(Promise.resolve() + .then(() => store.get(key)) + .then(data => (typeof data === 'string') ? this.opts.deserialize(data) : (this.opts.compression ? this.opts.deserialize(data) : data)) + .then(data => { + if (data === undefined || data === null) { + return undefined; + } + + if (typeof data.expires === 'number' && Date.now() > data.expires) { + return this.delete(key).then(() => undefined); + } + + return (options && options.raw) ? data : data.value; + }), + ); + } + + return Promise.allSettled(promises) + .then(values => { + const data = []; + for (const value of values) { + data.push(value.value); + } + + return data; + }); + } + + return Promise.resolve() + .then(() => isArray ? store.getMany(keyPrefixed) : store.get(keyPrefixed)) + .then(data => (typeof data === 'string') ? this.opts.deserialize(data) : (this.opts.compression ? this.opts.deserialize(data) : data)) + .then(data => { + if (data === undefined || data === null) { + return undefined; + } + + if (isArray) { + const result = []; + + for (let row of data) { + if ((typeof row === 'string')) { + row = this.opts.deserialize(row); + } + + if (row === undefined || row === null) { + result.push(undefined); + continue; + } + + if (typeof row.expires === 'number' && Date.now() > row.expires) { + this.delete(key).then(() => undefined); + result.push(undefined); + } else { + result.push((options && options.raw) ? row : row.value); + } + } + + return result; + } + + if (typeof data.expires === 'number' && Date.now() > data.expires) { + return this.delete(key).then(() => undefined); + } + + return (options && options.raw) ? data : data.value; + }); + } + + set(key, value, ttl) { + const keyPrefixed = this._getKeyPrefix(key); + if (typeof ttl === 'undefined') { + ttl = this.opts.ttl; + } + + if (ttl === 0) { + ttl = undefined; + } + + const {store} = this.opts; + + return Promise.resolve() + .then(() => { + const expires = (typeof ttl === 'number') ? 
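+ /* 'expires' is stored as an absolute epoch-millisecond timestamp; without a numeric ttl the entry never expires */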
(Date.now() + ttl) : null; + if (typeof value === 'symbol') { + this.emit('error', 'symbol cannot be serialized'); + } + + value = {value, expires}; + return this.opts.serialize(value); + }) + .then(value => store.set(keyPrefixed, value, ttl)) + .then(() => true); + } + + delete(key) { + const {store} = this.opts; + if (Array.isArray(key)) { + const keyPrefixed = this._getKeyPrefixArray(key); + if (store.deleteMany === undefined) { + const promises = []; + for (const key of keyPrefixed) { + promises.push(store.delete(key)); + } + + return Promise.allSettled(promises) + .then(values => values.every(x => x.value === true)); + } + + return Promise.resolve() + .then(() => store.deleteMany(keyPrefixed)); + } + + const keyPrefixed = this._getKeyPrefix(key); + return Promise.resolve() + .then(() => store.delete(keyPrefixed)); + } + + clear() { + const {store} = this.opts; + return Promise.resolve() + .then(() => store.clear()); + } + + has(key) { + const keyPrefixed = this._getKeyPrefix(key); + const {store} = this.opts; + return Promise.resolve() + .then(async () => { + if (typeof store.has === 'function') { + return store.has(keyPrefixed); + } + + const value = await store.get(keyPrefixed); + return value !== undefined; + }); + } + + disconnect() { + const {store} = this.opts; + if (typeof store.disconnect === 'function') { + return store.disconnect(); + } + } +} + +module.exports = Keyv; + + +/***/ }), + +/***/ 9662: +/***/ ((module) => { + +"use strict"; + +module.exports = object => { + const result = {}; + + for (const [key, value] of Object.entries(object)) { + result[key.toLowerCase()] = value; + } + + return result; +}; + + +/***/ }), + +/***/ 2578: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +/* + * merge2 + * https://github.com/teambition/merge2 + * + * Copyright (c) 2014-2020 Teambition + * Licensed under the MIT license. 
+ */ +const Stream = __nccwpck_require__(2781) +const PassThrough = Stream.PassThrough +const slice = Array.prototype.slice + +module.exports = merge2 + +function merge2 () { + const streamsQueue = [] + const args = slice.call(arguments) + let merging = false + let options = args[args.length - 1] + + if (options && !Array.isArray(options) && options.pipe == null) { + args.pop() + } else { + options = {} + } + + const doEnd = options.end !== false + const doPipeError = options.pipeError === true + if (options.objectMode == null) { + options.objectMode = true + } + if (options.highWaterMark == null) { + options.highWaterMark = 64 * 1024 + } + const mergedStream = PassThrough(options) + + function addStream () { + for (let i = 0, len = arguments.length; i < len; i++) { + streamsQueue.push(pauseStreams(arguments[i], options)) + } + mergeStream() + return this + } + + function mergeStream () { + if (merging) { + return + } + merging = true + + let streams = streamsQueue.shift() + if (!streams) { + process.nextTick(endStream) + return + } + if (!Array.isArray(streams)) { + streams = [streams] + } + + let pipesCount = streams.length + 1 + + function next () { + if (--pipesCount > 0) { + return + } + merging = false + mergeStream() + } + + function pipe (stream) { + function onend () { + stream.removeListener('merge2UnpipeEnd', onend) + stream.removeListener('end', onend) + if (doPipeError) { + stream.removeListener('error', onerror) + } + next() + } + function onerror (err) { + mergedStream.emit('error', err) + } + // skip ended stream + if (stream._readableState.endEmitted) { + return next() + } + + stream.on('merge2UnpipeEnd', onend) + stream.on('end', onend) + + if (doPipeError) { + stream.on('error', onerror) + } + + stream.pipe(mergedStream, { end: false }) + // compatible for old stream + stream.resume() + } + + for (let i = 0; i < streams.length; i++) { + pipe(streams[i]) + } + + next() + } + + function endStream () { + merging = false + // emit 'queueDrain' when all streams merged. + mergedStream.emit('queueDrain') + if (doEnd) { + mergedStream.end() + } + } + + mergedStream.setMaxListeners(0) + mergedStream.add = addStream + mergedStream.on('unpipe', function (stream) { + stream.emit('merge2UnpipeEnd') + }) + + if (args.length) { + addStream.apply(null, args) + } + return mergedStream +} + +// check and pause streams for pipe. +function pauseStreams (streams, options) { + if (!Array.isArray(streams)) { + // Backwards-compat with old-style streams + if (!streams._readableState && streams.pipe) { + streams = streams.pipe(PassThrough(options)) + } + if (!streams._readableState || !streams.pause || !streams.pipe) { + throw new Error('Only readable stream can be merged.') + } + streams.pause() + } else { + for (let i = 0, len = streams.length; i < len; i++) { + streams[i] = pauseStreams(streams[i], options) + } + } + return streams +} + + +/***/ }), + +/***/ 6228: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const util = __nccwpck_require__(3837); +const braces = __nccwpck_require__(610); +const picomatch = __nccwpck_require__(2864); +const utils = __nccwpck_require__(7426); +const isEmptyString = val => val === '' || val === './'; + +/** + * Returns an array of strings that match one or more glob patterns. + * + * ```js + * const mm = require('micromatch'); + * // mm(list, patterns[, options]); + * + * console.log(mm(['a.js', 'a.txt'], ['*.js'])); + * //=> [ 'a.js' ] + * ``` + * @param {String|Array} `list` List of strings to match. 
+ * @param {String|Array} `patterns` One or more glob patterns to use for matching. + * @param {Object} `options` See available [options](#options) + * @return {Array} Returns an array of matches + * @summary false + * @api public + */ + +const micromatch = (list, patterns, options) => { + patterns = [].concat(patterns); + list = [].concat(list); + + let omit = new Set(); + let keep = new Set(); + let items = new Set(); + let negatives = 0; + + let onResult = state => { + items.add(state.output); + if (options && options.onResult) { + options.onResult(state); + } + }; + + for (let i = 0; i < patterns.length; i++) { + let isMatch = picomatch(String(patterns[i]), { ...options, onResult }, true); + let negated = isMatch.state.negated || isMatch.state.negatedExtglob; + if (negated) negatives++; + + for (let item of list) { + let matched = isMatch(item, true); + + let match = negated ? !matched.isMatch : matched.isMatch; + if (!match) continue; + + if (negated) { + omit.add(matched.output); + } else { + omit.delete(matched.output); + keep.add(matched.output); + } + } + } + + let result = negatives === patterns.length ? [...items] : [...keep]; + let matches = result.filter(item => !omit.has(item)); + + if (options && matches.length === 0) { + if (options.failglob === true) { + throw new Error(`No matches found for "${patterns.join(', ')}"`); + } + + if (options.nonull === true || options.nullglob === true) { + return options.unescape ? patterns.map(p => p.replace(/\\/g, '')) : patterns; + } + } + + return matches; +}; + +/** + * Backwards compatibility + */ + +micromatch.match = micromatch; + +/** + * Returns a matcher function from the given glob `pattern` and `options`. + * The returned function takes a string to match as its only argument and returns + * true if the string is a match. + * + * ```js + * const mm = require('micromatch'); + * // mm.matcher(pattern[, options]); + * + * const isMatch = mm.matcher('*.!(*a)'); + * console.log(isMatch('a.a')); //=> false + * console.log(isMatch('a.b')); //=> true + * ``` + * @param {String} `pattern` Glob pattern + * @param {Object} `options` + * @return {Function} Returns a matcher function. + * @api public + */ + +micromatch.matcher = (pattern, options) => picomatch(pattern, options); + +/** + * Returns true if **any** of the given glob `patterns` match the specified `string`. + * + * ```js + * const mm = require('micromatch'); + * // mm.isMatch(string, patterns[, options]); + * + * console.log(mm.isMatch('a.a', ['b.*', '*.a'])); //=> true + * console.log(mm.isMatch('a.a', 'b.*')); //=> false + * ``` + * @param {String} `str` The string to test. + * @param {String|Array} `patterns` One or more glob patterns to use for matching. + * @param {Object} `[options]` See available [options](#options). + * @return {Boolean} Returns true if any patterns match `str` + * @api public + */ + +micromatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str); + +/** + * Backwards compatibility + */ + +micromatch.any = micromatch.isMatch; + +/** + * Returns a list of strings that _**do not match any**_ of the given `patterns`. + * + * ```js + * const mm = require('micromatch'); + * // mm.not(list, patterns[, options]); + * + * console.log(mm.not(['a.a', 'b.b', 'c.c'], '*.a')); + * //=> ['b.b', 'c.c'] + * ``` + * @param {Array} `list` Array of strings to match. + * @param {String|Array} `patterns` One or more glob pattern to use for matching. 
+ * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Array} Returns an array of strings that **do not match** the given patterns. + * @api public + */ + +micromatch.not = (list, patterns, options = {}) => { + patterns = [].concat(patterns).map(String); + let result = new Set(); + let items = []; + + let onResult = state => { + if (options.onResult) options.onResult(state); + items.push(state.output); + }; + + let matches = new Set(micromatch(list, patterns, { ...options, onResult })); + + for (let item of items) { + if (!matches.has(item)) { + result.add(item); + } + } + return [...result]; +}; + +/** + * Returns true if the given `string` contains the given pattern. Similar + * to [.isMatch](#isMatch) but the pattern can match any part of the string. + * + * ```js + * var mm = require('micromatch'); + * // mm.contains(string, pattern[, options]); + * + * console.log(mm.contains('aa/bb/cc', '*b')); + * //=> true + * console.log(mm.contains('aa/bb/cc', '*d')); + * //=> false + * ``` + * @param {String} `str` The string to match. + * @param {String|Array} `patterns` Glob pattern to use for matching. + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Boolean} Returns true if any of the patterns matches any part of `str`. + * @api public + */ + +micromatch.contains = (str, pattern, options) => { + if (typeof str !== 'string') { + throw new TypeError(`Expected a string: "${util.inspect(str)}"`); + } + + if (Array.isArray(pattern)) { + return pattern.some(p => micromatch.contains(str, p, options)); + } + + if (typeof pattern === 'string') { + if (isEmptyString(str) || isEmptyString(pattern)) { + return false; + } + + if (str.includes(pattern) || (str.startsWith('./') && str.slice(2).includes(pattern))) { + return true; + } + } + + return micromatch.isMatch(str, pattern, { ...options, contains: true }); +}; + +/** + * Filter the keys of the given object with the given `glob` pattern + * and `options`. Does not attempt to match nested keys. If you need this feature, + * use [glob-object][] instead. + * + * ```js + * const mm = require('micromatch'); + * // mm.matchKeys(object, patterns[, options]); + * + * const obj = { aa: 'a', ab: 'b', ac: 'c' }; + * console.log(mm.matchKeys(obj, '*b')); + * //=> { ab: 'b' } + * ``` + * @param {Object} `object` The object with keys to filter. + * @param {String|Array} `patterns` One or more glob patterns to use for matching. + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Object} Returns an object with only keys that match the given patterns. + * @api public + */ + +micromatch.matchKeys = (obj, patterns, options) => { + if (!utils.isObject(obj)) { + throw new TypeError('Expected the first argument to be an object'); + } + let keys = micromatch(Object.keys(obj), patterns, options); + let res = {}; + for (let key of keys) res[key] = obj[key]; + return res; +}; + +/** + * Returns true if some of the strings in the given `list` match any of the given glob `patterns`. + * + * ```js + * const mm = require('micromatch'); + * // mm.some(list, patterns[, options]); + * + * console.log(mm.some(['foo.js', 'bar.js'], ['*.js', '!foo.js'])); + * // true + * console.log(mm.some(['foo.js'], ['*.js', '!foo.js'])); + * // false + * ``` + * @param {String|Array} `list` The string or array of strings to test. Returns as soon as the first match is found. 
+ * @param {String|Array} `patterns` One or more glob patterns to use for matching. + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Boolean} Returns true if any `patterns` matches any of the strings in `list` + * @api public + */ + +micromatch.some = (list, patterns, options) => { + let items = [].concat(list); + + for (let pattern of [].concat(patterns)) { + let isMatch = picomatch(String(pattern), options); + if (items.some(item => isMatch(item))) { + return true; + } + } + return false; +}; + +/** + * Returns true if every string in the given `list` matches + * any of the given glob `patterns`. + * + * ```js + * const mm = require('micromatch'); + * // mm.every(list, patterns[, options]); + * + * console.log(mm.every('foo.js', ['foo.js'])); + * // true + * console.log(mm.every(['foo.js', 'bar.js'], ['*.js'])); + * // true + * console.log(mm.every(['foo.js', 'bar.js'], ['*.js', '!foo.js'])); + * // false + * console.log(mm.every(['foo.js'], ['*.js', '!foo.js'])); + * // false + * ``` + * @param {String|Array} `list` The string or array of strings to test. + * @param {String|Array} `patterns` One or more glob patterns to use for matching. + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Boolean} Returns true if all `patterns` matches all of the strings in `list` + * @api public + */ + +micromatch.every = (list, patterns, options) => { + let items = [].concat(list); + + for (let pattern of [].concat(patterns)) { + let isMatch = picomatch(String(pattern), options); + if (!items.every(item => isMatch(item))) { + return false; + } + } + return true; +}; + +/** + * Returns true if **all** of the given `patterns` match + * the specified string. + * + * ```js + * const mm = require('micromatch'); + * // mm.all(string, patterns[, options]); + * + * console.log(mm.all('foo.js', ['foo.js'])); + * // true + * + * console.log(mm.all('foo.js', ['*.js', '!foo.js'])); + * // false + * + * console.log(mm.all('foo.js', ['*.js', 'foo.js'])); + * // true + * + * console.log(mm.all('foo.js', ['*.js', 'f*', '*o*', '*o.js'])); + * // true + * ``` + * @param {String|Array} `str` The string to test. + * @param {String|Array} `patterns` One or more glob patterns to use for matching. + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Boolean} Returns true if any patterns match `str` + * @api public + */ + +micromatch.all = (str, patterns, options) => { + if (typeof str !== 'string') { + throw new TypeError(`Expected a string: "${util.inspect(str)}"`); + } + + return [].concat(patterns).every(p => picomatch(p, options)(str)); +}; + +/** + * Returns an array of matches captured by `pattern` in `string, or `null` if the pattern did not match. + * + * ```js + * const mm = require('micromatch'); + * // mm.capture(pattern, string[, options]); + * + * console.log(mm.capture('test/*.js', 'test/foo.js')); + * //=> ['foo'] + * console.log(mm.capture('test/*.js', 'foo/bar.css')); + * //=> null + * ``` + * @param {String} `glob` Glob pattern to use for matching. + * @param {String} `input` String to match + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Array|null} Returns an array of captures if the input matches the glob pattern, otherwise `null`. 
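+ * Capture groups that did not participate in the match are returned as empty strings rather than undefined.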
+ * @api public + */ + +micromatch.capture = (glob, input, options) => { + let posix = utils.isWindows(options); + let regex = picomatch.makeRe(String(glob), { ...options, capture: true }); + let match = regex.exec(posix ? utils.toPosixSlashes(input) : input); + + if (match) { + return match.slice(1).map(v => v === void 0 ? '' : v); + } +}; + +/** + * Create a regular expression from the given glob `pattern`. + * + * ```js + * const mm = require('micromatch'); + * // mm.makeRe(pattern[, options]); + * + * console.log(mm.makeRe('*.js')); + * //=> /^(?:(\.[\\\/])?(?!\.)(?=.)[^\/]*?\.js)$/ + * ``` + * @param {String} `pattern` A glob pattern to convert to regex. + * @param {Object} `options` + * @return {RegExp} Returns a regex created from the given pattern. + * @api public + */ + +micromatch.makeRe = (...args) => picomatch.makeRe(...args); + +/** + * Scan a glob pattern to separate the pattern into segments. Used + * by the [split](#split) method. + * + * ```js + * const mm = require('micromatch'); + * const state = mm.scan(pattern[, options]); + * ``` + * @param {String} `pattern` + * @param {Object} `options` + * @return {Object} Returns an object with + * @api public + */ + +micromatch.scan = (...args) => picomatch.scan(...args); + +/** + * Parse a glob pattern to create the source string for a regular + * expression. + * + * ```js + * const mm = require('micromatch'); + * const state = mm.parse(pattern[, options]); + * ``` + * @param {String} `glob` + * @param {Object} `options` + * @return {Object} Returns an object with useful properties and output to be used as regex source string. + * @api public + */ + +micromatch.parse = (patterns, options) => { + let res = []; + for (let pattern of [].concat(patterns || [])) { + for (let str of braces(String(pattern), options)) { + res.push(picomatch.parse(str, options)); + } + } + return res; +}; + +/** + * Process the given brace `pattern`. + * + * ```js + * const { braces } = require('micromatch'); + * console.log(braces('foo/{a,b,c}/bar')); + * //=> [ 'foo/(a|b|c)/bar' ] + * + * console.log(braces('foo/{a,b,c}/bar', { expand: true })); + * //=> [ 'foo/a/bar', 'foo/b/bar', 'foo/c/bar' ] + * ``` + * @param {String} `pattern` String with brace pattern to process. + * @param {Object} `options` Any [options](#options) to change how expansion is performed. See the [braces][] library for all available options. 
+ * @return {Array} + * @api public + */ + +micromatch.braces = (pattern, options) => { + if (typeof pattern !== 'string') throw new TypeError('Expected a string'); + if ((options && options.nobrace === true) || !/\{.*\}/.test(pattern)) { + return [pattern]; + } + return braces(pattern, options); +}; + +/** + * Expand braces + */ + +micromatch.braceExpand = (pattern, options) => { + if (typeof pattern !== 'string') throw new TypeError('Expected a string'); + return micromatch.braces(pattern, { ...options, expand: true }); +}; + +/** + * Expose micromatch + */ + +module.exports = micromatch; + + +/***/ }), + +/***/ 2864: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +module.exports = __nccwpck_require__(555); + + +/***/ }), + +/***/ 6476: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const path = __nccwpck_require__(1017); +const WIN_SLASH = '\\\\/'; +const WIN_NO_SLASH = `[^${WIN_SLASH}]`; + +/** + * Posix glob regex + */ + +const DOT_LITERAL = '\\.'; +const PLUS_LITERAL = '\\+'; +const QMARK_LITERAL = '\\?'; +const SLASH_LITERAL = '\\/'; +const ONE_CHAR = '(?=.)'; +const QMARK = '[^/]'; +const END_ANCHOR = `(?:${SLASH_LITERAL}|$)`; +const START_ANCHOR = `(?:^|${SLASH_LITERAL})`; +const DOTS_SLASH = `${DOT_LITERAL}{1,2}${END_ANCHOR}`; +const NO_DOT = `(?!${DOT_LITERAL})`; +const NO_DOTS = `(?!${START_ANCHOR}${DOTS_SLASH})`; +const NO_DOT_SLASH = `(?!${DOT_LITERAL}{0,1}${END_ANCHOR})`; +const NO_DOTS_SLASH = `(?!${DOTS_SLASH})`; +const QMARK_NO_DOT = `[^.${SLASH_LITERAL}]`; +const STAR = `${QMARK}*?`; + +const POSIX_CHARS = { + DOT_LITERAL, + PLUS_LITERAL, + QMARK_LITERAL, + SLASH_LITERAL, + ONE_CHAR, + QMARK, + END_ANCHOR, + DOTS_SLASH, + NO_DOT, + NO_DOTS, + NO_DOT_SLASH, + NO_DOTS_SLASH, + QMARK_NO_DOT, + STAR, + START_ANCHOR +}; + +/** + * Windows glob regex + */ + +const WINDOWS_CHARS = { + ...POSIX_CHARS, + + SLASH_LITERAL: `[${WIN_SLASH}]`, + QMARK: WIN_NO_SLASH, + STAR: `${WIN_NO_SLASH}*?`, + DOTS_SLASH: `${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$)`, + NO_DOT: `(?!${DOT_LITERAL})`, + NO_DOTS: `(?!(?:^|[${WIN_SLASH}])${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`, + NO_DOT_SLASH: `(?!${DOT_LITERAL}{0,1}(?:[${WIN_SLASH}]|$))`, + NO_DOTS_SLASH: `(?!${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`, + QMARK_NO_DOT: `[^.${WIN_SLASH}]`, + START_ANCHOR: `(?:^|[${WIN_SLASH}])`, + END_ANCHOR: `(?:[${WIN_SLASH}]|$)` +}; + +/** + * POSIX Bracket Regex + */ + +const POSIX_REGEX_SOURCE = { + alnum: 'a-zA-Z0-9', + alpha: 'a-zA-Z', + ascii: '\\x00-\\x7F', + blank: ' \\t', + cntrl: '\\x00-\\x1F\\x7F', + digit: '0-9', + graph: '\\x21-\\x7E', + lower: 'a-z', + print: '\\x20-\\x7E ', + punct: '\\-!"#$%&\'()\\*+,./:;<=>?@[\\]^_`{|}~', + space: ' \\t\\r\\n\\v\\f', + upper: 'A-Z', + word: 'A-Za-z0-9_', + xdigit: 'A-Fa-f0-9' +}; + +module.exports = { + MAX_LENGTH: 1024 * 64, + POSIX_REGEX_SOURCE, + + // regular expressions + REGEX_BACKSLASH: /\\(?![*+?^${}(|)[\]])/g, + REGEX_NON_SPECIAL_CHARS: /^[^@![\].,$*+?^{}()|\\/]+/, + REGEX_SPECIAL_CHARS: /[-*+?.^${}(|)[\]]/, + REGEX_SPECIAL_CHARS_BACKREF: /(\\?)((\W)(\3*))/g, + REGEX_SPECIAL_CHARS_GLOBAL: /([-*+?.^${}(|)[\]])/g, + REGEX_REMOVE_BACKSLASH: /(?:\[.*?[^\\]\]|\\(?=.))/g, + + // Replace globs with equivalent patterns to reduce parsing time. + REPLACEMENTS: { + '***': '*', + '**/**': '**', + '**/**/**': '**' + }, + + // Digits + CHAR_0: 48, /* 0 */ + CHAR_9: 57, /* 9 */ + + // Alphabet chars. 
+ CHAR_UPPERCASE_A: 65, /* A */ + CHAR_LOWERCASE_A: 97, /* a */ + CHAR_UPPERCASE_Z: 90, /* Z */ + CHAR_LOWERCASE_Z: 122, /* z */ + + CHAR_LEFT_PARENTHESES: 40, /* ( */ + CHAR_RIGHT_PARENTHESES: 41, /* ) */ + + CHAR_ASTERISK: 42, /* * */ + + // Non-alphabetic chars. + CHAR_AMPERSAND: 38, /* & */ + CHAR_AT: 64, /* @ */ + CHAR_BACKWARD_SLASH: 92, /* \ */ + CHAR_CARRIAGE_RETURN: 13, /* \r */ + CHAR_CIRCUMFLEX_ACCENT: 94, /* ^ */ + CHAR_COLON: 58, /* : */ + CHAR_COMMA: 44, /* , */ + CHAR_DOT: 46, /* . */ + CHAR_DOUBLE_QUOTE: 34, /* " */ + CHAR_EQUAL: 61, /* = */ + CHAR_EXCLAMATION_MARK: 33, /* ! */ + CHAR_FORM_FEED: 12, /* \f */ + CHAR_FORWARD_SLASH: 47, /* / */ + CHAR_GRAVE_ACCENT: 96, /* ` */ + CHAR_HASH: 35, /* # */ + CHAR_HYPHEN_MINUS: 45, /* - */ + CHAR_LEFT_ANGLE_BRACKET: 60, /* < */ + CHAR_LEFT_CURLY_BRACE: 123, /* { */ + CHAR_LEFT_SQUARE_BRACKET: 91, /* [ */ + CHAR_LINE_FEED: 10, /* \n */ + CHAR_NO_BREAK_SPACE: 160, /* \u00A0 */ + CHAR_PERCENT: 37, /* % */ + CHAR_PLUS: 43, /* + */ + CHAR_QUESTION_MARK: 63, /* ? */ + CHAR_RIGHT_ANGLE_BRACKET: 62, /* > */ + CHAR_RIGHT_CURLY_BRACE: 125, /* } */ + CHAR_RIGHT_SQUARE_BRACKET: 93, /* ] */ + CHAR_SEMICOLON: 59, /* ; */ + CHAR_SINGLE_QUOTE: 39, /* ' */ + CHAR_SPACE: 32, /* */ + CHAR_TAB: 9, /* \t */ + CHAR_UNDERSCORE: 95, /* _ */ + CHAR_VERTICAL_LINE: 124, /* | */ + CHAR_ZERO_WIDTH_NOBREAK_SPACE: 65279, /* \uFEFF */ + + SEP: path.sep, + + /** + * Create EXTGLOB_CHARS + */ + + extglobChars(chars) { + return { + '!': { type: 'negate', open: '(?:(?!(?:', close: `))${chars.STAR})` }, + '?': { type: 'qmark', open: '(?:', close: ')?' }, + '+': { type: 'plus', open: '(?:', close: ')+' }, + '*': { type: 'star', open: '(?:', close: ')*' }, + '@': { type: 'at', open: '(?:', close: ')' } + }; + }, + + /** + * Create GLOB_CHARS + */ + + globChars(win32) { + return win32 === true ? WINDOWS_CHARS : POSIX_CHARS; + } +}; + + +/***/ }), + +/***/ 5961: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const constants = __nccwpck_require__(6476); +const utils = __nccwpck_require__(7426); + +/** + * Constants + */ + +const { + MAX_LENGTH, + POSIX_REGEX_SOURCE, + REGEX_NON_SPECIAL_CHARS, + REGEX_SPECIAL_CHARS_BACKREF, + REPLACEMENTS +} = constants; + +/** + * Helpers + */ + +const expandRange = (args, options) => { + if (typeof options.expandRange === 'function') { + return options.expandRange(...args, options); + } + + args.sort(); + const value = `[${args.join('-')}]`; + + try { + /* eslint-disable-next-line no-new */ + new RegExp(value); + } catch (ex) { + return args.map(v => utils.escapeRegex(v)).join('..'); + } + + return value; +}; + +/** + * Create the message for a syntax error + */ + +const syntaxError = (type, char) => { + return `Missing ${type}: "${char}" - use "\\\\${char}" to match literal characters`; +}; + +/** + * Parse the given input string. + * @param {String} input + * @param {Object} options + * @return {Object} + */ + +const parse = (input, options) => { + if (typeof input !== 'string') { + throw new TypeError('Expected a string'); + } + + input = REPLACEMENTS[input] || input; + + const opts = { ...options }; + const max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH; + + let len = input.length; + if (len > max) { + throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`); + } + + const bos = { type: 'bos', value: '', output: opts.prepend || '' }; + const tokens = [bos]; + + const capture = opts.capture ? 
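+ /* with opts.capture the generated groups stay capturing; otherwise '?:' makes every generated group non-capturing */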
'' : '?:'; + const win32 = utils.isWindows(options); + + // create constants based on platform, for windows or posix + const PLATFORM_CHARS = constants.globChars(win32); + const EXTGLOB_CHARS = constants.extglobChars(PLATFORM_CHARS); + + const { + DOT_LITERAL, + PLUS_LITERAL, + SLASH_LITERAL, + ONE_CHAR, + DOTS_SLASH, + NO_DOT, + NO_DOT_SLASH, + NO_DOTS_SLASH, + QMARK, + QMARK_NO_DOT, + STAR, + START_ANCHOR + } = PLATFORM_CHARS; + + const globstar = opts => { + return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`; + }; + + const nodot = opts.dot ? '' : NO_DOT; + const qmarkNoDot = opts.dot ? QMARK : QMARK_NO_DOT; + let star = opts.bash === true ? globstar(opts) : STAR; + + if (opts.capture) { + star = `(${star})`; + } + + // minimatch options support + if (typeof opts.noext === 'boolean') { + opts.noextglob = opts.noext; + } + + const state = { + input, + index: -1, + start: 0, + dot: opts.dot === true, + consumed: '', + output: '', + prefix: '', + backtrack: false, + negated: false, + brackets: 0, + braces: 0, + parens: 0, + quotes: 0, + globstar: false, + tokens + }; + + input = utils.removePrefix(input, state); + len = input.length; + + const extglobs = []; + const braces = []; + const stack = []; + let prev = bos; + let value; + + /** + * Tokenizing helpers + */ + + const eos = () => state.index === len - 1; + const peek = state.peek = (n = 1) => input[state.index + n]; + const advance = state.advance = () => input[++state.index] || ''; + const remaining = () => input.slice(state.index + 1); + const consume = (value = '', num = 0) => { + state.consumed += value; + state.index += num; + }; + + const append = token => { + state.output += token.output != null ? token.output : token.value; + consume(token.value); + }; + + const negate = () => { + let count = 1; + + while (peek() === '!' && (peek(2) !== '(' || peek(3) === '?')) { + advance(); + state.start++; + count++; + } + + if (count % 2 === 0) { + return false; + } + + state.negated = true; + state.start++; + return true; + }; + + const increment = type => { + state[type]++; + stack.push(type); + }; + + const decrement = type => { + state[type]--; + stack.pop(); + }; + + /** + * Push tokens onto the tokens array. This helper speeds up + * tokenizing by 1) helping us avoid backtracking as much as possible, + * and 2) helping us avoid creating extra tokens when consecutive + * characters are plain text. This improves performance and simplifies + * lookbehinds. 
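+ * For example, consecutive plain-text characters are merged into the previous 'text' token instead of producing one token per character.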
+ */ + + const push = tok => { + if (prev.type === 'globstar') { + const isBrace = state.braces > 0 && (tok.type === 'comma' || tok.type === 'brace'); + const isExtglob = tok.extglob === true || (extglobs.length && (tok.type === 'pipe' || tok.type === 'paren')); + + if (tok.type !== 'slash' && tok.type !== 'paren' && !isBrace && !isExtglob) { + state.output = state.output.slice(0, -prev.output.length); + prev.type = 'star'; + prev.value = '*'; + prev.output = star; + state.output += prev.output; + } + } + + if (extglobs.length && tok.type !== 'paren') { + extglobs[extglobs.length - 1].inner += tok.value; + } + + if (tok.value || tok.output) append(tok); + if (prev && prev.type === 'text' && tok.type === 'text') { + prev.value += tok.value; + prev.output = (prev.output || '') + tok.value; + return; + } + + tok.prev = prev; + tokens.push(tok); + prev = tok; + }; + + const extglobOpen = (type, value) => { + const token = { ...EXTGLOB_CHARS[value], conditions: 1, inner: '' }; + + token.prev = prev; + token.parens = state.parens; + token.output = state.output; + const output = (opts.capture ? '(' : '') + token.open; + + increment('parens'); + push({ type, value, output: state.output ? '' : ONE_CHAR }); + push({ type: 'paren', extglob: true, value: advance(), output }); + extglobs.push(token); + }; + + const extglobClose = token => { + let output = token.close + (opts.capture ? ')' : ''); + let rest; + + if (token.type === 'negate') { + let extglobStar = star; + + if (token.inner && token.inner.length > 1 && token.inner.includes('/')) { + extglobStar = globstar(opts); + } + + if (extglobStar !== star || eos() || /^\)+$/.test(remaining())) { + output = token.close = `)$))${extglobStar}`; + } + + if (token.inner.includes('*') && (rest = remaining()) && /^\.[^\\/.]+$/.test(rest)) { + // Any non-magical string (`.ts`) or even nested expression (`.{ts,tsx}`) can follow after the closing parenthesis. + // In this case, we need to parse the string and use it in the output of the original pattern. + // Suitable patterns: `/!(*.d).ts`, `/!(*.d).{ts,tsx}`, `**/!(*-dbg).@(js)`. + // + // Disabling the `fastpaths` option due to a problem with parsing strings as `.ts` in the pattern like `**/!(*.d).ts`. + const expression = parse(rest, { ...options, fastpaths: false }).output; + + output = token.close = `)${expression})${extglobStar})`; + } + + if (token.prev.type === 'bos') { + state.negatedExtglob = true; + } + } + + push({ type: 'paren', extglob: true, value, output }); + decrement('parens'); + }; + + /** + * Fast paths + */ + + if (opts.fastpaths !== false && !/(^[*!]|[/()[\]{}"])/.test(input)) { + let backslashes = false; + + let output = input.replace(REGEX_SPECIAL_CHARS_BACKREF, (m, esc, chars, first, rest, index) => { + if (first === '\\') { + backslashes = true; + return m; + } + + if (first === '?') { + if (esc) { + return esc + first + (rest ? QMARK.repeat(rest.length) : ''); + } + if (index === 0) { + return qmarkNoDot + (rest ? QMARK.repeat(rest.length) : ''); + } + return QMARK.repeat(chars.length); + } + + if (first === '.') { + return DOT_LITERAL.repeat(chars.length); + } + + if (first === '*') { + if (esc) { + return esc + first + (rest ? star : ''); + } + return star; + } + return esc ? m : `\\${m}`; + }); + + if (backslashes === true) { + if (opts.unescape === true) { + output = output.replace(/\\/g, ''); + } else { + output = output.replace(/\\+/g, m => { + return m.length % 2 === 0 ? '\\\\' : (m ? 
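+ /* an even-length run of backslashes collapses to a single escaped backslash; an odd-length run keeps one escape for the following character */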
'\\' : ''); + }); + } + } + + if (output === input && opts.contains === true) { + state.output = input; + return state; + } + + state.output = utils.wrapOutput(output, state, options); + return state; + } + + /** + * Tokenize input until we reach end-of-string + */ + + while (!eos()) { + value = advance(); + + if (value === '\u0000') { + continue; + } + + /** + * Escaped characters + */ + + if (value === '\\') { + const next = peek(); + + if (next === '/' && opts.bash !== true) { + continue; + } + + if (next === '.' || next === ';') { + continue; + } + + if (!next) { + value += '\\'; + push({ type: 'text', value }); + continue; + } + + // collapse slashes to reduce potential for exploits + const match = /^\\+/.exec(remaining()); + let slashes = 0; + + if (match && match[0].length > 2) { + slashes = match[0].length; + state.index += slashes; + if (slashes % 2 !== 0) { + value += '\\'; + } + } + + if (opts.unescape === true) { + value = advance(); + } else { + value += advance(); + } + + if (state.brackets === 0) { + push({ type: 'text', value }); + continue; + } + } + + /** + * If we're inside a regex character class, continue + * until we reach the closing bracket. + */ + + if (state.brackets > 0 && (value !== ']' || prev.value === '[' || prev.value === '[^')) { + if (opts.posix !== false && value === ':') { + const inner = prev.value.slice(1); + if (inner.includes('[')) { + prev.posix = true; + + if (inner.includes(':')) { + const idx = prev.value.lastIndexOf('['); + const pre = prev.value.slice(0, idx); + const rest = prev.value.slice(idx + 2); + const posix = POSIX_REGEX_SOURCE[rest]; + if (posix) { + prev.value = pre + posix; + state.backtrack = true; + advance(); + + if (!bos.output && tokens.indexOf(prev) === 1) { + bos.output = ONE_CHAR; + } + continue; + } + } + } + } + + if ((value === '[' && peek() !== ':') || (value === '-' && peek() === ']')) { + value = `\\${value}`; + } + + if (value === ']' && (prev.value === '[' || prev.value === '[^')) { + value = `\\${value}`; + } + + if (opts.posix === true && value === '!' && prev.value === '[') { + value = '^'; + } + + prev.value += value; + append({ value }); + continue; + } + + /** + * If we're inside a quoted string, continue + * until we reach the closing double quote. + */ + + if (state.quotes === 1 && value !== '"') { + value = utils.escapeRegex(value); + prev.value += value; + append({ value }); + continue; + } + + /** + * Double quotes + */ + + if (value === '"') { + state.quotes = state.quotes === 1 ? 0 : 1; + if (opts.keepQuotes === true) { + push({ type: 'text', value }); + } + continue; + } + + /** + * Parentheses + */ + + if (value === '(') { + increment('parens'); + push({ type: 'paren', value }); + continue; + } + + if (value === ')') { + if (state.parens === 0 && opts.strictBrackets === true) { + throw new SyntaxError(syntaxError('opening', '(')); + } + + const extglob = extglobs[extglobs.length - 1]; + if (extglob && state.parens === extglob.parens + 1) { + extglobClose(extglobs.pop()); + continue; + } + + push({ type: 'paren', value, output: state.parens ? 
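+ /* a ')' with no matching '(' is escaped so it matches a literal closing parenthesis */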
')' : '\\)' }); + decrement('parens'); + continue; + } + + /** + * Square brackets + */ + + if (value === '[') { + if (opts.nobracket === true || !remaining().includes(']')) { + if (opts.nobracket !== true && opts.strictBrackets === true) { + throw new SyntaxError(syntaxError('closing', ']')); + } + + value = `\\${value}`; + } else { + increment('brackets'); + } + + push({ type: 'bracket', value }); + continue; + } + + if (value === ']') { + if (opts.nobracket === true || (prev && prev.type === 'bracket' && prev.value.length === 1)) { + push({ type: 'text', value, output: `\\${value}` }); + continue; + } + + if (state.brackets === 0) { + if (opts.strictBrackets === true) { + throw new SyntaxError(syntaxError('opening', '[')); + } + + push({ type: 'text', value, output: `\\${value}` }); + continue; + } + + decrement('brackets'); + + const prevValue = prev.value.slice(1); + if (prev.posix !== true && prevValue[0] === '^' && !prevValue.includes('/')) { + value = `/${value}`; + } + + prev.value += value; + append({ value }); + + // when literal brackets are explicitly disabled + // assume we should match with a regex character class + if (opts.literalBrackets === false || utils.hasRegexChars(prevValue)) { + continue; + } + + const escaped = utils.escapeRegex(prev.value); + state.output = state.output.slice(0, -prev.value.length); + + // when literal brackets are explicitly enabled + // assume we should escape the brackets to match literal characters + if (opts.literalBrackets === true) { + state.output += escaped; + prev.value = escaped; + continue; + } + + // when the user specifies nothing, try to match both + prev.value = `(${capture}${escaped}|${prev.value})`; + state.output += prev.value; + continue; + } + + /** + * Braces + */ + + if (value === '{' && opts.nobrace !== true) { + increment('braces'); + + const open = { + type: 'brace', + value, + output: '(', + outputIndex: state.output.length, + tokensIndex: state.tokens.length + }; + + braces.push(open); + push(open); + continue; + } + + if (value === '}') { + const brace = braces[braces.length - 1]; + + if (opts.nobrace === true || !brace) { + push({ type: 'text', value, output: value }); + continue; + } + + let output = ')'; + + if (brace.dots === true) { + const arr = tokens.slice(); + const range = []; + + for (let i = arr.length - 1; i >= 0; i--) { + tokens.pop(); + if (arr[i].type === 'brace') { + break; + } + if (arr[i].type !== 'dots') { + range.unshift(arr[i].value); + } + } + + output = expandRange(range, opts); + state.backtrack = true; + } + + if (brace.comma !== true && brace.dots !== true) { + const out = state.output.slice(0, brace.outputIndex); + const toks = state.tokens.slice(brace.tokensIndex); + brace.value = brace.output = '\\{'; + value = output = '\\}'; + state.output = out; + for (const t of toks) { + state.output += (t.output || t.value); + } + } + + push({ type: 'brace', value, output }); + decrement('braces'); + braces.pop(); + continue; + } + + /** + * Pipes + */ + + if (value === '|') { + if (extglobs.length > 0) { + extglobs[extglobs.length - 1].conditions++; + } + push({ type: 'text', value }); + continue; + } + + /** + * Commas + */ + + if (value === ',') { + let output = value; + + const brace = braces[braces.length - 1]; + if (brace && stack[stack.length - 1] === 'braces') { + brace.comma = true; + output = '|'; + } + + push({ type: 'comma', value, output }); + continue; + } + + /** + * Slashes + */ + + if (value === '/') { + // if the beginning of the glob is "./", advance the start + // to the current 
index, and don't add the "./" characters + // to the state. This greatly simplifies lookbehinds when + // checking for BOS characters like "!" and "." (not "./") + if (prev.type === 'dot' && state.index === state.start + 1) { + state.start = state.index + 1; + state.consumed = ''; + state.output = ''; + tokens.pop(); + prev = bos; // reset "prev" to the first token + continue; + } + + push({ type: 'slash', value, output: SLASH_LITERAL }); + continue; + } + + /** + * Dots + */ + + if (value === '.') { + if (state.braces > 0 && prev.type === 'dot') { + if (prev.value === '.') prev.output = DOT_LITERAL; + const brace = braces[braces.length - 1]; + prev.type = 'dots'; + prev.output += value; + prev.value += value; + brace.dots = true; + continue; + } + + if ((state.braces + state.parens) === 0 && prev.type !== 'bos' && prev.type !== 'slash') { + push({ type: 'text', value, output: DOT_LITERAL }); + continue; + } + + push({ type: 'dot', value, output: DOT_LITERAL }); + continue; + } + + /** + * Question marks + */ + + if (value === '?') { + const isGroup = prev && prev.value === '('; + if (!isGroup && opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { + extglobOpen('qmark', value); + continue; + } + + if (prev && prev.type === 'paren') { + const next = peek(); + let output = value; + + if (next === '<' && !utils.supportsLookbehinds()) { + throw new Error('Node.js v10 or higher is required for regex lookbehinds'); + } + + if ((prev.value === '(' && !/[!=<:]/.test(next)) || (next === '<' && !/<([!=]|\w+>)/.test(remaining()))) { + output = `\\${value}`; + } + + push({ type: 'text', value, output }); + continue; + } + + if (opts.dot !== true && (prev.type === 'slash' || prev.type === 'bos')) { + push({ type: 'qmark', value, output: QMARK_NO_DOT }); + continue; + } + + push({ type: 'qmark', value, output: QMARK }); + continue; + } + + /** + * Exclamation + */ + + if (value === '!') { + if (opts.noextglob !== true && peek() === '(') { + if (peek(2) !== '?' 
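+ /* sequences such as '!(?=', '!(?!', '!(?<' and '!(?:' are regex-style groups, not negation extglobs */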
|| !/[!=<:]/.test(peek(3))) { + extglobOpen('negate', value); + continue; + } + } + + if (opts.nonegate !== true && state.index === 0) { + negate(); + continue; + } + } + + /** + * Plus + */ + + if (value === '+') { + if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { + extglobOpen('plus', value); + continue; + } + + if ((prev && prev.value === '(') || opts.regex === false) { + push({ type: 'plus', value, output: PLUS_LITERAL }); + continue; + } + + if ((prev && (prev.type === 'bracket' || prev.type === 'paren' || prev.type === 'brace')) || state.parens > 0) { + push({ type: 'plus', value }); + continue; + } + + push({ type: 'plus', value: PLUS_LITERAL }); + continue; + } + + /** + * Plain text + */ + + if (value === '@') { + if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { + push({ type: 'at', extglob: true, value, output: '' }); + continue; + } + + push({ type: 'text', value }); + continue; + } + + /** + * Plain text + */ + + if (value !== '*') { + if (value === '$' || value === '^') { + value = `\\${value}`; + } + + const match = REGEX_NON_SPECIAL_CHARS.exec(remaining()); + if (match) { + value += match[0]; + state.index += match[0].length; + } + + push({ type: 'text', value }); + continue; + } + + /** + * Stars + */ + + if (prev && (prev.type === 'globstar' || prev.star === true)) { + prev.type = 'star'; + prev.star = true; + prev.value += value; + prev.output = star; + state.backtrack = true; + state.globstar = true; + consume(value); + continue; + } + + let rest = remaining(); + if (opts.noextglob !== true && /^\([^?]/.test(rest)) { + extglobOpen('star', value); + continue; + } + + if (prev.type === 'star') { + if (opts.noglobstar === true) { + consume(value); + continue; + } + + const prior = prev.prev; + const before = prior.prev; + const isStart = prior.type === 'slash' || prior.type === 'bos'; + const afterStar = before && (before.type === 'star' || before.type === 'globstar'); + + if (opts.bash === true && (!isStart || (rest[0] && rest[0] !== '/'))) { + push({ type: 'star', value, output: '' }); + continue; + } + + const isBrace = state.braces > 0 && (prior.type === 'comma' || prior.type === 'brace'); + const isExtglob = extglobs.length && (prior.type === 'pipe' || prior.type === 'paren'); + if (!isStart && prior.type !== 'paren' && !isBrace && !isExtglob) { + push({ type: 'star', value, output: '' }); + continue; + } + + // strip consecutive `/**/` + while (rest.slice(0, 3) === '/**') { + const after = input[state.index + 4]; + if (after && after !== '/') { + break; + } + rest = rest.slice(3); + consume('/**', 3); + } + + if (prior.type === 'bos' && eos()) { + prev.type = 'globstar'; + prev.value += value; + prev.output = globstar(opts); + state.output = prev.output; + state.globstar = true; + consume(value); + continue; + } + + if (prior.type === 'slash' && prior.prev.type !== 'bos' && !afterStar && eos()) { + state.output = state.output.slice(0, -(prior.output + prev.output).length); + prior.output = `(?:${prior.output}`; + + prev.type = 'globstar'; + prev.output = globstar(opts) + (opts.strictSlashes ? ')' : '|$)'); + prev.value += value; + state.globstar = true; + state.output += prior.output + prev.output; + consume(value); + continue; + } + + if (prior.type === 'slash' && prior.prev.type !== 'bos' && rest[0] === '/') { + const end = rest[1] !== void 0 ? 
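+ /* '|$' additionally allows a match at end-of-input when more pattern follows the slash */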
'|$' : ''; + + state.output = state.output.slice(0, -(prior.output + prev.output).length); + prior.output = `(?:${prior.output}`; + + prev.type = 'globstar'; + prev.output = `${globstar(opts)}${SLASH_LITERAL}|${SLASH_LITERAL}${end})`; + prev.value += value; + + state.output += prior.output + prev.output; + state.globstar = true; + + consume(value + advance()); + + push({ type: 'slash', value: '/', output: '' }); + continue; + } + + if (prior.type === 'bos' && rest[0] === '/') { + prev.type = 'globstar'; + prev.value += value; + prev.output = `(?:^|${SLASH_LITERAL}|${globstar(opts)}${SLASH_LITERAL})`; + state.output = prev.output; + state.globstar = true; + consume(value + advance()); + push({ type: 'slash', value: '/', output: '' }); + continue; + } + + // remove single star from output + state.output = state.output.slice(0, -prev.output.length); + + // reset previous token to globstar + prev.type = 'globstar'; + prev.output = globstar(opts); + prev.value += value; + + // reset output with globstar + state.output += prev.output; + state.globstar = true; + consume(value); + continue; + } + + const token = { type: 'star', value, output: star }; + + if (opts.bash === true) { + token.output = '.*?'; + if (prev.type === 'bos' || prev.type === 'slash') { + token.output = nodot + token.output; + } + push(token); + continue; + } + + if (prev && (prev.type === 'bracket' || prev.type === 'paren') && opts.regex === true) { + token.output = value; + push(token); + continue; + } + + if (state.index === state.start || prev.type === 'slash' || prev.type === 'dot') { + if (prev.type === 'dot') { + state.output += NO_DOT_SLASH; + prev.output += NO_DOT_SLASH; + + } else if (opts.dot === true) { + state.output += NO_DOTS_SLASH; + prev.output += NO_DOTS_SLASH; + + } else { + state.output += nodot; + prev.output += nodot; + } + + if (peek() !== '*') { + state.output += ONE_CHAR; + prev.output += ONE_CHAR; + } + } + + push(token); + } + + while (state.brackets > 0) { + if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ']')); + state.output = utils.escapeLast(state.output, '['); + decrement('brackets'); + } + + while (state.parens > 0) { + if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ')')); + state.output = utils.escapeLast(state.output, '('); + decrement('parens'); + } + + while (state.braces > 0) { + if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', '}')); + state.output = utils.escapeLast(state.output, '{'); + decrement('braces'); + } + + if (opts.strictSlashes !== true && (prev.type === 'star' || prev.type === 'bracket')) { + push({ type: 'maybe_slash', value: '', output: `${SLASH_LITERAL}?` }); + } + + // rebuild the output if we had to backtrack at any point + if (state.backtrack === true) { + state.output = ''; + + for (const token of state.tokens) { + state.output += token.output != null ? token.output : token.value; + + if (token.suffix) { + state.output += token.suffix; + } + } + } + + return state; +}; + +/** + * Fast paths for creating regular expressions for common glob patterns. + * This can significantly speed up processing and has very little downside + * impact when none of the fast paths match. + */ + +parse.fastpaths = (input, options) => { + const opts = { ...options }; + const max = typeof opts.maxLength === 'number' ? 
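+ /* a caller-supplied maxLength can only lower the 65536-character limit, never raise it */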
Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH; + const len = input.length; + if (len > max) { + throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`); + } + + input = REPLACEMENTS[input] || input; + const win32 = utils.isWindows(options); + + // create constants based on platform, for windows or posix + const { + DOT_LITERAL, + SLASH_LITERAL, + ONE_CHAR, + DOTS_SLASH, + NO_DOT, + NO_DOTS, + NO_DOTS_SLASH, + STAR, + START_ANCHOR + } = constants.globChars(win32); + + const nodot = opts.dot ? NO_DOTS : NO_DOT; + const slashDot = opts.dot ? NO_DOTS_SLASH : NO_DOT; + const capture = opts.capture ? '' : '?:'; + const state = { negated: false, prefix: '' }; + let star = opts.bash === true ? '.*?' : STAR; + + if (opts.capture) { + star = `(${star})`; + } + + const globstar = opts => { + if (opts.noglobstar === true) return star; + return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`; + }; + + const create = str => { + switch (str) { + case '*': + return `${nodot}${ONE_CHAR}${star}`; + + case '.*': + return `${DOT_LITERAL}${ONE_CHAR}${star}`; + + case '*.*': + return `${nodot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`; + + case '*/*': + return `${nodot}${star}${SLASH_LITERAL}${ONE_CHAR}${slashDot}${star}`; + + case '**': + return nodot + globstar(opts); + + case '**/*': + return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${ONE_CHAR}${star}`; + + case '**/*.*': + return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`; + + case '**/.*': + return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${DOT_LITERAL}${ONE_CHAR}${star}`; + + default: { + const match = /^(.*?)\.(\w+)$/.exec(str); + if (!match) return; + + const source = create(match[1]); + if (!source) return; + + return source + DOT_LITERAL + match[2]; + } + } + }; + + const output = utils.removePrefix(input, state); + let source = create(output); + + if (source && opts.strictSlashes !== true) { + source += `${SLASH_LITERAL}?`; + } + + return source; +}; + +module.exports = parse; + + +/***/ }), + +/***/ 555: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const path = __nccwpck_require__(1017); +const scan = __nccwpck_require__(7751); +const parse = __nccwpck_require__(5961); +const utils = __nccwpck_require__(7426); +const constants = __nccwpck_require__(6476); +const isObject = val => val && typeof val === 'object' && !Array.isArray(val); + +/** + * Creates a matcher function from one or more glob patterns. The + * returned function takes a string to match as its first argument, + * and returns true if the string is a match. The returned matcher + * function also takes a boolean as the second argument that, when true, + * returns an object with additional information. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch(glob[, options]); + * + * const isMatch = picomatch('*.!(*a)'); + * console.log(isMatch('a.a')); //=> false + * console.log(isMatch('a.b')); //=> true + * ``` + * @name picomatch + * @param {String|Array} `globs` One or more glob patterns. + * @param {Object=} `options` + * @return {Function=} Returns a matcher function. 
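+ * When an array of globs is given, the returned matcher reports a match as soon as any one of the patterns matches.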
+ * @api public + */ + +const picomatch = (glob, options, returnState = false) => { + if (Array.isArray(glob)) { + const fns = glob.map(input => picomatch(input, options, returnState)); + const arrayMatcher = str => { + for (const isMatch of fns) { + const state = isMatch(str); + if (state) return state; + } + return false; + }; + return arrayMatcher; + } + + const isState = isObject(glob) && glob.tokens && glob.input; + + if (glob === '' || (typeof glob !== 'string' && !isState)) { + throw new TypeError('Expected pattern to be a non-empty string'); + } + + const opts = options || {}; + const posix = utils.isWindows(options); + const regex = isState + ? picomatch.compileRe(glob, options) + : picomatch.makeRe(glob, options, false, true); + + const state = regex.state; + delete regex.state; + + let isIgnored = () => false; + if (opts.ignore) { + const ignoreOpts = { ...options, ignore: null, onMatch: null, onResult: null }; + isIgnored = picomatch(opts.ignore, ignoreOpts, returnState); + } + + const matcher = (input, returnObject = false) => { + const { isMatch, match, output } = picomatch.test(input, regex, options, { glob, posix }); + const result = { glob, state, regex, posix, input, output, match, isMatch }; + + if (typeof opts.onResult === 'function') { + opts.onResult(result); + } + + if (isMatch === false) { + result.isMatch = false; + return returnObject ? result : false; + } + + if (isIgnored(input)) { + if (typeof opts.onIgnore === 'function') { + opts.onIgnore(result); + } + result.isMatch = false; + return returnObject ? result : false; + } + + if (typeof opts.onMatch === 'function') { + opts.onMatch(result); + } + return returnObject ? result : true; + }; + + if (returnState) { + matcher.state = state; + } + + return matcher; +}; + +/** + * Test `input` with the given `regex`. This is used by the main + * `picomatch()` function to test the input string. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch.test(input, regex[, options]); + * + * console.log(picomatch.test('foo/bar', /^(?:([^/]*?)\/([^/]*?))$/)); + * // { isMatch: true, match: [ 'foo/', 'foo', 'bar' ], output: 'foo/bar' } + * ``` + * @param {String} `input` String to test. + * @param {RegExp} `regex` + * @return {Object} Returns an object with matching info. + * @api public + */ + +picomatch.test = (input, regex, options, { glob, posix } = {}) => { + if (typeof input !== 'string') { + throw new TypeError('Expected input to be a string'); + } + + if (input === '') { + return { isMatch: false, output: '' }; + } + + const opts = options || {}; + const format = opts.format || (posix ? utils.toPosixSlashes : null); + let match = input === glob; + let output = (match && format) ? format(input) : input; + + if (match === false) { + output = format ? format(input) : input; + match = output === glob; + } + + if (match === false || opts.capture === true) { + if (opts.matchBase === true || opts.basename === true) { + match = picomatch.matchBase(input, regex, options, posix); + } else { + match = regex.exec(output); + } + } + + return { isMatch: Boolean(match), match, output }; +}; + +/** + * Match the basename of a filepath. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch.matchBase(input, glob[, options]); + * console.log(picomatch.matchBase('foo/bar.js', '*.js'); // true + * ``` + * @param {String} `input` String to test. + * @param {RegExp|String} `glob` Glob pattern or regex created by [.makeRe](#makeRe). 
+ * @return {Boolean} + * @api public + */ + +picomatch.matchBase = (input, glob, options, posix = utils.isWindows(options)) => { + const regex = glob instanceof RegExp ? glob : picomatch.makeRe(glob, options); + return regex.test(path.basename(input)); +}; + +/** + * Returns true if **any** of the given glob `patterns` match the specified `string`. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch.isMatch(string, patterns[, options]); + * + * console.log(picomatch.isMatch('a.a', ['b.*', '*.a'])); //=> true + * console.log(picomatch.isMatch('a.a', 'b.*')); //=> false + * ``` + * @param {String|Array} str The string to test. + * @param {String|Array} patterns One or more glob patterns to use for matching. + * @param {Object} [options] See available [options](#options). + * @return {Boolean} Returns true if any patterns match `str` + * @api public + */ + +picomatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str); + +/** + * Parse a glob pattern to create the source string for a regular + * expression. + * + * ```js + * const picomatch = require('picomatch'); + * const result = picomatch.parse(pattern[, options]); + * ``` + * @param {String} `pattern` + * @param {Object} `options` + * @return {Object} Returns an object with useful properties and output to be used as a regex source string. + * @api public + */ + +picomatch.parse = (pattern, options) => { + if (Array.isArray(pattern)) return pattern.map(p => picomatch.parse(p, options)); + return parse(pattern, { ...options, fastpaths: false }); +}; + +/** + * Scan a glob pattern to separate the pattern into segments. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch.scan(input[, options]); + * + * const result = picomatch.scan('!./foo/*.js'); + * console.log(result); + * { prefix: '!./', + * input: '!./foo/*.js', + * start: 3, + * base: 'foo', + * glob: '*.js', + * isBrace: false, + * isBracket: false, + * isGlob: true, + * isExtglob: false, + * isGlobstar: false, + * negated: true } + * ``` + * @param {String} `input` Glob pattern to scan. + * @param {Object} `options` + * @return {Object} Returns an object with + * @api public + */ + +picomatch.scan = (input, options) => scan(input, options); + +/** + * Compile a regular expression from the `state` object returned by the + * [parse()](#parse) method. + * + * @param {Object} `state` + * @param {Object} `options` + * @param {Boolean} `returnOutput` Intended for implementors, this argument allows you to return the raw output from the parser. + * @param {Boolean} `returnState` Adds the state to a `state` property on the returned regex. Useful for implementors and debugging. + * @return {RegExp} + * @api public + */ + +picomatch.compileRe = (state, options, returnOutput = false, returnState = false) => { + if (returnOutput === true) { + return state.output; + } + + const opts = options || {}; + const prepend = opts.contains ? '' : '^'; + const append = opts.contains ? '' : '$'; + + let source = `${prepend}(?:${state.output})${append}`; + if (state && state.negated === true) { + source = `^(?!${source}).*$`; + } + + const regex = picomatch.toRegex(source, options); + if (returnState === true) { + regex.state = state; + } + + return regex; +}; + +/** + * Create a regular expression from a parsed glob pattern. 
+ * + * ```js + * const picomatch = require('picomatch'); + * const state = picomatch.parse('*.js'); + * // picomatch.compileRe(state[, options]); + * + * console.log(picomatch.compileRe(state)); + * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ + * ``` + * @param {String} `state` The object returned from the `.parse` method. + * @param {Object} `options` + * @param {Boolean} `returnOutput` Implementors may use this argument to return the compiled output, instead of a regular expression. This is not exposed on the options to prevent end-users from mutating the result. + * @param {Boolean} `returnState` Implementors may use this argument to return the state from the parsed glob with the returned regular expression. + * @return {RegExp} Returns a regex created from the given pattern. + * @api public + */ + +picomatch.makeRe = (input, options = {}, returnOutput = false, returnState = false) => { + if (!input || typeof input !== 'string') { + throw new TypeError('Expected a non-empty string'); + } + + let parsed = { negated: false, fastpaths: true }; + + if (options.fastpaths !== false && (input[0] === '.' || input[0] === '*')) { + parsed.output = parse.fastpaths(input, options); + } + + if (!parsed.output) { + parsed = parse(input, options); + } + + return picomatch.compileRe(parsed, options, returnOutput, returnState); +}; + +/** + * Create a regular expression from the given regex source string. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch.toRegex(source[, options]); + * + * const { output } = picomatch.parse('*.js'); + * console.log(picomatch.toRegex(output)); + * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ + * ``` + * @param {String} `source` Regular expression source string. + * @param {Object} `options` + * @return {RegExp} + * @api public + */ + +picomatch.toRegex = (source, options) => { + try { + const opts = options || {}; + return new RegExp(source, opts.flags || (opts.nocase ? 'i' : '')); + } catch (err) { + if (options && options.debug === true) throw err; + return /$^/; + } +}; + +/** + * Picomatch constants. + * @return {Object} + */ + +picomatch.constants = constants; + +/** + * Expose "picomatch" + */ + +module.exports = picomatch; + + +/***/ }), + +/***/ 7751: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const utils = __nccwpck_require__(7426); +const { + CHAR_ASTERISK, /* * */ + CHAR_AT, /* @ */ + CHAR_BACKWARD_SLASH, /* \ */ + CHAR_COMMA, /* , */ + CHAR_DOT, /* . */ + CHAR_EXCLAMATION_MARK, /* ! */ + CHAR_FORWARD_SLASH, /* / */ + CHAR_LEFT_CURLY_BRACE, /* { */ + CHAR_LEFT_PARENTHESES, /* ( */ + CHAR_LEFT_SQUARE_BRACKET, /* [ */ + CHAR_PLUS, /* + */ + CHAR_QUESTION_MARK, /* ? */ + CHAR_RIGHT_CURLY_BRACE, /* } */ + CHAR_RIGHT_PARENTHESES, /* ) */ + CHAR_RIGHT_SQUARE_BRACKET /* ] */ +} = __nccwpck_require__(6476); + +const isPathSeparator = code => { + return code === CHAR_FORWARD_SLASH || code === CHAR_BACKWARD_SLASH; +}; + +const depth = token => { + if (token.isPrefix !== true) { + token.depth = token.isGlobstar ? Infinity : 1; + } +}; + +/** + * Quickly scans a glob pattern and returns an object with a handful of + * useful properties, like `isGlob`, `path` (the leading non-glob, if it exists), + * `glob` (the actual pattern), `negated` (true if the path starts with `!` but not + * with `!(`) and `negatedExtglob` (true if the path starts with `!(`). 
+ * + * ```js + * const pm = require('picomatch'); + * console.log(pm.scan('foo/bar/*.js')); + * { isGlob: true, input: 'foo/bar/*.js', base: 'foo/bar', glob: '*.js' } + * ``` + * @param {String} `str` + * @param {Object} `options` + * @return {Object} Returns an object with tokens and regex source string. + * @api public + */ + +const scan = (input, options) => { + const opts = options || {}; + + const length = input.length - 1; + const scanToEnd = opts.parts === true || opts.scanToEnd === true; + const slashes = []; + const tokens = []; + const parts = []; + + let str = input; + let index = -1; + let start = 0; + let lastIndex = 0; + let isBrace = false; + let isBracket = false; + let isGlob = false; + let isExtglob = false; + let isGlobstar = false; + let braceEscaped = false; + let backslashes = false; + let negated = false; + let negatedExtglob = false; + let finished = false; + let braces = 0; + let prev; + let code; + let token = { value: '', depth: 0, isGlob: false }; + + const eos = () => index >= length; + const peek = () => str.charCodeAt(index + 1); + const advance = () => { + prev = code; + return str.charCodeAt(++index); + }; + + while (index < length) { + code = advance(); + let next; + + if (code === CHAR_BACKWARD_SLASH) { + backslashes = token.backslashes = true; + code = advance(); + + if (code === CHAR_LEFT_CURLY_BRACE) { + braceEscaped = true; + } + continue; + } + + if (braceEscaped === true || code === CHAR_LEFT_CURLY_BRACE) { + braces++; + + while (eos() !== true && (code = advance())) { + if (code === CHAR_BACKWARD_SLASH) { + backslashes = token.backslashes = true; + advance(); + continue; + } + + if (code === CHAR_LEFT_CURLY_BRACE) { + braces++; + continue; + } + + if (braceEscaped !== true && code === CHAR_DOT && (code = advance()) === CHAR_DOT) { + isBrace = token.isBrace = true; + isGlob = token.isGlob = true; + finished = true; + + if (scanToEnd === true) { + continue; + } + + break; + } + + if (braceEscaped !== true && code === CHAR_COMMA) { + isBrace = token.isBrace = true; + isGlob = token.isGlob = true; + finished = true; + + if (scanToEnd === true) { + continue; + } + + break; + } + + if (code === CHAR_RIGHT_CURLY_BRACE) { + braces--; + + if (braces === 0) { + braceEscaped = false; + isBrace = token.isBrace = true; + finished = true; + break; + } + } + } + + if (scanToEnd === true) { + continue; + } + + break; + } + + if (code === CHAR_FORWARD_SLASH) { + slashes.push(index); + tokens.push(token); + token = { value: '', depth: 0, isGlob: false }; + + if (finished === true) continue; + if (prev === CHAR_DOT && index === (start + 1)) { + start += 2; + continue; + } + + lastIndex = index + 1; + continue; + } + + if (opts.noext !== true) { + const isExtglobChar = code === CHAR_PLUS + || code === CHAR_AT + || code === CHAR_ASTERISK + || code === CHAR_QUESTION_MARK + || code === CHAR_EXCLAMATION_MARK; + + if (isExtglobChar === true && peek() === CHAR_LEFT_PARENTHESES) { + isGlob = token.isGlob = true; + isExtglob = token.isExtglob = true; + finished = true; + if (code === CHAR_EXCLAMATION_MARK && index === start) { + negatedExtglob = true; + } + + if (scanToEnd === true) { + while (eos() !== true && (code = advance())) { + if (code === CHAR_BACKWARD_SLASH) { + backslashes = token.backslashes = true; + code = advance(); + continue; + } + + if (code === CHAR_RIGHT_PARENTHESES) { + isGlob = token.isGlob = true; + finished = true; + break; + } + } + continue; + } + break; + } + } + + if (code === CHAR_ASTERISK) { + if (prev === CHAR_ASTERISK) isGlobstar = 
token.isGlobstar = true; + isGlob = token.isGlob = true; + finished = true; + + if (scanToEnd === true) { + continue; + } + break; + } + + if (code === CHAR_QUESTION_MARK) { + isGlob = token.isGlob = true; + finished = true; + + if (scanToEnd === true) { + continue; + } + break; + } + + if (code === CHAR_LEFT_SQUARE_BRACKET) { + while (eos() !== true && (next = advance())) { + if (next === CHAR_BACKWARD_SLASH) { + backslashes = token.backslashes = true; + advance(); + continue; + } + + if (next === CHAR_RIGHT_SQUARE_BRACKET) { + isBracket = token.isBracket = true; + isGlob = token.isGlob = true; + finished = true; + break; + } + } + + if (scanToEnd === true) { + continue; + } + + break; + } + + if (opts.nonegate !== true && code === CHAR_EXCLAMATION_MARK && index === start) { + negated = token.negated = true; + start++; + continue; + } + + if (opts.noparen !== true && code === CHAR_LEFT_PARENTHESES) { + isGlob = token.isGlob = true; + + if (scanToEnd === true) { + while (eos() !== true && (code = advance())) { + if (code === CHAR_LEFT_PARENTHESES) { + backslashes = token.backslashes = true; + code = advance(); + continue; + } + + if (code === CHAR_RIGHT_PARENTHESES) { + finished = true; + break; + } + } + continue; + } + break; + } + + if (isGlob === true) { + finished = true; + + if (scanToEnd === true) { + continue; + } + + break; + } + } + + if (opts.noext === true) { + isExtglob = false; + isGlob = false; + } + + let base = str; + let prefix = ''; + let glob = ''; + + if (start > 0) { + prefix = str.slice(0, start); + str = str.slice(start); + lastIndex -= start; + } + + if (base && isGlob === true && lastIndex > 0) { + base = str.slice(0, lastIndex); + glob = str.slice(lastIndex); + } else if (isGlob === true) { + base = ''; + glob = str; + } else { + base = str; + } + + if (base && base !== '' && base !== '/' && base !== str) { + if (isPathSeparator(base.charCodeAt(base.length - 1))) { + base = base.slice(0, -1); + } + } + + if (opts.unescape === true) { + if (glob) glob = utils.removeBackslashes(glob); + + if (base && backslashes === true) { + base = utils.removeBackslashes(base); + } + } + + const state = { + prefix, + input, + start, + base, + glob, + isBrace, + isBracket, + isGlob, + isExtglob, + isGlobstar, + negated, + negatedExtglob + }; + + if (opts.tokens === true) { + state.maxDepth = 0; + if (!isPathSeparator(code)) { + tokens.push(token); + } + state.tokens = tokens; + } + + if (opts.parts === true || opts.tokens === true) { + let prevIndex; + + for (let idx = 0; idx < slashes.length; idx++) { + const n = prevIndex ? 
prevIndex + 1 : start; + const i = slashes[idx]; + const value = input.slice(n, i); + if (opts.tokens) { + if (idx === 0 && start !== 0) { + tokens[idx].isPrefix = true; + tokens[idx].value = prefix; + } else { + tokens[idx].value = value; + } + depth(tokens[idx]); + state.maxDepth += tokens[idx].depth; + } + if (idx !== 0 || value !== '') { + parts.push(value); + } + prevIndex = i; + } + + if (prevIndex && prevIndex + 1 < input.length) { + const value = input.slice(prevIndex + 1); + parts.push(value); + + if (opts.tokens) { + tokens[tokens.length - 1].value = value; + depth(tokens[tokens.length - 1]); + state.maxDepth += tokens[tokens.length - 1].depth; + } + } + + state.slashes = slashes; + state.parts = parts; + } + + return state; +}; + +module.exports = scan; + + +/***/ }), + +/***/ 7426: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +const path = __nccwpck_require__(1017); +const win32 = process.platform === 'win32'; +const { + REGEX_BACKSLASH, + REGEX_REMOVE_BACKSLASH, + REGEX_SPECIAL_CHARS, + REGEX_SPECIAL_CHARS_GLOBAL +} = __nccwpck_require__(6476); + +exports.isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val); +exports.hasRegexChars = str => REGEX_SPECIAL_CHARS.test(str); +exports.isRegexChar = str => str.length === 1 && exports.hasRegexChars(str); +exports.escapeRegex = str => str.replace(REGEX_SPECIAL_CHARS_GLOBAL, '\\$1'); +exports.toPosixSlashes = str => str.replace(REGEX_BACKSLASH, '/'); + +exports.removeBackslashes = str => { + return str.replace(REGEX_REMOVE_BACKSLASH, match => { + return match === '\\' ? '' : match; + }); +}; + +exports.supportsLookbehinds = () => { + const segs = process.version.slice(1).split('.').map(Number); + if (segs.length === 3 && segs[0] >= 9 || (segs[0] === 8 && segs[1] >= 10)) { + return true; + } + return false; +}; + +exports.isWindows = options => { + if (options && typeof options.windows === 'boolean') { + return options.windows; + } + return win32 === true || path.sep === '\\'; +}; + +exports.escapeLast = (input, char, lastIdx) => { + const idx = input.lastIndexOf(char, lastIdx); + if (idx === -1) return input; + if (input[idx - 1] === '\\') return exports.escapeLast(input, char, idx - 1); + return `${input.slice(0, idx)}\\${input.slice(idx)}`; +}; + +exports.removePrefix = (input, state = {}) => { + let output = input; + if (output.startsWith('./')) { + output = output.slice(2); + state.prefix = './'; + } + return output; +}; + +exports.wrapOutput = (input, state = {}, options = {}) => { + const prepend = options.contains ? '' : '^'; + const append = options.contains ? 
'' : '$';
+
+  let output = `${prepend}(?:${input})${append}`;
+  if (state.negated === true) {
+    output = `(?:^(?!${output}).*$)`;
+  }
+  return output;
+};
+
+
+/***/ }),
+
+/***/ 2610:
+/***/ ((module) => {
+
+"use strict";
+
+
+// We define these manually to ensure they're always copied
+// even if they would move up the prototype chain
+// https://nodejs.org/api/http.html#http_class_http_incomingmessage
+const knownProps = [
+	'destroy',
+	'setTimeout',
+	'socket',
+	'headers',
+	'trailers',
+	'rawHeaders',
+	'statusCode',
+	'httpVersion',
+	'httpVersionMinor',
+	'httpVersionMajor',
+	'rawTrailers',
+	'statusMessage'
+];
+
+module.exports = (fromStream, toStream) => {
+	const fromProps = new Set(Object.keys(fromStream).concat(knownProps));
+
+	for (const prop of fromProps) {
+		// Don't overwrite existing properties
+		if (prop in toStream) {
+			continue;
+		}
+
+		toStream[prop] = typeof fromStream[prop] === 'function' ? fromStream[prop].bind(fromStream) : fromStream[prop];
+	}
+};
+
+
+/***/ }),
+
+/***/ 7952:
+/***/ ((module) => {
+
+"use strict";
+
+
+// https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs
+const DATA_URL_DEFAULT_MIME_TYPE = 'text/plain';
+const DATA_URL_DEFAULT_CHARSET = 'us-ascii';
+
+const testParameter = (name, filters) => {
+	return filters.some(filter => filter instanceof RegExp ? filter.test(name) : filter === name);
+};
+
+const normalizeDataURL = (urlString, {stripHash}) => {
+	const match = /^data:(?<type>[^,]*?),(?<data>[^#]*?)(?:#(?<hash>.*))?$/.exec(urlString);
+
+	if (!match) {
+		throw new Error(`Invalid URL: ${urlString}`);
+	}
+
+	let {type, data, hash} = match.groups;
+	const mediaType = type.split(';');
+	hash = stripHash ? '' : hash;
+
+	let isBase64 = false;
+	if (mediaType[mediaType.length - 1] === 'base64') {
+		mediaType.pop();
+		isBase64 = true;
+	}
+
+	// Lowercase MIME type
+	const mimeType = (mediaType.shift() || '').toLowerCase();
+	const attributes = mediaType
+		.map(attribute => {
+			let [key, value = ''] = attribute.split('=').map(string => string.trim());
+
+			// Lowercase `charset`
+			if (key === 'charset') {
+				value = value.toLowerCase();
+
+				if (value === DATA_URL_DEFAULT_CHARSET) {
+					return '';
+				}
+			}
+
+			return `${key}${value ? `=${value}` : ''}`;
+		})
+		.filter(Boolean);
+
+	const normalizedMediaType = [
+		...attributes
+	];
+
+	if (isBase64) {
+		normalizedMediaType.push('base64');
+	}
+
+	if (normalizedMediaType.length !== 0 || (mimeType && mimeType !== DATA_URL_DEFAULT_MIME_TYPE)) {
+		normalizedMediaType.unshift(mimeType);
+	}
+
+	return `data:${normalizedMediaType.join(';')},${isBase64 ? data.trim() : data}${hash ?
`#${hash}` : ''}`;
+};
+
+const normalizeUrl = (urlString, options) => {
+	options = {
+		defaultProtocol: 'http:',
+		normalizeProtocol: true,
+		forceHttp: false,
+		forceHttps: false,
+		stripAuthentication: true,
+		stripHash: false,
+		stripTextFragment: true,
+		stripWWW: true,
+		removeQueryParameters: [/^utm_\w+/i],
+		removeTrailingSlash: true,
+		removeSingleSlash: true,
+		removeDirectoryIndex: false,
+		sortQueryParameters: true,
+		...options
+	};
+
+	urlString = urlString.trim();
+
+	// Data URL
+	if (/^data:/i.test(urlString)) {
+		return normalizeDataURL(urlString, options);
+	}
+
+	if (/^view-source:/i.test(urlString)) {
+		throw new Error('`view-source:` is not supported as it is a non-standard protocol');
+	}
+
+	const hasRelativeProtocol = urlString.startsWith('//');
+	const isRelativeUrl = !hasRelativeProtocol && /^\.*\//.test(urlString);
+
+	// Prepend protocol
+	if (!isRelativeUrl) {
+		urlString = urlString.replace(/^(?!(?:\w+:)?\/\/)|^\/\//, options.defaultProtocol);
+	}
+
+	const urlObj = new URL(urlString);
+
+	if (options.forceHttp && options.forceHttps) {
+		throw new Error('The `forceHttp` and `forceHttps` options cannot be used together');
+	}
+
+	if (options.forceHttp && urlObj.protocol === 'https:') {
+		urlObj.protocol = 'http:';
+	}
+
+	if (options.forceHttps && urlObj.protocol === 'http:') {
+		urlObj.protocol = 'https:';
+	}
+
+	// Remove auth
+	if (options.stripAuthentication) {
+		urlObj.username = '';
+		urlObj.password = '';
+	}
+
+	// Remove hash
+	if (options.stripHash) {
+		urlObj.hash = '';
+	} else if (options.stripTextFragment) {
+		urlObj.hash = urlObj.hash.replace(/#?:~:text.*?$/i, '');
+	}
+
+	// Remove duplicate slashes if not preceded by a protocol
+	if (urlObj.pathname) {
+		urlObj.pathname = urlObj.pathname.replace(/(?<!\b[a-z][a-z\d+\-.]{1,50}:)\/{2,}/g, '/');
+	}
+
+	// Remove directory index
+	if (Array.isArray(options.removeDirectoryIndex) && options.removeDirectoryIndex.length > 0) {
+		let pathComponents = urlObj.pathname.split('/');
+		const lastComponent = pathComponents[pathComponents.length - 1];
+
+		if (testParameter(lastComponent, options.removeDirectoryIndex)) {
+			pathComponents = pathComponents.slice(0, pathComponents.length - 1);
+			urlObj.pathname = pathComponents.slice(1).join('/') + '/';
+		}
+	}
+
+	if (urlObj.hostname) {
+		// Remove trailing dot
+		urlObj.hostname = urlObj.hostname.replace(/\.$/, '');
+
+		// Remove `www.`
+		if (options.stripWWW && /^www\.(?!www\.)(?:[a-z\-\d]{1,63})\.(?:[a-z.\-\d]{2,63})$/.test(urlObj.hostname)) {
+			// Each label should be max 63 at length (min: 1).
+			// Source: https://en.wikipedia.org/wiki/Hostname#Restrictions_on_valid_host_names
+			// Each TLD should be up to 63 characters long (min: 2).
+			// It is technically possible to have a single character TLD, but none currently exist.
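+			// Illustrative sketch of the `www.` handling below, assuming the default
+			// options set above (`stripWWW: true`, `removeTrailingSlash: true`); the
+			// exact output depends on the options the caller passes in:
+			//   normalizeUrl('https://www.example.com/path/')
+			//   //=> 'https://example.com/path'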
+ urlObj.hostname = urlObj.hostname.replace(/^www\./, ''); + } + } + + // Remove query unwanted parameters + if (Array.isArray(options.removeQueryParameters)) { + for (const key of [...urlObj.searchParams.keys()]) { + if (testParameter(key, options.removeQueryParameters)) { + urlObj.searchParams.delete(key); + } + } + } + + if (options.removeQueryParameters === true) { + urlObj.search = ''; + } + + // Sort query parameters + if (options.sortQueryParameters) { + urlObj.searchParams.sort(); + } + + if (options.removeTrailingSlash) { + urlObj.pathname = urlObj.pathname.replace(/\/$/, ''); + } + + const oldUrlString = urlString; + + // Take advantage of many of the Node `url` normalizations + urlString = urlObj.toString(); + + if (!options.removeSingleSlash && urlObj.pathname === '/' && !oldUrlString.endsWith('/') && urlObj.hash === '') { + urlString = urlString.replace(/\/$/, ''); + } + + // Remove ending `/` unless removeSingleSlash is false + if ((options.removeTrailingSlash || urlObj.pathname === '/') && urlObj.hash === '' && options.removeSingleSlash) { + urlString = urlString.replace(/\/$/, ''); + } + + // Restore relative protocol, if applicable + if (hasRelativeProtocol && !options.normalizeProtocol) { + urlString = urlString.replace(/^http:\/\//, '//'); + } + + // Remove http/https + if (options.stripProtocol) { + urlString = urlString.replace(/^(?:https?:)?\/\//, ''); + } + + return urlString; +}; + +module.exports = normalizeUrl; + + +/***/ }), + +/***/ 1223: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var wrappy = __nccwpck_require__(2940) +module.exports = wrappy(once) +module.exports.strict = wrappy(onceStrict) + +once.proto = once(function () { + Object.defineProperty(Function.prototype, 'once', { + value: function () { + return once(this) + }, + configurable: true + }) + + Object.defineProperty(Function.prototype, 'onceStrict', { + value: function () { + return onceStrict(this) + }, + configurable: true + }) +}) + +function once (fn) { + var f = function () { + if (f.called) return f.value + f.called = true + return f.value = fn.apply(this, arguments) + } + f.called = false + return f +} + +function onceStrict (fn) { + var f = function () { + if (f.called) + throw new Error(f.onceError) + f.called = true + return f.value = fn.apply(this, arguments) + } + var name = fn.name || 'Function wrapped with `once`' + f.onceError = name + " shouldn't be called more than once" + f.called = false + return f +} + + +/***/ }), + +/***/ 9072: +/***/ ((module) => { + +"use strict"; + + +class CancelError extends Error { + constructor(reason) { + super(reason || 'Promise was canceled'); + this.name = 'CancelError'; + } + + get isCanceled() { + return true; + } +} + +class PCancelable { + static fn(userFn) { + return (...arguments_) => { + return new PCancelable((resolve, reject, onCancel) => { + arguments_.push(onCancel); + // eslint-disable-next-line promise/prefer-await-to-then + userFn(...arguments_).then(resolve, reject); + }); + }; + } + + constructor(executor) { + this._cancelHandlers = []; + this._isPending = true; + this._isCanceled = false; + this._rejectOnCancel = true; + + this._promise = new Promise((resolve, reject) => { + this._reject = reject; + + const onResolve = value => { + if (!this._isCanceled || !onCancel.shouldReject) { + this._isPending = false; + resolve(value); + } + }; + + const onReject = error => { + this._isPending = false; + reject(error); + }; + + const onCancel = handler => { + if (!this._isPending) { + throw new Error('The `onCancel` 
handler was attached after the promise settled.'); + } + + this._cancelHandlers.push(handler); + }; + + Object.defineProperties(onCancel, { + shouldReject: { + get: () => this._rejectOnCancel, + set: boolean => { + this._rejectOnCancel = boolean; + } + } + }); + + return executor(onResolve, onReject, onCancel); + }); + } + + then(onFulfilled, onRejected) { + // eslint-disable-next-line promise/prefer-await-to-then + return this._promise.then(onFulfilled, onRejected); + } + + catch(onRejected) { + return this._promise.catch(onRejected); + } + + finally(onFinally) { + return this._promise.finally(onFinally); + } + + cancel(reason) { + if (!this._isPending || this._isCanceled) { + return; + } + + this._isCanceled = true; + + if (this._cancelHandlers.length > 0) { + try { + for (const handler of this._cancelHandlers) { + handler(); + } + } catch (error) { + this._reject(error); + return; + } + } + + if (this._rejectOnCancel) { + this._reject(new CancelError(reason)); + } + } + + get isCanceled() { + return this._isCanceled; + } +} + +Object.setPrototypeOf(PCancelable.prototype, Promise.prototype); + +module.exports = PCancelable; +module.exports.CancelError = CancelError; + + +/***/ }), + +/***/ 8569: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const os = __nccwpck_require__(2037); +const pico = __nccwpck_require__(3322); + +const isWindows = os.platform() === 'win32'; + +function picomatch(glob, options, returnState = false) { + // default to os.platform() + if (options && (options.windows === null || options.windows === undefined)) { + // don't mutate the original options object + options = { ...options, windows: isWindows }; + } + return pico(glob, options, returnState); +} + +module.exports = picomatch; +// public api +module.exports.test = pico.test; +module.exports.matchBase = pico.matchBase; +module.exports.isMatch = pico.isMatch; +module.exports.parse = pico.parse; +module.exports.scan = pico.scan; +module.exports.compileRe = pico.compileRe; +module.exports.toRegex = pico.toRegex; +// for tests +module.exports.makeRe = pico.makeRe; + + +/***/ }), + +/***/ 6099: +/***/ ((module) => { + +"use strict"; + + +const WIN_SLASH = '\\\\/'; +const WIN_NO_SLASH = `[^${WIN_SLASH}]`; + +/** + * Posix glob regex + */ + +const DOT_LITERAL = '\\.'; +const PLUS_LITERAL = '\\+'; +const QMARK_LITERAL = '\\?'; +const SLASH_LITERAL = '\\/'; +const ONE_CHAR = '(?=.)'; +const QMARK = '[^/]'; +const END_ANCHOR = `(?:${SLASH_LITERAL}|$)`; +const START_ANCHOR = `(?:^|${SLASH_LITERAL})`; +const DOTS_SLASH = `${DOT_LITERAL}{1,2}${END_ANCHOR}`; +const NO_DOT = `(?!${DOT_LITERAL})`; +const NO_DOTS = `(?!${START_ANCHOR}${DOTS_SLASH})`; +const NO_DOT_SLASH = `(?!${DOT_LITERAL}{0,1}${END_ANCHOR})`; +const NO_DOTS_SLASH = `(?!${DOTS_SLASH})`; +const QMARK_NO_DOT = `[^.${SLASH_LITERAL}]`; +const STAR = `${QMARK}*?`; +const SEP = '/'; + +const POSIX_CHARS = { + DOT_LITERAL, + PLUS_LITERAL, + QMARK_LITERAL, + SLASH_LITERAL, + ONE_CHAR, + QMARK, + END_ANCHOR, + DOTS_SLASH, + NO_DOT, + NO_DOTS, + NO_DOT_SLASH, + NO_DOTS_SLASH, + QMARK_NO_DOT, + STAR, + START_ANCHOR, + SEP +}; + +/** + * Windows glob regex + */ + +const WINDOWS_CHARS = { + ...POSIX_CHARS, + + SLASH_LITERAL: `[${WIN_SLASH}]`, + QMARK: WIN_NO_SLASH, + STAR: `${WIN_NO_SLASH}*?`, + DOTS_SLASH: `${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$)`, + NO_DOT: `(?!${DOT_LITERAL})`, + NO_DOTS: `(?!(?:^|[${WIN_SLASH}])${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`, + NO_DOT_SLASH: `(?!${DOT_LITERAL}{0,1}(?:[${WIN_SLASH}]|$))`, + 
NO_DOTS_SLASH: `(?!${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`, + QMARK_NO_DOT: `[^.${WIN_SLASH}]`, + START_ANCHOR: `(?:^|[${WIN_SLASH}])`, + END_ANCHOR: `(?:[${WIN_SLASH}]|$)`, + SEP: '\\' +}; + +/** + * POSIX Bracket Regex + */ + +const POSIX_REGEX_SOURCE = { + alnum: 'a-zA-Z0-9', + alpha: 'a-zA-Z', + ascii: '\\x00-\\x7F', + blank: ' \\t', + cntrl: '\\x00-\\x1F\\x7F', + digit: '0-9', + graph: '\\x21-\\x7E', + lower: 'a-z', + print: '\\x20-\\x7E ', + punct: '\\-!"#$%&\'()\\*+,./:;<=>?@[\\]^_`{|}~', + space: ' \\t\\r\\n\\v\\f', + upper: 'A-Z', + word: 'A-Za-z0-9_', + xdigit: 'A-Fa-f0-9' +}; + +module.exports = { + MAX_LENGTH: 1024 * 64, + POSIX_REGEX_SOURCE, + + // regular expressions + REGEX_BACKSLASH: /\\(?![*+?^${}(|)[\]])/g, + REGEX_NON_SPECIAL_CHARS: /^[^@![\].,$*+?^{}()|\\/]+/, + REGEX_SPECIAL_CHARS: /[-*+?.^${}(|)[\]]/, + REGEX_SPECIAL_CHARS_BACKREF: /(\\?)((\W)(\3*))/g, + REGEX_SPECIAL_CHARS_GLOBAL: /([-*+?.^${}(|)[\]])/g, + REGEX_REMOVE_BACKSLASH: /(?:\[.*?[^\\]\]|\\(?=.))/g, + + // Replace globs with equivalent patterns to reduce parsing time. + REPLACEMENTS: { + '***': '*', + '**/**': '**', + '**/**/**': '**' + }, + + // Digits + CHAR_0: 48, /* 0 */ + CHAR_9: 57, /* 9 */ + + // Alphabet chars. + CHAR_UPPERCASE_A: 65, /* A */ + CHAR_LOWERCASE_A: 97, /* a */ + CHAR_UPPERCASE_Z: 90, /* Z */ + CHAR_LOWERCASE_Z: 122, /* z */ + + CHAR_LEFT_PARENTHESES: 40, /* ( */ + CHAR_RIGHT_PARENTHESES: 41, /* ) */ + + CHAR_ASTERISK: 42, /* * */ + + // Non-alphabetic chars. + CHAR_AMPERSAND: 38, /* & */ + CHAR_AT: 64, /* @ */ + CHAR_BACKWARD_SLASH: 92, /* \ */ + CHAR_CARRIAGE_RETURN: 13, /* \r */ + CHAR_CIRCUMFLEX_ACCENT: 94, /* ^ */ + CHAR_COLON: 58, /* : */ + CHAR_COMMA: 44, /* , */ + CHAR_DOT: 46, /* . */ + CHAR_DOUBLE_QUOTE: 34, /* " */ + CHAR_EQUAL: 61, /* = */ + CHAR_EXCLAMATION_MARK: 33, /* ! */ + CHAR_FORM_FEED: 12, /* \f */ + CHAR_FORWARD_SLASH: 47, /* / */ + CHAR_GRAVE_ACCENT: 96, /* ` */ + CHAR_HASH: 35, /* # */ + CHAR_HYPHEN_MINUS: 45, /* - */ + CHAR_LEFT_ANGLE_BRACKET: 60, /* < */ + CHAR_LEFT_CURLY_BRACE: 123, /* { */ + CHAR_LEFT_SQUARE_BRACKET: 91, /* [ */ + CHAR_LINE_FEED: 10, /* \n */ + CHAR_NO_BREAK_SPACE: 160, /* \u00A0 */ + CHAR_PERCENT: 37, /* % */ + CHAR_PLUS: 43, /* + */ + CHAR_QUESTION_MARK: 63, /* ? */ + CHAR_RIGHT_ANGLE_BRACKET: 62, /* > */ + CHAR_RIGHT_CURLY_BRACE: 125, /* } */ + CHAR_RIGHT_SQUARE_BRACKET: 93, /* ] */ + CHAR_SEMICOLON: 59, /* ; */ + CHAR_SINGLE_QUOTE: 39, /* ' */ + CHAR_SPACE: 32, /* */ + CHAR_TAB: 9, /* \t */ + CHAR_UNDERSCORE: 95, /* _ */ + CHAR_VERTICAL_LINE: 124, /* | */ + CHAR_ZERO_WIDTH_NOBREAK_SPACE: 65279, /* \uFEFF */ + + /** + * Create EXTGLOB_CHARS + */ + + extglobChars(chars) { + return { + '!': { type: 'negate', open: '(?:(?!(?:', close: `))${chars.STAR})` }, + '?': { type: 'qmark', open: '(?:', close: ')?' }, + '+': { type: 'plus', open: '(?:', close: ')+' }, + '*': { type: 'star', open: '(?:', close: ')*' }, + '@': { type: 'at', open: '(?:', close: ')' } + }; + }, + + /** + * Create GLOB_CHARS + */ + + globChars(win32) { + return win32 === true ? 
WINDOWS_CHARS : POSIX_CHARS; + } +}; + + +/***/ }), + +/***/ 2139: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const constants = __nccwpck_require__(6099); +const utils = __nccwpck_require__(479); + +/** + * Constants + */ + +const { + MAX_LENGTH, + POSIX_REGEX_SOURCE, + REGEX_NON_SPECIAL_CHARS, + REGEX_SPECIAL_CHARS_BACKREF, + REPLACEMENTS +} = constants; + +/** + * Helpers + */ + +const expandRange = (args, options) => { + if (typeof options.expandRange === 'function') { + return options.expandRange(...args, options); + } + + args.sort(); + const value = `[${args.join('-')}]`; + + try { + /* eslint-disable-next-line no-new */ + new RegExp(value); + } catch (ex) { + return args.map(v => utils.escapeRegex(v)).join('..'); + } + + return value; +}; + +/** + * Create the message for a syntax error + */ + +const syntaxError = (type, char) => { + return `Missing ${type}: "${char}" - use "\\\\${char}" to match literal characters`; +}; + +/** + * Parse the given input string. + * @param {String} input + * @param {Object} options + * @return {Object} + */ + +const parse = (input, options) => { + if (typeof input !== 'string') { + throw new TypeError('Expected a string'); + } + + input = REPLACEMENTS[input] || input; + + const opts = { ...options }; + const max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH; + + let len = input.length; + if (len > max) { + throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`); + } + + const bos = { type: 'bos', value: '', output: opts.prepend || '' }; + const tokens = [bos]; + + const capture = opts.capture ? '' : '?:'; + + // create constants based on platform, for windows or posix + const PLATFORM_CHARS = constants.globChars(opts.windows); + const EXTGLOB_CHARS = constants.extglobChars(PLATFORM_CHARS); + + const { + DOT_LITERAL, + PLUS_LITERAL, + SLASH_LITERAL, + ONE_CHAR, + DOTS_SLASH, + NO_DOT, + NO_DOT_SLASH, + NO_DOTS_SLASH, + QMARK, + QMARK_NO_DOT, + STAR, + START_ANCHOR + } = PLATFORM_CHARS; + + const globstar = opts => { + return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`; + }; + + const nodot = opts.dot ? '' : NO_DOT; + const qmarkNoDot = opts.dot ? QMARK : QMARK_NO_DOT; + let star = opts.bash === true ? globstar(opts) : STAR; + + if (opts.capture) { + star = `(${star})`; + } + + // minimatch options support + if (typeof opts.noext === 'boolean') { + opts.noextglob = opts.noext; + } + + const state = { + input, + index: -1, + start: 0, + dot: opts.dot === true, + consumed: '', + output: '', + prefix: '', + backtrack: false, + negated: false, + brackets: 0, + braces: 0, + parens: 0, + quotes: 0, + globstar: false, + tokens + }; + + input = utils.removePrefix(input, state); + len = input.length; + + const extglobs = []; + const braces = []; + const stack = []; + let prev = bos; + let value; + + /** + * Tokenizing helpers + */ + + const eos = () => state.index === len - 1; + const peek = state.peek = (n = 1) => input[state.index + n]; + const advance = state.advance = () => input[++state.index] || ''; + const remaining = () => input.slice(state.index + 1); + const consume = (value = '', num = 0) => { + state.consumed += value; + state.index += num; + }; + + const append = token => { + state.output += token.output != null ? token.output : token.value; + consume(token.value); + }; + + const negate = () => { + let count = 1; + + while (peek() === '!' 
&& (peek(2) !== '(' || peek(3) === '?')) { + advance(); + state.start++; + count++; + } + + if (count % 2 === 0) { + return false; + } + + state.negated = true; + state.start++; + return true; + }; + + const increment = type => { + state[type]++; + stack.push(type); + }; + + const decrement = type => { + state[type]--; + stack.pop(); + }; + + /** + * Push tokens onto the tokens array. This helper speeds up + * tokenizing by 1) helping us avoid backtracking as much as possible, + * and 2) helping us avoid creating extra tokens when consecutive + * characters are plain text. This improves performance and simplifies + * lookbehinds. + */ + + const push = tok => { + if (prev.type === 'globstar') { + const isBrace = state.braces > 0 && (tok.type === 'comma' || tok.type === 'brace'); + const isExtglob = tok.extglob === true || (extglobs.length && (tok.type === 'pipe' || tok.type === 'paren')); + + if (tok.type !== 'slash' && tok.type !== 'paren' && !isBrace && !isExtglob) { + state.output = state.output.slice(0, -prev.output.length); + prev.type = 'star'; + prev.value = '*'; + prev.output = star; + state.output += prev.output; + } + } + + if (extglobs.length && tok.type !== 'paren') { + extglobs[extglobs.length - 1].inner += tok.value; + } + + if (tok.value || tok.output) append(tok); + if (prev && prev.type === 'text' && tok.type === 'text') { + prev.value += tok.value; + prev.output = (prev.output || '') + tok.value; + return; + } + + tok.prev = prev; + tokens.push(tok); + prev = tok; + }; + + const extglobOpen = (type, value) => { + const token = { ...EXTGLOB_CHARS[value], conditions: 1, inner: '' }; + + token.prev = prev; + token.parens = state.parens; + token.output = state.output; + const output = (opts.capture ? '(' : '') + token.open; + + increment('parens'); + push({ type, value, output: state.output ? '' : ONE_CHAR }); + push({ type: 'paren', extglob: true, value: advance(), output }); + extglobs.push(token); + }; + + const extglobClose = token => { + let output = token.close + (opts.capture ? ')' : ''); + let rest; + + if (token.type === 'negate') { + let extglobStar = star; + + if (token.inner && token.inner.length > 1 && token.inner.includes('/')) { + extglobStar = globstar(opts); + } + + if (extglobStar !== star || eos() || /^\)+$/.test(remaining())) { + output = token.close = `)$))${extglobStar}`; + } + + if (token.inner.includes('*') && (rest = remaining()) && /^\.[^\\/.]+$/.test(rest)) { + // Any non-magical string (`.ts`) or even nested expression (`.{ts,tsx}`) can follow after the closing parenthesis. + // In this case, we need to parse the string and use it in the output of the original pattern. + // Suitable patterns: `/!(*.d).ts`, `/!(*.d).{ts,tsx}`, `**/!(*-dbg).@(js)`. + // + // Disabling the `fastpaths` option due to a problem with parsing strings as `.ts` in the pattern like `**/!(*.d).ts`. + const expression = parse(rest, { ...options, fastpaths: false }).output; + + output = token.close = `)${expression})${extglobStar})`; + } + + if (token.prev.type === 'bos') { + state.negatedExtglob = true; + } + } + + push({ type: 'paren', extglob: true, value, output }); + decrement('parens'); + }; + + /** + * Fast paths + */ + + if (opts.fastpaths !== false && !/(^[*!]|[/()[\]{}"])/.test(input)) { + let backslashes = false; + + let output = input.replace(REGEX_SPECIAL_CHARS_BACKREF, (m, esc, chars, first, rest, index) => { + if (first === '\\') { + backslashes = true; + return m; + } + + if (first === '?') { + if (esc) { + return esc + first + (rest ? 
QMARK.repeat(rest.length) : ''); + } + if (index === 0) { + return qmarkNoDot + (rest ? QMARK.repeat(rest.length) : ''); + } + return QMARK.repeat(chars.length); + } + + if (first === '.') { + return DOT_LITERAL.repeat(chars.length); + } + + if (first === '*') { + if (esc) { + return esc + first + (rest ? star : ''); + } + return star; + } + return esc ? m : `\\${m}`; + }); + + if (backslashes === true) { + if (opts.unescape === true) { + output = output.replace(/\\/g, ''); + } else { + output = output.replace(/\\+/g, m => { + return m.length % 2 === 0 ? '\\\\' : (m ? '\\' : ''); + }); + } + } + + if (output === input && opts.contains === true) { + state.output = input; + return state; + } + + state.output = utils.wrapOutput(output, state, options); + return state; + } + + /** + * Tokenize input until we reach end-of-string + */ + + while (!eos()) { + value = advance(); + + if (value === '\u0000') { + continue; + } + + /** + * Escaped characters + */ + + if (value === '\\') { + const next = peek(); + + if (next === '/' && opts.bash !== true) { + continue; + } + + if (next === '.' || next === ';') { + continue; + } + + if (!next) { + value += '\\'; + push({ type: 'text', value }); + continue; + } + + // collapse slashes to reduce potential for exploits + const match = /^\\+/.exec(remaining()); + let slashes = 0; + + if (match && match[0].length > 2) { + slashes = match[0].length; + state.index += slashes; + if (slashes % 2 !== 0) { + value += '\\'; + } + } + + if (opts.unescape === true) { + value = advance(); + } else { + value += advance(); + } + + if (state.brackets === 0) { + push({ type: 'text', value }); + continue; + } + } + + /** + * If we're inside a regex character class, continue + * until we reach the closing bracket. + */ + + if (state.brackets > 0 && (value !== ']' || prev.value === '[' || prev.value === '[^')) { + if (opts.posix !== false && value === ':') { + const inner = prev.value.slice(1); + if (inner.includes('[')) { + prev.posix = true; + + if (inner.includes(':')) { + const idx = prev.value.lastIndexOf('['); + const pre = prev.value.slice(0, idx); + const rest = prev.value.slice(idx + 2); + const posix = POSIX_REGEX_SOURCE[rest]; + if (posix) { + prev.value = pre + posix; + state.backtrack = true; + advance(); + + if (!bos.output && tokens.indexOf(prev) === 1) { + bos.output = ONE_CHAR; + } + continue; + } + } + } + } + + if ((value === '[' && peek() !== ':') || (value === '-' && peek() === ']')) { + value = `\\${value}`; + } + + if (value === ']' && (prev.value === '[' || prev.value === '[^')) { + value = `\\${value}`; + } + + if (opts.posix === true && value === '!' && prev.value === '[') { + value = '^'; + } + + prev.value += value; + append({ value }); + continue; + } + + /** + * If we're inside a quoted string, continue + * until we reach the closing double quote. + */ + + if (state.quotes === 1 && value !== '"') { + value = utils.escapeRegex(value); + prev.value += value; + append({ value }); + continue; + } + + /** + * Double quotes + */ + + if (value === '"') { + state.quotes = state.quotes === 1 ? 
0 : 1; + if (opts.keepQuotes === true) { + push({ type: 'text', value }); + } + continue; + } + + /** + * Parentheses + */ + + if (value === '(') { + increment('parens'); + push({ type: 'paren', value }); + continue; + } + + if (value === ')') { + if (state.parens === 0 && opts.strictBrackets === true) { + throw new SyntaxError(syntaxError('opening', '(')); + } + + const extglob = extglobs[extglobs.length - 1]; + if (extglob && state.parens === extglob.parens + 1) { + extglobClose(extglobs.pop()); + continue; + } + + push({ type: 'paren', value, output: state.parens ? ')' : '\\)' }); + decrement('parens'); + continue; + } + + /** + * Square brackets + */ + + if (value === '[') { + if (opts.nobracket === true || !remaining().includes(']')) { + if (opts.nobracket !== true && opts.strictBrackets === true) { + throw new SyntaxError(syntaxError('closing', ']')); + } + + value = `\\${value}`; + } else { + increment('brackets'); + } + + push({ type: 'bracket', value }); + continue; + } + + if (value === ']') { + if (opts.nobracket === true || (prev && prev.type === 'bracket' && prev.value.length === 1)) { + push({ type: 'text', value, output: `\\${value}` }); + continue; + } + + if (state.brackets === 0) { + if (opts.strictBrackets === true) { + throw new SyntaxError(syntaxError('opening', '[')); + } + + push({ type: 'text', value, output: `\\${value}` }); + continue; + } + + decrement('brackets'); + + const prevValue = prev.value.slice(1); + if (prev.posix !== true && prevValue[0] === '^' && !prevValue.includes('/')) { + value = `/${value}`; + } + + prev.value += value; + append({ value }); + + // when literal brackets are explicitly disabled + // assume we should match with a regex character class + if (opts.literalBrackets === false || utils.hasRegexChars(prevValue)) { + continue; + } + + const escaped = utils.escapeRegex(prev.value); + state.output = state.output.slice(0, -prev.value.length); + + // when literal brackets are explicitly enabled + // assume we should escape the brackets to match literal characters + if (opts.literalBrackets === true) { + state.output += escaped; + prev.value = escaped; + continue; + } + + // when the user specifies nothing, try to match both + prev.value = `(${capture}${escaped}|${prev.value})`; + state.output += prev.value; + continue; + } + + /** + * Braces + */ + + if (value === '{' && opts.nobrace !== true) { + increment('braces'); + + const open = { + type: 'brace', + value, + output: '(', + outputIndex: state.output.length, + tokensIndex: state.tokens.length + }; + + braces.push(open); + push(open); + continue; + } + + if (value === '}') { + const brace = braces[braces.length - 1]; + + if (opts.nobrace === true || !brace) { + push({ type: 'text', value, output: value }); + continue; + } + + let output = ')'; + + if (brace.dots === true) { + const arr = tokens.slice(); + const range = []; + + for (let i = arr.length - 1; i >= 0; i--) { + tokens.pop(); + if (arr[i].type === 'brace') { + break; + } + if (arr[i].type !== 'dots') { + range.unshift(arr[i].value); + } + } + + output = expandRange(range, opts); + state.backtrack = true; + } + + if (brace.comma !== true && brace.dots !== true) { + const out = state.output.slice(0, brace.outputIndex); + const toks = state.tokens.slice(brace.tokensIndex); + brace.value = brace.output = '\\{'; + value = output = '\\}'; + state.output = out; + for (const t of toks) { + state.output += (t.output || t.value); + } + } + + push({ type: 'brace', value, output }); + decrement('braces'); + braces.pop(); + continue; + } + + 
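+    /**
+     * A minimal sketch (assuming default options) of how the brace handling
+     * above surfaces through the public API: comma-separated branches compile
+     * to an alternation group, and `..` ranges collapse to a character class
+     * via `expandRange`.
+     *
+     * ```js
+     * const picomatch = require('picomatch');
+     * console.log(picomatch('a.{js,ts}')('a.ts'));            //=> true
+     * console.log(picomatch('a.{js,ts}')('a.md'));            //=> false
+     * console.log(picomatch('file{1..3}.txt')('file2.txt'));  //=> true
+     * ```
+     */
+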
/** + * Pipes + */ + + if (value === '|') { + if (extglobs.length > 0) { + extglobs[extglobs.length - 1].conditions++; + } + push({ type: 'text', value }); + continue; + } + + /** + * Commas + */ + + if (value === ',') { + let output = value; + + const brace = braces[braces.length - 1]; + if (brace && stack[stack.length - 1] === 'braces') { + brace.comma = true; + output = '|'; + } + + push({ type: 'comma', value, output }); + continue; + } + + /** + * Slashes + */ + + if (value === '/') { + // if the beginning of the glob is "./", advance the start + // to the current index, and don't add the "./" characters + // to the state. This greatly simplifies lookbehinds when + // checking for BOS characters like "!" and "." (not "./") + if (prev.type === 'dot' && state.index === state.start + 1) { + state.start = state.index + 1; + state.consumed = ''; + state.output = ''; + tokens.pop(); + prev = bos; // reset "prev" to the first token + continue; + } + + push({ type: 'slash', value, output: SLASH_LITERAL }); + continue; + } + + /** + * Dots + */ + + if (value === '.') { + if (state.braces > 0 && prev.type === 'dot') { + if (prev.value === '.') prev.output = DOT_LITERAL; + const brace = braces[braces.length - 1]; + prev.type = 'dots'; + prev.output += value; + prev.value += value; + brace.dots = true; + continue; + } + + if ((state.braces + state.parens) === 0 && prev.type !== 'bos' && prev.type !== 'slash') { + push({ type: 'text', value, output: DOT_LITERAL }); + continue; + } + + push({ type: 'dot', value, output: DOT_LITERAL }); + continue; + } + + /** + * Question marks + */ + + if (value === '?') { + const isGroup = prev && prev.value === '('; + if (!isGroup && opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { + extglobOpen('qmark', value); + continue; + } + + if (prev && prev.type === 'paren') { + const next = peek(); + let output = value; + + if (next === '<' && !utils.supportsLookbehinds()) { + throw new Error('Node.js v10 or higher is required for regex lookbehinds'); + } + + if ((prev.value === '(' && !/[!=<:]/.test(next)) || (next === '<' && !/<([!=]|\w+>)/.test(remaining()))) { + output = `\\${value}`; + } + + push({ type: 'text', value, output }); + continue; + } + + if (opts.dot !== true && (prev.type === 'slash' || prev.type === 'bos')) { + push({ type: 'qmark', value, output: QMARK_NO_DOT }); + continue; + } + + push({ type: 'qmark', value, output: QMARK }); + continue; + } + + /** + * Exclamation + */ + + if (value === '!') { + if (opts.noextglob !== true && peek() === '(') { + if (peek(2) !== '?' 
|| !/[!=<:]/.test(peek(3))) { + extglobOpen('negate', value); + continue; + } + } + + if (opts.nonegate !== true && state.index === 0) { + negate(); + continue; + } + } + + /** + * Plus + */ + + if (value === '+') { + if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { + extglobOpen('plus', value); + continue; + } + + if ((prev && prev.value === '(') || opts.regex === false) { + push({ type: 'plus', value, output: PLUS_LITERAL }); + continue; + } + + if ((prev && (prev.type === 'bracket' || prev.type === 'paren' || prev.type === 'brace')) || state.parens > 0) { + push({ type: 'plus', value }); + continue; + } + + push({ type: 'plus', value: PLUS_LITERAL }); + continue; + } + + /** + * Plain text + */ + + if (value === '@') { + if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { + push({ type: 'at', extglob: true, value, output: '' }); + continue; + } + + push({ type: 'text', value }); + continue; + } + + /** + * Plain text + */ + + if (value !== '*') { + if (value === '$' || value === '^') { + value = `\\${value}`; + } + + const match = REGEX_NON_SPECIAL_CHARS.exec(remaining()); + if (match) { + value += match[0]; + state.index += match[0].length; + } + + push({ type: 'text', value }); + continue; + } + + /** + * Stars + */ + + if (prev && (prev.type === 'globstar' || prev.star === true)) { + prev.type = 'star'; + prev.star = true; + prev.value += value; + prev.output = star; + state.backtrack = true; + state.globstar = true; + consume(value); + continue; + } + + let rest = remaining(); + if (opts.noextglob !== true && /^\([^?]/.test(rest)) { + extglobOpen('star', value); + continue; + } + + if (prev.type === 'star') { + if (opts.noglobstar === true) { + consume(value); + continue; + } + + const prior = prev.prev; + const before = prior.prev; + const isStart = prior.type === 'slash' || prior.type === 'bos'; + const afterStar = before && (before.type === 'star' || before.type === 'globstar'); + + if (opts.bash === true && (!isStart || (rest[0] && rest[0] !== '/'))) { + push({ type: 'star', value, output: '' }); + continue; + } + + const isBrace = state.braces > 0 && (prior.type === 'comma' || prior.type === 'brace'); + const isExtglob = extglobs.length && (prior.type === 'pipe' || prior.type === 'paren'); + if (!isStart && prior.type !== 'paren' && !isBrace && !isExtglob) { + push({ type: 'star', value, output: '' }); + continue; + } + + // strip consecutive `/**/` + while (rest.slice(0, 3) === '/**') { + const after = input[state.index + 4]; + if (after && after !== '/') { + break; + } + rest = rest.slice(3); + consume('/**', 3); + } + + if (prior.type === 'bos' && eos()) { + prev.type = 'globstar'; + prev.value += value; + prev.output = globstar(opts); + state.output = prev.output; + state.globstar = true; + consume(value); + continue; + } + + if (prior.type === 'slash' && prior.prev.type !== 'bos' && !afterStar && eos()) { + state.output = state.output.slice(0, -(prior.output + prev.output).length); + prior.output = `(?:${prior.output}`; + + prev.type = 'globstar'; + prev.output = globstar(opts) + (opts.strictSlashes ? ')' : '|$)'); + prev.value += value; + state.globstar = true; + state.output += prior.output + prev.output; + consume(value); + continue; + } + + if (prior.type === 'slash' && prior.prev.type !== 'bos' && rest[0] === '/') { + const end = rest[1] !== void 0 ? 
'|$' : ''; + + state.output = state.output.slice(0, -(prior.output + prev.output).length); + prior.output = `(?:${prior.output}`; + + prev.type = 'globstar'; + prev.output = `${globstar(opts)}${SLASH_LITERAL}|${SLASH_LITERAL}${end})`; + prev.value += value; + + state.output += prior.output + prev.output; + state.globstar = true; + + consume(value + advance()); + + push({ type: 'slash', value: '/', output: '' }); + continue; + } + + if (prior.type === 'bos' && rest[0] === '/') { + prev.type = 'globstar'; + prev.value += value; + prev.output = `(?:^|${SLASH_LITERAL}|${globstar(opts)}${SLASH_LITERAL})`; + state.output = prev.output; + state.globstar = true; + consume(value + advance()); + push({ type: 'slash', value: '/', output: '' }); + continue; + } + + // remove single star from output + state.output = state.output.slice(0, -prev.output.length); + + // reset previous token to globstar + prev.type = 'globstar'; + prev.output = globstar(opts); + prev.value += value; + + // reset output with globstar + state.output += prev.output; + state.globstar = true; + consume(value); + continue; + } + + const token = { type: 'star', value, output: star }; + + if (opts.bash === true) { + token.output = '.*?'; + if (prev.type === 'bos' || prev.type === 'slash') { + token.output = nodot + token.output; + } + push(token); + continue; + } + + if (prev && (prev.type === 'bracket' || prev.type === 'paren') && opts.regex === true) { + token.output = value; + push(token); + continue; + } + + if (state.index === state.start || prev.type === 'slash' || prev.type === 'dot') { + if (prev.type === 'dot') { + state.output += NO_DOT_SLASH; + prev.output += NO_DOT_SLASH; + + } else if (opts.dot === true) { + state.output += NO_DOTS_SLASH; + prev.output += NO_DOTS_SLASH; + + } else { + state.output += nodot; + prev.output += nodot; + } + + if (peek() !== '*') { + state.output += ONE_CHAR; + prev.output += ONE_CHAR; + } + } + + push(token); + } + + while (state.brackets > 0) { + if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ']')); + state.output = utils.escapeLast(state.output, '['); + decrement('brackets'); + } + + while (state.parens > 0) { + if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ')')); + state.output = utils.escapeLast(state.output, '('); + decrement('parens'); + } + + while (state.braces > 0) { + if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', '}')); + state.output = utils.escapeLast(state.output, '{'); + decrement('braces'); + } + + if (opts.strictSlashes !== true && (prev.type === 'star' || prev.type === 'bracket')) { + push({ type: 'maybe_slash', value: '', output: `${SLASH_LITERAL}?` }); + } + + // rebuild the output if we had to backtrack at any point + if (state.backtrack === true) { + state.output = ''; + + for (const token of state.tokens) { + state.output += token.output != null ? token.output : token.value; + + if (token.suffix) { + state.output += token.suffix; + } + } + } + + return state; +}; + +/** + * Fast paths for creating regular expressions for common glob patterns. + * This can significantly speed up processing and has very little downside + * impact when none of the fast paths match. + */ + +parse.fastpaths = (input, options) => { + const opts = { ...options }; + const max = typeof opts.maxLength === 'number' ? 
Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH; + const len = input.length; + if (len > max) { + throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`); + } + + input = REPLACEMENTS[input] || input; + + // create constants based on platform, for windows or posix + const { + DOT_LITERAL, + SLASH_LITERAL, + ONE_CHAR, + DOTS_SLASH, + NO_DOT, + NO_DOTS, + NO_DOTS_SLASH, + STAR, + START_ANCHOR + } = constants.globChars(opts.windows); + + const nodot = opts.dot ? NO_DOTS : NO_DOT; + const slashDot = opts.dot ? NO_DOTS_SLASH : NO_DOT; + const capture = opts.capture ? '' : '?:'; + const state = { negated: false, prefix: '' }; + let star = opts.bash === true ? '.*?' : STAR; + + if (opts.capture) { + star = `(${star})`; + } + + const globstar = opts => { + if (opts.noglobstar === true) return star; + return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`; + }; + + const create = str => { + switch (str) { + case '*': + return `${nodot}${ONE_CHAR}${star}`; + + case '.*': + return `${DOT_LITERAL}${ONE_CHAR}${star}`; + + case '*.*': + return `${nodot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`; + + case '*/*': + return `${nodot}${star}${SLASH_LITERAL}${ONE_CHAR}${slashDot}${star}`; + + case '**': + return nodot + globstar(opts); + + case '**/*': + return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${ONE_CHAR}${star}`; + + case '**/*.*': + return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`; + + case '**/.*': + return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${DOT_LITERAL}${ONE_CHAR}${star}`; + + default: { + const match = /^(.*?)\.(\w+)$/.exec(str); + if (!match) return; + + const source = create(match[1]); + if (!source) return; + + return source + DOT_LITERAL + match[2]; + } + } + }; + + const output = utils.removePrefix(input, state); + let source = create(output); + + if (source && opts.strictSlashes !== true) { + source += `${SLASH_LITERAL}?`; + } + + return source; +}; + +module.exports = parse; + + +/***/ }), + +/***/ 3322: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const scan = __nccwpck_require__(2429); +const parse = __nccwpck_require__(2139); +const utils = __nccwpck_require__(479); +const constants = __nccwpck_require__(6099); +const isObject = val => val && typeof val === 'object' && !Array.isArray(val); + +/** + * Creates a matcher function from one or more glob patterns. The + * returned function takes a string to match as its first argument, + * and returns true if the string is a match. The returned matcher + * function also takes a boolean as the second argument that, when true, + * returns an object with additional information. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch(glob[, options]); + * + * const isMatch = picomatch('*.!(*a)'); + * console.log(isMatch('a.a')); //=> false + * console.log(isMatch('a.b')); //=> true + * ``` + * @name picomatch + * @param {String|Array} `globs` One or more glob patterns. + * @param {Object=} `options` + * @return {Function=} Returns a matcher function. 
+ * @api public + */ + +const picomatch = (glob, options, returnState = false) => { + if (Array.isArray(glob)) { + const fns = glob.map(input => picomatch(input, options, returnState)); + const arrayMatcher = str => { + for (const isMatch of fns) { + const state = isMatch(str); + if (state) return state; + } + return false; + }; + return arrayMatcher; + } + + const isState = isObject(glob) && glob.tokens && glob.input; + + if (glob === '' || (typeof glob !== 'string' && !isState)) { + throw new TypeError('Expected pattern to be a non-empty string'); + } + + const opts = options || {}; + const posix = opts.windows; + const regex = isState + ? picomatch.compileRe(glob, options) + : picomatch.makeRe(glob, options, false, true); + + const state = regex.state; + delete regex.state; + + let isIgnored = () => false; + if (opts.ignore) { + const ignoreOpts = { ...options, ignore: null, onMatch: null, onResult: null }; + isIgnored = picomatch(opts.ignore, ignoreOpts, returnState); + } + + const matcher = (input, returnObject = false) => { + const { isMatch, match, output } = picomatch.test(input, regex, options, { glob, posix }); + const result = { glob, state, regex, posix, input, output, match, isMatch }; + + if (typeof opts.onResult === 'function') { + opts.onResult(result); + } + + if (isMatch === false) { + result.isMatch = false; + return returnObject ? result : false; + } + + if (isIgnored(input)) { + if (typeof opts.onIgnore === 'function') { + opts.onIgnore(result); + } + result.isMatch = false; + return returnObject ? result : false; + } + + if (typeof opts.onMatch === 'function') { + opts.onMatch(result); + } + return returnObject ? result : true; + }; + + if (returnState) { + matcher.state = state; + } + + return matcher; +}; + +/** + * Test `input` with the given `regex`. This is used by the main + * `picomatch()` function to test the input string. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch.test(input, regex[, options]); + * + * console.log(picomatch.test('foo/bar', /^(?:([^/]*?)\/([^/]*?))$/)); + * // { isMatch: true, match: [ 'foo/', 'foo', 'bar' ], output: 'foo/bar' } + * ``` + * @param {String} `input` String to test. + * @param {RegExp} `regex` + * @return {Object} Returns an object with matching info. + * @api public + */ + +picomatch.test = (input, regex, options, { glob, posix } = {}) => { + if (typeof input !== 'string') { + throw new TypeError('Expected input to be a string'); + } + + if (input === '') { + return { isMatch: false, output: '' }; + } + + const opts = options || {}; + const format = opts.format || (posix ? utils.toPosixSlashes : null); + let match = input === glob; + let output = (match && format) ? format(input) : input; + + if (match === false) { + output = format ? format(input) : input; + match = output === glob; + } + + if (match === false || opts.capture === true) { + if (opts.matchBase === true || opts.basename === true) { + match = picomatch.matchBase(input, regex, options, posix); + } else { + match = regex.exec(output); + } + } + + return { isMatch: Boolean(match), match, output }; +}; + +/** + * Match the basename of a filepath. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch.matchBase(input, glob[, options]); + * console.log(picomatch.matchBase('foo/bar.js', '*.js'); // true + * ``` + * @param {String} `input` String to test. + * @param {RegExp|String} `glob` Glob pattern or regex created by [.makeRe](#makeRe). 
+ * @return {Boolean} + * @api public + */ + +picomatch.matchBase = (input, glob, options) => { + const regex = glob instanceof RegExp ? glob : picomatch.makeRe(glob, options); + return regex.test(utils.basename(input)); +}; + +/** + * Returns true if **any** of the given glob `patterns` match the specified `string`. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch.isMatch(string, patterns[, options]); + * + * console.log(picomatch.isMatch('a.a', ['b.*', '*.a'])); //=> true + * console.log(picomatch.isMatch('a.a', 'b.*')); //=> false + * ``` + * @param {String|Array} str The string to test. + * @param {String|Array} patterns One or more glob patterns to use for matching. + * @param {Object} [options] See available [options](#options). + * @return {Boolean} Returns true if any patterns match `str` + * @api public + */ + +picomatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str); + +/** + * Parse a glob pattern to create the source string for a regular + * expression. + * + * ```js + * const picomatch = require('picomatch'); + * const result = picomatch.parse(pattern[, options]); + * ``` + * @param {String} `pattern` + * @param {Object} `options` + * @return {Object} Returns an object with useful properties and output to be used as a regex source string. + * @api public + */ + +picomatch.parse = (pattern, options) => { + if (Array.isArray(pattern)) return pattern.map(p => picomatch.parse(p, options)); + return parse(pattern, { ...options, fastpaths: false }); +}; + +/** + * Scan a glob pattern to separate the pattern into segments. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch.scan(input[, options]); + * + * const result = picomatch.scan('!./foo/*.js'); + * console.log(result); + * { prefix: '!./', + * input: '!./foo/*.js', + * start: 3, + * base: 'foo', + * glob: '*.js', + * isBrace: false, + * isBracket: false, + * isGlob: true, + * isExtglob: false, + * isGlobstar: false, + * negated: true } + * ``` + * @param {String} `input` Glob pattern to scan. + * @param {Object} `options` + * @return {Object} Returns an object with + * @api public + */ + +picomatch.scan = (input, options) => scan(input, options); + +/** + * Compile a regular expression from the `state` object returned by the + * [parse()](#parse) method. + * + * @param {Object} `state` + * @param {Object} `options` + * @param {Boolean} `returnOutput` Intended for implementors, this argument allows you to return the raw output from the parser. + * @param {Boolean} `returnState` Adds the state to a `state` property on the returned regex. Useful for implementors and debugging. + * @return {RegExp} + * @api public + */ + +picomatch.compileRe = (state, options, returnOutput = false, returnState = false) => { + if (returnOutput === true) { + return state.output; + } + + const opts = options || {}; + const prepend = opts.contains ? '' : '^'; + const append = opts.contains ? '' : '$'; + + let source = `${prepend}(?:${state.output})${append}`; + if (state && state.negated === true) { + source = `^(?!${source}).*$`; + } + + const regex = picomatch.toRegex(source, options); + if (returnState === true) { + regex.state = state; + } + + return regex; +}; + +/** + * Create a regular expression from a parsed glob pattern. 
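+ *
+ * Editorial note: the example below feeds a parsed `state` object to `compileRe`;
+ * `makeRe` (defined underneath this comment) accepts the raw pattern string
+ * directly. A minimal sketch of that shorter path:
+ *
+ * ```js
+ * console.log(picomatch.makeRe('*.js').test('index.js')); //=> true
+ * ```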
+ * + * ```js + * const picomatch = require('picomatch'); + * const state = picomatch.parse('*.js'); + * // picomatch.compileRe(state[, options]); + * + * console.log(picomatch.compileRe(state)); + * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ + * ``` + * @param {String} `state` The object returned from the `.parse` method. + * @param {Object} `options` + * @param {Boolean} `returnOutput` Implementors may use this argument to return the compiled output, instead of a regular expression. This is not exposed on the options to prevent end-users from mutating the result. + * @param {Boolean} `returnState` Implementors may use this argument to return the state from the parsed glob with the returned regular expression. + * @return {RegExp} Returns a regex created from the given pattern. + * @api public + */ + +picomatch.makeRe = (input, options = {}, returnOutput = false, returnState = false) => { + if (!input || typeof input !== 'string') { + throw new TypeError('Expected a non-empty string'); + } + + let parsed = { negated: false, fastpaths: true }; + + if (options.fastpaths !== false && (input[0] === '.' || input[0] === '*')) { + parsed.output = parse.fastpaths(input, options); + } + + if (!parsed.output) { + parsed = parse(input, options); + } + + return picomatch.compileRe(parsed, options, returnOutput, returnState); +}; + +/** + * Create a regular expression from the given regex source string. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch.toRegex(source[, options]); + * + * const { output } = picomatch.parse('*.js'); + * console.log(picomatch.toRegex(output)); + * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ + * ``` + * @param {String} `source` Regular expression source string. + * @param {Object} `options` + * @return {RegExp} + * @api public + */ + +picomatch.toRegex = (source, options) => { + try { + const opts = options || {}; + return new RegExp(source, opts.flags || (opts.nocase ? 'i' : '')); + } catch (err) { + if (options && options.debug === true) throw err; + return /$^/; + } +}; + +/** + * Picomatch constants. + * @return {Object} + */ + +picomatch.constants = constants; + +/** + * Expose "picomatch" + */ + +module.exports = picomatch; + + +/***/ }), + +/***/ 2429: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const utils = __nccwpck_require__(479); +const { + CHAR_ASTERISK, /* * */ + CHAR_AT, /* @ */ + CHAR_BACKWARD_SLASH, /* \ */ + CHAR_COMMA, /* , */ + CHAR_DOT, /* . */ + CHAR_EXCLAMATION_MARK, /* ! */ + CHAR_FORWARD_SLASH, /* / */ + CHAR_LEFT_CURLY_BRACE, /* { */ + CHAR_LEFT_PARENTHESES, /* ( */ + CHAR_LEFT_SQUARE_BRACKET, /* [ */ + CHAR_PLUS, /* + */ + CHAR_QUESTION_MARK, /* ? */ + CHAR_RIGHT_CURLY_BRACE, /* } */ + CHAR_RIGHT_PARENTHESES, /* ) */ + CHAR_RIGHT_SQUARE_BRACKET /* ] */ +} = __nccwpck_require__(6099); + +const isPathSeparator = code => { + return code === CHAR_FORWARD_SLASH || code === CHAR_BACKWARD_SLASH; +}; + +const depth = token => { + if (token.isPrefix !== true) { + token.depth = token.isGlobstar ? Infinity : 1; + } +}; + +/** + * Quickly scans a glob pattern and returns an object with a handful of + * useful properties, like `isGlob`, `path` (the leading non-glob, if it exists), + * `glob` (the actual pattern), `negated` (true if the path starts with `!` but not + * with `!(`) and `negatedExtglob` (true if the path starts with `!(`). 
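+ *
+ * Editor's sketch (output abridged, not from the upstream docs): with
+ * `{ parts: true }` the scanner also splits the input on slashes.
+ *
+ * ```js
+ * const pm = require('picomatch');
+ * const res = pm.scan('foo/bar/*.js', { parts: true });
+ * console.log(res.base, res.glob, res.parts);
+ * //=> 'foo/bar' '*.js' [ 'foo', 'bar', '*.js' ]
+ * ```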
+ * + * ```js + * const pm = require('picomatch'); + * console.log(pm.scan('foo/bar/*.js')); + * { isGlob: true, input: 'foo/bar/*.js', base: 'foo/bar', glob: '*.js' } + * ``` + * @param {String} `str` + * @param {Object} `options` + * @return {Object} Returns an object with tokens and regex source string. + * @api public + */ + +const scan = (input, options) => { + const opts = options || {}; + + const length = input.length - 1; + const scanToEnd = opts.parts === true || opts.scanToEnd === true; + const slashes = []; + const tokens = []; + const parts = []; + + let str = input; + let index = -1; + let start = 0; + let lastIndex = 0; + let isBrace = false; + let isBracket = false; + let isGlob = false; + let isExtglob = false; + let isGlobstar = false; + let braceEscaped = false; + let backslashes = false; + let negated = false; + let negatedExtglob = false; + let finished = false; + let braces = 0; + let prev; + let code; + let token = { value: '', depth: 0, isGlob: false }; + + const eos = () => index >= length; + const peek = () => str.charCodeAt(index + 1); + const advance = () => { + prev = code; + return str.charCodeAt(++index); + }; + + while (index < length) { + code = advance(); + let next; + + if (code === CHAR_BACKWARD_SLASH) { + backslashes = token.backslashes = true; + code = advance(); + + if (code === CHAR_LEFT_CURLY_BRACE) { + braceEscaped = true; + } + continue; + } + + if (braceEscaped === true || code === CHAR_LEFT_CURLY_BRACE) { + braces++; + + while (eos() !== true && (code = advance())) { + if (code === CHAR_BACKWARD_SLASH) { + backslashes = token.backslashes = true; + advance(); + continue; + } + + if (code === CHAR_LEFT_CURLY_BRACE) { + braces++; + continue; + } + + if (braceEscaped !== true && code === CHAR_DOT && (code = advance()) === CHAR_DOT) { + isBrace = token.isBrace = true; + isGlob = token.isGlob = true; + finished = true; + + if (scanToEnd === true) { + continue; + } + + break; + } + + if (braceEscaped !== true && code === CHAR_COMMA) { + isBrace = token.isBrace = true; + isGlob = token.isGlob = true; + finished = true; + + if (scanToEnd === true) { + continue; + } + + break; + } + + if (code === CHAR_RIGHT_CURLY_BRACE) { + braces--; + + if (braces === 0) { + braceEscaped = false; + isBrace = token.isBrace = true; + finished = true; + break; + } + } + } + + if (scanToEnd === true) { + continue; + } + + break; + } + + if (code === CHAR_FORWARD_SLASH) { + slashes.push(index); + tokens.push(token); + token = { value: '', depth: 0, isGlob: false }; + + if (finished === true) continue; + if (prev === CHAR_DOT && index === (start + 1)) { + start += 2; + continue; + } + + lastIndex = index + 1; + continue; + } + + if (opts.noext !== true) { + const isExtglobChar = code === CHAR_PLUS + || code === CHAR_AT + || code === CHAR_ASTERISK + || code === CHAR_QUESTION_MARK + || code === CHAR_EXCLAMATION_MARK; + + if (isExtglobChar === true && peek() === CHAR_LEFT_PARENTHESES) { + isGlob = token.isGlob = true; + isExtglob = token.isExtglob = true; + finished = true; + if (code === CHAR_EXCLAMATION_MARK && index === start) { + negatedExtglob = true; + } + + if (scanToEnd === true) { + while (eos() !== true && (code = advance())) { + if (code === CHAR_BACKWARD_SLASH) { + backslashes = token.backslashes = true; + code = advance(); + continue; + } + + if (code === CHAR_RIGHT_PARENTHESES) { + isGlob = token.isGlob = true; + finished = true; + break; + } + } + continue; + } + break; + } + } + + if (code === CHAR_ASTERISK) { + if (prev === CHAR_ASTERISK) isGlobstar = 
token.isGlobstar = true; + isGlob = token.isGlob = true; + finished = true; + + if (scanToEnd === true) { + continue; + } + break; + } + + if (code === CHAR_QUESTION_MARK) { + isGlob = token.isGlob = true; + finished = true; + + if (scanToEnd === true) { + continue; + } + break; + } + + if (code === CHAR_LEFT_SQUARE_BRACKET) { + while (eos() !== true && (next = advance())) { + if (next === CHAR_BACKWARD_SLASH) { + backslashes = token.backslashes = true; + advance(); + continue; + } + + if (next === CHAR_RIGHT_SQUARE_BRACKET) { + isBracket = token.isBracket = true; + isGlob = token.isGlob = true; + finished = true; + break; + } + } + + if (scanToEnd === true) { + continue; + } + + break; + } + + if (opts.nonegate !== true && code === CHAR_EXCLAMATION_MARK && index === start) { + negated = token.negated = true; + start++; + continue; + } + + if (opts.noparen !== true && code === CHAR_LEFT_PARENTHESES) { + isGlob = token.isGlob = true; + + if (scanToEnd === true) { + while (eos() !== true && (code = advance())) { + if (code === CHAR_LEFT_PARENTHESES) { + backslashes = token.backslashes = true; + code = advance(); + continue; + } + + if (code === CHAR_RIGHT_PARENTHESES) { + finished = true; + break; + } + } + continue; + } + break; + } + + if (isGlob === true) { + finished = true; + + if (scanToEnd === true) { + continue; + } + + break; + } + } + + if (opts.noext === true) { + isExtglob = false; + isGlob = false; + } + + let base = str; + let prefix = ''; + let glob = ''; + + if (start > 0) { + prefix = str.slice(0, start); + str = str.slice(start); + lastIndex -= start; + } + + if (base && isGlob === true && lastIndex > 0) { + base = str.slice(0, lastIndex); + glob = str.slice(lastIndex); + } else if (isGlob === true) { + base = ''; + glob = str; + } else { + base = str; + } + + if (base && base !== '' && base !== '/' && base !== str) { + if (isPathSeparator(base.charCodeAt(base.length - 1))) { + base = base.slice(0, -1); + } + } + + if (opts.unescape === true) { + if (glob) glob = utils.removeBackslashes(glob); + + if (base && backslashes === true) { + base = utils.removeBackslashes(base); + } + } + + const state = { + prefix, + input, + start, + base, + glob, + isBrace, + isBracket, + isGlob, + isExtglob, + isGlobstar, + negated, + negatedExtglob + }; + + if (opts.tokens === true) { + state.maxDepth = 0; + if (!isPathSeparator(code)) { + tokens.push(token); + } + state.tokens = tokens; + } + + if (opts.parts === true || opts.tokens === true) { + let prevIndex; + + for (let idx = 0; idx < slashes.length; idx++) { + const n = prevIndex ? 
prevIndex + 1 : start; + const i = slashes[idx]; + const value = input.slice(n, i); + if (opts.tokens) { + if (idx === 0 && start !== 0) { + tokens[idx].isPrefix = true; + tokens[idx].value = prefix; + } else { + tokens[idx].value = value; + } + depth(tokens[idx]); + state.maxDepth += tokens[idx].depth; + } + if (idx !== 0 || value !== '') { + parts.push(value); + } + prevIndex = i; + } + + if (prevIndex && prevIndex + 1 < input.length) { + const value = input.slice(prevIndex + 1); + parts.push(value); + + if (opts.tokens) { + tokens[tokens.length - 1].value = value; + depth(tokens[tokens.length - 1]); + state.maxDepth += tokens[tokens.length - 1].depth; + } + } + + state.slashes = slashes; + state.parts = parts; + } + + return state; +}; + +module.exports = scan; + + +/***/ }), + +/***/ 479: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +const { + REGEX_BACKSLASH, + REGEX_REMOVE_BACKSLASH, + REGEX_SPECIAL_CHARS, + REGEX_SPECIAL_CHARS_GLOBAL +} = __nccwpck_require__(6099); + +exports.isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val); +exports.hasRegexChars = str => REGEX_SPECIAL_CHARS.test(str); +exports.isRegexChar = str => str.length === 1 && exports.hasRegexChars(str); +exports.escapeRegex = str => str.replace(REGEX_SPECIAL_CHARS_GLOBAL, '\\$1'); +exports.toPosixSlashes = str => str.replace(REGEX_BACKSLASH, '/'); + +exports.removeBackslashes = str => { + return str.replace(REGEX_REMOVE_BACKSLASH, match => { + return match === '\\' ? '' : match; + }); +}; + +exports.supportsLookbehinds = () => { + if (typeof process !== 'undefined') { + const segs = process.version.slice(1).split('.').map(Number); + if (segs.length === 3 && segs[0] >= 9 || (segs[0] === 8 && segs[1] >= 10)) { + return true; + } + } + return false; +}; + +exports.escapeLast = (input, char, lastIdx) => { + const idx = input.lastIndexOf(char, lastIdx); + if (idx === -1) return input; + if (input[idx - 1] === '\\') return exports.escapeLast(input, char, idx - 1); + return `${input.slice(0, idx)}\\${input.slice(idx)}`; +}; + +exports.removePrefix = (input, state = {}) => { + let output = input; + if (output.startsWith('./')) { + output = output.slice(2); + state.prefix = './'; + } + return output; +}; + +exports.wrapOutput = (input, state = {}, options = {}) => { + const prepend = options.contains ? '' : '^'; + const append = options.contains ? '' : '$'; + + let output = `${prepend}(?:${input})${append}`; + if (state.negated === true) { + output = `(?:^(?!${output}).*$)`; + } + return output; +}; + +exports.basename = (path, { windows } = {}) => { + const segs = path.split(windows ? 
/[\\/]/ : '/'); + const last = segs[segs.length - 1]; + + if (last === '') { + return segs[segs.length - 2]; + } + + return last; +}; + + +/***/ }), + +/***/ 8341: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var once = __nccwpck_require__(1223) +var eos = __nccwpck_require__(1205) +var fs = __nccwpck_require__(7147) // we only need fs to get the ReadStream and WriteStream prototypes + +var noop = function () {} +var ancient = /^v?\.0/.test(process.version) + +var isFn = function (fn) { + return typeof fn === 'function' +} + +var isFS = function (stream) { + if (!ancient) return false // newer node version do not need to care about fs is a special way + if (!fs) return false // browser + return (stream instanceof (fs.ReadStream || noop) || stream instanceof (fs.WriteStream || noop)) && isFn(stream.close) +} + +var isRequest = function (stream) { + return stream.setHeader && isFn(stream.abort) +} + +var destroyer = function (stream, reading, writing, callback) { + callback = once(callback) + + var closed = false + stream.on('close', function () { + closed = true + }) + + eos(stream, {readable: reading, writable: writing}, function (err) { + if (err) return callback(err) + closed = true + callback() + }) + + var destroyed = false + return function (err) { + if (closed) return + if (destroyed) return + destroyed = true + + if (isFS(stream)) return stream.close(noop) // use close for fs streams to avoid fd leaks + if (isRequest(stream)) return stream.abort() // request.destroy just do .end - .abort is what we want + + if (isFn(stream.destroy)) return stream.destroy() + + callback(err || new Error('stream was destroyed')) + } +} + +var call = function (fn) { + fn() +} + +var pipe = function (from, to) { + return from.pipe(to) +} + +var pump = function () { + var streams = Array.prototype.slice.call(arguments) + var callback = isFn(streams[streams.length - 1] || noop) && streams.pop() || noop + + if (Array.isArray(streams[0])) streams = streams[0] + if (streams.length < 2) throw new Error('pump requires two streams per minimum') + + var error + var destroys = streams.map(function (stream, i) { + var reading = i < streams.length - 1 + var writing = i > 0 + return destroyer(stream, reading, writing, function (err) { + if (!error) error = err + if (err) destroys.forEach(call) + if (reading) return + destroys.forEach(call) + callback(error) + }) + }) + + return streams.reduce(pipe) +} + +module.exports = pump + + +/***/ }), + +/***/ 9795: +/***/ ((module) => { + +/*! queue-microtask. MIT License. Feross Aboukhadijeh */ +let promise + +module.exports = typeof queueMicrotask === 'function' + ? queueMicrotask.bind(typeof window !== 'undefined' ? 
window : global) + // reuse resolved promise, and allocate it lazily + : cb => (promise || (promise = Promise.resolve())) + .then(cb) + .catch(err => setTimeout(() => { throw err }, 0)) + + +/***/ }), + +/***/ 9273: +/***/ ((module) => { + +"use strict"; + + +class QuickLRU { + constructor(options = {}) { + if (!(options.maxSize && options.maxSize > 0)) { + throw new TypeError('`maxSize` must be a number greater than 0'); + } + + this.maxSize = options.maxSize; + this.onEviction = options.onEviction; + this.cache = new Map(); + this.oldCache = new Map(); + this._size = 0; + } + + _set(key, value) { + this.cache.set(key, value); + this._size++; + + if (this._size >= this.maxSize) { + this._size = 0; + + if (typeof this.onEviction === 'function') { + for (const [key, value] of this.oldCache.entries()) { + this.onEviction(key, value); + } + } + + this.oldCache = this.cache; + this.cache = new Map(); + } + } + + get(key) { + if (this.cache.has(key)) { + return this.cache.get(key); + } + + if (this.oldCache.has(key)) { + const value = this.oldCache.get(key); + this.oldCache.delete(key); + this._set(key, value); + return value; + } + } + + set(key, value) { + if (this.cache.has(key)) { + this.cache.set(key, value); + } else { + this._set(key, value); + } + + return this; + } + + has(key) { + return this.cache.has(key) || this.oldCache.has(key); + } + + peek(key) { + if (this.cache.has(key)) { + return this.cache.get(key); + } + + if (this.oldCache.has(key)) { + return this.oldCache.get(key); + } + } + + delete(key) { + const deleted = this.cache.delete(key); + if (deleted) { + this._size--; + } + + return this.oldCache.delete(key) || deleted; + } + + clear() { + this.cache.clear(); + this.oldCache.clear(); + this._size = 0; + } + + * keys() { + for (const [key] of this) { + yield key; + } + } + + * values() { + for (const [, value] of this) { + yield value; + } + } + + * [Symbol.iterator]() { + for (const item of this.cache) { + yield item; + } + + for (const item of this.oldCache) { + const [key] = item; + if (!this.cache.has(key)) { + yield item; + } + } + } + + get size() { + let oldCacheSize = 0; + for (const key of this.oldCache.keys()) { + if (!this.cache.has(key)) { + oldCacheSize++; + } + } + + return Math.min(this._size + oldCacheSize, this.maxSize); + } +} + +module.exports = QuickLRU; + + +/***/ }), + +/***/ 6624: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +const tls = __nccwpck_require__(4404); + +module.exports = (options = {}, connect = tls.connect) => new Promise((resolve, reject) => { + let timeout = false; + + let socket; + + const callback = async () => { + await socketPromise; + + socket.off('timeout', onTimeout); + socket.off('error', reject); + + if (options.resolveSocket) { + resolve({alpnProtocol: socket.alpnProtocol, socket, timeout}); + + if (timeout) { + await Promise.resolve(); + socket.emit('timeout'); + } + } else { + socket.destroy(); + resolve({alpnProtocol: socket.alpnProtocol, timeout}); + } + }; + + const onTimeout = async () => { + timeout = true; + callback(); + }; + + const socketPromise = (async () => { + try { + socket = await connect(options, callback); + + socket.on('error', reject); + socket.once('timeout', onTimeout); + } catch (error) { + reject(error); + } + })(); +}); + + +/***/ }), + +/***/ 9004: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const Readable = (__nccwpck_require__(2781).Readable); +const lowercaseKeys = __nccwpck_require__(9662); + +class 
Response extends Readable { + constructor(statusCode, headers, body, url) { + if (typeof statusCode !== 'number') { + throw new TypeError('Argument `statusCode` should be a number'); + } + if (typeof headers !== 'object') { + throw new TypeError('Argument `headers` should be an object'); + } + if (!(body instanceof Buffer)) { + throw new TypeError('Argument `body` should be a buffer'); + } + if (typeof url !== 'string') { + throw new TypeError('Argument `url` should be a string'); + } + + super(); + this.statusCode = statusCode; + this.headers = lowercaseKeys(headers); + this.body = body; + this.url = url; + } + + _read() { + this.push(this.body); + this.push(null); + } +} + +module.exports = Response; + + +/***/ }), + +/***/ 2113: +/***/ ((module) => { + +"use strict"; + + +function reusify (Constructor) { + var head = new Constructor() + var tail = head + + function get () { + var current = head + + if (current.next) { + head = current.next + } else { + head = new Constructor() + tail = head + } + + current.next = null + + return current + } + + function release (obj) { + tail.next = obj + tail = obj + } + + return { + get: get, + release: release + } +} + +module.exports = reusify + + +/***/ }), + +/***/ 5288: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/*! run-parallel. MIT License. Feross Aboukhadijeh */ +module.exports = runParallel + +const queueMicrotask = __nccwpck_require__(9795) + +function runParallel (tasks, cb) { + let results, pending, keys + let isSync = true + + if (Array.isArray(tasks)) { + results = [] + pending = tasks.length + } else { + keys = Object.keys(tasks) + results = {} + pending = keys.length + } + + function done (err) { + function end () { + if (cb) cb(err, results) + cb = null + } + if (isSync) queueMicrotask(end) + else end() + } + + function each (i, err, result) { + results[i] = result + if (--pending === 0 || err) { + done(err) + } + } + + if (!pending) { + // empty + done(null) + } else if (keys) { + // object + keys.forEach(function (key) { + tasks[key](function (err, result) { each(key, err, result) }) + }) + } else { + // array + tasks.forEach(function (task, i) { + task(function (err, result) { each(i, err, result) }) + }) + } + + isSync = false +} + + +/***/ }), + +/***/ 2043: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +;(function (sax) { // wrapper for non-node envs + sax.parser = function (strict, opt) { return new SAXParser(strict, opt) } + sax.SAXParser = SAXParser + sax.SAXStream = SAXStream + sax.createStream = createStream + + // When we pass the MAX_BUFFER_LENGTH position, start checking for buffer overruns. + // When we check, schedule the next check for MAX_BUFFER_LENGTH - (max(buffer lengths)), + // since that's the earliest that a buffer overrun could occur. This way, checks are + // as rare as required, but as often as necessary to ensure never crossing this bound. + // Furthermore, buffers are only tested at most once per write(), so passing a very + // large string into write() might have undesirable effects, but this is manageable by + // the caller, so it is assumed to be safe. Thus, a call to write() may, in the extreme + // edge case, result in creating at most one complete copy of the string passed in. + // Set to Infinity to have unlimited buffers. 
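+  // Editorial sketch (not from the sax docs): the limit can be raised by the
+  // caller before parsing, since checkBufferLength() re-reads it on every check:
+  //
+  //   sax.MAX_BUFFER_LENGTH = 256 * 1024  // allow larger text/cdata buffers
+  //   var parser = sax.parser(true)       // strict parser, default options
+  //
+  // Larger values trade memory for fewer forced text/cdata flushes.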
+ sax.MAX_BUFFER_LENGTH = 64 * 1024 + + var buffers = [ + 'comment', 'sgmlDecl', 'textNode', 'tagName', 'doctype', + 'procInstName', 'procInstBody', 'entity', 'attribName', + 'attribValue', 'cdata', 'script' + ] + + sax.EVENTS = [ + 'text', + 'processinginstruction', + 'sgmldeclaration', + 'doctype', + 'comment', + 'opentagstart', + 'attribute', + 'opentag', + 'closetag', + 'opencdata', + 'cdata', + 'closecdata', + 'error', + 'end', + 'ready', + 'script', + 'opennamespace', + 'closenamespace' + ] + + function SAXParser (strict, opt) { + if (!(this instanceof SAXParser)) { + return new SAXParser(strict, opt) + } + + var parser = this + clearBuffers(parser) + parser.q = parser.c = '' + parser.bufferCheckPosition = sax.MAX_BUFFER_LENGTH + parser.opt = opt || {} + parser.opt.lowercase = parser.opt.lowercase || parser.opt.lowercasetags + parser.looseCase = parser.opt.lowercase ? 'toLowerCase' : 'toUpperCase' + parser.tags = [] + parser.closed = parser.closedRoot = parser.sawRoot = false + parser.tag = parser.error = null + parser.strict = !!strict + parser.noscript = !!(strict || parser.opt.noscript) + parser.state = S.BEGIN + parser.strictEntities = parser.opt.strictEntities + parser.ENTITIES = parser.strictEntities ? Object.create(sax.XML_ENTITIES) : Object.create(sax.ENTITIES) + parser.attribList = [] + + // namespaces form a prototype chain. + // it always points at the current tag, + // which protos to its parent tag. + if (parser.opt.xmlns) { + parser.ns = Object.create(rootNS) + } + + // mostly just for error reporting + parser.trackPosition = parser.opt.position !== false + if (parser.trackPosition) { + parser.position = parser.line = parser.column = 0 + } + emit(parser, 'onready') + } + + if (!Object.create) { + Object.create = function (o) { + function F () {} + F.prototype = o + var newf = new F() + return newf + } + } + + if (!Object.keys) { + Object.keys = function (o) { + var a = [] + for (var i in o) if (o.hasOwnProperty(i)) a.push(i) + return a + } + } + + function checkBufferLength (parser) { + var maxAllowed = Math.max(sax.MAX_BUFFER_LENGTH, 10) + var maxActual = 0 + for (var i = 0, l = buffers.length; i < l; i++) { + var len = parser[buffers[i]].length + if (len > maxAllowed) { + // Text/cdata nodes can get big, and since they're buffered, + // we can get here under normal conditions. + // Avoid issues by emitting the text node now, + // so at least it won't get any bigger. + switch (buffers[i]) { + case 'textNode': + closeText(parser) + break + + case 'cdata': + emitNode(parser, 'oncdata', parser.cdata) + parser.cdata = '' + break + + case 'script': + emitNode(parser, 'onscript', parser.script) + parser.script = '' + break + + default: + error(parser, 'Max buffer length exceeded: ' + buffers[i]) + } + } + maxActual = Math.max(maxActual, len) + } + // schedule the next check for the earliest possible buffer overrun. 
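+    // Worked example (editorial, assuming the default 64 KiB limit): if the
+    // largest buffer currently holds 24000 chars, maxActual = 24000 and the
+    // next check is scheduled 65536 - 24000 = 41536 chars past the current
+    // parser.position, the earliest point at which that buffer could reach
+    // the limit.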
+ var m = sax.MAX_BUFFER_LENGTH - maxActual + parser.bufferCheckPosition = m + parser.position + } + + function clearBuffers (parser) { + for (var i = 0, l = buffers.length; i < l; i++) { + parser[buffers[i]] = '' + } + } + + function flushBuffers (parser) { + closeText(parser) + if (parser.cdata !== '') { + emitNode(parser, 'oncdata', parser.cdata) + parser.cdata = '' + } + if (parser.script !== '') { + emitNode(parser, 'onscript', parser.script) + parser.script = '' + } + } + + SAXParser.prototype = { + end: function () { end(this) }, + write: write, + resume: function () { this.error = null; return this }, + close: function () { return this.write(null) }, + flush: function () { flushBuffers(this) } + } + + var Stream + try { + Stream = (__nccwpck_require__(2781).Stream) + } catch (ex) { + Stream = function () {} + } + + var streamWraps = sax.EVENTS.filter(function (ev) { + return ev !== 'error' && ev !== 'end' + }) + + function createStream (strict, opt) { + return new SAXStream(strict, opt) + } + + function SAXStream (strict, opt) { + if (!(this instanceof SAXStream)) { + return new SAXStream(strict, opt) + } + + Stream.apply(this) + + this._parser = new SAXParser(strict, opt) + this.writable = true + this.readable = true + + var me = this + + this._parser.onend = function () { + me.emit('end') + } + + this._parser.onerror = function (er) { + me.emit('error', er) + + // if didn't throw, then means error was handled. + // go ahead and clear error, so we can write again. + me._parser.error = null + } + + this._decoder = null + + streamWraps.forEach(function (ev) { + Object.defineProperty(me, 'on' + ev, { + get: function () { + return me._parser['on' + ev] + }, + set: function (h) { + if (!h) { + me.removeAllListeners(ev) + me._parser['on' + ev] = h + return h + } + me.on(ev, h) + }, + enumerable: true, + configurable: false + }) + }) + } + + SAXStream.prototype = Object.create(Stream.prototype, { + constructor: { + value: SAXStream + } + }) + + SAXStream.prototype.write = function (data) { + if (typeof Buffer === 'function' && + typeof Buffer.isBuffer === 'function' && + Buffer.isBuffer(data)) { + if (!this._decoder) { + var SD = (__nccwpck_require__(1576).StringDecoder) + this._decoder = new SD('utf8') + } + data = this._decoder.write(data) + } + + this._parser.write(data.toString()) + this.emit('data', data) + return true + } + + SAXStream.prototype.end = function (chunk) { + if (chunk && chunk.length) { + this.write(chunk) + } + this._parser.end() + return true + } + + SAXStream.prototype.on = function (ev, handler) { + var me = this + if (!me._parser['on' + ev] && streamWraps.indexOf(ev) !== -1) { + me._parser['on' + ev] = function () { + var args = arguments.length === 1 ? [arguments[0]] : Array.apply(null, arguments) + args.splice(0, 0, ev) + me.emit.apply(me, args) + } + } + + return Stream.prototype.on.call(me, ev, handler) + } + + // this really needs to be replaced with character classes. + // XML allows all manner of ridiculous numbers and digits. + var CDATA = '[CDATA[' + var DOCTYPE = 'DOCTYPE' + var XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace' + var XMLNS_NAMESPACE = 'http://www.w3.org/2000/xmlns/' + var rootNS = { xml: XML_NAMESPACE, xmlns: XMLNS_NAMESPACE } + + // http://www.w3.org/TR/REC-xml/#NT-NameStartChar + // This implementation works on strings, a single character at a time + // as such, it cannot ever support astral-plane characters (10000-EFFFF) + // without a significant breaking change to either this parser, or the + // JavaScript language. 
Implementation of an emoji-capable xml parser + // is left as an exercise for the reader. + var nameStart = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/ + + var nameBody = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/ + + var entityStart = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/ + var entityBody = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/ + + function isWhitespace (c) { + return c === ' ' || c === '\n' || c === '\r' || c === '\t' + } + + function isQuote (c) { + return c === '"' || c === '\'' + } + + function isAttribEnd (c) { + return c === '>' || isWhitespace(c) + } + + function isMatch (regex, c) { + return regex.test(c) + } + + function notMatch (regex, c) { + return !isMatch(regex, c) + } + + var S = 0 + sax.STATE = { + BEGIN: S++, // leading byte order mark or whitespace + BEGIN_WHITESPACE: S++, // leading whitespace + TEXT: S++, // general stuff + TEXT_ENTITY: S++, // & and such. + OPEN_WAKA: S++, // < + SGML_DECL: S++, // + SCRIPT: S++, //