diff --git a/__tests__/dart-json.test.ts b/__tests__/dart-json.test.ts
index cd2d683..feae7e6 100644
--- a/__tests__/dart-json.test.ts
+++ b/__tests__/dart-json.test.ts
@@ -4,14 +4,14 @@ import * as path from 'path'
import {DartJsonParser} from '../src/parsers/dart-json/dart-json-parser'
import {ParseOptions} from '../src/test-parser'
import {getReport} from '../src/report/get-report'
-import {normalizeFilePath} from '../src/utils/file-utils'
+import {normalizeFilePath} from '../src/utils/path-utils'
describe('dart-json tests', () => {
it('matches report snapshot', async () => {
const opts: ParseOptions = {
parseErrors: true,
- trackedFiles: ['lib/main.dart', 'test/main_test.dart', 'test/second_test.dart'],
- workDir: 'C:/Users/Michal/Workspace/dorny/test-check/reports/dart/'
+ trackedFiles: ['lib/main.dart', 'test/main_test.dart', 'test/second_test.dart']
+ //workDir: 'C:/Users/Michal/Workspace/dorny/test-check/reports/dart/'
}
const fixturePath = path.join(__dirname, 'fixtures', 'dart-json.json')
@@ -38,8 +38,8 @@ describe('dart-json tests', () => {
const trackedFiles = fs.readFileSync(trackedFilesPath, {encoding: 'utf8'}).split(/\n\r?/g)
const opts: ParseOptions = {
trackedFiles,
- parseErrors: true,
- workDir: '/__w/provider/provider/'
+ parseErrors: true
+ //workDir: '/__w/provider/provider/'
}
const parser = new DartJsonParser(opts, 'flutter')
diff --git a/__tests__/dotnet-trx.test.ts b/__tests__/dotnet-trx.test.ts
index 72ae170..b32bca1 100644
--- a/__tests__/dotnet-trx.test.ts
+++ b/__tests__/dotnet-trx.test.ts
@@ -4,7 +4,7 @@ import * as path from 'path'
import {DotnetTrxParser} from '../src/parsers/dotnet-trx/dotnet-trx-parser'
import {ParseOptions} from '../src/test-parser'
import {getReport} from '../src/report/get-report'
-import {normalizeFilePath} from '../src/utils/file-utils'
+import {normalizeFilePath} from '../src/utils/path-utils'
describe('dotnet-trx tests', () => {
it('matches report snapshot', async () => {
@@ -15,8 +15,8 @@ describe('dotnet-trx tests', () => {
const opts: ParseOptions = {
parseErrors: true,
- trackedFiles: ['DotnetTests.Unit/Calculator.cs', 'DotnetTests.XUnitTests/CalculatorTests.cs'],
- workDir: 'C:/Users/Michal/Workspace/dorny/test-check/reports/dotnet/'
+ trackedFiles: ['DotnetTests.Unit/Calculator.cs', 'DotnetTests.XUnitTests/CalculatorTests.cs']
+ //workDir: 'C:/Users/Michal/Workspace/dorny/test-check/reports/dotnet/'
}
const parser = new DotnetTrxParser(opts)
@@ -36,8 +36,7 @@ describe('dotnet-trx tests', () => {
const opts: ParseOptions = {
trackedFiles: [],
- parseErrors: true,
- workDir: ''
+ parseErrors: true
}
const parser = new DotnetTrxParser(opts)
diff --git a/__tests__/jest-junit.test.ts b/__tests__/jest-junit.test.ts
index c8a915a..6e5b86b 100644
--- a/__tests__/jest-junit.test.ts
+++ b/__tests__/jest-junit.test.ts
@@ -4,7 +4,7 @@ import * as path from 'path'
import {JestJunitParser} from '../src/parsers/jest-junit/jest-junit-parser'
import {ParseOptions} from '../src/test-parser'
import {getReport} from '../src/report/get-report'
-import {normalizeFilePath} from '../src/utils/file-utils'
+import {normalizeFilePath} from '../src/utils/path-utils'
describe('jest-junit tests', () => {
it('report from ./reports/jest test results matches snapshot', async () => {
@@ -15,8 +15,8 @@ describe('jest-junit tests', () => {
const opts: ParseOptions = {
parseErrors: true,
- trackedFiles: ['__tests__/main.test.js', '__tests__/second.test.js', 'lib/main.js'],
- workDir: 'C:/Users/Michal/Workspace/dorny/test-check/reports/jest/'
+ trackedFiles: ['__tests__/main.test.js', '__tests__/second.test.js', 'lib/main.js']
+ //workDir: 'C:/Users/Michal/Workspace/dorny/test-check/reports/jest/'
}
const parser = new JestJunitParser(opts)
@@ -38,8 +38,8 @@ describe('jest-junit tests', () => {
const trackedFiles = fs.readFileSync(trackedFilesPath, {encoding: 'utf8'}).split(/\n\r?/g)
const opts: ParseOptions = {
parseErrors: true,
- trackedFiles,
- workDir: '/home/dorny/dorny/jest/'
+ trackedFiles
+ //workDir: '/home/dorny/dorny/jest/'
}
const parser = new JestJunitParser(opts)
diff --git a/dist/index.js b/dist/index.js
index e1c9a5f..137debc 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -2,7 +2,7 @@ require('./sourcemap-register.js');module.exports =
/******/ (() => { // webpackBootstrap
/******/ var __webpack_modules__ = ({
-/***/ 3109:
+/***/ 7171:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
@@ -30,134 +30,297 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.getFiles = void 0;
-const core = __importStar(__nccwpck_require__(2186));
+exports.ArtifactProvider = void 0;
const github = __importStar(__nccwpck_require__(5438));
+const adm_zip_1 = __importDefault(__nccwpck_require__(6761));
+const picomatch_1 = __importDefault(__nccwpck_require__(8569));
+const github_utils_1 = __nccwpck_require__(3522);
+class ArtifactProvider {
+ constructor(octokit, artifact, name, pattern, sha, runId) {
+ this.octokit = octokit;
+ this.artifact = artifact;
+ this.name = name;
+ this.pattern = pattern;
+ this.sha = sha;
+ this.runId = runId;
+ if (this.artifact.startsWith('/')) {
+ const re = new RegExp(this.artifact);
+ this.artifactNameMatch = (str) => re.test(str);
+ this.getReportName = (str) => {
+ const match = str.match(re);
+ if (match === null) {
+ throw new Error(`Artifact name '${str}' does not match regex ${this.artifact}`);
+ }
+ let reportName = this.name;
+ for (let i = 1; i < match.length; i++) {
+                reportName = reportName.replace(new RegExp(`\\$${i}`, 'g'), match[i]);
+ }
+ return reportName;
+ };
+ }
+ else {
+ this.artifactNameMatch = (str) => str === this.artifact;
+ this.getReportName = () => this.name;
+ }
+ this.fileNameMatch = picomatch_1.default(pattern);
+ }
+ async load() {
+ const result = {};
+ const resp = await this.octokit.actions.listWorkflowRunArtifacts({
+ ...github.context.repo,
+ run_id: this.runId
+ });
+ const artifacts = resp.data.artifacts.filter(a => this.artifactNameMatch(a.name));
+ for (const art of artifacts) {
+ await github_utils_1.downloadArtifact(this.octokit, art.id, art.name);
+ const reportName = this.getReportName(art.name);
+ const files = [];
+ const zip = new adm_zip_1.default(art.name);
+ for (const entry of zip.getEntries()) {
+ const file = entry.name;
+ if (entry.isDirectory || !this.fileNameMatch(file))
+ continue;
+ const content = zip.readAsText(entry);
+ files.push({ file, content });
+ }
+ if (result[reportName]) {
+ result[reportName].push(...files);
+ }
+ else {
+ result[reportName] = files;
+ }
+ }
+ return result;
+ }
+ async listTrackedFiles() {
+ return github_utils_1.listFiles(this.octokit, this.sha);
+ }
+}
+exports.ArtifactProvider = ArtifactProvider;
+
+
+/***/ }),
+
+/***/ 9399:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+"use strict";
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.LocalFileProvider = void 0;
const fs = __importStar(__nccwpck_require__(5747));
const fast_glob_1 = __importDefault(__nccwpck_require__(3664));
+const git_1 = __nccwpck_require__(9844);
+class LocalFileProvider {
+ constructor(name, pattern) {
+ this.name = name;
+ this.pattern = pattern;
+ }
+ async load() {
+ const result = [];
+ for (const pat of this.pattern) {
+ const paths = await fast_glob_1.default(pat, { dot: true });
+ for (const file of paths) {
+ const content = await fs.promises.readFile(file, { encoding: 'utf8' });
+ result.push({ file, content });
+ }
+ }
+ return { [this.name]: result };
+ }
+ async listTrackedFiles() {
+ return git_1.listFiles();
+ }
+}
+exports.LocalFileProvider = LocalFileProvider;
+
+
+/***/ }),
+
+/***/ 3109:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+"use strict";
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const core = __importStar(__nccwpck_require__(2186));
+const github = __importStar(__nccwpck_require__(5438));
+const artifact_provider_1 = __nccwpck_require__(7171);
+const local_file_provider_1 = __nccwpck_require__(9399);
const get_annotations_1 = __nccwpck_require__(5867);
const get_report_1 = __nccwpck_require__(3737);
const dart_json_parser_1 = __nccwpck_require__(4528);
const dotnet_trx_parser_1 = __nccwpck_require__(2664);
const jest_junit_parser_1 = __nccwpck_require__(1113);
-const file_utils_1 = __nccwpck_require__(2711);
-const git_1 = __nccwpck_require__(9844);
+const path_utils_1 = __nccwpck_require__(4070);
const github_utils_1 = __nccwpck_require__(3522);
const markdown_utils_1 = __nccwpck_require__(6482);
-async function run() {
+async function main() {
try {
- await main();
+ const testReporter = new TestReporter();
+ await testReporter.run();
}
catch (error) {
core.setFailed(error.message);
}
}
-async function main() {
- const name = core.getInput('name', { required: true });
- const path = core.getInput('path', { required: true });
- const reporter = core.getInput('reporter', { required: true });
- const listSuites = core.getInput('list-suites', { required: true });
- const listTests = core.getInput('list-tests', { required: true });
- const maxAnnotations = parseInt(core.getInput('max-annotations', { required: true }));
- const failOnError = core.getInput('fail-on-error', { required: true }) === 'true';
- const workDirInput = core.getInput('working-directory', { required: false });
- const token = core.getInput('token', { required: true });
- if (listSuites !== 'all' && listSuites !== 'failed') {
- core.setFailed(`Input parameter 'list-suites' has invalid value`);
- return;
- }
- if (listTests !== 'all' && listTests !== 'failed' && listTests !== 'none') {
- core.setFailed(`Input parameter 'list-tests' has invalid value`);
- return;
- }
- if (isNaN(maxAnnotations) || maxAnnotations < 0 || maxAnnotations > 50) {
- core.setFailed(`Input parameter 'max-annotations' has invalid value`);
- return;
+class TestReporter {
+ constructor() {
+ this.artifact = core.getInput('artifact', { required: false });
+ this.name = core.getInput('name', { required: true });
+ this.path = core.getInput('path', { required: true });
+ this.reporter = core.getInput('reporter', { required: true });
+ this.listSuites = core.getInput('list-suites', { required: true });
+ this.listTests = core.getInput('list-tests', { required: true });
+ this.maxAnnotations = parseInt(core.getInput('max-annotations', { required: true }));
+ this.failOnError = core.getInput('fail-on-error', { required: true }) === 'true';
+ this.workDirInput = core.getInput('working-directory', { required: false });
+ this.token = core.getInput('token', { required: true });
+ this.context = github_utils_1.getCheckRunContext();
+ this.octokit = github.getOctokit(this.token);
+ if (this.listSuites !== 'all' && this.listSuites !== 'failed') {
+ core.setFailed(`Input parameter 'list-suites' has invalid value`);
+ return;
+ }
+ if (this.listTests !== 'all' && this.listTests !== 'failed' && this.listTests !== 'none') {
+ core.setFailed(`Input parameter 'list-tests' has invalid value`);
+ return;
+ }
+ if (isNaN(this.maxAnnotations) || this.maxAnnotations < 0 || this.maxAnnotations > 50) {
+ core.setFailed(`Input parameter 'max-annotations' has invalid value`);
+ return;
+ }
}
- if (workDirInput) {
- core.info(`Changing directory to '${workDirInput}'`);
- process.chdir(workDirInput);
- }
- const workDir = file_utils_1.normalizeDirPath(process.cwd(), true);
- core.info(`Using working-directory '${workDir}'`);
- const octokit = github.getOctokit(token);
- const sha = github_utils_1.getCheckRunSha();
- // We won't need tracked files if we are not going to create annotations
- const parseErrors = maxAnnotations > 0;
- const trackedFiles = parseErrors ? await git_1.listFiles() : [];
- const options = {
- trackedFiles,
- workDir,
- parseErrors
- };
- core.info(`Using test report parser '${reporter}'`);
- const parser = getParser(reporter, options);
- const files = await getFiles(path);
- if (files.length === 0) {
- core.setFailed(`No file matches path '${path}'`);
- return;
+ async run() {
+ if (this.workDirInput) {
+ core.info(`Changing directory to '${this.workDirInput}'`);
+ process.chdir(this.workDirInput);
+ }
+ const pattern = this.path.split(',');
+ const inputProvider = this.artifact
+ ? new artifact_provider_1.ArtifactProvider(this.octokit, this.artifact, this.name, pattern, this.context.sha, this.context.runId)
+ : new local_file_provider_1.LocalFileProvider(this.name, pattern);
+ const parseErrors = this.maxAnnotations > 0;
+ const trackedFiles = await inputProvider.listTrackedFiles();
+ const workDir = this.artifact ? undefined : path_utils_1.normalizeDirPath(process.cwd(), true);
+ const options = {
+ workDir,
+ trackedFiles,
+ parseErrors
+ };
+ core.info(`Using test report parser '${this.reporter}'`);
+ const parser = this.getParser(this.reporter, options);
+ const results = [];
+ const input = await inputProvider.load();
+ for (const [reportName, files] of Object.entries(input)) {
+ const tr = await this.createReport(parser, reportName, files);
+ results.push(...tr);
+ }
+ const isFailed = results.some(tr => tr.result === 'failed');
+ const conclusion = isFailed ? 'failure' : 'success';
+ const passed = results.reduce((sum, tr) => sum + tr.passed, 0);
+ const failed = results.reduce((sum, tr) => sum + tr.failed, 0);
+ const skipped = results.reduce((sum, tr) => sum + tr.skipped, 0);
+ const time = results.reduce((sum, tr) => sum + tr.time, 0);
+ core.setOutput('conclusion', conclusion);
+ core.setOutput('passed', passed);
+ core.setOutput('failed', failed);
+ core.setOutput('skipped', skipped);
+ core.setOutput('time', time);
+ if (this.failOnError && isFailed) {
+ core.setFailed(`Failed test has been found and 'fail-on-error' option is set to ${this.failOnError}`);
+ }
+ }
+ async createReport(parser, name, files) {
+ if (files.length === 0) {
+ core.error(`${name}: No file matches path ${this.path}`);
+ return [];
+ }
+ const results = [];
+ for (const { file, content } of files) {
+ core.info(`Processing test report '${file}'`);
+ const tr = await parser.parse(file, content);
+ results.push(tr);
+ }
+ core.info('Creating report summary');
+ const { listSuites, listTests } = this;
+ const summary = get_report_1.getReport(results, { listSuites, listTests });
+ core.info('Creating annotations');
+ const annotations = get_annotations_1.getAnnotations(results, this.maxAnnotations);
+ const isFailed = results.some(tr => tr.result === 'failed');
+ const conclusion = isFailed ? 'failure' : 'success';
+ const icon = isFailed ? markdown_utils_1.Icon.fail : markdown_utils_1.Icon.success;
+ core.info(`Creating check run '${name}' with conclusion '${conclusion}'`);
+ await this.octokit.checks.create({
+ head_sha: this.context.sha,
+ name,
+ conclusion,
+ status: 'completed',
+ output: {
+ title: `${name} ${icon}`,
+ summary,
+ annotations
+ },
+ ...github.context.repo
+ });
+ return results;
}
- const results = [];
- for (const file of files) {
- core.info(`Processing test report '${file}'`);
- const content = await fs.promises.readFile(file, { encoding: 'utf8' });
- const tr = await parser.parse(file, content);
- results.push(tr);
- }
- core.info('Creating report summary');
- const summary = get_report_1.getReport(results, { listSuites, listTests });
- core.info('Creating annotations');
- const annotations = get_annotations_1.getAnnotations(results, maxAnnotations);
- const isFailed = results.some(tr => tr.result === 'failed');
- const conclusion = isFailed ? 'failure' : 'success';
- const icon = isFailed ? markdown_utils_1.Icon.fail : markdown_utils_1.Icon.success;
- core.info(`Creating check run '${name}' with conclusion '${conclusion}'`);
- await octokit.checks.create({
- head_sha: sha,
- name,
- conclusion,
- status: 'completed',
- output: {
- title: `${name} ${icon}`,
- summary,
- annotations
- },
- ...github.context.repo
- });
- const passed = results.reduce((sum, tr) => sum + tr.passed, 0);
- const failed = results.reduce((sum, tr) => sum + tr.failed, 0);
- const skipped = results.reduce((sum, tr) => sum + tr.skipped, 0);
- const time = results.reduce((sum, tr) => sum + tr.time, 0);
- core.setOutput('conclusion', conclusion);
- core.setOutput('passed', passed);
- core.setOutput('failed', failed);
- core.setOutput('skipped', skipped);
- core.setOutput('time', time);
- if (failOnError && isFailed) {
- core.setFailed(`Failed test has been found and 'fail-on-error' option is set to ${failOnError}`);
- }
-}
-function getParser(reporter, options) {
- switch (reporter) {
- case 'dart-json':
- return new dart_json_parser_1.DartJsonParser(options, 'dart');
- case 'dotnet-trx':
- return new dotnet_trx_parser_1.DotnetTrxParser(options);
- case 'flutter-json':
- return new dart_json_parser_1.DartJsonParser(options, 'flutter');
- case 'jest-junit':
- return new jest_junit_parser_1.JestJunitParser(options);
- default:
- throw new Error(`Input variable 'reporter' is set to invalid value '${reporter}'`);
+ getParser(reporter, options) {
+ switch (reporter) {
+ case 'dart-json':
+ return new dart_json_parser_1.DartJsonParser(options, 'dart');
+ case 'dotnet-trx':
+ return new dotnet_trx_parser_1.DotnetTrxParser(options);
+ case 'flutter-json':
+ return new dart_json_parser_1.DartJsonParser(options, 'flutter');
+ case 'jest-junit':
+ return new jest_junit_parser_1.JestJunitParser(options);
+ default:
+ throw new Error(`Input variable 'reporter' is set to invalid value '${reporter}'`);
+ }
}
}
-async function getFiles(pattern) {
- const tasks = pattern.split(',').map(async (pat) => fast_glob_1.default(pat, { dot: true }));
- const paths = await Promise.all(tasks);
- return paths.flat();
-}
-exports.getFiles = getFiles;
-run();
+main();
/***/ }),
@@ -169,7 +332,7 @@ run();
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.DartJsonParser = void 0;
-const file_utils_1 = __nccwpck_require__(2711);
+const path_utils_1 = __nccwpck_require__(4070);
const dart_json_types_1 = __nccwpck_require__(7887);
const test_results_1 = __nccwpck_require__(2768);
class TestRun {
@@ -337,7 +500,7 @@ class DartJsonParser {
const match = str.match(re);
if (match !== null) {
const [_, pathStr, lineStr] = match;
- const path = file_utils_1.normalizeFilePath(this.getRelativePath(pathStr));
+ const path = path_utils_1.normalizeFilePath(this.getRelativePath(pathStr));
if (trackedFiles.includes(path)) {
const line = parseInt(lineStr);
return { path, line };
@@ -346,15 +509,20 @@ class DartJsonParser {
}
}
getRelativePath(path) {
- const { workDir } = this.options;
const prefix = 'file://';
if (path.startsWith(prefix)) {
path = path.substr(prefix.length);
}
- if (path.startsWith(workDir)) {
+ path = path_utils_1.normalizeFilePath(path);
+ const workDir = this.getWorkDir(path);
+ if (workDir !== undefined && path.startsWith(workDir)) {
path = path.substr(workDir.length);
}
- return file_utils_1.normalizeFilePath(path);
+ return path;
+ }
+ getWorkDir(path) {
+ var _a, _b;
+ return ((_b = (_a = this.options.workDir) !== null && _a !== void 0 ? _a : this.assumedWorkDir) !== null && _b !== void 0 ? _b : (this.assumedWorkDir = path_utils_1.getBasePath(path, this.options.trackedFiles)));
}
}
exports.DartJsonParser = DartJsonParser;
@@ -410,7 +578,7 @@ exports.isMessageEvent = isMessageEvent;
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.DotnetTrxParser = void 0;
const xml2js_1 = __nccwpck_require__(6189);
-const file_utils_1 = __nccwpck_require__(2711);
+const path_utils_1 = __nccwpck_require__(4070);
const parse_utils_1 = __nccwpck_require__(7811);
const test_results_1 = __nccwpck_require__(2768);
class TestClass {
@@ -523,20 +691,27 @@ class DotnetTrxParser {
exceptionThrowSource(stackTrace) {
const lines = stackTrace.split(/\r*\n/);
const re = / in (.+):line (\d+)$/;
- const { workDir, trackedFiles } = this.options;
+ const { trackedFiles } = this.options;
for (const str of lines) {
const match = str.match(re);
if (match !== null) {
const [_, fileStr, lineStr] = match;
- const filePath = file_utils_1.normalizeFilePath(fileStr);
- const file = filePath.startsWith(workDir) ? filePath.substr(workDir.length) : filePath;
- if (trackedFiles.includes(file)) {
- const line = parseInt(lineStr);
- return { path: file, line };
+ const filePath = path_utils_1.normalizeFilePath(fileStr);
+ const workDir = this.getWorkDir(filePath);
+ if (workDir) {
+ const file = filePath.substr(workDir.length);
+ if (trackedFiles.includes(file)) {
+ const line = parseInt(lineStr);
+ return { path: file, line };
+ }
}
}
}
}
+ getWorkDir(path) {
+ var _a, _b;
+ return ((_b = (_a = this.options.workDir) !== null && _a !== void 0 ? _a : this.assumedWorkDir) !== null && _b !== void 0 ? _b : (this.assumedWorkDir = path_utils_1.getBasePath(path, this.options.trackedFiles)));
+ }
}
exports.DotnetTrxParser = DotnetTrxParser;
@@ -551,7 +726,7 @@ exports.DotnetTrxParser = DotnetTrxParser;
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.JestJunitParser = void 0;
const xml2js_1 = __nccwpck_require__(6189);
-const file_utils_1 = __nccwpck_require__(2711);
+const path_utils_1 = __nccwpck_require__(4070);
const test_results_1 = __nccwpck_require__(2768);
class JestJunitParser {
constructor(options) {
@@ -628,13 +803,20 @@ class JestJunitParser {
exceptionThrowSource(stackTrace) {
const lines = stackTrace.split(/\r?\n/);
const re = /\((.*):(\d+):\d+\)$/;
- const { workDir, trackedFiles } = this.options;
+ const { trackedFiles } = this.options;
for (const str of lines) {
const match = str.match(re);
if (match !== null) {
const [_, fileStr, lineStr] = match;
- const filePath = file_utils_1.normalizeFilePath(fileStr);
- const path = filePath.startsWith(workDir) ? filePath.substr(workDir.length) : filePath;
+ const filePath = path_utils_1.normalizeFilePath(fileStr);
+ if (filePath.startsWith('internal/') || filePath.includes('/node_modules/')) {
+ continue;
+ }
+ const workDir = this.getWorkDir(filePath);
+ if (!workDir) {
+ continue;
+ }
+ const path = filePath.substr(workDir.length);
if (trackedFiles.includes(path)) {
const line = parseInt(lineStr);
return { path, line };
@@ -642,6 +824,10 @@ class JestJunitParser {
}
}
}
+ getWorkDir(path) {
+ var _a, _b;
+ return ((_b = (_a = this.options.workDir) !== null && _a !== void 0 ? _a : this.assumedWorkDir) !== null && _b !== void 0 ? _b : (this.assumedWorkDir = path_utils_1.getBasePath(path, this.options.trackedFiles)));
+ }
}
exports.JestJunitParser = JestJunitParser;
@@ -1060,35 +1246,6 @@ async function exec(commandLine, args, options) {
exports.default = exec;
-/***/ }),
-
-/***/ 2711:
-/***/ ((__unused_webpack_module, exports) => {
-
-"use strict";
-
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.normalizeFilePath = exports.normalizeDirPath = void 0;
-function normalizeDirPath(path, addTrailingSlash) {
- if (!path) {
- return path;
- }
- path = normalizeFilePath(path);
- if (addTrailingSlash && !path.endsWith('/')) {
- path += '/';
- }
- return path;
-}
-exports.normalizeDirPath = normalizeDirPath;
-function normalizeFilePath(path) {
- if (!path) {
- return path;
- }
- return path.trim().replace(/\\/g, '/');
-}
-exports.normalizeFilePath = normalizeFilePath;
-
-
/***/ }),
/***/ 9844:
@@ -1169,17 +1326,89 @@ var __importStar = (this && this.__importStar) || function (mod) {
__setModuleDefault(result, mod);
return result;
};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.getCheckRunSha = void 0;
+exports.listFiles = exports.downloadArtifact = exports.getCheckRunContext = void 0;
+const fs_1 = __nccwpck_require__(5747);
+const core = __importStar(__nccwpck_require__(2186));
const github = __importStar(__nccwpck_require__(5438));
-function getCheckRunSha() {
- if (github.context.payload.pull_request) {
+const stream = __importStar(__nccwpck_require__(2413));
+const util_1 = __nccwpck_require__(1669);
+const got_1 = __importDefault(__nccwpck_require__(3061));
+const asyncStream = util_1.promisify(stream.pipeline);
+function getCheckRunContext() {
+ if (github.context.eventName === 'workflow_run') {
+ const event = github.context.payload;
+ if (!event.workflow_run) {
+ throw new Error("Event of type 'workflow_run' is missing 'workflow_run' field");
+ }
+ return {
+ sha: event.workflow_run.head_commit.id,
+ runId: event.workflow_run.id
+ };
+ }
+ const runId = github.context.runId;
+    if (github.context.eventName === 'pull_request' && github.context.payload.pull_request) {
const pr = github.context.payload.pull_request;
- return pr.head.sha;
+ return { sha: pr.head.sha, runId };
+ }
+ return { sha: github.context.sha, runId };
+}
+exports.getCheckRunContext = getCheckRunContext;
+async function downloadArtifact(octokit, artifactId, fileName) {
+ const resp = await octokit.actions.downloadArtifact({
+ ...github.context.repo,
+ artifact_id: artifactId,
+ archive_format: 'zip'
+ });
+ const url = resp.headers.location;
+ if (url === undefined) {
+ throw new Error('Location header was not found in API response');
+ }
+ const downloadStream = got_1.default.stream(url);
+ const fileWriterStream = fs_1.createWriteStream(fileName);
+ downloadStream.on('downloadProgress', ({ transferred, total, percent }) => {
+ const percentage = Math.round(percent * 100);
+ core.info(`progress: ${transferred}/${total} (${percentage}%)`);
+ });
+ core.startGroup(`Downloading ${fileName} from ${url}`);
+ try {
+ await asyncStream(downloadStream, fileWriterStream);
+ }
+ finally {
+ core.endGroup();
+ }
+}
+exports.downloadArtifact = downloadArtifact;
+async function listFiles(octokit, sha) {
+ core.info('Fetching list of tracked files from GitHub');
+ const commit = await octokit.git.getCommit({
+ commit_sha: sha,
+ ...github.context.repo
+ });
+ return await listGitTree(octokit, commit.data.tree.sha, '');
+}
+exports.listFiles = listFiles;
+async function listGitTree(octokit, sha, path) {
+ const tree = await octokit.git.getTree({
+ tree_sha: sha,
+ ...github.context.repo
+ });
+ const result = [];
+ for (const tr of tree.data.tree) {
+ const file = `${path}${tr.path}`;
+ if (tr.type === 'tree') {
+ const files = await listGitTree(octokit, tr.sha, `${file}/`);
+ result.push(...files);
+ }
+ else {
+ result.push(file);
+ }
}
- return github.context.sha;
+ return result;
}
-exports.getCheckRunSha = getCheckRunSha;
/***/ }),
@@ -1269,6 +1498,52 @@ function parseIsoDate(str) {
exports.parseIsoDate = parseIsoDate;
+/***/ }),
+
+/***/ 4070:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.getBasePath = exports.normalizeFilePath = exports.normalizeDirPath = void 0;
+function normalizeDirPath(path, addTrailingSlash) {
+ if (!path) {
+ return path;
+ }
+ path = normalizeFilePath(path);
+ if (addTrailingSlash && !path.endsWith('/')) {
+ path += '/';
+ }
+ return path;
+}
+exports.normalizeDirPath = normalizeDirPath;
+function normalizeFilePath(path) {
+ if (!path) {
+ return path;
+ }
+ return path.trim().replace(/\\/g, '/');
+}
+exports.normalizeFilePath = normalizeFilePath;
+function getBasePath(path, trackedFiles) {
+ if (trackedFiles.includes(path)) {
+ return '';
+ }
+ let max = '';
+ for (const file of trackedFiles) {
+ if (path.endsWith(file) && file.length > max.length) {
+ max = file;
+ }
+ }
+ if (max === '') {
+ return undefined;
+ }
+ const base = path.substr(0, path.length - max.length);
+ return base;
+}
+exports.getBasePath = getBasePath;
+
+
/***/ }),
/***/ 3328:
@@ -6927,3596 +7202,12807 @@ exports.isPlainObject = isPlainObject;
/***/ }),
-/***/ 3682:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
-
-var register = __nccwpck_require__(4670)
-var addHook = __nccwpck_require__(5549)
-var removeHook = __nccwpck_require__(6819)
-
-// bind with array of arguments: https://stackoverflow.com/a/21792913
-var bind = Function.bind
-var bindable = bind.bind(bind)
+/***/ 7678:
+/***/ ((module, exports) => {
-function bindApi (hook, state, name) {
- var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state])
- hook.api = { remove: removeHookRef }
- hook.remove = removeHookRef
+"use strict";
- ;['before', 'error', 'after', 'wrap'].forEach(function (kind) {
- var args = name ? [state, kind, name] : [state, kind]
- hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args)
- })
+///
+///
+///
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const typedArrayTypeNames = [
+ 'Int8Array',
+ 'Uint8Array',
+ 'Uint8ClampedArray',
+ 'Int16Array',
+ 'Uint16Array',
+ 'Int32Array',
+ 'Uint32Array',
+ 'Float32Array',
+ 'Float64Array',
+ 'BigInt64Array',
+ 'BigUint64Array'
+];
+function isTypedArrayName(name) {
+ return typedArrayTypeNames.includes(name);
+}
+const objectTypeNames = [
+ 'Function',
+ 'Generator',
+ 'AsyncGenerator',
+ 'GeneratorFunction',
+ 'AsyncGeneratorFunction',
+ 'AsyncFunction',
+ 'Observable',
+ 'Array',
+ 'Buffer',
+ 'Object',
+ 'RegExp',
+ 'Date',
+ 'Error',
+ 'Map',
+ 'Set',
+ 'WeakMap',
+ 'WeakSet',
+ 'ArrayBuffer',
+ 'SharedArrayBuffer',
+ 'DataView',
+ 'Promise',
+ 'URL',
+ 'HTMLElement',
+ ...typedArrayTypeNames
+];
+function isObjectTypeName(name) {
+ return objectTypeNames.includes(name);
+}
+const primitiveTypeNames = [
+ 'null',
+ 'undefined',
+ 'string',
+ 'number',
+ 'bigint',
+ 'boolean',
+ 'symbol'
+];
+function isPrimitiveTypeName(name) {
+ return primitiveTypeNames.includes(name);
}
-
-function HookSingular () {
- var singularHookName = 'h'
- var singularHookState = {
- registry: {}
- }
- var singularHook = register.bind(null, singularHookState, singularHookName)
- bindApi(singularHook, singularHookState, singularHookName)
- return singularHook
+// eslint-disable-next-line @typescript-eslint/ban-types
+function isOfType(type) {
+ return (value) => typeof value === type;
}
+const { toString } = Object.prototype;
+const getObjectType = (value) => {
+ const objectTypeName = toString.call(value).slice(8, -1);
+ if (/HTML\w+Element/.test(objectTypeName) && is.domElement(value)) {
+ return 'HTMLElement';
+ }
+ if (isObjectTypeName(objectTypeName)) {
+ return objectTypeName;
+ }
+ return undefined;
+};
+const isObjectOfType = (type) => (value) => getObjectType(value) === type;
+function is(value) {
+ if (value === null) {
+ return 'null';
+ }
+ switch (typeof value) {
+ case 'undefined':
+ return 'undefined';
+ case 'string':
+ return 'string';
+ case 'number':
+ return 'number';
+ case 'boolean':
+ return 'boolean';
+ case 'function':
+ return 'Function';
+ case 'bigint':
+ return 'bigint';
+ case 'symbol':
+ return 'symbol';
+ default:
+ }
+ if (is.observable(value)) {
+ return 'Observable';
+ }
+ if (is.array(value)) {
+ return 'Array';
+ }
+ if (is.buffer(value)) {
+ return 'Buffer';
+ }
+ const tagType = getObjectType(value);
+ if (tagType) {
+ return tagType;
+ }
+ if (value instanceof String || value instanceof Boolean || value instanceof Number) {
+ throw new TypeError('Please don\'t use object wrappers for primitive types');
+ }
+ return 'Object';
+}
+is.undefined = isOfType('undefined');
+is.string = isOfType('string');
+const isNumberType = isOfType('number');
+is.number = (value) => isNumberType(value) && !is.nan(value);
+is.bigint = isOfType('bigint');
+// eslint-disable-next-line @typescript-eslint/ban-types
+is.function_ = isOfType('function');
+is.null_ = (value) => value === null;
+is.class_ = (value) => is.function_(value) && value.toString().startsWith('class ');
+is.boolean = (value) => value === true || value === false;
+is.symbol = isOfType('symbol');
+is.numericString = (value) => is.string(value) && !is.emptyStringOrWhitespace(value) && !Number.isNaN(Number(value));
+is.array = (value, assertion) => {
+ if (!Array.isArray(value)) {
+ return false;
+ }
+ if (!is.function_(assertion)) {
+ return true;
+ }
+ return value.every(assertion);
+};
+is.buffer = (value) => { var _a, _b, _c, _d; return (_d = (_c = (_b = (_a = value) === null || _a === void 0 ? void 0 : _a.constructor) === null || _b === void 0 ? void 0 : _b.isBuffer) === null || _c === void 0 ? void 0 : _c.call(_b, value)) !== null && _d !== void 0 ? _d : false; };
+is.nullOrUndefined = (value) => is.null_(value) || is.undefined(value);
+is.object = (value) => !is.null_(value) && (typeof value === 'object' || is.function_(value));
+is.iterable = (value) => { var _a; return is.function_((_a = value) === null || _a === void 0 ? void 0 : _a[Symbol.iterator]); };
+is.asyncIterable = (value) => { var _a; return is.function_((_a = value) === null || _a === void 0 ? void 0 : _a[Symbol.asyncIterator]); };
+is.generator = (value) => is.iterable(value) && is.function_(value.next) && is.function_(value.throw);
+is.asyncGenerator = (value) => is.asyncIterable(value) && is.function_(value.next) && is.function_(value.throw);
+is.nativePromise = (value) => isObjectOfType('Promise')(value);
+const hasPromiseAPI = (value) => {
+ var _a, _b;
+ return is.function_((_a = value) === null || _a === void 0 ? void 0 : _a.then) &&
+ is.function_((_b = value) === null || _b === void 0 ? void 0 : _b.catch);
+};
+is.promise = (value) => is.nativePromise(value) || hasPromiseAPI(value);
+is.generatorFunction = isObjectOfType('GeneratorFunction');
+is.asyncGeneratorFunction = (value) => getObjectType(value) === 'AsyncGeneratorFunction';
+is.asyncFunction = (value) => getObjectType(value) === 'AsyncFunction';
+// eslint-disable-next-line no-prototype-builtins, @typescript-eslint/ban-types
+is.boundFunction = (value) => is.function_(value) && !value.hasOwnProperty('prototype');
+is.regExp = isObjectOfType('RegExp');
+is.date = isObjectOfType('Date');
+is.error = isObjectOfType('Error');
+is.map = (value) => isObjectOfType('Map')(value);
+is.set = (value) => isObjectOfType('Set')(value);
+is.weakMap = (value) => isObjectOfType('WeakMap')(value);
+is.weakSet = (value) => isObjectOfType('WeakSet')(value);
+is.int8Array = isObjectOfType('Int8Array');
+is.uint8Array = isObjectOfType('Uint8Array');
+is.uint8ClampedArray = isObjectOfType('Uint8ClampedArray');
+is.int16Array = isObjectOfType('Int16Array');
+is.uint16Array = isObjectOfType('Uint16Array');
+is.int32Array = isObjectOfType('Int32Array');
+is.uint32Array = isObjectOfType('Uint32Array');
+is.float32Array = isObjectOfType('Float32Array');
+is.float64Array = isObjectOfType('Float64Array');
+is.bigInt64Array = isObjectOfType('BigInt64Array');
+is.bigUint64Array = isObjectOfType('BigUint64Array');
+is.arrayBuffer = isObjectOfType('ArrayBuffer');
+is.sharedArrayBuffer = isObjectOfType('SharedArrayBuffer');
+is.dataView = isObjectOfType('DataView');
+is.directInstanceOf = (instance, class_) => Object.getPrototypeOf(instance) === class_.prototype;
+is.urlInstance = (value) => isObjectOfType('URL')(value);
+is.urlString = (value) => {
+ if (!is.string(value)) {
+ return false;
+ }
+ try {
+ new URL(value); // eslint-disable-line no-new
+ return true;
+ }
+ catch (_a) {
+ return false;
+ }
+};
+// TODO: Use the `not` operator with a type guard here when it's available.
+// Example: `is.truthy = (value: unknown): value is (not false | not 0 | not '' | not undefined | not null) => Boolean(value);`
+is.truthy = (value) => Boolean(value);
+// Example: `is.falsy = (value: unknown): value is (not true | 0 | '' | undefined | null) => Boolean(value);`
+is.falsy = (value) => !value;
+is.nan = (value) => Number.isNaN(value);
+is.primitive = (value) => is.null_(value) || isPrimitiveTypeName(typeof value);
+is.integer = (value) => Number.isInteger(value);
+is.safeInteger = (value) => Number.isSafeInteger(value);
+is.plainObject = (value) => {
+ // From: https://github.com/sindresorhus/is-plain-obj/blob/master/index.js
+ if (toString.call(value) !== '[object Object]') {
+ return false;
+ }
+ const prototype = Object.getPrototypeOf(value);
+ return prototype === null || prototype === Object.getPrototypeOf({});
+};
+is.typedArray = (value) => isTypedArrayName(getObjectType(value));
+const isValidLength = (value) => is.safeInteger(value) && value >= 0;
+is.arrayLike = (value) => !is.nullOrUndefined(value) && !is.function_(value) && isValidLength(value.length);
+is.inRange = (value, range) => {
+ if (is.number(range)) {
+ return value >= Math.min(0, range) && value <= Math.max(range, 0);
+ }
+ if (is.array(range) && range.length === 2) {
+ return value >= Math.min(...range) && value <= Math.max(...range);
+ }
+ throw new TypeError(`Invalid range: ${JSON.stringify(range)}`);
+};
+const NODE_TYPE_ELEMENT = 1;
+const DOM_PROPERTIES_TO_CHECK = [
+ 'innerHTML',
+ 'ownerDocument',
+ 'style',
+ 'attributes',
+ 'nodeValue'
+];
+is.domElement = (value) => {
+ return is.object(value) &&
+ value.nodeType === NODE_TYPE_ELEMENT &&
+ is.string(value.nodeName) &&
+ !is.plainObject(value) &&
+ DOM_PROPERTIES_TO_CHECK.every(property => property in value);
+};
+is.observable = (value) => {
+ var _a, _b, _c, _d;
+ if (!value) {
+ return false;
+ }
+ // eslint-disable-next-line no-use-extend-native/no-use-extend-native
+ if (value === ((_b = (_a = value)[Symbol.observable]) === null || _b === void 0 ? void 0 : _b.call(_a))) {
+ return true;
+ }
+ if (value === ((_d = (_c = value)['@@observable']) === null || _d === void 0 ? void 0 : _d.call(_c))) {
+ return true;
+ }
+ return false;
+};
+is.nodeStream = (value) => is.object(value) && is.function_(value.pipe) && !is.observable(value);
+is.infinite = (value) => value === Infinity || value === -Infinity;
+const isAbsoluteMod2 = (remainder) => (value) => is.integer(value) && Math.abs(value % 2) === remainder;
+is.evenInteger = isAbsoluteMod2(0);
+is.oddInteger = isAbsoluteMod2(1);
+is.emptyArray = (value) => is.array(value) && value.length === 0;
+is.nonEmptyArray = (value) => is.array(value) && value.length > 0;
+is.emptyString = (value) => is.string(value) && value.length === 0;
+// TODO: Use `not ''` when the `not` operator is available.
+is.nonEmptyString = (value) => is.string(value) && value.length > 0;
+const isWhiteSpaceString = (value) => is.string(value) && !/\S/.test(value);
+is.emptyStringOrWhitespace = (value) => is.emptyString(value) || isWhiteSpaceString(value);
+is.emptyObject = (value) => is.object(value) && !is.map(value) && !is.set(value) && Object.keys(value).length === 0;
+// TODO: Use `not` operator here to remove `Map` and `Set` from type guard:
+// - https://github.com/Microsoft/TypeScript/pull/29317
+is.nonEmptyObject = (value) => is.object(value) && !is.map(value) && !is.set(value) && Object.keys(value).length > 0;
+is.emptySet = (value) => is.set(value) && value.size === 0;
+is.nonEmptySet = (value) => is.set(value) && value.size > 0;
+is.emptyMap = (value) => is.map(value) && value.size === 0;
+is.nonEmptyMap = (value) => is.map(value) && value.size > 0;
+const predicateOnArray = (method, predicate, values) => {
+ if (!is.function_(predicate)) {
+ throw new TypeError(`Invalid predicate: ${JSON.stringify(predicate)}`);
+ }
+ if (values.length === 0) {
+ throw new TypeError('Invalid number of values');
+ }
+ return method.call(values, predicate);
+};
+is.any = (predicate, ...values) => {
+ const predicates = is.array(predicate) ? predicate : [predicate];
+ return predicates.some(singlePredicate => predicateOnArray(Array.prototype.some, singlePredicate, values));
+};
+is.all = (predicate, ...values) => predicateOnArray(Array.prototype.every, predicate, values);
+const assertType = (condition, description, value) => {
+ if (!condition) {
+ throw new TypeError(`Expected value which is \`${description}\`, received value of type \`${is(value)}\`.`);
+ }
+};
+exports.assert = {
+ // Unknowns.
+ undefined: (value) => assertType(is.undefined(value), 'undefined', value),
+ string: (value) => assertType(is.string(value), 'string', value),
+ number: (value) => assertType(is.number(value), 'number', value),
+ bigint: (value) => assertType(is.bigint(value), 'bigint', value),
+ // eslint-disable-next-line @typescript-eslint/ban-types
+ function_: (value) => assertType(is.function_(value), 'Function', value),
+ null_: (value) => assertType(is.null_(value), 'null', value),
+ class_: (value) => assertType(is.class_(value), "Class" /* class_ */, value),
+ boolean: (value) => assertType(is.boolean(value), 'boolean', value),
+ symbol: (value) => assertType(is.symbol(value), 'symbol', value),
+ numericString: (value) => assertType(is.numericString(value), "string with a number" /* numericString */, value),
+ array: (value, assertion) => {
+ const assert = assertType;
+ assert(is.array(value), 'Array', value);
+ if (assertion) {
+ value.forEach(assertion);
+ }
+ },
+ buffer: (value) => assertType(is.buffer(value), 'Buffer', value),
+ nullOrUndefined: (value) => assertType(is.nullOrUndefined(value), "null or undefined" /* nullOrUndefined */, value),
+ object: (value) => assertType(is.object(value), 'Object', value),
+ iterable: (value) => assertType(is.iterable(value), "Iterable" /* iterable */, value),
+ asyncIterable: (value) => assertType(is.asyncIterable(value), "AsyncIterable" /* asyncIterable */, value),
+ generator: (value) => assertType(is.generator(value), 'Generator', value),
+ asyncGenerator: (value) => assertType(is.asyncGenerator(value), 'AsyncGenerator', value),
+ nativePromise: (value) => assertType(is.nativePromise(value), "native Promise" /* nativePromise */, value),
+ promise: (value) => assertType(is.promise(value), 'Promise', value),
+ generatorFunction: (value) => assertType(is.generatorFunction(value), 'GeneratorFunction', value),
+ asyncGeneratorFunction: (value) => assertType(is.asyncGeneratorFunction(value), 'AsyncGeneratorFunction', value),
+ // eslint-disable-next-line @typescript-eslint/ban-types
+ asyncFunction: (value) => assertType(is.asyncFunction(value), 'AsyncFunction', value),
+ // eslint-disable-next-line @typescript-eslint/ban-types
+ boundFunction: (value) => assertType(is.boundFunction(value), 'Function', value),
+ regExp: (value) => assertType(is.regExp(value), 'RegExp', value),
+ date: (value) => assertType(is.date(value), 'Date', value),
+ error: (value) => assertType(is.error(value), 'Error', value),
+ map: (value) => assertType(is.map(value), 'Map', value),
+ set: (value) => assertType(is.set(value), 'Set', value),
+ weakMap: (value) => assertType(is.weakMap(value), 'WeakMap', value),
+ weakSet: (value) => assertType(is.weakSet(value), 'WeakSet', value),
+ int8Array: (value) => assertType(is.int8Array(value), 'Int8Array', value),
+ uint8Array: (value) => assertType(is.uint8Array(value), 'Uint8Array', value),
+ uint8ClampedArray: (value) => assertType(is.uint8ClampedArray(value), 'Uint8ClampedArray', value),
+ int16Array: (value) => assertType(is.int16Array(value), 'Int16Array', value),
+ uint16Array: (value) => assertType(is.uint16Array(value), 'Uint16Array', value),
+ int32Array: (value) => assertType(is.int32Array(value), 'Int32Array', value),
+ uint32Array: (value) => assertType(is.uint32Array(value), 'Uint32Array', value),
+ float32Array: (value) => assertType(is.float32Array(value), 'Float32Array', value),
+ float64Array: (value) => assertType(is.float64Array(value), 'Float64Array', value),
+ bigInt64Array: (value) => assertType(is.bigInt64Array(value), 'BigInt64Array', value),
+ bigUint64Array: (value) => assertType(is.bigUint64Array(value), 'BigUint64Array', value),
+ arrayBuffer: (value) => assertType(is.arrayBuffer(value), 'ArrayBuffer', value),
+ sharedArrayBuffer: (value) => assertType(is.sharedArrayBuffer(value), 'SharedArrayBuffer', value),
+ dataView: (value) => assertType(is.dataView(value), 'DataView', value),
+ urlInstance: (value) => assertType(is.urlInstance(value), 'URL', value),
+ urlString: (value) => assertType(is.urlString(value), "string with a URL" /* urlString */, value),
+ truthy: (value) => assertType(is.truthy(value), "truthy" /* truthy */, value),
+ falsy: (value) => assertType(is.falsy(value), "falsy" /* falsy */, value),
+ nan: (value) => assertType(is.nan(value), "NaN" /* nan */, value),
+ primitive: (value) => assertType(is.primitive(value), "primitive" /* primitive */, value),
+ integer: (value) => assertType(is.integer(value), "integer" /* integer */, value),
+ safeInteger: (value) => assertType(is.safeInteger(value), "integer" /* safeInteger */, value),
+ plainObject: (value) => assertType(is.plainObject(value), "plain object" /* plainObject */, value),
+ typedArray: (value) => assertType(is.typedArray(value), "TypedArray" /* typedArray */, value),
+ arrayLike: (value) => assertType(is.arrayLike(value), "array-like" /* arrayLike */, value),
+ domElement: (value) => assertType(is.domElement(value), "HTMLElement" /* domElement */, value),
+ observable: (value) => assertType(is.observable(value), 'Observable', value),
+ nodeStream: (value) => assertType(is.nodeStream(value), "Node.js Stream" /* nodeStream */, value),
+ infinite: (value) => assertType(is.infinite(value), "infinite number" /* infinite */, value),
+ emptyArray: (value) => assertType(is.emptyArray(value), "empty array" /* emptyArray */, value),
+ nonEmptyArray: (value) => assertType(is.nonEmptyArray(value), "non-empty array" /* nonEmptyArray */, value),
+ emptyString: (value) => assertType(is.emptyString(value), "empty string" /* emptyString */, value),
+ nonEmptyString: (value) => assertType(is.nonEmptyString(value), "non-empty string" /* nonEmptyString */, value),
+ emptyStringOrWhitespace: (value) => assertType(is.emptyStringOrWhitespace(value), "empty string or whitespace" /* emptyStringOrWhitespace */, value),
+ emptyObject: (value) => assertType(is.emptyObject(value), "empty object" /* emptyObject */, value),
+ nonEmptyObject: (value) => assertType(is.nonEmptyObject(value), "non-empty object" /* nonEmptyObject */, value),
+ emptySet: (value) => assertType(is.emptySet(value), "empty set" /* emptySet */, value),
+ nonEmptySet: (value) => assertType(is.nonEmptySet(value), "non-empty set" /* nonEmptySet */, value),
+ emptyMap: (value) => assertType(is.emptyMap(value), "empty map" /* emptyMap */, value),
+ nonEmptyMap: (value) => assertType(is.nonEmptyMap(value), "non-empty map" /* nonEmptyMap */, value),
+ // Numbers.
+ evenInteger: (value) => assertType(is.evenInteger(value), "even integer" /* evenInteger */, value),
+ oddInteger: (value) => assertType(is.oddInteger(value), "odd integer" /* oddInteger */, value),
+ // Two arguments.
+ directInstanceOf: (instance, class_) => assertType(is.directInstanceOf(instance, class_), "T" /* directInstanceOf */, instance),
+ inRange: (value, range) => assertType(is.inRange(value, range), "in range" /* inRange */, value),
+ // Variadic functions.
+ any: (predicate, ...values) => assertType(is.any(predicate, ...values), "predicate returns truthy for any value" /* any */, values),
+ all: (predicate, ...values) => assertType(is.all(predicate, ...values), "predicate returns truthy for all values" /* all */, values)
+};
+// Some few keywords are reserved, but we'll populate them for Node.js users
+// See https://github.com/Microsoft/TypeScript/issues/2536
+Object.defineProperties(is, {
+ class: {
+ value: is.class_
+ },
+ function: {
+ value: is.function_
+ },
+ null: {
+ value: is.null_
+ }
+});
+Object.defineProperties(exports.assert, {
+ class: {
+ value: exports.assert.class_
+ },
+ function: {
+ value: exports.assert.function_
+ },
+ null: {
+ value: exports.assert.null_
+ }
+});
+exports.default = is;
+// For CommonJS default export support
+module.exports = is;
+module.exports.default = is;
+module.exports.assert = exports.assert;
-function HookCollection () {
- var state = {
- registry: {}
- }
-
- var hook = register.bind(null, state)
- bindApi(hook, state)
- return hook
-}
+/***/ }),
-var collectionHookDeprecationMessageDisplayed = false
-function Hook () {
- if (!collectionHookDeprecationMessageDisplayed) {
- console.warn('[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4')
- collectionHookDeprecationMessageDisplayed = true
- }
- return HookCollection()
-}
+/***/ 8097:
+/***/ ((module, exports, __nccwpck_require__) => {
-Hook.Singular = HookSingular.bind()
-Hook.Collection = HookCollection.bind()
+"use strict";
-module.exports = Hook
-// expose constructors as a named property for TypeScript
-module.exports.Hook = Hook
-module.exports.Singular = Hook.Singular
-module.exports.Collection = Hook.Collection
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const defer_to_connect_1 = __nccwpck_require__(6214);
+const nodejsMajorVersion = Number(process.versions.node.split('.')[0]);
+const timer = (request) => {
+ const timings = {
+ start: Date.now(),
+ socket: undefined,
+ lookup: undefined,
+ connect: undefined,
+ secureConnect: undefined,
+ upload: undefined,
+ response: undefined,
+ end: undefined,
+ error: undefined,
+ abort: undefined,
+ phases: {
+ wait: undefined,
+ dns: undefined,
+ tcp: undefined,
+ tls: undefined,
+ request: undefined,
+ firstByte: undefined,
+ download: undefined,
+ total: undefined
+ }
+ };
+ request.timings = timings;
+ const handleError = (origin) => {
+ const emit = origin.emit.bind(origin);
+ origin.emit = (event, ...args) => {
+ // Catches the `error` event
+ if (event === 'error') {
+ timings.error = Date.now();
+ timings.phases.total = timings.error - timings.start;
+ origin.emit = emit;
+ }
+ // Saves the original behavior
+ return emit(event, ...args);
+ };
+ };
+ handleError(request);
+ request.prependOnceListener('abort', () => {
+ timings.abort = Date.now();
+ // Let the `end` response event be responsible for setting the total phase,
+ // unless the Node.js major version is >= 13.
+ if (!timings.response || nodejsMajorVersion >= 13) {
+ timings.phases.total = Date.now() - timings.start;
+ }
+ });
+ const onSocket = (socket) => {
+ timings.socket = Date.now();
+ timings.phases.wait = timings.socket - timings.start;
+ const lookupListener = () => {
+ timings.lookup = Date.now();
+ timings.phases.dns = timings.lookup - timings.socket;
+ };
+ socket.prependOnceListener('lookup', lookupListener);
+ defer_to_connect_1.default(socket, {
+ connect: () => {
+ timings.connect = Date.now();
+ if (timings.lookup === undefined) {
+ socket.removeListener('lookup', lookupListener);
+ timings.lookup = timings.connect;
+ timings.phases.dns = timings.lookup - timings.socket;
+ }
+ timings.phases.tcp = timings.connect - timings.lookup;
+ // This callback is called before flushing any data,
+ // so we don't need to set `timings.phases.request` here.
+ },
+ secureConnect: () => {
+ timings.secureConnect = Date.now();
+ timings.phases.tls = timings.secureConnect - timings.connect;
+ }
+ });
+ };
+ if (request.socket) {
+ onSocket(request.socket);
+ }
+ else {
+ request.prependOnceListener('socket', onSocket);
+ }
+ const onUpload = () => {
+ var _a;
+ timings.upload = Date.now();
+ timings.phases.request = timings.upload - (_a = timings.secureConnect, (_a !== null && _a !== void 0 ? _a : timings.connect));
+ };
+ const writableFinished = () => {
+ if (typeof request.writableFinished === 'boolean') {
+ return request.writableFinished;
+ }
+ // Node.js doesn't have `request.writableFinished` property
+ return request.finished && request.outputSize === 0 && (!request.socket || request.socket.writableLength === 0);
+ };
+ if (writableFinished()) {
+ onUpload();
+ }
+ else {
+ request.prependOnceListener('finish', onUpload);
+ }
+ request.prependOnceListener('response', (response) => {
+ timings.response = Date.now();
+ timings.phases.firstByte = timings.response - timings.upload;
+ response.timings = timings;
+ handleError(response);
+ response.prependOnceListener('end', () => {
+ timings.end = Date.now();
+ timings.phases.download = timings.end - timings.response;
+ timings.phases.total = timings.end - timings.start;
+ });
+ });
+ return timings;
+};
+exports.default = timer;
+// For CommonJS default export support
+module.exports = timer;
+module.exports.default = timer;
/***/ }),
-/***/ 5549:
-/***/ ((module) => {
-
-module.exports = addHook
+/***/ 6761:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
-function addHook (state, kind, name, hook) {
- var orig = hook
- if (!state.registry[name]) {
- state.registry[name] = []
- }
+var Utils = __nccwpck_require__(5182);
+var fs = Utils.FileSystem.require(),
+ pth = __nccwpck_require__(5622);
- if (kind === 'before') {
- hook = function (method, options) {
- return Promise.resolve()
- .then(orig.bind(null, options))
- .then(method.bind(null, options))
- }
- }
+fs.existsSync = fs.existsSync || pth.existsSync;
- if (kind === 'after') {
- hook = function (method, options) {
- var result
- return Promise.resolve()
- .then(method.bind(null, options))
- .then(function (result_) {
- result = result_
- return orig(result, options)
- })
- .then(function () {
- return result
- })
- }
- }
+var ZipEntry = __nccwpck_require__(4057),
+ ZipFile = __nccwpck_require__(7744);
- if (kind === 'error') {
- hook = function (method, options) {
- return Promise.resolve()
- .then(method.bind(null, options))
- .catch(function (error) {
- return orig(error, options)
- })
- }
- }
+var isWin = /^win/.test(process.platform);
- state.registry[name].push({
- hook: hook,
- orig: orig
- })
+function canonical(p) {
+ var safeSuffix = pth.normalize(p).replace(/^(\.\.(\/|\\|$))+/, '');
+ return pth.join('./', safeSuffix);
}
+module.exports = function (/**String*/input) {
+ var _zip = undefined,
+ _filename = "";
-/***/ }),
+ if (input && typeof input === "string") { // load zip file
+ if (fs.existsSync(input)) {
+ _filename = input;
+ _zip = new ZipFile(input, Utils.Constants.FILE);
+ } else {
+ throw new Error(Utils.Errors.INVALID_FILENAME);
+ }
+ } else if (input && Buffer.isBuffer(input)) { // load buffer
+ _zip = new ZipFile(input, Utils.Constants.BUFFER);
+ } else { // create new zip file
+ _zip = new ZipFile(null, Utils.Constants.NONE);
+ }
-/***/ 4670:
-/***/ ((module) => {
+ function sanitize(prefix, name) {
+ prefix = pth.resolve(pth.normalize(prefix));
+ var parts = name.split('/');
+ for (var i = 0, l = parts.length; i < l; i++) {
+ var path = pth.normalize(pth.join(prefix, parts.slice(i, l).join(pth.sep)));
+ if (path.indexOf(prefix) === 0) {
+ return path;
+ }
+ }
+ return pth.normalize(pth.join(prefix, pth.basename(name)));
+ }
-module.exports = register
+ function getEntry(/**Object*/entry) {
+ if (entry && _zip) {
+ var item;
+ // If entry was given as a file name
+ if (typeof entry === "string")
+ item = _zip.getEntry(entry);
+ // if entry was given as a ZipEntry object
+ if (typeof entry === "object" && typeof entry.entryName !== "undefined" && typeof entry.header !== "undefined")
+ item = _zip.getEntry(entry.entryName);
+
+ if (item) {
+ return item;
+ }
+ }
+ return null;
+ }
-function register (state, name, method, options) {
- if (typeof method !== 'function') {
- throw new Error('method for before hook must be a function')
- }
+ function fixPath(zipPath){
+ // convert windows file separators
+ zipPath = zipPath.split("\\").join("/");
+        // add separator if it wasn't given
+ if (zipPath.charAt(zipPath.length - 1) !== "/") {
+ zipPath += "/";
+ }
+ return zipPath;
+ }
+
+ return {
+ /**
+ * Extracts the given entry from the archive and returns the content as a Buffer object
+ * @param entry ZipEntry object or String with the full path of the entry
+ *
+ * @return Buffer or Null in case of error
+ */
+ readFile: function (/**Object*/entry, /*String, Buffer*/pass) {
+ var item = getEntry(entry);
+ return item && item.getData(pass) || null;
+ },
+
+ /**
+ * Asynchronous readFile
+ * @param entry ZipEntry object or String with the full path of the entry
+ * @param callback
+ *
+ * @return Buffer or Null in case of error
+ */
+ readFileAsync: function (/**Object*/entry, /**Function*/callback) {
+ var item = getEntry(entry);
+ if (item) {
+ item.getDataAsync(callback);
+ } else {
+ callback(null, "getEntry failed for:" + entry)
+ }
+ },
+
+ /**
+ * Extracts the given entry from the archive and returns the content as plain text in the given encoding
+ * @param entry ZipEntry object or String with the full path of the entry
+ * @param encoding Optional. If no encoding is specified utf8 is used
+ *
+ * @return String
+ */
+ readAsText: function (/**Object*/entry, /**String=*/encoding) {
+ var item = getEntry(entry);
+ if (item) {
+ var data = item.getData();
+ if (data && data.length) {
+ return data.toString(encoding || "utf8");
+ }
+ }
+ return "";
+ },
+
+ /**
+ * Asynchronous readAsText
+ * @param entry ZipEntry object or String with the full path of the entry
+ * @param callback
+ * @param encoding Optional. If no encoding is specified utf8 is used
+ *
+ * @return String
+ */
+ readAsTextAsync: function (/**Object*/entry, /**Function*/callback, /**String=*/encoding) {
+ var item = getEntry(entry);
+ if (item) {
+ item.getDataAsync(function (data, err) {
+ if (err) {
+ callback(data, err);
+ return;
+ }
- if (!options) {
- options = {}
- }
+ if (data && data.length) {
+ callback(data.toString(encoding || "utf8"));
+ } else {
+ callback("");
+ }
+ })
+ } else {
+ callback("");
+ }
+ },
+
+ /**
+         * Removes the entry from the archive, or the entry and all of its nested directories and files if the given entry is a directory
+ *
+ * @param entry
+ */
+ deleteFile: function (/**Object*/entry) { // @TODO: test deleteFile
+ var item = getEntry(entry);
+ if (item) {
+ _zip.deleteEntry(item.entryName);
+ }
+ },
+
+ /**
+ * Adds a comment to the zip. The zip must be rewritten after adding the comment.
+ *
+ * @param comment
+ */
+ addZipComment: function (/**String*/comment) { // @TODO: test addZipComment
+ _zip.comment = comment;
+ },
+
+ /**
+ * Returns the zip comment
+ *
+ * @return String
+ */
+ getZipComment: function () {
+ return _zip.comment || '';
+ },
+
+ /**
+ * Adds a comment to a specified zipEntry. The zip must be rewritten after adding the comment
+ * The comment cannot exceed 65535 characters in length
+ *
+ * @param entry
+ * @param comment
+ */
+ addZipEntryComment: function (/**Object*/entry, /**String*/comment) {
+ var item = getEntry(entry);
+ if (item) {
+ item.comment = comment;
+ }
+ },
+
+ /**
+ * Returns the comment of the specified entry
+ *
+ * @param entry
+ * @return String
+ */
+ getZipEntryComment: function (/**Object*/entry) {
+ var item = getEntry(entry);
+ if (item) {
+ return item.comment || '';
+ }
+ return ''
+ },
+
+ /**
+ * Updates the content of an existing entry inside the archive. The zip must be rewritten after updating the content
+ *
+ * @param entry
+ * @param content
+ */
+ updateFile: function (/**Object*/entry, /**Buffer*/content) {
+ var item = getEntry(entry);
+ if (item) {
+ item.setData(content);
+ }
+ },
+
+ /**
+ * Adds a file from the disk to the archive
+ *
+ * @param localPath File to add to zip
+ * @param zipPath Optional path inside the zip
+ * @param zipName Optional name for the file
+ */
+ addLocalFile: function (/**String*/localPath, /**String=*/zipPath, /**String=*/zipName, /**String*/comment) {
+ if (fs.existsSync(localPath)) {
+ // fix ZipPath
+ zipPath = (zipPath) ? fixPath(zipPath) : "";
+
+ // p - local file name
+ var p = localPath.split("\\").join("/").split("/").pop();
+
+ // add file name into zippath
+ zipPath += (zipName) ? zipName : p;
+
+ // read file attributes
+ const _attr = fs.statSync(localPath);
+
+ // add file into zip file
+ this.addFile(zipPath, fs.readFileSync(localPath), comment, _attr)
+ } else {
+ throw new Error(Utils.Errors.FILE_NOT_FOUND.replace("%s", localPath));
+ }
+ },
+
+ /**
+ * Adds a local directory and all its nested files and directories to the archive
+ *
+ * @param localPath
+ * @param zipPath optional path inside zip
+ * @param filter optional RegExp or Function if files match will
+ * be included.
+ */
+ addLocalFolder: function (/**String*/localPath, /**String=*/zipPath, /**=RegExp|Function*/filter) {
+ // Prepare filter
+ if (filter instanceof RegExp) { // if filter is RegExp wrap it
+ filter = (function (rx){
+ return function (filename) {
+ return rx.test(filename);
+ }
+ })(filter);
+ } else if ('function' !== typeof filter) { // if filter is not function we will replace it
+ filter = function () {
+ return true;
+ };
+ }
- if (Array.isArray(name)) {
- return name.reverse().reduce(function (callback, name) {
- return register.bind(null, state, name, callback, options)
- }, method)()
- }
+ // fix ZipPath
+ zipPath = (zipPath) ? fixPath(zipPath) : "";
- return Promise.resolve()
- .then(function () {
- if (!state.registry[name]) {
- return method(options)
- }
+ // normalize the path first
+ localPath = pth.normalize(localPath);
- return (state.registry[name]).reduce(function (method, registered) {
- return registered.hook.bind(null, method, options)
- }, method)()
- })
-}
+ if (fs.existsSync(localPath)) {
+ var items = Utils.findFiles(localPath),
+ self = this;
-/***/ }),
+ if (items.length) {
+ items.forEach(function (filepath) {
+ var p = pth.relative(localPath, filepath).split("\\").join("/"); //windows fix
+ if (filter(p)) {
+ if (filepath.charAt(filepath.length - 1) !== pth.sep) {
+ self.addFile(zipPath + p, fs.readFileSync(filepath), "", fs.statSync(filepath));
+ } else {
+ self.addFile(zipPath + p + '/', Buffer.alloc(0), "", 0);
+ }
+ }
+ });
+ }
+ } else {
+ throw new Error(Utils.Errors.FILE_NOT_FOUND.replace("%s", localPath));
+ }
+ },
-/***/ 6819:
-/***/ ((module) => {
+ /**
+     * Asynchronous addLocalFolder
+ * @param localPath
+ * @param callback
+ * @param zipPath optional path inside zip
+ * @param filter optional RegExp or Function if files match will
+ * be included.
+ */
+ addLocalFolderAsync: function (/*String*/localPath, /*Function*/callback, /*String*/zipPath, /*RegExp|Function*/filter) {
+ if (filter === undefined) {
+ filter = function () {
+ return true;
+ };
+ } else if (filter instanceof RegExp) {
+ filter = function (filter) {
+ return function (filename) {
+ return filter.test(filename);
+ }
+ }(filter);
+ }
-module.exports = removeHook
+ if (zipPath) {
+ zipPath = zipPath.split("\\").join("/");
+ if (zipPath.charAt(zipPath.length - 1) !== "/") {
+ zipPath += "/";
+ }
+ } else {
+ zipPath = "";
+ }
+ // normalize the path first
+ localPath = pth.normalize(localPath);
+ localPath = localPath.split("\\").join("/"); //windows fix
+ if (localPath.charAt(localPath.length - 1) !== "/")
+ localPath += "/";
+
+ var self = this;
+ fs.open(localPath, 'r', function (err, fd) {
+ if (err && err.code === 'ENOENT') {
+ callback(undefined, Utils.Errors.FILE_NOT_FOUND.replace("%s", localPath));
+ } else if (err) {
+ callback(undefined, err);
+ } else {
+ var items = Utils.findFiles(localPath);
+ var i = -1;
+
+ var next = function () {
+ i += 1;
+ if (i < items.length) {
+ var p = items[i].split("\\").join("/").replace(new RegExp(localPath.replace(/(\(|\))/g, '\\$1'), 'i'), ""); //windows fix
+ p = p.normalize('NFD').replace(/[\u0300-\u036f]/g, '').replace(/[^\x20-\x7E]/g, '') // accent fix
+ if (filter(p)) {
+ if (p.charAt(p.length - 1) !== "/") {
+ fs.readFile(items[i], function (err, data) {
+ if (err) {
+ callback(undefined, err);
+ } else {
+ self.addFile(zipPath + p, data, '', 0);
+ next();
+ }
+ })
+ } else {
+ self.addFile(zipPath + p, Buffer.alloc(0), "", 0);
+ next();
+ }
+ } else {
+ next();
+ }
-function removeHook (state, name, method) {
- if (!state.registry[name]) {
- return
- }
+ } else {
+ callback(true, undefined);
+ }
+ }
- var index = state.registry[name]
- .map(function (registered) { return registered.orig })
- .indexOf(method)
+ next();
+ }
+ });
+ },
+
+ /**
+     * Allows you to create an entry (file or directory) in the zip file.
+     * If you want to create a directory, the entryName must end in / and a null buffer should be provided.
+ * Comment and attributes are optional
+ *
+ * @param entryName
+ * @param content
+ * @param comment
+ * @param attr
+ */
+ addFile: function (/**String*/entryName, /**Buffer*/content, /**String*/comment, /**Number*/attr) {
+ // prepare new entry
+ var entry = new ZipEntry();
+ entry.entryName = entryName;
+ entry.comment = comment || "";
+
+ var isStat = ('object' === typeof attr) && (attr instanceof fs.Stats);
+
+ // last modification time from file stats
+ if (isStat){
+ entry.header.time = attr.mtime;
+ }
- if (index === -1) {
- return
- }
+ // Set file attribute
+ var fileattr = (entry.isDirectory) ? 0x10 : 0; // (MS-DOS directory flag)
- state.registry[name].splice(index, 1)
-}
+ // extended attributes field for Unix
+ if('win32' !== process.platform){
+ // set file type either S_IFDIR / S_IFREG
+ var unix = (entry.isDirectory) ? 0x4000 : 0x8000;
+ if (isStat) { // File attributes from file stats
+ unix |= (0xfff & attr.mode)
+ }else if ('number' === typeof attr){ // attr from given attr values
+ unix |= (0xfff & attr);
+ }else{ // Default values:
+                unix |= (entry.isDirectory) ? 0o755 : 0o644; // permissions (drwxr-xr-x) or (-rw-r--r--)
+ }
-/***/ }),
+ fileattr = (fileattr | (unix << 16)) >>> 0; // add attributes
+ }
-/***/ 8932:
-/***/ ((__unused_webpack_module, exports) => {
+ entry.attr = fileattr;
-"use strict";
+ entry.setData(content);
+ _zip.setEntry(entry);
+ },
+ /**
+ * Returns an array of ZipEntry objects representing the files and folders inside the archive
+ *
+ * @return Array
+ */
+ getEntries: function () {
+ if (_zip) {
+ return _zip.entries;
+ } else {
+ return [];
+ }
+ },
+
+ /**
+ * Returns a ZipEntry object representing the file or folder specified by ``name``.
+ *
+ * @param name
+ * @return ZipEntry
+ */
+ getEntry: function (/**String*/name) {
+ return getEntry(name);
+ },
+
+ getEntryCount: function() {
+ return _zip.getEntryCount();
+ },
+
+ forEach: function(callback) {
+ return _zip.forEach(callback);
+ },
+
+ /**
+ * Extracts the given entry to the given targetPath
+     * If the entry is a directory inside the archive, the entire directory and its subdirectories will be extracted
+ *
+ * @param entry ZipEntry object or String with the full path of the entry
+ * @param targetPath Target folder where to write the file
+ * @param maintainEntryPath If maintainEntryPath is true and the entry is inside a folder, the entry folder
+ * will be created in targetPath as well. Default is TRUE
+     * @param overwrite If the file already exists at the target path, the file will be overwritten if this is true.
+ * Default is FALSE
+ * @param outFileName String If set will override the filename of the extracted file (Only works if the entry is a file)
+ *
+ * @return Boolean
+ */
+ extractEntryTo: function (/**Object*/entry, /**String*/targetPath, /**Boolean*/maintainEntryPath, /**Boolean*/overwrite, /**String**/outFileName) {
+ overwrite = overwrite || false;
+ maintainEntryPath = typeof maintainEntryPath === "undefined" ? true : maintainEntryPath;
+
+ var item = getEntry(entry);
+ if (!item) {
+ throw new Error(Utils.Errors.NO_ENTRY);
+ }
-Object.defineProperty(exports, "__esModule", ({ value: true }));
+ var entryName = canonical(item.entryName);
-class Deprecation extends Error {
- constructor(message) {
- super(message); // Maintains proper stack trace (only available on V8)
+ var target = sanitize(targetPath,outFileName && !item.isDirectory ? outFileName : (maintainEntryPath ? entryName : pth.basename(entryName)));
- /* istanbul ignore next */
+ if (item.isDirectory) {
+ target = pth.resolve(target, "..");
+ var children = _zip.getEntryChildren(item);
+ children.forEach(function (child) {
+ if (child.isDirectory) return;
+ var content = child.getData();
+ if (!content) {
+ throw new Error(Utils.Errors.CANT_EXTRACT_FILE);
+ }
+ var name = canonical(child.entryName)
+ var childName = sanitize(targetPath, maintainEntryPath ? name : pth.basename(name));
- if (Error.captureStackTrace) {
- Error.captureStackTrace(this, this.constructor);
- }
+ Utils.writeFileTo(childName, content, overwrite);
+ });
+ return true;
+ }
- this.name = 'Deprecation';
- }
+ var content = item.getData();
+ if (!content) throw new Error(Utils.Errors.CANT_EXTRACT_FILE);
-}
+ if (fs.existsSync(target) && !overwrite) {
+ throw new Error(Utils.Errors.CANT_OVERRIDE);
+ }
+ Utils.writeFileTo(target, content, overwrite);
+
+ return true;
+ },
+
+ /**
+ * Test the archive
+ *
+ */
+ test: function (pass) {
+ if (!_zip) {
+ return false;
+ }
-exports.Deprecation = Deprecation;
+ for (var entry in _zip.entries) {
+ try {
+ if (entry.isDirectory) {
+ continue;
+ }
+ var content = _zip.entries[entry].getData(pass);
+ if (!content) {
+ return false;
+ }
+ } catch (err) {
+ return false;
+ }
+ }
+ return true;
+ },
+
+ /**
+ * Extracts the entire archive to the given location
+ *
+ * @param targetPath Target location
+     * @param overwrite If the file already exists at the target path, the file will be overwritten if this is true.
+ * Default is FALSE
+ */
+ extractAllTo: function (/**String*/targetPath, /**Boolean*/overwrite, /*String, Buffer*/pass) {
+ overwrite = overwrite || false;
+ if (!_zip) {
+ throw new Error(Utils.Errors.NO_ZIP);
+ }
+ _zip.entries.forEach(function (entry) {
+ var entryName = sanitize(targetPath, canonical(entry.entryName.toString()));
+ if (entry.isDirectory) {
+ Utils.makeDir(entryName);
+ return;
+ }
+ var content = entry.getData(pass);
+ if (!content) {
+ throw new Error(Utils.Errors.CANT_EXTRACT_FILE);
+ }
+ Utils.writeFileTo(entryName, content, overwrite);
+ try {
+ fs.utimesSync(entryName, entry.header.time, entry.header.time)
+ } catch (err) {
+ throw new Error(Utils.Errors.CANT_EXTRACT_FILE);
+ }
+ })
+ },
+
+ /**
+ * Asynchronous extractAllTo
+ *
+ * @param targetPath Target location
+     * @param overwrite If the file already exists at the target path, the file will be overwritten if this is true.
+ * Default is FALSE
+ * @param callback
+ */
+ extractAllToAsync: function (/**String*/targetPath, /**Boolean*/overwrite, /**Function*/callback) {
+ if (!callback) {
+ callback = function() {}
+ }
+ overwrite = overwrite || false;
+ if (!_zip) {
+ callback(new Error(Utils.Errors.NO_ZIP));
+ return;
+ }
+ var entries = _zip.entries;
+ var i = entries.length;
+ entries.forEach(function (entry) {
+ if (i <= 0) return; // Had an error already
-/***/ }),
+ var entryName = pth.normalize(canonical(entry.entryName.toString()));
-/***/ 5582:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+ if (entry.isDirectory) {
+ Utils.makeDir(sanitize(targetPath, entryName));
+ if (--i === 0)
+ callback(undefined);
+ return;
+ }
+ entry.getDataAsync(function (content, err) {
+ if (i <= 0) return;
+ if (err) {
+ callback(new Error(err));
+ return;
+ }
+ if (!content) {
+ i = 0;
+ callback(new Error(Utils.Errors.CANT_EXTRACT_FILE));
+ return;
+ }
-"use strict";
+ Utils.writeFileToAsync(sanitize(targetPath, entryName), content, overwrite, function (succ) {
+ try {
+ fs.utimesSync(pth.resolve(targetPath, entryName), entry.header.time, entry.header.time);
+ } catch (err) {
+ callback(new Error('Unable to set utimes'));
+ }
+ if (i <= 0) return;
+ if (!succ) {
+ i = 0;
+ callback(new Error('Unable to write'));
+ return;
+ }
+ if (--i === 0)
+ callback(undefined);
+ });
+ });
+ })
+ },
+
+ /**
+     * Writes the newly created zip file to disk at the specified location; if a zip was opened and no ``targetFileName`` is provided, the opened zip will be overwritten
+ *
+ * @param targetFileName
+ * @param callback
+ */
+ writeZip: function (/**String*/targetFileName, /**Function*/callback) {
+ if (arguments.length === 1) {
+ if (typeof targetFileName === "function") {
+ callback = targetFileName;
+ targetFileName = "";
+ }
+ }
+ if (!targetFileName && _filename) {
+ targetFileName = _filename;
+ }
+ if (!targetFileName) return;
-const stringify = __nccwpck_require__(4810);
-const compile = __nccwpck_require__(7123);
-const expand = __nccwpck_require__(6944);
-const parse = __nccwpck_require__(9889);
+ var zipData = _zip.compressToBuffer();
+ if (zipData) {
+ var ok = Utils.writeFileTo(targetFileName, zipData, true);
+ if (typeof callback === 'function') callback(!ok ? new Error("failed") : null, "");
+ }
+ },
+
+ /**
+ * Returns the content of the entire zip file as a Buffer object
+ *
+ * @return Buffer
+ */
+ toBuffer: function (/**Function=*/onSuccess, /**Function=*/onFail, /**Function=*/onItemStart, /**Function=*/onItemEnd) {
+ this.valueOf = 2;
+ if (typeof onSuccess === "function") {
+ _zip.toAsyncBuffer(onSuccess, onFail, onItemStart, onItemEnd);
+ return null;
+ }
+ return _zip.compressToBuffer()
+ }
+ }
+};
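// A short usage sketch of the factory exported above (module 6761 in this bundle;
// the code matches the adm-zip API). The archive paths here are illustrative:
const AdmZip = __nccwpck_require__(6761);
const zip = new AdmZip('archive.zip');            // load an existing archive from disk
console.log(zip.readAsText('docs/readme.txt'));   // extract one entry as UTF-8 text
zip.addLocalFile('/tmp/report.csv', 'data/');     // stored as data/report.csv
zip.extractAllTo('/tmp/out', /*overwrite*/ true);
zip.writeZip();                                   // rewrites archive.zip in place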
-/**
- * Expand the given pattern or create a regex-compatible string.
- *
- * ```js
- * const braces = require('braces');
- * console.log(braces('{a,b,c}', { compile: true })); //=> ['(a|b|c)']
- * console.log(braces('{a,b,c}')); //=> ['a', 'b', 'c']
- * ```
- * @param {String} `str`
- * @param {Object} `options`
- * @return {String}
- * @api public
- */
-const braces = (input, options = {}) => {
- let output = [];
+/***/ }),
- if (Array.isArray(input)) {
- for (let pattern of input) {
- let result = braces.create(pattern, options);
- if (Array.isArray(result)) {
- output.push(...result);
- } else {
- output.push(result);
- }
+/***/ 9032:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var Utils = __nccwpck_require__(5182),
+ Constants = Utils.Constants;
+
+/* The central directory file header */
+module.exports = function () {
+ var _verMade = 0x14,
+ _version = 0x0A,
+ _flags = 0,
+ _method = 0,
+ _time = 0,
+ _crc = 0,
+ _compressedSize = 0,
+ _size = 0,
+ _fnameLen = 0,
+ _extraLen = 0,
+
+ _comLen = 0,
+ _diskStart = 0,
+ _inattr = 0,
+ _attr = 0,
+ _offset = 0;
+
+ switch(process.platform){
+ case 'win32':
+ _verMade |= 0x0A00;
+ case 'darwin':
+ _verMade |= 0x1300;
+ default:
+ _verMade |= 0x0300;
}
- } else {
- output = [].concat(braces.create(input, options));
- }
- if (options && options.expand === true && options.nodupes === true) {
- output = [...new Set(output)];
- }
- return output;
-};
+ var _dataHeader = {};
-/**
- * Parse the given `str` with the given `options`.
- *
- * ```js
- * // braces.parse(pattern, [, options]);
- * const ast = braces.parse('a/{b,c}/d');
- * console.log(ast);
- * ```
- * @param {String} pattern Brace pattern to parse
- * @param {Object} options
- * @return {Object} Returns an AST
- * @api public
- */
+ function setTime(val) {
+ val = new Date(val);
+ _time = (val.getFullYear() - 1980 & 0x7f) << 25 // b09-16 years from 1980
+ | (val.getMonth() + 1) << 21 // b05-08 month
+            | val.getDate() << 16 // b00-04 day
-braces.parse = (input, options = {}) => parse(input, options);
+ // 2 bytes time
+ | val.getHours() << 11 // b11-15 hour
+ | val.getMinutes() << 5 // b05-10 minute
+ | val.getSeconds() >> 1; // b00-04 seconds divided by 2
+ }
-/**
- * Creates a braces string from an AST, or an AST node.
- *
- * ```js
- * const braces = require('braces');
- * let ast = braces.parse('foo/{a,b}/bar');
- * console.log(stringify(ast.nodes[2])); //=> '{a,b}'
- * ```
- * @param {String} `input` Brace pattern or AST.
- * @param {Object} `options`
- * @return {Array} Returns an array of expanded values.
- * @api public
- */
+ setTime(+new Date());
-braces.stringify = (input, options = {}) => {
- if (typeof input === 'string') {
- return stringify(braces.parse(input, options), options);
- }
- return stringify(input, options);
-};
+ return {
+ get made () { return _verMade; },
+ set made (val) { _verMade = val; },
+
+ get version () { return _version; },
+ set version (val) { _version = val },
+
+ get flags () { return _flags },
+ set flags (val) { _flags = val; },
+
+ get method () { return _method; },
+ set method (val) {
+ switch (val){
+ case Constants.STORED:
+ this.version = 10;
+ case Constants.DEFLATED:
+ default:
+ this.version = 20;
+ }
+ _method = val;
+ },
+
+ get time () { return new Date(
+ ((_time >> 25) & 0x7f) + 1980,
+ ((_time >> 21) & 0x0f) - 1,
+ (_time >> 16) & 0x1f,
+ (_time >> 11) & 0x1f,
+ (_time >> 5) & 0x3f,
+ (_time & 0x1f) << 1
+ );
+ },
+ set time (val) {
+ setTime(val);
+ },
-/**
- * Compiles a brace pattern into a regex-compatible, optimized string.
- * This method is called by the main [braces](#braces) function by default.
- *
- * ```js
- * const braces = require('braces');
- * console.log(braces.compile('a/{b,c}/d'));
- * //=> ['a/(b|c)/d']
- * ```
- * @param {String} `input` Brace pattern or AST.
- * @param {Object} `options`
- * @return {Array} Returns an array of expanded values.
- * @api public
- */
+ get crc () { return _crc; },
+ set crc (val) { _crc = val; },
-braces.compile = (input, options = {}) => {
- if (typeof input === 'string') {
- input = braces.parse(input, options);
- }
- return compile(input, options);
-};
+ get compressedSize () { return _compressedSize; },
+ set compressedSize (val) { _compressedSize = val; },
-/**
- * Expands a brace pattern into an array. This method is called by the
- * main [braces](#braces) function when `options.expand` is true. Before
- * using this method it's recommended that you read the [performance notes](#performance))
- * and advantages of using [.compile](#compile) instead.
- *
- * ```js
- * const braces = require('braces');
- * console.log(braces.expand('a/{b,c}/d'));
- * //=> ['a/b/d', 'a/c/d'];
- * ```
- * @param {String} `pattern` Brace pattern
- * @param {Object} `options`
- * @return {Array} Returns an array of expanded values.
- * @api public
- */
+ get size () { return _size; },
+ set size (val) { _size = val; },
-braces.expand = (input, options = {}) => {
- if (typeof input === 'string') {
- input = braces.parse(input, options);
- }
+ get fileNameLength () { return _fnameLen; },
+ set fileNameLength (val) { _fnameLen = val; },
- let result = expand(input, options);
+ get extraLength () { return _extraLen },
+ set extraLength (val) { _extraLen = val; },
- // filter out empty strings if specified
- if (options.noempty === true) {
- result = result.filter(Boolean);
- }
+ get commentLength () { return _comLen },
+ set commentLength (val) { _comLen = val },
- // filter out duplicates if specified
- if (options.nodupes === true) {
- result = [...new Set(result)];
- }
+ get diskNumStart () { return _diskStart },
+ set diskNumStart (val) { _diskStart = val },
- return result;
-};
+ get inAttr () { return _inattr },
+ set inAttr (val) { _inattr = val },
-/**
- * Processes a brace pattern and returns either an expanded array
- * (if `options.expand` is true), a highly optimized regex-compatible string.
- * This method is called by the main [braces](#braces) function.
- *
- * ```js
- * const braces = require('braces');
- * console.log(braces.create('user-{200..300}/project-{a,b,c}-{1..10}'))
- * //=> 'user-(20[0-9]|2[1-9][0-9]|300)/project-(a|b|c)-([1-9]|10)'
- * ```
- * @param {String} `pattern` Brace pattern
- * @param {Object} `options`
- * @return {Array} Returns an array of expanded values.
- * @api public
- */
+ get attr () { return _attr },
+ set attr (val) { _attr = val },
-braces.create = (input, options = {}) => {
- if (input === '' || input.length < 3) {
- return [input];
- }
+ get offset () { return _offset },
+ set offset (val) { _offset = val },
- return options.expand !== true
- ? braces.compile(input, options)
- : braces.expand(input, options);
-};
+ get encripted () { return (_flags & 1) === 1 },
-/**
- * Expose "braces"
- */
+ get entryHeaderSize () {
+ return Constants.CENHDR + _fnameLen + _extraLen + _comLen;
+ },
-module.exports = braces;
+ get realDataOffset () {
+ return _offset + Constants.LOCHDR + _dataHeader.fnameLen + _dataHeader.extraLen;
+ },
+ get dataHeader () {
+ return _dataHeader;
+ },
-/***/ }),
+ loadDataHeaderFromBinary : function(/*Buffer*/input) {
+ var data = input.slice(_offset, _offset + Constants.LOCHDR);
+ // 30 bytes and should start with "PK\003\004"
+ if (data.readUInt32LE(0) !== Constants.LOCSIG) {
+ throw new Error(Utils.Errors.INVALID_LOC);
+ }
+ _dataHeader = {
+ // version needed to extract
+ version : data.readUInt16LE(Constants.LOCVER),
+ // general purpose bit flag
+ flags : data.readUInt16LE(Constants.LOCFLG),
+ // compression method
+ method : data.readUInt16LE(Constants.LOCHOW),
+ // modification time (2 bytes time, 2 bytes date)
+ time : data.readUInt32LE(Constants.LOCTIM),
+ // uncompressed file crc-32 value
+ crc : data.readUInt32LE(Constants.LOCCRC),
+ // compressed size
+ compressedSize : data.readUInt32LE(Constants.LOCSIZ),
+ // uncompressed size
+ size : data.readUInt32LE(Constants.LOCLEN),
+ // filename length
+ fnameLen : data.readUInt16LE(Constants.LOCNAM),
+ // extra field length
+ extraLen : data.readUInt16LE(Constants.LOCEXT)
+ }
+ },
-/***/ 7123:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+ loadFromBinary : function(/*Buffer*/data) {
+ // data should be 46 bytes and start with "PK 01 02"
+ if (data.length !== Constants.CENHDR || data.readUInt32LE(0) !== Constants.CENSIG) {
+ throw new Error(Utils.Errors.INVALID_CEN);
+ }
+ // version made by
+ _verMade = data.readUInt16LE(Constants.CENVEM);
+ // version needed to extract
+ _version = data.readUInt16LE(Constants.CENVER);
+ // encrypt, decrypt flags
+ _flags = data.readUInt16LE(Constants.CENFLG);
+ // compression method
+ _method = data.readUInt16LE(Constants.CENHOW);
+ // modification time (2 bytes time, 2 bytes date)
+ _time = data.readUInt32LE(Constants.CENTIM);
+ // uncompressed file crc-32 value
+ _crc = data.readUInt32LE(Constants.CENCRC);
+ // compressed size
+ _compressedSize = data.readUInt32LE(Constants.CENSIZ);
+ // uncompressed size
+ _size = data.readUInt32LE(Constants.CENLEN);
+ // filename length
+ _fnameLen = data.readUInt16LE(Constants.CENNAM);
+ // extra field length
+ _extraLen = data.readUInt16LE(Constants.CENEXT);
+ // file comment length
+ _comLen = data.readUInt16LE(Constants.CENCOM);
+ // volume number start
+ _diskStart = data.readUInt16LE(Constants.CENDSK);
+ // internal file attributes
+ _inattr = data.readUInt16LE(Constants.CENATT);
+ // external file attributes
+ _attr = data.readUInt32LE(Constants.CENATX);
+ // LOC header offset
+ _offset = data.readUInt32LE(Constants.CENOFF);
+ },
-"use strict";
+ dataHeaderToBinary : function() {
+ // LOC header size (30 bytes)
+ var data = Buffer.alloc(Constants.LOCHDR);
+ // "PK\003\004"
+ data.writeUInt32LE(Constants.LOCSIG, 0);
+ // version needed to extract
+ data.writeUInt16LE(_version, Constants.LOCVER);
+ // general purpose bit flag
+ data.writeUInt16LE(_flags, Constants.LOCFLG);
+ // compression method
+ data.writeUInt16LE(_method, Constants.LOCHOW);
+ // modification time (2 bytes time, 2 bytes date)
+ data.writeUInt32LE(_time, Constants.LOCTIM);
+ // uncompressed file crc-32 value
+ data.writeUInt32LE(_crc, Constants.LOCCRC);
+ // compressed size
+ data.writeUInt32LE(_compressedSize, Constants.LOCSIZ);
+ // uncompressed size
+ data.writeUInt32LE(_size, Constants.LOCLEN);
+ // filename length
+ data.writeUInt16LE(_fnameLen, Constants.LOCNAM);
+ // extra field length
+ data.writeUInt16LE(_extraLen, Constants.LOCEXT);
+ return data;
+ },
+ entryHeaderToBinary : function() {
+ // CEN header size (46 bytes)
+ var data = Buffer.alloc(Constants.CENHDR + _fnameLen + _extraLen + _comLen);
+ // "PK\001\002"
+ data.writeUInt32LE(Constants.CENSIG, 0);
+ // version made by
+ data.writeUInt16LE(_verMade, Constants.CENVEM);
+ // version needed to extract
+ data.writeUInt16LE(_version, Constants.CENVER);
+ // encrypt, decrypt flags
+ data.writeUInt16LE(_flags, Constants.CENFLG);
+ // compression method
+ data.writeUInt16LE(_method, Constants.CENHOW);
+ // modification time (2 bytes time, 2 bytes date)
+ data.writeUInt32LE(_time, Constants.CENTIM);
+ // uncompressed file crc-32 value
+ data.writeUInt32LE(_crc, Constants.CENCRC);
+ // compressed size
+ data.writeUInt32LE(_compressedSize, Constants.CENSIZ);
+ // uncompressed size
+ data.writeUInt32LE(_size, Constants.CENLEN);
+ // filename length
+ data.writeUInt16LE(_fnameLen, Constants.CENNAM);
+ // extra field length
+ data.writeUInt16LE(_extraLen, Constants.CENEXT);
+ // file comment length
+ data.writeUInt16LE(_comLen, Constants.CENCOM);
+ // volume number start
+ data.writeUInt16LE(_diskStart, Constants.CENDSK);
+ // internal file attributes
+ data.writeUInt16LE(_inattr, Constants.CENATT);
+ // external file attributes
+ data.writeUInt32LE(_attr, Constants.CENATX);
+ // LOC header offset
+ data.writeUInt32LE(_offset, Constants.CENOFF);
+            // zero-fill the variable-length tail (filename, extra field, comment)
+ data.fill(0x00, Constants.CENHDR);
+ return data;
+ },
-const fill = __nccwpck_require__(791);
-const utils = __nccwpck_require__(7691);
+ toString : function() {
+ return '{\n' +
+ '\t"made" : ' + _verMade + ",\n" +
+ '\t"version" : ' + _version + ",\n" +
+ '\t"flags" : ' + _flags + ",\n" +
+ '\t"method" : ' + Utils.methodToString(_method) + ",\n" +
+ '\t"time" : ' + this.time + ",\n" +
+ '\t"crc" : 0x' + _crc.toString(16).toUpperCase() + ",\n" +
+ '\t"compressedSize" : ' + _compressedSize + " bytes,\n" +
+ '\t"size" : ' + _size + " bytes,\n" +
+ '\t"fileNameLength" : ' + _fnameLen + ",\n" +
+ '\t"extraLength" : ' + _extraLen + " bytes,\n" +
+ '\t"commentLength" : ' + _comLen + " bytes,\n" +
+ '\t"diskNumStart" : ' + _diskStart + ",\n" +
+ '\t"inAttr" : ' + _inattr + ",\n" +
+ '\t"attr" : ' + _attr + ",\n" +
+ '\t"offset" : ' + _offset + ",\n" +
+ '\t"entryHeaderSize" : ' + (Constants.CENHDR + _fnameLen + _extraLen + _comLen) + " bytes\n" +
+ '}';
+ }
+ }
+};
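// A worked example of the MS-DOS date/time packing used by setTime above
// (7 bits years since 1980, 4 bits month, 5 bits day, 5 bits hours,
// 6 bits minutes, 5 bits seconds/2); the date is illustrative:
const d = new Date(2021, 2, 15, 13, 45, 30); // 15 Mar 2021, 13:45:30 local time
const dosTime =
    ((d.getFullYear() - 1980) & 0x7f) << 25 |
    (d.getMonth() + 1) << 21 |
    d.getDate() << 16 |
    d.getHours() << 11 |
    d.getMinutes() << 5 |
    d.getSeconds() >> 1;
console.log((dosTime >>> 0).toString(16)); // '526f6daf'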
-const compile = (ast, options = {}) => {
- let walk = (node, parent = {}) => {
- let invalidBlock = utils.isInvalidBrace(parent);
- let invalidNode = node.invalid === true && options.escapeInvalid === true;
- let invalid = invalidBlock === true || invalidNode === true;
- let prefix = options.escapeInvalid === true ? '\\' : '';
- let output = '';
- if (node.isOpen === true) {
- return prefix + node.value;
- }
- if (node.isClose === true) {
- return prefix + node.value;
- }
+/***/ }),
- if (node.type === 'open') {
- return invalid ? (prefix + node.value) : '(';
- }
+/***/ 4958:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
- if (node.type === 'close') {
- return invalid ? (prefix + node.value) : ')';
- }
+exports.EntryHeader = __nccwpck_require__(9032);
+exports.MainHeader = __nccwpck_require__(4408);
- if (node.type === 'comma') {
- return node.prev.type === 'comma' ? '' : (invalid ? node.value : '|');
- }
- if (node.value) {
- return node.value;
- }
+/***/ }),
- if (node.nodes && node.ranges > 0) {
- let args = utils.reduce(node.nodes);
- let range = fill(...args, { ...options, wrap: false, toRegex: true });
+/***/ 4408:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
- if (range.length !== 0) {
- return args.length > 1 && range.length > 1 ? `(${range})` : range;
- }
- }
+var Utils = __nccwpck_require__(5182),
+ Constants = Utils.Constants;
- if (node.nodes) {
- for (let child of node.nodes) {
- output += walk(child, node);
- }
- }
- return output;
- };
+/* The entries in the end of central directory */
+module.exports = function () {
+ var _volumeEntries = 0,
+ _totalEntries = 0,
+ _size = 0,
+ _offset = 0,
+ _commentLength = 0;
- return walk(ast);
-};
+ return {
+ get diskEntries () { return _volumeEntries },
+ set diskEntries (/*Number*/val) { _volumeEntries = _totalEntries = val; },
-module.exports = compile;
+ get totalEntries () { return _totalEntries },
+ set totalEntries (/*Number*/val) { _totalEntries = _volumeEntries = val; },
+ get size () { return _size },
+ set size (/*Number*/val) { _size = val; },
-/***/ }),
+ get offset () { return _offset },
+ set offset (/*Number*/val) { _offset = val; },
-/***/ 5412:
-/***/ ((module) => {
+ get commentLength () { return _commentLength },
+ set commentLength (/*Number*/val) { _commentLength = val; },
-"use strict";
+ get mainHeaderSize () {
+ return Constants.ENDHDR + _commentLength;
+ },
+ loadFromBinary : function(/*Buffer*/data) {
+ // data should be 22 bytes and start with "PK 05 06"
+ // or be 56+ bytes and start with "PK 06 06" for Zip64
+ if ((data.length !== Constants.ENDHDR || data.readUInt32LE(0) !== Constants.ENDSIG) &&
+ (data.length < Constants.ZIP64HDR || data.readUInt32LE(0) !== Constants.ZIP64SIG)) {
-module.exports = {
- MAX_LENGTH: 1024 * 64,
+ throw new Error(Utils.Errors.INVALID_END);
+ }
- // Digits
- CHAR_0: '0', /* 0 */
- CHAR_9: '9', /* 9 */
+ if (data.readUInt32LE(0) === Constants.ENDSIG) {
+ // number of entries on this volume
+ _volumeEntries = data.readUInt16LE(Constants.ENDSUB);
+ // total number of entries
+ _totalEntries = data.readUInt16LE(Constants.ENDTOT);
+ // central directory size in bytes
+ _size = data.readUInt32LE(Constants.ENDSIZ);
+ // offset of first CEN header
+ _offset = data.readUInt32LE(Constants.ENDOFF);
+ // zip file comment length
+ _commentLength = data.readUInt16LE(Constants.ENDCOM);
+ } else {
+ // number of entries on this volume
+ _volumeEntries = Utils.readBigUInt64LE(data, Constants.ZIP64SUB);
+ // total number of entries
+ _totalEntries = Utils.readBigUInt64LE(data, Constants.ZIP64TOT);
+ // central directory size in bytes
+ _size = Utils.readBigUInt64LE(data, Constants.ZIP64SIZ);
+ // offset of first CEN header
+ _offset = Utils.readBigUInt64LE(data, Constants.ZIP64OFF);
+
+ _commentLength = 0;
+ }
- // Alphabet chars.
- CHAR_UPPERCASE_A: 'A', /* A */
- CHAR_LOWERCASE_A: 'a', /* a */
- CHAR_UPPERCASE_Z: 'Z', /* Z */
- CHAR_LOWERCASE_Z: 'z', /* z */
+ },
- CHAR_LEFT_PARENTHESES: '(', /* ( */
- CHAR_RIGHT_PARENTHESES: ')', /* ) */
+ toBinary : function() {
+ var b = Buffer.alloc(Constants.ENDHDR + _commentLength);
+ // "PK 05 06" signature
+ b.writeUInt32LE(Constants.ENDSIG, 0);
+ b.writeUInt32LE(0, 4);
+ // number of entries on this volume
+ b.writeUInt16LE(_volumeEntries, Constants.ENDSUB);
+ // total number of entries
+ b.writeUInt16LE(_totalEntries, Constants.ENDTOT);
+ // central directory size in bytes
+ b.writeUInt32LE(_size, Constants.ENDSIZ);
+ // offset of first CEN header
+ b.writeUInt32LE(_offset, Constants.ENDOFF);
+ // zip file comment length
+ b.writeUInt16LE(_commentLength, Constants.ENDCOM);
+ // fill comment memory with spaces so no garbage is left there
+ b.fill(" ", Constants.ENDHDR);
+
+ return b;
+ },
- CHAR_ASTERISK: '*', /* * */
+ toString : function() {
+ return '{\n' +
+ '\t"diskEntries" : ' + _volumeEntries + ",\n" +
+ '\t"totalEntries" : ' + _totalEntries + ",\n" +
+ '\t"size" : ' + _size + " bytes,\n" +
+ '\t"offset" : 0x' + _offset.toString(16).toUpperCase() + ",\n" +
+ '\t"commentLength" : 0x' + _commentLength + "\n" +
+ '}';
+ }
+ }
+};
- // Non-alphabetic chars.
- CHAR_AMPERSAND: '&', /* & */
- CHAR_AT: '@', /* @ */
- CHAR_BACKSLASH: '\\', /* \ */
- CHAR_BACKTICK: '`', /* ` */
- CHAR_CARRIAGE_RETURN: '\r', /* \r */
- CHAR_CIRCUMFLEX_ACCENT: '^', /* ^ */
- CHAR_COLON: ':', /* : */
- CHAR_COMMA: ',', /* , */
- CHAR_DOLLAR: '$', /* . */
- CHAR_DOT: '.', /* . */
- CHAR_DOUBLE_QUOTE: '"', /* " */
- CHAR_EQUAL: '=', /* = */
- CHAR_EXCLAMATION_MARK: '!', /* ! */
- CHAR_FORM_FEED: '\f', /* \f */
- CHAR_FORWARD_SLASH: '/', /* / */
- CHAR_HASH: '#', /* # */
- CHAR_HYPHEN_MINUS: '-', /* - */
- CHAR_LEFT_ANGLE_BRACKET: '<', /* < */
- CHAR_LEFT_CURLY_BRACE: '{', /* { */
- CHAR_LEFT_SQUARE_BRACKET: '[', /* [ */
- CHAR_LINE_FEED: '\n', /* \n */
- CHAR_NO_BREAK_SPACE: '\u00A0', /* \u00A0 */
- CHAR_PERCENT: '%', /* % */
- CHAR_PLUS: '+', /* + */
- CHAR_QUESTION_MARK: '?', /* ? */
- CHAR_RIGHT_ANGLE_BRACKET: '>', /* > */
- CHAR_RIGHT_CURLY_BRACE: '}', /* } */
- CHAR_RIGHT_SQUARE_BRACKET: ']', /* ] */
- CHAR_SEMICOLON: ';', /* ; */
- CHAR_SINGLE_QUOTE: '\'', /* ' */
- CHAR_SPACE: ' ', /* */
- CHAR_TAB: '\t', /* \t */
- CHAR_UNDERSCORE: '_', /* _ */
- CHAR_VERTICAL_LINE: '|', /* | */
- CHAR_ZERO_WIDTH_NOBREAK_SPACE: '\uFEFF' /* \uFEFF */
+/***/ }),
+
+/***/ 7686:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+module.exports = function (/*Buffer*/inbuf) {
+
+ var zlib = __nccwpck_require__(8761);
+
+ var opts = {chunkSize: (parseInt(inbuf.length / 1024) + 1) * 1024};
+
+ return {
+ deflate: function () {
+ return zlib.deflateRawSync(inbuf, opts);
+ },
+
+ deflateAsync: function (/*Function*/callback) {
+ var tmp = zlib.createDeflateRaw(opts), parts = [], total = 0;
+ tmp.on('data', function (data) {
+ parts.push(data);
+ total += data.length;
+ });
+ tmp.on('end', function () {
+ var buf = Buffer.alloc(total), written = 0;
+ buf.fill(0);
+ for (var i = 0; i < parts.length; i++) {
+ var part = parts[i];
+ part.copy(buf, written);
+ written += part.length;
+ }
+ callback && callback(buf);
+ });
+ tmp.end(inbuf);
+ }
+ }
};
/***/ }),
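// A round-trip sketch of the raw-deflate wrapper above (and the matching
// inflater a few modules below), using Node's zlib directly:
const zlib = require('zlib');
const input = Buffer.from('hello hello hello');
const compressed = zlib.deflateRawSync(input);    // what Deflater(...).deflate() returns
const restored = zlib.inflateRawSync(compressed); // what Inflater(...).inflate() returns
console.log(restored.equals(input)); // true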
-/***/ 6944:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+/***/ 3928:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
-"use strict";
+exports.Deflater = __nccwpck_require__(7686);
+exports.Inflater = __nccwpck_require__(2153);
+exports.ZipCrypto = __nccwpck_require__(3228);
+/***/ }),
-const fill = __nccwpck_require__(791);
-const stringify = __nccwpck_require__(4810);
-const utils = __nccwpck_require__(7691);
+/***/ 2153:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
-const append = (queue = '', stash = '', enclose = false) => {
- let result = [];
+module.exports = function (/*Buffer*/inbuf) {
- queue = [].concat(queue);
- stash = [].concat(stash);
+ var zlib = __nccwpck_require__(8761);
- if (!stash.length) return queue;
- if (!queue.length) {
- return enclose ? utils.flatten(stash).map(ele => `{${ele}}`) : stash;
- }
+ return {
+ inflate: function () {
+ return zlib.inflateRawSync(inbuf);
+ },
- for (let item of queue) {
- if (Array.isArray(item)) {
- for (let value of item) {
- result.push(append(value, stash, enclose));
- }
- } else {
- for (let ele of stash) {
- if (enclose === true && typeof ele === 'string') ele = `{${ele}}`;
- result.push(Array.isArray(ele) ? append(item, ele, enclose) : (item + ele));
- }
+ inflateAsync: function (/*Function*/callback) {
+ var tmp = zlib.createInflateRaw(), parts = [], total = 0;
+ tmp.on('data', function (data) {
+ parts.push(data);
+ total += data.length;
+ });
+ tmp.on('end', function () {
+ var buf = Buffer.alloc(total), written = 0;
+ buf.fill(0);
+ for (var i = 0; i < parts.length; i++) {
+ var part = parts[i];
+ part.copy(buf, written);
+ written += part.length;
+ }
+ callback && callback(buf);
+ });
+ tmp.end(inbuf);
}
}
- return utils.flatten(result);
};
-const expand = (ast, options = {}) => {
- let rangeLimit = options.rangeLimit === void 0 ? 1000 : options.rangeLimit;
- let walk = (node, parent = {}) => {
- node.queue = [];
+/***/ }),
- let p = parent;
- let q = parent.queue;
+/***/ 3228:
+/***/ ((module) => {
- while (p.type !== 'brace' && p.type !== 'root' && p.parent) {
- p = p.parent;
- q = p.queue;
+// generate CRC32 lookup table
+const crctable = (new Uint32Array(256)).map((t,crc)=>{
+ for(let j=0;j<8;j++){
+ if (0 !== (crc & 1)){
+ crc = (crc >>> 1) ^ 0xEDB88320
+ }else{
+ crc >>>= 1
+ }
}
+ return crc>>>0;
+});
- if (node.invalid || node.dollar) {
- q.push(append(q.pop(), stringify(node, options)));
- return;
+function make_decrypter(/*Buffer*/pwd){
+ // C-style uInt32 Multiply
+ const uMul = (a,b) => Math.imul(a, b) >>> 0;
+ // Initialize keys with default values
+ const keys = new Uint32Array([0x12345678, 0x23456789, 0x34567890]);
+ // crc32 byte update
+ const crc32update = (pCrc32, bval) => {
+ return crctable[(pCrc32 ^ bval) & 0xff] ^ (pCrc32 >>> 8);
+ }
+ // update keys with byteValues
+ const updateKeys = (byteValue) => {
+ keys[0] = crc32update(keys[0], byteValue);
+ keys[1] += keys[0] & 0xff;
+ keys[1] = uMul(keys[1], 134775813) + 1;
+ keys[2] = crc32update(keys[2], keys[1] >>> 24);
+ }
+
+    // Stage 1: initialize the keys from the password
+ const pass = (Buffer.isBuffer(pwd)) ? pwd : Buffer.from(pwd);
+ for(let i=0; i< pass.length; i++){
+ updateKeys(pass[i]);
+ }
+
+ // return decrypter function
+ return function (/*Buffer*/data){
+ if (!Buffer.isBuffer(data)){
+ throw 'decrypter needs Buffer'
+ }
+ // result - we create new Buffer for results
+ const result = Buffer.alloc(data.length);
+ let pos = 0;
+ // process input data
+ for(let c of data){
+ const k = (keys[2] | 2) >>> 0; // key
+ c ^= (uMul(k, k^1) >> 8) & 0xff; // decode
+ result[pos++] = c; // Save Value
+ updateKeys(c); // update keys with decoded byte
+ }
+ return result;
}
+}
- if (node.type === 'brace' && node.invalid !== true && node.nodes.length === 2) {
- q.push(append(q.pop(), ['{}']));
- return;
+function decrypt(/*Buffer*/ data, /*Object*/header, /*String, Buffer*/ pwd){
+ if (!data || !Buffer.isBuffer(data) || data.length < 12) {
+ return Buffer.alloc(0);
}
+
+ // We Initialize and generate decrypting function
+ const decrypter = make_decrypter(pwd);
- if (node.nodes && node.ranges > 0) {
- let args = utils.reduce(node.nodes);
-
- if (utils.exceedsLimit(...args, options.step, rangeLimit)) {
- throw new RangeError('expanded array length exceeds range limit. Use options.rangeLimit to increase or disable the limit.');
- }
-
- let range = fill(...args, options);
- if (range.length === 0) {
- range = stringify(node, options);
- }
+ // check - for testing password
+ const check = header.crc >>> 24;
+    // decrypt the salt, which is always 12 bytes and is part of the file content
+ const testbyte = decrypter(data.slice(0, 12))[11];
- q.push(append(q.pop(), range));
- node.nodes = [];
- return;
+ // does password meet expectations
+ if (check !== testbyte){
+ throw 'ADM-ZIP: Wrong Password';
}
- let enclose = utils.encloseBrace(node);
- let queue = node.queue;
- let block = node;
-
- while (block.type !== 'brace' && block.type !== 'root' && block.parent) {
- block = block.parent;
- queue = block.queue;
- }
+ // decode content
+ return decrypter(data.slice(12));
+}
- for (let i = 0; i < node.nodes.length; i++) {
- let child = node.nodes[i];
+module.exports = {decrypt};
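// A quick sanity check of the CRC-32 table built at the top of this module
// (reflected polynomial 0xEDB88320, the same table used by updateKeys):
const table = new Uint32Array(256).map((t, crc) => {
    for (let j = 0; j < 8; j++) crc = (crc & 1) ? ((crc >>> 1) ^ 0xEDB88320) : (crc >>> 1);
    return crc >>> 0;
});
console.log(table[1].toString(16)); // '77073096' – the standard CRC-32 table value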
- if (child.type === 'comma' && node.type === 'brace') {
- if (i === 1) queue.push('');
- queue.push('');
- continue;
- }
- if (child.type === 'close') {
- q.push(append(q.pop(), queue, enclose));
- continue;
- }
+/***/ }),
- if (child.value && child.type !== 'open') {
- queue.push(append(queue.pop(), child.value));
- continue;
- }
+/***/ 4522:
+/***/ ((module) => {
- if (child.nodes) {
- walk(child, node);
- }
- }
+module.exports = {
+ /* The local file header */
+ LOCHDR : 30, // LOC header size
+ LOCSIG : 0x04034b50, // "PK\003\004"
+ LOCVER : 4, // version needed to extract
+ LOCFLG : 6, // general purpose bit flag
+ LOCHOW : 8, // compression method
+ LOCTIM : 10, // modification time (2 bytes time, 2 bytes date)
+ LOCCRC : 14, // uncompressed file crc-32 value
+ LOCSIZ : 18, // compressed size
+ LOCLEN : 22, // uncompressed size
+ LOCNAM : 26, // filename length
+ LOCEXT : 28, // extra field length
+
+ /* The Data descriptor */
+ EXTSIG : 0x08074b50, // "PK\007\008"
+ EXTHDR : 16, // EXT header size
+ EXTCRC : 4, // uncompressed file crc-32 value
+ EXTSIZ : 8, // compressed size
+ EXTLEN : 12, // uncompressed size
+
+ /* The central directory file header */
+ CENHDR : 46, // CEN header size
+ CENSIG : 0x02014b50, // "PK\001\002"
+ CENVEM : 4, // version made by
+ CENVER : 6, // version needed to extract
+ CENFLG : 8, // encrypt, decrypt flags
+ CENHOW : 10, // compression method
+ CENTIM : 12, // modification time (2 bytes time, 2 bytes date)
+ CENCRC : 16, // uncompressed file crc-32 value
+ CENSIZ : 20, // compressed size
+ CENLEN : 24, // uncompressed size
+ CENNAM : 28, // filename length
+ CENEXT : 30, // extra field length
+ CENCOM : 32, // file comment length
+ CENDSK : 34, // volume number start
+ CENATT : 36, // internal file attributes
+ CENATX : 38, // external file attributes (host system dependent)
+ CENOFF : 42, // LOC header offset
+
+ /* The entries in the end of central directory */
+ ENDHDR : 22, // END header size
+ ENDSIG : 0x06054b50, // "PK\005\006"
+ ENDSUB : 8, // number of entries on this disk
+ ENDTOT : 10, // total number of entries
+ ENDSIZ : 12, // central directory size in bytes
+ ENDOFF : 16, // offset of first CEN header
+ ENDCOM : 20, // zip file comment length
+
+ END64HDR : 20, // zip64 END header size
+ END64SIG : 0x07064b50, // zip64 Locator signature, "PK\006\007"
+ END64START : 4, // number of the disk with the start of the zip64
+ END64OFF : 8, // relative offset of the zip64 end of central directory
+ END64NUMDISKS : 16, // total number of disks
+
+ ZIP64SIG : 0x06064b50, // zip64 signature, "PK\006\006"
+ ZIP64HDR : 56, // zip64 record minimum size
+ ZIP64LEAD : 12, // leading bytes at the start of the record, not counted by the value stored in ZIP64SIZE
+ ZIP64SIZE : 4, // zip64 size of the central directory record
+ ZIP64VEM : 12, // zip64 version made by
+ ZIP64VER : 14, // zip64 version needed to extract
+ ZIP64DSK : 16, // zip64 number of this disk
+ ZIP64DSKDIR : 20, // number of the disk with the start of the record directory
+ ZIP64SUB : 24, // number of entries on this disk
+ ZIP64TOT : 32, // total number of entries
+ ZIP64SIZB : 40, // zip64 central directory size in bytes
+ ZIP64OFF : 48, // offset of start of central directory with respect to the starting disk number
+ ZIP64EXTRA : 56, // extensible data sector
+
+ /* Compression methods */
+ STORED : 0, // no compression
+ SHRUNK : 1, // shrunk
+ REDUCED1 : 2, // reduced with compression factor 1
+ REDUCED2 : 3, // reduced with compression factor 2
+ REDUCED3 : 4, // reduced with compression factor 3
+ REDUCED4 : 5, // reduced with compression factor 4
+ IMPLODED : 6, // imploded
+ // 7 reserved
+ DEFLATED : 8, // deflated
+ ENHANCED_DEFLATED: 9, // enhanced deflated
+ PKWARE : 10,// PKWare DCL imploded
+ // 11 reserved
+ BZIP2 : 12, // compressed using BZIP2
+ // 13 reserved
+ LZMA : 14, // LZMA
+ // 15-17 reserved
+ IBM_TERSE : 18, // compressed using IBM TERSE
+ IBM_LZ77 : 19, //IBM LZ77 z
+
+ /* General purpose bit flag */
+    FLG_ENC : 0, // encrypted file
+ FLG_COMP1 : 1, // compression option
+ FLG_COMP2 : 2, // compression option
+ FLG_DESC : 4, // data descriptor
+ FLG_ENH : 8, // enhanced deflation
+ FLG_STR : 16, // strong encryption
+ FLG_LNG : 1024, // language encoding
+ FLG_MSK : 4096, // mask header values
+
+ /* Load type */
+ FILE : 0,
+ BUFFER : 1,
+ NONE : 2,
+
+ /* 4.5 Extensible data fields */
+ EF_ID : 0,
+ EF_SIZE : 2,
+
+ /* Header IDs */
+ ID_ZIP64 : 0x0001,
+ ID_AVINFO : 0x0007,
+ ID_PFS : 0x0008,
+ ID_OS2 : 0x0009,
+ ID_NTFS : 0x000a,
+ ID_OPENVMS : 0x000c,
+ ID_UNIX : 0x000d,
+ ID_FORK : 0x000e,
+ ID_PATCH : 0x000f,
+ ID_X509_PKCS7 : 0x0014,
+ ID_X509_CERTID_F : 0x0015,
+ ID_X509_CERTID_C : 0x0016,
+ ID_STRONGENC : 0x0017,
+ ID_RECORD_MGT : 0x0018,
+ ID_X509_PKCS7_RL : 0x0019,
+ ID_IBM1 : 0x0065,
+ ID_IBM2 : 0x0066,
+ ID_POSZIP : 0x4690,
+
+ EF_ZIP64_OR_32 : 0xffffffff,
+ EF_ZIP64_OR_16 : 0xffff,
+ EF_ZIP64_SUNCOMP : 0,
+ EF_ZIP64_SCOMP : 8,
+ EF_ZIP64_RHO : 16,
+ EF_ZIP64_DSN : 24
+};
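// An example of reading a local file header with the offsets defined above;
// `buf` is assumed to be a Buffer positioned at the start of a LOC record:
function describeLocalHeader(buf) {
    if (buf.readUInt32LE(0) !== 0x04034b50) throw new Error('not a LOC header'); // LOCSIG
    return {
        method: buf.readUInt16LE(8),            // LOCHOW: 0 = STORED, 8 = DEFLATED
        compressedSize: buf.readUInt32LE(18),   // LOCSIZ
        uncompressedSize: buf.readUInt32LE(22), // LOCLEN
        fileNameLength: buf.readUInt16LE(26)    // LOCNAM
    };
}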
- return queue;
- };
- return utils.flatten(walk(ast));
-};
+/***/ }),
-module.exports = expand;
+/***/ 1255:
+/***/ ((module) => {
+module.exports = {
+ /* Header error messages */
+ "INVALID_LOC" : "Invalid LOC header (bad signature)",
+ "INVALID_CEN" : "Invalid CEN header (bad signature)",
+ "INVALID_END" : "Invalid END header (bad signature)",
+
+ /* ZipEntry error messages*/
+ "NO_DATA" : "Nothing to decompress",
+ "BAD_CRC" : "CRC32 checksum failed",
+ "FILE_IN_THE_WAY" : "There is a file in the way: %s",
+ "UNKNOWN_METHOD" : "Invalid/unsupported compression method",
+
+ /* Inflater error messages */
+ "AVAIL_DATA" : "inflate::Available inflate data did not terminate",
+ "INVALID_DISTANCE" : "inflate::Invalid literal/length or distance code in fixed or dynamic block",
+ "TO_MANY_CODES" : "inflate::Dynamic block code description: too many length or distance codes",
+ "INVALID_REPEAT_LEN" : "inflate::Dynamic block code description: repeat more than specified lengths",
+ "INVALID_REPEAT_FIRST" : "inflate::Dynamic block code description: repeat lengths with no first length",
+ "INCOMPLETE_CODES" : "inflate::Dynamic block code description: code lengths codes incomplete",
+ "INVALID_DYN_DISTANCE": "inflate::Dynamic block code description: invalid distance code lengths",
+ "INVALID_CODES_LEN": "inflate::Dynamic block code description: invalid literal/length code lengths",
+ "INVALID_STORE_BLOCK" : "inflate::Stored block length did not match one's complement",
+ "INVALID_BLOCK_TYPE" : "inflate::Invalid block type (type == 3)",
+
+ /* ADM-ZIP error messages */
+ "CANT_EXTRACT_FILE" : "Could not extract the file",
+ "CANT_OVERRIDE" : "Target file already exists",
+ "NO_ZIP" : "No zip file was loaded",
+ "NO_ENTRY" : "Entry doesn't exist",
+ "DIRECTORY_CONTENT_ERROR" : "A directory cannot have content",
+ "FILE_NOT_FOUND" : "File not found: %s",
+ "NOT_IMPLEMENTED" : "Not implemented",
+ "INVALID_FILENAME" : "Invalid filename",
+ "INVALID_FORMAT" : "Invalid or unsupported zip format. No END header found"
+};
/***/ }),
-/***/ 9889:
+/***/ 8321:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
-"use strict";
+var fs = __nccwpck_require__(2895).require(),
+ pth = __nccwpck_require__(5622);
+
+fs.existsSync = fs.existsSync || pth.existsSync;
+module.exports = function(/*String*/path) {
-const stringify = __nccwpck_require__(4810);
+ var _path = path || "",
+ _permissions = 0,
+ _obj = newAttr(),
+ _stat = null;
-/**
- * Constants
- */
+ function newAttr() {
+ return {
+ directory : false,
+ readonly : false,
+ hidden : false,
+ executable : false,
+ mtime : 0,
+ atime : 0
+ }
+ }
+
+ if (_path && fs.existsSync(_path)) {
+ _stat = fs.statSync(_path);
+ _obj.directory = _stat.isDirectory();
+ _obj.mtime = _stat.mtime;
+ _obj.atime = _stat.atime;
+        _obj.executable = (0o111 & _stat.mode) != 0; // executable if anyone (not just the owner) has an execute bit
+        _obj.readonly = (0o200 & _stat.mode) == 0; // read-only if the owner has no write permission
+ _obj.hidden = pth.basename(_path)[0] === ".";
+ } else {
+ console.warn("Invalid path: " + _path)
+ }
-const {
- MAX_LENGTH,
- CHAR_BACKSLASH, /* \ */
- CHAR_BACKTICK, /* ` */
- CHAR_COMMA, /* , */
- CHAR_DOT, /* . */
- CHAR_LEFT_PARENTHESES, /* ( */
- CHAR_RIGHT_PARENTHESES, /* ) */
- CHAR_LEFT_CURLY_BRACE, /* { */
- CHAR_RIGHT_CURLY_BRACE, /* } */
- CHAR_LEFT_SQUARE_BRACKET, /* [ */
- CHAR_RIGHT_SQUARE_BRACKET, /* ] */
- CHAR_DOUBLE_QUOTE, /* " */
- CHAR_SINGLE_QUOTE, /* ' */
- CHAR_NO_BREAK_SPACE,
- CHAR_ZERO_WIDTH_NOBREAK_SPACE
-} = __nccwpck_require__(5412);
+ return {
-/**
- * parse
- */
+ get directory () {
+ return _obj.directory;
+ },
-const parse = (input, options = {}) => {
- if (typeof input !== 'string') {
- throw new TypeError('Expected a string');
- }
+ get readOnly () {
+ return _obj.readonly;
+ },
- let opts = options || {};
- let max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH;
- if (input.length > max) {
- throw new SyntaxError(`Input length (${input.length}), exceeds max characters (${max})`);
- }
+ get hidden () {
+ return _obj.hidden;
+ },
- let ast = { type: 'root', input, nodes: [] };
- let stack = [ast];
- let block = ast;
- let prev = ast;
- let brackets = 0;
- let length = input.length;
- let index = 0;
- let depth = 0;
- let value;
- let memo = {};
+ get mtime () {
+ return _obj.mtime;
+ },
- /**
- * Helpers
- */
+ get atime () {
+ return _obj.atime;
+ },
- const advance = () => input[index++];
- const push = node => {
- if (node.type === 'text' && prev.type === 'dot') {
- prev.type = 'text';
- }
- if (prev && prev.type === 'text' && node.type === 'text') {
- prev.value += node.value;
- return;
- }
+ get executable () {
+ return _obj.executable;
+ },
- block.nodes.push(node);
- node.parent = block;
- node.prev = prev;
- prev = node;
- return node;
- };
+ decodeAttributes : function(val) {
- push({ type: 'bos' });
+ },
- while (index < length) {
- block = stack[stack.length - 1];
- value = advance();
+ encodeAttributes : function (val) {
- /**
- * Invalid chars
- */
+ },
- if (value === CHAR_ZERO_WIDTH_NOBREAK_SPACE || value === CHAR_NO_BREAK_SPACE) {
- continue;
+ toString : function() {
+ return '{\n' +
+ '\t"path" : "' + _path + ",\n" +
+ '\t"isDirectory" : ' + _obj.directory + ",\n" +
+ '\t"isReadOnly" : ' + _obj.readonly + ",\n" +
+ '\t"isHidden" : ' + _obj.hidden + ",\n" +
+ '\t"isExecutable" : ' + _obj.executable + ",\n" +
+ '\t"mTime" : ' + _obj.mtime + "\n" +
+ '\t"aTime" : ' + _obj.atime + "\n" +
+ '}';
+ }
}
- /**
- * Escaped chars
- */
-
- if (value === CHAR_BACKSLASH) {
- push({ type: 'text', value: (options.keepEscaping ? value : '') + advance() });
- continue;
- }
+};
- /**
- * Right square bracket (literal): ']'
- */
- if (value === CHAR_RIGHT_SQUARE_BRACKET) {
- push({ type: 'text', value: '\\' + value });
- continue;
- }
+/***/ }),
- /**
- * Left square bracket: '['
- */
+/***/ 2895:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
- if (value === CHAR_LEFT_SQUARE_BRACKET) {
- brackets++;
+exports.require = function() {
+ var fs = __nccwpck_require__(5747);
+ if (process && process.versions && process.versions['electron']) {
+ try {
+ originalFs = __nccwpck_require__(2941);
+ if (Object.keys(originalFs).length > 0) {
+ fs = originalFs;
+ }
+ } catch (e) {}
+ }
+ return fs
+};
- let closed = true;
- let next;
- while (index < length && (next = advance())) {
- value += next;
+/***/ }),
- if (next === CHAR_LEFT_SQUARE_BRACKET) {
- brackets++;
- continue;
- }
+/***/ 5182:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
- if (next === CHAR_BACKSLASH) {
- value += advance();
- continue;
- }
+module.exports = __nccwpck_require__(1291);
+module.exports.FileSystem = __nccwpck_require__(2895);
+module.exports.Constants = __nccwpck_require__(4522);
+module.exports.Errors = __nccwpck_require__(1255);
+module.exports.FileAttr = __nccwpck_require__(8321);
- if (next === CHAR_RIGHT_SQUARE_BRACKET) {
- brackets--;
+/***/ }),
- if (brackets === 0) {
- break;
- }
- }
- }
+/***/ 1291:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
- push({ type: 'text', value });
- continue;
- }
+var fs = __nccwpck_require__(2895).require(),
+ pth = __nccwpck_require__(5622);
- /**
- * Parentheses
- */
+fs.existsSync = fs.existsSync || pth.existsSync;
- if (value === CHAR_LEFT_PARENTHESES) {
- block = push({ type: 'paren', nodes: [] });
- stack.push(block);
- push({ type: 'text', value });
- continue;
- }
+module.exports = (function() {
- if (value === CHAR_RIGHT_PARENTHESES) {
- if (block.type !== 'paren') {
- push({ type: 'text', value });
- continue;
- }
- block = stack.pop();
- push({ type: 'text', value });
- block = stack[stack.length - 1];
- continue;
- }
+ var crcTable = [],
+ Constants = __nccwpck_require__(4522),
+ Errors = __nccwpck_require__(1255),
- /**
- * Quotes: '|"|`
- */
+ PATH_SEPARATOR = pth.sep;
- if (value === CHAR_DOUBLE_QUOTE || value === CHAR_SINGLE_QUOTE || value === CHAR_BACKTICK) {
- let open = value;
- let next;
- if (options.keepQuotes !== true) {
- value = '';
- }
+ function mkdirSync(/*String*/path) {
+ var resolvedPath = path.split(PATH_SEPARATOR)[0];
+ path.split(PATH_SEPARATOR).forEach(function(name) {
+ if (!name || name.substr(-1,1) === ":") return;
+ resolvedPath += PATH_SEPARATOR + name;
+ var stat;
+ try {
+ stat = fs.statSync(resolvedPath);
+ } catch (e) {
+ fs.mkdirSync(resolvedPath);
+ }
+ if (stat && stat.isFile())
+ throw Errors.FILE_IN_THE_WAY.replace("%s", resolvedPath);
+ });
+ }
- while (index < length && (next = advance())) {
- if (next === CHAR_BACKSLASH) {
- value += next + advance();
- continue;
+ function findSync(/*String*/dir, /*RegExp*/pattern, /*Boolean*/recoursive) {
+ if (typeof pattern === 'boolean') {
+ recoursive = pattern;
+ pattern = undefined;
}
+ var files = [];
+ fs.readdirSync(dir).forEach(function(file) {
+ var path = pth.join(dir, file);
- if (next === open) {
- if (options.keepQuotes === true) value += next;
- break;
- }
+ if (fs.statSync(path).isDirectory() && recoursive)
+ files = files.concat(findSync(path, pattern, recoursive));
- value += next;
- }
+ if (!pattern || pattern.test(path)) {
+ files.push(pth.normalize(path) + (fs.statSync(path).isDirectory() ? PATH_SEPARATOR : ""));
+ }
- push({ type: 'text', value });
- continue;
+ });
+ return files;
}
- /**
- * Left curly brace: '{'
- */
+ function readBigUInt64LE(/*Buffer*/buffer, /*int*/index) {
+ var slice = Buffer.from(buffer.slice(index, index + 8));
+ slice.swap64();
- if (value === CHAR_LEFT_CURLY_BRACE) {
- depth++;
+ return parseInt(`0x${ slice.toString('hex') }`);
+ }
- let dollar = prev.value && prev.value.slice(-1) === '$' || block.dollar === true;
- let brace = {
- type: 'brace',
- open: true,
- close: false,
- dollar,
- depth,
- commas: 0,
- ranges: 0,
- nodes: []
- };
+ return {
+ makeDir : function(/*String*/path) {
+ mkdirSync(path);
+ },
- block = push(brace);
- stack.push(block);
- push({ type: 'open', value });
- continue;
- }
+ crc32 : function(buf) {
+ if (typeof buf === 'string') {
+ buf = Buffer.alloc(buf.length, buf);
+ }
+ var b = Buffer.alloc(4);
+ if (!crcTable.length) {
+ for (var n = 0; n < 256; n++) {
+ var c = n;
+ for (var k = 8; --k >= 0;) //
+ if ((c & 1) !== 0) { c = 0xedb88320 ^ (c >>> 1); } else { c = c >>> 1; }
+ if (c < 0) {
+ b.writeInt32LE(c, 0);
+ c = b.readUInt32LE(0);
+ }
+ crcTable[n] = c;
+ }
+ }
+ var crc = 0, off = 0, len = buf.length, c1 = ~crc;
+ while(--len >= 0) c1 = crcTable[(c1 ^ buf[off++]) & 0xff] ^ (c1 >>> 8);
+ crc = ~c1;
+ b.writeInt32LE(crc & 0xffffffff, 0);
+ return b.readUInt32LE(0);
+ },
- /**
- * Right curly brace: '}'
- */
+ methodToString : function(/*Number*/method) {
+ switch (method) {
+ case Constants.STORED:
+ return 'STORED (' + method + ')';
+ case Constants.DEFLATED:
+ return 'DEFLATED (' + method + ')';
+ default:
+ return 'UNSUPPORTED (' + method + ')';
+ }
- if (value === CHAR_RIGHT_CURLY_BRACE) {
- if (block.type !== 'brace') {
- push({ type: 'text', value });
- continue;
- }
+ },
- let type = 'close';
- block = stack.pop();
- block.close = true;
+ writeFileTo : function(/*String*/path, /*Buffer*/content, /*Boolean*/overwrite, /*Number*/attr) {
+ if (fs.existsSync(path)) {
+ if (!overwrite)
+ return false; // cannot overwrite
- push({ type, value });
- depth--;
+ var stat = fs.statSync(path);
+ if (stat.isDirectory()) {
+ return false;
+ }
+ }
+ var folder = pth.dirname(path);
+ if (!fs.existsSync(folder)) {
+ mkdirSync(folder);
+ }
- block = stack[stack.length - 1];
- continue;
- }
+ var fd;
+ try {
+ fd = fs.openSync(path, 'w', 438); // 0666
+ } catch(e) {
+ fs.chmodSync(path, 438);
+ fd = fs.openSync(path, 'w', 438);
+ }
+ if (fd) {
+ try {
+ fs.writeSync(fd, content, 0, content.length, 0);
+ }
+ catch (e){
+ throw e;
+ }
+ finally {
+ fs.closeSync(fd);
+ }
+ }
+ fs.chmodSync(path, attr || 438);
+ return true;
+ },
- /**
- * Comma: ','
- */
+ writeFileToAsync : function(/*String*/path, /*Buffer*/content, /*Boolean*/overwrite, /*Number*/attr, /*Function*/callback) {
+ if(typeof attr === 'function') {
+ callback = attr;
+ attr = undefined;
+ }
- if (value === CHAR_COMMA && depth > 0) {
- if (block.ranges > 0) {
- block.ranges = 0;
- let open = block.nodes.shift();
- block.nodes = [open, { type: 'text', value: stringify(block) }];
- }
+ fs.exists(path, function(exists) {
+ if(exists && !overwrite)
+ return callback(false);
- push({ type: 'comma', value });
- block.commas++;
- continue;
- }
+ fs.stat(path, function(err, stat) {
+                if(exists && stat.isDirectory()) {
+ return callback(false);
+ }
- /**
- * Dot: '.'
- */
+ var folder = pth.dirname(path);
+ fs.exists(folder, function(exists) {
+ if(!exists)
+ mkdirSync(folder);
+
+ fs.open(path, 'w', 438, function(err, fd) {
+ if(err) {
+ fs.chmod(path, 438, function() {
+ fs.open(path, 'w', 438, function(err, fd) {
+ fs.write(fd, content, 0, content.length, 0, function() {
+ fs.close(fd, function() {
+ fs.chmod(path, attr || 438, function() {
+ callback(true);
+ })
+ });
+ });
+ });
+ })
+ } else {
+ if(fd) {
+ fs.write(fd, content, 0, content.length, 0, function() {
+ fs.close(fd, function() {
+ fs.chmod(path, attr || 438, function() {
+ callback(true);
+ })
+ });
+ });
+ } else {
+ fs.chmod(path, attr || 438, function() {
+ callback(true);
+ })
+ }
+ }
+ });
+ })
+ })
+ })
+ },
- if (value === CHAR_DOT && depth > 0 && block.commas === 0) {
- let siblings = block.nodes;
+ findFiles : function(/*String*/path) {
+ return findSync(path, true);
+ },
- if (depth === 0 || siblings.length === 0) {
- push({ type: 'text', value });
- continue;
- }
+ getAttributes : function(/*String*/path) {
- if (prev.type === 'dot') {
- block.range = [];
- prev.value += value;
- prev.type = 'range';
+ },
- if (block.nodes.length !== 3 && block.nodes.length !== 5) {
- block.invalid = true;
- block.ranges = 0;
- prev.type = 'text';
- continue;
- }
+ setAttributes : function(/*String*/path) {
- block.ranges++;
- block.args = [];
- continue;
- }
+ },
- if (prev.type === 'range') {
- siblings.pop();
+ toBuffer : function(input) {
+ if (Buffer.isBuffer(input)) {
+ return input;
+ } else {
+ if (input.length === 0) {
+ return Buffer.alloc(0)
+ }
+ return Buffer.from(input, 'utf8');
+ }
+ },
- let before = siblings[siblings.length - 1];
- before.value += prev.value + value;
- prev = before;
- block.ranges--;
- continue;
- }
+ readBigUInt64LE,
- push({ type: 'dot', value });
- continue;
+ Constants : Constants,
+ Errors : Errors
}
+})();
- /**
- * Text
- */
- push({ type: 'text', value });
- }
+/***/ }),
- // Mark imbalanced braces and brackets as invalid
- do {
- block = stack.pop();
+/***/ 4057:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
- if (block.type !== 'root') {
- block.nodes.forEach(node => {
- if (!node.nodes) {
- if (node.type === 'open') node.isOpen = true;
- if (node.type === 'close') node.isClose = true;
- if (!node.nodes) node.type = 'text';
- node.invalid = true;
- }
- });
+var Utils = __nccwpck_require__(5182),
+ Headers = __nccwpck_require__(4958),
+ Constants = Utils.Constants,
+ Methods = __nccwpck_require__(3928);
- // get the location of the block on parent.nodes (block's siblings)
- let parent = stack[stack.length - 1];
- let index = parent.nodes.indexOf(block);
- // replace the (invalid) block with it's nodes
- parent.nodes.splice(index, 1, ...block.nodes);
- }
- } while (stack.length > 0);
+module.exports = function (/*Buffer*/input) {
- push({ type: 'eos' });
- return ast;
-};
+ var _entryHeader = new Headers.EntryHeader(),
+ _entryName = Buffer.alloc(0),
+ _comment = Buffer.alloc(0),
+ _isDirectory = false,
+ uncompressedData = null,
+ _extra = Buffer.alloc(0);
-module.exports = parse;
+ function getCompressedDataFromZip() {
+ if (!input || !Buffer.isBuffer(input)) {
+ return Buffer.alloc(0);
+ }
+ _entryHeader.loadDataHeaderFromBinary(input);
+ return input.slice(_entryHeader.realDataOffset, _entryHeader.realDataOffset + _entryHeader.compressedSize)
+ }
+ function crc32OK(data) {
+ // if bit 3 (0x08) of the general-purpose flags field is set, then the CRC-32 and file sizes are not known when the header is written
+ if ((_entryHeader.flags & 0x8) !== 0x8) {
+ if (Utils.crc32(data) !== _entryHeader.dataHeader.crc) {
+ return false;
+ }
+ } else {
+ // @TODO: load and check data descriptor header
+ // The fields in the local header are filled with zero, and the CRC-32 and size are appended in a 12-byte structure
+ // (optionally preceded by a 4-byte signature) immediately after the compressed data:
+ }
+ return true;
+ }
-/***/ }),
+ function decompress(/*Boolean*/async, /*Function*/callback, /*String, Buffer*/pass) {
+ if(typeof callback === 'undefined' && typeof async === 'string') {
+ pass=async;
+ async=void 0;
+ }
+ if (_isDirectory) {
+ if (async && callback) {
+ callback(Buffer.alloc(0), Utils.Errors.DIRECTORY_CONTENT_ERROR); //si added error.
+ }
+ return Buffer.alloc(0);
+ }
-/***/ 4810:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+ var compressedData = getCompressedDataFromZip();
-"use strict";
+ if (compressedData.length === 0) {
+ // File is empty, nothing to decompress.
+ if (async && callback) callback(compressedData);
+ return compressedData;
+ }
+ if (_entryHeader.encripted){
+ if ('string' !== typeof pass && !Buffer.isBuffer(pass)){
+ throw new Error('ADM-ZIP: Incompatible password parameter');
+ }
+ compressedData = Methods.ZipCrypto.decrypt(compressedData, _entryHeader, pass);
+ }
-const utils = __nccwpck_require__(7691);
+ var data = Buffer.alloc(_entryHeader.size);
-module.exports = (ast, options = {}) => {
- let stringify = (node, parent = {}) => {
- let invalidBlock = options.escapeInvalid && utils.isInvalidBrace(parent);
- let invalidNode = node.invalid === true && options.escapeInvalid === true;
- let output = '';
+ switch (_entryHeader.method) {
+ case Utils.Constants.STORED:
+ compressedData.copy(data);
+ if (!crc32OK(data)) {
+ if (async && callback) callback(data, Utils.Errors.BAD_CRC);//si added error
+ throw new Error(Utils.Errors.BAD_CRC);
+ } else {//si added otherwise did not seem to return data.
+ if (async && callback) callback(data);
+ return data;
+ }
+ case Utils.Constants.DEFLATED:
+ var inflater = new Methods.Inflater(compressedData);
+ if (!async) {
+ var result = inflater.inflate(data);
+ result.copy(data, 0);
+ if (!crc32OK(data)) {
+ throw new Error(Utils.Errors.BAD_CRC + " " + _entryName.toString());
+ }
+ return data;
+ } else {
+ inflater.inflateAsync(function(result) {
+ result.copy(data, 0);
+ if (!crc32OK(data)) {
+ if (callback) callback(data, Utils.Errors.BAD_CRC); //si added error
+ } else { //si added otherwise did not seem to return data.
+ if (callback) callback(data);
+ }
+ })
+ }
+ break;
+ default:
+ if (async && callback) callback(Buffer.alloc(0), Utils.Errors.UNKNOWN_METHOD);
+ throw new Error(Utils.Errors.UNKNOWN_METHOD);
+ }
+ }
- if (node.value) {
- if ((invalidBlock || invalidNode) && utils.isOpenOrClose(node)) {
- return '\\' + node.value;
- }
- return node.value;
+ function compress(/*Boolean*/async, /*Function*/callback) {
+ if ((!uncompressedData || !uncompressedData.length) && Buffer.isBuffer(input)) {
+ // no data set or the data wasn't changed to require recompression
+ if (async && callback) callback(getCompressedDataFromZip());
+ return getCompressedDataFromZip();
+ }
+
+ if (uncompressedData.length && !_isDirectory) {
+ var compressedData;
+ // Local file header
+ switch (_entryHeader.method) {
+ case Utils.Constants.STORED:
+ _entryHeader.compressedSize = _entryHeader.size;
+
+ compressedData = Buffer.alloc(uncompressedData.length);
+ uncompressedData.copy(compressedData);
+
+ if (async && callback) callback(compressedData);
+ return compressedData;
+ default:
+ case Utils.Constants.DEFLATED:
+
+ var deflater = new Methods.Deflater(uncompressedData);
+ if (!async) {
+ var deflated = deflater.deflate();
+ _entryHeader.compressedSize = deflated.length;
+ return deflated;
+ } else {
+ deflater.deflateAsync(function(data) {
+ compressedData = Buffer.alloc(data.length);
+ _entryHeader.compressedSize = data.length;
+ data.copy(compressedData);
+ callback && callback(compressedData);
+ })
+ }
+ deflater = null;
+ break;
+ }
+ } else {
+ if (async && callback) {
+ callback(Buffer.alloc(0));
+ } else {
+ return Buffer.alloc(0);
+ }
+ }
}
- if (node.value) {
- return node.value;
+ function readUInt64LE(buffer, offset) {
+ return (buffer.readUInt32LE(offset + 4) << 4) + buffer.readUInt32LE(offset);
}
- if (node.nodes) {
- for (let child of node.nodes) {
- output += stringify(child);
- }
+ function parseExtra(data) {
+ var offset = 0;
+ var signature, size, part;
+        while (offset < data.length) {
+            signature = data.readUInt16LE(offset);
+            offset += 2;
+            size = data.readUInt16LE(offset);
+            offset += 2;
+            part = data.slice(offset, offset + size);
+            offset += size;
+            if (Constants.ID_ZIP64 === signature) {
+                parseZip64ExtendedInformation(part);
+            }
+        }
+    }
+
+    // Override header field values with values from the ZIP64 extra field
+    function parseZip64ExtendedInformation(data) {
+        var size, compressedSize, offset, diskNumStart;
+
+        if (data.length >= Constants.EF_ZIP64_SCOMP) {
+ size = readUInt64LE(data, Constants.EF_ZIP64_SUNCOMP);
+ if(_entryHeader.size === Constants.EF_ZIP64_OR_32) {
+ _entryHeader.size = size;
+ }
+ }
+ if(data.length >= Constants.EF_ZIP64_RHO) {
+ compressedSize = readUInt64LE(data, Constants.EF_ZIP64_SCOMP);
+ if(_entryHeader.compressedSize === Constants.EF_ZIP64_OR_32) {
+ _entryHeader.compressedSize = compressedSize;
+ }
+ }
+ if(data.length >= Constants.EF_ZIP64_DSN) {
+ offset = readUInt64LE(data, Constants.EF_ZIP64_RHO);
+ if(_entryHeader.offset === Constants.EF_ZIP64_OR_32) {
+ _entryHeader.offset = offset;
+ }
+ }
+ if(data.length >= Constants.EF_ZIP64_DSN+4) {
+ diskNumStart = data.readUInt32LE(Constants.EF_ZIP64_DSN);
+ if(_entryHeader.diskNumStart === Constants.EF_ZIP64_OR_16) {
+ _entryHeader.diskNumStart = diskNumStart;
+ }
+ }
+ }
+ return {
+ get entryName () { return _entryName.toString(); },
+ get rawEntryName() { return _entryName; },
+ set entryName (val) {
+ _entryName = Utils.toBuffer(val);
+ var lastChar = _entryName[_entryName.length - 1];
+ _isDirectory = (lastChar === 47) || (lastChar === 92);
+ _entryHeader.fileNameLength = _entryName.length;
+ },
-/***/ }),
+ get extra () { return _extra; },
+ set extra (val) {
+ _extra = val;
+ _entryHeader.extraLength = val.length;
+ parseExtra(val);
+ },
-/***/ 7691:
-/***/ ((__unused_webpack_module, exports) => {
+ get comment () { return _comment.toString(); },
+ set comment (val) {
+ _comment = Utils.toBuffer(val);
+ _entryHeader.commentLength = _comment.length;
+ },
-"use strict";
+ get name () { var n = _entryName.toString(); return _isDirectory ? n.substr(n.length - 1).split("/").pop() : n.split("/").pop(); },
+ get isDirectory () { return _isDirectory },
+ getCompressedData : function() {
+ return compress(false, null)
+ },
-exports.isInteger = num => {
- if (typeof num === 'number') {
- return Number.isInteger(num);
- }
- if (typeof num === 'string' && num.trim() !== '') {
- return Number.isInteger(Number(num));
- }
- return false;
-};
+ getCompressedDataAsync : function(/*Function*/callback) {
+ compress(true, callback)
+ },
-/**
- * Find a node of the given type
- */
+ setData : function(value) {
+ uncompressedData = Utils.toBuffer(value);
+ if (!_isDirectory && uncompressedData.length) {
+ _entryHeader.size = uncompressedData.length;
+ _entryHeader.method = Utils.Constants.DEFLATED;
+ _entryHeader.crc = Utils.crc32(value);
+ _entryHeader.changed = true;
+ } else { // folders and blank files should be stored
+ _entryHeader.method = Utils.Constants.STORED;
+ }
+ },
-exports.find = (node, type) => node.nodes.find(node => node.type === type);
+ getData : function(pass) {
+ if (_entryHeader.changed) {
+ return uncompressedData;
+ } else {
+ return decompress(false, null, pass);
+ }
+ },
-/**
- * Find a node of the given type
- */
+ getDataAsync : function(/*Function*/callback, pass) {
+ if (_entryHeader.changed) {
+ callback(uncompressedData)
+ } else {
+ decompress(true, callback, pass)
+ }
+ },
-exports.exceedsLimit = (min, max, step = 1, limit) => {
- if (limit === false) return false;
- if (!exports.isInteger(min) || !exports.isInteger(max)) return false;
- return ((Number(max) - Number(min)) / Number(step)) >= limit;
-};
+ set attr(attr) { _entryHeader.attr = attr; },
+ get attr() { return _entryHeader.attr; },
-/**
- * Escape the given node with '\\' before node.value
- */
+ set header(/*Buffer*/data) {
+ _entryHeader.loadFromBinary(data);
+ },
-exports.escapeNode = (block, n = 0, type) => {
- let node = block.nodes[n];
- if (!node) return;
+ get header() {
+ return _entryHeader;
+ },
- if ((type && node.type === type) || node.type === 'open' || node.type === 'close') {
- if (node.escaped !== true) {
- node.value = '\\' + node.value;
- node.escaped = true;
+ packHeader : function() {
+ var header = _entryHeader.entryHeaderToBinary();
+ // add
+ _entryName.copy(header, Utils.Constants.CENHDR);
+ if (_entryHeader.extraLength) {
+ _extra.copy(header, Utils.Constants.CENHDR + _entryName.length)
+ }
+ if (_entryHeader.commentLength) {
+ _comment.copy(header, Utils.Constants.CENHDR + _entryName.length + _entryHeader.extraLength, _comment.length);
+ }
+ return header;
+ },
+
+ toString : function() {
+ return '{\n' +
+ '\t"entryName" : "' + _entryName.toString() + "\",\n" +
+ '\t"name" : "' + (_isDirectory ? _entryName.toString().replace(/\/$/, '').split("/").pop() : _entryName.toString().split("/").pop()) + "\",\n" +
+ '\t"comment" : "' + _comment.toString() + "\",\n" +
+ '\t"isDirectory" : ' + _isDirectory + ",\n" +
+ '\t"header" : ' + _entryHeader.toString().replace(/\t/mg, "\t\t").replace(/}/mg, "\t}") + ",\n" +
+ '\t"compressedData" : <' + (input && input.length + " bytes buffer" || "null") + ">\n" +
+ '\t"data" : <' + (uncompressedData && uncompressedData.length + " bytes buffer" || "null") + ">\n" +
+ '}';
+ }
}
- }
};
-/**
- * Returns true if the given brace node should be enclosed in literal braces
- */
-exports.encloseBrace = node => {
- if (node.type !== 'brace') return false;
- if ((node.commas >> 0 + node.ranges >> 0) === 0) {
- node.invalid = true;
- return true;
- }
- return false;
-};
+/***/ }),
-/**
- * Returns true if a brace node is invalid.
- */
+/***/ 7744:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
-exports.isInvalidBrace = block => {
- if (block.type !== 'brace') return false;
- if (block.invalid === true || block.dollar) return true;
- if ((block.commas >> 0 + block.ranges >> 0) === 0) {
- block.invalid = true;
- return true;
- }
- if (block.open !== true || block.close !== true) {
- block.invalid = true;
- return true;
- }
- return false;
-};
+var ZipEntry = __nccwpck_require__(4057),
+ Headers = __nccwpck_require__(4958),
+ Utils = __nccwpck_require__(5182);
+
+module.exports = function (/*String|Buffer*/input, /*Number*/inputType) {
+ var entryList = [],
+ entryTable = {},
+ _comment = Buffer.alloc(0),
+ filename = "",
+ fs = Utils.FileSystem.require(),
+ inBuffer = null,
+ mainHeader = new Headers.MainHeader(),
+ loadedEntries = false;
+
+ if (inputType === Utils.Constants.FILE) {
+ // is a filename
+ filename = input;
+ inBuffer = fs.readFileSync(filename);
+ readMainHeader();
+ } else if (inputType === Utils.Constants.BUFFER) {
+ // is a memory buffer
+ inBuffer = input;
+ readMainHeader();
+ } else {
+ // none. is a new file
+ loadedEntries = true;
+ }
-/**
- * Returns true if a node is an open or close node
- */
+ function iterateEntries(callback) {
+ const totalEntries = mainHeader.diskEntries; // total number of entries
+ let index = mainHeader.offset; // offset of first CEN header
-exports.isOpenOrClose = node => {
- if (node.type === 'open' || node.type === 'close') {
- return true;
- }
- return node.open === true || node.close === true;
-};
+ for (let i = 0; i < totalEntries; i++) {
+ let tmp = index;
+ const entry = new ZipEntry(inBuffer);
-/**
- * Reduce an array of text nodes.
- */
+ entry.header = inBuffer.slice(tmp, tmp += Utils.Constants.CENHDR);
+ entry.entryName = inBuffer.slice(tmp, tmp += entry.header.fileNameLength);
-exports.reduce = nodes => nodes.reduce((acc, node) => {
- if (node.type === 'text') acc.push(node.value);
- if (node.type === 'range') node.type = 'text';
- return acc;
-}, []);
+ index += entry.header.entryHeaderSize;
-/**
- * Flatten an array
- */
+ callback(entry);
+ }
+ }
-exports.flatten = (...args) => {
- const result = [];
- const flat = arr => {
- for (let i = 0; i < arr.length; i++) {
- let ele = arr[i];
- Array.isArray(ele) ? flat(ele, result) : ele !== void 0 && result.push(ele);
- }
- return result;
- };
- flat(args);
- return result;
-};
+ function readEntries() {
+ loadedEntries = true;
+ entryTable = {};
+ entryList = new Array(mainHeader.diskEntries); // total number of entries
+ var index = mainHeader.offset; // offset of first CEN header
+ for (var i = 0; i < entryList.length; i++) {
+ var tmp = index,
+ entry = new ZipEntry(inBuffer);
+ entry.header = inBuffer.slice(tmp, tmp += Utils.Constants.CENHDR);
-/***/ }),
+ entry.entryName = inBuffer.slice(tmp, tmp += entry.header.fileNameLength);
-/***/ 791:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+ if (entry.header.extraLength) {
+ entry.extra = inBuffer.slice(tmp, tmp += entry.header.extraLength);
+ }
-"use strict";
-/*!
- * fill-range
- *
- * Copyright (c) 2014-present, Jon Schlinkert.
- * Licensed under the MIT License.
- */
+ if (entry.header.commentLength)
+ entry.comment = inBuffer.slice(tmp, tmp + entry.header.commentLength);
+ index += entry.header.entryHeaderSize;
+ entryList[i] = entry;
+ entryTable[entry.entryName] = entry;
+ }
+ }
-const util = __nccwpck_require__(1669);
-const toRegexRange = __nccwpck_require__(6867);
+ function readMainHeader() {
+ var i = inBuffer.length - Utils.Constants.ENDHDR, // END header size
+ max = Math.max(0, i - 0xFFFF), // 0xFFFF is the max zip file comment length
+ n = max,
+ endStart = inBuffer.length,
+ endOffset = -1, // Start offset of the END header
+ commentEnd = 0;
+
+ for (i; i >= n; i--) {
+ if (inBuffer[i] !== 0x50) continue; // quick check that the byte is 'P'
+ if (inBuffer.readUInt32LE(i) === Utils.Constants.ENDSIG) { // "PK\005\006"
+ endOffset = i;
+ commentEnd = i;
+ endStart = i + Utils.Constants.ENDHDR;
+ // We already found a regular signature, let's look just a bit further to check if there's any zip64 signature
+ n = i - Utils.Constants.END64HDR;
+ continue;
+ }
-const isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val);
+ if (inBuffer.readUInt32LE(i) === Utils.Constants.END64SIG) {
+ // Found a zip64 signature, let's continue reading the whole zip64 record
+ n = max;
+ continue;
+ }
-const transform = toNumber => {
- return value => toNumber === true ? Number(value) : String(value);
-};
+ if (inBuffer.readUInt32LE(i) == Utils.Constants.ZIP64SIG) {
+ // Found the zip64 record, let's determine it's size
+ endOffset = i;
+ endStart = i + Utils.readBigUInt64LE(inBuffer, i + Utils.Constants.ZIP64SIZE) + Utils.Constants.ZIP64LEAD;
+ break;
+ }
+ }
-const isValidValue = value => {
- return typeof value === 'number' || (typeof value === 'string' && value !== '');
-};
+ if (!~endOffset)
+ throw new Error(Utils.Errors.INVALID_FORMAT);
-const isNumber = num => Number.isInteger(+num);
+ mainHeader.loadFromBinary(inBuffer.slice(endOffset, endStart));
+ if (mainHeader.commentLength) {
+ _comment = inBuffer.slice(commentEnd + Utils.Constants.ENDHDR);
+ }
+ // readEntries();
+ }
-const zeros = input => {
- let value = `${input}`;
- let index = -1;
- if (value[0] === '-') value = value.slice(1);
- if (value === '0') return false;
- while (value[++index] === '0');
- return index > 0;
-};
+ return {
+ /**
+ * Returns an array of ZipEntry objects existent in the current opened archive
+ * @return Array
+ */
+ get entries() {
+ if (!loadedEntries) {
+ readEntries();
+ }
+ return entryList;
+ },
+
+ /**
+ * Archive comment
+ * @return {String}
+ */
+ get comment() {
+ return _comment.toString();
+ },
+ set comment(val) {
+ _comment = Utils.toBuffer(val);
+ mainHeader.commentLength = _comment.length;
+ },
+
+ getEntryCount: function() {
+ if (!loadedEntries) {
+ return mainHeader.diskEntries;
+ }
-const stringify = (start, end, options) => {
- if (typeof start === 'string' || typeof end === 'string') {
- return true;
- }
- return options.stringify === true;
-};
+ return entryList.length;
+ },
-const pad = (input, maxLength, toNumber) => {
- if (maxLength > 0) {
- let dash = input[0] === '-' ? '-' : '';
- if (dash) input = input.slice(1);
- input = (dash + input.padStart(dash ? maxLength - 1 : maxLength, '0'));
- }
- if (toNumber === false) {
- return String(input);
- }
- return input;
-};
+ forEach: function(callback) {
+ if (!loadedEntries) {
+ iterateEntries(callback);
+ return;
+ }
-const toMaxLen = (input, maxLength) => {
- let negative = input[0] === '-' ? '-' : '';
- if (negative) {
- input = input.slice(1);
- maxLength--;
- }
- while (input.length < maxLength) input = '0' + input;
- return negative ? ('-' + input) : input;
-};
-
-const toSequence = (parts, options) => {
- parts.negatives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0);
- parts.positives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0);
+ entryList.forEach(callback);
+ },
+
+ /**
+ * Returns a reference to the entry with the given name or null if entry is inexistent
+ *
+ * @param entryName
+ * @return ZipEntry
+ */
+ getEntry: function (/*String*/entryName) {
+ if (!loadedEntries) {
+ readEntries();
+ }
+ return entryTable[entryName] || null;
+ },
+
+ /**
+ * Adds the given entry to the entry list
+ *
+ * @param entry
+ */
+ setEntry: function (/*ZipEntry*/entry) {
+ if (!loadedEntries) {
+ readEntries();
+ }
+ entryList.push(entry);
+ entryTable[entry.entryName] = entry;
+ mainHeader.totalEntries = entryList.length;
+ },
+
+ /**
+ * Removes the entry with the given name from the entry list.
+ *
+ * If the entry is a directory, then all nested files and directories will be removed
+ * @param entryName
+ */
+ deleteEntry: function (/*String*/entryName) {
+ if (!loadedEntries) {
+ readEntries();
+ }
+ var entry = entryTable[entryName];
+ if (entry && entry.isDirectory) {
+ var _self = this;
+ this.getEntryChildren(entry).forEach(function (child) {
+ if (child.entryName !== entryName) {
+ _self.deleteEntry(child.entryName)
+ }
+ })
+ }
+ entryList.splice(entryList.indexOf(entry), 1);
+ delete(entryTable[entryName]);
+ mainHeader.totalEntries = entryList.length;
+ },
+
+ /**
+ * Iterates and returns all nested files and directories of the given entry
+ *
+ * @param entry
+ * @return Array
+ */
+ getEntryChildren: function (/*ZipEntry*/entry) {
+ if (!loadedEntries) {
+ readEntries();
+ }
+ if (entry.isDirectory) {
+ var list = [],
+ name = entry.entryName,
+ len = name.length;
+
+ entryList.forEach(function (zipEntry) {
+ if (zipEntry.entryName.substr(0, len) === name) {
+ list.push(zipEntry);
+ }
+ });
+ return list;
+ }
+ return []
+ },
+
+ /**
+ * Returns the zip file
+ *
+ * @return Buffer
+ */
+ compressToBuffer: function () {
+ if (!loadedEntries) {
+ readEntries();
+ }
+ if (entryList.length > 1) {
+ entryList.sort(function (a, b) {
+ var nameA = a.entryName.toLowerCase();
+ var nameB = b.entryName.toLowerCase();
+ if (nameA < nameB) {
+ return -1
+ }
+ if (nameA > nameB) {
+ return 1
+ }
+ return 0;
+ });
+ }
- let prefix = options.capture ? '' : '?:';
- let positives = '';
- let negatives = '';
- let result;
+ var totalSize = 0,
+ dataBlock = [],
+ entryHeaders = [],
+ dindex = 0;
+
+ mainHeader.size = 0;
+ mainHeader.offset = 0;
+
+ entryList.forEach(function (entry) {
+ // compress data and set local and entry header accordingly. Reason why is called first
+ var compressedData = entry.getCompressedData();
+ // data header
+ entry.header.offset = dindex;
+ var dataHeader = entry.header.dataHeaderToBinary();
+ var entryNameLen = entry.rawEntryName.length;
+ var extra = entry.extra.toString();
+ var postHeader = Buffer.alloc(entryNameLen + extra.length);
+ entry.rawEntryName.copy(postHeader, 0);
+ postHeader.fill(extra, entryNameLen);
+
+ var dataLength = dataHeader.length + postHeader.length + compressedData.length;
+
+ dindex += dataLength;
+
+ dataBlock.push(dataHeader);
+ dataBlock.push(postHeader);
+ dataBlock.push(compressedData);
+
+ var entryHeader = entry.packHeader();
+ entryHeaders.push(entryHeader);
+ mainHeader.size += entryHeader.length;
+ totalSize += (dataLength + entryHeader.length);
+ });
- if (parts.positives.length) {
- positives = parts.positives.join('|');
- }
+ totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length
+ // point to end of data and beginning of central directory first record
+ mainHeader.offset = dindex;
- if (parts.negatives.length) {
- negatives = `-(${prefix}${parts.negatives.join('|')})`;
- }
+ dindex = 0;
+ var outBuffer = Buffer.alloc(totalSize);
+ dataBlock.forEach(function (content) {
+ content.copy(outBuffer, dindex); // write data blocks
+ dindex += content.length;
+ });
+ entryHeaders.forEach(function (content) {
+ content.copy(outBuffer, dindex); // write central directory entries
+ dindex += content.length;
+ });
- if (positives && negatives) {
- result = `${positives}|${negatives}`;
- } else {
- result = positives || negatives;
- }
+ var mh = mainHeader.toBinary();
+ if (_comment) {
+ Buffer.from(_comment).copy(mh, Utils.Constants.ENDHDR); // add zip file comment
+ }
- if (options.wrap) {
- return `(${prefix}${result})`;
- }
+ mh.copy(outBuffer, dindex); // write main header
- return result;
-};
+ return outBuffer
+ },
-const toRange = (a, b, isNumbers, options) => {
- if (isNumbers) {
- return toRegexRange(a, b, { wrap: false, ...options });
- }
+ toAsyncBuffer: function (/*Function*/onSuccess, /*Function*/onFail, /*Function*/onItemStart, /*Function*/onItemEnd) {
+ if (!loadedEntries) {
+ readEntries();
+ }
+ if (entryList.length > 1) {
+ entryList.sort(function (a, b) {
+ var nameA = a.entryName.toLowerCase();
+ var nameB = b.entryName.toLowerCase();
+ if (nameA > nameB) {
+ return -1
+ }
+ if (nameA < nameB) {
+ return 1
+ }
+ return 0;
+ });
+ }
- let start = String.fromCharCode(a);
- if (a === b) return start;
+ var totalSize = 0,
+ dataBlock = [],
+ entryHeaders = [],
+ dindex = 0;
+
+ mainHeader.size = 0;
+ mainHeader.offset = 0;
+
+ var compress = function (entryList) {
+ var self = arguments.callee;
+ if (entryList.length) {
+ var entry = entryList.pop();
+ var name = entry.entryName + entry.extra.toString();
+ if (onItemStart) onItemStart(name);
+ entry.getCompressedDataAsync(function (compressedData) {
+ if (onItemEnd) onItemEnd(name);
+
+ entry.header.offset = dindex;
+ // data header
+ var dataHeader = entry.header.dataHeaderToBinary();
+ var postHeader;
+ try {
+ postHeader = Buffer.alloc(name.length, name); // using alloc will work on node 5.x+
+ } catch(e){
+ postHeader = new Buffer(name); // use deprecated method if alloc fails...
+ }
+ var dataLength = dataHeader.length + postHeader.length + compressedData.length;
+
+ dindex += dataLength;
+
+ dataBlock.push(dataHeader);
+ dataBlock.push(postHeader);
+ dataBlock.push(compressedData);
+
+ var entryHeader = entry.packHeader();
+ entryHeaders.push(entryHeader);
+ mainHeader.size += entryHeader.length;
+ totalSize += (dataLength + entryHeader.length);
+
+ if (entryList.length) {
+ self(entryList);
+ } else {
+
+
+ totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length
+ // point to end of data and beginning of central directory first record
+ mainHeader.offset = dindex;
+
+ dindex = 0;
+ var outBuffer = Buffer.alloc(totalSize);
+ dataBlock.forEach(function (content) {
+ content.copy(outBuffer, dindex); // write data blocks
+ dindex += content.length;
+ });
+ entryHeaders.forEach(function (content) {
+ content.copy(outBuffer, dindex); // write central directory entries
+ dindex += content.length;
+ });
+
+ var mh = mainHeader.toBinary();
+ if (_comment) {
+ _comment.copy(mh, Utils.Constants.ENDHDR); // add zip file comment
+ }
- let stop = String.fromCharCode(b);
- return `[${start}-${stop}]`;
-};
+ mh.copy(outBuffer, dindex); // write main header
-const toRegex = (start, end, options) => {
- if (Array.isArray(start)) {
- let wrap = options.wrap === true;
- let prefix = options.capture ? '' : '?:';
- return wrap ? `(${prefix}${start.join('|')})` : start.join('|');
- }
- return toRegexRange(start, end, options);
-};
+ onSuccess(outBuffer);
+ }
+ });
+ }
+ };
-const rangeError = (...args) => {
- return new RangeError('Invalid range arguments: ' + util.inspect(...args));
+ compress(entryList);
+ }
+ }
};
-const invalidRange = (start, end, options) => {
- if (options.strictRanges === true) throw rangeError([start, end]);
- return [];
-};
-const invalidStep = (step, options) => {
- if (options.strictRanges === true) {
- throw new TypeError(`Expected step "${step}" to be a number`);
- }
- return [];
-};
+/***/ }),
-const fillNumbers = (start, end, step = 1, options = {}) => {
- let a = Number(start);
- let b = Number(end);
+/***/ 3682:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
- if (!Number.isInteger(a) || !Number.isInteger(b)) {
- if (options.strictRanges === true) throw rangeError([start, end]);
- return [];
- }
+var register = __nccwpck_require__(4670)
+var addHook = __nccwpck_require__(5549)
+var removeHook = __nccwpck_require__(6819)
- // fix negative zero
- if (a === 0) a = 0;
- if (b === 0) b = 0;
+// bind with array of arguments: https://stackoverflow.com/a/21792913
+var bind = Function.bind
+var bindable = bind.bind(bind)
- let descending = a > b;
- let startString = String(start);
- let endString = String(end);
- let stepString = String(step);
- step = Math.max(Math.abs(step), 1);
+function bindApi (hook, state, name) {
+ var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state])
+ hook.api = { remove: removeHookRef }
+ hook.remove = removeHookRef
- let padded = zeros(startString) || zeros(endString) || zeros(stepString);
- let maxLen = padded ? Math.max(startString.length, endString.length, stepString.length) : 0;
- let toNumber = padded === false && stringify(start, end, options) === false;
- let format = options.transform || transform(toNumber);
+ ;['before', 'error', 'after', 'wrap'].forEach(function (kind) {
+ var args = name ? [state, kind, name] : [state, kind]
+ hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args)
+ })
+}
- if (options.toRegex && step === 1) {
- return toRange(toMaxLen(start, maxLen), toMaxLen(end, maxLen), true, options);
+function HookSingular () {
+ var singularHookName = 'h'
+ var singularHookState = {
+ registry: {}
}
+ var singularHook = register.bind(null, singularHookState, singularHookName)
+ bindApi(singularHook, singularHookState, singularHookName)
+ return singularHook
+}
- let parts = { negatives: [], positives: [] };
- let push = num => parts[num < 0 ? 'negatives' : 'positives'].push(Math.abs(num));
- let range = [];
- let index = 0;
-
- while (descending ? a >= b : a <= b) {
- if (options.toRegex === true && step > 1) {
- push(a);
- } else {
- range.push(pad(format(a, index), maxLen, toNumber));
- }
- a = descending ? a - step : a + step;
- index++;
+function HookCollection () {
+ var state = {
+ registry: {}
}
- if (options.toRegex === true) {
- return step > 1
- ? toSequence(parts, options)
- : toRegex(range, null, { wrap: false, ...options });
- }
+ var hook = register.bind(null, state)
+ bindApi(hook, state)
- return range;
-};
+ return hook
+}
-const fillLetters = (start, end, step = 1, options = {}) => {
- if ((!isNumber(start) && start.length > 1) || (!isNumber(end) && end.length > 1)) {
- return invalidRange(start, end, options);
+var collectionHookDeprecationMessageDisplayed = false
+function Hook () {
+ if (!collectionHookDeprecationMessageDisplayed) {
+ console.warn('[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4')
+ collectionHookDeprecationMessageDisplayed = true
}
+ return HookCollection()
+}
+Hook.Singular = HookSingular.bind()
+Hook.Collection = HookCollection.bind()
- let format = options.transform || (val => String.fromCharCode(val));
- let a = `${start}`.charCodeAt(0);
- let b = `${end}`.charCodeAt(0);
+module.exports = Hook
+// expose constructors as a named property for TypeScript
+module.exports.Hook = Hook
+module.exports.Singular = Hook.Singular
+module.exports.Collection = Hook.Collection
- let descending = a > b;
- let min = Math.min(a, b);
- let max = Math.max(a, b);
- if (options.toRegex && step === 1) {
- return toRange(min, max, false, options);
- }
+/***/ }),
- let range = [];
- let index = 0;
+/***/ 5549:
+/***/ ((module) => {
- while (descending ? a >= b : a <= b) {
- range.push(format(a, index));
- a = descending ? a - step : a + step;
- index++;
- }
+module.exports = addHook
- if (options.toRegex === true) {
- return toRegex(range, null, { wrap: false, options });
+function addHook (state, kind, name, hook) {
+ var orig = hook
+ if (!state.registry[name]) {
+ state.registry[name] = []
}
- return range;
-};
-
-const fill = (start, end, step, options = {}) => {
- if (end == null && isValidValue(start)) {
- return [start];
+ if (kind === 'before') {
+ hook = function (method, options) {
+ return Promise.resolve()
+ .then(orig.bind(null, options))
+ .then(method.bind(null, options))
+ }
}
- if (!isValidValue(start) || !isValidValue(end)) {
- return invalidRange(start, end, options);
+ if (kind === 'after') {
+ hook = function (method, options) {
+ var result
+ return Promise.resolve()
+ .then(method.bind(null, options))
+ .then(function (result_) {
+ result = result_
+ return orig(result, options)
+ })
+ .then(function () {
+ return result
+ })
+ }
}
- if (typeof step === 'function') {
- return fill(start, end, 1, { transform: step });
+ if (kind === 'error') {
+ hook = function (method, options) {
+ return Promise.resolve()
+ .then(method.bind(null, options))
+ .catch(function (error) {
+ return orig(error, options)
+ })
+ }
}
- if (isObject(step)) {
- return fill(start, end, 0, step);
- }
+ state.registry[name].push({
+ hook: hook,
+ orig: orig
+ })
+}
- let opts = { ...options };
- if (opts.capture === true) opts.wrap = true;
- step = step || opts.step || 1;
- if (!isNumber(step)) {
- if (step != null && !isObject(step)) return invalidStep(step, opts);
- return fill(start, end, 1, step);
- }
+/***/ }),
- if (isNumber(start) && isNumber(end)) {
- return fillNumbers(start, end, step, opts);
+/***/ 4670:
+/***/ ((module) => {
+
+module.exports = register
+
+function register (state, name, method, options) {
+ if (typeof method !== 'function') {
+ throw new Error('method for before hook must be a function')
}
- return fillLetters(start, end, Math.max(Math.abs(step), 1), opts);
-};
+ if (!options) {
+ options = {}
+ }
-module.exports = fill;
+ if (Array.isArray(name)) {
+ return name.reverse().reduce(function (callback, name) {
+ return register.bind(null, state, name, callback, options)
+ }, method)()
+ }
+
+ return Promise.resolve()
+ .then(function () {
+ if (!state.registry[name]) {
+ return method(options)
+ }
+
+ return (state.registry[name]).reduce(function (method, registered) {
+ return registered.hook.bind(null, method, options)
+ }, method)()
+ })
+}
/***/ }),
-/***/ 2840:
+/***/ 6819:
/***/ ((module) => {
-"use strict";
-/*!
- * is-number
- *
- * Copyright (c) 2014-present, Jon Schlinkert.
- * Released under the MIT License.
- */
+module.exports = removeHook
+function removeHook (state, name, method) {
+ if (!state.registry[name]) {
+ return
+ }
+ var index = state.registry[name]
+ .map(function (registered) { return registered.orig })
+ .indexOf(method)
-module.exports = function(num) {
- if (typeof num === 'number') {
- return num - num === 0;
- }
- if (typeof num === 'string' && num.trim() !== '') {
- return Number.isFinite ? Number.isFinite(+num) : isFinite(+num);
+ if (index === -1) {
+ return
}
- return false;
-};
+
+ state.registry[name].splice(index, 1)
+}
/***/ }),
-/***/ 3913:
+/***/ 2286:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
+const {
+ V4MAPPED,
+ ADDRCONFIG,
+ ALL,
+ promises: {
+ Resolver: AsyncResolver
+ },
+ lookup: dnsLookup
+} = __nccwpck_require__(881);
+const {promisify} = __nccwpck_require__(1669);
+const os = __nccwpck_require__(2087);
-const util = __nccwpck_require__(1669);
-const braces = __nccwpck_require__(5582);
-const picomatch = __nccwpck_require__(8569);
-const utils = __nccwpck_require__(479);
-const isEmptyString = val => typeof val === 'string' && (val === '' || val === './');
+const kCacheableLookupCreateConnection = Symbol('cacheableLookupCreateConnection');
+const kCacheableLookupInstance = Symbol('cacheableLookupInstance');
+const kExpires = Symbol('expires');
-/**
- * Returns an array of strings that match one or more glob patterns.
- *
- * ```js
- * const mm = require('micromatch');
- * // mm(list, patterns[, options]);
- *
- * console.log(mm(['a.js', 'a.txt'], ['*.js']));
- * //=> [ 'a.js' ]
- * ```
- * @param {String|Array} list List of strings to match.
- * @param {String|Array} patterns One or more glob patterns to use for matching.
- * @param {Object} options See available [options](#options)
- * @return {Array} Returns an array of matches
- * @summary false
- * @api public
- */
+const supportsALL = typeof ALL === 'number';
-const micromatch = (list, patterns, options) => {
- patterns = [].concat(patterns);
- list = [].concat(list);
+const verifyAgent = agent => {
+ if (!(agent && typeof agent.createConnection === 'function')) {
+ throw new Error('Expected an Agent instance as the first argument');
+ }
+};
- let omit = new Set();
- let keep = new Set();
- let items = new Set();
- let negatives = 0;
+const map4to6 = entries => {
+ for (const entry of entries) {
+ if (entry.family === 6) {
+ continue;
+ }
- let onResult = state => {
- items.add(state.output);
- if (options && options.onResult) {
- options.onResult(state);
- }
- };
+ entry.address = `::ffff:${entry.address}`;
+ entry.family = 6;
+ }
+};
- for (let i = 0; i < patterns.length; i++) {
- let isMatch = picomatch(String(patterns[i]), { ...options, onResult }, true);
- let negated = isMatch.state.negated || isMatch.state.negatedExtglob;
- if (negated) negatives++;
+const getIfaceInfo = () => {
+ let has4 = false;
+ let has6 = false;
- for (let item of list) {
- let matched = isMatch(item, true);
+ for (const device of Object.values(os.networkInterfaces())) {
+ for (const iface of device) {
+ if (iface.internal) {
+ continue;
+ }
- let match = negated ? !matched.isMatch : matched.isMatch;
- if (!match) continue;
+ if (iface.family === 'IPv6') {
+ has6 = true;
+ } else {
+ has4 = true;
+ }
- if (negated) {
- omit.add(matched.output);
- } else {
- omit.delete(matched.output);
- keep.add(matched.output);
- }
- }
- }
+ if (has4 && has6) {
+ return {has4, has6};
+ }
+ }
+ }
- let result = negatives === patterns.length ? [...items] : [...keep];
- let matches = result.filter(item => !omit.has(item));
+ return {has4, has6};
+};
- if (options && matches.length === 0) {
- if (options.failglob === true) {
- throw new Error(`No matches found for "${patterns.join(', ')}"`);
- }
+const isIterable = map => {
+ return Symbol.iterator in map;
+};
- if (options.nonull === true || options.nullglob === true) {
- return options.unescape ? patterns.map(p => p.replace(/\\/g, '')) : patterns;
- }
- }
+const ttl = {ttl: true};
+const all = {all: true};
+
+class CacheableLookup {
+ constructor({
+ cache = new Map(),
+ maxTtl = Infinity,
+ fallbackDuration = 3600,
+ errorTtl = 0.15,
+ resolver = new AsyncResolver(),
+ lookup = dnsLookup
+ } = {}) {
+ this.maxTtl = maxTtl;
+ this.errorTtl = errorTtl;
+
+ this._cache = cache;
+ this._resolver = resolver;
+ this._dnsLookup = promisify(lookup);
+
+ if (this._resolver instanceof AsyncResolver) {
+ this._resolve4 = this._resolver.resolve4.bind(this._resolver);
+ this._resolve6 = this._resolver.resolve6.bind(this._resolver);
+ } else {
+ this._resolve4 = promisify(this._resolver.resolve4.bind(this._resolver));
+ this._resolve6 = promisify(this._resolver.resolve6.bind(this._resolver));
+ }
- return matches;
-};
+ this._iface = getIfaceInfo();
-/**
- * Backwards compatibility
- */
+ this._pending = {};
+ this._nextRemovalTime = false;
+ this._hostnamesToFallback = new Set();
-micromatch.match = micromatch;
+ if (fallbackDuration < 1) {
+ this._fallback = false;
+ } else {
+ this._fallback = true;
-/**
- * Returns a matcher function from the given glob `pattern` and `options`.
- * The returned function takes a string to match as its only argument and returns
- * true if the string is a match.
- *
- * ```js
- * const mm = require('micromatch');
- * // mm.matcher(pattern[, options]);
- *
- * const isMatch = mm.matcher('*.!(*a)');
- * console.log(isMatch('a.a')); //=> false
- * console.log(isMatch('a.b')); //=> true
- * ```
- * @param {String} `pattern` Glob pattern
- * @param {Object} `options`
- * @return {Function} Returns a matcher function.
- * @api public
- */
+ const interval = setInterval(() => {
+ this._hostnamesToFallback.clear();
+ }, fallbackDuration * 1000);
-micromatch.matcher = (pattern, options) => picomatch(pattern, options);
+ /* istanbul ignore next: There is no `interval.unref()` when running inside an Electron renderer */
+ if (interval.unref) {
+ interval.unref();
+ }
+ }
-/**
- * Returns true if **any** of the given glob `patterns` match the specified `string`.
- *
- * ```js
- * const mm = require('micromatch');
- * // mm.isMatch(string, patterns[, options]);
- *
- * console.log(mm.isMatch('a.a', ['b.*', '*.a'])); //=> true
- * console.log(mm.isMatch('a.a', 'b.*')); //=> false
- * ```
- * @param {String} str The string to test.
- * @param {String|Array} patterns One or more glob patterns to use for matching.
- * @param {Object} [options] See available [options](#options).
- * @return {Boolean} Returns true if any patterns match `str`
- * @api public
- */
+ this.lookup = this.lookup.bind(this);
+ this.lookupAsync = this.lookupAsync.bind(this);
+ }
-micromatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str);
+ set servers(servers) {
+ this.clear();
-/**
- * Backwards compatibility
- */
+ this._resolver.setServers(servers);
+ }
-micromatch.any = micromatch.isMatch;
+ get servers() {
+ return this._resolver.getServers();
+ }
-/**
- * Returns a list of strings that _**do not match any**_ of the given `patterns`.
- *
- * ```js
- * const mm = require('micromatch');
- * // mm.not(list, patterns[, options]);
- *
- * console.log(mm.not(['a.a', 'b.b', 'c.c'], '*.a'));
- * //=> ['b.b', 'c.c']
- * ```
- * @param {Array} `list` Array of strings to match.
- * @param {String|Array} `patterns` One or more glob pattern to use for matching.
- * @param {Object} `options` See available [options](#options) for changing how matches are performed
- * @return {Array} Returns an array of strings that **do not match** the given patterns.
- * @api public
- */
+ lookup(hostname, options, callback) {
+ if (typeof options === 'function') {
+ callback = options;
+ options = {};
+ } else if (typeof options === 'number') {
+ options = {
+ family: options
+ };
+ }
-micromatch.not = (list, patterns, options = {}) => {
- patterns = [].concat(patterns).map(String);
- let result = new Set();
- let items = [];
+ if (!callback) {
+ throw new Error('Callback must be a function.');
+ }
- let onResult = state => {
- if (options.onResult) options.onResult(state);
- items.push(state.output);
- };
+ // eslint-disable-next-line promise/prefer-await-to-then
+ this.lookupAsync(hostname, options).then(result => {
+ if (options.all) {
+ callback(null, result);
+ } else {
+ callback(null, result.address, result.family, result.expires, result.ttl);
+ }
+ }, callback);
+ }
- let matches = micromatch(list, patterns, { ...options, onResult });
+ async lookupAsync(hostname, options = {}) {
+ if (typeof options === 'number') {
+ options = {
+ family: options
+ };
+ }
- for (let item of items) {
- if (!matches.includes(item)) {
- result.add(item);
- }
- }
- return [...result];
-};
+ let cached = await this.query(hostname);
-/**
- * Returns true if the given `string` contains the given pattern. Similar
- * to [.isMatch](#isMatch) but the pattern can match any part of the string.
- *
- * ```js
- * var mm = require('micromatch');
- * // mm.contains(string, pattern[, options]);
- *
- * console.log(mm.contains('aa/bb/cc', '*b'));
- * //=> true
- * console.log(mm.contains('aa/bb/cc', '*d'));
- * //=> false
- * ```
- * @param {String} `str` The string to match.
- * @param {String|Array} `patterns` Glob pattern to use for matching.
- * @param {Object} `options` See available [options](#options) for changing how matches are performed
- * @return {Boolean} Returns true if the patter matches any part of `str`.
- * @api public
- */
+ if (options.family === 6) {
+ const filtered = cached.filter(entry => entry.family === 6);
+
+ if (options.hints & V4MAPPED) {
+ if ((supportsALL && options.hints & ALL) || filtered.length === 0) {
+ map4to6(cached);
+ } else {
+ cached = filtered;
+ }
+ } else {
+ cached = filtered;
+ }
+ } else if (options.family === 4) {
+ cached = cached.filter(entry => entry.family === 4);
+ }
+
+ if (options.hints & ADDRCONFIG) {
+ const {_iface} = this;
+ cached = cached.filter(entry => entry.family === 6 ? _iface.has6 : _iface.has4);
+ }
+
+ if (cached.length === 0) {
+ const error = new Error(`cacheableLookup ENOTFOUND ${hostname}`);
+ error.code = 'ENOTFOUND';
+ error.hostname = hostname;
+
+ throw error;
+ }
+
+ if (options.all) {
+ return cached;
+ }
+
+ return cached[0];
+ }
+
+ async query(hostname) {
+ let cached = await this._cache.get(hostname);
+
+ if (!cached) {
+ const pending = this._pending[hostname];
+
+ if (pending) {
+ cached = await pending;
+ } else {
+ const newPromise = this.queryAndCache(hostname);
+ this._pending[hostname] = newPromise;
+
+ try {
+ cached = await newPromise;
+ } finally {
+ delete this._pending[hostname];
+ }
+ }
+ }
+
+ cached = cached.map(entry => {
+ return {...entry};
+ });
+
+ return cached;
+ }
+
+ async _resolve(hostname) {
+ const wrap = async promise => {
+ try {
+ return await promise;
+ } catch (error) {
+ if (error.code === 'ENODATA' || error.code === 'ENOTFOUND') {
+ return [];
+ }
+
+ throw error;
+ }
+ };
+
+ // ANY is unsafe as it doesn't trigger new queries in the underlying server.
+ const [A, AAAA] = await Promise.all([
+ this._resolve4(hostname, ttl),
+ this._resolve6(hostname, ttl)
+ ].map(promise => wrap(promise)));
+
+ let aTtl = 0;
+ let aaaaTtl = 0;
+ let cacheTtl = 0;
+
+ const now = Date.now();
+
+ for (const entry of A) {
+ entry.family = 4;
+ entry.expires = now + (entry.ttl * 1000);
+
+ aTtl = Math.max(aTtl, entry.ttl);
+ }
+
+ for (const entry of AAAA) {
+ entry.family = 6;
+ entry.expires = now + (entry.ttl * 1000);
+
+ aaaaTtl = Math.max(aaaaTtl, entry.ttl);
+ }
+
+ if (A.length > 0) {
+ if (AAAA.length > 0) {
+ cacheTtl = Math.min(aTtl, aaaaTtl);
+ } else {
+ cacheTtl = aTtl;
+ }
+ } else {
+ cacheTtl = aaaaTtl;
+ }
+
+ return {
+ entries: [
+ ...A,
+ ...AAAA
+ ],
+ cacheTtl
+ };
+ }
+
+ async _lookup(hostname) {
+ try {
+ const entries = await this._dnsLookup(hostname, {
+ all: true
+ });
+
+ return {
+ entries,
+ cacheTtl: 0
+ };
+ } catch (_) {
+ return {
+ entries: [],
+ cacheTtl: 0
+ };
+ }
+ }
+
+ async _set(hostname, data, cacheTtl) {
+ if (this.maxTtl > 0 && cacheTtl > 0) {
+ cacheTtl = Math.min(cacheTtl, this.maxTtl) * 1000;
+ data[kExpires] = Date.now() + cacheTtl;
+
+ try {
+ await this._cache.set(hostname, data, cacheTtl);
+ } catch (error) {
+ this.lookupAsync = async () => {
+ const cacheError = new Error('Cache Error. Please recreate the CacheableLookup instance.');
+ cacheError.cause = error;
+
+ throw cacheError;
+ };
+ }
+
+ if (isIterable(this._cache)) {
+ this._tick(cacheTtl);
+ }
+ }
+ }
+
+ async queryAndCache(hostname) {
+ if (this._hostnamesToFallback.has(hostname)) {
+ return this._dnsLookup(hostname, all);
+ }
+
+ let query = await this._resolve(hostname);
+
+ if (query.entries.length === 0 && this._fallback) {
+ query = await this._lookup(hostname);
+
+ if (query.entries.length !== 0) {
+ // Use `dns.lookup(...)` for that particular hostname
+ this._hostnamesToFallback.add(hostname);
+ }
+ }
+
+ const cacheTtl = query.entries.length === 0 ? this.errorTtl : query.cacheTtl;
+ await this._set(hostname, query.entries, cacheTtl);
+
+ return query.entries;
+ }
+
+ _tick(ms) {
+ const nextRemovalTime = this._nextRemovalTime;
+
+ if (!nextRemovalTime || ms < nextRemovalTime) {
+ clearTimeout(this._removalTimeout);
+
+ this._nextRemovalTime = ms;
+
+ this._removalTimeout = setTimeout(() => {
+ this._nextRemovalTime = false;
+
+ let nextExpiry = Infinity;
+
+ const now = Date.now();
+
+ for (const [hostname, entries] of this._cache) {
+ const expires = entries[kExpires];
+
+ if (now >= expires) {
+ this._cache.delete(hostname);
+ } else if (expires < nextExpiry) {
+ nextExpiry = expires;
+ }
+ }
+
+ if (nextExpiry !== Infinity) {
+ this._tick(nextExpiry - now);
+ }
+ }, ms);
+
+ /* istanbul ignore next: There is no `timeout.unref()` when running inside an Electron renderer */
+ if (this._removalTimeout.unref) {
+ this._removalTimeout.unref();
+ }
+ }
+ }
+
+ install(agent) {
+ verifyAgent(agent);
+
+ if (kCacheableLookupCreateConnection in agent) {
+ throw new Error('CacheableLookup has been already installed');
+ }
+
+ agent[kCacheableLookupCreateConnection] = agent.createConnection;
+ agent[kCacheableLookupInstance] = this;
+
+ agent.createConnection = (options, callback) => {
+ if (!('lookup' in options)) {
+ options.lookup = this.lookup;
+ }
+
+ return agent[kCacheableLookupCreateConnection](options, callback);
+ };
+ }
+
+ uninstall(agent) {
+ verifyAgent(agent);
+
+ if (agent[kCacheableLookupCreateConnection]) {
+ if (agent[kCacheableLookupInstance] !== this) {
+ throw new Error('The agent is not owned by this CacheableLookup instance');
+ }
+
+ agent.createConnection = agent[kCacheableLookupCreateConnection];
+
+ delete agent[kCacheableLookupCreateConnection];
+ delete agent[kCacheableLookupInstance];
+ }
+ }
+
+ updateInterfaceInfo() {
+ const {_iface} = this;
+
+ this._iface = getIfaceInfo();
+
+ if ((_iface.has4 && !this._iface.has4) || (_iface.has6 && !this._iface.has6)) {
+ this._cache.clear();
+ }
+ }
+
+ clear(hostname) {
+ if (hostname) {
+ this._cache.delete(hostname);
+ return;
+ }
+
+ this._cache.clear();
+ }
+}
+
+module.exports = CacheableLookup;
+module.exports.default = CacheableLookup;
+
+
+/***/ }),
+
+/***/ 4340:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+const {PassThrough: PassThroughStream} = __nccwpck_require__(2413);
+
+module.exports = options => {
+ options = {...options};
+
+ const {array} = options;
+ let {encoding} = options;
+ const isBuffer = encoding === 'buffer';
+ let objectMode = false;
+
+ if (array) {
+ objectMode = !(encoding || isBuffer);
+ } else {
+ encoding = encoding || 'utf8';
+ }
+
+ if (isBuffer) {
+ encoding = null;
+ }
+
+ const stream = new PassThroughStream({objectMode});
+
+ if (encoding) {
+ stream.setEncoding(encoding);
+ }
+
+ let length = 0;
+ const chunks = [];
+
+ stream.on('data', chunk => {
+ chunks.push(chunk);
+
+ if (objectMode) {
+ length = chunks.length;
+ } else {
+ length += chunk.length;
+ }
+ });
+
+ stream.getBufferedValue = () => {
+ if (array) {
+ return chunks;
+ }
+
+ return isBuffer ? Buffer.concat(chunks, length) : chunks.join('');
+ };
+
+ stream.getBufferedLength = () => length;
+
+ return stream;
+};
+
+
+/***/ }),
+
+/***/ 7040:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+const {constants: BufferConstants} = __nccwpck_require__(4293);
+const pump = __nccwpck_require__(8341);
+const bufferStream = __nccwpck_require__(4340);
+
+class MaxBufferError extends Error {
+ constructor() {
+ super('maxBuffer exceeded');
+ this.name = 'MaxBufferError';
+ }
+}
+
+async function getStream(inputStream, options) {
+ if (!inputStream) {
+ return Promise.reject(new Error('Expected a stream'));
+ }
+
+ options = {
+ maxBuffer: Infinity,
+ ...options
+ };
+
+ const {maxBuffer} = options;
+
+ let stream;
+ await new Promise((resolve, reject) => {
+ const rejectPromise = error => {
+ // Don't retrieve an oversized buffer.
+ if (error && stream.getBufferedLength() <= BufferConstants.MAX_LENGTH) {
+ error.bufferedData = stream.getBufferedValue();
+ }
+
+ reject(error);
+ };
+
+ stream = pump(inputStream, bufferStream(options), error => {
+ if (error) {
+ rejectPromise(error);
+ return;
+ }
+
+ resolve();
+ });
+
+ stream.on('data', () => {
+ if (stream.getBufferedLength() > maxBuffer) {
+ rejectPromise(new MaxBufferError());
+ }
+ });
+ });
+
+ return stream.getBufferedValue();
+}
+
+module.exports = getStream;
+// TODO: Remove this for the next major release
+module.exports.default = getStream;
+module.exports.buffer = (stream, options) => getStream(stream, {...options, encoding: 'buffer'});
+module.exports.array = (stream, options) => getStream(stream, {...options, array: true});
+module.exports.MaxBufferError = MaxBufferError;
+
+
+/***/ }),
+
+/***/ 8116:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const EventEmitter = __nccwpck_require__(8614);
+const urlLib = __nccwpck_require__(8835);
+const normalizeUrl = __nccwpck_require__(7952);
+const getStream = __nccwpck_require__(7040);
+const CachePolicy = __nccwpck_require__(1002);
+const Response = __nccwpck_require__(9004);
+const lowercaseKeys = __nccwpck_require__(9662);
+const cloneResponse = __nccwpck_require__(1312);
+const Keyv = __nccwpck_require__(1531);
+
+class CacheableRequest {
+ constructor(request, cacheAdapter) {
+ if (typeof request !== 'function') {
+ throw new TypeError('Parameter `request` must be a function');
+ }
+
+ this.cache = new Keyv({
+ uri: typeof cacheAdapter === 'string' && cacheAdapter,
+ store: typeof cacheAdapter !== 'string' && cacheAdapter,
+ namespace: 'cacheable-request'
+ });
+
+ return this.createCacheableRequest(request);
+ }
+
+ createCacheableRequest(request) {
+ return (opts, cb) => {
+ let url;
+ if (typeof opts === 'string') {
+ url = normalizeUrlObject(urlLib.parse(opts));
+ opts = {};
+ } else if (opts instanceof urlLib.URL) {
+ url = normalizeUrlObject(urlLib.parse(opts.toString()));
+ opts = {};
+ } else {
+ const [pathname, ...searchParts] = (opts.path || '').split('?');
+ const search = searchParts.length > 0 ?
+ `?${searchParts.join('?')}` :
+ '';
+ url = normalizeUrlObject({ ...opts, pathname, search });
+ }
+
+ opts = {
+ headers: {},
+ method: 'GET',
+ cache: true,
+ strictTtl: false,
+ automaticFailover: false,
+ ...opts,
+ ...urlObjectToRequestOptions(url)
+ };
+ opts.headers = lowercaseKeys(opts.headers);
+
+ const ee = new EventEmitter();
+ const normalizedUrlString = normalizeUrl(
+ urlLib.format(url),
+ {
+ stripWWW: false,
+ removeTrailingSlash: false,
+ stripAuthentication: false
+ }
+ );
+ const key = `${opts.method}:${normalizedUrlString}`;
+ let revalidate = false;
+ let madeRequest = false;
+
+ const makeRequest = opts => {
+ madeRequest = true;
+ let requestErrored = false;
+ let requestErrorCallback;
+
+ const requestErrorPromise = new Promise(resolve => {
+ requestErrorCallback = () => {
+ if (!requestErrored) {
+ requestErrored = true;
+ resolve();
+ }
+ };
+ });
+
+ const handler = response => {
+ if (revalidate && !opts.forceRefresh) {
+ response.status = response.statusCode;
+ const revalidatedPolicy = CachePolicy.fromObject(revalidate.cachePolicy).revalidatedPolicy(opts, response);
+ if (!revalidatedPolicy.modified) {
+ const headers = revalidatedPolicy.policy.responseHeaders();
+ response = new Response(revalidate.statusCode, headers, revalidate.body, revalidate.url);
+ response.cachePolicy = revalidatedPolicy.policy;
+ response.fromCache = true;
+ }
+ }
+
+ if (!response.fromCache) {
+ response.cachePolicy = new CachePolicy(opts, response, opts);
+ response.fromCache = false;
+ }
+
+ let clonedResponse;
+ if (opts.cache && response.cachePolicy.storable()) {
+ clonedResponse = cloneResponse(response);
+
+ (async () => {
+ try {
+ const bodyPromise = getStream.buffer(response);
+
+ await Promise.race([
+ requestErrorPromise,
+ new Promise(resolve => response.once('end', resolve))
+ ]);
+
+ if (requestErrored) {
+ return;
+ }
+
+ const body = await bodyPromise;
+
+ const value = {
+ cachePolicy: response.cachePolicy.toObject(),
+ url: response.url,
+ statusCode: response.fromCache ? revalidate.statusCode : response.statusCode,
+ body
+ };
+
+ let ttl = opts.strictTtl ? response.cachePolicy.timeToLive() : undefined;
+ if (opts.maxTtl) {
+ ttl = ttl ? Math.min(ttl, opts.maxTtl) : opts.maxTtl;
+ }
+
+ await this.cache.set(key, value, ttl);
+ } catch (error) {
+ ee.emit('error', new CacheableRequest.CacheError(error));
+ }
+ })();
+ } else if (opts.cache && revalidate) {
+ (async () => {
+ try {
+ await this.cache.delete(key);
+ } catch (error) {
+ ee.emit('error', new CacheableRequest.CacheError(error));
+ }
+ })();
+ }
+
+ ee.emit('response', clonedResponse || response);
+ if (typeof cb === 'function') {
+ cb(clonedResponse || response);
+ }
+ };
+
+ try {
+ const req = request(opts, handler);
+ req.once('error', requestErrorCallback);
+ req.once('abort', requestErrorCallback);
+ ee.emit('request', req);
+ } catch (error) {
+ ee.emit('error', new CacheableRequest.RequestError(error));
+ }
+ };
+
+ (async () => {
+ const get = async opts => {
+ await Promise.resolve();
+
+ const cacheEntry = opts.cache ? await this.cache.get(key) : undefined;
+ if (typeof cacheEntry === 'undefined') {
+ return makeRequest(opts);
+ }
+
+ const policy = CachePolicy.fromObject(cacheEntry.cachePolicy);
+ if (policy.satisfiesWithoutRevalidation(opts) && !opts.forceRefresh) {
+ const headers = policy.responseHeaders();
+ const response = new Response(cacheEntry.statusCode, headers, cacheEntry.body, cacheEntry.url);
+ response.cachePolicy = policy;
+ response.fromCache = true;
+
+ ee.emit('response', response);
+ if (typeof cb === 'function') {
+ cb(response);
+ }
+ } else {
+ revalidate = cacheEntry;
+ opts.headers = policy.revalidationHeaders(opts);
+ makeRequest(opts);
+ }
+ };
+
+ const errorHandler = error => ee.emit('error', new CacheableRequest.CacheError(error));
+ this.cache.once('error', errorHandler);
+ ee.on('response', () => this.cache.removeListener('error', errorHandler));
+
+ try {
+ await get(opts);
+ } catch (error) {
+ if (opts.automaticFailover && !madeRequest) {
+ makeRequest(opts);
+ }
+
+ ee.emit('error', new CacheableRequest.CacheError(error));
+ }
+ })();
+
+ return ee;
+ };
+ }
+}
+
+function urlObjectToRequestOptions(url) {
+ const options = { ...url };
+ options.path = `${url.pathname || '/'}${url.search || ''}`;
+ delete options.pathname;
+ delete options.search;
+ return options;
+}
+
+function normalizeUrlObject(url) {
+ // If url was parsed by url.parse or new URL:
+ // - hostname will be set
+ // - host will be hostname[:port]
+ // - port will be set if it was explicit in the parsed string
+ // Otherwise, url was from request options:
+ // - hostname or host may be set
+ // - host shall not have port encoded
+ return {
+ protocol: url.protocol,
+ auth: url.auth,
+ hostname: url.hostname || url.host || 'localhost',
+ port: url.port,
+ pathname: url.pathname,
+ search: url.search
+ };
+}
+
+CacheableRequest.RequestError = class extends Error {
+ constructor(error) {
+ super(error.message);
+ this.name = 'RequestError';
+ Object.assign(this, error);
+ }
+};
+
+CacheableRequest.CacheError = class extends Error {
+ constructor(error) {
+ super(error.message);
+ this.name = 'CacheError';
+ Object.assign(this, error);
+ }
+};
+
+module.exports = CacheableRequest;
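+
+/**
+ * A minimal usage sketch of the `cacheable-request` package bundled above
+ * (the URL is illustrative): the constructor wraps a request function and
+ * returns a function that emits `request`, `response` and `error` events.
+ *
+ * ```js
+ * const http = require('http');
+ * const CacheableRequest = require('cacheable-request');
+ *
+ * const cacheableRequest = new CacheableRequest(http.request);
+ *
+ * const cacheReq = cacheableRequest('http://example.com', response => {
+ *   // `response.fromCache` is true when the response was served from the cache.
+ *   response.pipe(process.stdout);
+ * });
+ *
+ * cacheReq.on('request', request => request.end());
+ * cacheReq.on('error', error => console.error(error)); // RequestError or CacheError
+ * ```
+ */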
+
+
+/***/ }),
+
+/***/ 1312:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const PassThrough = __nccwpck_require__(2413).PassThrough;
+const mimicResponse = __nccwpck_require__(2610);
+
+const cloneResponse = response => {
+ if (!(response && response.pipe)) {
+ throw new TypeError('Parameter `response` must be a response stream.');
+ }
+
+ const clone = new PassThrough();
+ mimicResponse(response, clone);
+
+ return response.pipe(clone);
+};
+
+module.exports = cloneResponse;
+
+
+/***/ }),
+
+/***/ 2391:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+const {Transform, PassThrough} = __nccwpck_require__(2413);
+const zlib = __nccwpck_require__(8761);
+const mimicResponse = __nccwpck_require__(3877);
+
+module.exports = response => {
+ const contentEncoding = (response.headers['content-encoding'] || '').toLowerCase();
+
+ if (!['gzip', 'deflate', 'br'].includes(contentEncoding)) {
+ return response;
+ }
+
+ // TODO: Remove this when targeting Node.js 12.
+ const isBrotli = contentEncoding === 'br';
+ if (isBrotli && typeof zlib.createBrotliDecompress !== 'function') {
+ response.destroy(new Error('Brotli is not supported on Node.js < 12'));
+ return response;
+ }
+
+ let isEmpty = true;
+
+ const checker = new Transform({
+ transform(data, _encoding, callback) {
+ isEmpty = false;
+
+ callback(null, data);
+ },
+
+ flush(callback) {
+ callback();
+ }
+ });
+
+ const finalStream = new PassThrough({
+ autoDestroy: false,
+ destroy(error, callback) {
+ response.destroy();
+
+ callback(error);
+ }
+ });
+
+ const decompressStream = isBrotli ? zlib.createBrotliDecompress() : zlib.createUnzip();
+
+ decompressStream.once('error', error => {
+ if (isEmpty && !response.readable) {
+ finalStream.end();
+ return;
+ }
+
+ finalStream.destroy(error);
+ });
+
+ mimicResponse(response, finalStream);
+ response.pipe(checker).pipe(decompressStream).pipe(finalStream);
+
+ return finalStream;
+};
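+
+/**
+ * A short sketch of how the `decompress-response` helper above is used (the
+ * URL is illustrative): it wraps an http.IncomingMessage so consumers receive
+ * decoded data when the server responded with gzip, deflate or brotli.
+ *
+ * ```js
+ * const http = require('http');
+ * const decompressResponse = require('decompress-response');
+ *
+ * http.get('http://example.com', response => {
+ *   response = decompressResponse(response);
+ *   response.setEncoding('utf8');
+ *   response.on('data', chunk => console.log(chunk));
+ * });
+ * ```
+ */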
+
+
+/***/ }),
+
+/***/ 3877:
+/***/ ((module) => {
+
+"use strict";
+
+
+// We define these manually to ensure they're always copied
+// even if they would move up the prototype chain
+// https://nodejs.org/api/http.html#http_class_http_incomingmessage
+const knownProperties = [
+ 'aborted',
+ 'complete',
+ 'headers',
+ 'httpVersion',
+ 'httpVersionMinor',
+ 'httpVersionMajor',
+ 'method',
+ 'rawHeaders',
+ 'rawTrailers',
+ 'setTimeout',
+ 'socket',
+ 'statusCode',
+ 'statusMessage',
+ 'trailers',
+ 'url'
+];
+
+module.exports = (fromStream, toStream) => {
+ if (toStream._readableState.autoDestroy) {
+ throw new Error('The second stream must have the `autoDestroy` option set to `false`');
+ }
+
+ const fromProperties = new Set(Object.keys(fromStream).concat(knownProperties));
+
+ const properties = {};
+
+ for (const property of fromProperties) {
+ // Don't overwrite existing properties.
+ if (property in toStream) {
+ continue;
+ }
+
+ properties[property] = {
+ get() {
+ const value = fromStream[property];
+ const isFunction = typeof value === 'function';
+
+ return isFunction ? value.bind(fromStream) : value;
+ },
+ set(value) {
+ fromStream[property] = value;
+ },
+ enumerable: true,
+ configurable: false
+ };
+ }
+
+ Object.defineProperties(toStream, properties);
+
+ fromStream.once('aborted', () => {
+ toStream.destroy();
+
+ toStream.emit('aborted');
+ });
+
+ fromStream.once('close', () => {
+ if (fromStream.complete) {
+ if (toStream.readable) {
+ toStream.once('end', () => {
+ toStream.emit('close');
+ });
+ } else {
+ toStream.emit('close');
+ }
+ } else {
+ toStream.emit('close');
+ }
+ });
+
+ return toStream;
+};
+
+
+/***/ }),
+
+/***/ 6214:
+/***/ ((module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const tls_1 = __nccwpck_require__(4016);
+const deferToConnect = (socket, fn) => {
+ let listeners;
+ if (typeof fn === 'function') {
+ const connect = fn;
+ listeners = { connect };
+ }
+ else {
+ listeners = fn;
+ }
+ const hasConnectListener = typeof listeners.connect === 'function';
+ const hasSecureConnectListener = typeof listeners.secureConnect === 'function';
+ const hasCloseListener = typeof listeners.close === 'function';
+ const onConnect = () => {
+ if (hasConnectListener) {
+ listeners.connect();
+ }
+ if (socket instanceof tls_1.TLSSocket && hasSecureConnectListener) {
+ if (socket.authorized) {
+ listeners.secureConnect();
+ }
+ else if (!socket.authorizationError) {
+ socket.once('secureConnect', listeners.secureConnect);
+ }
+ }
+ if (hasCloseListener) {
+ socket.once('close', listeners.close);
+ }
+ };
+ if (socket.writable && !socket.connecting) {
+ onConnect();
+ }
+ else if (socket.connecting) {
+ socket.once('connect', onConnect);
+ }
+ else if (socket.destroyed && hasCloseListener) {
+ listeners.close(socket._hadError);
+ }
+};
+exports.default = deferToConnect;
+// For CommonJS default export support
+module.exports = deferToConnect;
+module.exports.default = deferToConnect;
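+
+/**
+ * A minimal sketch of the `defer-to-connect` helper above (host and port are
+ * illustrative): the listeners run once the socket connects, or immediately
+ * if it is already connected.
+ *
+ * ```js
+ * const net = require('net');
+ * const deferToConnect = require('defer-to-connect');
+ *
+ * const socket = net.connect(80, 'example.com');
+ *
+ * deferToConnect(socket, {
+ *   connect: () => console.log('connected'),
+ *   close: () => console.log('closed')
+ * });
+ * ```
+ */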
+
+
+/***/ }),
+
+/***/ 8932:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+
+class Deprecation extends Error {
+ constructor(message) {
+ super(message); // Maintains proper stack trace (only available on V8)
+
+ /* istanbul ignore next */
+
+ if (Error.captureStackTrace) {
+ Error.captureStackTrace(this, this.constructor);
+ }
+
+ this.name = 'Deprecation';
+ }
+
+}
+
+exports.Deprecation = Deprecation;
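+
+/**
+ * A tiny usage sketch of the `deprecation` package above (the message text is
+ * illustrative): the class only customizes the error name and stack trace.
+ *
+ * ```js
+ * const {Deprecation} = require('deprecation');
+ *
+ * const deprecation = new Deprecation('[my-lib] foo() is deprecated, use bar()');
+ * console.log(deprecation.name);    //=> 'Deprecation'
+ * console.log(deprecation.message); //=> '[my-lib] foo() is deprecated, use bar()'
+ * ```
+ */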
+
+
+/***/ }),
+
+/***/ 1205:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var once = __nccwpck_require__(1223);
+
+var noop = function() {};
+
+var isRequest = function(stream) {
+ return stream.setHeader && typeof stream.abort === 'function';
+};
+
+var isChildProcess = function(stream) {
+ return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3
+};
+
+var eos = function(stream, opts, callback) {
+ if (typeof opts === 'function') return eos(stream, null, opts);
+ if (!opts) opts = {};
+
+ callback = once(callback || noop);
+
+ var ws = stream._writableState;
+ var rs = stream._readableState;
+ var readable = opts.readable || (opts.readable !== false && stream.readable);
+ var writable = opts.writable || (opts.writable !== false && stream.writable);
+ var cancelled = false;
+
+ var onlegacyfinish = function() {
+ if (!stream.writable) onfinish();
+ };
+
+ var onfinish = function() {
+ writable = false;
+ if (!readable) callback.call(stream);
+ };
+
+ var onend = function() {
+ readable = false;
+ if (!writable) callback.call(stream);
+ };
+
+ var onexit = function(exitCode) {
+ callback.call(stream, exitCode ? new Error('exited with error code: ' + exitCode) : null);
+ };
+
+ var onerror = function(err) {
+ callback.call(stream, err);
+ };
+
+ var onclose = function() {
+ process.nextTick(onclosenexttick);
+ };
+
+ var onclosenexttick = function() {
+ if (cancelled) return;
+ if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close'));
+ if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close'));
+ };
+
+ var onrequest = function() {
+ stream.req.on('finish', onfinish);
+ };
+
+ if (isRequest(stream)) {
+ stream.on('complete', onfinish);
+ stream.on('abort', onclose);
+ if (stream.req) onrequest();
+ else stream.on('request', onrequest);
+ } else if (writable && !ws) { // legacy streams
+ stream.on('end', onlegacyfinish);
+ stream.on('close', onlegacyfinish);
+ }
+
+ if (isChildProcess(stream)) stream.on('exit', onexit);
+
+ stream.on('end', onend);
+ stream.on('finish', onfinish);
+ if (opts.error !== false) stream.on('error', onerror);
+ stream.on('close', onclose);
+
+ return function() {
+ cancelled = true;
+ stream.removeListener('complete', onfinish);
+ stream.removeListener('abort', onclose);
+ stream.removeListener('request', onrequest);
+ if (stream.req) stream.req.removeListener('finish', onfinish);
+ stream.removeListener('end', onlegacyfinish);
+ stream.removeListener('close', onlegacyfinish);
+ stream.removeListener('finish', onfinish);
+ stream.removeListener('exit', onexit);
+ stream.removeListener('end', onend);
+ stream.removeListener('error', onerror);
+ stream.removeListener('close', onclose);
+ };
+};
+
+module.exports = eos;
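+
+/**
+ * A minimal sketch of the `end-of-stream` helper above (the file name is
+ * illustrative): the callback fires once the stream has ended, finished or
+ * closed, and receives an error on premature close.
+ *
+ * ```js
+ * const fs = require('fs');
+ * const eos = require('end-of-stream');
+ *
+ * const readable = fs.createReadStream('notes.txt');
+ *
+ * const cancel = eos(readable, error => {
+ *   if (error) return console.error('stream closed prematurely', error);
+ *   console.log('stream has ended');
+ * });
+ *
+ * // Calling cancel() removes the listeners if the result is no longer needed.
+ * ```
+ */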
+
+
+/***/ }),
+
+/***/ 5582:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const stringify = __nccwpck_require__(4810);
+const compile = __nccwpck_require__(7123);
+const expand = __nccwpck_require__(6944);
+const parse = __nccwpck_require__(9889);
+
+/**
+ * Expand the given pattern or create a regex-compatible string.
+ *
+ * ```js
+ * const braces = require('braces');
+ * console.log(braces('{a,b,c}'));                    //=> ['(a|b|c)']
+ * console.log(braces('{a,b,c}', { expand: true }));  //=> ['a', 'b', 'c']
+ * ```
+ * @param {String} `str`
+ * @param {Object} `options`
+ * @return {Array}
+ * @api public
+ */
+
+const braces = (input, options = {}) => {
+ let output = [];
+
+ if (Array.isArray(input)) {
+ for (let pattern of input) {
+ let result = braces.create(pattern, options);
+ if (Array.isArray(result)) {
+ output.push(...result);
+ } else {
+ output.push(result);
+ }
+ }
+ } else {
+ output = [].concat(braces.create(input, options));
+ }
+
+ if (options && options.expand === true && options.nodupes === true) {
+ output = [...new Set(output)];
+ }
+ return output;
+};
+
+/**
+ * Parse the given `str` with the given `options`.
+ *
+ * ```js
+ * // braces.parse(pattern, [, options]);
+ * const ast = braces.parse('a/{b,c}/d');
+ * console.log(ast);
+ * ```
+ * @param {String} pattern Brace pattern to parse
+ * @param {Object} options
+ * @return {Object} Returns an AST
+ * @api public
+ */
+
+braces.parse = (input, options = {}) => parse(input, options);
+
+/**
+ * Creates a braces string from an AST, or an AST node.
+ *
+ * ```js
+ * const braces = require('braces');
+ * let ast = braces.parse('foo/{a,b}/bar');
+ * console.log(braces.stringify(ast.nodes[2])); //=> '{a,b}'
+ * ```
+ * @param {String} `input` Brace pattern or AST.
+ * @param {Object} `options`
+ * @return {String} Returns a brace pattern string.
+ * @api public
+ */
+
+braces.stringify = (input, options = {}) => {
+ if (typeof input === 'string') {
+ return stringify(braces.parse(input, options), options);
+ }
+ return stringify(input, options);
+};
+
+/**
+ * Compiles a brace pattern into a regex-compatible, optimized string.
+ * This method is called by the main [braces](#braces) function by default.
+ *
+ * ```js
+ * const braces = require('braces');
+ * console.log(braces.compile('a/{b,c}/d'));
+ * //=> 'a/(b|c)/d'
+ * ```
+ * @param {String} `input` Brace pattern or AST.
+ * @param {Object} `options`
+ * @return {String} Returns a regex-compatible string.
+ * @api public
+ */
+
+braces.compile = (input, options = {}) => {
+ if (typeof input === 'string') {
+ input = braces.parse(input, options);
+ }
+ return compile(input, options);
+};
+
+/**
+ * Expands a brace pattern into an array. This method is called by the
+ * main [braces](#braces) function when `options.expand` is true. Before
+ * using this method it's recommended that you read the [performance notes](#performance)
+ * and consider the advantages of using [.compile](#compile) instead.
+ *
+ * ```js
+ * const braces = require('braces');
+ * console.log(braces.expand('a/{b,c}/d'));
+ * //=> ['a/b/d', 'a/c/d'];
+ * ```
+ * @param {String} `pattern` Brace pattern
+ * @param {Object} `options`
+ * @return {Array} Returns an array of expanded values.
+ * @api public
+ */
+
+braces.expand = (input, options = {}) => {
+ if (typeof input === 'string') {
+ input = braces.parse(input, options);
+ }
+
+ let result = expand(input, options);
+
+ // filter out empty strings if specified
+ if (options.noempty === true) {
+ result = result.filter(Boolean);
+ }
+
+ // filter out duplicates if specified
+ if (options.nodupes === true) {
+ result = [...new Set(result)];
+ }
+
+ return result;
+};
+
+/**
+ * Processes a brace pattern and returns either an expanded array
+ * (if `options.expand` is true) or a highly optimized regex-compatible string.
+ * This method is called by the main [braces](#braces) function.
+ *
+ * ```js
+ * const braces = require('braces');
+ * console.log(braces.create('user-{200..300}/project-{a,b,c}-{1..10}'))
+ * //=> 'user-(20[0-9]|2[1-9][0-9]|300)/project-(a|b|c)-([1-9]|10)'
+ * ```
+ * @param {String} `pattern` Brace pattern
+ * @param {Object} `options`
+ * @return {Array} Returns an array of expanded values.
+ * @api public
+ */
+
+braces.create = (input, options = {}) => {
+ if (input === '' || input.length < 3) {
+ return [input];
+ }
+
+ return options.expand !== true
+ ? braces.compile(input, options)
+ : braces.expand(input, options);
+};
+
+/**
+ * Expose "braces"
+ */
+
+module.exports = braces;
+
+
+/***/ }),
+
+/***/ 7123:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const fill = __nccwpck_require__(791);
+const utils = __nccwpck_require__(7691);
+
+const compile = (ast, options = {}) => {
+ let walk = (node, parent = {}) => {
+ let invalidBlock = utils.isInvalidBrace(parent);
+ let invalidNode = node.invalid === true && options.escapeInvalid === true;
+ let invalid = invalidBlock === true || invalidNode === true;
+ let prefix = options.escapeInvalid === true ? '\\' : '';
+ let output = '';
+
+ if (node.isOpen === true) {
+ return prefix + node.value;
+ }
+ if (node.isClose === true) {
+ return prefix + node.value;
+ }
+
+ if (node.type === 'open') {
+ return invalid ? (prefix + node.value) : '(';
+ }
+
+ if (node.type === 'close') {
+ return invalid ? (prefix + node.value) : ')';
+ }
+
+ if (node.type === 'comma') {
+ return node.prev.type === 'comma' ? '' : (invalid ? node.value : '|');
+ }
+
+ if (node.value) {
+ return node.value;
+ }
+
+ if (node.nodes && node.ranges > 0) {
+ let args = utils.reduce(node.nodes);
+ let range = fill(...args, { ...options, wrap: false, toRegex: true });
+
+ if (range.length !== 0) {
+ return args.length > 1 && range.length > 1 ? `(${range})` : range;
+ }
+ }
+
+ if (node.nodes) {
+ for (let child of node.nodes) {
+ output += walk(child, node);
+ }
+ }
+ return output;
+ };
+
+ return walk(ast);
+};
+
+module.exports = compile;
+
+
+/***/ }),
+
+/***/ 5412:
+/***/ ((module) => {
+
+"use strict";
+
+
+module.exports = {
+ MAX_LENGTH: 1024 * 64,
+
+ // Digits
+ CHAR_0: '0', /* 0 */
+ CHAR_9: '9', /* 9 */
+
+ // Alphabet chars.
+ CHAR_UPPERCASE_A: 'A', /* A */
+ CHAR_LOWERCASE_A: 'a', /* a */
+ CHAR_UPPERCASE_Z: 'Z', /* Z */
+ CHAR_LOWERCASE_Z: 'z', /* z */
+
+ CHAR_LEFT_PARENTHESES: '(', /* ( */
+ CHAR_RIGHT_PARENTHESES: ')', /* ) */
+
+ CHAR_ASTERISK: '*', /* * */
+
+ // Non-alphabetic chars.
+ CHAR_AMPERSAND: '&', /* & */
+ CHAR_AT: '@', /* @ */
+ CHAR_BACKSLASH: '\\', /* \ */
+ CHAR_BACKTICK: '`', /* ` */
+ CHAR_CARRIAGE_RETURN: '\r', /* \r */
+ CHAR_CIRCUMFLEX_ACCENT: '^', /* ^ */
+ CHAR_COLON: ':', /* : */
+ CHAR_COMMA: ',', /* , */
+  CHAR_DOLLAR: '$', /* $ */
+ CHAR_DOT: '.', /* . */
+ CHAR_DOUBLE_QUOTE: '"', /* " */
+ CHAR_EQUAL: '=', /* = */
+ CHAR_EXCLAMATION_MARK: '!', /* ! */
+ CHAR_FORM_FEED: '\f', /* \f */
+ CHAR_FORWARD_SLASH: '/', /* / */
+ CHAR_HASH: '#', /* # */
+ CHAR_HYPHEN_MINUS: '-', /* - */
+ CHAR_LEFT_ANGLE_BRACKET: '<', /* < */
+ CHAR_LEFT_CURLY_BRACE: '{', /* { */
+ CHAR_LEFT_SQUARE_BRACKET: '[', /* [ */
+ CHAR_LINE_FEED: '\n', /* \n */
+ CHAR_NO_BREAK_SPACE: '\u00A0', /* \u00A0 */
+ CHAR_PERCENT: '%', /* % */
+ CHAR_PLUS: '+', /* + */
+ CHAR_QUESTION_MARK: '?', /* ? */
+ CHAR_RIGHT_ANGLE_BRACKET: '>', /* > */
+ CHAR_RIGHT_CURLY_BRACE: '}', /* } */
+ CHAR_RIGHT_SQUARE_BRACKET: ']', /* ] */
+ CHAR_SEMICOLON: ';', /* ; */
+ CHAR_SINGLE_QUOTE: '\'', /* ' */
+ CHAR_SPACE: ' ', /* */
+ CHAR_TAB: '\t', /* \t */
+ CHAR_UNDERSCORE: '_', /* _ */
+ CHAR_VERTICAL_LINE: '|', /* | */
+ CHAR_ZERO_WIDTH_NOBREAK_SPACE: '\uFEFF' /* \uFEFF */
+};
+
+
+/***/ }),
+
+/***/ 6944:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const fill = __nccwpck_require__(791);
+const stringify = __nccwpck_require__(4810);
+const utils = __nccwpck_require__(7691);
+
+const append = (queue = '', stash = '', enclose = false) => {
+ let result = [];
+
+ queue = [].concat(queue);
+ stash = [].concat(stash);
+
+ if (!stash.length) return queue;
+ if (!queue.length) {
+ return enclose ? utils.flatten(stash).map(ele => `{${ele}}`) : stash;
+ }
+
+ for (let item of queue) {
+ if (Array.isArray(item)) {
+ for (let value of item) {
+ result.push(append(value, stash, enclose));
+ }
+ } else {
+ for (let ele of stash) {
+ if (enclose === true && typeof ele === 'string') ele = `{${ele}}`;
+ result.push(Array.isArray(ele) ? append(item, ele, enclose) : (item + ele));
+ }
+ }
+ }
+ return utils.flatten(result);
+};
+
+const expand = (ast, options = {}) => {
+ let rangeLimit = options.rangeLimit === void 0 ? 1000 : options.rangeLimit;
+
+ let walk = (node, parent = {}) => {
+ node.queue = [];
+
+ let p = parent;
+ let q = parent.queue;
+
+ while (p.type !== 'brace' && p.type !== 'root' && p.parent) {
+ p = p.parent;
+ q = p.queue;
+ }
+
+ if (node.invalid || node.dollar) {
+ q.push(append(q.pop(), stringify(node, options)));
+ return;
+ }
+
+ if (node.type === 'brace' && node.invalid !== true && node.nodes.length === 2) {
+ q.push(append(q.pop(), ['{}']));
+ return;
+ }
+
+ if (node.nodes && node.ranges > 0) {
+ let args = utils.reduce(node.nodes);
+
+ if (utils.exceedsLimit(...args, options.step, rangeLimit)) {
+ throw new RangeError('expanded array length exceeds range limit. Use options.rangeLimit to increase or disable the limit.');
+ }
+
+ let range = fill(...args, options);
+ if (range.length === 0) {
+ range = stringify(node, options);
+ }
+
+ q.push(append(q.pop(), range));
+ node.nodes = [];
+ return;
+ }
+
+ let enclose = utils.encloseBrace(node);
+ let queue = node.queue;
+ let block = node;
+
+ while (block.type !== 'brace' && block.type !== 'root' && block.parent) {
+ block = block.parent;
+ queue = block.queue;
+ }
+
+ for (let i = 0; i < node.nodes.length; i++) {
+ let child = node.nodes[i];
+
+ if (child.type === 'comma' && node.type === 'brace') {
+ if (i === 1) queue.push('');
+ queue.push('');
+ continue;
+ }
+
+ if (child.type === 'close') {
+ q.push(append(q.pop(), queue, enclose));
+ continue;
+ }
+
+ if (child.value && child.type !== 'open') {
+ queue.push(append(queue.pop(), child.value));
+ continue;
+ }
+
+ if (child.nodes) {
+ walk(child, node);
+ }
+ }
+
+ return queue;
+ };
+
+ return utils.flatten(walk(ast));
+};
+
+module.exports = expand;
+
+
+/***/ }),
+
+/***/ 9889:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const stringify = __nccwpck_require__(4810);
+
+/**
+ * Constants
+ */
+
+const {
+ MAX_LENGTH,
+ CHAR_BACKSLASH, /* \ */
+ CHAR_BACKTICK, /* ` */
+ CHAR_COMMA, /* , */
+ CHAR_DOT, /* . */
+ CHAR_LEFT_PARENTHESES, /* ( */
+ CHAR_RIGHT_PARENTHESES, /* ) */
+ CHAR_LEFT_CURLY_BRACE, /* { */
+ CHAR_RIGHT_CURLY_BRACE, /* } */
+ CHAR_LEFT_SQUARE_BRACKET, /* [ */
+ CHAR_RIGHT_SQUARE_BRACKET, /* ] */
+ CHAR_DOUBLE_QUOTE, /* " */
+ CHAR_SINGLE_QUOTE, /* ' */
+ CHAR_NO_BREAK_SPACE,
+ CHAR_ZERO_WIDTH_NOBREAK_SPACE
+} = __nccwpck_require__(5412);
+
+/**
+ * parse
+ */
+
+const parse = (input, options = {}) => {
+ if (typeof input !== 'string') {
+ throw new TypeError('Expected a string');
+ }
+
+ let opts = options || {};
+ let max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH;
+ if (input.length > max) {
+ throw new SyntaxError(`Input length (${input.length}), exceeds max characters (${max})`);
+ }
+
+ let ast = { type: 'root', input, nodes: [] };
+ let stack = [ast];
+ let block = ast;
+ let prev = ast;
+ let brackets = 0;
+ let length = input.length;
+ let index = 0;
+ let depth = 0;
+ let value;
+ let memo = {};
+
+ /**
+ * Helpers
+ */
+
+ const advance = () => input[index++];
+ const push = node => {
+ if (node.type === 'text' && prev.type === 'dot') {
+ prev.type = 'text';
+ }
+
+ if (prev && prev.type === 'text' && node.type === 'text') {
+ prev.value += node.value;
+ return;
+ }
+
+ block.nodes.push(node);
+ node.parent = block;
+ node.prev = prev;
+ prev = node;
+ return node;
+ };
+
+ push({ type: 'bos' });
+
+ while (index < length) {
+ block = stack[stack.length - 1];
+ value = advance();
+
+ /**
+ * Invalid chars
+ */
+
+ if (value === CHAR_ZERO_WIDTH_NOBREAK_SPACE || value === CHAR_NO_BREAK_SPACE) {
+ continue;
+ }
+
+ /**
+ * Escaped chars
+ */
+
+ if (value === CHAR_BACKSLASH) {
+ push({ type: 'text', value: (options.keepEscaping ? value : '') + advance() });
+ continue;
+ }
+
+ /**
+ * Right square bracket (literal): ']'
+ */
+
+ if (value === CHAR_RIGHT_SQUARE_BRACKET) {
+ push({ type: 'text', value: '\\' + value });
+ continue;
+ }
+
+ /**
+ * Left square bracket: '['
+ */
+
+ if (value === CHAR_LEFT_SQUARE_BRACKET) {
+ brackets++;
+
+ let closed = true;
+ let next;
+
+ while (index < length && (next = advance())) {
+ value += next;
+
+ if (next === CHAR_LEFT_SQUARE_BRACKET) {
+ brackets++;
+ continue;
+ }
+
+ if (next === CHAR_BACKSLASH) {
+ value += advance();
+ continue;
+ }
+
+ if (next === CHAR_RIGHT_SQUARE_BRACKET) {
+ brackets--;
+
+ if (brackets === 0) {
+ break;
+ }
+ }
+ }
+
+ push({ type: 'text', value });
+ continue;
+ }
+
+ /**
+ * Parentheses
+ */
+
+ if (value === CHAR_LEFT_PARENTHESES) {
+ block = push({ type: 'paren', nodes: [] });
+ stack.push(block);
+ push({ type: 'text', value });
+ continue;
+ }
+
+ if (value === CHAR_RIGHT_PARENTHESES) {
+ if (block.type !== 'paren') {
+ push({ type: 'text', value });
+ continue;
+ }
+ block = stack.pop();
+ push({ type: 'text', value });
+ block = stack[stack.length - 1];
+ continue;
+ }
+
+ /**
+ * Quotes: '|"|`
+ */
+
+ if (value === CHAR_DOUBLE_QUOTE || value === CHAR_SINGLE_QUOTE || value === CHAR_BACKTICK) {
+ let open = value;
+ let next;
+
+ if (options.keepQuotes !== true) {
+ value = '';
+ }
+
+ while (index < length && (next = advance())) {
+ if (next === CHAR_BACKSLASH) {
+ value += next + advance();
+ continue;
+ }
+
+ if (next === open) {
+ if (options.keepQuotes === true) value += next;
+ break;
+ }
+
+ value += next;
+ }
+
+ push({ type: 'text', value });
+ continue;
+ }
+
+ /**
+ * Left curly brace: '{'
+ */
+
+ if (value === CHAR_LEFT_CURLY_BRACE) {
+ depth++;
+
+ let dollar = prev.value && prev.value.slice(-1) === '$' || block.dollar === true;
+ let brace = {
+ type: 'brace',
+ open: true,
+ close: false,
+ dollar,
+ depth,
+ commas: 0,
+ ranges: 0,
+ nodes: []
+ };
+
+ block = push(brace);
+ stack.push(block);
+ push({ type: 'open', value });
+ continue;
+ }
+
+ /**
+ * Right curly brace: '}'
+ */
+
+ if (value === CHAR_RIGHT_CURLY_BRACE) {
+ if (block.type !== 'brace') {
+ push({ type: 'text', value });
+ continue;
+ }
+
+ let type = 'close';
+ block = stack.pop();
+ block.close = true;
+
+ push({ type, value });
+ depth--;
+
+ block = stack[stack.length - 1];
+ continue;
+ }
+
+ /**
+ * Comma: ','
+ */
+
+ if (value === CHAR_COMMA && depth > 0) {
+ if (block.ranges > 0) {
+ block.ranges = 0;
+ let open = block.nodes.shift();
+ block.nodes = [open, { type: 'text', value: stringify(block) }];
+ }
+
+ push({ type: 'comma', value });
+ block.commas++;
+ continue;
+ }
+
+ /**
+ * Dot: '.'
+ */
+
+ if (value === CHAR_DOT && depth > 0 && block.commas === 0) {
+ let siblings = block.nodes;
+
+ if (depth === 0 || siblings.length === 0) {
+ push({ type: 'text', value });
+ continue;
+ }
+
+ if (prev.type === 'dot') {
+ block.range = [];
+ prev.value += value;
+ prev.type = 'range';
+
+ if (block.nodes.length !== 3 && block.nodes.length !== 5) {
+ block.invalid = true;
+ block.ranges = 0;
+ prev.type = 'text';
+ continue;
+ }
+
+ block.ranges++;
+ block.args = [];
+ continue;
+ }
+
+ if (prev.type === 'range') {
+ siblings.pop();
+
+ let before = siblings[siblings.length - 1];
+ before.value += prev.value + value;
+ prev = before;
+ block.ranges--;
+ continue;
+ }
+
+ push({ type: 'dot', value });
+ continue;
+ }
+
+ /**
+ * Text
+ */
+
+ push({ type: 'text', value });
+ }
+
+ // Mark imbalanced braces and brackets as invalid
+ do {
+ block = stack.pop();
+
+ if (block.type !== 'root') {
+ block.nodes.forEach(node => {
+ if (!node.nodes) {
+ if (node.type === 'open') node.isOpen = true;
+ if (node.type === 'close') node.isClose = true;
+ if (!node.nodes) node.type = 'text';
+ node.invalid = true;
+ }
+ });
+
+ // get the location of the block on parent.nodes (block's siblings)
+ let parent = stack[stack.length - 1];
+ let index = parent.nodes.indexOf(block);
+      // replace the (invalid) block with its nodes
+ parent.nodes.splice(index, 1, ...block.nodes);
+ }
+ } while (stack.length > 0);
+
+ push({ type: 'eos' });
+ return ast;
+};
+
+module.exports = parse;
+
+
+/***/ }),
+
+/***/ 4810:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const utils = __nccwpck_require__(7691);
+
+module.exports = (ast, options = {}) => {
+ let stringify = (node, parent = {}) => {
+ let invalidBlock = options.escapeInvalid && utils.isInvalidBrace(parent);
+ let invalidNode = node.invalid === true && options.escapeInvalid === true;
+ let output = '';
+
+ if (node.value) {
+ if ((invalidBlock || invalidNode) && utils.isOpenOrClose(node)) {
+ return '\\' + node.value;
+ }
+ return node.value;
+ }
+
+ if (node.nodes) {
+ for (let child of node.nodes) {
+ output += stringify(child);
+ }
+ }
+ return output;
+ };
+
+ return stringify(ast);
+};
+
+
+
+/***/ }),
+
+/***/ 7691:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+
+exports.isInteger = num => {
+ if (typeof num === 'number') {
+ return Number.isInteger(num);
+ }
+ if (typeof num === 'string' && num.trim() !== '') {
+ return Number.isInteger(Number(num));
+ }
+ return false;
+};
+
+/**
+ * Find a node of the given type
+ */
+
+exports.find = (node, type) => node.nodes.find(node => node.type === type);
+
+/**
+ * Returns true if the expanded range would exceed the given limit
+ */
+
+exports.exceedsLimit = (min, max, step = 1, limit) => {
+ if (limit === false) return false;
+ if (!exports.isInteger(min) || !exports.isInteger(max)) return false;
+ return ((Number(max) - Number(min)) / Number(step)) >= limit;
+};
+
+/**
+ * Escape the given node with '\\' before node.value
+ */
+
+exports.escapeNode = (block, n = 0, type) => {
+ let node = block.nodes[n];
+ if (!node) return;
+
+ if ((type && node.type === type) || node.type === 'open' || node.type === 'close') {
+ if (node.escaped !== true) {
+ node.value = '\\' + node.value;
+ node.escaped = true;
+ }
+ }
+};
+
+/**
+ * Returns true if the given brace node should be enclosed in literal braces
+ */
+
+exports.encloseBrace = node => {
+ if (node.type !== 'brace') return false;
+ if ((node.commas >> 0 + node.ranges >> 0) === 0) {
+ node.invalid = true;
+ return true;
+ }
+ return false;
+};
+
+/**
+ * Returns true if a brace node is invalid.
+ */
+
+exports.isInvalidBrace = block => {
+ if (block.type !== 'brace') return false;
+ if (block.invalid === true || block.dollar) return true;
+ if ((block.commas >> 0 + block.ranges >> 0) === 0) {
+ block.invalid = true;
+ return true;
+ }
+ if (block.open !== true || block.close !== true) {
+ block.invalid = true;
+ return true;
+ }
+ return false;
+};
+
+/**
+ * Returns true if a node is an open or close node
+ */
+
+exports.isOpenOrClose = node => {
+ if (node.type === 'open' || node.type === 'close') {
+ return true;
+ }
+ return node.open === true || node.close === true;
+};
+
+/**
+ * Reduce an array of text nodes.
+ */
+
+exports.reduce = nodes => nodes.reduce((acc, node) => {
+ if (node.type === 'text') acc.push(node.value);
+ if (node.type === 'range') node.type = 'text';
+ return acc;
+}, []);
+
+/**
+ * Flatten an array
+ */
+
+exports.flatten = (...args) => {
+ const result = [];
+ const flat = arr => {
+ for (let i = 0; i < arr.length; i++) {
+ let ele = arr[i];
+ Array.isArray(ele) ? flat(ele, result) : ele !== void 0 && result.push(ele);
+ }
+ return result;
+ };
+ flat(args);
+ return result;
+};
+
+
+/***/ }),
+
+/***/ 791:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+/*!
+ * fill-range
+ *
+ * Copyright (c) 2014-present, Jon Schlinkert.
+ * Licensed under the MIT License.
+ */
+
+
+
+const util = __nccwpck_require__(1669);
+const toRegexRange = __nccwpck_require__(6867);
+
+const isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val);
+
+const transform = toNumber => {
+ return value => toNumber === true ? Number(value) : String(value);
+};
+
+const isValidValue = value => {
+ return typeof value === 'number' || (typeof value === 'string' && value !== '');
+};
+
+const isNumber = num => Number.isInteger(+num);
+
+const zeros = input => {
+ let value = `${input}`;
+ let index = -1;
+ if (value[0] === '-') value = value.slice(1);
+ if (value === '0') return false;
+ while (value[++index] === '0');
+ return index > 0;
+};
+
+const stringify = (start, end, options) => {
+ if (typeof start === 'string' || typeof end === 'string') {
+ return true;
+ }
+ return options.stringify === true;
+};
+
+const pad = (input, maxLength, toNumber) => {
+ if (maxLength > 0) {
+ let dash = input[0] === '-' ? '-' : '';
+ if (dash) input = input.slice(1);
+ input = (dash + input.padStart(dash ? maxLength - 1 : maxLength, '0'));
+ }
+ if (toNumber === false) {
+ return String(input);
+ }
+ return input;
+};
+
+const toMaxLen = (input, maxLength) => {
+ let negative = input[0] === '-' ? '-' : '';
+ if (negative) {
+ input = input.slice(1);
+ maxLength--;
+ }
+ while (input.length < maxLength) input = '0' + input;
+ return negative ? ('-' + input) : input;
+};
+
+const toSequence = (parts, options) => {
+ parts.negatives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0);
+ parts.positives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0);
+
+ let prefix = options.capture ? '' : '?:';
+ let positives = '';
+ let negatives = '';
+ let result;
+
+ if (parts.positives.length) {
+ positives = parts.positives.join('|');
+ }
+
+ if (parts.negatives.length) {
+ negatives = `-(${prefix}${parts.negatives.join('|')})`;
+ }
+
+ if (positives && negatives) {
+ result = `${positives}|${negatives}`;
+ } else {
+ result = positives || negatives;
+ }
+
+ if (options.wrap) {
+ return `(${prefix}${result})`;
+ }
+
+ return result;
+};
+
+const toRange = (a, b, isNumbers, options) => {
+ if (isNumbers) {
+ return toRegexRange(a, b, { wrap: false, ...options });
+ }
+
+ let start = String.fromCharCode(a);
+ if (a === b) return start;
+
+ let stop = String.fromCharCode(b);
+ return `[${start}-${stop}]`;
+};
+
+const toRegex = (start, end, options) => {
+ if (Array.isArray(start)) {
+ let wrap = options.wrap === true;
+ let prefix = options.capture ? '' : '?:';
+ return wrap ? `(${prefix}${start.join('|')})` : start.join('|');
+ }
+ return toRegexRange(start, end, options);
+};
+
+const rangeError = (...args) => {
+ return new RangeError('Invalid range arguments: ' + util.inspect(...args));
+};
+
+const invalidRange = (start, end, options) => {
+ if (options.strictRanges === true) throw rangeError([start, end]);
+ return [];
+};
+
+const invalidStep = (step, options) => {
+ if (options.strictRanges === true) {
+ throw new TypeError(`Expected step "${step}" to be a number`);
+ }
+ return [];
+};
+
+const fillNumbers = (start, end, step = 1, options = {}) => {
+ let a = Number(start);
+ let b = Number(end);
+
+ if (!Number.isInteger(a) || !Number.isInteger(b)) {
+ if (options.strictRanges === true) throw rangeError([start, end]);
+ return [];
+ }
+
+ // fix negative zero
+ if (a === 0) a = 0;
+ if (b === 0) b = 0;
+
+ let descending = a > b;
+ let startString = String(start);
+ let endString = String(end);
+ let stepString = String(step);
+ step = Math.max(Math.abs(step), 1);
+
+ let padded = zeros(startString) || zeros(endString) || zeros(stepString);
+ let maxLen = padded ? Math.max(startString.length, endString.length, stepString.length) : 0;
+ let toNumber = padded === false && stringify(start, end, options) === false;
+ let format = options.transform || transform(toNumber);
+
+ if (options.toRegex && step === 1) {
+ return toRange(toMaxLen(start, maxLen), toMaxLen(end, maxLen), true, options);
+ }
+
+ let parts = { negatives: [], positives: [] };
+ let push = num => parts[num < 0 ? 'negatives' : 'positives'].push(Math.abs(num));
+ let range = [];
+ let index = 0;
+
+ while (descending ? a >= b : a <= b) {
+ if (options.toRegex === true && step > 1) {
+ push(a);
+ } else {
+ range.push(pad(format(a, index), maxLen, toNumber));
+ }
+ a = descending ? a - step : a + step;
+ index++;
+ }
+
+ if (options.toRegex === true) {
+ return step > 1
+ ? toSequence(parts, options)
+ : toRegex(range, null, { wrap: false, ...options });
+ }
+
+ return range;
+};
+
+const fillLetters = (start, end, step = 1, options = {}) => {
+ if ((!isNumber(start) && start.length > 1) || (!isNumber(end) && end.length > 1)) {
+ return invalidRange(start, end, options);
+ }
+
+
+ let format = options.transform || (val => String.fromCharCode(val));
+ let a = `${start}`.charCodeAt(0);
+ let b = `${end}`.charCodeAt(0);
+
+ let descending = a > b;
+ let min = Math.min(a, b);
+ let max = Math.max(a, b);
+
+ if (options.toRegex && step === 1) {
+ return toRange(min, max, false, options);
+ }
+
+ let range = [];
+ let index = 0;
+
+ while (descending ? a >= b : a <= b) {
+ range.push(format(a, index));
+ a = descending ? a - step : a + step;
+ index++;
+ }
+
+ if (options.toRegex === true) {
+ return toRegex(range, null, { wrap: false, options });
+ }
+
+ return range;
+};
+
+const fill = (start, end, step, options = {}) => {
+ if (end == null && isValidValue(start)) {
+ return [start];
+ }
+
+ if (!isValidValue(start) || !isValidValue(end)) {
+ return invalidRange(start, end, options);
+ }
+
+ if (typeof step === 'function') {
+ return fill(start, end, 1, { transform: step });
+ }
+
+ if (isObject(step)) {
+ return fill(start, end, 0, step);
+ }
+
+ let opts = { ...options };
+ if (opts.capture === true) opts.wrap = true;
+ step = step || opts.step || 1;
+
+ if (!isNumber(step)) {
+ if (step != null && !isObject(step)) return invalidStep(step, opts);
+ return fill(start, end, 1, step);
+ }
+
+ if (isNumber(start) && isNumber(end)) {
+ return fillNumbers(start, end, step, opts);
+ }
+
+ return fillLetters(start, end, Math.max(Math.abs(step), 1), opts);
+};
+
+module.exports = fill;
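+
+/**
+ * A few illustrative calls against the `fill-range` implementation above
+ * (values chosen only to show range, padding, step and toRegex behaviour):
+ *
+ * ```js
+ * const fill = require('fill-range');
+ *
+ * fill('1', '5');              //=> ['1', '2', '3', '4', '5']
+ * fill('a', 'e');              //=> ['a', 'b', 'c', 'd', 'e']
+ * fill(0, 10, 2);              //=> [0, 2, 4, 6, 8, 10]
+ * fill('01', '05');            //=> ['01', '02', '03', '04', '05'] (zero-padding is preserved)
+ * fill(1, 3, {toRegex: true}); //=> '[1-3]'
+ * ```
+ */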
+
+
+/***/ }),
+
+/***/ 2840:
+/***/ ((module) => {
+
+"use strict";
+/*!
+ * is-number
+ *
+ * Copyright (c) 2014-present, Jon Schlinkert.
+ * Released under the MIT License.
+ */
+
+
+
+module.exports = function(num) {
+ if (typeof num === 'number') {
+ return num - num === 0;
+ }
+ if (typeof num === 'string' && num.trim() !== '') {
+ return Number.isFinite ? Number.isFinite(+num) : isFinite(+num);
+ }
+ return false;
+};
+
+
+/***/ }),
+
+/***/ 3913:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const util = __nccwpck_require__(1669);
+const braces = __nccwpck_require__(5582);
+const picomatch = __nccwpck_require__(8569);
+const utils = __nccwpck_require__(479);
+const isEmptyString = val => typeof val === 'string' && (val === '' || val === './');
+
+/**
+ * Returns an array of strings that match one or more glob patterns.
+ *
+ * ```js
+ * const mm = require('micromatch');
+ * // mm(list, patterns[, options]);
+ *
+ * console.log(mm(['a.js', 'a.txt'], ['*.js']));
+ * //=> [ 'a.js' ]
+ * ```
+ * @param {String|Array} list List of strings to match.
+ * @param {String|Array} patterns One or more glob patterns to use for matching.
+ * @param {Object} options See available [options](#options)
+ * @return {Array} Returns an array of matches
+ * @summary false
+ * @api public
+ */
+
+const micromatch = (list, patterns, options) => {
+ patterns = [].concat(patterns);
+ list = [].concat(list);
+
+ let omit = new Set();
+ let keep = new Set();
+ let items = new Set();
+ let negatives = 0;
+
+ let onResult = state => {
+ items.add(state.output);
+ if (options && options.onResult) {
+ options.onResult(state);
+ }
+ };
+
+ for (let i = 0; i < patterns.length; i++) {
+ let isMatch = picomatch(String(patterns[i]), { ...options, onResult }, true);
+ let negated = isMatch.state.negated || isMatch.state.negatedExtglob;
+ if (negated) negatives++;
+
+ for (let item of list) {
+ let matched = isMatch(item, true);
+
+ let match = negated ? !matched.isMatch : matched.isMatch;
+ if (!match) continue;
+
+ if (negated) {
+ omit.add(matched.output);
+ } else {
+ omit.delete(matched.output);
+ keep.add(matched.output);
+ }
+ }
+ }
+
+ let result = negatives === patterns.length ? [...items] : [...keep];
+ let matches = result.filter(item => !omit.has(item));
+
+ if (options && matches.length === 0) {
+ if (options.failglob === true) {
+ throw new Error(`No matches found for "${patterns.join(', ')}"`);
+ }
+
+ if (options.nonull === true || options.nullglob === true) {
+ return options.unescape ? patterns.map(p => p.replace(/\\/g, '')) : patterns;
+ }
+ }
+
+ return matches;
+};
+
+/**
+ * Backwards compatibility
+ */
+
+micromatch.match = micromatch;
+
+/**
+ * Returns a matcher function from the given glob `pattern` and `options`.
+ * The returned function takes a string to match as its only argument and returns
+ * true if the string is a match.
+ *
+ * ```js
+ * const mm = require('micromatch');
+ * // mm.matcher(pattern[, options]);
+ *
+ * const isMatch = mm.matcher('*.!(*a)');
+ * console.log(isMatch('a.a')); //=> false
+ * console.log(isMatch('a.b')); //=> true
+ * ```
+ * @param {String} `pattern` Glob pattern
+ * @param {Object} `options`
+ * @return {Function} Returns a matcher function.
+ * @api public
+ */
+
+micromatch.matcher = (pattern, options) => picomatch(pattern, options);
+
+/**
+ * Returns true if **any** of the given glob `patterns` match the specified `string`.
+ *
+ * ```js
+ * const mm = require('micromatch');
+ * // mm.isMatch(string, patterns[, options]);
+ *
+ * console.log(mm.isMatch('a.a', ['b.*', '*.a'])); //=> true
+ * console.log(mm.isMatch('a.a', 'b.*')); //=> false
+ * ```
+ * @param {String} str The string to test.
+ * @param {String|Array} patterns One or more glob patterns to use for matching.
+ * @param {Object} [options] See available [options](#options).
+ * @return {Boolean} Returns true if any patterns match `str`
+ * @api public
+ */
+
+micromatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str);
+
+/**
+ * Backwards compatibility
+ */
+
+micromatch.any = micromatch.isMatch;
+
+/**
+ * Returns a list of strings that _**do not match any**_ of the given `patterns`.
+ *
+ * ```js
+ * const mm = require('micromatch');
+ * // mm.not(list, patterns[, options]);
+ *
+ * console.log(mm.not(['a.a', 'b.b', 'c.c'], '*.a'));
+ * //=> ['b.b', 'c.c']
+ * ```
+ * @param {Array} `list` Array of strings to match.
+ * @param {String|Array} `patterns` One or more glob patterns to use for matching.
+ * @param {Object} `options` See available [options](#options) for changing how matches are performed
+ * @return {Array} Returns an array of strings that **do not match** the given patterns.
+ * @api public
+ */
+
+micromatch.not = (list, patterns, options = {}) => {
+ patterns = [].concat(patterns).map(String);
+ let result = new Set();
+ let items = [];
+
+ let onResult = state => {
+ if (options.onResult) options.onResult(state);
+ items.push(state.output);
+ };
+
+ let matches = micromatch(list, patterns, { ...options, onResult });
+
+ for (let item of items) {
+ if (!matches.includes(item)) {
+ result.add(item);
+ }
+ }
+ return [...result];
+};
+
+/**
+ * Returns true if the given `string` contains the given pattern. Similar
+ * to [.isMatch](#isMatch) but the pattern can match any part of the string.
+ *
+ * ```js
+ * var mm = require('micromatch');
+ * // mm.contains(string, pattern[, options]);
+ *
+ * console.log(mm.contains('aa/bb/cc', '*b'));
+ * //=> true
+ * console.log(mm.contains('aa/bb/cc', '*d'));
+ * //=> false
+ * ```
+ * @param {String} `str` The string to match.
+ * @param {String|Array} `patterns` Glob pattern to use for matching.
+ * @param {Object} `options` See available [options](#options) for changing how matches are performed
+ * @return {Boolean} Returns true if the pattern matches any part of `str`.
+ * @api public
+ */
micromatch.contains = (str, pattern, options) => {
if (typeof str !== 'string') {
throw new TypeError(`Expected a string: "${util.inspect(str)}"`);
}
- if (Array.isArray(pattern)) {
- return pattern.some(p => micromatch.contains(str, p, options));
- }
+ if (Array.isArray(pattern)) {
+ return pattern.some(p => micromatch.contains(str, p, options));
+ }
+
+ if (typeof pattern === 'string') {
+ if (isEmptyString(str) || isEmptyString(pattern)) {
+ return false;
+ }
+
+ if (str.includes(pattern) || (str.startsWith('./') && str.slice(2).includes(pattern))) {
+ return true;
+ }
+ }
+
+ return micromatch.isMatch(str, pattern, { ...options, contains: true });
+};
+
+/**
+ * Filter the keys of the given object with the given `glob` pattern
+ * and `options`. Does not attempt to match nested keys. If you need this feature,
+ * use [glob-object][] instead.
+ *
+ * ```js
+ * const mm = require('micromatch');
+ * // mm.matchKeys(object, patterns[, options]);
+ *
+ * const obj = { aa: 'a', ab: 'b', ac: 'c' };
+ * console.log(mm.matchKeys(obj, '*b'));
+ * //=> { ab: 'b' }
+ * ```
+ * @param {Object} `object` The object with keys to filter.
+ * @param {String|Array} `patterns` One or more glob patterns to use for matching.
+ * @param {Object} `options` See available [options](#options) for changing how matches are performed
+ * @return {Object} Returns an object with only keys that match the given patterns.
+ * @api public
+ */
+
+micromatch.matchKeys = (obj, patterns, options) => {
+ if (!utils.isObject(obj)) {
+ throw new TypeError('Expected the first argument to be an object');
+ }
+ let keys = micromatch(Object.keys(obj), patterns, options);
+ let res = {};
+ for (let key of keys) res[key] = obj[key];
+ return res;
+};
+
+/**
+ * Returns true if some of the strings in the given `list` match any of the given glob `patterns`.
+ *
+ * ```js
+ * const mm = require('micromatch');
+ * // mm.some(list, patterns[, options]);
+ *
+ * console.log(mm.some(['foo.js', 'bar.js'], ['*.js', '!foo.js']));
+ * // true
+ * console.log(mm.some(['foo.js'], ['*.js', '!foo.js']));
+ * // false
+ * ```
+ * @param {String|Array} `list` The string or array of strings to test. Returns as soon as the first match is found.
+ * @param {String|Array} `patterns` One or more glob patterns to use for matching.
+ * @param {Object} `options` See available [options](#options) for changing how matches are performed
+ * @return {Boolean} Returns true if any of the patterns matches at least one item in `list`
+ * @api public
+ */
+
+micromatch.some = (list, patterns, options) => {
+ let items = [].concat(list);
+
+ for (let pattern of [].concat(patterns)) {
+ let isMatch = picomatch(String(pattern), options);
+ if (items.some(item => isMatch(item))) {
+ return true;
+ }
+ }
+ return false;
+};
+
+/**
+ * Returns true if every string in the given `list` matches
+ * all of the given glob `patterns`.
+ *
+ * ```js
+ * const mm = require('micromatch');
+ * // mm.every(list, patterns[, options]);
+ *
+ * console.log(mm.every('foo.js', ['foo.js']));
+ * // true
+ * console.log(mm.every(['foo.js', 'bar.js'], ['*.js']));
+ * // true
+ * console.log(mm.every(['foo.js', 'bar.js'], ['*.js', '!foo.js']));
+ * // false
+ * console.log(mm.every(['foo.js'], ['*.js', '!foo.js']));
+ * // false
+ * ```
+ * @param {String|Array} `list` The string or array of strings to test.
+ * @param {String|Array} `patterns` One or more glob patterns to use for matching.
+ * @param {Object} `options` See available [options](#options) for changing how matches are performed
+ * @return {Boolean} Returns true if every item in `list` matches all of the patterns
+ * @api public
+ */
+
+micromatch.every = (list, patterns, options) => {
+ let items = [].concat(list);
+
+ for (let pattern of [].concat(patterns)) {
+ let isMatch = picomatch(String(pattern), options);
+ if (!items.every(item => isMatch(item))) {
+ return false;
+ }
+ }
+ return true;
+};
+
+/**
+ * Returns true if **all** of the given `patterns` match
+ * the specified string.
+ *
+ * ```js
+ * const mm = require('micromatch');
+ * // mm.all(string, patterns[, options]);
+ *
+ * console.log(mm.all('foo.js', ['foo.js']));
+ * // true
+ *
+ * console.log(mm.all('foo.js', ['*.js', '!foo.js']));
+ * // false
+ *
+ * console.log(mm.all('foo.js', ['*.js', 'foo.js']));
+ * // true
+ *
+ * console.log(mm.all('foo.js', ['*.js', 'f*', '*o*', '*o.js']));
+ * // true
+ * ```
+ * @param {String|Array} `str` The string to test.
+ * @param {String|Array} `patterns` One or more glob patterns to use for matching.
+ * @param {Object} `options` See available [options](#options) for changing how matches are performed
+ * @return {Boolean} Returns true if all of the patterns match `str`
+ * @api public
+ */
+
+micromatch.all = (str, patterns, options) => {
+ if (typeof str !== 'string') {
+ throw new TypeError(`Expected a string: "${util.inspect(str)}"`);
+ }
+
+ return [].concat(patterns).every(p => picomatch(p, options)(str));
+};
+
+/**
+ * Returns an array of matches captured by `pattern` in `string`, or `null` if the pattern did not match.
+ *
+ * ```js
+ * const mm = require('micromatch');
+ * // mm.capture(pattern, string[, options]);
+ *
+ * console.log(mm.capture('test/*.js', 'test/foo.js'));
+ * //=> ['foo']
+ * console.log(mm.capture('test/*.js', 'foo/bar.css'));
+ * //=> null
+ * ```
+ * @param {String} `glob` Glob pattern to use for matching.
+ * @param {String} `input` String to match
+ * @param {Object} `options` See available [options](#options) for changing how matches are performed
+ * @return {Array|null} Returns an array of captures if the input matches the glob pattern, otherwise `null`.
+ * @api public
+ */
+
+micromatch.capture = (glob, input, options) => {
+ let posix = utils.isWindows(options);
+ let regex = picomatch.makeRe(String(glob), { ...options, capture: true });
+ let match = regex.exec(posix ? utils.toPosixSlashes(input) : input);
+
+ if (match) {
+ return match.slice(1).map(v => v === void 0 ? '' : v);
+ }
+};
+
+/**
+ * Create a regular expression from the given glob `pattern`.
+ *
+ * ```js
+ * const mm = require('micromatch');
+ * // mm.makeRe(pattern[, options]);
+ *
+ * console.log(mm.makeRe('*.js'));
+ * //=> /^(?:(\.[\\\/])?(?!\.)(?=.)[^\/]*?\.js)$/
+ * ```
+ * @param {String} `pattern` A glob pattern to convert to regex.
+ * @param {Object} `options`
+ * @return {RegExp} Returns a regex created from the given pattern.
+ * @api public
+ */
+
+micromatch.makeRe = (...args) => picomatch.makeRe(...args);
+
+/**
+ * Scan a glob pattern to separate the pattern into segments. Used
+ * by the [split](#split) method.
+ *
+ * ```js
+ * const mm = require('micromatch');
+ * const state = mm.scan(pattern[, options]);
+ * ```
+ * @param {String} `pattern`
+ * @param {Object} `options`
+ * @return {Object} Returns an object with details about the pattern, such as `base`, `glob` and `negated`.
+ * @api public
+ */
+
+micromatch.scan = (...args) => picomatch.scan(...args);
+
+/**
+ * Parse a glob pattern to create the source string for a regular
+ * expression.
+ *
+ * ```js
+ * const mm = require('micromatch');
+ * const state = mm(pattern[, options]);
+ * ```
+ * @param {String} `glob`
+ * @param {Object} `options`
+ * @return {Object} Returns an object with useful properties and output to be used as regex source string.
+ * @api public
+ */
+
+micromatch.parse = (patterns, options) => {
+ let res = [];
+ for (let pattern of [].concat(patterns || [])) {
+ for (let str of braces(String(pattern), options)) {
+ res.push(picomatch.parse(str, options));
+ }
+ }
+ return res;
+};
+
+/**
+ * Process the given brace `pattern`.
+ *
+ * ```js
+ * const { braces } = require('micromatch');
+ * console.log(braces('foo/{a,b,c}/bar'));
+ * //=> [ 'foo/(a|b|c)/bar' ]
+ *
+ * console.log(braces('foo/{a,b,c}/bar', { expand: true }));
+ * //=> [ 'foo/a/bar', 'foo/b/bar', 'foo/c/bar' ]
+ * ```
+ * @param {String} `pattern` String with brace pattern to process.
+ * @param {Object} `options` Any [options](#options) to change how expansion is performed. See the [braces][] library for all available options.
+ * @return {Array}
+ * @api public
+ */
+
+micromatch.braces = (pattern, options) => {
+ if (typeof pattern !== 'string') throw new TypeError('Expected a string');
+ if ((options && options.nobrace === true) || !/\{.*\}/.test(pattern)) {
+ return [pattern];
+ }
+ return braces(pattern, options);
+};
+
+/**
+ * Expand braces
+ */
+
+micromatch.braceExpand = (pattern, options) => {
+ if (typeof pattern !== 'string') throw new TypeError('Expected a string');
+ return micromatch.braces(pattern, { ...options, expand: true });
+};
+
+/**
+ * Expose micromatch
+ */
+
+module.exports = micromatch;
+
+
+/***/ }),
+
+/***/ 6867:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+/*!
+ * to-regex-range
+ *
+ * Copyright (c) 2015-present, Jon Schlinkert.
+ * Released under the MIT License.
+ */
+
+
+
+const isNumber = __nccwpck_require__(2840);
+
+const toRegexRange = (min, max, options) => {
+ if (isNumber(min) === false) {
+ throw new TypeError('toRegexRange: expected the first argument to be a number');
+ }
+
+ if (max === void 0 || min === max) {
+ return String(min);
+ }
+
+ if (isNumber(max) === false) {
+ throw new TypeError('toRegexRange: expected the second argument to be a number.');
+ }
+
+ let opts = { relaxZeros: true, ...options };
+ if (typeof opts.strictZeros === 'boolean') {
+ opts.relaxZeros = opts.strictZeros === false;
+ }
+
+ let relax = String(opts.relaxZeros);
+ let shorthand = String(opts.shorthand);
+ let capture = String(opts.capture);
+ let wrap = String(opts.wrap);
+ let cacheKey = min + ':' + max + '=' + relax + shorthand + capture + wrap;
+
+ if (toRegexRange.cache.hasOwnProperty(cacheKey)) {
+ return toRegexRange.cache[cacheKey].result;
+ }
+
+ let a = Math.min(min, max);
+ let b = Math.max(min, max);
+
+ if (Math.abs(a - b) === 1) {
+ let result = min + '|' + max;
+ if (opts.capture) {
+ return `(${result})`;
+ }
+ if (opts.wrap === false) {
+ return result;
+ }
+ return `(?:${result})`;
+ }
+
+ let isPadded = hasPadding(min) || hasPadding(max);
+ let state = { min, max, a, b };
+ let positives = [];
+ let negatives = [];
+
+ if (isPadded) {
+ state.isPadded = isPadded;
+ state.maxLen = String(state.max).length;
+ }
+
+ if (a < 0) {
+ let newMin = b < 0 ? Math.abs(b) : 1;
+ negatives = splitToPatterns(newMin, Math.abs(a), state, opts);
+ a = state.a = 0;
+ }
+
+ if (b >= 0) {
+ positives = splitToPatterns(a, b, state, opts);
+ }
+
+ state.negatives = negatives;
+ state.positives = positives;
+ state.result = collatePatterns(negatives, positives, opts);
+
+ if (opts.capture === true) {
+ state.result = `(${state.result})`;
+ } else if (opts.wrap !== false && (positives.length + negatives.length) > 1) {
+ state.result = `(?:${state.result})`;
+ }
+
+ toRegexRange.cache[cacheKey] = state;
+ return state.result;
+};
+
+function collatePatterns(neg, pos, options) {
+ let onlyNegative = filterPatterns(neg, pos, '-', false, options) || [];
+ let onlyPositive = filterPatterns(pos, neg, '', false, options) || [];
+ let intersected = filterPatterns(neg, pos, '-?', true, options) || [];
+ let subpatterns = onlyNegative.concat(intersected).concat(onlyPositive);
+ return subpatterns.join('|');
+}
+
+function splitToRanges(min, max) {
+ let nines = 1;
+ let zeros = 1;
+
+ let stop = countNines(min, nines);
+ let stops = new Set([max]);
+
+ while (min <= stop && stop <= max) {
+ stops.add(stop);
+ nines += 1;
+ stop = countNines(min, nines);
+ }
+
+ stop = countZeros(max + 1, zeros) - 1;
+
+ while (min < stop && stop <= max) {
+ stops.add(stop);
+ zeros += 1;
+ stop = countZeros(max + 1, zeros) - 1;
+ }
+
+ stops = [...stops];
+ stops.sort(compare);
+ return stops;
+}
+
+/**
+ * Convert a range to a regex pattern
+ * @param {Number} `start`
+ * @param {Number} `stop`
+ * @return {String}
+ */
+
+function rangeToPattern(start, stop, options) {
+ if (start === stop) {
+ return { pattern: start, count: [], digits: 0 };
+ }
+
+ let zipped = zip(start, stop);
+ let digits = zipped.length;
+ let pattern = '';
+ let count = 0;
+
+ for (let i = 0; i < digits; i++) {
+ let [startDigit, stopDigit] = zipped[i];
+
+ if (startDigit === stopDigit) {
+ pattern += startDigit;
+
+ } else if (startDigit !== '0' || stopDigit !== '9') {
+ pattern += toCharacterClass(startDigit, stopDigit, options);
+
+ } else {
+ count++;
+ }
+ }
+
+ if (count) {
+ pattern += options.shorthand === true ? '\\d' : '[0-9]';
+ }
+
+ return { pattern, count: [count], digits };
+}
+
+function splitToPatterns(min, max, tok, options) {
+ let ranges = splitToRanges(min, max);
+ let tokens = [];
+ let start = min;
+ let prev;
+
+ for (let i = 0; i < ranges.length; i++) {
+ let max = ranges[i];
+ let obj = rangeToPattern(String(start), String(max), options);
+ let zeros = '';
+
+ if (!tok.isPadded && prev && prev.pattern === obj.pattern) {
+ if (prev.count.length > 1) {
+ prev.count.pop();
+ }
+
+ prev.count.push(obj.count[0]);
+ prev.string = prev.pattern + toQuantifier(prev.count);
+ start = max + 1;
+ continue;
+ }
+
+ if (tok.isPadded) {
+ zeros = padZeros(max, tok, options);
+ }
+
+ obj.string = zeros + obj.pattern + toQuantifier(obj.count);
+ tokens.push(obj);
+ start = max + 1;
+ prev = obj;
+ }
+
+ return tokens;
+}
+
+function filterPatterns(arr, comparison, prefix, intersection, options) {
+ let result = [];
+
+ for (let ele of arr) {
+ let { string } = ele;
+
+ // only push if _both_ are negative...
+ if (!intersection && !contains(comparison, 'string', string)) {
+ result.push(prefix + string);
+ }
+
+ // or _both_ are positive
+ if (intersection && contains(comparison, 'string', string)) {
+ result.push(prefix + string);
+ }
+ }
+ return result;
+}
+
+/**
+ * Zip strings
+ */
+
+function zip(a, b) {
+ let arr = [];
+ for (let i = 0; i < a.length; i++) arr.push([a[i], b[i]]);
+ return arr;
+}
+
+function compare(a, b) {
+ return a > b ? 1 : b > a ? -1 : 0;
+}
+
+function contains(arr, key, val) {
+ return arr.some(ele => ele[key] === val);
+}
+
+function countNines(min, len) {
+ return Number(String(min).slice(0, -len) + '9'.repeat(len));
+}
+
+function countZeros(integer, zeros) {
+ return integer - (integer % Math.pow(10, zeros));
+}
+
+function toQuantifier(digits) {
+ let [start = 0, stop = ''] = digits;
+ if (stop || start > 1) {
+ return `{${start + (stop ? ',' + stop : '')}}`;
+ }
+ return '';
+}
+
+function toCharacterClass(a, b, options) {
+ return `[${a}${(b - a === 1) ? '' : '-'}${b}]`;
+}
+
+function hasPadding(str) {
+ return /^-?(0+)\d/.test(str);
+}
+
+function padZeros(value, tok, options) {
+ if (!tok.isPadded) {
+ return value;
+ }
+
+ let diff = Math.abs(tok.maxLen - String(value).length);
+ let relax = options.relaxZeros !== false;
+
+ switch (diff) {
+ case 0:
+ return '';
+ case 1:
+ return relax ? '0?' : '0';
+ case 2:
+ return relax ? '0{0,2}' : '00';
+ default: {
+ return relax ? `0{0,${diff}}` : `0{${diff}}`;
+ }
+ }
+}
+
+/**
+ * Cache
+ */
+
+toRegexRange.cache = {};
+toRegexRange.clearCache = () => (toRegexRange.cache = {});
+
+/**
+ * Expose `toRegexRange`
+ */
+
+module.exports = toRegexRange;
+
+
+/***/ }),
+
+/***/ 3664:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+const taskManager = __nccwpck_require__(2708);
+const async_1 = __nccwpck_require__(5679);
+const stream_1 = __nccwpck_require__(4630);
+const sync_1 = __nccwpck_require__(2405);
+const settings_1 = __nccwpck_require__(952);
+const utils = __nccwpck_require__(5444);
+async function FastGlob(source, options) {
+ assertPatternsInput(source);
+ const works = getWorks(source, async_1.default, options);
+ const result = await Promise.all(works);
+ return utils.array.flatten(result);
+}
+// https://github.com/typescript-eslint/typescript-eslint/issues/60
+// eslint-disable-next-line no-redeclare
+(function (FastGlob) {
+ function sync(source, options) {
+ assertPatternsInput(source);
+ const works = getWorks(source, sync_1.default, options);
+ return utils.array.flatten(works);
+ }
+ FastGlob.sync = sync;
+ function stream(source, options) {
+ assertPatternsInput(source);
+ const works = getWorks(source, stream_1.default, options);
+ /**
+ * The stream returned by the provider cannot work with an asynchronous iterator.
+ * To support asynchronous iterators, regardless of the number of tasks, we always multiplex streams.
+         * This affects performance (+25%). I don't see a better solution right now.
+ */
+ return utils.stream.merge(works);
+ }
+ FastGlob.stream = stream;
+ function generateTasks(source, options) {
+ assertPatternsInput(source);
+ const patterns = [].concat(source);
+ const settings = new settings_1.default(options);
+ return taskManager.generate(patterns, settings);
+ }
+ FastGlob.generateTasks = generateTasks;
+ function isDynamicPattern(source, options) {
+ assertPatternsInput(source);
+ const settings = new settings_1.default(options);
+ return utils.pattern.isDynamicPattern(source, settings);
+ }
+ FastGlob.isDynamicPattern = isDynamicPattern;
+ function escapePath(source) {
+ assertPatternsInput(source);
+ return utils.path.escape(source);
+ }
+ FastGlob.escapePath = escapePath;
+})(FastGlob || (FastGlob = {}));
+function getWorks(source, _Provider, options) {
+ const patterns = [].concat(source);
+ const settings = new settings_1.default(options);
+ const tasks = taskManager.generate(patterns, settings);
+ const provider = new _Provider(settings);
+ return tasks.map(provider.read, provider);
+}
+function assertPatternsInput(input) {
+ const source = [].concat(input);
+ const isValidSource = source.every((item) => utils.string.isString(item) && !utils.string.isEmpty(item));
+ if (!isValidSource) {
+ throw new TypeError('Patterns must be a string (non empty) or an array of strings');
+ }
+}
+module.exports = FastGlob;
+
+
+/***/ }),
+
+/***/ 2708:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.convertPatternGroupToTask = exports.convertPatternGroupsToTasks = exports.groupPatternsByBaseDirectory = exports.getNegativePatternsAsPositive = exports.getPositivePatterns = exports.convertPatternsToTasks = exports.generate = void 0;
+const utils = __nccwpck_require__(5444);
+function generate(patterns, settings) {
+ const positivePatterns = getPositivePatterns(patterns);
+ const negativePatterns = getNegativePatternsAsPositive(patterns, settings.ignore);
+ const staticPatterns = positivePatterns.filter((pattern) => utils.pattern.isStaticPattern(pattern, settings));
+ const dynamicPatterns = positivePatterns.filter((pattern) => utils.pattern.isDynamicPattern(pattern, settings));
+ const staticTasks = convertPatternsToTasks(staticPatterns, negativePatterns, /* dynamic */ false);
+ const dynamicTasks = convertPatternsToTasks(dynamicPatterns, negativePatterns, /* dynamic */ true);
+ return staticTasks.concat(dynamicTasks);
+}
+exports.generate = generate;
+function convertPatternsToTasks(positive, negative, dynamic) {
+ const positivePatternsGroup = groupPatternsByBaseDirectory(positive);
+    // When we have a global group – there is no reason to divide the patterns into independent tasks.
+ // In this case, the global task covers the rest.
+ if ('.' in positivePatternsGroup) {
+ const task = convertPatternGroupToTask('.', positive, negative, dynamic);
+ return [task];
+ }
+ return convertPatternGroupsToTasks(positivePatternsGroup, negative, dynamic);
+}
+exports.convertPatternsToTasks = convertPatternsToTasks;
+function getPositivePatterns(patterns) {
+ return utils.pattern.getPositivePatterns(patterns);
+}
+exports.getPositivePatterns = getPositivePatterns;
+function getNegativePatternsAsPositive(patterns, ignore) {
+ const negative = utils.pattern.getNegativePatterns(patterns).concat(ignore);
+ const positive = negative.map(utils.pattern.convertToPositivePattern);
+ return positive;
+}
+exports.getNegativePatternsAsPositive = getNegativePatternsAsPositive;
+function groupPatternsByBaseDirectory(patterns) {
+ const group = {};
+ return patterns.reduce((collection, pattern) => {
+ const base = utils.pattern.getBaseDirectory(pattern);
+ if (base in collection) {
+ collection[base].push(pattern);
+ }
+ else {
+ collection[base] = [pattern];
+ }
+ return collection;
+ }, group);
+}
+exports.groupPatternsByBaseDirectory = groupPatternsByBaseDirectory;
+function convertPatternGroupsToTasks(positive, negative, dynamic) {
+ return Object.keys(positive).map((base) => {
+ return convertPatternGroupToTask(base, positive[base], negative, dynamic);
+ });
+}
+exports.convertPatternGroupsToTasks = convertPatternGroupsToTasks;
+function convertPatternGroupToTask(base, positive, negative, dynamic) {
+ return {
+ dynamic,
+ positive,
+ negative,
+ base,
+ patterns: [].concat(positive, negative.map(utils.pattern.convertToNegativePattern))
+ };
+}
+exports.convertPatternGroupToTask = convertPatternGroupToTask;
+
+
+/***/ }),
+
+/***/ 5679:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const stream_1 = __nccwpck_require__(2083);
+const provider_1 = __nccwpck_require__(257);
+class ProviderAsync extends provider_1.default {
+ constructor() {
+ super(...arguments);
+ this._reader = new stream_1.default(this._settings);
+ }
+ read(task) {
+ const root = this._getRootDirectory(task);
+ const options = this._getReaderOptions(task);
+ const entries = [];
+ return new Promise((resolve, reject) => {
+ const stream = this.api(root, task, options);
+ stream.once('error', reject);
+ stream.on('data', (entry) => entries.push(options.transform(entry)));
+ stream.once('end', () => resolve(entries));
+ });
+ }
+ api(root, task, options) {
+ if (task.dynamic) {
+ return this._reader.dynamic(root, options);
+ }
+ return this._reader.static(task.patterns, options);
+ }
+}
+exports.default = ProviderAsync;
+
+
+/***/ }),
+
+/***/ 6983:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const utils = __nccwpck_require__(5444);
+const partial_1 = __nccwpck_require__(5295);
+class DeepFilter {
+ constructor(_settings, _micromatchOptions) {
+ this._settings = _settings;
+ this._micromatchOptions = _micromatchOptions;
+ }
+ getFilter(basePath, positive, negative) {
+ const matcher = this._getMatcher(positive);
+ const negativeRe = this._getNegativePatternsRe(negative);
+ return (entry) => this._filter(basePath, entry, matcher, negativeRe);
+ }
+ _getMatcher(patterns) {
+ return new partial_1.default(patterns, this._settings, this._micromatchOptions);
+ }
+ _getNegativePatternsRe(patterns) {
+ const affectDepthOfReadingPatterns = patterns.filter(utils.pattern.isAffectDepthOfReadingPattern);
+ return utils.pattern.convertPatternsToRe(affectDepthOfReadingPatterns, this._micromatchOptions);
+ }
+ _filter(basePath, entry, matcher, negativeRe) {
+ if (this._isSkippedByDeep(basePath, entry.path)) {
+ return false;
+ }
+ if (this._isSkippedSymbolicLink(entry)) {
+ return false;
+ }
+ const filepath = utils.path.removeLeadingDotSegment(entry.path);
+ if (this._isSkippedByPositivePatterns(filepath, matcher)) {
+ return false;
+ }
+ return this._isSkippedByNegativePatterns(filepath, negativeRe);
+ }
+ _isSkippedByDeep(basePath, entryPath) {
+ /**
+ * Avoid unnecessary depth calculations when it doesn't matter.
+ */
+ if (this._settings.deep === Infinity) {
+ return false;
+ }
+ return this._getEntryLevel(basePath, entryPath) >= this._settings.deep;
+ }
+ _getEntryLevel(basePath, entryPath) {
+ const entryPathDepth = entryPath.split('/').length;
+ if (basePath === '') {
+ return entryPathDepth;
+ }
+ const basePathDepth = basePath.split('/').length;
+ return entryPathDepth - basePathDepth;
+ }
+ _isSkippedSymbolicLink(entry) {
+ return !this._settings.followSymbolicLinks && entry.dirent.isSymbolicLink();
+ }
+ _isSkippedByPositivePatterns(entryPath, matcher) {
+ return !this._settings.baseNameMatch && !matcher.match(entryPath);
+ }
+ _isSkippedByNegativePatterns(entryPath, patternsRe) {
+ return !utils.pattern.matchAny(entryPath, patternsRe);
+ }
+}
+exports.default = DeepFilter;
+
+
+/***/ }),
+
+/***/ 1343:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const utils = __nccwpck_require__(5444);
+class EntryFilter {
+ constructor(_settings, _micromatchOptions) {
+ this._settings = _settings;
+ this._micromatchOptions = _micromatchOptions;
+ this.index = new Map();
+ }
+ getFilter(positive, negative) {
+ const positiveRe = utils.pattern.convertPatternsToRe(positive, this._micromatchOptions);
+ const negativeRe = utils.pattern.convertPatternsToRe(negative, this._micromatchOptions);
+ return (entry) => this._filter(entry, positiveRe, negativeRe);
+ }
+ _filter(entry, positiveRe, negativeRe) {
+ if (this._settings.unique && this._isDuplicateEntry(entry)) {
+ return false;
+ }
+ if (this._onlyFileFilter(entry) || this._onlyDirectoryFilter(entry)) {
+ return false;
+ }
+ if (this._isSkippedByAbsoluteNegativePatterns(entry.path, negativeRe)) {
+ return false;
+ }
+ const filepath = this._settings.baseNameMatch ? entry.name : entry.path;
+ const isMatched = this._isMatchToPatterns(filepath, positiveRe) && !this._isMatchToPatterns(entry.path, negativeRe);
+ if (this._settings.unique && isMatched) {
+ this._createIndexRecord(entry);
+ }
+ return isMatched;
+ }
+ _isDuplicateEntry(entry) {
+ return this.index.has(entry.path);
+ }
+ _createIndexRecord(entry) {
+ this.index.set(entry.path, undefined);
+ }
+ _onlyFileFilter(entry) {
+ return this._settings.onlyFiles && !entry.dirent.isFile();
+ }
+ _onlyDirectoryFilter(entry) {
+ return this._settings.onlyDirectories && !entry.dirent.isDirectory();
+ }
+ _isSkippedByAbsoluteNegativePatterns(entryPath, patternsRe) {
+ if (!this._settings.absolute) {
+ return false;
+ }
+ const fullpath = utils.path.makeAbsolute(this._settings.cwd, entryPath);
+ return utils.pattern.matchAny(fullpath, patternsRe);
+ }
+ _isMatchToPatterns(entryPath, patternsRe) {
+ const filepath = utils.path.removeLeadingDotSegment(entryPath);
+ return utils.pattern.matchAny(filepath, patternsRe);
+ }
+}
+exports.default = EntryFilter;
+
+
+/***/ }),
+
+/***/ 6654:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const utils = __nccwpck_require__(5444);
+class ErrorFilter {
+ constructor(_settings) {
+ this._settings = _settings;
+ }
+ getFilter() {
+ return (error) => this._isNonFatalError(error);
+ }
+ _isNonFatalError(error) {
+ return utils.errno.isEnoentCodeError(error) || this._settings.suppressErrors;
+ }
+}
+exports.default = ErrorFilter;
+
+
+/***/ }),
+
+/***/ 2576:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const utils = __nccwpck_require__(5444);
+class Matcher {
+ constructor(_patterns, _settings, _micromatchOptions) {
+ this._patterns = _patterns;
+ this._settings = _settings;
+ this._micromatchOptions = _micromatchOptions;
+ this._storage = [];
+ this._fillStorage();
+ }
+ _fillStorage() {
+ /**
+ * The original pattern may include `{,*,**,a/*}`, which will lead to problems with matching (unresolved level).
+         * So, before matching, expand patterns with brace expansion into separate patterns.
+ */
+ const patterns = utils.pattern.expandPatternsWithBraceExpansion(this._patterns);
+ for (const pattern of patterns) {
+ const segments = this._getPatternSegments(pattern);
+ const sections = this._splitSegmentsIntoSections(segments);
+ this._storage.push({
+ complete: sections.length <= 1,
+ pattern,
+ segments,
+ sections
+ });
+ }
+ }
+ _getPatternSegments(pattern) {
+ const parts = utils.pattern.getPatternParts(pattern, this._micromatchOptions);
+ return parts.map((part) => {
+ const dynamic = utils.pattern.isDynamicPattern(part, this._settings);
+ if (!dynamic) {
+ return {
+ dynamic: false,
+ pattern: part
+ };
+ }
+ return {
+ dynamic: true,
+ pattern: part,
+ patternRe: utils.pattern.makeRe(part, this._micromatchOptions)
+ };
+ });
+ }
+ _splitSegmentsIntoSections(segments) {
+ return utils.array.splitWhen(segments, (segment) => segment.dynamic && utils.pattern.hasGlobStar(segment.pattern));
+ }
+}
+exports.default = Matcher;
+
+
+/***/ }),
+
+/***/ 5295:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const matcher_1 = __nccwpck_require__(2576);
+class PartialMatcher extends matcher_1.default {
+ match(filepath) {
+ const parts = filepath.split('/');
+ const levels = parts.length;
+ const patterns = this._storage.filter((info) => !info.complete || info.segments.length > levels);
+ for (const pattern of patterns) {
+ const section = pattern.sections[0];
+ /**
+ * In this case, the pattern has a globstar and we must read all directories unconditionally,
+ * but only if the level has reached the end of the first group.
+ *
+ * fixtures/{a,b}/**
+ * ^ true/false ^ always true
+ */
+ if (!pattern.complete && levels > section.length) {
+ return true;
+ }
+ const match = parts.every((part, index) => {
+ const segment = pattern.segments[index];
+ if (segment.dynamic && segment.patternRe.test(part)) {
+ return true;
+ }
+ if (!segment.dynamic && segment.pattern === part) {
+ return true;
+ }
+ return false;
+ });
+ if (match) {
+ return true;
+ }
+ }
+ return false;
+ }
+}
+exports.default = PartialMatcher;
+
+
+/***/ }),
+
+/***/ 257:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const path = __nccwpck_require__(5622);
+const deep_1 = __nccwpck_require__(6983);
+const entry_1 = __nccwpck_require__(1343);
+const error_1 = __nccwpck_require__(6654);
+const entry_2 = __nccwpck_require__(4029);
+class Provider {
+ constructor(_settings) {
+ this._settings = _settings;
+ this.errorFilter = new error_1.default(this._settings);
+ this.entryFilter = new entry_1.default(this._settings, this._getMicromatchOptions());
+ this.deepFilter = new deep_1.default(this._settings, this._getMicromatchOptions());
+ this.entryTransformer = new entry_2.default(this._settings);
+ }
+ _getRootDirectory(task) {
+ return path.resolve(this._settings.cwd, task.base);
+ }
+ _getReaderOptions(task) {
+ const basePath = task.base === '.' ? '' : task.base;
+ return {
+ basePath,
+ pathSegmentSeparator: '/',
+ concurrency: this._settings.concurrency,
+ deepFilter: this.deepFilter.getFilter(basePath, task.positive, task.negative),
+ entryFilter: this.entryFilter.getFilter(task.positive, task.negative),
+ errorFilter: this.errorFilter.getFilter(),
+ followSymbolicLinks: this._settings.followSymbolicLinks,
+ fs: this._settings.fs,
+ stats: this._settings.stats,
+ throwErrorOnBrokenSymbolicLink: this._settings.throwErrorOnBrokenSymbolicLink,
+ transform: this.entryTransformer.getTransformer()
+ };
+ }
+ _getMicromatchOptions() {
+ return {
+ dot: this._settings.dot,
+ matchBase: this._settings.baseNameMatch,
+ nobrace: !this._settings.braceExpansion,
+ nocase: !this._settings.caseSensitiveMatch,
+ noext: !this._settings.extglob,
+ noglobstar: !this._settings.globstar,
+ posix: true,
+ strictSlashes: false
+ };
+ }
+}
+exports.default = Provider;
+
+
+/***/ }),
+
+/***/ 4630:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const stream_1 = __nccwpck_require__(2413);
+const stream_2 = __nccwpck_require__(2083);
+const provider_1 = __nccwpck_require__(257);
+class ProviderStream extends provider_1.default {
+ constructor() {
+ super(...arguments);
+ this._reader = new stream_2.default(this._settings);
+ }
+ read(task) {
+ const root = this._getRootDirectory(task);
+ const options = this._getReaderOptions(task);
+ const source = this.api(root, task, options);
+ const destination = new stream_1.Readable({ objectMode: true, read: () => { } });
+ source
+ .once('error', (error) => destination.emit('error', error))
+ .on('data', (entry) => destination.emit('data', options.transform(entry)))
+ .once('end', () => destination.emit('end'));
+ destination
+ .once('close', () => source.destroy());
+ return destination;
+ }
+ api(root, task, options) {
+ if (task.dynamic) {
+ return this._reader.dynamic(root, options);
+ }
+ return this._reader.static(task.patterns, options);
+ }
+}
+exports.default = ProviderStream;
+
+
+/***/ }),
+
+/***/ 2405:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const sync_1 = __nccwpck_require__(8821);
+const provider_1 = __nccwpck_require__(257);
+class ProviderSync extends provider_1.default {
+ constructor() {
+ super(...arguments);
+ this._reader = new sync_1.default(this._settings);
+ }
+ read(task) {
+ const root = this._getRootDirectory(task);
+ const options = this._getReaderOptions(task);
+ const entries = this.api(root, task, options);
+ return entries.map(options.transform);
+ }
+ api(root, task, options) {
+ if (task.dynamic) {
+ return this._reader.dynamic(root, options);
+ }
+ return this._reader.static(task.patterns, options);
+ }
+}
+exports.default = ProviderSync;
+
+
+/***/ }),
+
+/***/ 4029:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const utils = __nccwpck_require__(5444);
+class EntryTransformer {
+ constructor(_settings) {
+ this._settings = _settings;
+ }
+ getTransformer() {
+ return (entry) => this._transform(entry);
+ }
+ _transform(entry) {
+ let filepath = entry.path;
+ if (this._settings.absolute) {
+ filepath = utils.path.makeAbsolute(this._settings.cwd, filepath);
+ filepath = utils.path.unixify(filepath);
+ }
+ if (this._settings.markDirectories && entry.dirent.isDirectory()) {
+ filepath += '/';
+ }
+ if (!this._settings.objectMode) {
+ return filepath;
+ }
+ return Object.assign(Object.assign({}, entry), { path: filepath });
+ }
+}
+exports.default = EntryTransformer;
+
+
+/***/ }),
+
+/***/ 8062:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const path = __nccwpck_require__(5622);
+const fsStat = __nccwpck_require__(109);
+const utils = __nccwpck_require__(5444);
+class Reader {
+ constructor(_settings) {
+ this._settings = _settings;
+ this._fsStatSettings = new fsStat.Settings({
+ followSymbolicLink: this._settings.followSymbolicLinks,
+ fs: this._settings.fs,
+ throwErrorOnBrokenSymbolicLink: this._settings.followSymbolicLinks
+ });
+ }
+ _getFullEntryPath(filepath) {
+ return path.resolve(this._settings.cwd, filepath);
+ }
+ _makeEntry(stats, pattern) {
+ const entry = {
+ name: pattern,
+ path: pattern,
+ dirent: utils.fs.createDirentFromStats(pattern, stats)
+ };
+ if (this._settings.stats) {
+ entry.stats = stats;
+ }
+ return entry;
+ }
+ _isFatalError(error) {
+ return !utils.errno.isEnoentCodeError(error) && !this._settings.suppressErrors;
+ }
+}
+exports.default = Reader;
+
+
+/***/ }),
+
+/***/ 2083:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const stream_1 = __nccwpck_require__(2413);
+const fsStat = __nccwpck_require__(109);
+const fsWalk = __nccwpck_require__(6026);
+const reader_1 = __nccwpck_require__(8062);
+class ReaderStream extends reader_1.default {
+ constructor() {
+ super(...arguments);
+ this._walkStream = fsWalk.walkStream;
+ this._stat = fsStat.stat;
+ }
+ dynamic(root, options) {
+ return this._walkStream(root, options);
+ }
+ static(patterns, options) {
+ const filepaths = patterns.map(this._getFullEntryPath, this);
+ const stream = new stream_1.PassThrough({ objectMode: true });
+ stream._write = (index, _enc, done) => {
+ return this._getEntry(filepaths[index], patterns[index], options)
+ .then((entry) => {
+ if (entry !== null && options.entryFilter(entry)) {
+ stream.push(entry);
+ }
+ if (index === filepaths.length - 1) {
+ stream.end();
+ }
+ done();
+ })
+ .catch(done);
+ };
+ for (let i = 0; i < filepaths.length; i++) {
+ stream.write(i);
+ }
+ return stream;
+ }
+ _getEntry(filepath, pattern, options) {
+ return this._getStat(filepath)
+ .then((stats) => this._makeEntry(stats, pattern))
+ .catch((error) => {
+ if (options.errorFilter(error)) {
+ return null;
+ }
+ throw error;
+ });
+ }
+ _getStat(filepath) {
+ return new Promise((resolve, reject) => {
+ this._stat(filepath, this._fsStatSettings, (error, stats) => {
+ return error === null ? resolve(stats) : reject(error);
+ });
+ });
+ }
+}
+exports.default = ReaderStream;
+
+
+/***/ }),
+
+/***/ 8821:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const fsStat = __nccwpck_require__(109);
+const fsWalk = __nccwpck_require__(6026);
+const reader_1 = __nccwpck_require__(8062);
+class ReaderSync extends reader_1.default {
+ constructor() {
+ super(...arguments);
+ this._walkSync = fsWalk.walkSync;
+ this._statSync = fsStat.statSync;
+ }
+ dynamic(root, options) {
+ return this._walkSync(root, options);
+ }
+ static(patterns, options) {
+ const entries = [];
+ for (const pattern of patterns) {
+ const filepath = this._getFullEntryPath(pattern);
+ const entry = this._getEntry(filepath, pattern, options);
+ if (entry === null || !options.entryFilter(entry)) {
+ continue;
+ }
+ entries.push(entry);
+ }
+ return entries;
+ }
+ _getEntry(filepath, pattern, options) {
+ try {
+ const stats = this._getStat(filepath);
+ return this._makeEntry(stats, pattern);
+ }
+ catch (error) {
+ if (options.errorFilter(error)) {
+ return null;
+ }
+ throw error;
+ }
+ }
+ _getStat(filepath) {
+ return this._statSync(filepath, this._fsStatSettings);
+ }
+}
+exports.default = ReaderSync;
+
+
+/***/ }),
+
+/***/ 952:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.DEFAULT_FILE_SYSTEM_ADAPTER = void 0;
+const fs = __nccwpck_require__(5747);
+const os = __nccwpck_require__(2087);
+/**
+ * The `os.cpus` method can return zero. We expect the number of cores to be greater than zero.
+ * https://github.com/nodejs/node/blob/7faeddf23a98c53896f8b574a6e66589e8fb1eb8/lib/os.js#L106-L107
+ */
+const CPU_COUNT = Math.max(os.cpus().length, 1);
+exports.DEFAULT_FILE_SYSTEM_ADAPTER = {
+ lstat: fs.lstat,
+ lstatSync: fs.lstatSync,
+ stat: fs.stat,
+ statSync: fs.statSync,
+ readdir: fs.readdir,
+ readdirSync: fs.readdirSync
+};
+class Settings {
+ constructor(_options = {}) {
+ this._options = _options;
+ this.absolute = this._getValue(this._options.absolute, false);
+ this.baseNameMatch = this._getValue(this._options.baseNameMatch, false);
+ this.braceExpansion = this._getValue(this._options.braceExpansion, true);
+ this.caseSensitiveMatch = this._getValue(this._options.caseSensitiveMatch, true);
+ this.concurrency = this._getValue(this._options.concurrency, CPU_COUNT);
+ this.cwd = this._getValue(this._options.cwd, process.cwd());
+ this.deep = this._getValue(this._options.deep, Infinity);
+ this.dot = this._getValue(this._options.dot, false);
+ this.extglob = this._getValue(this._options.extglob, true);
+ this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, true);
+ this.fs = this._getFileSystemMethods(this._options.fs);
+ this.globstar = this._getValue(this._options.globstar, true);
+ this.ignore = this._getValue(this._options.ignore, []);
+ this.markDirectories = this._getValue(this._options.markDirectories, false);
+ this.objectMode = this._getValue(this._options.objectMode, false);
+ this.onlyDirectories = this._getValue(this._options.onlyDirectories, false);
+ this.onlyFiles = this._getValue(this._options.onlyFiles, true);
+ this.stats = this._getValue(this._options.stats, false);
+ this.suppressErrors = this._getValue(this._options.suppressErrors, false);
+ this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, false);
+ this.unique = this._getValue(this._options.unique, true);
+ if (this.onlyDirectories) {
+ this.onlyFiles = false;
+ }
+ if (this.stats) {
+ this.objectMode = true;
+ }
+ }
+ _getValue(option, value) {
+ return option === undefined ? value : option;
+ }
+ _getFileSystemMethods(methods = {}) {
+ return Object.assign(Object.assign({}, exports.DEFAULT_FILE_SYSTEM_ADAPTER), methods);
+ }
+}
+exports.default = Settings;
+
+
+/***/ }),
+
+/***/ 5325:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.splitWhen = exports.flatten = void 0;
+function flatten(items) {
+ return items.reduce((collection, item) => [].concat(collection, item), []);
+}
+exports.flatten = flatten;
+function splitWhen(items, predicate) {
+ const result = [[]];
+ let groupIndex = 0;
+ for (const item of items) {
+ if (predicate(item)) {
+ groupIndex++;
+ result[groupIndex] = [];
+ }
+ else {
+ result[groupIndex].push(item);
+ }
+ }
+ return result;
+}
+exports.splitWhen = splitWhen;
+
+
+/***/ }),
+
+/***/ 1230:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.isEnoentCodeError = void 0;
+function isEnoentCodeError(error) {
+ return error.code === 'ENOENT';
+}
+exports.isEnoentCodeError = isEnoentCodeError;
+
+
+/***/ }),
+
+/***/ 7543:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.createDirentFromStats = void 0;
+class DirentFromStats {
+ constructor(name, stats) {
+ this.name = name;
+ this.isBlockDevice = stats.isBlockDevice.bind(stats);
+ this.isCharacterDevice = stats.isCharacterDevice.bind(stats);
+ this.isDirectory = stats.isDirectory.bind(stats);
+ this.isFIFO = stats.isFIFO.bind(stats);
+ this.isFile = stats.isFile.bind(stats);
+ this.isSocket = stats.isSocket.bind(stats);
+ this.isSymbolicLink = stats.isSymbolicLink.bind(stats);
+ }
+}
+function createDirentFromStats(name, stats) {
+ return new DirentFromStats(name, stats);
+}
+exports.createDirentFromStats = createDirentFromStats;
+
+
+/***/ }),
+
+/***/ 5444:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.string = exports.stream = exports.pattern = exports.path = exports.fs = exports.errno = exports.array = void 0;
+const array = __nccwpck_require__(5325);
+exports.array = array;
+const errno = __nccwpck_require__(1230);
+exports.errno = errno;
+const fs = __nccwpck_require__(7543);
+exports.fs = fs;
+const path = __nccwpck_require__(3873);
+exports.path = path;
+const pattern = __nccwpck_require__(1221);
+exports.pattern = pattern;
+const stream = __nccwpck_require__(8382);
+exports.stream = stream;
+const string = __nccwpck_require__(2203);
+exports.string = string;
+
+
+/***/ }),
+
+/***/ 3873:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.removeLeadingDotSegment = exports.escape = exports.makeAbsolute = exports.unixify = void 0;
+const path = __nccwpck_require__(5622);
+const LEADING_DOT_SEGMENT_CHARACTERS_COUNT = 2; // ./ or .\\
+const UNESCAPED_GLOB_SYMBOLS_RE = /(\\?)([()*?[\]{|}]|^!|[!+@](?=\())/g;
+/**
+ * Designed to work only with simple paths: `dir\\file`.
+ */
+function unixify(filepath) {
+ return filepath.replace(/\\/g, '/');
+}
+exports.unixify = unixify;
+function makeAbsolute(cwd, filepath) {
+ return path.resolve(cwd, filepath);
+}
+exports.makeAbsolute = makeAbsolute;
+function escape(pattern) {
+ return pattern.replace(UNESCAPED_GLOB_SYMBOLS_RE, '\\$2');
+}
+exports.escape = escape;
+function removeLeadingDotSegment(entry) {
+    // We do not use `startsWith` because it is 10x slower than the current implementation in some cases.
+ // eslint-disable-next-line @typescript-eslint/prefer-string-starts-ends-with
+ if (entry.charAt(0) === '.') {
+        const secondCharacter = entry.charAt(1);
+        if (secondCharacter === '/' || secondCharacter === '\\') {
+ return entry.slice(LEADING_DOT_SEGMENT_CHARACTERS_COUNT);
+ }
+ }
+ return entry;
+}
+exports.removeLeadingDotSegment = removeLeadingDotSegment;
+
+
+/***/ }),
+
+/***/ 1221:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.matchAny = exports.convertPatternsToRe = exports.makeRe = exports.getPatternParts = exports.expandBraceExpansion = exports.expandPatternsWithBraceExpansion = exports.isAffectDepthOfReadingPattern = exports.endsWithSlashGlobStar = exports.hasGlobStar = exports.getBaseDirectory = exports.getPositivePatterns = exports.getNegativePatterns = exports.isPositivePattern = exports.isNegativePattern = exports.convertToNegativePattern = exports.convertToPositivePattern = exports.isDynamicPattern = exports.isStaticPattern = void 0;
+const path = __nccwpck_require__(5622);
+const globParent = __nccwpck_require__(4655);
+const micromatch = __nccwpck_require__(3913);
+const picomatch = __nccwpck_require__(8569);
+const GLOBSTAR = '**';
+const ESCAPE_SYMBOL = '\\';
+const COMMON_GLOB_SYMBOLS_RE = /[*?]|^!/;
+const REGEX_CHARACTER_CLASS_SYMBOLS_RE = /\[.*]/;
+const REGEX_GROUP_SYMBOLS_RE = /(?:^|[^!*+?@])\(.*\|.*\)/;
+const GLOB_EXTENSION_SYMBOLS_RE = /[!*+?@]\(.*\)/;
+const BRACE_EXPANSIONS_SYMBOLS_RE = /{.*(?:,|\.\.).*}/;
+function isStaticPattern(pattern, options = {}) {
+ return !isDynamicPattern(pattern, options);
+}
+exports.isStaticPattern = isStaticPattern;
+function isDynamicPattern(pattern, options = {}) {
+ /**
+ * A special case with an empty string is necessary for matching patterns that start with a forward slash.
+ * An empty string cannot be a dynamic pattern.
+ * For example, the pattern `/lib/*` will be spread into parts: '', 'lib', '*'.
+ */
+ if (pattern === '') {
+ return false;
+ }
+ /**
+ * When the `caseSensitiveMatch` option is disabled, all patterns must be marked as dynamic, because we cannot check
+ * filepath directly (without read directory).
+ */
+ if (options.caseSensitiveMatch === false || pattern.includes(ESCAPE_SYMBOL)) {
+ return true;
+ }
+ if (COMMON_GLOB_SYMBOLS_RE.test(pattern) || REGEX_CHARACTER_CLASS_SYMBOLS_RE.test(pattern) || REGEX_GROUP_SYMBOLS_RE.test(pattern)) {
+ return true;
+ }
+ if (options.extglob !== false && GLOB_EXTENSION_SYMBOLS_RE.test(pattern)) {
+ return true;
+ }
+ if (options.braceExpansion !== false && BRACE_EXPANSIONS_SYMBOLS_RE.test(pattern)) {
+ return true;
+ }
+ return false;
+}
+exports.isDynamicPattern = isDynamicPattern;
+function convertToPositivePattern(pattern) {
+ return isNegativePattern(pattern) ? pattern.slice(1) : pattern;
+}
+exports.convertToPositivePattern = convertToPositivePattern;
+function convertToNegativePattern(pattern) {
+ return '!' + pattern;
+}
+exports.convertToNegativePattern = convertToNegativePattern;
+function isNegativePattern(pattern) {
+ return pattern.startsWith('!') && pattern[1] !== '(';
+}
+exports.isNegativePattern = isNegativePattern;
+function isPositivePattern(pattern) {
+ return !isNegativePattern(pattern);
+}
+exports.isPositivePattern = isPositivePattern;
+function getNegativePatterns(patterns) {
+ return patterns.filter(isNegativePattern);
+}
+exports.getNegativePatterns = getNegativePatterns;
+function getPositivePatterns(patterns) {
+ return patterns.filter(isPositivePattern);
+}
+exports.getPositivePatterns = getPositivePatterns;
+function getBaseDirectory(pattern) {
+ return globParent(pattern, { flipBackslashes: false });
+}
+exports.getBaseDirectory = getBaseDirectory;
+function hasGlobStar(pattern) {
+ return pattern.includes(GLOBSTAR);
+}
+exports.hasGlobStar = hasGlobStar;
+function endsWithSlashGlobStar(pattern) {
+ return pattern.endsWith('/' + GLOBSTAR);
+}
+exports.endsWithSlashGlobStar = endsWithSlashGlobStar;
+function isAffectDepthOfReadingPattern(pattern) {
+ const basename = path.basename(pattern);
+ return endsWithSlashGlobStar(pattern) || isStaticPattern(basename);
+}
+exports.isAffectDepthOfReadingPattern = isAffectDepthOfReadingPattern;
+function expandPatternsWithBraceExpansion(patterns) {
+ return patterns.reduce((collection, pattern) => {
+ return collection.concat(expandBraceExpansion(pattern));
+ }, []);
+}
+exports.expandPatternsWithBraceExpansion = expandPatternsWithBraceExpansion;
+function expandBraceExpansion(pattern) {
+ return micromatch.braces(pattern, {
+ expand: true,
+ nodupes: true
+ });
+}
+exports.expandBraceExpansion = expandBraceExpansion;
+function getPatternParts(pattern, options) {
+ let { parts } = picomatch.scan(pattern, Object.assign(Object.assign({}, options), { parts: true }));
+ /**
+ * The scan method returns an empty array in some cases.
+ * See micromatch/picomatch#58 for more details.
+ */
+ if (parts.length === 0) {
+ parts = [pattern];
+ }
+ /**
+ * The scan method does not return an empty part for the pattern with a forward slash.
+ * This is another part of micromatch/picomatch#58.
+ */
+ if (parts[0].startsWith('/')) {
+ parts[0] = parts[0].slice(1);
+ parts.unshift('');
+ }
+ return parts;
+}
+exports.getPatternParts = getPatternParts;
+function makeRe(pattern, options) {
+ return micromatch.makeRe(pattern, options);
+}
+exports.makeRe = makeRe;
+function convertPatternsToRe(patterns, options) {
+ return patterns.map((pattern) => makeRe(pattern, options));
+}
+exports.convertPatternsToRe = convertPatternsToRe;
+function matchAny(entry, patternsRe) {
+ return patternsRe.some((patternRe) => patternRe.test(entry));
+}
+exports.matchAny = matchAny;
+
+
+/***/ }),
+
+/***/ 8382:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.merge = void 0;
+const merge2 = __nccwpck_require__(2578);
+function merge(streams) {
+ const mergedStream = merge2(streams);
+ streams.forEach((stream) => {
+ stream.once('error', (error) => mergedStream.emit('error', error));
+ });
+ mergedStream.once('close', () => propagateCloseEventToSources(streams));
+ mergedStream.once('end', () => propagateCloseEventToSources(streams));
+ return mergedStream;
+}
+exports.merge = merge;
+function propagateCloseEventToSources(streams) {
+ streams.forEach((stream) => stream.emit('close'));
+}
+
+
+/***/ }),
+
+/***/ 2203:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.isEmpty = exports.isString = void 0;
+function isString(input) {
+ return typeof input === 'string';
+}
+exports.isString = isString;
+function isEmpty(input) {
+ return input === '';
+}
+exports.isEmpty = isEmpty;
+
+
+/***/ }),
+
+/***/ 7340:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+var reusify = __nccwpck_require__(2113)
+
+function fastqueue (context, worker, concurrency) {
+ if (typeof context === 'function') {
+ concurrency = worker
+ worker = context
+ context = null
+ }
+
+ if (concurrency < 1) {
+    throw new Error('fastqueue concurrency must be greater than or equal to 1')
+ }
+
+ var cache = reusify(Task)
+ var queueHead = null
+ var queueTail = null
+ var _running = 0
+ var errorHandler = null
+
+ var self = {
+ push: push,
+ drain: noop,
+ saturated: noop,
+ pause: pause,
+ paused: false,
+ concurrency: concurrency,
+ running: running,
+ resume: resume,
+ idle: idle,
+ length: length,
+ getQueue: getQueue,
+ unshift: unshift,
+ empty: noop,
+ kill: kill,
+ killAndDrain: killAndDrain,
+ error: error
+ }
+
+ return self
+
+ function running () {
+ return _running
+ }
+
+ function pause () {
+ self.paused = true
+ }
+
+ function length () {
+ var current = queueHead
+ var counter = 0
+
+ while (current) {
+ current = current.next
+ counter++
+ }
+
+ return counter
+ }
+
+ function getQueue () {
+ var current = queueHead
+ var tasks = []
+
+ while (current) {
+ tasks.push(current.value)
+ current = current.next
+ }
+
+ return tasks
+ }
+
+ function resume () {
+ if (!self.paused) return
+ self.paused = false
+ for (var i = 0; i < self.concurrency; i++) {
+ _running++
+ release()
+ }
+ }
+
+ function idle () {
+ return _running === 0 && self.length() === 0
+ }
+
+ function push (value, done) {
+ var current = cache.get()
+
+ current.context = context
+ current.release = release
+ current.value = value
+ current.callback = done || noop
+ current.errorHandler = errorHandler
+
+ if (_running === self.concurrency || self.paused) {
+ if (queueTail) {
+ queueTail.next = current
+ queueTail = current
+ } else {
+ queueHead = current
+ queueTail = current
+ self.saturated()
+ }
+ } else {
+ _running++
+ worker.call(context, current.value, current.worked)
+ }
+ }
+
+ function unshift (value, done) {
+ var current = cache.get()
+
+ current.context = context
+ current.release = release
+ current.value = value
+ current.callback = done || noop
+
+ if (_running === self.concurrency || self.paused) {
+ if (queueHead) {
+ current.next = queueHead
+ queueHead = current
+ } else {
+ queueHead = current
+ queueTail = current
+ self.saturated()
+ }
+ } else {
+ _running++
+ worker.call(context, current.value, current.worked)
+ }
+ }
+
+ function release (holder) {
+ if (holder) {
+ cache.release(holder)
+ }
+ var next = queueHead
+ if (next) {
+ if (!self.paused) {
+ if (queueTail === queueHead) {
+ queueTail = null
+ }
+ queueHead = next.next
+ next.next = null
+ worker.call(context, next.value, next.worked)
+ if (queueTail === null) {
+ self.empty()
+ }
+ } else {
+ _running--
+ }
+ } else if (--_running === 0) {
+ self.drain()
+ }
+ }
+
+ function kill () {
+ queueHead = null
+ queueTail = null
+ self.drain = noop
+ }
+
+ function killAndDrain () {
+ queueHead = null
+ queueTail = null
+ self.drain()
+ self.drain = noop
+ }
+
+ function error (handler) {
+ errorHandler = handler
+ }
+}
+
+function noop () {}
+
+function Task () {
+ this.value = null
+ this.callback = noop
+ this.next = null
+ this.release = noop
+ this.context = null
+ this.errorHandler = null
+
+ var self = this
+
+ this.worked = function worked (err, result) {
+ var callback = self.callback
+ var errorHandler = self.errorHandler
+ var val = self.value
+ self.value = null
+ self.callback = noop
+ if (self.errorHandler) {
+ errorHandler(err, val)
+ }
+ callback.call(self.context, err, result)
+ self.release(self)
+ }
+}
+
+module.exports = fastqueue
+
+
+/***/ }),
+
+/***/ 4655:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+var isGlob = __nccwpck_require__(4466);
+var pathPosixDirname = __nccwpck_require__(5622).posix.dirname;
+var isWin32 = __nccwpck_require__(2087).platform() === 'win32';
+
+var slash = '/';
+var backslash = /\\/g;
+var enclosure = /[\{\[].*[\/]*.*[\}\]]$/;
+var globby = /(^|[^\\])([\{\[]|\([^\)]+$)/;
+var escaped = /\\([\!\*\?\|\[\]\(\)\{\}])/g;
+
+/**
+ * @param {string} str
+ * @param {Object} opts
+ * @param {boolean} [opts.flipBackslashes=true]
+ */
+module.exports = function globParent(str, opts) {
+ var options = Object.assign({ flipBackslashes: true }, opts);
+
+ // flip windows path separators
+ if (options.flipBackslashes && isWin32 && str.indexOf(slash) < 0) {
+ str = str.replace(backslash, slash);
+ }
+
+ // special case for strings ending in enclosure containing path separator
+ if (enclosure.test(str)) {
+ str += slash;
+ }
+
+ // preserves full path in case of trailing path separator
+ str += 'a';
+
+ // remove path parts that are globby
+ do {
+ str = pathPosixDirname(str);
+ } while (isGlob(str) || globby.test(str));
+
+ // remove escape chars and return result
+ return str.replace(escaped, '$1');
+};
+
+
+/***/ }),
+
+/***/ 6457:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const types_1 = __nccwpck_require__(4597);
+function createRejection(error, ...beforeErrorGroups) {
+ const promise = (async () => {
+ if (error instanceof types_1.RequestError) {
+ try {
+ for (const hooks of beforeErrorGroups) {
+ if (hooks) {
+ for (const hook of hooks) {
+ // eslint-disable-next-line no-await-in-loop
+ error = await hook(error);
+ }
+ }
+ }
+ }
+ catch (error_) {
+ error = error_;
+ }
+ }
+ throw error;
+ })();
+ const returnPromise = () => promise;
+ promise.json = returnPromise;
+ promise.text = returnPromise;
+ promise.buffer = returnPromise;
+ promise.on = returnPromise;
+ return promise;
+}
+exports.default = createRejection;
+
+
+/***/ }),
+
+/***/ 6056:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+"use strict";
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+ for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const events_1 = __nccwpck_require__(8614);
+const is_1 = __nccwpck_require__(7678);
+const PCancelable = __nccwpck_require__(9072);
+const types_1 = __nccwpck_require__(4597);
+const parse_body_1 = __nccwpck_require__(8220);
+const core_1 = __nccwpck_require__(94);
+const proxy_events_1 = __nccwpck_require__(3021);
+const get_buffer_1 = __nccwpck_require__(4500);
+const is_response_ok_1 = __nccwpck_require__(9298);
+const proxiedRequestEvents = [
+ 'request',
+ 'response',
+ 'redirect',
+ 'uploadProgress',
+ 'downloadProgress'
+];
+function asPromise(normalizedOptions) {
+ let globalRequest;
+ let globalResponse;
+ const emitter = new events_1.EventEmitter();
+ const promise = new PCancelable((resolve, reject, onCancel) => {
+ const makeRequest = (retryCount) => {
+ const request = new core_1.default(undefined, normalizedOptions);
+ request.retryCount = retryCount;
+ request._noPipe = true;
+ onCancel(() => request.destroy());
+ onCancel.shouldReject = false;
+ onCancel(() => reject(new types_1.CancelError(request)));
+ globalRequest = request;
+ request.once('response', async (response) => {
+ var _a;
+ response.retryCount = retryCount;
+ if (response.request.aborted) {
+ // Canceled while downloading - will throw a `CancelError` or `TimeoutError` error
+ return;
+ }
+ // Download body
+ let rawBody;
+ try {
+ rawBody = await get_buffer_1.default(request);
+ response.rawBody = rawBody;
+ }
+ catch (_b) {
+ // The same error is caught below.
+ // See request.once('error')
+ return;
+ }
+ if (request._isAboutToError) {
+ return;
+ }
+ // Parse body
+ const contentEncoding = ((_a = response.headers['content-encoding']) !== null && _a !== void 0 ? _a : '').toLowerCase();
+ const isCompressed = ['gzip', 'deflate', 'br'].includes(contentEncoding);
+ const { options } = request;
+ if (isCompressed && !options.decompress) {
+ response.body = rawBody;
+ }
+ else {
+ try {
+ response.body = parse_body_1.default(response, options.responseType, options.parseJson, options.encoding);
+ }
+ catch (error) {
+ // Fallback to `utf8`
+ response.body = rawBody.toString();
+ if (is_response_ok_1.isResponseOk(response)) {
+ request._beforeError(error);
+ return;
+ }
+ }
+ }
+ try {
+ for (const [index, hook] of options.hooks.afterResponse.entries()) {
+ // @ts-expect-error TS doesn't notice that CancelableRequest is a Promise
+ // eslint-disable-next-line no-await-in-loop
+ response = await hook(response, async (updatedOptions) => {
+ const typedOptions = core_1.default.normalizeArguments(undefined, {
+ ...updatedOptions,
+ retry: {
+ calculateDelay: () => 0
+ },
+ throwHttpErrors: false,
+ resolveBodyOnly: false
+ }, options);
+ // Remove any further hooks for that request, because we'll call them anyway.
+ // The loop continues. We don't want duplicates (asPromise recursion).
+ typedOptions.hooks.afterResponse = typedOptions.hooks.afterResponse.slice(0, index);
+ for (const hook of typedOptions.hooks.beforeRetry) {
+ // eslint-disable-next-line no-await-in-loop
+ await hook(typedOptions);
+ }
+ const promise = asPromise(typedOptions);
+ onCancel(() => {
+ promise.catch(() => { });
+ promise.cancel();
+ });
+ return promise;
+ });
+ }
+ }
+ catch (error) {
+ request._beforeError(new types_1.RequestError(error.message, error, request));
+ return;
+ }
+ if (!is_response_ok_1.isResponseOk(response)) {
+ request._beforeError(new types_1.HTTPError(response));
+ return;
+ }
+ globalResponse = response;
+ resolve(request.options.resolveBodyOnly ? response.body : response);
+ });
+ const onError = (error) => {
+ if (promise.isCanceled) {
+ return;
+ }
+ const { options } = request;
+ if (error instanceof types_1.HTTPError && !options.throwHttpErrors) {
+ const { response } = error;
+ resolve(request.options.resolveBodyOnly ? response.body : response);
+ return;
+ }
+ reject(error);
+ };
+ request.once('error', onError);
+ const previousBody = request.options.body;
+ request.once('retry', (newRetryCount, error) => {
+ var _a, _b;
+ if (previousBody === ((_a = error.request) === null || _a === void 0 ? void 0 : _a.options.body) && is_1.default.nodeStream((_b = error.request) === null || _b === void 0 ? void 0 : _b.options.body)) {
+ onError(error);
+ return;
+ }
+ makeRequest(newRetryCount);
+ });
+ proxy_events_1.default(request, emitter, proxiedRequestEvents);
+ };
+ makeRequest(0);
+ });
+ promise.on = (event, fn) => {
+ emitter.on(event, fn);
+ return promise;
+ };
+ const shortcut = (responseType) => {
+ const newPromise = (async () => {
+ // Wait until downloading has ended
+ await promise;
+ const { options } = globalResponse.request;
+ return parse_body_1.default(globalResponse, responseType, options.parseJson, options.encoding);
+ })();
+ Object.defineProperties(newPromise, Object.getOwnPropertyDescriptors(promise));
+ return newPromise;
+ };
+ promise.json = () => {
+ const { headers } = globalRequest.options;
+ if (!globalRequest.writableFinished && headers.accept === undefined) {
+ headers.accept = 'application/json';
+ }
+ return shortcut('json');
+ };
+ promise.buffer = () => shortcut('buffer');
+ promise.text = () => shortcut('text');
+ return promise;
+}
+exports.default = asPromise;
+__exportStar(__nccwpck_require__(4597), exports);
+
+
+/***/ }),
+
+/***/ 1048:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const is_1 = __nccwpck_require__(7678);
+const normalizeArguments = (options, defaults) => {
+ if (is_1.default.null_(options.encoding)) {
+ throw new TypeError('To get a Buffer, set `options.responseType` to `buffer` instead');
+ }
+ is_1.assert.any([is_1.default.string, is_1.default.undefined], options.encoding);
+ is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.resolveBodyOnly);
+ is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.methodRewriting);
+ is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.isStream);
+ is_1.assert.any([is_1.default.string, is_1.default.undefined], options.responseType);
+ // `options.responseType`
+ if (options.responseType === undefined) {
+ options.responseType = 'text';
+ }
+ // `options.retry`
+ const { retry } = options;
+ if (defaults) {
+ options.retry = { ...defaults.retry };
+ }
+ else {
+ options.retry = {
+ calculateDelay: retryObject => retryObject.computedValue,
+ limit: 0,
+ methods: [],
+ statusCodes: [],
+ errorCodes: [],
+ maxRetryAfter: undefined
+ };
+ }
+ if (is_1.default.object(retry)) {
+ options.retry = {
+ ...options.retry,
+ ...retry
+ };
+ options.retry.methods = [...new Set(options.retry.methods.map(method => method.toUpperCase()))];
+ options.retry.statusCodes = [...new Set(options.retry.statusCodes)];
+ options.retry.errorCodes = [...new Set(options.retry.errorCodes)];
+ }
+ else if (is_1.default.number(retry)) {
+ options.retry.limit = retry;
+ }
+ if (is_1.default.undefined(options.retry.maxRetryAfter)) {
+ options.retry.maxRetryAfter = Math.min(
+ // TypeScript is not smart enough to handle `.filter(x => is.number(x))`.
+ // eslint-disable-next-line unicorn/no-fn-reference-in-iterator
+ ...[options.timeout.request, options.timeout.connect].filter(is_1.default.number));
+ }
+ // `options.pagination`
+ if (is_1.default.object(options.pagination)) {
+ if (defaults) {
+ options.pagination = {
+ ...defaults.pagination,
+ ...options.pagination
+ };
+ }
+ const { pagination } = options;
+ if (!is_1.default.function_(pagination.transform)) {
+ throw new Error('`options.pagination.transform` must be implemented');
+ }
+ if (!is_1.default.function_(pagination.shouldContinue)) {
+ throw new Error('`options.pagination.shouldContinue` must be implemented');
+ }
+ if (!is_1.default.function_(pagination.filter)) {
+ throw new TypeError('`options.pagination.filter` must be implemented');
+ }
+ if (!is_1.default.function_(pagination.paginate)) {
+ throw new Error('`options.pagination.paginate` must be implemented');
+ }
+ }
+ // JSON mode
+ if (options.responseType === 'json' && options.headers.accept === undefined) {
+ options.headers.accept = 'application/json';
+ }
+ return options;
+};
+exports.default = normalizeArguments;
+
+
+/***/ }),
+
+/***/ 8220:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const types_1 = __nccwpck_require__(4597);
+const parseBody = (response, responseType, parseJson, encoding) => {
+ const { rawBody } = response;
+ try {
+ if (responseType === 'text') {
+ return rawBody.toString(encoding);
+ }
+ if (responseType === 'json') {
+ return rawBody.length === 0 ? '' : parseJson(rawBody.toString());
+ }
+ if (responseType === 'buffer') {
+ return rawBody;
+ }
+ throw new types_1.ParseError({
+ message: `Unknown body type '${responseType}'`,
+ name: 'Error'
+ }, response);
+ }
+ catch (error) {
+ throw new types_1.ParseError(error, response);
+ }
+};
+exports.default = parseBody;
+
+
+/***/ }),
+
+/***/ 4597:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+"use strict";
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+ for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.CancelError = exports.ParseError = void 0;
+const core_1 = __nccwpck_require__(94);
+/**
+An error to be thrown when the server response code is 2xx and parsing of the body fails.
+Includes a `response` property.
+*/
+class ParseError extends core_1.RequestError {
+ constructor(error, response) {
+ const { options } = response.request;
+ super(`${error.message} in "${options.url.toString()}"`, error, response.request);
+ this.name = 'ParseError';
+ }
+}
+exports.ParseError = ParseError;
+/**
+An error to be thrown when the request is aborted with `.cancel()`.
+*/
+class CancelError extends core_1.RequestError {
+ constructor(request) {
+ super('Promise was canceled', {}, request);
+ this.name = 'CancelError';
+ }
+ get isCanceled() {
+ return true;
+ }
+}
+exports.CancelError = CancelError;
+__exportStar(__nccwpck_require__(94), exports);
+
+
+/***/ }),
+
+/***/ 3462:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.retryAfterStatusCodes = void 0;
+exports.retryAfterStatusCodes = new Set([413, 429, 503]);
+const calculateRetryDelay = ({ attemptCount, retryOptions, error, retryAfter }) => {
+ if (attemptCount > retryOptions.limit) {
+ return 0;
+ }
+ const hasMethod = retryOptions.methods.includes(error.options.method);
+ const hasErrorCode = retryOptions.errorCodes.includes(error.code);
+ const hasStatusCode = error.response && retryOptions.statusCodes.includes(error.response.statusCode);
+ if (!hasMethod || (!hasErrorCode && !hasStatusCode)) {
+ return 0;
+ }
+ if (error.response) {
+ if (retryAfter) {
+ if (retryOptions.maxRetryAfter === undefined || retryAfter > retryOptions.maxRetryAfter) {
+ return 0;
+ }
+ return retryAfter;
+ }
+ if (error.response.statusCode === 413) {
+ return 0;
+ }
+ }
+ const noise = Math.random() * 100;
+ return ((2 ** (attemptCount - 1)) * 1000) + noise;
+};
+exports.default = calculateRetryDelay;
+
+
+/***/ }),
+
+/***/ 94:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.UnsupportedProtocolError = exports.ReadError = exports.TimeoutError = exports.UploadError = exports.CacheError = exports.HTTPError = exports.MaxRedirectsError = exports.RequestError = exports.setNonEnumerableProperties = exports.knownHookEvents = exports.withoutBody = exports.kIsNormalizedAlready = void 0;
+const util_1 = __nccwpck_require__(1669);
+const stream_1 = __nccwpck_require__(2413);
+const fs_1 = __nccwpck_require__(5747);
+const url_1 = __nccwpck_require__(8835);
+const http = __nccwpck_require__(8605);
+const http_1 = __nccwpck_require__(8605);
+const https = __nccwpck_require__(7211);
+const http_timer_1 = __nccwpck_require__(8097);
+const cacheable_lookup_1 = __nccwpck_require__(2286);
+const CacheableRequest = __nccwpck_require__(8116);
+const decompressResponse = __nccwpck_require__(2391);
+// @ts-expect-error Missing types
+const http2wrapper = __nccwpck_require__(4645);
+const lowercaseKeys = __nccwpck_require__(9662);
+const is_1 = __nccwpck_require__(7678);
+const get_body_size_1 = __nccwpck_require__(4564);
+const is_form_data_1 = __nccwpck_require__(40);
+const proxy_events_1 = __nccwpck_require__(3021);
+const timed_out_1 = __nccwpck_require__(2454);
+const url_to_options_1 = __nccwpck_require__(8026);
+const options_to_url_1 = __nccwpck_require__(9219);
+const weakable_map_1 = __nccwpck_require__(7288);
+const get_buffer_1 = __nccwpck_require__(4500);
+const dns_ip_version_1 = __nccwpck_require__(4993);
+const is_response_ok_1 = __nccwpck_require__(9298);
+const deprecation_warning_1 = __nccwpck_require__(397);
+const normalize_arguments_1 = __nccwpck_require__(1048);
+const calculate_retry_delay_1 = __nccwpck_require__(3462);
+const globalDnsCache = new cacheable_lookup_1.default();
+const kRequest = Symbol('request');
+const kResponse = Symbol('response');
+const kResponseSize = Symbol('responseSize');
+const kDownloadedSize = Symbol('downloadedSize');
+const kBodySize = Symbol('bodySize');
+const kUploadedSize = Symbol('uploadedSize');
+const kServerResponsesPiped = Symbol('serverResponsesPiped');
+const kUnproxyEvents = Symbol('unproxyEvents');
+const kIsFromCache = Symbol('isFromCache');
+const kCancelTimeouts = Symbol('cancelTimeouts');
+const kStartedReading = Symbol('startedReading');
+const kStopReading = Symbol('stopReading');
+const kTriggerRead = Symbol('triggerRead');
+const kBody = Symbol('body');
+const kJobs = Symbol('jobs');
+const kOriginalResponse = Symbol('originalResponse');
+const kRetryTimeout = Symbol('retryTimeout');
+exports.kIsNormalizedAlready = Symbol('isNormalizedAlready');
+const supportsBrotli = is_1.default.string(process.versions.brotli);
+exports.withoutBody = new Set(['GET', 'HEAD']);
+exports.knownHookEvents = [
+ 'init',
+ 'beforeRequest',
+ 'beforeRedirect',
+ 'beforeError',
+ 'beforeRetry',
+ // Promise-Only
+ 'afterResponse'
+];
+function validateSearchParameters(searchParameters) {
+ // eslint-disable-next-line guard-for-in
+ for (const key in searchParameters) {
+ const value = searchParameters[key];
+ if (!is_1.default.string(value) && !is_1.default.number(value) && !is_1.default.boolean(value) && !is_1.default.null_(value) && !is_1.default.undefined(value)) {
+ throw new TypeError(`The \`searchParams\` value '${String(value)}' must be a string, number, boolean or null`);
+ }
+ }
+}
+function isClientRequest(clientRequest) {
+ return is_1.default.object(clientRequest) && !('statusCode' in clientRequest);
+}
+const cacheableStore = new weakable_map_1.default();
+const waitForOpenFile = async (file) => new Promise((resolve, reject) => {
+ const onError = (error) => {
+ reject(error);
+ };
+ // Node.js 12 has incomplete types
+ if (!file.pending) {
+ resolve();
+ }
+ file.once('error', onError);
+ file.once('ready', () => {
+ file.off('error', onError);
+ resolve();
+ });
+});
+const redirectCodes = new Set([300, 301, 302, 303, 304, 307, 308]);
+const nonEnumerableProperties = [
+ 'context',
+ 'body',
+ 'json',
+ 'form'
+];
+exports.setNonEnumerableProperties = (sources, to) => {
+ // Non enumerable properties shall not be merged
+ const properties = {};
+ for (const source of sources) {
+ if (!source) {
+ continue;
+ }
+ for (const name of nonEnumerableProperties) {
+ if (!(name in source)) {
+ continue;
+ }
+ properties[name] = {
+ writable: true,
+ configurable: true,
+ enumerable: false,
+ // @ts-expect-error TS doesn't see the check above
+ value: source[name]
+ };
+ }
+ }
+ Object.defineProperties(to, properties);
+};
+/**
+An error to be thrown when a request fails.
+Contains a `code` property with error class code, like `ECONNREFUSED`.
+*/
+class RequestError extends Error {
+ constructor(message, error, self) {
+ var _a;
+ super(message);
+ Error.captureStackTrace(this, this.constructor);
+ this.name = 'RequestError';
+ this.code = error.code;
+ if (self instanceof Request) {
+ Object.defineProperty(this, 'request', {
+ enumerable: false,
+ value: self
+ });
+ Object.defineProperty(this, 'response', {
+ enumerable: false,
+ value: self[kResponse]
+ });
+ Object.defineProperty(this, 'options', {
+ // This fails because of TS 3.7.2 useDefineForClassFields
+ // Ref: https://github.com/microsoft/TypeScript/issues/34972
+ enumerable: false,
+ value: self.options
+ });
+ }
+ else {
+ Object.defineProperty(this, 'options', {
+ // This fails because of TS 3.7.2 useDefineForClassFields
+ // Ref: https://github.com/microsoft/TypeScript/issues/34972
+ enumerable: false,
+ value: self
+ });
+ }
+ this.timings = (_a = this.request) === null || _a === void 0 ? void 0 : _a.timings;
+ // Recover the original stacktrace
+ if (is_1.default.string(error.stack) && is_1.default.string(this.stack)) {
+ const indexOfMessage = this.stack.indexOf(this.message) + this.message.length;
+ const thisStackTrace = this.stack.slice(indexOfMessage).split('\n').reverse();
+ const errorStackTrace = error.stack.slice(error.stack.indexOf(error.message) + error.message.length).split('\n').reverse();
+ // Remove duplicated traces
+ while (errorStackTrace.length !== 0 && errorStackTrace[0] === thisStackTrace[0]) {
+ thisStackTrace.shift();
+ }
+ this.stack = `${this.stack.slice(0, indexOfMessage)}${thisStackTrace.reverse().join('\n')}${errorStackTrace.reverse().join('\n')}`;
+ }
+ }
+}
+exports.RequestError = RequestError;
+/**
+An error to be thrown when the server redirects you more than ten times.
+Includes a `response` property.
+*/
+class MaxRedirectsError extends RequestError {
+ constructor(request) {
+ super(`Redirected ${request.options.maxRedirects} times. Aborting.`, {}, request);
+ this.name = 'MaxRedirectsError';
+ }
+}
+exports.MaxRedirectsError = MaxRedirectsError;
+/**
+An error to be thrown when the server response code is not 2xx nor 3xx if `options.followRedirect` is `true`, but always except for 304.
+Includes a `response` property.
+*/
+class HTTPError extends RequestError {
+ constructor(response) {
+ super(`Response code ${response.statusCode} (${response.statusMessage})`, {}, response.request);
+ this.name = 'HTTPError';
+ }
+}
+exports.HTTPError = HTTPError;
+/**
+An error to be thrown when a cache method fails.
+For example, if the database goes down or there's a filesystem error.
+*/
+class CacheError extends RequestError {
+ constructor(error, request) {
+ super(error.message, error, request);
+ this.name = 'CacheError';
+ }
+}
+exports.CacheError = CacheError;
+/**
+An error to be thrown when the request body is a stream and an error occurs while reading from that stream.
+*/
+class UploadError extends RequestError {
+ constructor(error, request) {
+ super(error.message, error, request);
+ this.name = 'UploadError';
+ }
+}
+exports.UploadError = UploadError;
+/**
+An error to be thrown when the request is aborted due to a timeout.
+Includes an `event` and `timings` property.
+*/
+class TimeoutError extends RequestError {
+ constructor(error, timings, request) {
+ super(error.message, error, request);
+ this.name = 'TimeoutError';
+ this.event = error.event;
+ this.timings = timings;
+ }
+}
+exports.TimeoutError = TimeoutError;
+/**
+An error to be thrown when reading from response stream fails.
+*/
+class ReadError extends RequestError {
+ constructor(error, request) {
+ super(error.message, error, request);
+ this.name = 'ReadError';
+ }
+}
+exports.ReadError = ReadError;
+/**
+An error to be thrown when given an unsupported protocol.
+*/
+class UnsupportedProtocolError extends RequestError {
+ constructor(options) {
+ super(`Unsupported protocol "${options.url.protocol}"`, {}, options);
+ this.name = 'UnsupportedProtocolError';
+ }
+}
+exports.UnsupportedProtocolError = UnsupportedProtocolError;
+const proxiedRequestEvents = [
+ 'socket',
+ 'connect',
+ 'continue',
+ 'information',
+ 'upgrade',
+ 'timeout'
+];
+class Request extends stream_1.Duplex {
+ constructor(url, options = {}, defaults) {
+ super({
+ // This must be false, to enable throwing after destroy
+ // It is used for retry logic in Promise API
+ autoDestroy: false,
+ // It needs to be zero because we're just proxying the data to another stream
+ highWaterMark: 0
+ });
+ this[kDownloadedSize] = 0;
+ this[kUploadedSize] = 0;
+ this.requestInitialized = false;
+ this[kServerResponsesPiped] = new Set();
+ this.redirects = [];
+ this[kStopReading] = false;
+ this[kTriggerRead] = false;
+ this[kJobs] = [];
+ this.retryCount = 0;
+ // TODO: Remove this when targeting Node.js >= 12
+ this._progressCallbacks = [];
+ const unlockWrite = () => this._unlockWrite();
+ const lockWrite = () => this._lockWrite();
+ this.on('pipe', (source) => {
+ source.prependListener('data', unlockWrite);
+ source.on('data', lockWrite);
+ source.prependListener('end', unlockWrite);
+ source.on('end', lockWrite);
+ });
+ this.on('unpipe', (source) => {
+ source.off('data', unlockWrite);
+ source.off('data', lockWrite);
+ source.off('end', unlockWrite);
+ source.off('end', lockWrite);
+ });
+ this.on('pipe', source => {
+ if (source instanceof http_1.IncomingMessage) {
+ this.options.headers = {
+ ...source.headers,
+ ...this.options.headers
+ };
+ }
+ });
+ const { json, body, form } = options;
+ if (json || body || form) {
+ this._lockWrite();
+ }
+ if (exports.kIsNormalizedAlready in options) {
+ this.options = options;
+ }
+ else {
+ try {
+ // @ts-expect-error Common TypeScript bug saying that `this.constructor` is not accessible
+ this.options = this.constructor.normalizeArguments(url, options, defaults);
+ }
+ catch (error) {
+ // TODO: Move this to `_destroy()`
+ if (is_1.default.nodeStream(options.body)) {
+ options.body.destroy();
+ }
+ this.destroy(error);
+ return;
+ }
+ }
+ (async () => {
+ var _a;
+ try {
+ if (this.options.body instanceof fs_1.ReadStream) {
+ await waitForOpenFile(this.options.body);
+ }
+ const { url: normalizedURL } = this.options;
+ if (!normalizedURL) {
+ throw new TypeError('Missing `url` property');
+ }
+ this.requestUrl = normalizedURL.toString();
+ decodeURI(this.requestUrl);
+ await this._finalizeBody();
+ await this._makeRequest();
+ if (this.destroyed) {
+ (_a = this[kRequest]) === null || _a === void 0 ? void 0 : _a.destroy();
+ return;
+ }
+ // Queued writes etc.
+ for (const job of this[kJobs]) {
+ job();
+ }
+ // Prevent memory leak
+ this[kJobs].length = 0;
+ this.requestInitialized = true;
+ }
+ catch (error) {
+ if (error instanceof RequestError) {
+ this._beforeError(error);
+ return;
+ }
+ // This is a workaround for https://github.com/nodejs/node/issues/33335
+ if (!this.destroyed) {
+ this.destroy(error);
+ }
+ }
+ })();
+ }
+ static normalizeArguments(url, options, defaults) {
+ var _a, _b, _c, _d, _e;
+ const rawOptions = options;
+ if (is_1.default.object(url) && !is_1.default.urlInstance(url)) {
+ options = { ...defaults, ...url, ...options };
+ }
+ else {
+ if (url && options && options.url !== undefined) {
+ throw new TypeError('The `url` option is mutually exclusive with the `input` argument');
+ }
+ options = { ...defaults, ...options };
+ if (url !== undefined) {
+ options.url = url;
+ }
+ if (is_1.default.urlInstance(options.url)) {
+ options.url = new url_1.URL(options.url.toString());
+ }
+ }
+ // TODO: Deprecate URL options in Got 12.
+ // Support extend-specific options
+ if (options.cache === false) {
+ options.cache = undefined;
+ }
+ if (options.dnsCache === false) {
+ options.dnsCache = undefined;
+ }
+ // Nice type assertions
+ is_1.assert.any([is_1.default.string, is_1.default.undefined], options.method);
+ is_1.assert.any([is_1.default.object, is_1.default.undefined], options.headers);
+ is_1.assert.any([is_1.default.string, is_1.default.urlInstance, is_1.default.undefined], options.prefixUrl);
+ is_1.assert.any([is_1.default.object, is_1.default.undefined], options.cookieJar);
+ is_1.assert.any([is_1.default.object, is_1.default.string, is_1.default.undefined], options.searchParams);
+ is_1.assert.any([is_1.default.object, is_1.default.string, is_1.default.undefined], options.cache);
+ is_1.assert.any([is_1.default.object, is_1.default.number, is_1.default.undefined], options.timeout);
+ is_1.assert.any([is_1.default.object, is_1.default.undefined], options.context);
+ is_1.assert.any([is_1.default.object, is_1.default.undefined], options.hooks);
+ is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.decompress);
+ is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.ignoreInvalidCookies);
+ is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.followRedirect);
+ is_1.assert.any([is_1.default.number, is_1.default.undefined], options.maxRedirects);
+ is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.throwHttpErrors);
+ is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.http2);
+ is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.allowGetBody);
+ is_1.assert.any([is_1.default.string, is_1.default.undefined], options.localAddress);
+ is_1.assert.any([dns_ip_version_1.isDnsLookupIpVersion, is_1.default.undefined], options.dnsLookupIpVersion);
+ is_1.assert.any([is_1.default.object, is_1.default.undefined], options.https);
+ is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.rejectUnauthorized);
+ if (options.https) {
+ is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.https.rejectUnauthorized);
+ is_1.assert.any([is_1.default.function_, is_1.default.undefined], options.https.checkServerIdentity);
+ is_1.assert.any([is_1.default.string, is_1.default.object, is_1.default.array, is_1.default.undefined], options.https.certificateAuthority);
+ is_1.assert.any([is_1.default.string, is_1.default.object, is_1.default.array, is_1.default.undefined], options.https.key);
+ is_1.assert.any([is_1.default.string, is_1.default.object, is_1.default.array, is_1.default.undefined], options.https.certificate);
+ is_1.assert.any([is_1.default.string, is_1.default.undefined], options.https.passphrase);
+ is_1.assert.any([is_1.default.string, is_1.default.buffer, is_1.default.array, is_1.default.undefined], options.https.pfx);
+ }
+ is_1.assert.any([is_1.default.object, is_1.default.undefined], options.cacheOptions);
+ // `options.method`
+ if (is_1.default.string(options.method)) {
+ options.method = options.method.toUpperCase();
+ }
+ else {
+ options.method = 'GET';
+ }
+ // `options.headers`
+ if (options.headers === (defaults === null || defaults === void 0 ? void 0 : defaults.headers)) {
+ options.headers = { ...options.headers };
+ }
+ else {
+ options.headers = lowercaseKeys({ ...(defaults === null || defaults === void 0 ? void 0 : defaults.headers), ...options.headers });
+ }
+ // Disallow legacy `url.Url`
+ if ('slashes' in options) {
+ throw new TypeError('The legacy `url.Url` has been deprecated. Use `URL` instead.');
+ }
+ // `options.auth`
+ if ('auth' in options) {
+ throw new TypeError('Parameter `auth` is deprecated. Use `username` / `password` instead.');
+ }
+ // `options.searchParams`
+ if ('searchParams' in options) {
+ if (options.searchParams && options.searchParams !== (defaults === null || defaults === void 0 ? void 0 : defaults.searchParams)) {
+ let searchParameters;
+ if (is_1.default.string(options.searchParams) || (options.searchParams instanceof url_1.URLSearchParams)) {
+ searchParameters = new url_1.URLSearchParams(options.searchParams);
+ }
+ else {
+ validateSearchParameters(options.searchParams);
+ searchParameters = new url_1.URLSearchParams();
+ // eslint-disable-next-line guard-for-in
+ for (const key in options.searchParams) {
+ const value = options.searchParams[key];
+ if (value === null) {
+ searchParameters.append(key, '');
+ }
+ else if (value !== undefined) {
+ searchParameters.append(key, value);
+ }
+ }
+ }
+ // `normalizeArguments()` is also used to merge options
+ (_a = defaults === null || defaults === void 0 ? void 0 : defaults.searchParams) === null || _a === void 0 ? void 0 : _a.forEach((value, key) => {
+ // Only use default if one isn't already defined
+ if (!searchParameters.has(key)) {
+ searchParameters.append(key, value);
+ }
+ });
+ options.searchParams = searchParameters;
+ }
+ }
+ // `options.username` & `options.password`
+ options.username = (_b = options.username) !== null && _b !== void 0 ? _b : '';
+ options.password = (_c = options.password) !== null && _c !== void 0 ? _c : '';
+ // `options.prefixUrl` & `options.url`
+ if (is_1.default.undefined(options.prefixUrl)) {
+ options.prefixUrl = (_d = defaults === null || defaults === void 0 ? void 0 : defaults.prefixUrl) !== null && _d !== void 0 ? _d : '';
+ }
+ else {
+ options.prefixUrl = options.prefixUrl.toString();
+ if (options.prefixUrl !== '' && !options.prefixUrl.endsWith('/')) {
+ options.prefixUrl += '/';
+ }
+ }
+ if (is_1.default.string(options.url)) {
+ if (options.url.startsWith('/')) {
+ throw new Error('`input` must not start with a slash when using `prefixUrl`');
+ }
+ options.url = options_to_url_1.default(options.prefixUrl + options.url, options);
+ }
+ else if ((is_1.default.undefined(options.url) && options.prefixUrl !== '') || options.protocol) {
+ options.url = options_to_url_1.default(options.prefixUrl, options);
+ }
+ if (options.url) {
+ if ('port' in options) {
+ delete options.port;
+ }
+ // Make it possible to change `options.prefixUrl`
+ let { prefixUrl } = options;
+ Object.defineProperty(options, 'prefixUrl', {
+ set: (value) => {
+ const url = options.url;
+ if (!url.href.startsWith(value)) {
+ throw new Error(`Cannot change \`prefixUrl\` from ${prefixUrl} to ${value}: ${url.href}`);
+ }
+ options.url = new url_1.URL(value + url.href.slice(prefixUrl.length));
+ prefixUrl = value;
+ },
+ get: () => prefixUrl
+ });
+ // Support UNIX sockets
+ let { protocol } = options.url;
+ if (protocol === 'unix:') {
+ protocol = 'http:';
+ options.url = new url_1.URL(`http://unix${options.url.pathname}${options.url.search}`);
+ }
+ // Set search params
+ if (options.searchParams) {
+ // eslint-disable-next-line @typescript-eslint/no-base-to-string
+ options.url.search = options.searchParams.toString();
+ }
+ // Protocol check
+ if (protocol !== 'http:' && protocol !== 'https:') {
+ throw new UnsupportedProtocolError(options);
+ }
+ // Update `username`
+ if (options.username === '') {
+ options.username = options.url.username;
+ }
+ else {
+ options.url.username = options.username;
+ }
+ // Update `password`
+ if (options.password === '') {
+ options.password = options.url.password;
+ }
+ else {
+ options.url.password = options.password;
+ }
+ }
+ // `options.cookieJar`
+ const { cookieJar } = options;
+ if (cookieJar) {
+ let { setCookie, getCookieString } = cookieJar;
+ is_1.assert.function_(setCookie);
+ is_1.assert.function_(getCookieString);
+ /* istanbul ignore next: Horrible `tough-cookie` v3 check */
+ if (setCookie.length === 4 && getCookieString.length === 0) {
+ setCookie = util_1.promisify(setCookie.bind(options.cookieJar));
+ getCookieString = util_1.promisify(getCookieString.bind(options.cookieJar));
+ options.cookieJar = {
+ setCookie,
+ getCookieString: getCookieString
+ };
+ }
+ }
+ // `options.cache`
+ const { cache } = options;
+ if (cache) {
+ if (!cacheableStore.has(cache)) {
+ cacheableStore.set(cache, new CacheableRequest(((requestOptions, handler) => {
+ const result = requestOptions[kRequest](requestOptions, handler);
+ // TODO: remove this when `cacheable-request` supports async request functions.
+ if (is_1.default.promise(result)) {
+ // @ts-expect-error
+ // We only need to implement the error handler in order to support HTTP2 caching.
+ // The result will be a promise anyway.
+ result.once = (event, handler) => {
+ if (event === 'error') {
+ result.catch(handler);
+ }
+ else if (event === 'abort') {
+ // The empty catch is needed here in case when
+ // it rejects before it's `await`ed in `_makeRequest`.
+ (async () => {
+ try {
+ const request = (await result);
+ request.once('abort', handler);
+ }
+ catch (_a) { }
+ })();
+ }
+ else {
+ /* istanbul ignore next: safety check */
+ throw new Error(`Unknown HTTP2 promise event: ${event}`);
+ }
+ return result;
+ };
+ }
+ return result;
+ }), cache));
+ }
+ }
+ // `options.cacheOptions`
+ options.cacheOptions = { ...options.cacheOptions };
+ // `options.dnsCache`
+ if (options.dnsCache === true) {
+ options.dnsCache = globalDnsCache;
+ }
+ else if (!is_1.default.undefined(options.dnsCache) && !options.dnsCache.lookup) {
+ throw new TypeError(`Parameter \`dnsCache\` must be a CacheableLookup instance or a boolean, got ${is_1.default(options.dnsCache)}`);
+ }
+ // `options.timeout`
+ if (is_1.default.number(options.timeout)) {
+ options.timeout = { request: options.timeout };
+ }
+ else if (defaults && options.timeout !== defaults.timeout) {
+ options.timeout = {
+ ...defaults.timeout,
+ ...options.timeout
+ };
+ }
+ else {
+ options.timeout = { ...options.timeout };
+ }
+ // `options.context`
+ if (!options.context) {
+ options.context = {};
+ }
+ // `options.hooks`
+ const areHooksDefault = options.hooks === (defaults === null || defaults === void 0 ? void 0 : defaults.hooks);
+ options.hooks = { ...options.hooks };
+ for (const event of exports.knownHookEvents) {
+ if (event in options.hooks) {
+ if (is_1.default.array(options.hooks[event])) {
+ // See https://github.com/microsoft/TypeScript/issues/31445#issuecomment-576929044
+ options.hooks[event] = [...options.hooks[event]];
+ }
+ else {
+ throw new TypeError(`Parameter \`${event}\` must be an Array, got ${is_1.default(options.hooks[event])}`);
+ }
+ }
+ else {
+ options.hooks[event] = [];
+ }
+ }
+ if (defaults && !areHooksDefault) {
+ for (const event of exports.knownHookEvents) {
+ const defaultHooks = defaults.hooks[event];
+ if (defaultHooks.length > 0) {
+ // See https://github.com/microsoft/TypeScript/issues/31445#issuecomment-576929044
+ options.hooks[event] = [
+ ...defaults.hooks[event],
+ ...options.hooks[event]
+ ];
+ }
+ }
+ }
+ // DNS options
+ if ('family' in options) {
+ deprecation_warning_1.default('"options.family" was never documented, please use "options.dnsLookupIpVersion"');
+ }
+ // HTTPS options
+ if (defaults === null || defaults === void 0 ? void 0 : defaults.https) {
+ options.https = { ...defaults.https, ...options.https };
+ }
+ if ('rejectUnauthorized' in options) {
+ deprecation_warning_1.default('"options.rejectUnauthorized" is now deprecated, please use "options.https.rejectUnauthorized"');
+ }
+ if ('checkServerIdentity' in options) {
+ deprecation_warning_1.default('"options.checkServerIdentity" was never documented, please use "options.https.checkServerIdentity"');
+ }
+ if ('ca' in options) {
+ deprecation_warning_1.default('"options.ca" was never documented, please use "options.https.certificateAuthority"');
+ }
+ if ('key' in options) {
+ deprecation_warning_1.default('"options.key" was never documented, please use "options.https.key"');
+ }
+ if ('cert' in options) {
+ deprecation_warning_1.default('"options.cert" was never documented, please use "options.https.certificate"');
+ }
+ if ('passphrase' in options) {
+ deprecation_warning_1.default('"options.passphrase" was never documented, please use "options.https.passphrase"');
+ }
+ if ('pfx' in options) {
+ deprecation_warning_1.default('"options.pfx" was never documented, please use "options.https.pfx"');
+ }
+ // Other options
+ if ('followRedirects' in options) {
+ throw new TypeError('The `followRedirects` option does not exist. Use `followRedirect` instead.');
+ }
+ if (options.agent) {
+ for (const key in options.agent) {
+ if (key !== 'http' && key !== 'https' && key !== 'http2') {
+ throw new TypeError(`Expected the \`options.agent\` properties to be \`http\`, \`https\` or \`http2\`, got \`${key}\``);
+ }
+ }
+ }
+ options.maxRedirects = (_e = options.maxRedirects) !== null && _e !== void 0 ? _e : 0;
+ // Set non-enumerable properties
+ exports.setNonEnumerableProperties([defaults, rawOptions], options);
+ return normalize_arguments_1.default(options, defaults);
+ }
+ _lockWrite() {
+ const onLockedWrite = () => {
+ throw new TypeError('The payload has been already provided');
+ };
+ this.write = onLockedWrite;
+ this.end = onLockedWrite;
+ }
+ _unlockWrite() {
+ this.write = super.write;
+ this.end = super.end;
+ }
+ async _finalizeBody() {
+ const { options } = this;
+ const { headers } = options;
+ const isForm = !is_1.default.undefined(options.form);
+ const isJSON = !is_1.default.undefined(options.json);
+ const isBody = !is_1.default.undefined(options.body);
+ const hasPayload = isForm || isJSON || isBody;
+ const cannotHaveBody = exports.withoutBody.has(options.method) && !(options.method === 'GET' && options.allowGetBody);
+ this._cannotHaveBody = cannotHaveBody;
+ if (hasPayload) {
+ if (cannotHaveBody) {
+ throw new TypeError(`The \`${options.method}\` method cannot be used with a body`);
+ }
+ if ([isBody, isForm, isJSON].filter(isTrue => isTrue).length > 1) {
+ throw new TypeError('The `body`, `json` and `form` options are mutually exclusive');
+ }
+ if (isBody &&
+ !(options.body instanceof stream_1.Readable) &&
+ !is_1.default.string(options.body) &&
+ !is_1.default.buffer(options.body) &&
+ !is_form_data_1.default(options.body)) {
+ throw new TypeError('The `body` option must be a stream.Readable, string or Buffer');
+ }
+ if (isForm && !is_1.default.object(options.form)) {
+ throw new TypeError('The `form` option must be an Object');
+ }
+ {
+ // Serialize body
+ const noContentType = !is_1.default.string(headers['content-type']);
+ if (isBody) {
+ // Special case for https://github.com/form-data/form-data
+ if (is_form_data_1.default(options.body) && noContentType) {
+ headers['content-type'] = `multipart/form-data; boundary=${options.body.getBoundary()}`;
+ }
+ this[kBody] = options.body;
+ }
+ else if (isForm) {
+ if (noContentType) {
+ headers['content-type'] = 'application/x-www-form-urlencoded';
+ }
+ this[kBody] = (new url_1.URLSearchParams(options.form)).toString();
+ }
+ else {
+ if (noContentType) {
+ headers['content-type'] = 'application/json';
+ }
+ this[kBody] = options.stringifyJson(options.json);
+ }
+ const uploadBodySize = await get_body_size_1.default(this[kBody], options.headers);
+ // See https://tools.ietf.org/html/rfc7230#section-3.3.2
+ // A user agent SHOULD send a Content-Length in a request message when
+ // no Transfer-Encoding is sent and the request method defines a meaning
+ // for an enclosed payload body. For example, a Content-Length header
+ // field is normally sent in a POST request even when the value is 0
+ // (indicating an empty payload body). A user agent SHOULD NOT send a
+ // Content-Length header field when the request message does not contain
+ // a payload body and the method semantics do not anticipate such a
+ // body.
+ if (is_1.default.undefined(headers['content-length']) && is_1.default.undefined(headers['transfer-encoding'])) {
+ if (!cannotHaveBody && !is_1.default.undefined(uploadBodySize)) {
+ headers['content-length'] = String(uploadBodySize);
+ }
+ }
+ }
+ }
+ else if (cannotHaveBody) {
+ this._lockWrite();
+ }
+ else {
+ this._unlockWrite();
+ }
+ this[kBodySize] = Number(headers['content-length']) || undefined;
+ }
+ async _onResponseBase(response) {
+ const { options } = this;
+ const { url } = options;
+ this[kOriginalResponse] = response;
+ if (options.decompress) {
+ response = decompressResponse(response);
+ }
+ const statusCode = response.statusCode;
+ const typedResponse = response;
+ typedResponse.statusMessage = typedResponse.statusMessage ? typedResponse.statusMessage : http.STATUS_CODES[statusCode];
+ typedResponse.url = options.url.toString();
+ typedResponse.requestUrl = this.requestUrl;
+ typedResponse.redirectUrls = this.redirects;
+ typedResponse.request = this;
+ typedResponse.isFromCache = response.fromCache || false;
+ typedResponse.ip = this.ip;
+ typedResponse.retryCount = this.retryCount;
+ this[kIsFromCache] = typedResponse.isFromCache;
+ this[kResponseSize] = Number(response.headers['content-length']) || undefined;
+ this[kResponse] = response;
+ response.once('end', () => {
+ this[kResponseSize] = this[kDownloadedSize];
+ this.emit('downloadProgress', this.downloadProgress);
+ });
+ response.once('error', (error) => {
+ // Force clean-up, because some packages don't do this.
+ // TODO: Fix decompress-response
+ response.destroy();
+ this._beforeError(new ReadError(error, this));
+ });
+ response.once('aborted', () => {
+ this._beforeError(new ReadError({
+ name: 'Error',
+ message: 'The server aborted pending request',
+ code: 'ECONNRESET'
+ }, this));
+ });
+ this.emit('downloadProgress', this.downloadProgress);
+ const rawCookies = response.headers['set-cookie'];
+ if (is_1.default.object(options.cookieJar) && rawCookies) {
+ let promises = rawCookies.map(async (rawCookie) => options.cookieJar.setCookie(rawCookie, url.toString()));
+ if (options.ignoreInvalidCookies) {
+ promises = promises.map(async (p) => p.catch(() => { }));
+ }
+ try {
+ await Promise.all(promises);
+ }
+ catch (error) {
+ this._beforeError(error);
+ return;
+ }
+ }
+ if (options.followRedirect && response.headers.location && redirectCodes.has(statusCode)) {
+ // We're being redirected, we don't care about the response.
+ // It'd be best to abort the request, but we can't because
+ // we would have to sacrifice the TCP connection. We don't want that.
+ response.resume();
+ if (this[kRequest]) {
+ this[kCancelTimeouts]();
+ // eslint-disable-next-line @typescript-eslint/no-dynamic-delete
+ delete this[kRequest];
+ this[kUnproxyEvents]();
+ }
+ const shouldBeGet = statusCode === 303 && options.method !== 'GET' && options.method !== 'HEAD';
+ if (shouldBeGet || !options.methodRewriting) {
+ // Server responded with "see other", indicating that the resource exists at another location,
+ // and the client should request it from that location via GET or HEAD.
+ options.method = 'GET';
+ if ('body' in options) {
+ delete options.body;
+ }
+ if ('json' in options) {
+ delete options.json;
+ }
+ if ('form' in options) {
+ delete options.form;
+ }
+ this[kBody] = undefined;
+ delete options.headers['content-length'];
+ }
+ if (this.redirects.length >= options.maxRedirects) {
+ this._beforeError(new MaxRedirectsError(this));
+ return;
+ }
+ try {
+ // Do not remove. See https://github.com/sindresorhus/got/pull/214
+ const redirectBuffer = Buffer.from(response.headers.location, 'binary').toString();
+ // Handles invalid URLs. See https://github.com/sindresorhus/got/issues/604
+ const redirectUrl = new url_1.URL(redirectBuffer, url);
+ const redirectString = redirectUrl.toString();
+ decodeURI(redirectString);
+ // Redirecting to a different site, clear sensitive data.
+ if (redirectUrl.hostname !== url.hostname || redirectUrl.port !== url.port) {
+ if ('host' in options.headers) {
+ delete options.headers.host;
+ }
+ if ('cookie' in options.headers) {
+ delete options.headers.cookie;
+ }
+ if ('authorization' in options.headers) {
+ delete options.headers.authorization;
+ }
+ if (options.username || options.password) {
+ options.username = '';
+ options.password = '';
+ }
+ }
+ else {
+ redirectUrl.username = options.username;
+ redirectUrl.password = options.password;
+ }
+ this.redirects.push(redirectString);
+ options.url = redirectUrl;
+ for (const hook of options.hooks.beforeRedirect) {
+ // eslint-disable-next-line no-await-in-loop
+ await hook(options, typedResponse);
+ }
+ this.emit('redirect', typedResponse, options);
+ await this._makeRequest();
+ }
+ catch (error) {
+ this._beforeError(error);
+ return;
+ }
+ return;
+ }
+ if (options.isStream && options.throwHttpErrors && !is_response_ok_1.isResponseOk(typedResponse)) {
+ this._beforeError(new HTTPError(typedResponse));
+ return;
+ }
+ response.on('readable', () => {
+ if (this[kTriggerRead]) {
+ this._read();
+ }
+ });
+ this.on('resume', () => {
+ response.resume();
+ });
+ this.on('pause', () => {
+ response.pause();
+ });
+ response.once('end', () => {
+ this.push(null);
+ });
+ this.emit('response', response);
+ for (const destination of this[kServerResponsesPiped]) {
+ if (destination.headersSent) {
+ continue;
+ }
+ // eslint-disable-next-line guard-for-in
+ for (const key in response.headers) {
+ const isAllowed = options.decompress ? key !== 'content-encoding' : true;
+ const value = response.headers[key];
+ if (isAllowed) {
+ destination.setHeader(key, value);
+ }
+ }
+ destination.statusCode = statusCode;
+ }
+ }
+ async _onResponse(response) {
+ try {
+ await this._onResponseBase(response);
+ }
+ catch (error) {
+ /* istanbul ignore next: better safe than sorry */
+ this._beforeError(error);
+ }
+ }
+ _onRequest(request) {
+ const { options } = this;
+ const { timeout, url } = options;
+ http_timer_1.default(request);
+ this[kCancelTimeouts] = timed_out_1.default(request, timeout, url);
+ const responseEventName = options.cache ? 'cacheableResponse' : 'response';
+ request.once(responseEventName, (response) => {
+ void this._onResponse(response);
+ });
+ request.once('error', (error) => {
+ var _a;
+ // Force clean-up, because some packages (e.g. nock) don't do this.
+ request.destroy();
+ // Node.js <= 12.18.2 mistakenly emits the response `end` first.
+ (_a = request.res) === null || _a === void 0 ? void 0 : _a.removeAllListeners('end');
+ error = error instanceof timed_out_1.TimeoutError ? new TimeoutError(error, this.timings, this) : new RequestError(error.message, error, this);
+ this._beforeError(error);
+ });
+ this[kUnproxyEvents] = proxy_events_1.default(request, this, proxiedRequestEvents);
+ this[kRequest] = request;
+ this.emit('uploadProgress', this.uploadProgress);
+ // Send body
+ const body = this[kBody];
+ const currentRequest = this.redirects.length === 0 ? this : request;
+ if (is_1.default.nodeStream(body)) {
+ body.pipe(currentRequest);
+ body.once('error', (error) => {
+ this._beforeError(new UploadError(error, this));
+ });
+ }
+ else {
+ this._unlockWrite();
+ if (!is_1.default.undefined(body)) {
+ this._writeRequest(body, undefined, () => { });
+ currentRequest.end();
+ this._lockWrite();
+ }
+ else if (this._cannotHaveBody || this._noPipe) {
+ currentRequest.end();
+ this._lockWrite();
+ }
+ }
+ this.emit('request', request);
+ }
+ async _createCacheableRequest(url, options) {
+ return new Promise((resolve, reject) => {
+ // TODO: Remove `utils/url-to-options.ts` when `cacheable-request` is fixed
+ Object.assign(options, url_to_options_1.default(url));
+ // `http-cache-semantics` checks this
+ // TODO: Fix this ignore.
+ // @ts-expect-error
+ delete options.url;
+ let request;
+ // This is ugly
+ const cacheRequest = cacheableStore.get(options.cache)(options, async (response) => {
+ // TODO: Fix `cacheable-response`
+ response._readableState.autoDestroy = false;
+ if (request) {
+ (await request).emit('cacheableResponse', response);
+ }
+ resolve(response);
+ });
+ // Restore options
+ options.url = url;
+ cacheRequest.once('error', reject);
+ cacheRequest.once('request', async (requestOrPromise) => {
+ request = requestOrPromise;
+ resolve(request);
+ });
+ });
+ }
+ async _makeRequest() {
+ var _a, _b, _c, _d, _e;
+ const { options } = this;
+ const { headers } = options;
+ for (const key in headers) {
+ if (is_1.default.undefined(headers[key])) {
+ // eslint-disable-next-line @typescript-eslint/no-dynamic-delete
+ delete headers[key];
+ }
+ else if (is_1.default.null_(headers[key])) {
+ throw new TypeError(`Use \`undefined\` instead of \`null\` to delete the \`${key}\` header`);
+ }
+ }
+ if (options.decompress && is_1.default.undefined(headers['accept-encoding'])) {
+ headers['accept-encoding'] = supportsBrotli ? 'gzip, deflate, br' : 'gzip, deflate';
+ }
+ // Set cookies
+ if (options.cookieJar) {
+ const cookieString = await options.cookieJar.getCookieString(options.url.toString());
+ if (is_1.default.nonEmptyString(cookieString)) {
+ options.headers.cookie = cookieString;
+ }
+ }
+ for (const hook of options.hooks.beforeRequest) {
+ // eslint-disable-next-line no-await-in-loop
+ const result = await hook(options);
+ if (!is_1.default.undefined(result)) {
+ // @ts-expect-error Skip the type mismatch to support abstract responses
+ options.request = () => result;
+ break;
+ }
+ }
+ if (options.body && this[kBody] !== options.body) {
+ this[kBody] = options.body;
+ }
+ const { agent, request, timeout, url } = options;
+ if (options.dnsCache && !('lookup' in options)) {
+ options.lookup = options.dnsCache.lookup;
+ }
+ // UNIX sockets
+ if (url.hostname === 'unix') {
+ const matches = /(?<socketPath>.+?):(?<path>.+)/.exec(`${url.pathname}${url.search}`);
+ if (matches === null || matches === void 0 ? void 0 : matches.groups) {
+ const { socketPath, path } = matches.groups;
+ Object.assign(options, {
+ socketPath,
+ path,
+ host: ''
+ });
+ }
+ }
+ const isHttps = url.protocol === 'https:';
+ // Fallback function
+ let fallbackFn;
+ if (options.http2) {
+ fallbackFn = http2wrapper.auto;
+ }
+ else {
+ fallbackFn = isHttps ? https.request : http.request;
+ }
+ const realFn = (_a = options.request) !== null && _a !== void 0 ? _a : fallbackFn;
+ // Cache support
+ const fn = options.cache ? this._createCacheableRequest : realFn;
+ // Pass an agent directly when HTTP2 is disabled
+ if (agent && !options.http2) {
+ options.agent = agent[isHttps ? 'https' : 'http'];
+ }
+ // Prepare plain HTTP request options
+ options[kRequest] = realFn;
+ delete options.request;
+ // TODO: Fix this ignore.
+ // @ts-expect-error
+ delete options.timeout;
+ const requestOptions = options;
+ requestOptions.shared = (_b = options.cacheOptions) === null || _b === void 0 ? void 0 : _b.shared;
+ requestOptions.cacheHeuristic = (_c = options.cacheOptions) === null || _c === void 0 ? void 0 : _c.cacheHeuristic;
+ requestOptions.immutableMinTimeToLive = (_d = options.cacheOptions) === null || _d === void 0 ? void 0 : _d.immutableMinTimeToLive;
+ requestOptions.ignoreCargoCult = (_e = options.cacheOptions) === null || _e === void 0 ? void 0 : _e.ignoreCargoCult;
+ // If `dnsLookupIpVersion` is not present do not override `family`
+ if (options.dnsLookupIpVersion !== undefined) {
+ try {
+ requestOptions.family = dns_ip_version_1.dnsLookupIpVersionToFamily(options.dnsLookupIpVersion);
+ }
+ catch (_f) {
+ throw new Error('Invalid `dnsLookupIpVersion` option value');
+ }
+ }
+ // HTTPS options remapping
+ if (options.https) {
+ if ('rejectUnauthorized' in options.https) {
+ requestOptions.rejectUnauthorized = options.https.rejectUnauthorized;
+ }
+ if (options.https.checkServerIdentity) {
+ requestOptions.checkServerIdentity = options.https.checkServerIdentity;
+ }
+ if (options.https.certificateAuthority) {
+ requestOptions.ca = options.https.certificateAuthority;
+ }
+ if (options.https.certificate) {
+ requestOptions.cert = options.https.certificate;
+ }
+ if (options.https.key) {
+ requestOptions.key = options.https.key;
+ }
+ if (options.https.passphrase) {
+ requestOptions.passphrase = options.https.passphrase;
+ }
+ if (options.https.pfx) {
+ requestOptions.pfx = options.https.pfx;
+ }
+ }
+ try {
+ let requestOrResponse = await fn(url, requestOptions);
+ if (is_1.default.undefined(requestOrResponse)) {
+ requestOrResponse = fallbackFn(url, requestOptions);
+ }
+ // Restore options
+ options.request = request;
+ options.timeout = timeout;
+ options.agent = agent;
+ // HTTPS options restore
+ if (options.https) {
+ if ('rejectUnauthorized' in options.https) {
+ delete requestOptions.rejectUnauthorized;
+ }
+ if (options.https.checkServerIdentity) {
+ // @ts-expect-error - This one will be removed when we remove the alias.
+ delete requestOptions.checkServerIdentity;
+ }
+ if (options.https.certificateAuthority) {
+ delete requestOptions.ca;
+ }
+ if (options.https.certificate) {
+ delete requestOptions.cert;
+ }
+ if (options.https.key) {
+ delete requestOptions.key;
+ }
+ if (options.https.passphrase) {
+ delete requestOptions.passphrase;
+ }
+ if (options.https.pfx) {
+ delete requestOptions.pfx;
+ }
+ }
+ if (isClientRequest(requestOrResponse)) {
+ this._onRequest(requestOrResponse);
+ // Emit the response after the stream has been ended
+ }
+ else if (this.writable) {
+ this.once('finish', () => {
+ void this._onResponse(requestOrResponse);
+ });
+ this._unlockWrite();
+ this.end();
+ this._lockWrite();
+ }
+ else {
+ void this._onResponse(requestOrResponse);
+ }
+ }
+ catch (error) {
+ if (error instanceof CacheableRequest.CacheError) {
+ throw new CacheError(error, this);
+ }
+ throw new RequestError(error.message, error, this);
+ }
+ }
+ async _error(error) {
+ try {
+ for (const hook of this.options.hooks.beforeError) {
+ // eslint-disable-next-line no-await-in-loop
+ error = await hook(error);
+ }
+ }
+ catch (error_) {
+ error = new RequestError(error_.message, error_, this);
+ }
+ this.destroy(error);
+ }
+ _beforeError(error) {
+ if (this[kStopReading]) {
+ return;
+ }
+ const { options } = this;
+ const retryCount = this.retryCount + 1;
+ this[kStopReading] = true;
+ if (!(error instanceof RequestError)) {
+ error = new RequestError(error.message, error, this);
+ }
+ const typedError = error;
+ const { response } = typedError;
+ void (async () => {
+ if (response && !response.body) {
+ response.setEncoding(this._readableState.encoding);
+ try {
+ response.rawBody = await get_buffer_1.default(response);
+ response.body = response.rawBody.toString();
+ }
+ catch (_a) { }
+ }
+ if (this.listenerCount('retry') !== 0) {
+ let backoff;
+ try {
+ let retryAfter;
+ if (response && 'retry-after' in response.headers) {
+ retryAfter = Number(response.headers['retry-after']);
+ if (Number.isNaN(retryAfter)) {
+ retryAfter = Date.parse(response.headers['retry-after']) - Date.now();
+ if (retryAfter <= 0) {
+ retryAfter = 1;
+ }
+ }
+ else {
+ retryAfter *= 1000;
+ }
+ }
+ backoff = await options.retry.calculateDelay({
+ attemptCount: retryCount,
+ retryOptions: options.retry,
+ error: typedError,
+ retryAfter,
+ computedValue: calculate_retry_delay_1.default({
+ attemptCount: retryCount,
+ retryOptions: options.retry,
+ error: typedError,
+ retryAfter,
+ computedValue: 0
+ })
+ });
+ }
+ catch (error_) {
+ void this._error(new RequestError(error_.message, error_, this));
+ return;
+ }
+ if (backoff) {
+ const retry = async () => {
+ try {
+ for (const hook of this.options.hooks.beforeRetry) {
+ // eslint-disable-next-line no-await-in-loop
+ await hook(this.options, typedError, retryCount);
+ }
+ }
+ catch (error_) {
+ void this._error(new RequestError(error_.message, error, this));
+ return;
+ }
+ // Something forced us to abort the retry
+ if (this.destroyed) {
+ return;
+ }
+ this.destroy();
+ this.emit('retry', retryCount, error);
+ };
+ this[kRetryTimeout] = setTimeout(retry, backoff);
+ return;
+ }
+ }
+ void this._error(typedError);
+ })();
+ }
+ _read() {
+ this[kTriggerRead] = true;
+ const response = this[kResponse];
+ if (response && !this[kStopReading]) {
+ // We cannot put this in the `if` above
+ // because `.read()` also triggers the `end` event
+ if (response.readableLength) {
+ this[kTriggerRead] = false;
+ }
+ let data;
+ while ((data = response.read()) !== null) {
+ this[kDownloadedSize] += data.length;
+ this[kStartedReading] = true;
+ const progress = this.downloadProgress;
+ if (progress.percent < 1) {
+ this.emit('downloadProgress', progress);
+ }
+ this.push(data);
+ }
+ }
+ }
+ // Node.js 12 has incorrect types, so the encoding must be a string
+ _write(chunk, encoding, callback) {
+ const write = () => {
+ this._writeRequest(chunk, encoding, callback);
+ };
+ if (this.requestInitialized) {
+ write();
+ }
+ else {
+ this[kJobs].push(write);
+ }
+ }
+ _writeRequest(chunk, encoding, callback) {
+ if (this[kRequest].destroyed) {
+ // Probably the `ClientRequest` instance will throw
+ return;
+ }
+ this._progressCallbacks.push(() => {
+ this[kUploadedSize] += Buffer.byteLength(chunk, encoding);
+ const progress = this.uploadProgress;
+ if (progress.percent < 1) {
+ this.emit('uploadProgress', progress);
+ }
+ });
+ // TODO: What happens if it's from cache? Then this[kRequest] won't be defined.
+ this[kRequest].write(chunk, encoding, (error) => {
+ if (!error && this._progressCallbacks.length > 0) {
+ this._progressCallbacks.shift()();
+ }
+ callback(error);
+ });
+ }
+ _final(callback) {
+ const endRequest = () => {
+ // FIX: Node.js 10 calls the write callback AFTER the end callback!
+ while (this._progressCallbacks.length !== 0) {
+ this._progressCallbacks.shift()();
+ }
+ // We need to check if `this[kRequest]` is present,
+ // because it isn't when we use cache.
+ if (!(kRequest in this)) {
+ callback();
+ return;
+ }
+ if (this[kRequest].destroyed) {
+ callback();
+ return;
+ }
+ this[kRequest].end((error) => {
+ if (!error) {
+ this[kBodySize] = this[kUploadedSize];
+ this.emit('uploadProgress', this.uploadProgress);
+ this[kRequest].emit('upload-complete');
+ }
+ callback(error);
+ });
+ };
+ if (this.requestInitialized) {
+ endRequest();
+ }
+ else {
+ this[kJobs].push(endRequest);
+ }
+ }
+ _destroy(error, callback) {
+ var _a;
+ this[kStopReading] = true;
+ // Prevent further retries
+ clearTimeout(this[kRetryTimeout]);
+ if (kRequest in this) {
+ this[kCancelTimeouts]();
+ // TODO: Remove the next `if` when these get fixed:
+ // - https://github.com/nodejs/node/issues/32851
+ if (!((_a = this[kResponse]) === null || _a === void 0 ? void 0 : _a.complete)) {
+ this[kRequest].destroy();
+ }
+ }
+ if (error !== null && !is_1.default.undefined(error) && !(error instanceof RequestError)) {
+ error = new RequestError(error.message, error, this);
+ }
+ callback(error);
+ }
+ get _isAboutToError() {
+ return this[kStopReading];
+ }
+ /**
+ The remote IP address.
+ */
+ get ip() {
+ var _a;
+ return (_a = this.socket) === null || _a === void 0 ? void 0 : _a.remoteAddress;
+ }
+ /**
+ Indicates whether the request has been aborted or not.
+ */
+ get aborted() {
+ var _a, _b, _c;
+ return ((_b = (_a = this[kRequest]) === null || _a === void 0 ? void 0 : _a.destroyed) !== null && _b !== void 0 ? _b : this.destroyed) && !((_c = this[kOriginalResponse]) === null || _c === void 0 ? void 0 : _c.complete);
+ }
+ get socket() {
+ var _a, _b;
+ return (_b = (_a = this[kRequest]) === null || _a === void 0 ? void 0 : _a.socket) !== null && _b !== void 0 ? _b : undefined;
+ }
+ /**
+ Progress event for downloading (receiving a response).
+ */
+ get downloadProgress() {
+ let percent;
+ if (this[kResponseSize]) {
+ percent = this[kDownloadedSize] / this[kResponseSize];
+ }
+ else if (this[kResponseSize] === this[kDownloadedSize]) {
+ percent = 1;
+ }
+ else {
+ percent = 0;
+ }
+ return {
+ percent,
+ transferred: this[kDownloadedSize],
+ total: this[kResponseSize]
+ };
+ }
+ /**
+ Progress event for uploading (sending a request).
+ */
+ get uploadProgress() {
+ let percent;
+ if (this[kBodySize]) {
+ percent = this[kUploadedSize] / this[kBodySize];
+ }
+ else if (this[kBodySize] === this[kUploadedSize]) {
+ percent = 1;
+ }
+ else {
+ percent = 0;
+ }
+ return {
+ percent,
+ transferred: this[kUploadedSize],
+ total: this[kBodySize]
+ };
+ }
+ /**
+ The object contains the following properties:
+
+ - `start` - Time when the request started.
+ - `socket` - Time when a socket was assigned to the request.
+ - `lookup` - Time when the DNS lookup finished.
+ - `connect` - Time when the socket successfully connected.
+ - `secureConnect` - Time when the socket securely connected.
+ - `upload` - Time when the request finished uploading.
+ - `response` - Time when the request fired `response` event.
+ - `end` - Time when the response fired `end` event.
+ - `error` - Time when the request fired `error` event.
+ - `abort` - Time when the request fired `abort` event.
+ - `phases`
+ - `wait` - `timings.socket - timings.start`
+ - `dns` - `timings.lookup - timings.socket`
+ - `tcp` - `timings.connect - timings.lookup`
+ - `tls` - `timings.secureConnect - timings.connect`
+ - `request` - `timings.upload - (timings.secureConnect || timings.connect)`
+ - `firstByte` - `timings.response - timings.upload`
+ - `download` - `timings.end - timings.response`
+ - `total` - `(timings.end || timings.error || timings.abort) - timings.start`
+
+ If something has not been measured yet, it will be `undefined`.
+
+ __Note__: The time is a `number` representing the milliseconds elapsed since the UNIX epoch.
+ */
+ get timings() {
+ var _a;
+ return (_a = this[kRequest]) === null || _a === void 0 ? void 0 : _a.timings;
+ }
+ /**
+ Whether the response was retrieved from the cache.
+ */
+ get isFromCache() {
+ return this[kIsFromCache];
+ }
+ pipe(destination, options) {
+ if (this[kStartedReading]) {
+ throw new Error('Failed to pipe. The response has been emitted already.');
+ }
+ if (destination instanceof http_1.ServerResponse) {
+ this[kServerResponsesPiped].add(destination);
+ }
+ return super.pipe(destination, options);
+ }
+ unpipe(destination) {
+ if (destination instanceof http_1.ServerResponse) {
+ this[kServerResponsesPiped].delete(destination);
+ }
+ super.unpipe(destination);
+ return this;
+ }
+}
+exports.default = Request;
+
+
+/***/ }),
+
+/***/ 4993:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.dnsLookupIpVersionToFamily = exports.isDnsLookupIpVersion = void 0;
+const conversionTable = {
+ auto: 0,
+ ipv4: 4,
+ ipv6: 6
+};
+exports.isDnsLookupIpVersion = (value) => {
+ return value in conversionTable;
+};
+exports.dnsLookupIpVersionToFamily = (dnsLookupIpVersion) => {
+ if (exports.isDnsLookupIpVersion(dnsLookupIpVersion)) {
+ return conversionTable[dnsLookupIpVersion];
+ }
+ throw new Error('Invalid DNS lookup IP version');
+};
+
+
+/***/ }),
+
+/***/ 4564:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const fs_1 = __nccwpck_require__(5747);
+const util_1 = __nccwpck_require__(1669);
+const is_1 = __nccwpck_require__(7678);
+const is_form_data_1 = __nccwpck_require__(40);
+const statAsync = util_1.promisify(fs_1.stat);
+exports.default = async (body, headers) => {
+ if (headers && 'content-length' in headers) {
+ return Number(headers['content-length']);
+ }
+ if (!body) {
+ return 0;
+ }
+ if (is_1.default.string(body)) {
+ return Buffer.byteLength(body);
+ }
+ if (is_1.default.buffer(body)) {
+ return body.length;
+ }
+ if (is_form_data_1.default(body)) {
+ return util_1.promisify(body.getLength.bind(body))();
+ }
+ if (body instanceof fs_1.ReadStream) {
+ const { size } = await statAsync(body.path);
+ if (size === 0) {
+ return undefined;
+ }
+ return size;
+ }
+ return undefined;
+};
+
+
+/***/ }),
+
+/***/ 4500:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+// TODO: Update https://github.com/sindresorhus/get-stream
+const getBuffer = async (stream) => {
+ const chunks = [];
+ let length = 0;
+ for await (const chunk of stream) {
+ chunks.push(chunk);
+ length += Buffer.byteLength(chunk);
+ }
+ if (Buffer.isBuffer(chunks[0])) {
+ return Buffer.concat(chunks, length);
+ }
+ return Buffer.from(chunks.join(''));
+};
+exports.default = getBuffer;
+
+
+/***/ }),
+
+/***/ 40:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const is_1 = __nccwpck_require__(7678);
+exports.default = (body) => is_1.default.nodeStream(body) && is_1.default.function_(body.getBoundary);
+
+
+/***/ }),
+
+/***/ 9298:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.isResponseOk = void 0;
+exports.isResponseOk = (response) => {
+ const { statusCode } = response;
+ const limitStatusCode = response.request.options.followRedirect ? 299 : 399;
+ return (statusCode >= 200 && statusCode <= limitStatusCode) || statusCode === 304;
+};
+
+
+/***/ }),
+
+/***/ 9219:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+/* istanbul ignore file: deprecated */
+const url_1 = __nccwpck_require__(8835);
+const keys = [
+ 'protocol',
+ 'host',
+ 'hostname',
+ 'port',
+ 'pathname',
+ 'search'
+];
+exports.default = (origin, options) => {
+ var _a, _b;
+ if (options.path) {
+ if (options.pathname) {
+ throw new TypeError('Parameters `path` and `pathname` are mutually exclusive.');
+ }
+ if (options.search) {
+ throw new TypeError('Parameters `path` and `search` are mutually exclusive.');
+ }
+ if (options.searchParams) {
+ throw new TypeError('Parameters `path` and `searchParams` are mutually exclusive.');
+ }
+ }
+ if (options.search && options.searchParams) {
+ throw new TypeError('Parameters `search` and `searchParams` are mutually exclusive.');
+ }
+ if (!origin) {
+ if (!options.protocol) {
+ throw new TypeError('No URL protocol specified');
+ }
+ origin = `${options.protocol}//${(_b = (_a = options.hostname) !== null && _a !== void 0 ? _a : options.host) !== null && _b !== void 0 ? _b : ''}`;
+ }
+ const url = new url_1.URL(origin);
+ if (options.path) {
+ const searchIndex = options.path.indexOf('?');
+ if (searchIndex === -1) {
+ options.pathname = options.path;
+ }
+ else {
+ options.pathname = options.path.slice(0, searchIndex);
+ options.search = options.path.slice(searchIndex + 1);
+ }
+ delete options.path;
+ }
+ for (const key of keys) {
+ if (options[key]) {
+ url[key] = options[key].toString();
+ }
+ }
+ return url;
+};
+
+
+/***/ }),
+
+/***/ 3021:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+function default_1(from, to, events) {
+ const fns = {};
+ for (const event of events) {
+ fns[event] = (...args) => {
+ to.emit(event, ...args);
+ };
+ from.on(event, fns[event]);
+ }
+ return () => {
+ for (const event of events) {
+ from.off(event, fns[event]);
+ }
+ };
+}
+exports.default = default_1;
+
+
+/***/ }),
+
+/***/ 2454:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.TimeoutError = void 0;
+const net = __nccwpck_require__(1631);
+const unhandle_1 = __nccwpck_require__(1593);
+const reentry = Symbol('reentry');
+const noop = () => { };
+class TimeoutError extends Error {
+ constructor(threshold, event) {
+ super(`Timeout awaiting '${event}' for ${threshold}ms`);
+ this.event = event;
+ this.name = 'TimeoutError';
+ this.code = 'ETIMEDOUT';
+ }
+}
+exports.TimeoutError = TimeoutError;
+exports.default = (request, delays, options) => {
+ if (reentry in request) {
+ return noop;
+ }
+ request[reentry] = true;
+ const cancelers = [];
+ const { once, unhandleAll } = unhandle_1.default();
+ const addTimeout = (delay, callback, event) => {
+ var _a;
+ const timeout = setTimeout(callback, delay, delay, event);
+ (_a = timeout.unref) === null || _a === void 0 ? void 0 : _a.call(timeout);
+ const cancel = () => {
+ clearTimeout(timeout);
+ };
+ cancelers.push(cancel);
+ return cancel;
+ };
+ const { host, hostname } = options;
+ const timeoutHandler = (delay, event) => {
+ request.destroy(new TimeoutError(delay, event));
+ };
+ const cancelTimeouts = () => {
+ for (const cancel of cancelers) {
+ cancel();
+ }
+ unhandleAll();
+ };
+ request.once('error', error => {
+ cancelTimeouts();
+ // Save original behavior
+ /* istanbul ignore next */
+ if (request.listenerCount('error') === 0) {
+ throw error;
+ }
+ });
+ request.once('close', cancelTimeouts);
+ once(request, 'response', (response) => {
+ once(response, 'end', cancelTimeouts);
+ });
+ if (typeof delays.request !== 'undefined') {
+ addTimeout(delays.request, timeoutHandler, 'request');
+ }
+ if (typeof delays.socket !== 'undefined') {
+ const socketTimeoutHandler = () => {
+ timeoutHandler(delays.socket, 'socket');
+ };
+ request.setTimeout(delays.socket, socketTimeoutHandler);
+ // `request.setTimeout(0)` causes a memory leak.
+ // We can just remove the listener and forget about the timer - it's unreffed.
+ // See https://github.com/sindresorhus/got/issues/690
+ cancelers.push(() => {
+ request.removeListener('timeout', socketTimeoutHandler);
+ });
+ }
+ once(request, 'socket', (socket) => {
+ var _a;
+ const { socketPath } = request;
+ /* istanbul ignore next: hard to test */
+ if (socket.connecting) {
+ const hasPath = Boolean(socketPath !== null && socketPath !== void 0 ? socketPath : net.isIP((_a = hostname !== null && hostname !== void 0 ? hostname : host) !== null && _a !== void 0 ? _a : '') !== 0);
+ if (typeof delays.lookup !== 'undefined' && !hasPath && typeof socket.address().address === 'undefined') {
+ const cancelTimeout = addTimeout(delays.lookup, timeoutHandler, 'lookup');
+ once(socket, 'lookup', cancelTimeout);
+ }
+ if (typeof delays.connect !== 'undefined') {
+ const timeConnect = () => addTimeout(delays.connect, timeoutHandler, 'connect');
+ if (hasPath) {
+ once(socket, 'connect', timeConnect());
+ }
+ else {
+ once(socket, 'lookup', (error) => {
+ if (error === null) {
+ once(socket, 'connect', timeConnect());
+ }
+ });
+ }
+ }
+ if (typeof delays.secureConnect !== 'undefined' && options.protocol === 'https:') {
+ once(socket, 'connect', () => {
+ const cancelTimeout = addTimeout(delays.secureConnect, timeoutHandler, 'secureConnect');
+ once(socket, 'secureConnect', cancelTimeout);
+ });
+ }
+ }
+ if (typeof delays.send !== 'undefined') {
+ const timeRequest = () => addTimeout(delays.send, timeoutHandler, 'send');
+ /* istanbul ignore next: hard to test */
+ if (socket.connecting) {
+ once(socket, 'connect', () => {
+ once(request, 'upload-complete', timeRequest());
+ });
+ }
+ else {
+ once(request, 'upload-complete', timeRequest());
+ }
+ }
+ });
+ if (typeof delays.response !== 'undefined') {
+ once(request, 'upload-complete', () => {
+ const cancelTimeout = addTimeout(delays.response, timeoutHandler, 'response');
+ once(request, 'response', cancelTimeout);
+ });
+ }
+ return cancelTimeouts;
+};
+
+
+/***/ }),
+
+/***/ 1593:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+// When attaching listeners, it's very easy to forget about them.
+// Especially if you do error handling and set timeouts.
+// So instead of checking if it's proper to throw an error on every timeout ever,
+// use this simple tool which will remove all listeners you have attached.
+exports.default = () => {
+ const handlers = [];
+ return {
+ once(origin, event, fn) {
+ origin.once(event, fn);
+ handlers.push({ origin, event, fn });
+ },
+ unhandleAll() {
+ for (const handler of handlers) {
+ const { origin, event, fn } = handler;
+ origin.removeListener(event, fn);
+ }
+ handlers.length = 0;
+ }
+ };
+};
+
+
+/***/ }),
+
+/***/ 8026:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const is_1 = __nccwpck_require__(7678);
+exports.default = (url) => {
+ // Cast to URL
+ url = url;
+ const options = {
+ protocol: url.protocol,
+ hostname: is_1.default.string(url.hostname) && url.hostname.startsWith('[') ? url.hostname.slice(1, -1) : url.hostname,
+ host: url.host,
+ hash: url.hash,
+ search: url.search,
+ pathname: url.pathname,
+ href: url.href,
+ path: `${url.pathname || ''}${url.search || ''}`
+ };
+ if (is_1.default.string(url.port) && url.port.length > 0) {
+ options.port = Number(url.port);
+ }
+ if (url.username || url.password) {
+ options.auth = `${url.username || ''}:${url.password || ''}`;
+ }
+ return options;
+};
+
+
+/***/ }),
+
+/***/ 7288:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+class WeakableMap {
+ constructor() {
+ this.weakMap = new WeakMap();
+ this.map = new Map();
+ }
+ set(key, value) {
+ if (typeof key === 'object') {
+ this.weakMap.set(key, value);
+ }
+ else {
+ this.map.set(key, value);
+ }
+ }
+ get(key) {
+ if (typeof key === 'object') {
+ return this.weakMap.get(key);
+ }
+ return this.map.get(key);
+ }
+ has(key) {
+ if (typeof key === 'object') {
+ return this.weakMap.has(key);
+ }
+ return this.map.has(key);
+ }
+}
+exports.default = WeakableMap;
+
+
+/***/ }),
+
+/***/ 4337:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+"use strict";
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+ for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.defaultHandler = void 0;
+const is_1 = __nccwpck_require__(7678);
+const as_promise_1 = __nccwpck_require__(6056);
+const create_rejection_1 = __nccwpck_require__(6457);
+const core_1 = __nccwpck_require__(94);
+const deep_freeze_1 = __nccwpck_require__(285);
+const errors = {
+ RequestError: as_promise_1.RequestError,
+ CacheError: as_promise_1.CacheError,
+ ReadError: as_promise_1.ReadError,
+ HTTPError: as_promise_1.HTTPError,
+ MaxRedirectsError: as_promise_1.MaxRedirectsError,
+ TimeoutError: as_promise_1.TimeoutError,
+ ParseError: as_promise_1.ParseError,
+ CancelError: as_promise_1.CancelError,
+ UnsupportedProtocolError: as_promise_1.UnsupportedProtocolError,
+ UploadError: as_promise_1.UploadError
+};
+// The `delay` package weighs 10KB (!)
+const delay = async (ms) => new Promise(resolve => {
+ setTimeout(resolve, ms);
+});
+const { normalizeArguments } = core_1.default;
+const mergeOptions = (...sources) => {
+ let mergedOptions;
+ for (const source of sources) {
+ mergedOptions = normalizeArguments(undefined, source, mergedOptions);
+ }
+ return mergedOptions;
+};
+const getPromiseOrStream = (options) => options.isStream ? new core_1.default(undefined, options) : as_promise_1.default(options);
+const isGotInstance = (value) => ('defaults' in value && 'options' in value.defaults);
+const aliases = [
+ 'get',
+ 'post',
+ 'put',
+ 'patch',
+ 'head',
+ 'delete'
+];
+exports.defaultHandler = (options, next) => next(options);
+const callInitHooks = (hooks, options) => {
+ if (hooks) {
+ for (const hook of hooks) {
+ hook(options);
+ }
+ }
+};
+const create = (defaults) => {
+ // Proxy properties from next handlers
+ defaults._rawHandlers = defaults.handlers;
+ defaults.handlers = defaults.handlers.map(fn => ((options, next) => {
+        // `root` is assigned when the handler calls `next` below
+ let root;
+ const result = fn(options, newOptions => {
+ root = next(newOptions);
+ return root;
+ });
+ if (result !== root && !options.isStream && root) {
+ const typedResult = result;
+            const { then: promiseThen, catch: promiseCatch, finally: promiseFinally } = typedResult;
+            Object.setPrototypeOf(typedResult, Object.getPrototypeOf(root));
+            Object.defineProperties(typedResult, Object.getOwnPropertyDescriptors(root));
+            // These should point to the new promise
+            // eslint-disable-next-line promise/prefer-await-to-then
+            typedResult.then = promiseThen;
+            typedResult.catch = promiseCatch;
+            typedResult.finally = promiseFinally;
+ }
+ return result;
+ }));
+ // Got interface
+ const got = ((url, options = {}, _defaults) => {
+ var _a, _b;
+ let iteration = 0;
+ const iterateHandlers = (newOptions) => {
+ return defaults.handlers[iteration++](newOptions, iteration === defaults.handlers.length ? getPromiseOrStream : iterateHandlers);
+ };
+ // TODO: Remove this in Got 12.
+ if (is_1.default.plainObject(url)) {
+ const mergedOptions = {
+ ...url,
+ ...options
+ };
+ core_1.setNonEnumerableProperties([url, options], mergedOptions);
+ options = mergedOptions;
+ url = undefined;
+ }
+ try {
+ // Call `init` hooks
+ let initHookError;
+ try {
+ callInitHooks(defaults.options.hooks.init, options);
+ callInitHooks((_a = options.hooks) === null || _a === void 0 ? void 0 : _a.init, options);
+ }
+ catch (error) {
+ initHookError = error;
+ }
+ // Normalize options & call handlers
+ const normalizedOptions = normalizeArguments(url, options, _defaults !== null && _defaults !== void 0 ? _defaults : defaults.options);
+ normalizedOptions[core_1.kIsNormalizedAlready] = true;
+ if (initHookError) {
+ throw new as_promise_1.RequestError(initHookError.message, initHookError, normalizedOptions);
+ }
+ return iterateHandlers(normalizedOptions);
+ }
+ catch (error) {
+ if (options.isStream) {
+ throw error;
+ }
+ else {
+ return create_rejection_1.default(error, defaults.options.hooks.beforeError, (_b = options.hooks) === null || _b === void 0 ? void 0 : _b.beforeError);
+ }
+ }
+ });
+ got.extend = (...instancesOrOptions) => {
+ const optionsArray = [defaults.options];
+ let handlers = [...defaults._rawHandlers];
+ let isMutableDefaults;
+ for (const value of instancesOrOptions) {
+ if (isGotInstance(value)) {
+ optionsArray.push(value.defaults.options);
+ handlers.push(...value.defaults._rawHandlers);
+ isMutableDefaults = value.defaults.mutableDefaults;
+ }
+ else {
+ optionsArray.push(value);
+ if ('handlers' in value) {
+ handlers.push(...value.handlers);
+ }
+ isMutableDefaults = value.mutableDefaults;
+ }
+ }
+ handlers = handlers.filter(handler => handler !== exports.defaultHandler);
+ if (handlers.length === 0) {
+ handlers.push(exports.defaultHandler);
+ }
+ return create({
+ options: mergeOptions(...optionsArray),
+ handlers,
+ mutableDefaults: Boolean(isMutableDefaults)
+ });
+ };
+ // Pagination
+ const paginateEach = (async function* (url, options) {
+ // TODO: Remove this `@ts-expect-error` when upgrading to TypeScript 4.
+ // Error: Argument of type 'Merge> | undefined' is not assignable to parameter of type 'Options | undefined'.
+ // @ts-expect-error
+ let normalizedOptions = normalizeArguments(url, options, defaults.options);
+ normalizedOptions.resolveBodyOnly = false;
+ const pagination = normalizedOptions.pagination;
+ if (!is_1.default.object(pagination)) {
+ throw new TypeError('`options.pagination` must be implemented');
+ }
+ const all = [];
+ let { countLimit } = pagination;
+ let numberOfRequests = 0;
+ while (numberOfRequests < pagination.requestLimit) {
+ if (numberOfRequests !== 0) {
+ // eslint-disable-next-line no-await-in-loop
+ await delay(pagination.backoff);
+ }
+ // @ts-expect-error FIXME!
+ // TODO: Throw when result is not an instance of Response
+ // eslint-disable-next-line no-await-in-loop
+ const result = (await got(undefined, undefined, normalizedOptions));
+ // eslint-disable-next-line no-await-in-loop
+ const parsed = await pagination.transform(result);
+ const current = [];
+ for (const item of parsed) {
+ if (pagination.filter(item, all, current)) {
+ if (!pagination.shouldContinue(item, all, current)) {
+ return;
+ }
+ yield item;
+ if (pagination.stackAllItems) {
+ all.push(item);
+ }
+ current.push(item);
+ if (--countLimit <= 0) {
+ return;
+ }
+ }
+ }
+ const optionsToMerge = pagination.paginate(result, all, current);
+ if (optionsToMerge === false) {
+ return;
+ }
+ if (optionsToMerge === result.request.options) {
+ normalizedOptions = result.request.options;
+ }
+ else if (optionsToMerge !== undefined) {
+ normalizedOptions = normalizeArguments(undefined, optionsToMerge, normalizedOptions);
+ }
+ numberOfRequests++;
+ }
+ });
+ got.paginate = paginateEach;
+ got.paginate.all = (async (url, options) => {
+ const results = [];
+ for await (const item of paginateEach(url, options)) {
+ results.push(item);
+ }
+ return results;
+ });
+ // For those who like very descriptive names
+ got.paginate.each = paginateEach;
+ // Stream API
+ got.stream = ((url, options) => got(url, { ...options, isStream: true }));
+ // Shortcuts
+ for (const method of aliases) {
+ got[method] = ((url, options) => got(url, { ...options, method }));
+ got.stream[method] = ((url, options) => {
+ return got(url, { ...options, method, isStream: true });
+ });
+ }
+ Object.assign(got, errors);
+ Object.defineProperty(got, 'defaults', {
+ value: defaults.mutableDefaults ? defaults : deep_freeze_1.default(defaults),
+ writable: defaults.mutableDefaults,
+ configurable: defaults.mutableDefaults,
+ enumerable: true
+ });
+ got.mergeOptions = mergeOptions;
+ return got;
+};
+exports.default = create;
+__exportStar(__nccwpck_require__(2613), exports);
+
+
+/***/ }),
+
+/***/ 3061:
+/***/ (function(module, exports, __nccwpck_require__) {
+
+"use strict";
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+ for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const url_1 = __nccwpck_require__(8835);
+const create_1 = __nccwpck_require__(4337);
+const defaults = {
+ options: {
+ method: 'GET',
+ retry: {
+ limit: 2,
+ methods: [
+ 'GET',
+ 'PUT',
+ 'HEAD',
+ 'DELETE',
+ 'OPTIONS',
+ 'TRACE'
+ ],
+ statusCodes: [
+ 408,
+ 413,
+ 429,
+ 500,
+ 502,
+ 503,
+ 504,
+ 521,
+ 522,
+ 524
+ ],
+ errorCodes: [
+ 'ETIMEDOUT',
+ 'ECONNRESET',
+ 'EADDRINUSE',
+ 'ECONNREFUSED',
+ 'EPIPE',
+ 'ENOTFOUND',
+ 'ENETUNREACH',
+ 'EAI_AGAIN'
+ ],
+ maxRetryAfter: undefined,
+ calculateDelay: ({ computedValue }) => computedValue
+ },
+ timeout: {},
+ headers: {
+ 'user-agent': 'got (https://github.com/sindresorhus/got)'
+ },
+ hooks: {
+ init: [],
+ beforeRequest: [],
+ beforeRedirect: [],
+ beforeRetry: [],
+ beforeError: [],
+ afterResponse: []
+ },
+ cache: undefined,
+ dnsCache: undefined,
+ decompress: true,
+ throwHttpErrors: true,
+ followRedirect: true,
+ isStream: false,
+ responseType: 'text',
+ resolveBodyOnly: false,
+ maxRedirects: 10,
+ prefixUrl: '',
+ methodRewriting: true,
+ ignoreInvalidCookies: false,
+ context: {},
+ // TODO: Set this to `true` when Got 12 gets released
+ http2: false,
+ allowGetBody: false,
+ https: undefined,
+ pagination: {
+ transform: (response) => {
+ if (response.request.options.responseType === 'json') {
+ return response.body;
+ }
+ return JSON.parse(response.body);
+ },
+ paginate: response => {
+ if (!Reflect.has(response.headers, 'link')) {
+ return false;
+ }
+ const items = response.headers.link.split(',');
+ let next;
+ for (const item of items) {
+ const parsed = item.split(';');
+ if (parsed[1].includes('next')) {
+ next = parsed[0].trimStart().trim();
+ next = next.slice(1, -1);
+ break;
+ }
+ }
+ if (next) {
+ const options = {
+ url: new url_1.URL(next)
+ };
+ return options;
+ }
+ return false;
+ },
+ filter: () => true,
+ shouldContinue: () => true,
+ countLimit: Infinity,
+ backoff: 0,
+ requestLimit: 10000,
+ stackAllItems: true
+ },
+ parseJson: (text) => JSON.parse(text),
+ stringifyJson: (object) => JSON.stringify(object),
+ cacheOptions: {}
+ },
+ handlers: [create_1.defaultHandler],
+ mutableDefaults: false
+};
+const got = create_1.default(defaults);
+exports.default = got;
+// For CommonJS default export support
+module.exports = got;
+module.exports.default = got;
+module.exports.__esModule = true; // Workaround for TS issue: https://github.com/sindresorhus/got/pull/1267
+__exportStar(__nccwpck_require__(4337), exports);
+__exportStar(__nccwpck_require__(6056), exports);
+
+
+/***/ }),
+
+/***/ 2613:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+
+
+/***/ }),
+
+/***/ 285:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const is_1 = __nccwpck_require__(7678);
+function deepFreeze(object) {
+ for (const value of Object.values(object)) {
+ if (is_1.default.plainObject(value) || is_1.default.array(value)) {
+ deepFreeze(value);
+ }
+ }
+ return Object.freeze(object);
+}
+exports.default = deepFreeze;
+
+
+/***/ }),
+
+/***/ 397:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const alreadyWarned = new Set();
+exports.default = (message) => {
+ if (alreadyWarned.has(message)) {
+ return;
+ }
+ alreadyWarned.add(message);
+ // @ts-expect-error Missing types.
+ process.emitWarning(`Got: ${message}`, {
+ type: 'DeprecationWarning'
+ });
+};
+
+
+/***/ }),
+
+/***/ 1002:
+/***/ ((module) => {
+
+"use strict";
+
+// rfc7231 6.1
+const statusCodeCacheableByDefault = new Set([
+ 200,
+ 203,
+ 204,
+ 206,
+ 300,
+ 301,
+ 404,
+ 405,
+ 410,
+ 414,
+ 501,
+]);
+
+// This implementation does not understand partial responses (206)
+const understoodStatuses = new Set([
+ 200,
+ 203,
+ 204,
+ 300,
+ 301,
+ 302,
+ 303,
+ 307,
+ 308,
+ 404,
+ 405,
+ 410,
+ 414,
+ 501,
+]);
+
+const errorStatusCodes = new Set([
+ 500,
+ 502,
+ 503,
+ 504,
+]);
+
+const hopByHopHeaders = {
+    date: true, // included, because we add Age and update Date
+ connection: true,
+ 'keep-alive': true,
+ 'proxy-authenticate': true,
+ 'proxy-authorization': true,
+ te: true,
+ trailer: true,
+ 'transfer-encoding': true,
+ upgrade: true,
+};
+
+const excludedFromRevalidationUpdate = {
+ // Since the old body is reused, it doesn't make sense to change properties of the body
+ 'content-length': true,
+ 'content-encoding': true,
+ 'transfer-encoding': true,
+ 'content-range': true,
+};
+
+function toNumberOrZero(s) {
+ const n = parseInt(s, 10);
+ return isFinite(n) ? n : 0;
+}
+
+// RFC 5861
+function isErrorResponse(response) {
+ // consider undefined response as faulty
+ if(!response) {
+ return true
+ }
+ return errorStatusCodes.has(response.status);
+}
+
+function parseCacheControl(header) {
+ const cc = {};
+ if (!header) return cc;
+
+ // TODO: When there is more than one value present for a given directive (e.g., two Expires header fields, multiple Cache-Control: max-age directives),
+ // the directive's value is considered invalid. Caches are encouraged to consider responses that have invalid freshness information to be stale
+ const parts = header.trim().split(/\s*,\s*/); // TODO: lame parsing
+ for (const part of parts) {
+ const [k, v] = part.split(/\s*=\s*/, 2);
+ cc[k] = v === undefined ? true : v.replace(/^"|"$/g, ''); // TODO: lame unquoting
+ }
+
+ return cc;
+}
+
+function formatCacheControl(cc) {
+ let parts = [];
+ for (const k in cc) {
+ const v = cc[k];
+ parts.push(v === true ? k : k + '=' + v);
+ }
+ if (!parts.length) {
+ return undefined;
+ }
+ return parts.join(', ');
+}
+
+module.exports = class CachePolicy {
+ constructor(
+ req,
+ res,
+ {
+ shared,
+ cacheHeuristic,
+ immutableMinTimeToLive,
+ ignoreCargoCult,
+ _fromObject,
+ } = {}
+ ) {
+ if (_fromObject) {
+ this._fromObject(_fromObject);
+ return;
+ }
+
+ if (!res || !res.headers) {
+ throw Error('Response headers missing');
+ }
+ this._assertRequestHasHeaders(req);
+
+ this._responseTime = this.now();
+ this._isShared = shared !== false;
+ this._cacheHeuristic =
+ undefined !== cacheHeuristic ? cacheHeuristic : 0.1; // 10% matches IE
+ this._immutableMinTtl =
+ undefined !== immutableMinTimeToLive
+ ? immutableMinTimeToLive
+ : 24 * 3600 * 1000;
+
+ this._status = 'status' in res ? res.status : 200;
+ this._resHeaders = res.headers;
+ this._rescc = parseCacheControl(res.headers['cache-control']);
+ this._method = 'method' in req ? req.method : 'GET';
+ this._url = req.url;
+ this._host = req.headers.host;
+ this._noAuthorization = !req.headers.authorization;
+ this._reqHeaders = res.headers.vary ? req.headers : null; // Don't keep all request headers if they won't be used
+ this._reqcc = parseCacheControl(req.headers['cache-control']);
+
+        // Assume that if someone uses legacy, non-standard unnecessary options they don't understand caching,
+        // so there's no point strictly adhering to the blindly copy&pasted directives.
+ if (
+ ignoreCargoCult &&
+ 'pre-check' in this._rescc &&
+ 'post-check' in this._rescc
+ ) {
+ delete this._rescc['pre-check'];
+ delete this._rescc['post-check'];
+ delete this._rescc['no-cache'];
+ delete this._rescc['no-store'];
+ delete this._rescc['must-revalidate'];
+ this._resHeaders = Object.assign({}, this._resHeaders, {
+ 'cache-control': formatCacheControl(this._rescc),
+ });
+ delete this._resHeaders.expires;
+ delete this._resHeaders.pragma;
+ }
+
+ // When the Cache-Control header field is not present in a request, caches MUST consider the no-cache request pragma-directive
+ // as having the same effect as if "Cache-Control: no-cache" were present (see Section 5.2.1).
+ if (
+ res.headers['cache-control'] == null &&
+ /no-cache/.test(res.headers.pragma)
+ ) {
+ this._rescc['no-cache'] = true;
+ }
+ }
+
+ now() {
+ return Date.now();
+ }
+
+ storable() {
+ // The "no-store" request directive indicates that a cache MUST NOT store any part of either this request or any response to it.
+ return !!(
+ !this._reqcc['no-store'] &&
+ // A cache MUST NOT store a response to any request, unless:
+ // The request method is understood by the cache and defined as being cacheable, and
+ ('GET' === this._method ||
+ 'HEAD' === this._method ||
+ ('POST' === this._method && this._hasExplicitExpiration())) &&
+ // the response status code is understood by the cache, and
+ understoodStatuses.has(this._status) &&
+ // the "no-store" cache directive does not appear in request or response header fields, and
+ !this._rescc['no-store'] &&
+ // the "private" response directive does not appear in the response, if the cache is shared, and
+ (!this._isShared || !this._rescc.private) &&
+ // the Authorization header field does not appear in the request, if the cache is shared,
+ (!this._isShared ||
+ this._noAuthorization ||
+ this._allowsStoringAuthenticated()) &&
+ // the response either:
+ // contains an Expires header field, or
+ (this._resHeaders.expires ||
+ // contains a max-age response directive, or
+ // contains a s-maxage response directive and the cache is shared, or
+ // contains a public response directive.
+ this._rescc['max-age'] ||
+ (this._isShared && this._rescc['s-maxage']) ||
+ this._rescc.public ||
+ // has a status code that is defined as cacheable by default
+ statusCodeCacheableByDefault.has(this._status))
+ );
+ }
+
+ _hasExplicitExpiration() {
+ // 4.2.1 Calculating Freshness Lifetime
+ return (
+ (this._isShared && this._rescc['s-maxage']) ||
+ this._rescc['max-age'] ||
+ this._resHeaders.expires
+ );
+ }
+
+ _assertRequestHasHeaders(req) {
+ if (!req || !req.headers) {
+ throw Error('Request headers missing');
+ }
+ }
+
+ satisfiesWithoutRevalidation(req) {
+ this._assertRequestHasHeaders(req);
+
+ // When presented with a request, a cache MUST NOT reuse a stored response, unless:
+ // the presented request does not contain the no-cache pragma (Section 5.4), nor the no-cache cache directive,
+ // unless the stored response is successfully validated (Section 4.3), and
+ const requestCC = parseCacheControl(req.headers['cache-control']);
+ if (requestCC['no-cache'] || /no-cache/.test(req.headers.pragma)) {
+ return false;
+ }
+
+ if (requestCC['max-age'] && this.age() > requestCC['max-age']) {
+ return false;
+ }
+
+ if (
+ requestCC['min-fresh'] &&
+ this.timeToLive() < 1000 * requestCC['min-fresh']
+ ) {
+ return false;
+ }
+
+ // the stored response is either:
+ // fresh, or allowed to be served stale
+ if (this.stale()) {
+ const allowsStale =
+ requestCC['max-stale'] &&
+ !this._rescc['must-revalidate'] &&
+ (true === requestCC['max-stale'] ||
+ requestCC['max-stale'] > this.age() - this.maxAge());
+ if (!allowsStale) {
+ return false;
+ }
+ }
+
+ return this._requestMatches(req, false);
+ }
+
+ _requestMatches(req, allowHeadMethod) {
+ // The presented effective request URI and that of the stored response match, and
+ return (
+ (!this._url || this._url === req.url) &&
+ this._host === req.headers.host &&
+ // the request method associated with the stored response allows it to be used for the presented request, and
+ (!req.method ||
+ this._method === req.method ||
+ (allowHeadMethod && 'HEAD' === req.method)) &&
+ // selecting header fields nominated by the stored response (if any) match those presented, and
+ this._varyMatches(req)
+ );
+ }
+
+ _allowsStoringAuthenticated() {
+ // following Cache-Control response directives (Section 5.2.2) have such an effect: must-revalidate, public, and s-maxage.
+ return (
+ this._rescc['must-revalidate'] ||
+ this._rescc.public ||
+ this._rescc['s-maxage']
+ );
+ }
+
+ _varyMatches(req) {
+ if (!this._resHeaders.vary) {
+ return true;
+ }
+
+ // A Vary header field-value of "*" always fails to match
+ if (this._resHeaders.vary === '*') {
+ return false;
+ }
+
+ const fields = this._resHeaders.vary
+ .trim()
+ .toLowerCase()
+ .split(/\s*,\s*/);
+ for (const name of fields) {
+ if (req.headers[name] !== this._reqHeaders[name]) return false;
+ }
+ return true;
+ }
+
+ _copyWithoutHopByHopHeaders(inHeaders) {
+ const headers = {};
+ for (const name in inHeaders) {
+ if (hopByHopHeaders[name]) continue;
+ headers[name] = inHeaders[name];
+ }
+ // 9.1. Connection
+ if (inHeaders.connection) {
+ const tokens = inHeaders.connection.trim().split(/\s*,\s*/);
+ for (const name of tokens) {
+ delete headers[name];
+ }
+ }
+ if (headers.warning) {
+ const warnings = headers.warning.split(/,/).filter(warning => {
+ return !/^\s*1[0-9][0-9]/.test(warning);
+ });
+ if (!warnings.length) {
+ delete headers.warning;
+ } else {
+ headers.warning = warnings.join(',').trim();
+ }
+ }
+ return headers;
+ }
+
+ responseHeaders() {
+ const headers = this._copyWithoutHopByHopHeaders(this._resHeaders);
+ const age = this.age();
+
+        // A cache SHOULD generate a 113 warning if it heuristically chose a freshness
+ // lifetime greater than 24 hours and the response's age is greater than 24 hours.
+ if (
+ age > 3600 * 24 &&
+ !this._hasExplicitExpiration() &&
+ this.maxAge() > 3600 * 24
+ ) {
+ headers.warning =
+ (headers.warning ? `${headers.warning}, ` : '') +
+ '113 - "rfc7234 5.5.4"';
+ }
+ headers.age = `${Math.round(age)}`;
+ headers.date = new Date(this.now()).toUTCString();
+ return headers;
+ }
+
+ /**
+ * Value of the Date response header or current time if Date was invalid
+ * @return timestamp
+ */
+ date() {
+ const serverDate = Date.parse(this._resHeaders.date);
+ if (isFinite(serverDate)) {
+ return serverDate;
+ }
+ return this._responseTime;
+ }
- if (typeof pattern === 'string') {
- if (isEmptyString(str) || isEmptyString(pattern)) {
- return false;
+ /**
+ * Value of the Age header, in seconds, updated for the current time.
+ * May be fractional.
+ *
+ * @return Number
+ */
+ age() {
+ let age = this._ageValue();
+
+ const residentTime = (this.now() - this._responseTime) / 1000;
+ return age + residentTime;
}
- if (str.includes(pattern) || (str.startsWith('./') && str.slice(2).includes(pattern))) {
- return true;
+ _ageValue() {
+ return toNumberOrZero(this._resHeaders.age);
}
- }
- return micromatch.isMatch(str, pattern, { ...options, contains: true });
-};
+ /**
+ * Value of applicable max-age (or heuristic equivalent) in seconds. This counts since response's `Date`.
+ *
+ * For an up-to-date value, see `timeToLive()`.
+ *
+ * @return Number
+ */
+ maxAge() {
+ if (!this.storable() || this._rescc['no-cache']) {
+ return 0;
+ }
-/**
- * Filter the keys of the given object with the given `glob` pattern
- * and `options`. Does not attempt to match nested keys. If you need this feature,
- * use [glob-object][] instead.
- *
- * ```js
- * const mm = require('micromatch');
- * // mm.matchKeys(object, patterns[, options]);
- *
- * const obj = { aa: 'a', ab: 'b', ac: 'c' };
- * console.log(mm.matchKeys(obj, '*b'));
- * //=> { ab: 'b' }
- * ```
- * @param {Object} `object` The object with keys to filter.
- * @param {String|Array} `patterns` One or more glob patterns to use for matching.
- * @param {Object} `options` See available [options](#options) for changing how matches are performed
- * @return {Object} Returns an object with only keys that match the given patterns.
- * @api public
- */
+ // Shared responses with cookies are cacheable according to the RFC, but IMHO it'd be unwise to do so by default
+ // so this implementation requires explicit opt-in via public header
+ if (
+ this._isShared &&
+ (this._resHeaders['set-cookie'] &&
+ !this._rescc.public &&
+ !this._rescc.immutable)
+ ) {
+ return 0;
+ }
-micromatch.matchKeys = (obj, patterns, options) => {
- if (!utils.isObject(obj)) {
- throw new TypeError('Expected the first argument to be an object');
- }
- let keys = micromatch(Object.keys(obj), patterns, options);
- let res = {};
- for (let key of keys) res[key] = obj[key];
- return res;
-};
+ if (this._resHeaders.vary === '*') {
+ return 0;
+ }
-/**
- * Returns true if some of the strings in the given `list` match any of the given glob `patterns`.
- *
- * ```js
- * const mm = require('micromatch');
- * // mm.some(list, patterns[, options]);
- *
- * console.log(mm.some(['foo.js', 'bar.js'], ['*.js', '!foo.js']));
- * // true
- * console.log(mm.some(['foo.js'], ['*.js', '!foo.js']));
- * // false
- * ```
- * @param {String|Array} `list` The string or array of strings to test. Returns as soon as the first match is found.
- * @param {String|Array} `patterns` One or more glob patterns to use for matching.
- * @param {Object} `options` See available [options](#options) for changing how matches are performed
- * @return {Boolean} Returns true if any patterns match `str`
- * @api public
- */
+ if (this._isShared) {
+ if (this._rescc['proxy-revalidate']) {
+ return 0;
+ }
+ // if a response includes the s-maxage directive, a shared cache recipient MUST ignore the Expires field.
+ if (this._rescc['s-maxage']) {
+ return toNumberOrZero(this._rescc['s-maxage']);
+ }
+ }
-micromatch.some = (list, patterns, options) => {
- let items = [].concat(list);
+ // If a response includes a Cache-Control field with the max-age directive, a recipient MUST ignore the Expires field.
+ if (this._rescc['max-age']) {
+ return toNumberOrZero(this._rescc['max-age']);
+ }
- for (let pattern of [].concat(patterns)) {
- let isMatch = picomatch(String(pattern), options);
- if (items.some(item => isMatch(item))) {
- return true;
+ const defaultMinTtl = this._rescc.immutable ? this._immutableMinTtl : 0;
+
+ const serverDate = this.date();
+ if (this._resHeaders.expires) {
+ const expires = Date.parse(this._resHeaders.expires);
+ // A cache recipient MUST interpret invalid date formats, especially the value "0", as representing a time in the past (i.e., "already expired").
+ if (Number.isNaN(expires) || expires < serverDate) {
+ return 0;
+ }
+ return Math.max(defaultMinTtl, (expires - serverDate) / 1000);
+ }
+
+ if (this._resHeaders['last-modified']) {
+ const lastModified = Date.parse(this._resHeaders['last-modified']);
+ if (isFinite(lastModified) && serverDate > lastModified) {
+ return Math.max(
+ defaultMinTtl,
+ ((serverDate - lastModified) / 1000) * this._cacheHeuristic
+ );
+ }
+ }
+
+ return defaultMinTtl;
}
- }
- return false;
-};
-/**
- * Returns true if every string in the given `list` matches
- * any of the given glob `patterns`.
- *
- * ```js
- * const mm = require('micromatch');
- * // mm.every(list, patterns[, options]);
- *
- * console.log(mm.every('foo.js', ['foo.js']));
- * // true
- * console.log(mm.every(['foo.js', 'bar.js'], ['*.js']));
- * // true
- * console.log(mm.every(['foo.js', 'bar.js'], ['*.js', '!foo.js']));
- * // false
- * console.log(mm.every(['foo.js'], ['*.js', '!foo.js']));
- * // false
- * ```
- * @param {String|Array} `list` The string or array of strings to test.
- * @param {String|Array} `patterns` One or more glob patterns to use for matching.
- * @param {Object} `options` See available [options](#options) for changing how matches are performed
- * @return {Boolean} Returns true if any patterns match `str`
- * @api public
- */
+ timeToLive() {
+ const age = this.maxAge() - this.age();
+ const staleIfErrorAge = age + toNumberOrZero(this._rescc['stale-if-error']);
+ const staleWhileRevalidateAge = age + toNumberOrZero(this._rescc['stale-while-revalidate']);
+ return Math.max(0, age, staleIfErrorAge, staleWhileRevalidateAge) * 1000;
+ }
-micromatch.every = (list, patterns, options) => {
- let items = [].concat(list);
+ stale() {
+ return this.maxAge() <= this.age();
+ }
- for (let pattern of [].concat(patterns)) {
- let isMatch = picomatch(String(pattern), options);
- if (!items.every(item => isMatch(item))) {
- return false;
+ _useStaleIfError() {
+ return this.maxAge() + toNumberOrZero(this._rescc['stale-if-error']) > this.age();
+ }
+
+ useStaleWhileRevalidate() {
+ return this.maxAge() + toNumberOrZero(this._rescc['stale-while-revalidate']) > this.age();
+ }
+
+ static fromObject(obj) {
+ return new this(undefined, undefined, { _fromObject: obj });
+ }
+
+ _fromObject(obj) {
+ if (this._responseTime) throw Error('Reinitialized');
+ if (!obj || obj.v !== 1) throw Error('Invalid serialization');
+
+ this._responseTime = obj.t;
+ this._isShared = obj.sh;
+ this._cacheHeuristic = obj.ch;
+ this._immutableMinTtl =
+ obj.imm !== undefined ? obj.imm : 24 * 3600 * 1000;
+ this._status = obj.st;
+ this._resHeaders = obj.resh;
+ this._rescc = obj.rescc;
+ this._method = obj.m;
+ this._url = obj.u;
+ this._host = obj.h;
+ this._noAuthorization = obj.a;
+ this._reqHeaders = obj.reqh;
+ this._reqcc = obj.reqcc;
+ }
+
+ toObject() {
+ return {
+ v: 1,
+ t: this._responseTime,
+ sh: this._isShared,
+ ch: this._cacheHeuristic,
+ imm: this._immutableMinTtl,
+ st: this._status,
+ resh: this._resHeaders,
+ rescc: this._rescc,
+ m: this._method,
+ u: this._url,
+ h: this._host,
+ a: this._noAuthorization,
+ reqh: this._reqHeaders,
+ reqcc: this._reqcc,
+ };
+ }
+
+ /**
+     * Headers for sending to the origin server to revalidate a stale response.
+     * Allows the server to return 304 to allow reuse of the previous response.
+ *
+ * Hop by hop headers are always stripped.
+ * Revalidation headers may be added or removed, depending on request.
+ */
+ revalidationHeaders(incomingReq) {
+ this._assertRequestHasHeaders(incomingReq);
+ const headers = this._copyWithoutHopByHopHeaders(incomingReq.headers);
+
+ // This implementation does not understand range requests
+ delete headers['if-range'];
+
+ if (!this._requestMatches(incomingReq, true) || !this.storable()) {
+ // revalidation allowed via HEAD
+ // not for the same resource, or wasn't allowed to be cached anyway
+ delete headers['if-none-match'];
+ delete headers['if-modified-since'];
+ return headers;
+ }
+
+ /* MUST send that entity-tag in any cache validation request (using If-Match or If-None-Match) if an entity-tag has been provided by the origin server. */
+ if (this._resHeaders.etag) {
+ headers['if-none-match'] = headers['if-none-match']
+ ? `${headers['if-none-match']}, ${this._resHeaders.etag}`
+ : this._resHeaders.etag;
+ }
+
+ // Clients MAY issue simple (non-subrange) GET requests with either weak validators or strong validators. Clients MUST NOT use weak validators in other forms of request.
+ const forbidsWeakValidators =
+ headers['accept-ranges'] ||
+ headers['if-match'] ||
+ headers['if-unmodified-since'] ||
+ (this._method && this._method != 'GET');
+
+ /* SHOULD send the Last-Modified value in non-subrange cache validation requests (using If-Modified-Since) if only a Last-Modified value has been provided by the origin server.
+ Note: This implementation does not understand partial responses (206) */
+ if (forbidsWeakValidators) {
+ delete headers['if-modified-since'];
+
+ if (headers['if-none-match']) {
+ const etags = headers['if-none-match']
+ .split(/,/)
+ .filter(etag => {
+ return !/^\s*W\//.test(etag);
+ });
+ if (!etags.length) {
+ delete headers['if-none-match'];
+ } else {
+ headers['if-none-match'] = etags.join(',').trim();
+ }
+ }
+ } else if (
+ this._resHeaders['last-modified'] &&
+ !headers['if-modified-since']
+ ) {
+ headers['if-modified-since'] = this._resHeaders['last-modified'];
+ }
+
+ return headers;
+ }
+
+ /**
+     * Creates a new CachePolicy with information combined from the previous response,
+ * and the new revalidation response.
+ *
+ * Returns {policy, modified} where modified is a boolean indicating
+ * whether the response body has been modified, and old cached body can't be used.
+ *
+ * @return {Object} {policy: CachePolicy, modified: Boolean}
+ */
+ revalidatedPolicy(request, response) {
+ this._assertRequestHasHeaders(request);
+ if(this._useStaleIfError() && isErrorResponse(response)) { // I consider the revalidation request unsuccessful
+ return {
+ modified: false,
+ matches: false,
+ policy: this,
+ };
+ }
+ if (!response || !response.headers) {
+ throw Error('Response headers missing');
+ }
+
+ // These aren't going to be supported exactly, since one CachePolicy object
+ // doesn't know about all the other cached objects.
+ let matches = false;
+ if (response.status !== undefined && response.status != 304) {
+ matches = false;
+ } else if (
+ response.headers.etag &&
+ !/^\s*W\//.test(response.headers.etag)
+ ) {
+ // "All of the stored responses with the same strong validator are selected.
+ // If none of the stored responses contain the same strong validator,
+ // then the cache MUST NOT use the new response to update any stored responses."
+ matches =
+ this._resHeaders.etag &&
+ this._resHeaders.etag.replace(/^\s*W\//, '') ===
+ response.headers.etag;
+ } else if (this._resHeaders.etag && response.headers.etag) {
+ // "If the new response contains a weak validator and that validator corresponds
+ // to one of the cache's stored responses,
+ // then the most recent of those matching stored responses is selected for update."
+ matches =
+ this._resHeaders.etag.replace(/^\s*W\//, '') ===
+ response.headers.etag.replace(/^\s*W\//, '');
+ } else if (this._resHeaders['last-modified']) {
+ matches =
+ this._resHeaders['last-modified'] ===
+ response.headers['last-modified'];
+ } else {
+ // If the new response does not include any form of validator (such as in the case where
+ // a client generates an If-Modified-Since request from a source other than the Last-Modified
+ // response header field), and there is only one stored response, and that stored response also
+ // lacks a validator, then that stored response is selected for update.
+ if (
+ !this._resHeaders.etag &&
+ !this._resHeaders['last-modified'] &&
+ !response.headers.etag &&
+ !response.headers['last-modified']
+ ) {
+ matches = true;
+ }
+ }
+
+ if (!matches) {
+ return {
+ policy: new this.constructor(request, response),
+                // A client receiving a 304 without a body, even if it's invalid/mismatched, has no option
+                // but to reuse a cached body. We don't have a good way to tell clients to do
+                // error recovery in such a case.
+ modified: response.status != 304,
+ matches: false,
+ };
+ }
+
+ // use other header fields provided in the 304 (Not Modified) response to replace all instances
+ // of the corresponding header fields in the stored response.
+ const headers = {};
+ for (const k in this._resHeaders) {
+ headers[k] =
+ k in response.headers && !excludedFromRevalidationUpdate[k]
+ ? response.headers[k]
+ : this._resHeaders[k];
+ }
+
+ const newResponse = Object.assign({}, response, {
+ status: this._status,
+ method: this._method,
+ headers,
+ });
+ return {
+ policy: new this.constructor(request, newResponse, {
+ shared: this._isShared,
+ cacheHeuristic: this._cacheHeuristic,
+ immutableMinTimeToLive: this._immutableMinTtl,
+ }),
+ modified: false,
+ matches: true,
+ };
}
- }
- return true;
};
-/**
- * Returns true if **all** of the given `patterns` match
- * the specified string.
- *
- * ```js
- * const mm = require('micromatch');
- * // mm.all(string, patterns[, options]);
- *
- * console.log(mm.all('foo.js', ['foo.js']));
- * // true
- *
- * console.log(mm.all('foo.js', ['*.js', '!foo.js']));
- * // false
- *
- * console.log(mm.all('foo.js', ['*.js', 'foo.js']));
- * // true
- *
- * console.log(mm.all('foo.js', ['*.js', 'f*', '*o*', '*o.js']));
- * // true
- * ```
- * @param {String|Array} `str` The string to test.
- * @param {String|Array} `patterns` One or more glob patterns to use for matching.
- * @param {Object} `options` See available [options](#options) for changing how matches are performed
- * @return {Boolean} Returns true if any patterns match `str`
- * @api public
- */
-micromatch.all = (str, patterns, options) => {
- if (typeof str !== 'string') {
- throw new TypeError(`Expected a string: "${util.inspect(str)}"`);
- }
+/***/ }),
- return [].concat(patterns).every(p => picomatch(p, options)(str));
+/***/ 9898:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+const EventEmitter = __nccwpck_require__(8614);
+const tls = __nccwpck_require__(4016);
+const http2 = __nccwpck_require__(7565);
+const QuickLRU = __nccwpck_require__(9273);
+
+const kCurrentStreamsCount = Symbol('currentStreamsCount');
+const kRequest = Symbol('request');
+const kOriginSet = Symbol('cachedOriginSet');
+const kGracefullyClosing = Symbol('gracefullyClosing');
+
+const nameKeys = [
+ // `http2.connect()` options
+ 'maxDeflateDynamicTableSize',
+ 'maxSessionMemory',
+ 'maxHeaderListPairs',
+ 'maxOutstandingPings',
+ 'maxReservedRemoteStreams',
+ 'maxSendHeaderBlockLength',
+ 'paddingStrategy',
+
+ // `tls.connect()` options
+ 'localAddress',
+ 'path',
+ 'rejectUnauthorized',
+ 'minDHSize',
+
+ // `tls.createSecureContext()` options
+ 'ca',
+ 'cert',
+ 'clientCertEngine',
+ 'ciphers',
+ 'key',
+ 'pfx',
+ 'servername',
+ 'minVersion',
+ 'maxVersion',
+ 'secureProtocol',
+ 'crl',
+ 'honorCipherOrder',
+ 'ecdhCurve',
+ 'dhparam',
+ 'secureOptions',
+ 'sessionIdContext'
+];
+
+const getSortedIndex = (array, value, compare) => {
+ let low = 0;
+ let high = array.length;
+
+ while (low < high) {
+ const mid = (low + high) >>> 1;
+
+ /* istanbul ignore next */
+ if (compare(array[mid], value)) {
+ // This never gets called because we use descending sort. Better to have this anyway.
+ low = mid + 1;
+ } else {
+ high = mid;
+ }
+ }
+
+ return low;
};
-/**
- * Returns an array of matches captured by `pattern` in `string, or `null` if the pattern did not match.
- *
- * ```js
- * const mm = require('micromatch');
- * // mm.capture(pattern, string[, options]);
- *
- * console.log(mm.capture('test/*.js', 'test/foo.js'));
- * //=> ['foo']
- * console.log(mm.capture('test/*.js', 'foo/bar.css'));
- * //=> null
- * ```
- * @param {String} `glob` Glob pattern to use for matching.
- * @param {String} `input` String to match
- * @param {Object} `options` See available [options](#options) for changing how matches are performed
- * @return {Boolean} Returns an array of captures if the input matches the glob pattern, otherwise `null`.
- * @api public
- */
+const compareSessions = (a, b) => {
+ return a.remoteSettings.maxConcurrentStreams > b.remoteSettings.maxConcurrentStreams;
+};
-micromatch.capture = (glob, input, options) => {
- let posix = utils.isWindows(options);
- let regex = picomatch.makeRe(String(glob), { ...options, capture: true });
- let match = regex.exec(posix ? utils.toPosixSlashes(input) : input);
+// See https://tools.ietf.org/html/rfc8336
+const closeCoveredSessions = (where, session) => {
+ // Clients SHOULD NOT emit new requests on any connection whose Origin
+ // Set is a proper subset of another connection's Origin Set, and they
+ // SHOULD close it once all outstanding requests are satisfied.
+ for (const coveredSession of where) {
+ if (
+ // The set is a proper subset when its length is less than the other set.
+ coveredSession[kOriginSet].length < session[kOriginSet].length &&
+
+ // And the other set includes all elements of the subset.
+ coveredSession[kOriginSet].every(origin => session[kOriginSet].includes(origin)) &&
+
+ // Makes sure that the session can handle all requests from the covered session.
+ coveredSession[kCurrentStreamsCount] + session[kCurrentStreamsCount] <= session.remoteSettings.maxConcurrentStreams
+ ) {
+ // This allows pending requests to finish and prevents making new requests.
+ gracefullyClose(coveredSession);
+ }
+ }
+};
- if (match) {
- return match.slice(1).map(v => v === void 0 ? '' : v);
- }
+// This is basically inverted `closeCoveredSessions(...)`.
+const closeSessionIfCovered = (where, coveredSession) => {
+ for (const session of where) {
+ if (
+ coveredSession[kOriginSet].length < session[kOriginSet].length &&
+ coveredSession[kOriginSet].every(origin => session[kOriginSet].includes(origin)) &&
+ coveredSession[kCurrentStreamsCount] + session[kCurrentStreamsCount] <= session.remoteSettings.maxConcurrentStreams
+ ) {
+ gracefullyClose(coveredSession);
+ }
+ }
};
-/**
- * Create a regular expression from the given glob `pattern`.
- *
- * ```js
- * const mm = require('micromatch');
- * // mm.makeRe(pattern[, options]);
- *
- * console.log(mm.makeRe('*.js'));
- * //=> /^(?:(\.[\\\/])?(?!\.)(?=.)[^\/]*?\.js)$/
- * ```
- * @param {String} `pattern` A glob pattern to convert to regex.
- * @param {Object} `options`
- * @return {RegExp} Returns a regex created from the given pattern.
- * @api public
- */
+const getSessions = ({agent, isFree}) => {
+ const result = {};
-micromatch.makeRe = (...args) => picomatch.makeRe(...args);
+ // eslint-disable-next-line guard-for-in
+ for (const normalizedOptions in agent.sessions) {
+ const sessions = agent.sessions[normalizedOptions];
-/**
- * Scan a glob pattern to separate the pattern into segments. Used
- * by the [split](#split) method.
- *
- * ```js
- * const mm = require('micromatch');
- * const state = mm.scan(pattern[, options]);
- * ```
- * @param {String} `pattern`
- * @param {Object} `options`
- * @return {Object} Returns an object with
- * @api public
- */
+ const filtered = sessions.filter(session => {
+ const result = session[Agent.kCurrentStreamsCount] < session.remoteSettings.maxConcurrentStreams;
-micromatch.scan = (...args) => picomatch.scan(...args);
+ return isFree ? result : !result;
+ });
-/**
- * Parse a glob pattern to create the source string for a regular
- * expression.
- *
- * ```js
- * const mm = require('micromatch');
- * const state = mm(pattern[, options]);
- * ```
- * @param {String} `glob`
- * @param {Object} `options`
- * @return {Object} Returns an object with useful properties and output to be used as regex source string.
- * @api public
- */
+ if (filtered.length !== 0) {
+ result[normalizedOptions] = filtered;
+ }
+ }
-micromatch.parse = (patterns, options) => {
- let res = [];
- for (let pattern of [].concat(patterns || [])) {
- for (let str of braces(String(pattern), options)) {
- res.push(picomatch.parse(str, options));
- }
- }
- return res;
+ return result;
};
-/**
- * Process the given brace `pattern`.
- *
- * ```js
- * const { braces } = require('micromatch');
- * console.log(braces('foo/{a,b,c}/bar'));
- * //=> [ 'foo/(a|b|c)/bar' ]
- *
- * console.log(braces('foo/{a,b,c}/bar', { expand: true }));
- * //=> [ 'foo/a/bar', 'foo/b/bar', 'foo/c/bar' ]
- * ```
- * @param {String} `pattern` String with brace pattern to process.
- * @param {Object} `options` Any [options](#options) to change how expansion is performed. See the [braces][] library for all available options.
- * @return {Array}
- * @api public
- */
+const gracefullyClose = session => {
+ session[kGracefullyClosing] = true;
-micromatch.braces = (pattern, options) => {
- if (typeof pattern !== 'string') throw new TypeError('Expected a string');
- if ((options && options.nobrace === true) || !/\{.*\}/.test(pattern)) {
- return [pattern];
- }
- return braces(pattern, options);
+ if (session[kCurrentStreamsCount] === 0) {
+ session.close();
+ }
};
-/**
- * Expand braces
- */
+class Agent extends EventEmitter {
+ constructor({timeout = 60000, maxSessions = Infinity, maxFreeSessions = 10, maxCachedTlsSessions = 100} = {}) {
+ super();
-micromatch.braceExpand = (pattern, options) => {
- if (typeof pattern !== 'string') throw new TypeError('Expected a string');
- return micromatch.braces(pattern, { ...options, expand: true });
-};
+ // A session is considered busy when its current streams count
+ // is equal to or greater than the `maxConcurrentStreams` value.
-/**
- * Expose micromatch
- */
+ // A session is considered free when its current streams count
+ // is less than the `maxConcurrentStreams` value.
-module.exports = micromatch;
+ // SESSIONS[NORMALIZED_OPTIONS] = [];
+ this.sessions = {};
+ // The queue for creating new sessions. It looks like this:
+ // QUEUE[NORMALIZED_OPTIONS][NORMALIZED_ORIGIN] = ENTRY_FUNCTION
+ //
+ // The entry function has `listeners`, `completed` and `destroyed` properties.
+ // `listeners` is an array of objects containing `resolve` and `reject` functions.
+ // `completed` is a boolean. It's set to true after ENTRY_FUNCTION is executed.
+		// `destroyed` is a boolean. If it's set to true, the session will be destroyed if it hasn't connected yet.
+ this.queue = {};
-/***/ }),
+ // Each session will use this timeout value.
+ this.timeout = timeout;
-/***/ 6867:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+ // Max sessions in total
+ this.maxSessions = maxSessions;
-"use strict";
-/*!
- * to-regex-range
- *
- * Copyright (c) 2015-present, Jon Schlinkert.
- * Released under the MIT License.
- */
+ // Max free sessions in total
+ // TODO: decreasing `maxFreeSessions` should close some sessions
+ this.maxFreeSessions = maxFreeSessions;
+ this._freeSessionsCount = 0;
+ this._sessionsCount = 0;
+ // We don't support push streams by default.
+ this.settings = {
+ enablePush: false
+ };
-const isNumber = __nccwpck_require__(2840);
+ // Reusing TLS sessions increases performance.
+ this.tlsSessionCache = new QuickLRU({maxSize: maxCachedTlsSessions});
+ }
-const toRegexRange = (min, max, options) => {
- if (isNumber(min) === false) {
- throw new TypeError('toRegexRange: expected the first argument to be a number');
- }
+ static normalizeOrigin(url, servername) {
+ if (typeof url === 'string') {
+ url = new URL(url);
+ }
- if (max === void 0 || min === max) {
- return String(min);
- }
+ if (servername && url.hostname !== servername) {
+ url.hostname = servername;
+ }
- if (isNumber(max) === false) {
- throw new TypeError('toRegexRange: expected the second argument to be a number.');
- }
+ return url.origin;
+ }
- let opts = { relaxZeros: true, ...options };
- if (typeof opts.strictZeros === 'boolean') {
- opts.relaxZeros = opts.strictZeros === false;
- }
+ normalizeOptions(options) {
+ let normalized = '';
- let relax = String(opts.relaxZeros);
- let shorthand = String(opts.shorthand);
- let capture = String(opts.capture);
- let wrap = String(opts.wrap);
- let cacheKey = min + ':' + max + '=' + relax + shorthand + capture + wrap;
+ if (options) {
+ for (const key of nameKeys) {
+ if (options[key]) {
+ normalized += `:${options[key]}`;
+ }
+ }
+ }
- if (toRegexRange.cache.hasOwnProperty(cacheKey)) {
- return toRegexRange.cache[cacheKey].result;
- }
+ return normalized;
+ }
- let a = Math.min(min, max);
- let b = Math.max(min, max);
+ _tryToCreateNewSession(normalizedOptions, normalizedOrigin) {
+ if (!(normalizedOptions in this.queue) || !(normalizedOrigin in this.queue[normalizedOptions])) {
+ return;
+ }
- if (Math.abs(a - b) === 1) {
- let result = min + '|' + max;
- if (opts.capture) {
- return `(${result})`;
- }
- if (opts.wrap === false) {
- return result;
- }
- return `(?:${result})`;
- }
+ const item = this.queue[normalizedOptions][normalizedOrigin];
- let isPadded = hasPadding(min) || hasPadding(max);
- let state = { min, max, a, b };
- let positives = [];
- let negatives = [];
+ // The entry function can be run only once.
+		// BUG: The session may never be created when:
+ // - the first condition is false AND
+ // - this function is never called with the same arguments in the future.
+ if (this._sessionsCount < this.maxSessions && !item.completed) {
+ item.completed = true;
- if (isPadded) {
- state.isPadded = isPadded;
- state.maxLen = String(state.max).length;
- }
+ item();
+ }
+ }
- if (a < 0) {
- let newMin = b < 0 ? Math.abs(b) : 1;
- negatives = splitToPatterns(newMin, Math.abs(a), state, opts);
- a = state.a = 0;
- }
+ getSession(origin, options, listeners) {
+ return new Promise((resolve, reject) => {
+ if (Array.isArray(listeners)) {
+ listeners = [...listeners];
- if (b >= 0) {
- positives = splitToPatterns(a, b, state, opts);
- }
+ // Resolve the current promise ASAP, we're just moving the listeners.
+ // They will be executed at a different time.
+ resolve();
+ } else {
+ listeners = [{resolve, reject}];
+ }
- state.negatives = negatives;
- state.positives = positives;
- state.result = collatePatterns(negatives, positives, opts);
+ const normalizedOptions = this.normalizeOptions(options);
+ const normalizedOrigin = Agent.normalizeOrigin(origin, options && options.servername);
- if (opts.capture === true) {
- state.result = `(${state.result})`;
- } else if (opts.wrap !== false && (positives.length + negatives.length) > 1) {
- state.result = `(?:${state.result})`;
- }
+ if (normalizedOrigin === undefined) {
+ for (const {reject} of listeners) {
+ reject(new TypeError('The `origin` argument needs to be a string or an URL object'));
+ }
- toRegexRange.cache[cacheKey] = state;
- return state.result;
-};
+ return;
+ }
-function collatePatterns(neg, pos, options) {
- let onlyNegative = filterPatterns(neg, pos, '-', false, options) || [];
- let onlyPositive = filterPatterns(pos, neg, '', false, options) || [];
- let intersected = filterPatterns(neg, pos, '-?', true, options) || [];
- let subpatterns = onlyNegative.concat(intersected).concat(onlyPositive);
- return subpatterns.join('|');
-}
+ if (normalizedOptions in this.sessions) {
+ const sessions = this.sessions[normalizedOptions];
-function splitToRanges(min, max) {
- let nines = 1;
- let zeros = 1;
+ let maxConcurrentStreams = -1;
+ let currentStreamsCount = -1;
+ let optimalSession;
- let stop = countNines(min, nines);
- let stops = new Set([max]);
+ // We could just do this.sessions[normalizedOptions].find(...) but that isn't optimal.
+				// Additionally, we are looking for the session which has the biggest current pending streams count.
+ for (const session of sessions) {
+ const sessionMaxConcurrentStreams = session.remoteSettings.maxConcurrentStreams;
- while (min <= stop && stop <= max) {
- stops.add(stop);
- nines += 1;
- stop = countNines(min, nines);
- }
+ if (sessionMaxConcurrentStreams < maxConcurrentStreams) {
+ break;
+ }
- stop = countZeros(max + 1, zeros) - 1;
+ if (session[kOriginSet].includes(normalizedOrigin)) {
+ const sessionCurrentStreamsCount = session[kCurrentStreamsCount];
+
+ if (
+ sessionCurrentStreamsCount >= sessionMaxConcurrentStreams ||
+ session[kGracefullyClosing] ||
+ // Unfortunately the `close` event isn't called immediately,
+ // so `session.destroyed` is `true`, but `session.closed` is `false`.
+ session.destroyed
+ ) {
+ continue;
+ }
- while (min < stop && stop <= max) {
- stops.add(stop);
- zeros += 1;
- stop = countZeros(max + 1, zeros) - 1;
- }
+ // We only need to set this once.
+ if (!optimalSession) {
+ maxConcurrentStreams = sessionMaxConcurrentStreams;
+ }
- stops = [...stops];
- stops.sort(compare);
- return stops;
-}
+ // We're looking for the session with the biggest current pending stream count,
+ // in order to minimize the number of active sessions.
+ if (sessionCurrentStreamsCount > currentStreamsCount) {
+ optimalSession = session;
+ currentStreamsCount = sessionCurrentStreamsCount;
+ }
+ }
+ }
-/**
- * Convert a range to a regex pattern
- * @param {Number} `start`
- * @param {Number} `stop`
- * @return {String}
- */
+ if (optimalSession) {
+ /* istanbul ignore next: safety check */
+ if (listeners.length !== 1) {
+ for (const {reject} of listeners) {
+ const error = new Error(
+ `Expected the length of listeners to be 1, got ${listeners.length}.\n` +
+ 'Please report this to https://github.com/szmarczak/http2-wrapper/'
+ );
-function rangeToPattern(start, stop, options) {
- if (start === stop) {
- return { pattern: start, count: [], digits: 0 };
- }
+ reject(error);
+ }
- let zipped = zip(start, stop);
- let digits = zipped.length;
- let pattern = '';
- let count = 0;
+ return;
+ }
- for (let i = 0; i < digits; i++) {
- let [startDigit, stopDigit] = zipped[i];
+ listeners[0].resolve(optimalSession);
+ return;
+ }
+ }
- if (startDigit === stopDigit) {
- pattern += startDigit;
+ if (normalizedOptions in this.queue) {
+ if (normalizedOrigin in this.queue[normalizedOptions]) {
+ // There's already an item in the queue, just attach ourselves to it.
+ this.queue[normalizedOptions][normalizedOrigin].listeners.push(...listeners);
- } else if (startDigit !== '0' || stopDigit !== '9') {
- pattern += toCharacterClass(startDigit, stopDigit, options);
+ // This shouldn't be executed here.
+ // See the comment inside _tryToCreateNewSession.
+ this._tryToCreateNewSession(normalizedOptions, normalizedOrigin);
+ return;
+ }
+ } else {
+ this.queue[normalizedOptions] = {};
+ }
- } else {
- count++;
- }
- }
+ // The entry must be removed from the queue IMMEDIATELY when:
+ // 1. the session connects successfully,
+ // 2. an error occurs.
+ const removeFromQueue = () => {
+ // Our entry can be replaced. We cannot remove the new one.
+ if (normalizedOptions in this.queue && this.queue[normalizedOptions][normalizedOrigin] === entry) {
+ delete this.queue[normalizedOptions][normalizedOrigin];
- if (count) {
- pattern += options.shorthand === true ? '\\d' : '[0-9]';
- }
+ if (Object.keys(this.queue[normalizedOptions]).length === 0) {
+ delete this.queue[normalizedOptions];
+ }
+ }
+ };
- return { pattern, count: [count], digits };
-}
+ // The main logic: `entry` creates a new session for this origin (run later by `_tryToCreateNewSession`).
+ const entry = () => {
+ const name = `${normalizedOrigin}:${normalizedOptions}`;
+ let receivedSettings = false;
+
+ try {
+ const session = http2.connect(origin, {
+ createConnection: this.createConnection,
+ settings: this.settings,
+ session: this.tlsSessionCache.get(name),
+ ...options
+ });
+ session[kCurrentStreamsCount] = 0;
+ session[kGracefullyClosing] = false;
+
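+ // `isFree()` reports whether this session can accept more concurrent streams;
+ // `wasFree` tracks the previous state so `_freeSessionsCount` is only adjusted on transitions.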
+ const isFree = () => session[kCurrentStreamsCount] < session.remoteSettings.maxConcurrentStreams;
+ let wasFree = true;
+
+ session.socket.once('session', tlsSession => {
+ this.tlsSessionCache.set(name, tlsSession);
+ });
+
+ session.once('error', error => {
+ // Listeners are empty when the session successfully connected.
+ for (const {reject} of listeners) {
+ reject(error);
+ }
-function splitToPatterns(min, max, tok, options) {
- let ranges = splitToRanges(min, max);
- let tokens = [];
- let start = min;
- let prev;
+ // The connection got broken, purge the cache.
+ this.tlsSessionCache.delete(name);
+ });
+
+ session.setTimeout(this.timeout, () => {
+ // Terminates all streams owned by this session.
+ // TODO: Maybe the streams should have a "Session timed out" error?
+ session.destroy();
+ });
+
+ session.once('close', () => {
+ if (receivedSettings) {
+ // 1. If it wasn't free then no need to decrease because
+ // it has been decreased already in session.request().
+ // 2. `stream.once('close')` won't increment the count
+ // because the session is already closed.
+ if (wasFree) {
+ this._freeSessionsCount--;
+ }
+
+ this._sessionsCount--;
+
+ // This cannot be moved to the stream logic,
+ // because there may be a session that hadn't made a single request.
+ const where = this.sessions[normalizedOptions];
+ where.splice(where.indexOf(session), 1);
+
+ if (where.length === 0) {
+ delete this.sessions[normalizedOptions];
+ }
+ } else {
+ // Broken connection
+ const error = new Error('Session closed without receiving a SETTINGS frame');
+ error.code = 'HTTP2WRAPPER_NOSETTINGS';
+
+ for (const {reject} of listeners) {
+ reject(error);
+ }
+
+ removeFromQueue();
+ }
+
+ // There may be another session awaiting.
+ this._tryToCreateNewSession(normalizedOptions, normalizedOrigin);
+ });
+
+ // Iterates over the queue and processes listeners.
+ const processListeners = () => {
+ if (!(normalizedOptions in this.queue) || !isFree()) {
+ return;
+ }
- for (let i = 0; i < ranges.length; i++) {
- let max = ranges[i];
- let obj = rangeToPattern(String(start), String(max), options);
- let zeros = '';
+ for (const origin of session[kOriginSet]) {
+ if (origin in this.queue[normalizedOptions]) {
+ const {listeners} = this.queue[normalizedOptions][origin];
+
+ // Prevents session overloading.
+ while (listeners.length !== 0 && isFree()) {
+ // We assume `resolve(...)` calls `request(...)` *directly*,
+ // otherwise the session will get overloaded.
+ listeners.shift().resolve(session);
+ }
+
+ const where = this.queue[normalizedOptions];
+ if (where[origin].listeners.length === 0) {
+ delete where[origin];
+
+ if (Object.keys(where).length === 0) {
+ delete this.queue[normalizedOptions];
+ break;
+ }
+ }
+
+ // We're no longer free, no point in continuing.
+ if (!isFree()) {
+ break;
+ }
+ }
+ }
+ };
- if (!tok.isPadded && prev && prev.pattern === obj.pattern) {
- if (prev.count.length > 1) {
- prev.count.pop();
- }
+ // The Origin Set cannot shrink. No need to check if it suddenly became covered by another one.
+ session.on('origin', () => {
+ session[kOriginSet] = session.originSet;
- prev.count.push(obj.count[0]);
- prev.string = prev.pattern + toQuantifier(prev.count);
- start = max + 1;
- continue;
- }
+ if (!isFree()) {
+ // The session is full.
+ return;
+ }
- if (tok.isPadded) {
- zeros = padZeros(max, tok, options);
- }
+ processListeners();
- obj.string = zeros + obj.pattern + toQuantifier(obj.count);
- tokens.push(obj);
- start = max + 1;
- prev = obj;
- }
+ // Close covered sessions (if possible).
+ closeCoveredSessions(this.sessions[normalizedOptions], session);
+ });
- return tokens;
-}
+ session.once('remoteSettings', () => {
+ // Fix Node.js bug preventing the process from exiting
+ session.ref();
+ session.unref();
-function filterPatterns(arr, comparison, prefix, intersection, options) {
- let result = [];
+ this._sessionsCount++;
- for (let ele of arr) {
- let { string } = ele;
+ // The Agent could have been destroyed already.
+ if (entry.destroyed) {
+ const error = new Error('Agent has been destroyed');
- // only push if _both_ are negative...
- if (!intersection && !contains(comparison, 'string', string)) {
- result.push(prefix + string);
- }
+ for (const listener of listeners) {
+ listener.reject(error);
+ }
- // or _both_ are positive
- if (intersection && contains(comparison, 'string', string)) {
- result.push(prefix + string);
- }
- }
- return result;
-}
+ session.destroy();
+ return;
+ }
-/**
- * Zip strings
- */
+ session[kOriginSet] = session.originSet;
-function zip(a, b) {
- let arr = [];
- for (let i = 0; i < a.length; i++) arr.push([a[i], b[i]]);
- return arr;
-}
+ {
+ const where = this.sessions;
-function compare(a, b) {
- return a > b ? 1 : b > a ? -1 : 0;
-}
+ if (normalizedOptions in where) {
+ const sessions = where[normalizedOptions];
+ sessions.splice(getSortedIndex(sessions, session, compareSessions), 0, session);
+ } else {
+ where[normalizedOptions] = [session];
+ }
+ }
-function contains(arr, key, val) {
- return arr.some(ele => ele[key] === val);
-}
+ this._freeSessionsCount += 1;
+ receivedSettings = true;
-function countNines(min, len) {
- return Number(String(min).slice(0, -len) + '9'.repeat(len));
-}
+ this.emit('session', session);
-function countZeros(integer, zeros) {
- return integer - (integer % Math.pow(10, zeros));
-}
+ processListeners();
+ removeFromQueue();
-function toQuantifier(digits) {
- let [start = 0, stop = ''] = digits;
- if (stop || start > 1) {
- return `{${start + (stop ? ',' + stop : '')}}`;
- }
- return '';
-}
+ // TODO: Close the least recently used (or least used?) session
+ if (session[kCurrentStreamsCount] === 0 && this._freeSessionsCount > this.maxFreeSessions) {
+ session.close();
+ }
-function toCharacterClass(a, b, options) {
- return `[${a}${(b - a === 1) ? '' : '-'}${b}]`;
-}
+ // Check if we haven't managed to execute all listeners.
+ if (listeners.length !== 0) {
+ // Request for a new session with predefined listeners.
+ this.getSession(normalizedOrigin, options, listeners);
+ listeners.length = 0;
+ }
-function hasPadding(str) {
- return /^-?(0+)\d/.test(str);
-}
+ // `session.remoteSettings.maxConcurrentStreams` might get increased
+ session.on('remoteSettings', () => {
+ processListeners();
-function padZeros(value, tok, options) {
- if (!tok.isPadded) {
- return value;
- }
+ // In case the Origin Set changes
+ closeCoveredSessions(this.sessions[normalizedOptions], session);
+ });
+ });
- let diff = Math.abs(tok.maxLen - String(value).length);
- let relax = options.relaxZeros !== false;
+ // Shim `session.request()` in order to catch all streams
+ session[kRequest] = session.request;
+ session.request = (headers, streamOptions) => {
+ if (session[kGracefullyClosing]) {
+ throw new Error('The session is gracefully closing. No new streams are allowed.');
+ }
- switch (diff) {
- case 0:
- return '';
- case 1:
- return relax ? '0?' : '0';
- case 2:
- return relax ? '0{0,2}' : '00';
- default: {
- return relax ? `0{0,${diff}}` : `0{${diff}}`;
- }
- }
-}
+ const stream = session[kRequest](headers, streamOptions);
-/**
- * Cache
- */
+ // The process won't exit until the session is closed or all requests are gone.
+ session.ref();
-toRegexRange.cache = {};
-toRegexRange.clearCache = () => (toRegexRange.cache = {});
+ ++session[kCurrentStreamsCount];
-/**
- * Expose `toRegexRange`
- */
+ if (session[kCurrentStreamsCount] === session.remoteSettings.maxConcurrentStreams) {
+ this._freeSessionsCount--;
+ }
-module.exports = toRegexRange;
+ stream.once('close', () => {
+ wasFree = isFree();
+ --session[kCurrentStreamsCount];
-/***/ }),
+ if (!session.destroyed && !session.closed) {
+ closeSessionIfCovered(this.sessions[normalizedOptions], session);
-/***/ 3664:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+ if (isFree() && !session.closed) {
+ if (!wasFree) {
+ this._freeSessionsCount++;
-"use strict";
+ wasFree = true;
+ }
-const taskManager = __nccwpck_require__(2708);
-const async_1 = __nccwpck_require__(5679);
-const stream_1 = __nccwpck_require__(4630);
-const sync_1 = __nccwpck_require__(2405);
-const settings_1 = __nccwpck_require__(952);
-const utils = __nccwpck_require__(5444);
-async function FastGlob(source, options) {
- assertPatternsInput(source);
- const works = getWorks(source, async_1.default, options);
- const result = await Promise.all(works);
- return utils.array.flatten(result);
-}
-// https://github.com/typescript-eslint/typescript-eslint/issues/60
-// eslint-disable-next-line no-redeclare
-(function (FastGlob) {
- function sync(source, options) {
- assertPatternsInput(source);
- const works = getWorks(source, sync_1.default, options);
- return utils.array.flatten(works);
- }
- FastGlob.sync = sync;
- function stream(source, options) {
- assertPatternsInput(source);
- const works = getWorks(source, stream_1.default, options);
- /**
- * The stream returned by the provider cannot work with an asynchronous iterator.
- * To support asynchronous iterators, regardless of the number of tasks, we always multiplex streams.
- * This affects performance (+25%). I don't see best solution right now.
- */
- return utils.stream.merge(works);
- }
- FastGlob.stream = stream;
- function generateTasks(source, options) {
- assertPatternsInput(source);
- const patterns = [].concat(source);
- const settings = new settings_1.default(options);
- return taskManager.generate(patterns, settings);
- }
- FastGlob.generateTasks = generateTasks;
- function isDynamicPattern(source, options) {
- assertPatternsInput(source);
- const settings = new settings_1.default(options);
- return utils.pattern.isDynamicPattern(source, settings);
- }
- FastGlob.isDynamicPattern = isDynamicPattern;
- function escapePath(source) {
- assertPatternsInput(source);
- return utils.path.escape(source);
- }
- FastGlob.escapePath = escapePath;
-})(FastGlob || (FastGlob = {}));
-function getWorks(source, _Provider, options) {
- const patterns = [].concat(source);
- const settings = new settings_1.default(options);
- const tasks = taskManager.generate(patterns, settings);
- const provider = new _Provider(settings);
- return tasks.map(provider.read, provider);
-}
-function assertPatternsInput(input) {
- const source = [].concat(input);
- const isValidSource = source.every((item) => utils.string.isString(item) && !utils.string.isEmpty(item));
- if (!isValidSource) {
- throw new TypeError('Patterns must be a string (non empty) or an array of strings');
- }
-}
-module.exports = FastGlob;
+ const isEmpty = session[kCurrentStreamsCount] === 0;
+ if (isEmpty) {
+ session.unref();
+ }
-/***/ }),
+ if (
+ isEmpty &&
+ (
+ this._freeSessionsCount > this.maxFreeSessions ||
+ session[kGracefullyClosing]
+ )
+ ) {
+ session.close();
+ } else {
+ closeCoveredSessions(this.sessions[normalizedOptions], session);
+ processListeners();
+ }
+ }
+ }
+ });
-/***/ 2708:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+ return stream;
+ };
+ } catch (error) {
+ for (const listener of listeners) {
+ listener.reject(error);
+ }
-"use strict";
+ removeFromQueue();
+ }
+ };
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.convertPatternGroupToTask = exports.convertPatternGroupsToTasks = exports.groupPatternsByBaseDirectory = exports.getNegativePatternsAsPositive = exports.getPositivePatterns = exports.convertPatternsToTasks = exports.generate = void 0;
-const utils = __nccwpck_require__(5444);
-function generate(patterns, settings) {
- const positivePatterns = getPositivePatterns(patterns);
- const negativePatterns = getNegativePatternsAsPositive(patterns, settings.ignore);
- const staticPatterns = positivePatterns.filter((pattern) => utils.pattern.isStaticPattern(pattern, settings));
- const dynamicPatterns = positivePatterns.filter((pattern) => utils.pattern.isDynamicPattern(pattern, settings));
- const staticTasks = convertPatternsToTasks(staticPatterns, negativePatterns, /* dynamic */ false);
- const dynamicTasks = convertPatternsToTasks(dynamicPatterns, negativePatterns, /* dynamic */ true);
- return staticTasks.concat(dynamicTasks);
-}
-exports.generate = generate;
-function convertPatternsToTasks(positive, negative, dynamic) {
- const positivePatternsGroup = groupPatternsByBaseDirectory(positive);
- // When we have a global group – there is no reason to divide the patterns into independent tasks.
- // In this case, the global task covers the rest.
- if ('.' in positivePatternsGroup) {
- const task = convertPatternGroupToTask('.', positive, negative, dynamic);
- return [task];
- }
- return convertPatternGroupsToTasks(positivePatternsGroup, negative, dynamic);
-}
-exports.convertPatternsToTasks = convertPatternsToTasks;
-function getPositivePatterns(patterns) {
- return utils.pattern.getPositivePatterns(patterns);
-}
-exports.getPositivePatterns = getPositivePatterns;
-function getNegativePatternsAsPositive(patterns, ignore) {
- const negative = utils.pattern.getNegativePatterns(patterns).concat(ignore);
- const positive = negative.map(utils.pattern.convertToPositivePattern);
- return positive;
-}
-exports.getNegativePatternsAsPositive = getNegativePatternsAsPositive;
-function groupPatternsByBaseDirectory(patterns) {
- const group = {};
- return patterns.reduce((collection, pattern) => {
- const base = utils.pattern.getBaseDirectory(pattern);
- if (base in collection) {
- collection[base].push(pattern);
- }
- else {
- collection[base] = [pattern];
- }
- return collection;
- }, group);
-}
-exports.groupPatternsByBaseDirectory = groupPatternsByBaseDirectory;
-function convertPatternGroupsToTasks(positive, negative, dynamic) {
- return Object.keys(positive).map((base) => {
- return convertPatternGroupToTask(base, positive[base], negative, dynamic);
- });
-}
-exports.convertPatternGroupsToTasks = convertPatternGroupsToTasks;
-function convertPatternGroupToTask(base, positive, negative, dynamic) {
- return {
- dynamic,
- positive,
- negative,
- base,
- patterns: [].concat(positive, negative.map(utils.pattern.convertToNegativePattern))
- };
-}
-exports.convertPatternGroupToTask = convertPatternGroupToTask;
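+ // Queue the entry for this origin; `completed` guards against running it twice and `destroyed` is flipped by `destroy()`.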
+ entry.listeners = listeners;
+ entry.completed = false;
+ entry.destroyed = false;
+
+ this.queue[normalizedOptions][normalizedOrigin] = entry;
+ this._tryToCreateNewSession(normalizedOptions, normalizedOrigin);
+ });
+ }
+ request(origin, options, headers, streamOptions) {
+ return new Promise((resolve, reject) => {
+ this.getSession(origin, options, [{
+ reject,
+ resolve: session => {
+ try {
+ resolve(session.request(headers, streamOptions));
+ } catch (error) {
+ reject(error);
+ }
+ }
+ }]);
+ });
+ }
-/***/ }),
+ createConnection(origin, options) {
+ return Agent.connect(origin, options);
+ }
-/***/ 5679:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
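+ // Default TLS connection factory: negotiates only the `h2` ALPN protocol and derives the SNI servername from the host when it is not set.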
+ static connect(origin, options) {
+ options.ALPNProtocols = ['h2'];
-"use strict";
+ const port = origin.port || 443;
+ const host = origin.hostname || origin.host;
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-const stream_1 = __nccwpck_require__(2083);
-const provider_1 = __nccwpck_require__(257);
-class ProviderAsync extends provider_1.default {
- constructor() {
- super(...arguments);
- this._reader = new stream_1.default(this._settings);
- }
- read(task) {
- const root = this._getRootDirectory(task);
- const options = this._getReaderOptions(task);
- const entries = [];
- return new Promise((resolve, reject) => {
- const stream = this.api(root, task, options);
- stream.once('error', reject);
- stream.on('data', (entry) => entries.push(options.transform(entry)));
- stream.once('end', () => resolve(entries));
- });
- }
- api(root, task, options) {
- if (task.dynamic) {
- return this._reader.dynamic(root, options);
- }
- return this._reader.static(task.patterns, options);
- }
-}
-exports.default = ProviderAsync;
+ if (typeof options.servername === 'undefined') {
+ options.servername = host;
+ }
+ return tls.connect(port, host, options);
+ }
-/***/ }),
+ closeFreeSessions() {
+ for (const sessions of Object.values(this.sessions)) {
+ for (const session of sessions) {
+ if (session[kCurrentStreamsCount] === 0) {
+ session.close();
+ }
+ }
+ }
+ }
-/***/ 6983:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+ destroy(reason) {
+ for (const sessions of Object.values(this.sessions)) {
+ for (const session of sessions) {
+ session.destroy(reason);
+ }
+ }
-"use strict";
+ for (const entriesOfAuthority of Object.values(this.queue)) {
+ for (const entry of Object.values(entriesOfAuthority)) {
+ entry.destroyed = true;
+ }
+ }
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-const utils = __nccwpck_require__(5444);
-const partial_1 = __nccwpck_require__(5295);
-class DeepFilter {
- constructor(_settings, _micromatchOptions) {
- this._settings = _settings;
- this._micromatchOptions = _micromatchOptions;
- }
- getFilter(basePath, positive, negative) {
- const matcher = this._getMatcher(positive);
- const negativeRe = this._getNegativePatternsRe(negative);
- return (entry) => this._filter(basePath, entry, matcher, negativeRe);
- }
- _getMatcher(patterns) {
- return new partial_1.default(patterns, this._settings, this._micromatchOptions);
- }
- _getNegativePatternsRe(patterns) {
- const affectDepthOfReadingPatterns = patterns.filter(utils.pattern.isAffectDepthOfReadingPattern);
- return utils.pattern.convertPatternsToRe(affectDepthOfReadingPatterns, this._micromatchOptions);
- }
- _filter(basePath, entry, matcher, negativeRe) {
- if (this._isSkippedByDeep(basePath, entry.path)) {
- return false;
- }
- if (this._isSkippedSymbolicLink(entry)) {
- return false;
- }
- const filepath = utils.path.removeLeadingDotSegment(entry.path);
- if (this._isSkippedByPositivePatterns(filepath, matcher)) {
- return false;
- }
- return this._isSkippedByNegativePatterns(filepath, negativeRe);
- }
- _isSkippedByDeep(basePath, entryPath) {
- /**
- * Avoid unnecessary depth calculations when it doesn't matter.
- */
- if (this._settings.deep === Infinity) {
- return false;
- }
- return this._getEntryLevel(basePath, entryPath) >= this._settings.deep;
- }
- _getEntryLevel(basePath, entryPath) {
- const entryPathDepth = entryPath.split('/').length;
- if (basePath === '') {
- return entryPathDepth;
- }
- const basePathDepth = basePath.split('/').length;
- return entryPathDepth - basePathDepth;
- }
- _isSkippedSymbolicLink(entry) {
- return !this._settings.followSymbolicLinks && entry.dirent.isSymbolicLink();
- }
- _isSkippedByPositivePatterns(entryPath, matcher) {
- return !this._settings.baseNameMatch && !matcher.match(entryPath);
- }
- _isSkippedByNegativePatterns(entryPath, patternsRe) {
- return !utils.pattern.matchAny(entryPath, patternsRe);
- }
+ // New requests should NOT attach to destroyed sessions
+ this.queue = {};
+ }
+
+ get freeSessions() {
+ return getSessions({agent: this, isFree: true});
+ }
+
+ get busySessions() {
+ return getSessions({agent: this, isFree: false});
+ }
}
-exports.default = DeepFilter;
+
+Agent.kCurrentStreamsCount = kCurrentStreamsCount;
+Agent.kGracefullyClosing = kGracefullyClosing;
+
+module.exports = {
+ Agent,
+ globalAgent: new Agent()
+};
/***/ }),
-/***/ 1343:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+/***/ 7167:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-const utils = __nccwpck_require__(5444);
-class EntryFilter {
- constructor(_settings, _micromatchOptions) {
- this._settings = _settings;
- this._micromatchOptions = _micromatchOptions;
- this.index = new Map();
- }
- getFilter(positive, negative) {
- const positiveRe = utils.pattern.convertPatternsToRe(positive, this._micromatchOptions);
- const negativeRe = utils.pattern.convertPatternsToRe(negative, this._micromatchOptions);
- return (entry) => this._filter(entry, positiveRe, negativeRe);
- }
- _filter(entry, positiveRe, negativeRe) {
- if (this._settings.unique && this._isDuplicateEntry(entry)) {
- return false;
- }
- if (this._onlyFileFilter(entry) || this._onlyDirectoryFilter(entry)) {
- return false;
- }
- if (this._isSkippedByAbsoluteNegativePatterns(entry.path, negativeRe)) {
- return false;
- }
- const filepath = this._settings.baseNameMatch ? entry.name : entry.path;
- const isMatched = this._isMatchToPatterns(filepath, positiveRe) && !this._isMatchToPatterns(entry.path, negativeRe);
- if (this._settings.unique && isMatched) {
- this._createIndexRecord(entry);
- }
- return isMatched;
- }
- _isDuplicateEntry(entry) {
- return this.index.has(entry.path);
- }
- _createIndexRecord(entry) {
- this.index.set(entry.path, undefined);
- }
- _onlyFileFilter(entry) {
- return this._settings.onlyFiles && !entry.dirent.isFile();
- }
- _onlyDirectoryFilter(entry) {
- return this._settings.onlyDirectories && !entry.dirent.isDirectory();
- }
- _isSkippedByAbsoluteNegativePatterns(entryPath, patternsRe) {
- if (!this._settings.absolute) {
- return false;
- }
- const fullpath = utils.path.makeAbsolute(this._settings.cwd, entryPath);
- return utils.pattern.matchAny(fullpath, patternsRe);
- }
- _isMatchToPatterns(entryPath, patternsRe) {
- const filepath = utils.path.removeLeadingDotSegment(entryPath);
- return utils.pattern.matchAny(filepath, patternsRe);
- }
-}
-exports.default = EntryFilter;
+const http = __nccwpck_require__(8605);
+const https = __nccwpck_require__(7211);
+const resolveALPN = __nccwpck_require__(6624);
+const QuickLRU = __nccwpck_require__(9273);
+const Http2ClientRequest = __nccwpck_require__(9632);
+const calculateServerName = __nccwpck_require__(1982);
+const urlToOptions = __nccwpck_require__(2686);
+const cache = new QuickLRU({maxSize: 100});
+const queue = new Map();
-/***/ }),
+const installSocket = (agent, socket, options) => {
+ socket._httpMessage = {shouldKeepAlive: true};
-/***/ 6654:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+ const onFree = () => {
+ agent.emit('free', socket, options);
+ };
-"use strict";
+ socket.on('free', onFree);
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-const utils = __nccwpck_require__(5444);
-class ErrorFilter {
- constructor(_settings) {
- this._settings = _settings;
- }
- getFilter() {
- return (error) => this._isNonFatalError(error);
- }
- _isNonFatalError(error) {
- return utils.errno.isEnoentCodeError(error) || this._settings.suppressErrors;
- }
-}
-exports.default = ErrorFilter;
+ const onClose = () => {
+ agent.removeSocket(socket, options);
+ };
+ socket.on('close', onClose);
-/***/ }),
+ const onRemove = () => {
+ agent.removeSocket(socket, options);
+ socket.off('close', onClose);
+ socket.off('free', onFree);
+ socket.off('agentRemove', onRemove);
+ };
-/***/ 2576:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+ socket.on('agentRemove', onRemove);
-"use strict";
+ agent.emit('free', socket, options);
+};
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-const utils = __nccwpck_require__(5444);
-class Matcher {
- constructor(_patterns, _settings, _micromatchOptions) {
- this._patterns = _patterns;
- this._settings = _settings;
- this._micromatchOptions = _micromatchOptions;
- this._storage = [];
- this._fillStorage();
- }
- _fillStorage() {
- /**
- * The original pattern may include `{,*,**,a/*}`, which will lead to problems with matching (unresolved level).
- * So, before expand patterns with brace expansion into separated patterns.
- */
- const patterns = utils.pattern.expandPatternsWithBraceExpansion(this._patterns);
- for (const pattern of patterns) {
- const segments = this._getPatternSegments(pattern);
- const sections = this._splitSegmentsIntoSections(segments);
- this._storage.push({
- complete: sections.length <= 1,
- pattern,
- segments,
- sections
- });
- }
- }
- _getPatternSegments(pattern) {
- const parts = utils.pattern.getPatternParts(pattern, this._micromatchOptions);
- return parts.map((part) => {
- const dynamic = utils.pattern.isDynamicPattern(part, this._settings);
- if (!dynamic) {
- return {
- dynamic: false,
- pattern: part
- };
- }
- return {
- dynamic: true,
- pattern: part,
- patternRe: utils.pattern.makeRe(part, this._micromatchOptions)
- };
- });
- }
- _splitSegmentsIntoSections(segments) {
- return utils.array.splitWhen(segments, (segment) => segment.dynamic && utils.pattern.hasGlobStar(segment.pattern));
- }
-}
-exports.default = Matcher;
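+// Resolves the ALPN protocol for a given host, port and protocol list and caches the result;
+// concurrent lookups for the same key share one in-flight promise via `queue`.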
+const resolveProtocol = async options => {
+ const name = `${options.host}:${options.port}:${options.ALPNProtocols.sort()}`;
+
+ if (!cache.has(name)) {
+ if (queue.has(name)) {
+ const result = await queue.get(name);
+ return result.alpnProtocol;
+ }
+ const {path, agent} = options;
+ options.path = options.socketPath;
-/***/ }),
+ const resultPromise = resolveALPN(options);
+ queue.set(name, resultPromise);
-/***/ 5295:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+ try {
+ const {socket, alpnProtocol} = await resultPromise;
+ cache.set(name, alpnProtocol);
-"use strict";
+ options.path = path;
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-const matcher_1 = __nccwpck_require__(2576);
-class PartialMatcher extends matcher_1.default {
- match(filepath) {
- const parts = filepath.split('/');
- const levels = parts.length;
- const patterns = this._storage.filter((info) => !info.complete || info.segments.length > levels);
- for (const pattern of patterns) {
- const section = pattern.sections[0];
- /**
- * In this case, the pattern has a globstar and we must read all directories unconditionally,
- * but only if the level has reached the end of the first group.
- *
- * fixtures/{a,b}/**
- * ^ true/false ^ always true
- */
- if (!pattern.complete && levels > section.length) {
- return true;
- }
- const match = parts.every((part, index) => {
- const segment = pattern.segments[index];
- if (segment.dynamic && segment.patternRe.test(part)) {
- return true;
- }
- if (!segment.dynamic && segment.pattern === part) {
- return true;
- }
- return false;
- });
- if (match) {
- return true;
- }
- }
- return false;
- }
-}
-exports.default = PartialMatcher;
+ if (alpnProtocol === 'h2') {
+ // https://github.com/nodejs/node/issues/33343
+ socket.destroy();
+ } else {
+ const {globalAgent} = https;
+ const defaultCreateConnection = https.Agent.prototype.createConnection;
+ if (agent) {
+ if (agent.createConnection === defaultCreateConnection) {
+ installSocket(agent, socket, options);
+ } else {
+ socket.destroy();
+ }
+ } else if (globalAgent.createConnection === defaultCreateConnection) {
+ installSocket(globalAgent, socket, options);
+ } else {
+ socket.destroy();
+ }
+ }
-/***/ }),
+ queue.delete(name);
-/***/ 257:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+ return alpnProtocol;
+ } catch (error) {
+ queue.delete(name);
-"use strict";
+ throw error;
+ }
+ }
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-const path = __nccwpck_require__(5622);
-const deep_1 = __nccwpck_require__(6983);
-const entry_1 = __nccwpck_require__(1343);
-const error_1 = __nccwpck_require__(6654);
-const entry_2 = __nccwpck_require__(4029);
-class Provider {
- constructor(_settings) {
- this._settings = _settings;
- this.errorFilter = new error_1.default(this._settings);
- this.entryFilter = new entry_1.default(this._settings, this._getMicromatchOptions());
- this.deepFilter = new deep_1.default(this._settings, this._getMicromatchOptions());
- this.entryTransformer = new entry_2.default(this._settings);
- }
- _getRootDirectory(task) {
- return path.resolve(this._settings.cwd, task.base);
- }
- _getReaderOptions(task) {
- const basePath = task.base === '.' ? '' : task.base;
- return {
- basePath,
- pathSegmentSeparator: '/',
- concurrency: this._settings.concurrency,
- deepFilter: this.deepFilter.getFilter(basePath, task.positive, task.negative),
- entryFilter: this.entryFilter.getFilter(task.positive, task.negative),
- errorFilter: this.errorFilter.getFilter(),
- followSymbolicLinks: this._settings.followSymbolicLinks,
- fs: this._settings.fs,
- stats: this._settings.stats,
- throwErrorOnBrokenSymbolicLink: this._settings.throwErrorOnBrokenSymbolicLink,
- transform: this.entryTransformer.getTransformer()
- };
- }
- _getMicromatchOptions() {
- return {
- dot: this._settings.dot,
- matchBase: this._settings.baseNameMatch,
- nobrace: !this._settings.braceExpansion,
- nocase: !this._settings.caseSensitiveMatch,
- noext: !this._settings.extglob,
- noglobstar: !this._settings.globstar,
- posix: true,
- strictSlashes: false
- };
- }
-}
-exports.default = Provider;
+ return cache.get(name);
+};
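+// `auto` request function: issues the request over HTTP/2 when the server negotiates `h2`, and falls back to HTTP/1 otherwise.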
+module.exports = async (input, options, callback) => {
+ if (typeof input === 'string' || input instanceof URL) {
+ input = urlToOptions(new URL(input));
+ }
-/***/ }),
+ if (typeof options === 'function') {
+ callback = options;
+ options = undefined;
+ }
-/***/ 4630:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+ options = {
+ ALPNProtocols: ['h2', 'http/1.1'],
+ ...input,
+ ...options,
+ resolveSocket: true
+ };
-"use strict";
+ if (!Array.isArray(options.ALPNProtocols) || options.ALPNProtocols.length === 0) {
+ throw new Error('The `ALPNProtocols` option must be an Array with at least one entry');
+ }
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-const stream_1 = __nccwpck_require__(2413);
-const stream_2 = __nccwpck_require__(2083);
-const provider_1 = __nccwpck_require__(257);
-class ProviderStream extends provider_1.default {
- constructor() {
- super(...arguments);
- this._reader = new stream_2.default(this._settings);
- }
- read(task) {
- const root = this._getRootDirectory(task);
- const options = this._getReaderOptions(task);
- const source = this.api(root, task, options);
- const destination = new stream_1.Readable({ objectMode: true, read: () => { } });
- source
- .once('error', (error) => destination.emit('error', error))
- .on('data', (entry) => destination.emit('data', options.transform(entry)))
- .once('end', () => destination.emit('end'));
- destination
- .once('close', () => source.destroy());
- return destination;
- }
- api(root, task, options) {
- if (task.dynamic) {
- return this._reader.dynamic(root, options);
- }
- return this._reader.static(task.patterns, options);
- }
-}
-exports.default = ProviderStream;
+ options.protocol = options.protocol || 'https:';
+ const isHttps = options.protocol === 'https:';
+ options.host = options.hostname || options.host || 'localhost';
+ options.session = options.tlsSession;
+ options.servername = options.servername || calculateServerName(options);
+ options.port = options.port || (isHttps ? 443 : 80);
+ options._defaultAgent = isHttps ? https.globalAgent : http.globalAgent;
-/***/ }),
+ const agents = options.agent;
-/***/ 2405:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+ if (agents) {
+ if (agents.addRequest) {
+ throw new Error('The `options.agent` object can contain only `http`, `https` or `http2` properties');
+ }
-"use strict";
+ options.agent = agents[isHttps ? 'https' : 'http'];
+ }
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-const sync_1 = __nccwpck_require__(8821);
-const provider_1 = __nccwpck_require__(257);
-class ProviderSync extends provider_1.default {
- constructor() {
- super(...arguments);
- this._reader = new sync_1.default(this._settings);
- }
- read(task) {
- const root = this._getRootDirectory(task);
- const options = this._getReaderOptions(task);
- const entries = this.api(root, task, options);
- return entries.map(options.transform);
- }
- api(root, task, options) {
- if (task.dynamic) {
- return this._reader.dynamic(root, options);
- }
- return this._reader.static(task.patterns, options);
- }
-}
-exports.default = ProviderSync;
+ if (isHttps) {
+ const protocol = await resolveProtocol(options);
+
+ if (protocol === 'h2') {
+ if (agents) {
+ options.agent = agents.http2;
+ }
+
+ return new Http2ClientRequest(options, callback);
+ }
+ }
+
+ return http.request(options, callback);
+};
+
+module.exports.protocolCache = cache;
/***/ }),
-/***/ 4029:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+/***/ 9632:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-const utils = __nccwpck_require__(5444);
-class EntryTransformer {
- constructor(_settings) {
- this._settings = _settings;
- }
- getTransformer() {
- return (entry) => this._transform(entry);
- }
- _transform(entry) {
- let filepath = entry.path;
- if (this._settings.absolute) {
- filepath = utils.path.makeAbsolute(this._settings.cwd, filepath);
- filepath = utils.path.unixify(filepath);
- }
- if (this._settings.markDirectories && entry.dirent.isDirectory()) {
- filepath += '/';
- }
- if (!this._settings.objectMode) {
- return filepath;
- }
- return Object.assign(Object.assign({}, entry), { path: filepath });
- }
-}
-exports.default = EntryTransformer;
+const http2 = __nccwpck_require__(7565);
+const {Writable} = __nccwpck_require__(2413);
+const {Agent, globalAgent} = __nccwpck_require__(9898);
+const IncomingMessage = __nccwpck_require__(2575);
+const urlToOptions = __nccwpck_require__(2686);
+const proxyEvents = __nccwpck_require__(1818);
+const isRequestPseudoHeader = __nccwpck_require__(1199);
+const {
+ ERR_INVALID_ARG_TYPE,
+ ERR_INVALID_PROTOCOL,
+ ERR_HTTP_HEADERS_SENT,
+ ERR_INVALID_HTTP_TOKEN,
+ ERR_HTTP_INVALID_HEADER_VALUE,
+ ERR_INVALID_CHAR
+} = __nccwpck_require__(7087);
+
+const {
+ HTTP2_HEADER_STATUS,
+ HTTP2_HEADER_METHOD,
+ HTTP2_HEADER_PATH,
+ HTTP2_METHOD_CONNECT
+} = http2.constants;
+
+const kHeaders = Symbol('headers');
+const kOrigin = Symbol('origin');
+const kSession = Symbol('session');
+const kOptions = Symbol('options');
+const kFlushedHeaders = Symbol('flushedHeaders');
+const kJobs = Symbol('jobs');
+
+const isValidHttpToken = /^[\^`\-\w!#$%&*+.|~]+$/;
+const isInvalidHeaderValue = /[^\t\u0020-\u007E\u0080-\u00FF]/;
+
+class ClientRequest extends Writable {
+ constructor(input, options, callback) {
+ super({
+ autoDestroy: false
+ });
+
+ const hasInput = typeof input === 'string' || input instanceof URL;
+ if (hasInput) {
+ input = urlToOptions(input instanceof URL ? input : new URL(input));
+ }
+
+ if (typeof options === 'function' || options === undefined) {
+ // (options, callback)
+ callback = options;
+ options = hasInput ? input : {...input};
+ } else {
+ // (input, options, callback)
+ options = {...input, ...options};
+ }
+
+ if (options.h2session) {
+ this[kSession] = options.h2session;
+ } else if (options.agent === false) {
+ this.agent = new Agent({maxFreeSessions: 0});
+ } else if (typeof options.agent === 'undefined' || options.agent === null) {
+ if (typeof options.createConnection === 'function') {
+ // This is a workaround - we don't have to create the session on our own.
+ this.agent = new Agent({maxFreeSessions: 0});
+ this.agent.createConnection = options.createConnection;
+ } else {
+ this.agent = globalAgent;
+ }
+ } else if (typeof options.agent.request === 'function') {
+ this.agent = options.agent;
+ } else {
+ throw new ERR_INVALID_ARG_TYPE('options.agent', ['Agent-like Object', 'undefined', 'false'], options.agent);
+ }
+
+ if (options.protocol && options.protocol !== 'https:') {
+ throw new ERR_INVALID_PROTOCOL(options.protocol, 'https:');
+ }
+
+ const port = options.port || options.defaultPort || (this.agent && this.agent.defaultPort) || 443;
+ const host = options.hostname || options.host || 'localhost';
+
+ // Don't enforce the origin via options. It may be changed in an Agent.
+ delete options.hostname;
+ delete options.host;
+ delete options.port;
+
+ const {timeout} = options;
+ options.timeout = undefined;
+
+ this[kHeaders] = Object.create(null);
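+ // Writes and `end()` calls issued before the HTTP/2 stream exists are queued in kJobs and replayed once the stream is ready.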
+ this[kJobs] = [];
+
+ this.socket = null;
+ this.connection = null;
+
+ this.method = options.method || 'GET';
+ this.path = options.path;
+
+ this.res = null;
+ this.aborted = false;
+ this.reusedSocket = false;
+
+ if (options.headers) {
+ for (const [header, value] of Object.entries(options.headers)) {
+ this.setHeader(header, value);
+ }
+ }
+
+ if (options.auth && !('authorization' in this[kHeaders])) {
+ this[kHeaders].authorization = 'Basic ' + Buffer.from(options.auth).toString('base64');
+ }
+
+ options.session = options.tlsSession;
+ options.path = options.socketPath;
+
+ this[kOptions] = options;
+
+ // Clients that generate HTTP/2 requests directly SHOULD use the :authority pseudo-header field instead of the Host header field.
+ if (port === 443) {
+ this[kOrigin] = `https://${host}`;
+
+ if (!(':authority' in this[kHeaders])) {
+ this[kHeaders][':authority'] = host;
+ }
+ } else {
+ this[kOrigin] = `https://${host}:${port}`;
+
+ if (!(':authority' in this[kHeaders])) {
+ this[kHeaders][':authority'] = `${host}:${port}`;
+ }
+ }
+
+ if (timeout) {
+ this.setTimeout(timeout);
+ }
+
+ if (callback) {
+ this.once('response', callback);
+ }
+
+ this[kFlushedHeaders] = false;
+ }
+
+ get method() {
+ return this[kHeaders][HTTP2_HEADER_METHOD];
+ }
+
+ set method(value) {
+ if (value) {
+ this[kHeaders][HTTP2_HEADER_METHOD] = value.toUpperCase();
+ }
+ }
+
+ get path() {
+ return this[kHeaders][HTTP2_HEADER_PATH];
+ }
+
+ set path(value) {
+ if (value) {
+ this[kHeaders][HTTP2_HEADER_PATH] = value;
+ }
+ }
+ get _mustNotHaveABody() {
+ return this.method === 'GET' || this.method === 'HEAD' || this.method === 'DELETE';
+ }
-/***/ }),
+ _write(chunk, encoding, callback) {
+ // https://github.com/nodejs/node/blob/654df09ae0c5e17d1b52a900a545f0664d8c7627/lib/internal/http2/util.js#L148-L156
+ if (this._mustNotHaveABody) {
+ callback(new Error('The GET, HEAD and DELETE methods must NOT have a body'));
+ /* istanbul ignore next: Node.js 12 throws directly */
+ return;
+ }
-/***/ 8062:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+ this.flushHeaders();
-"use strict";
+ const callWrite = () => this._request.write(chunk, encoding, callback);
+ if (this._request) {
+ callWrite();
+ } else {
+ this[kJobs].push(callWrite);
+ }
+ }
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-const path = __nccwpck_require__(5622);
-const fsStat = __nccwpck_require__(109);
-const utils = __nccwpck_require__(5444);
-class Reader {
- constructor(_settings) {
- this._settings = _settings;
- this._fsStatSettings = new fsStat.Settings({
- followSymbolicLink: this._settings.followSymbolicLinks,
- fs: this._settings.fs,
- throwErrorOnBrokenSymbolicLink: this._settings.followSymbolicLinks
- });
- }
- _getFullEntryPath(filepath) {
- return path.resolve(this._settings.cwd, filepath);
- }
- _makeEntry(stats, pattern) {
- const entry = {
- name: pattern,
- path: pattern,
- dirent: utils.fs.createDirentFromStats(pattern, stats)
- };
- if (this._settings.stats) {
- entry.stats = stats;
- }
- return entry;
- }
- _isFatalError(error) {
- return !utils.errno.isEnoentCodeError(error) && !this._settings.suppressErrors;
- }
-}
-exports.default = Reader;
+ _final(callback) {
+ if (this.destroyed) {
+ return;
+ }
+ this.flushHeaders();
-/***/ }),
+ const callEnd = () => {
+ // For GET, HEAD and DELETE
+ if (this._mustNotHaveABody) {
+ callback();
+ return;
+ }
-/***/ 2083:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+ this._request.end(callback);
+ };
-"use strict";
+ if (this._request) {
+ callEnd();
+ } else {
+ this[kJobs].push(callEnd);
+ }
+ }
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-const stream_1 = __nccwpck_require__(2413);
-const fsStat = __nccwpck_require__(109);
-const fsWalk = __nccwpck_require__(6026);
-const reader_1 = __nccwpck_require__(8062);
-class ReaderStream extends reader_1.default {
- constructor() {
- super(...arguments);
- this._walkStream = fsWalk.walkStream;
- this._stat = fsStat.stat;
- }
- dynamic(root, options) {
- return this._walkStream(root, options);
- }
- static(patterns, options) {
- const filepaths = patterns.map(this._getFullEntryPath, this);
- const stream = new stream_1.PassThrough({ objectMode: true });
- stream._write = (index, _enc, done) => {
- return this._getEntry(filepaths[index], patterns[index], options)
- .then((entry) => {
- if (entry !== null && options.entryFilter(entry)) {
- stream.push(entry);
- }
- if (index === filepaths.length - 1) {
- stream.end();
- }
- done();
- })
- .catch(done);
- };
- for (let i = 0; i < filepaths.length; i++) {
- stream.write(i);
- }
- return stream;
- }
- _getEntry(filepath, pattern, options) {
- return this._getStat(filepath)
- .then((stats) => this._makeEntry(stats, pattern))
- .catch((error) => {
- if (options.errorFilter(error)) {
- return null;
- }
- throw error;
- });
- }
- _getStat(filepath) {
- return new Promise((resolve, reject) => {
- this._stat(filepath, this._fsStatSettings, (error, stats) => {
- return error === null ? resolve(stats) : reject(error);
- });
- });
- }
-}
-exports.default = ReaderStream;
+ abort() {
+ if (this.res && this.res.complete) {
+ return;
+ }
+ if (!this.aborted) {
+ process.nextTick(() => this.emit('abort'));
+ }
-/***/ }),
+ this.aborted = true;
-/***/ 8821:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+ this.destroy();
+ }
-"use strict";
+ _destroy(error, callback) {
+ if (this.res) {
+ this.res._dump();
+ }
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-const fsStat = __nccwpck_require__(109);
-const fsWalk = __nccwpck_require__(6026);
-const reader_1 = __nccwpck_require__(8062);
-class ReaderSync extends reader_1.default {
- constructor() {
- super(...arguments);
- this._walkSync = fsWalk.walkSync;
- this._statSync = fsStat.statSync;
- }
- dynamic(root, options) {
- return this._walkSync(root, options);
- }
- static(patterns, options) {
- const entries = [];
- for (const pattern of patterns) {
- const filepath = this._getFullEntryPath(pattern);
- const entry = this._getEntry(filepath, pattern, options);
- if (entry === null || !options.entryFilter(entry)) {
- continue;
- }
- entries.push(entry);
- }
- return entries;
- }
- _getEntry(filepath, pattern, options) {
- try {
- const stats = this._getStat(filepath);
- return this._makeEntry(stats, pattern);
- }
- catch (error) {
- if (options.errorFilter(error)) {
- return null;
- }
- throw error;
- }
- }
- _getStat(filepath) {
- return this._statSync(filepath, this._fsStatSettings);
- }
-}
-exports.default = ReaderSync;
+ if (this._request) {
+ this._request.destroy();
+ }
+ callback(error);
+ }
-/***/ }),
+ async flushHeaders() {
+ if (this[kFlushedHeaders] || this.destroyed) {
+ return;
+ }
-/***/ 952:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+ this[kFlushedHeaders] = true;
-"use strict";
+ const isConnectMethod = this.method === HTTP2_METHOD_CONNECT;
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.DEFAULT_FILE_SYSTEM_ADAPTER = void 0;
-const fs = __nccwpck_require__(5747);
-const os = __nccwpck_require__(2087);
-/**
- * The `os.cpus` method can return zero. We expect the number of cores to be greater than zero.
- * https://github.com/nodejs/node/blob/7faeddf23a98c53896f8b574a6e66589e8fb1eb8/lib/os.js#L106-L107
- */
-const CPU_COUNT = Math.max(os.cpus().length, 1);
-exports.DEFAULT_FILE_SYSTEM_ADAPTER = {
- lstat: fs.lstat,
- lstatSync: fs.lstatSync,
- stat: fs.stat,
- statSync: fs.statSync,
- readdir: fs.readdir,
- readdirSync: fs.readdirSync
-};
-class Settings {
- constructor(_options = {}) {
- this._options = _options;
- this.absolute = this._getValue(this._options.absolute, false);
- this.baseNameMatch = this._getValue(this._options.baseNameMatch, false);
- this.braceExpansion = this._getValue(this._options.braceExpansion, true);
- this.caseSensitiveMatch = this._getValue(this._options.caseSensitiveMatch, true);
- this.concurrency = this._getValue(this._options.concurrency, CPU_COUNT);
- this.cwd = this._getValue(this._options.cwd, process.cwd());
- this.deep = this._getValue(this._options.deep, Infinity);
- this.dot = this._getValue(this._options.dot, false);
- this.extglob = this._getValue(this._options.extglob, true);
- this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, true);
- this.fs = this._getFileSystemMethods(this._options.fs);
- this.globstar = this._getValue(this._options.globstar, true);
- this.ignore = this._getValue(this._options.ignore, []);
- this.markDirectories = this._getValue(this._options.markDirectories, false);
- this.objectMode = this._getValue(this._options.objectMode, false);
- this.onlyDirectories = this._getValue(this._options.onlyDirectories, false);
- this.onlyFiles = this._getValue(this._options.onlyFiles, true);
- this.stats = this._getValue(this._options.stats, false);
- this.suppressErrors = this._getValue(this._options.suppressErrors, false);
- this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, false);
- this.unique = this._getValue(this._options.unique, true);
- if (this.onlyDirectories) {
- this.onlyFiles = false;
- }
- if (this.stats) {
- this.objectMode = true;
- }
- }
- _getValue(option, value) {
- return option === undefined ? value : option;
- }
- _getFileSystemMethods(methods = {}) {
- return Object.assign(Object.assign({}, exports.DEFAULT_FILE_SYSTEM_ADAPTER), methods);
- }
-}
-exports.default = Settings;
+ // The real magic is here
+ const onStream = stream => {
+ this._request = stream;
+ if (this.destroyed) {
+ stream.destroy();
+ return;
+ }
-/***/ }),
+ // Forwards `timeout`, `continue`, `close` and `error` events to this instance.
+ if (!isConnectMethod) {
+ proxyEvents(stream, this, ['timeout', 'continue', 'close', 'error']);
+ }
-/***/ 5325:
-/***/ ((__unused_webpack_module, exports) => {
+ // Wait for the `finish` event. We don't want to emit the `response` event
+ // before `request.end()` is called.
+ const waitForEnd = fn => {
+ return (...args) => {
+ if (!this.writable && !this.destroyed) {
+ fn(...args);
+ } else {
+ this.once('finish', () => {
+ fn(...args);
+ });
+ }
+ };
+ };
-"use strict";
+ // This event tells us we are ready to listen for the data.
+ stream.once('response', waitForEnd((headers, flags, rawHeaders) => {
+ // If we were to emit the raw request stream, it would be as fast as the native approach.
+ // Note that wrapping the raw stream in a Proxy instance won't improve the performance (already tested it).
+ const response = new IncomingMessage(this.socket, stream.readableHighWaterMark);
+ this.res = response;
+
+ response.req = this;
+ response.statusCode = headers[HTTP2_HEADER_STATUS];
+ response.headers = headers;
+ response.rawHeaders = rawHeaders;
+
+ response.once('end', () => {
+ if (this.aborted) {
+ response.aborted = true;
+ response.emit('aborted');
+ } else {
+ response.complete = true;
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.splitWhen = exports.flatten = void 0;
-function flatten(items) {
- return items.reduce((collection, item) => [].concat(collection, item), []);
-}
-exports.flatten = flatten;
-function splitWhen(items, predicate) {
- const result = [[]];
- let groupIndex = 0;
- for (const item of items) {
- if (predicate(item)) {
- groupIndex++;
- result[groupIndex] = [];
- }
- else {
- result[groupIndex].push(item);
- }
- }
- return result;
-}
-exports.splitWhen = splitWhen;
+ // Has no effect, just be consistent with the Node.js behavior
+ response.socket = null;
+ response.connection = null;
+ }
+ });
+ if (isConnectMethod) {
+ response.upgrade = true;
-/***/ }),
+ // The HTTP1 API says the socket is detached here,
+ // but we can't do that so we pass the original HTTP2 request.
+ if (this.emit('connect', response, stream, Buffer.alloc(0))) {
+ this.emit('close');
+ } else {
+ // No listeners attached, destroy the original request.
+ stream.destroy();
+ }
+ } else {
+ // Forwards data
+ stream.on('data', chunk => {
+ if (!response._dumped && !response.push(chunk)) {
+ stream.pause();
+ }
+ });
-/***/ 1230:
-/***/ ((__unused_webpack_module, exports) => {
+ stream.once('end', () => {
+ response.push(null);
+ });
-"use strict";
+ if (!this.emit('response', response)) {
+ // No listeners attached, dump the response.
+ response._dump();
+ }
+ }
+ }));
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.isEnoentCodeError = void 0;
-function isEnoentCodeError(error) {
- return error.code === 'ENOENT';
-}
-exports.isEnoentCodeError = isEnoentCodeError;
+ // Emits `information` event
+ stream.once('headers', waitForEnd(
+ headers => this.emit('information', {statusCode: headers[HTTP2_HEADER_STATUS]})
+ ));
+ stream.once('trailers', waitForEnd((trailers, flags, rawTrailers) => {
+ const {res} = this;
-/***/ }),
+ // Assigns trailers to the response object.
+ res.trailers = trailers;
+ res.rawTrailers = rawTrailers;
+ }));
-/***/ 7543:
-/***/ ((__unused_webpack_module, exports) => {
+ const {socket} = stream.session;
+ this.socket = socket;
+ this.connection = socket;
-"use strict";
+ for (const job of this[kJobs]) {
+ job();
+ }
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.createDirentFromStats = void 0;
-class DirentFromStats {
- constructor(name, stats) {
- this.name = name;
- this.isBlockDevice = stats.isBlockDevice.bind(stats);
- this.isCharacterDevice = stats.isCharacterDevice.bind(stats);
- this.isDirectory = stats.isDirectory.bind(stats);
- this.isFIFO = stats.isFIFO.bind(stats);
- this.isFile = stats.isFile.bind(stats);
- this.isSocket = stats.isSocket.bind(stats);
- this.isSymbolicLink = stats.isSymbolicLink.bind(stats);
- }
-}
-function createDirentFromStats(name, stats) {
- return new DirentFromStats(name, stats);
-}
-exports.createDirentFromStats = createDirentFromStats;
+ this.emit('socket', this.socket);
+ };
+ // Makes an HTTP2 request
+ if (this[kSession]) {
+ try {
+ onStream(this[kSession].request(this[kHeaders]));
+ } catch (error) {
+ this.emit('error', error);
+ }
+ } else {
+ this.reusedSocket = true;
-/***/ }),
+ try {
+ onStream(await this.agent.request(this[kOrigin], this[kOptions], this[kHeaders]));
+ } catch (error) {
+ this.emit('error', error);
+ }
+ }
+ }
-/***/ 5444:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+ getHeader(name) {
+ if (typeof name !== 'string') {
+ throw new ERR_INVALID_ARG_TYPE('name', 'string', name);
+ }
-"use strict";
+ return this[kHeaders][name.toLowerCase()];
+ }
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.string = exports.stream = exports.pattern = exports.path = exports.fs = exports.errno = exports.array = void 0;
-const array = __nccwpck_require__(5325);
-exports.array = array;
-const errno = __nccwpck_require__(1230);
-exports.errno = errno;
-const fs = __nccwpck_require__(7543);
-exports.fs = fs;
-const path = __nccwpck_require__(3873);
-exports.path = path;
-const pattern = __nccwpck_require__(1221);
-exports.pattern = pattern;
-const stream = __nccwpck_require__(8382);
-exports.stream = stream;
-const string = __nccwpck_require__(2203);
-exports.string = string;
+ get headersSent() {
+ return this[kFlushedHeaders];
+ }
+ removeHeader(name) {
+ if (typeof name !== 'string') {
+ throw new ERR_INVALID_ARG_TYPE('name', 'string', name);
+ }
-/***/ }),
+ if (this.headersSent) {
+ throw new ERR_HTTP_HEADERS_SENT('remove');
+ }
-/***/ 3873:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+ delete this[kHeaders][name.toLowerCase()];
+ }
-"use strict";
+ setHeader(name, value) {
+ if (this.headersSent) {
+ throw new ERR_HTTP_HEADERS_SENT('set');
+ }
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.removeLeadingDotSegment = exports.escape = exports.makeAbsolute = exports.unixify = void 0;
-const path = __nccwpck_require__(5622);
-const LEADING_DOT_SEGMENT_CHARACTERS_COUNT = 2; // ./ or .\\
-const UNESCAPED_GLOB_SYMBOLS_RE = /(\\?)([()*?[\]{|}]|^!|[!+@](?=\())/g;
-/**
- * Designed to work only with simple paths: `dir\\file`.
- */
-function unixify(filepath) {
- return filepath.replace(/\\/g, '/');
-}
-exports.unixify = unixify;
-function makeAbsolute(cwd, filepath) {
- return path.resolve(cwd, filepath);
-}
-exports.makeAbsolute = makeAbsolute;
-function escape(pattern) {
- return pattern.replace(UNESCAPED_GLOB_SYMBOLS_RE, '\\$2');
-}
-exports.escape = escape;
-function removeLeadingDotSegment(entry) {
- // We do not use `startsWith` because this is 10x slower than current implementation for some cases.
- // eslint-disable-next-line @typescript-eslint/prefer-string-starts-ends-with
- if (entry.charAt(0) === '.') {
- const secondCharactery = entry.charAt(1);
- if (secondCharactery === '/' || secondCharactery === '\\') {
- return entry.slice(LEADING_DOT_SEGMENT_CHARACTERS_COUNT);
- }
- }
- return entry;
-}
-exports.removeLeadingDotSegment = removeLeadingDotSegment;
+ if (typeof name !== 'string' || (!isValidHttpToken.test(name) && !isRequestPseudoHeader(name))) {
+ throw new ERR_INVALID_HTTP_TOKEN('Header name', name);
+ }
+ if (typeof value === 'undefined') {
+ throw new ERR_HTTP_INVALID_HEADER_VALUE(value, name);
+ }
-/***/ }),
+ if (isInvalidHeaderValue.test(value)) {
+ throw new ERR_INVALID_CHAR('header content', name);
+ }
-/***/ 1221:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+ this[kHeaders][name.toLowerCase()] = value;
+ }
-"use strict";
+ setNoDelay() {
+ // HTTP2 sockets cannot be malformed, do nothing.
+ }
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.matchAny = exports.convertPatternsToRe = exports.makeRe = exports.getPatternParts = exports.expandBraceExpansion = exports.expandPatternsWithBraceExpansion = exports.isAffectDepthOfReadingPattern = exports.endsWithSlashGlobStar = exports.hasGlobStar = exports.getBaseDirectory = exports.getPositivePatterns = exports.getNegativePatterns = exports.isPositivePattern = exports.isNegativePattern = exports.convertToNegativePattern = exports.convertToPositivePattern = exports.isDynamicPattern = exports.isStaticPattern = void 0;
-const path = __nccwpck_require__(5622);
-const globParent = __nccwpck_require__(4655);
-const micromatch = __nccwpck_require__(3913);
-const picomatch = __nccwpck_require__(8569);
-const GLOBSTAR = '**';
-const ESCAPE_SYMBOL = '\\';
-const COMMON_GLOB_SYMBOLS_RE = /[*?]|^!/;
-const REGEX_CHARACTER_CLASS_SYMBOLS_RE = /\[.*]/;
-const REGEX_GROUP_SYMBOLS_RE = /(?:^|[^!*+?@])\(.*\|.*\)/;
-const GLOB_EXTENSION_SYMBOLS_RE = /[!*+?@]\(.*\)/;
-const BRACE_EXPANSIONS_SYMBOLS_RE = /{.*(?:,|\.\.).*}/;
-function isStaticPattern(pattern, options = {}) {
- return !isDynamicPattern(pattern, options);
-}
-exports.isStaticPattern = isStaticPattern;
-function isDynamicPattern(pattern, options = {}) {
- /**
- * A special case with an empty string is necessary for matching patterns that start with a forward slash.
- * An empty string cannot be a dynamic pattern.
- * For example, the pattern `/lib/*` will be spread into parts: '', 'lib', '*'.
- */
- if (pattern === '') {
- return false;
- }
- /**
- * When the `caseSensitiveMatch` option is disabled, all patterns must be marked as dynamic, because we cannot check
- * filepath directly (without read directory).
- */
- if (options.caseSensitiveMatch === false || pattern.includes(ESCAPE_SYMBOL)) {
- return true;
- }
- if (COMMON_GLOB_SYMBOLS_RE.test(pattern) || REGEX_CHARACTER_CLASS_SYMBOLS_RE.test(pattern) || REGEX_GROUP_SYMBOLS_RE.test(pattern)) {
- return true;
- }
- if (options.extglob !== false && GLOB_EXTENSION_SYMBOLS_RE.test(pattern)) {
- return true;
- }
- if (options.braceExpansion !== false && BRACE_EXPANSIONS_SYMBOLS_RE.test(pattern)) {
- return true;
- }
- return false;
-}
-exports.isDynamicPattern = isDynamicPattern;
-function convertToPositivePattern(pattern) {
- return isNegativePattern(pattern) ? pattern.slice(1) : pattern;
-}
-exports.convertToPositivePattern = convertToPositivePattern;
-function convertToNegativePattern(pattern) {
- return '!' + pattern;
-}
-exports.convertToNegativePattern = convertToNegativePattern;
-function isNegativePattern(pattern) {
- return pattern.startsWith('!') && pattern[1] !== '(';
-}
-exports.isNegativePattern = isNegativePattern;
-function isPositivePattern(pattern) {
- return !isNegativePattern(pattern);
-}
-exports.isPositivePattern = isPositivePattern;
-function getNegativePatterns(patterns) {
- return patterns.filter(isNegativePattern);
-}
-exports.getNegativePatterns = getNegativePatterns;
-function getPositivePatterns(patterns) {
- return patterns.filter(isPositivePattern);
-}
-exports.getPositivePatterns = getPositivePatterns;
-function getBaseDirectory(pattern) {
- return globParent(pattern, { flipBackslashes: false });
-}
-exports.getBaseDirectory = getBaseDirectory;
-function hasGlobStar(pattern) {
- return pattern.includes(GLOBSTAR);
-}
-exports.hasGlobStar = hasGlobStar;
-function endsWithSlashGlobStar(pattern) {
- return pattern.endsWith('/' + GLOBSTAR);
-}
-exports.endsWithSlashGlobStar = endsWithSlashGlobStar;
-function isAffectDepthOfReadingPattern(pattern) {
- const basename = path.basename(pattern);
- return endsWithSlashGlobStar(pattern) || isStaticPattern(basename);
-}
-exports.isAffectDepthOfReadingPattern = isAffectDepthOfReadingPattern;
-function expandPatternsWithBraceExpansion(patterns) {
- return patterns.reduce((collection, pattern) => {
- return collection.concat(expandBraceExpansion(pattern));
- }, []);
-}
-exports.expandPatternsWithBraceExpansion = expandPatternsWithBraceExpansion;
-function expandBraceExpansion(pattern) {
- return micromatch.braces(pattern, {
- expand: true,
- nodupes: true
- });
-}
-exports.expandBraceExpansion = expandBraceExpansion;
-function getPatternParts(pattern, options) {
- let { parts } = picomatch.scan(pattern, Object.assign(Object.assign({}, options), { parts: true }));
- /**
- * The scan method returns an empty array in some cases.
- * See micromatch/picomatch#58 for more details.
- */
- if (parts.length === 0) {
- parts = [pattern];
- }
- /**
- * The scan method does not return an empty part for the pattern with a forward slash.
- * This is another part of micromatch/picomatch#58.
- */
- if (parts[0].startsWith('/')) {
- parts[0] = parts[0].slice(1);
- parts.unshift('');
- }
- return parts;
-}
-exports.getPatternParts = getPatternParts;
-function makeRe(pattern, options) {
- return micromatch.makeRe(pattern, options);
-}
-exports.makeRe = makeRe;
-function convertPatternsToRe(patterns, options) {
- return patterns.map((pattern) => makeRe(pattern, options));
-}
-exports.convertPatternsToRe = convertPatternsToRe;
-function matchAny(entry, patternsRe) {
- return patternsRe.some((patternRe) => patternRe.test(entry));
+ setSocketKeepAlive() {
+ // HTTP2 sockets cannot be malformed, do nothing.
+ }
+
+ setTimeout(ms, callback) {
+ const applyTimeout = () => this._request.setTimeout(ms, callback);
+
+ if (this._request) {
+ applyTimeout();
+ } else {
+ this[kJobs].push(applyTimeout);
+ }
+
+ return this;
+ }
+
+ get maxHeadersCount() {
+ if (!this.destroyed && this._request) {
+ return this._request.session.localSettings.maxHeaderListSize;
+ }
+
+ return undefined;
+ }
+
+ set maxHeadersCount(_value) {
+ // Updating HTTP2 settings would affect all requests, do nothing.
+ }
}
-exports.matchAny = matchAny;
+
+module.exports = ClientRequest;
/***/ }),
-/***/ 8382:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+/***/ 2575:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.merge = void 0;
-const merge2 = __nccwpck_require__(2578);
-function merge(streams) {
- const mergedStream = merge2(streams);
- streams.forEach((stream) => {
- stream.once('error', (error) => mergedStream.emit('error', error));
- });
- mergedStream.once('close', () => propagateCloseEventToSources(streams));
- mergedStream.once('end', () => propagateCloseEventToSources(streams));
- return mergedStream;
-}
-exports.merge = merge;
-function propagateCloseEventToSources(streams) {
- streams.forEach((stream) => stream.emit('close'));
-}
+const {Readable} = __nccwpck_require__(2413);
+class IncomingMessage extends Readable {
+ constructor(socket, highWaterMark) {
+ super({
+ highWaterMark,
+ autoDestroy: false
+ });
-/***/ }),
+ this.statusCode = null;
+ this.statusMessage = '';
+ this.httpVersion = '2.0';
+ this.httpVersionMajor = 2;
+ this.httpVersionMinor = 0;
+ this.headers = {};
+ this.trailers = {};
+ this.req = null;
-/***/ 2203:
-/***/ ((__unused_webpack_module, exports) => {
+ this.aborted = false;
+ this.complete = false;
+ this.upgrade = null;
-"use strict";
+ this.rawHeaders = [];
+ this.rawTrailers = [];
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.isEmpty = exports.isString = void 0;
-function isString(input) {
- return typeof input === 'string';
-}
-exports.isString = isString;
-function isEmpty(input) {
- return input === '';
+ this.socket = socket;
+ this.connection = socket;
+
+ this._dumped = false;
+ }
+
+ _destroy(error) {
+ this.req._request.destroy(error);
+ }
+
+ setTimeout(ms, callback) {
+ this.req.setTimeout(ms, callback);
+ return this;
+ }
+
+ _dump() {
+ if (!this._dumped) {
+ this._dumped = true;
+
+ this.removeAllListeners('data');
+ this.resume();
+ }
+ }
+
+ _read() {
+ if (this.req) {
+ this.req._request.resume();
+ }
+ }
}
-exports.isEmpty = isEmpty;
+
+module.exports = IncomingMessage;
/***/ }),
-/***/ 7340:
+/***/ 4645:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
+const http2 = __nccwpck_require__(7565);
+const agent = __nccwpck_require__(9898);
+const ClientRequest = __nccwpck_require__(9632);
+const IncomingMessage = __nccwpck_require__(2575);
+const auto = __nccwpck_require__(7167);
-var reusify = __nccwpck_require__(2113)
-
-function fastqueue (context, worker, concurrency) {
- if (typeof context === 'function') {
- concurrency = worker
- worker = context
- context = null
- }
-
- if (concurrency < 1) {
- throw new Error('fastqueue concurrency must be greater than 1')
- }
+const request = (url, options, callback) => {
+ return new ClientRequest(url, options, callback);
+};
- var cache = reusify(Task)
- var queueHead = null
- var queueTail = null
- var _running = 0
- var errorHandler = null
+const get = (url, options, callback) => {
+ // eslint-disable-next-line unicorn/prevent-abbreviations
+ const req = new ClientRequest(url, options, callback);
+ req.end();
- var self = {
- push: push,
- drain: noop,
- saturated: noop,
- pause: pause,
- paused: false,
- concurrency: concurrency,
- running: running,
- resume: resume,
- idle: idle,
- length: length,
- getQueue: getQueue,
- unshift: unshift,
- empty: noop,
- kill: kill,
- killAndDrain: killAndDrain,
- error: error
- }
+ return req;
+};
- return self
+module.exports = {
+ ...http2,
+ ClientRequest,
+ IncomingMessage,
+ ...agent,
+ request,
+ get,
+ auto
+};
- function running () {
- return _running
- }
- function pause () {
- self.paused = true
- }
+/***/ }),
- function length () {
- var current = queueHead
- var counter = 0
+/***/ 1982:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
- while (current) {
- current = current.next
- counter++
- }
+"use strict";
- return counter
- }
+const net = __nccwpck_require__(1631);
+/* istanbul ignore file: https://github.com/nodejs/node/blob/v13.0.1/lib/_http_agent.js */
- function getQueue () {
- var current = queueHead
- var tasks = []
+module.exports = options => {
+ let servername = options.host;
+ const hostHeader = options.headers && options.headers.host;
- while (current) {
- tasks.push(current.value)
- current = current.next
- }
+ if (hostHeader) {
+ if (hostHeader.startsWith('[')) {
+ const index = hostHeader.indexOf(']');
+ if (index === -1) {
+ servername = hostHeader;
+ } else {
+ servername = hostHeader.slice(1, -1);
+ }
+ } else {
+ servername = hostHeader.split(':', 1)[0];
+ }
+ }
- return tasks
- }
+ if (net.isIP(servername)) {
+ return '';
+ }
- function resume () {
- if (!self.paused) return
- self.paused = false
- for (var i = 0; i < self.concurrency; i++) {
- _running++
- release()
- }
- }
+ return servername;
+};
- function idle () {
- return _running === 0 && self.length() === 0
- }
- function push (value, done) {
- var current = cache.get()
+/***/ }),
- current.context = context
- current.release = release
- current.value = value
- current.callback = done || noop
- current.errorHandler = errorHandler
+/***/ 7087:
+/***/ ((module) => {
- if (_running === self.concurrency || self.paused) {
- if (queueTail) {
- queueTail.next = current
- queueTail = current
- } else {
- queueHead = current
- queueTail = current
- self.saturated()
- }
- } else {
- _running++
- worker.call(context, current.value, current.worked)
- }
- }
+"use strict";
- function unshift (value, done) {
- var current = cache.get()
+/* istanbul ignore file: https://github.com/nodejs/node/blob/master/lib/internal/errors.js */
- current.context = context
- current.release = release
- current.value = value
- current.callback = done || noop
+const makeError = (Base, key, getMessage) => {
+ module.exports[key] = class NodeError extends Base {
+ constructor(...args) {
+ super(typeof getMessage === 'string' ? getMessage : getMessage(args));
+ this.name = `${super.name} [${key}]`;
+ this.code = key;
+ }
+ };
+};
- if (_running === self.concurrency || self.paused) {
- if (queueHead) {
- current.next = queueHead
- queueHead = current
- } else {
- queueHead = current
- queueTail = current
- self.saturated()
- }
- } else {
- _running++
- worker.call(context, current.value, current.worked)
- }
- }
+makeError(TypeError, 'ERR_INVALID_ARG_TYPE', args => {
+ const type = args[0].includes('.') ? 'property' : 'argument';
- function release (holder) {
- if (holder) {
- cache.release(holder)
- }
- var next = queueHead
- if (next) {
- if (!self.paused) {
- if (queueTail === queueHead) {
- queueTail = null
- }
- queueHead = next.next
- next.next = null
- worker.call(context, next.value, next.worked)
- if (queueTail === null) {
- self.empty()
- }
- } else {
- _running--
- }
- } else if (--_running === 0) {
- self.drain()
- }
- }
+ let valid = args[1];
+ const isManyTypes = Array.isArray(valid);
- function kill () {
- queueHead = null
- queueTail = null
- self.drain = noop
- }
+ if (isManyTypes) {
+ valid = `${valid.slice(0, -1).join(', ')} or ${valid.slice(-1)}`;
+ }
- function killAndDrain () {
- queueHead = null
- queueTail = null
- self.drain()
- self.drain = noop
- }
+ return `The "${args[0]}" ${type} must be ${isManyTypes ? 'one of' : 'of'} type ${valid}. Received ${typeof args[2]}`;
+});
- function error (handler) {
- errorHandler = handler
- }
-}
+makeError(TypeError, 'ERR_INVALID_PROTOCOL', args => {
+ return `Protocol "${args[0]}" not supported. Expected "${args[1]}"`;
+});
-function noop () {}
+makeError(Error, 'ERR_HTTP_HEADERS_SENT', args => {
+ return `Cannot ${args[0]} headers after they are sent to the client`;
+});
-function Task () {
- this.value = null
- this.callback = noop
- this.next = null
- this.release = noop
- this.context = null
- this.errorHandler = null
+makeError(TypeError, 'ERR_INVALID_HTTP_TOKEN', args => {
+ return `${args[0]} must be a valid HTTP token [${args[1]}]`;
+});
- var self = this
+makeError(TypeError, 'ERR_HTTP_INVALID_HEADER_VALUE', args => {
+ return `Invalid value "${args[0]}" for header "${args[1]}"`;
+});
- this.worked = function worked (err, result) {
- var callback = self.callback
- var errorHandler = self.errorHandler
- var val = self.value
- self.value = null
- self.callback = noop
- if (self.errorHandler) {
- errorHandler(err, val)
- }
- callback.call(self.context, err, result)
- self.release(self)
- }
-}
+makeError(TypeError, 'ERR_INVALID_CHAR', args => {
+ return `Invalid character in ${args[0]} [${args[1]}]`;
+});
+
+
+/***/ }),
+
+/***/ 1199:
+/***/ ((module) => {
+
+"use strict";
-module.exports = fastqueue
+
+module.exports = header => {
+ switch (header) {
+ case ':method':
+ case ':scheme':
+ case ':authority':
+ case ':path':
+ return true;
+ default:
+ return false;
+ }
+};
/***/ }),
-/***/ 4655:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+/***/ 1818:
+/***/ ((module) => {
"use strict";
-var isGlob = __nccwpck_require__(4466);
-var pathPosixDirname = __nccwpck_require__(5622).posix.dirname;
-var isWin32 = __nccwpck_require__(2087).platform() === 'win32';
+module.exports = (from, to, events) => {
+ for (const event of events) {
+ from.on(event, (...args) => to.emit(event, ...args));
+ }
+};
-var slash = '/';
-var backslash = /\\/g;
-var enclosure = /[\{\[].*[\/]*.*[\}\]]$/;
-var globby = /(^|[^\\])([\{\[]|\([^\)]+$)/;
-var escaped = /\\([\!\*\?\|\[\]\(\)\{\}])/g;
-/**
- * @param {string} str
- * @param {Object} opts
- * @param {boolean} [opts.flipBackslashes=true]
- */
-module.exports = function globParent(str, opts) {
- var options = Object.assign({ flipBackslashes: true }, opts);
+/***/ }),
- // flip windows path separators
- if (options.flipBackslashes && isWin32 && str.indexOf(slash) < 0) {
- str = str.replace(backslash, slash);
- }
+/***/ 2686:
+/***/ ((module) => {
- // special case for strings ending in enclosure containing path separator
- if (enclosure.test(str)) {
- str += slash;
- }
+"use strict";
- // preserves full path in case of trailing path separator
- str += 'a';
+/* istanbul ignore file: https://github.com/nodejs/node/blob/a91293d4d9ab403046ab5eb022332e4e3d249bd3/lib/internal/url.js#L1257 */
+
+module.exports = url => {
+ const options = {
+ protocol: url.protocol,
+ hostname: typeof url.hostname === 'string' && url.hostname.startsWith('[') ? url.hostname.slice(1, -1) : url.hostname,
+ host: url.host,
+ hash: url.hash,
+ search: url.search,
+ pathname: url.pathname,
+ href: url.href,
+ path: `${url.pathname || ''}${url.search || ''}`
+ };
- // remove path parts that are globby
- do {
- str = pathPosixDirname(str);
- } while (isGlob(str) || globby.test(str));
+ if (typeof url.port === 'string' && url.port.length !== 0) {
+ options.port = Number(url.port);
+ }
- // remove escape chars and return result
- return str.replace(escaped, '$1');
+ if (url.username || url.password) {
+ options.auth = `${url.username || ''}:${url.password || ''}`;
+ }
+
+ return options;
};
@@ -10602,6 +20088,208 @@ module.exports = function isGlob(str, options) {
};
+/***/ }),
+
+/***/ 2820:
+/***/ ((__unused_webpack_module, exports) => {
+
+//TODO: handle reviver/dehydrate function like normal
+//and handle indentation, like normal.
+//if anyone needs this... please send pull request.
+
+exports.stringify = function stringify (o) {
+ if('undefined' == typeof o) return o
+
+ if(o && Buffer.isBuffer(o))
+ return JSON.stringify(':base64:' + o.toString('base64'))
+
+ if(o && o.toJSON)
+ o = o.toJSON()
+
+ if(o && 'object' === typeof o) {
+ var s = ''
+ var array = Array.isArray(o)
+ s = array ? '[' : '{'
+ var first = true
+
+ for(var k in o) {
+ var ignore = 'function' == typeof o[k] || (!array && 'undefined' === typeof o[k])
+ if(Object.hasOwnProperty.call(o, k) && !ignore) {
+ if(!first)
+ s += ','
+ first = false
+ if (array) {
+ if(o[k] == undefined)
+ s += 'null'
+ else
+ s += stringify(o[k])
+ } else if (o[k] !== void(0)) {
+ s += stringify(k) + ':' + stringify(o[k])
+ }
+ }
+ }
+
+ s += array ? ']' : '}'
+
+ return s
+ } else if ('string' === typeof o) {
+ return JSON.stringify(/^:/.test(o) ? ':' + o : o)
+ } else if ('undefined' === typeof o) {
+ return 'null';
+ } else
+ return JSON.stringify(o)
+}
+
+exports.parse = function (s) {
+ return JSON.parse(s, function (key, value) {
+ if('string' === typeof value) {
+ if(/^:base64:/.test(value))
+ return Buffer.from(value.substring(8), 'base64')
+ else
+ return /^:/.test(value) ? value.substring(1) : value
+ }
+ return value
+ })
+}
+
+
+/***/ }),
+
+/***/ 1531:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const EventEmitter = __nccwpck_require__(8614);
+const JSONB = __nccwpck_require__(2820);
+
+const loadStore = opts => {
+ const adapters = {
+ redis: '@keyv/redis',
+ mongodb: '@keyv/mongo',
+ mongo: '@keyv/mongo',
+ sqlite: '@keyv/sqlite',
+ postgresql: '@keyv/postgres',
+ postgres: '@keyv/postgres',
+ mysql: '@keyv/mysql'
+ };
+ if (opts.adapter || opts.uri) {
+ const adapter = opts.adapter || /^[^:]*/.exec(opts.uri)[0];
+ return new (require(adapters[adapter]))(opts);
+ }
+
+ return new Map();
+};
+
+class Keyv extends EventEmitter {
+ constructor(uri, opts) {
+ super();
+ this.opts = Object.assign(
+ {
+ namespace: 'keyv',
+ serialize: JSONB.stringify,
+ deserialize: JSONB.parse
+ },
+ (typeof uri === 'string') ? { uri } : uri,
+ opts
+ );
+
+ if (!this.opts.store) {
+ const adapterOpts = Object.assign({}, this.opts);
+ this.opts.store = loadStore(adapterOpts);
+ }
+
+ if (typeof this.opts.store.on === 'function') {
+ this.opts.store.on('error', err => this.emit('error', err));
+ }
+
+ this.opts.store.namespace = this.opts.namespace;
+ }
+
+ _getKeyPrefix(key) {
+ return `${this.opts.namespace}:${key}`;
+ }
+
+ get(key, opts) {
+ const keyPrefixed = this._getKeyPrefix(key);
+ const { store } = this.opts;
+ return Promise.resolve()
+ .then(() => store.get(keyPrefixed))
+ .then(data => {
+ return (typeof data === 'string') ? this.opts.deserialize(data) : data;
+ })
+ .then(data => {
+ if (data === undefined) {
+ return undefined;
+ }
+
+ if (typeof data.expires === 'number' && Date.now() > data.expires) {
+ this.delete(key);
+ return undefined;
+ }
+
+ return (opts && opts.raw) ? data : data.value;
+ });
+ }
+
+ set(key, value, ttl) {
+ const keyPrefixed = this._getKeyPrefix(key);
+ if (typeof ttl === 'undefined') {
+ ttl = this.opts.ttl;
+ }
+
+ if (ttl === 0) {
+ ttl = undefined;
+ }
+
+ const { store } = this.opts;
+
+ return Promise.resolve()
+ .then(() => {
+ const expires = (typeof ttl === 'number') ? (Date.now() + ttl) : null;
+ value = { value, expires };
+ return this.opts.serialize(value);
+ })
+ .then(value => store.set(keyPrefixed, value, ttl))
+ .then(() => true);
+ }
+
+ delete(key) {
+ const keyPrefixed = this._getKeyPrefix(key);
+ const { store } = this.opts;
+ return Promise.resolve()
+ .then(() => store.delete(keyPrefixed));
+ }
+
+ clear() {
+ const { store } = this.opts;
+ return Promise.resolve()
+ .then(() => store.clear());
+ }
+}
+
+module.exports = Keyv;
+
+
+/***/ }),
+
+/***/ 9662:
+/***/ ((module) => {
+
+"use strict";
+
+module.exports = object => {
+ const result = {};
+
+ for (const [key, value] of Object.entries(object)) {
+ result[key.toLowerCase()] = value;
+ }
+
+ return result;
+};
+
+
/***/ }),
/***/ 2578:
@@ -10754,6 +20442,46 @@ function pauseStreams (streams, options) {
}
+/***/ }),
+
+/***/ 2610:
+/***/ ((module) => {
+
+"use strict";
+
+
+// We define these manually to ensure they're always copied
+// even if they would move up the prototype chain
+// https://nodejs.org/api/http.html#http_class_http_incomingmessage
+const knownProps = [
+ 'destroy',
+ 'setTimeout',
+ 'socket',
+ 'headers',
+ 'trailers',
+ 'rawHeaders',
+ 'statusCode',
+ 'httpVersion',
+ 'httpVersionMinor',
+ 'httpVersionMajor',
+ 'rawTrailers',
+ 'statusMessage'
+];
+
+module.exports = (fromStream, toStream) => {
+ const fromProps = new Set(Object.keys(fromStream).concat(knownProps));
+
+ for (const prop of fromProps) {
+ // Don't overwrite existing properties
+ if (prop in toStream) {
+ continue;
+ }
+
+ toStream[prop] = typeof fromStream[prop] === 'function' ? fromStream[prop].bind(fromStream) : fromStream[prop];
+ }
+};
+
+
/***/ }),
/***/ 467:
@@ -12373,42 +22101,271 @@ function fetch(url, opts) {
return;
}
- // for br
- if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {
- body = body.pipe(zlib.createBrotliDecompress());
- response = new Response(body, response_options);
- resolve(response);
- return;
+ // for br
+ if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {
+ body = body.pipe(zlib.createBrotliDecompress());
+ response = new Response(body, response_options);
+ resolve(response);
+ return;
+ }
+
+ // otherwise, use response as-is
+ response = new Response(body, response_options);
+ resolve(response);
+ });
+
+ writeToStream(req, request);
+ });
+}
+/**
+ * Redirect code matching
+ *
+ * @param Number code Status code
+ * @return Boolean
+ */
+fetch.isRedirect = function (code) {
+ return code === 301 || code === 302 || code === 303 || code === 307 || code === 308;
+};
+
+// expose Promise
+fetch.Promise = global.Promise;
+
+module.exports = exports = fetch;
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.default = exports;
+exports.Headers = Headers;
+exports.Request = Request;
+exports.Response = Response;
+exports.FetchError = FetchError;
+
+
+/***/ }),
+
+/***/ 7952:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+// TODO: Use the `URL` global when targeting Node.js 10
+const URLParser = typeof URL === 'undefined' ? __nccwpck_require__(8835).URL : URL;
+
+// https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs
+const DATA_URL_DEFAULT_MIME_TYPE = 'text/plain';
+const DATA_URL_DEFAULT_CHARSET = 'us-ascii';
+
+const testParameter = (name, filters) => {
+ return filters.some(filter => filter instanceof RegExp ? filter.test(name) : filter === name);
+};
+
+const normalizeDataURL = (urlString, {stripHash}) => {
+ const parts = urlString.match(/^data:(.*?),(.*?)(?:#(.*))?$/);
+
+ if (!parts) {
+ throw new Error(`Invalid URL: ${urlString}`);
+ }
+
+ const mediaType = parts[1].split(';');
+ const body = parts[2];
+ const hash = stripHash ? '' : parts[3];
+
+ let base64 = false;
+
+ if (mediaType[mediaType.length - 1] === 'base64') {
+ mediaType.pop();
+ base64 = true;
+ }
+
+ // Lowercase MIME type
+ const mimeType = (mediaType.shift() || '').toLowerCase();
+ const attributes = mediaType
+ .map(attribute => {
+ let [key, value = ''] = attribute.split('=').map(string => string.trim());
+
+ // Lowercase `charset`
+ if (key === 'charset') {
+ value = value.toLowerCase();
+
+ if (value === DATA_URL_DEFAULT_CHARSET) {
+ return '';
+ }
+ }
+
+ return `${key}${value ? `=${value}` : ''}`;
+ })
+ .filter(Boolean);
+
+ const normalizedMediaType = [
+ ...attributes
+ ];
+
+ if (base64) {
+ normalizedMediaType.push('base64');
+ }
+
+ if (normalizedMediaType.length !== 0 || (mimeType && mimeType !== DATA_URL_DEFAULT_MIME_TYPE)) {
+ normalizedMediaType.unshift(mimeType);
+ }
+
+ return `data:${normalizedMediaType.join(';')},${base64 ? body.trim() : body}${hash ? `#${hash}` : ''}`;
+};
+
+const normalizeUrl = (urlString, options) => {
+ options = {
+ defaultProtocol: 'http:',
+ normalizeProtocol: true,
+ forceHttp: false,
+ forceHttps: false,
+ stripAuthentication: true,
+ stripHash: false,
+ stripWWW: true,
+ removeQueryParameters: [/^utm_\w+/i],
+ removeTrailingSlash: true,
+ removeDirectoryIndex: false,
+ sortQueryParameters: true,
+ ...options
+ };
+
+ // TODO: Remove this at some point in the future
+ if (Reflect.has(options, 'normalizeHttps')) {
+ throw new Error('options.normalizeHttps is renamed to options.forceHttp');
+ }
+
+ if (Reflect.has(options, 'normalizeHttp')) {
+ throw new Error('options.normalizeHttp is renamed to options.forceHttps');
+ }
+
+ if (Reflect.has(options, 'stripFragment')) {
+ throw new Error('options.stripFragment is renamed to options.stripHash');
+ }
+
+ urlString = urlString.trim();
+
+ // Data URL
+ if (/^data:/i.test(urlString)) {
+ return normalizeDataURL(urlString, options);
+ }
+
+ const hasRelativeProtocol = urlString.startsWith('//');
+ const isRelativeUrl = !hasRelativeProtocol && /^\.*\//.test(urlString);
+
+ // Prepend protocol
+ if (!isRelativeUrl) {
+ urlString = urlString.replace(/^(?!(?:\w+:)?\/\/)|^\/\//, options.defaultProtocol);
+ }
+
+ const urlObj = new URLParser(urlString);
+
+ if (options.forceHttp && options.forceHttps) {
+ throw new Error('The `forceHttp` and `forceHttps` options cannot be used together');
+ }
+
+ if (options.forceHttp && urlObj.protocol === 'https:') {
+ urlObj.protocol = 'http:';
+ }
+
+ if (options.forceHttps && urlObj.protocol === 'http:') {
+ urlObj.protocol = 'https:';
+ }
+
+ // Remove auth
+ if (options.stripAuthentication) {
+ urlObj.username = '';
+ urlObj.password = '';
+ }
+
+ // Remove hash
+ if (options.stripHash) {
+ urlObj.hash = '';
+ }
+
+ // Remove duplicate slashes if not preceded by a protocol
+ if (urlObj.pathname) {
+ // TODO: Use the following instead when targeting Node.js 10
+	// `urlObj.pathname = urlObj.pathname.replace(/(?<!https?:)\/{2,}/g, '/');`
+	urlObj.pathname = urlObj.pathname.replace(/((?!:).|^)\/{2,}/g, (_, p1) => {
+ if (/^(?!\/)/g.test(p1)) {
+ return `${p1}/`;
+ }
+
+ return '/';
+ });
+ }
+
+ // Decode URI octets
+ if (urlObj.pathname) {
+ urlObj.pathname = decodeURI(urlObj.pathname);
+ }
+
+ // Remove directory index
+ if (options.removeDirectoryIndex === true) {
+ options.removeDirectoryIndex = [/^index\.[a-z]+$/];
+ }
+
+ if (Array.isArray(options.removeDirectoryIndex) && options.removeDirectoryIndex.length > 0) {
+ let pathComponents = urlObj.pathname.split('/');
+ const lastComponent = pathComponents[pathComponents.length - 1];
+
+ if (testParameter(lastComponent, options.removeDirectoryIndex)) {
+ pathComponents = pathComponents.slice(0, pathComponents.length - 1);
+ urlObj.pathname = pathComponents.slice(1).join('/') + '/';
+ }
+ }
+
+ if (urlObj.hostname) {
+ // Remove trailing dot
+ urlObj.hostname = urlObj.hostname.replace(/\.$/, '');
+
+ // Remove `www.`
+ if (options.stripWWW && /^www\.([a-z\-\d]{2,63})\.([a-z.]{2,5})$/.test(urlObj.hostname)) {
+ // Each label should be at most 63 characters long (min: 2).
+ // The extension should be at most 5 characters long (min: 2).
+ // Source: https://en.wikipedia.org/wiki/Hostname#Restrictions_on_valid_host_names
+ urlObj.hostname = urlObj.hostname.replace(/^www\./, '');
+ }
+ }
+
+ // Remove query unwanted parameters
+ if (Array.isArray(options.removeQueryParameters)) {
+ for (const key of [...urlObj.searchParams.keys()]) {
+ if (testParameter(key, options.removeQueryParameters)) {
+ urlObj.searchParams.delete(key);
}
+ }
+ }
- // otherwise, use response as-is
- response = new Response(body, response_options);
- resolve(response);
- });
+ // Sort query parameters
+ if (options.sortQueryParameters) {
+ urlObj.searchParams.sort();
+ }
- writeToStream(req, request);
- });
-}
-/**
- * Redirect code matching
- *
- * @param Number code Status code
- * @return Boolean
- */
-fetch.isRedirect = function (code) {
- return code === 301 || code === 302 || code === 303 || code === 307 || code === 308;
-};
+ if (options.removeTrailingSlash) {
+ urlObj.pathname = urlObj.pathname.replace(/\/$/, '');
+ }
-// expose Promise
-fetch.Promise = global.Promise;
+ // Take advantage of many of the Node `url` normalizations
+ urlString = urlObj.toString();
-module.exports = exports = fetch;
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.default = exports;
-exports.Headers = Headers;
-exports.Request = Request;
-exports.Response = Response;
-exports.FetchError = FetchError;
+ // Remove ending `/`
+ if ((options.removeTrailingSlash || urlObj.pathname === '/') && urlObj.hash === '') {
+ urlString = urlString.replace(/\/$/, '');
+ }
+
+ // Restore relative protocol, if applicable
+ if (hasRelativeProtocol && !options.normalizeProtocol) {
+ urlString = urlString.replace(/^http:\/\//, '//');
+ }
+
+ // Remove http/https
+ if (options.stripProtocol) {
+ urlString = urlString.replace(/^(?:https?:)?\/\//, '');
+ }
+
+ return urlString;
+};
+
+module.exports = normalizeUrl;
+// TODO: Remove this for the next major release
+module.exports.default = normalizeUrl;
/***/ }),
@@ -12460,6 +22417,121 @@ function onceStrict (fn) {
}
+/***/ }),
+
+/***/ 9072:
+/***/ ((module) => {
+
+"use strict";
+
+
+class CancelError extends Error {
+ constructor(reason) {
+ super(reason || 'Promise was canceled');
+ this.name = 'CancelError';
+ }
+
+ get isCanceled() {
+ return true;
+ }
+}
+
+class PCancelable {
+ static fn(userFn) {
+ return (...arguments_) => {
+ return new PCancelable((resolve, reject, onCancel) => {
+ arguments_.push(onCancel);
+ // eslint-disable-next-line promise/prefer-await-to-then
+ userFn(...arguments_).then(resolve, reject);
+ });
+ };
+ }
+
+ constructor(executor) {
+ this._cancelHandlers = [];
+ this._isPending = true;
+ this._isCanceled = false;
+ this._rejectOnCancel = true;
+
+ this._promise = new Promise((resolve, reject) => {
+ this._reject = reject;
+
+ const onResolve = value => {
+ this._isPending = false;
+ resolve(value);
+ };
+
+ const onReject = error => {
+ this._isPending = false;
+ reject(error);
+ };
+
+ const onCancel = handler => {
+ if (!this._isPending) {
+ throw new Error('The `onCancel` handler was attached after the promise settled.');
+ }
+
+ this._cancelHandlers.push(handler);
+ };
+
+ Object.defineProperties(onCancel, {
+ shouldReject: {
+ get: () => this._rejectOnCancel,
+ set: boolean => {
+ this._rejectOnCancel = boolean;
+ }
+ }
+ });
+
+ return executor(onResolve, onReject, onCancel);
+ });
+ }
+
+ then(onFulfilled, onRejected) {
+ // eslint-disable-next-line promise/prefer-await-to-then
+ return this._promise.then(onFulfilled, onRejected);
+ }
+
+ catch(onRejected) {
+ return this._promise.catch(onRejected);
+ }
+
+ finally(onFinally) {
+ return this._promise.finally(onFinally);
+ }
+
+ cancel(reason) {
+ if (!this._isPending || this._isCanceled) {
+ return;
+ }
+
+ if (this._cancelHandlers.length > 0) {
+ try {
+ for (const handler of this._cancelHandlers) {
+ handler();
+ }
+ } catch (error) {
+ this._reject(error);
+ }
+ }
+
+ this._isCanceled = true;
+ if (this._rejectOnCancel) {
+ this._reject(new CancelError(reason));
+ }
+ }
+
+ get isCanceled() {
+ return this._isCanceled;
+ }
+}
+
+Object.setPrototypeOf(PCancelable.prototype, Promise.prototype);
+
+module.exports = PCancelable;
+module.exports.CancelError = CancelError;
+
+
/***/ }),
/***/ 8569:
@@ -14554,6 +24626,292 @@ exports.wrapOutput = (input, state = {}, options = {}) => {
};
+/***/ }),
+
+/***/ 8341:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var once = __nccwpck_require__(1223)
+var eos = __nccwpck_require__(1205)
+var fs = __nccwpck_require__(5747) // we only need fs to get the ReadStream and WriteStream prototypes
+
+var noop = function () {}
+var ancient = /^v?\.0/.test(process.version)
+
+var isFn = function (fn) {
+ return typeof fn === 'function'
+}
+
+var isFS = function (stream) {
+ if (!ancient) return false // newer node versions do not need to care about fs in a special way
+ if (!fs) return false // browser
+ return (stream instanceof (fs.ReadStream || noop) || stream instanceof (fs.WriteStream || noop)) && isFn(stream.close)
+}
+
+var isRequest = function (stream) {
+ return stream.setHeader && isFn(stream.abort)
+}
+
+var destroyer = function (stream, reading, writing, callback) {
+ callback = once(callback)
+
+ var closed = false
+ stream.on('close', function () {
+ closed = true
+ })
+
+ eos(stream, {readable: reading, writable: writing}, function (err) {
+ if (err) return callback(err)
+ closed = true
+ callback()
+ })
+
+ var destroyed = false
+ return function (err) {
+ if (closed) return
+ if (destroyed) return
+ destroyed = true
+
+ if (isFS(stream)) return stream.close(noop) // use close for fs streams to avoid fd leaks
+ if (isRequest(stream)) return stream.abort() // request.destroy just does .end - .abort is what we want
+
+ if (isFn(stream.destroy)) return stream.destroy()
+
+ callback(err || new Error('stream was destroyed'))
+ }
+}
+
+var call = function (fn) {
+ fn()
+}
+
+var pipe = function (from, to) {
+ return from.pipe(to)
+}
+
+var pump = function () {
+ var streams = Array.prototype.slice.call(arguments)
+ var callback = isFn(streams[streams.length - 1] || noop) && streams.pop() || noop
+
+ if (Array.isArray(streams[0])) streams = streams[0]
+ if (streams.length < 2) throw new Error('pump requires two streams per minimum')
+
+ var error
+ var destroys = streams.map(function (stream, i) {
+ var reading = i < streams.length - 1
+ var writing = i > 0
+ return destroyer(stream, reading, writing, function (err) {
+ if (!error) error = err
+ if (err) destroys.forEach(call)
+ if (reading) return
+ destroys.forEach(call)
+ callback(error)
+ })
+ })
+
+ return streams.reduce(pipe)
+}
+
+module.exports = pump
+
+
+/***/ }),
+
+/***/ 9273:
+/***/ ((module) => {
+
+"use strict";
+
+
+class QuickLRU {
+ constructor(options = {}) {
+ if (!(options.maxSize && options.maxSize > 0)) {
+ throw new TypeError('`maxSize` must be a number greater than 0');
+ }
+
+ this.maxSize = options.maxSize;
+ this.onEviction = options.onEviction;
+ this.cache = new Map();
+ this.oldCache = new Map();
+ this._size = 0;
+ }
+
+ _set(key, value) {
+ this.cache.set(key, value);
+ this._size++;
+
+ if (this._size >= this.maxSize) {
+ this._size = 0;
+
+ if (typeof this.onEviction === 'function') {
+ for (const [key, value] of this.oldCache.entries()) {
+ this.onEviction(key, value);
+ }
+ }
+
+ this.oldCache = this.cache;
+ this.cache = new Map();
+ }
+ }
+
+ get(key) {
+ if (this.cache.has(key)) {
+ return this.cache.get(key);
+ }
+
+ if (this.oldCache.has(key)) {
+ const value = this.oldCache.get(key);
+ this.oldCache.delete(key);
+ this._set(key, value);
+ return value;
+ }
+ }
+
+ set(key, value) {
+ if (this.cache.has(key)) {
+ this.cache.set(key, value);
+ } else {
+ this._set(key, value);
+ }
+
+ return this;
+ }
+
+ has(key) {
+ return this.cache.has(key) || this.oldCache.has(key);
+ }
+
+ peek(key) {
+ if (this.cache.has(key)) {
+ return this.cache.get(key);
+ }
+
+ if (this.oldCache.has(key)) {
+ return this.oldCache.get(key);
+ }
+ }
+
+ delete(key) {
+ const deleted = this.cache.delete(key);
+ if (deleted) {
+ this._size--;
+ }
+
+ return this.oldCache.delete(key) || deleted;
+ }
+
+ clear() {
+ this.cache.clear();
+ this.oldCache.clear();
+ this._size = 0;
+ }
+
+ * keys() {
+ for (const [key] of this) {
+ yield key;
+ }
+ }
+
+ * values() {
+ for (const [, value] of this) {
+ yield value;
+ }
+ }
+
+ * [Symbol.iterator]() {
+ for (const item of this.cache) {
+ yield item;
+ }
+
+ for (const item of this.oldCache) {
+ const [key] = item;
+ if (!this.cache.has(key)) {
+ yield item;
+ }
+ }
+ }
+
+ get size() {
+ let oldCacheSize = 0;
+ for (const key of this.oldCache.keys()) {
+ if (!this.cache.has(key)) {
+ oldCacheSize++;
+ }
+ }
+
+ return Math.min(this._size + oldCacheSize, this.maxSize);
+ }
+}
+
+module.exports = QuickLRU;
+
+
+/***/ }),
+
+/***/ 6624:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+const tls = __nccwpck_require__(4016);
+
+module.exports = (options = {}) => new Promise((resolve, reject) => {
+ const socket = tls.connect(options, () => {
+ if (options.resolveSocket) {
+ socket.off('error', reject);
+ resolve({alpnProtocol: socket.alpnProtocol, socket});
+ } else {
+ socket.destroy();
+ resolve({alpnProtocol: socket.alpnProtocol});
+ }
+ });
+
+ socket.on('error', reject);
+});
+
+
+/***/ }),
+
+/***/ 9004:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const Readable = __nccwpck_require__(2413).Readable;
+const lowercaseKeys = __nccwpck_require__(9662);
+
+class Response extends Readable {
+ constructor(statusCode, headers, body, url) {
+ if (typeof statusCode !== 'number') {
+ throw new TypeError('Argument `statusCode` should be a number');
+ }
+ if (typeof headers !== 'object') {
+ throw new TypeError('Argument `headers` should be an object');
+ }
+ if (!(body instanceof Buffer)) {
+ throw new TypeError('Argument `body` should be a buffer');
+ }
+ if (typeof url !== 'string') {
+ throw new TypeError('Argument `url` should be a string');
+ }
+
+ super();
+ this.statusCode = statusCode;
+ this.headers = lowercaseKeys(headers);
+ this.body = body;
+ this.url = url;
+ }
+
+ _read() {
+ this.push(this.body);
+ this.push(null);
+ }
+}
+
+module.exports = Response;
+
+
/***/ }),
/***/ 2113:
@@ -21580,6 +31938,14 @@ function wrappy (fn, cb) {
module.exports = eval("require")("encoding");
+/***/ }),
+
+/***/ 2941:
+/***/ ((module) => {
+
+module.exports = eval("require")("original-fs");
+
+
/***/ }),
/***/ 2357:
@@ -21590,6 +31956,14 @@ module.exports = require("assert");;
/***/ }),
+/***/ 4293:
+/***/ ((module) => {
+
+"use strict";
+module.exports = require("buffer");;
+
+/***/ }),
+
/***/ 3129:
/***/ ((module) => {
@@ -21598,6 +31972,14 @@ module.exports = require("child_process");;
/***/ }),
+/***/ 881:
+/***/ ((module) => {
+
+"use strict";
+module.exports = require("dns");;
+
+/***/ }),
+
/***/ 8614:
/***/ ((module) => {
@@ -21622,6 +32004,14 @@ module.exports = require("http");;
/***/ }),
+/***/ 7565:
+/***/ ((module) => {
+
+"use strict";
+module.exports = require("http2");;
+
+/***/ }),
+
/***/ 7211:
/***/ ((module) => {
diff --git a/dist/index.js.map b/dist/index.js.map
index 4a7bc3e..ffa4d89 100644
--- a/dist/index.js.map
+++ b/dist/index.js.map
@@ -1 +1 @@
-{"version":3,"file":"index.js","sources":["../webpack://test-check/./lib/main.js","../webpack://test-check/./lib/parsers/dart-json/dart-json-parser.js","../webpack://test-check/./lib/parsers/dart-json/dart-json-types.js","../webpack://test-check/./lib/parsers/dotnet-trx/dotnet-trx-parser.js","../webpack://test-check/./lib/parsers/jest-junit/jest-junit-parser.js","../webpack://test-check/./lib/report/get-annotations.js","../webpack://test-check/./lib/report/get-report.js","../webpack://test-check/./lib/test-results.js","../webpack://test-check/./lib/utils/exec.js","../webpack://test-check/./lib/utils/file-utils.js","../webpack://test-check/./lib/utils/git.js","../webpack://test-check/./lib/utils/github-utils.js","../webpack://test-check/./lib/utils/markdown-utils.js","../webpack://test-check/./lib/utils/parse-utils.js","../webpack://test-check/./lib/utils/slugger.js","../webpack://test-check/./node_modules/@actions/core/lib/command.js","../webpack://test-check/./node_modules/@actions/core/lib/core.js","../webpack://test-check/./node_modules/@actions/core/lib/file-command.js","../webpack://test-check/./node_modules/@actions/core/lib/utils.js","../webpack://test-check/./node_modules/@actions/exec/lib/exec.js","../webpack://test-check/./node_modules/@actions/exec/lib/toolrunner.js","../webpack://test-check/./node_modules/@actions/github/lib/context.js","../webpack://test-check/./node_modules/@actions/github/lib/github.js","../webpack://test-check/./node_modules/@actions/github/lib/internal/utils.js","../webpack://test-check/./node_modules/@actions/github/lib/utils.js","../webpack://test-check/./node_modules/@actions/http-client/index.js","../webpack://test-check/./node_modules/@actions/http-client/proxy.js","../webpack://test-check/./node_modules/@actions/io/lib/io-util.js","../webpack://test-check/./node_modules/@actions/io/lib/io.js","../webpack://test-check/./node_modules/@nodelib/fs.scandir/out/adapters/fs.js","../webpack://test-check/./node_modules/@nodelib/fs.scandir/out/constants.js","../webpack://test-check/./node_modules/@nodelib/fs.scandir/out/index.js","../webpack://test-check/./node_modules/@nodelib/fs.scandir/out/providers/async.js","../webpack://test-check/./node_modules/@nodelib/fs.scandir/out/providers/common.js","../webpack://test-check/./node_modules/@nodelib/fs.scandir/out/providers/sync.js","../webpack://test-check/./node_modules/@nodelib/fs.scandir/out/settings.js","../webpack://test-check/./node_modules/@nodelib/fs.scandir/out/utils/fs.js","../webpack://test-check/./node_modules/@nodelib/fs.scandir/out/utils/index.js","../webpack://test-check/./node_modules/@nodelib/fs.stat/out/adapters/fs.js","../webpack://test-check/./node_modules/@nodelib/fs.stat/out/index.js","../webpack://test-check/./node_modules/@nodelib/fs.stat/out/providers/async.js","../webpack://test-check/./node_modules/@nodelib/fs.stat/out/providers/sync.js","../webpack://test-check/./node_modules/@nodelib/fs.stat/out/settings.js","../webpack://test-check/./node_modules/@nodelib/fs.walk/out/index.js","../webpack://test-check/./node_modules/@nodelib/fs.walk/out/providers/async.js","../webpack://test-check/./node_modules/@nodelib/fs.walk/out/providers/stream.js","../webpack://test-check/./node_modules/@nodelib/fs.walk/out/providers/sync.js","../webpack://test-check/./node_modules/@nodelib/fs.walk/out/readers/async.js","../webpack://test-check/./node_modules/@nodelib/fs.walk/out/readers/common.js","../webpack://test-check/./node_modules/@nodelib/fs.walk/out/readers/reader.js","../webpack://test-check/./node_modu
les/@nodelib/fs.walk/out/readers/sync.js","../webpack://test-check/./node_modules/@nodelib/fs.walk/out/settings.js","../webpack://test-check/./node_modules/@octokit/auth-token/dist-node/index.js","../webpack://test-check/./node_modules/@octokit/core/dist-node/index.js","../webpack://test-check/./node_modules/@octokit/endpoint/dist-node/index.js","../webpack://test-check/./node_modules/@octokit/endpoint/node_modules/is-plain-object/dist/is-plain-object.js","../webpack://test-check/./node_modules/@octokit/graphql/dist-node/index.js","../webpack://test-check/./node_modules/@octokit/plugin-paginate-rest/dist-node/index.js","../webpack://test-check/./node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js","../webpack://test-check/./node_modules/@octokit/request-error/dist-node/index.js","../webpack://test-check/./node_modules/@octokit/request/dist-node/index.js","../webpack://test-check/./node_modules/@octokit/request/node_modules/is-plain-object/dist/is-plain-object.js","../webpack://test-check/./node_modules/before-after-hook/index.js","../webpack://test-check/./node_modules/before-after-hook/lib/add.js","../webpack://test-check/./node_modules/before-after-hook/lib/register.js","../webpack://test-check/./node_modules/before-after-hook/lib/remove.js","../webpack://test-check/./node_modules/deprecation/dist-node/index.js","../webpack://test-check/./node_modules/fast-glob/node_modules/braces/index.js","../webpack://test-check/./node_modules/fast-glob/node_modules/braces/lib/compile.js","../webpack://test-check/./node_modules/fast-glob/node_modules/braces/lib/constants.js","../webpack://test-check/./node_modules/fast-glob/node_modules/braces/lib/expand.js","../webpack://test-check/./node_modules/fast-glob/node_modules/braces/lib/parse.js","../webpack://test-check/./node_modules/fast-glob/node_modules/braces/lib/stringify.js","../webpack://test-check/./node_modules/fast-glob/node_modules/braces/lib/utils.js","../webpack://test-check/./node_modules/fast-glob/node_modules/fill-range/index.js","../webpack://test-check/./node_modules/fast-glob/node_modules/is-number/index.js","../webpack://test-check/./node_modules/fast-glob/node_modules/micromatch/index.js","../webpack://test-check/./node_modules/fast-glob/node_modules/to-regex-range/index.js","../webpack://test-check/./node_modules/fast-glob/out/index.js","../webpack://test-check/./node_modules/fast-glob/out/managers/tasks.js","../webpack://test-check/./node_modules/fast-glob/out/providers/async.js","../webpack://test-check/./node_modules/fast-glob/out/providers/filters/deep.js","../webpack://test-check/./node_modules/fast-glob/out/providers/filters/entry.js","../webpack://test-check/./node_modules/fast-glob/out/providers/filters/error.js","../webpack://test-check/./node_modules/fast-glob/out/providers/matchers/matcher.js","../webpack://test-check/./node_modules/fast-glob/out/providers/matchers/partial.js","../webpack://test-check/./node_modules/fast-glob/out/providers/provider.js","../webpack://test-check/./node_modules/fast-glob/out/providers/stream.js","../webpack://test-check/./node_modules/fast-glob/out/providers/sync.js","../webpack://test-check/./node_modules/fast-glob/out/providers/transformers/entry.js","../webpack://test-check/./node_modules/fast-glob/out/readers/reader.js","../webpack://test-check/./node_modules/fast-glob/out/readers/stream.js","../webpack://test-check/./node_modules/fast-glob/out/readers/sync.js","../webpack://test-check/./node_modules/fast-glob/out/settings.js","../webpack://test-check/./node_modules/fast-
glob/out/utils/array.js","../webpack://test-check/./node_modules/fast-glob/out/utils/errno.js","../webpack://test-check/./node_modules/fast-glob/out/utils/fs.js","../webpack://test-check/./node_modules/fast-glob/out/utils/index.js","../webpack://test-check/./node_modules/fast-glob/out/utils/path.js","../webpack://test-check/./node_modules/fast-glob/out/utils/pattern.js","../webpack://test-check/./node_modules/fast-glob/out/utils/stream.js","../webpack://test-check/./node_modules/fast-glob/out/utils/string.js","../webpack://test-check/./node_modules/fastq/queue.js","../webpack://test-check/./node_modules/glob-parent/index.js","../webpack://test-check/./node_modules/is-extglob/index.js","../webpack://test-check/./node_modules/is-glob/index.js","../webpack://test-check/./node_modules/merge2/index.js","../webpack://test-check/./node_modules/node-fetch/lib/index.js","../webpack://test-check/./node_modules/once/once.js","../webpack://test-check/./node_modules/picomatch/index.js","../webpack://test-check/./node_modules/picomatch/lib/constants.js","../webpack://test-check/./node_modules/picomatch/lib/parse.js","../webpack://test-check/./node_modules/picomatch/lib/picomatch.js","../webpack://test-check/./node_modules/picomatch/lib/scan.js","../webpack://test-check/./node_modules/picomatch/lib/utils.js","../webpack://test-check/./node_modules/reusify/reusify.js","../webpack://test-check/./node_modules/run-parallel/index.js","../webpack://test-check/./node_modules/sax/lib/sax.js","../webpack://test-check/./node_modules/tunnel/index.js","../webpack://test-check/./node_modules/tunnel/lib/tunnel.js","../webpack://test-check/./node_modules/universal-user-agent/dist-node/index.js","../webpack://test-check/./node_modules/wrappy/wrappy.js","../webpack://test-check/./node_modules/xml2js/lib/bom.js","../webpack://test-check/./node_modules/xml2js/lib/builder.js","../webpack://test-check/./node_modules/xml2js/lib/defaults.js","../webpack://test-check/./node_modules/xml2js/lib/parser.js","../webpack://test-check/./node_modules/xml2js/lib/processors.js","../webpack://test-check/./node_modules/xml2js/lib/xml2js.js","../webpack://test-check/./node_modules/xmlbuilder/lib/DocumentPosition.js","../webpack://test-check/./node_modules/xmlbuilder/lib/NodeType.js","../webpack://test-check/./node_modules/xmlbuilder/lib/Utility.js","../webpack://test-check/./node_modules/xmlbuilder/lib/WriterState.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLAttribute.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLCData.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLCharacterData.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLComment.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDOMConfiguration.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDOMErrorHandler.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDOMImplementation.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDOMStringList.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDTDAttList.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDTDElement.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDTDEntity.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDTDNotation.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDeclaration.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDocType.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDocument.js","../webpack://test-check/./node_modules
/xmlbuilder/lib/XMLDocumentCB.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDummy.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLElement.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLNamedNodeMap.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLNode.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLNodeList.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLProcessingInstruction.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLRaw.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLStreamWriter.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLStringWriter.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLStringifier.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLText.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLWriterBase.js","../webpack://test-check/./node_modules/xmlbuilder/lib/index.js","../webpack://test-check/./node_modules/@vercel/ncc/dist/ncc/@@notfound.js","../webpack://test-check/external \"assert\"","../webpack://test-check/external \"child_process\"","../webpack://test-check/external \"events\"","../webpack://test-check/external \"fs\"","../webpack://test-check/external \"http\"","../webpack://test-check/external \"https\"","../webpack://test-check/external \"net\"","../webpack://test-check/external \"os\"","../webpack://test-check/external \"path\"","../webpack://test-check/external \"stream\"","../webpack://test-check/external \"string_decoder\"","../webpack://test-check/external \"timers\"","../webpack://test-check/external \"tls\"","../webpack://test-check/external \"url\"","../webpack://test-check/external \"util\"","../webpack://test-check/external \"zlib\"","../webpack://test-check/webpack/bootstrap","../webpack://test-check/webpack/runtime/compat","../webpack://test-check/webpack/startup"],"sourcesContent":["\"use strict\";\r\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n}) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n}));\r\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n}) : function(o, v) {\r\n o[\"default\"] = v;\r\n});\r\nvar __importStar = (this && this.__importStar) || function (mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n};\r\nvar __importDefault = (this && this.__importDefault) || function (mod) {\r\n return (mod && mod.__esModule) ? 
mod : { \"default\": mod };\r\n};\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.getFiles = void 0;\r\nconst core = __importStar(require(\"@actions/core\"));\r\nconst github = __importStar(require(\"@actions/github\"));\r\nconst fs = __importStar(require(\"fs\"));\r\nconst fast_glob_1 = __importDefault(require(\"fast-glob\"));\r\nconst get_annotations_1 = require(\"./report/get-annotations\");\r\nconst get_report_1 = require(\"./report/get-report\");\r\nconst dart_json_parser_1 = require(\"./parsers/dart-json/dart-json-parser\");\r\nconst dotnet_trx_parser_1 = require(\"./parsers/dotnet-trx/dotnet-trx-parser\");\r\nconst jest_junit_parser_1 = require(\"./parsers/jest-junit/jest-junit-parser\");\r\nconst file_utils_1 = require(\"./utils/file-utils\");\r\nconst git_1 = require(\"./utils/git\");\r\nconst github_utils_1 = require(\"./utils/github-utils\");\r\nconst markdown_utils_1 = require(\"./utils/markdown-utils\");\r\nasync function run() {\r\n try {\r\n await main();\r\n }\r\n catch (error) {\r\n core.setFailed(error.message);\r\n }\r\n}\r\nasync function main() {\r\n const name = core.getInput('name', { required: true });\r\n const path = core.getInput('path', { required: true });\r\n const reporter = core.getInput('reporter', { required: true });\r\n const listSuites = core.getInput('list-suites', { required: true });\r\n const listTests = core.getInput('list-tests', { required: true });\r\n const maxAnnotations = parseInt(core.getInput('max-annotations', { required: true }));\r\n const failOnError = core.getInput('fail-on-error', { required: true }) === 'true';\r\n const workDirInput = core.getInput('working-directory', { required: false });\r\n const token = core.getInput('token', { required: true });\r\n if (listSuites !== 'all' && listSuites !== 'failed') {\r\n core.setFailed(`Input parameter 'list-suites' has invalid value`);\r\n return;\r\n }\r\n if (listTests !== 'all' && listTests !== 'failed' && listTests !== 'none') {\r\n core.setFailed(`Input parameter 'list-tests' has invalid value`);\r\n return;\r\n }\r\n if (isNaN(maxAnnotations) || maxAnnotations < 0 || maxAnnotations > 50) {\r\n core.setFailed(`Input parameter 'max-annotations' has invalid value`);\r\n return;\r\n }\r\n if (workDirInput) {\r\n core.info(`Changing directory to '${workDirInput}'`);\r\n process.chdir(workDirInput);\r\n }\r\n const workDir = file_utils_1.normalizeDirPath(process.cwd(), true);\r\n core.info(`Using working-directory '${workDir}'`);\r\n const octokit = github.getOctokit(token);\r\n const sha = github_utils_1.getCheckRunSha();\r\n // We won't need tracked files if we are not going to create annotations\r\n const parseErrors = maxAnnotations > 0;\r\n const trackedFiles = parseErrors ? 
await git_1.listFiles() : [];\r\n const options = {\r\n trackedFiles,\r\n workDir,\r\n parseErrors\r\n };\r\n core.info(`Using test report parser '${reporter}'`);\r\n const parser = getParser(reporter, options);\r\n const files = await getFiles(path);\r\n if (files.length === 0) {\r\n core.setFailed(`No file matches path '${path}'`);\r\n return;\r\n }\r\n const results = [];\r\n for (const file of files) {\r\n core.info(`Processing test report '${file}'`);\r\n const content = await fs.promises.readFile(file, { encoding: 'utf8' });\r\n const tr = await parser.parse(file, content);\r\n results.push(tr);\r\n }\r\n core.info('Creating report summary');\r\n const summary = get_report_1.getReport(results, { listSuites, listTests });\r\n core.info('Creating annotations');\r\n const annotations = get_annotations_1.getAnnotations(results, maxAnnotations);\r\n const isFailed = results.some(tr => tr.result === 'failed');\r\n const conclusion = isFailed ? 'failure' : 'success';\r\n const icon = isFailed ? markdown_utils_1.Icon.fail : markdown_utils_1.Icon.success;\r\n core.info(`Creating check run '${name}' with conclusion '${conclusion}'`);\r\n await octokit.checks.create({\r\n head_sha: sha,\r\n name,\r\n conclusion,\r\n status: 'completed',\r\n output: {\r\n title: `${name} ${icon}`,\r\n summary,\r\n annotations\r\n },\r\n ...github.context.repo\r\n });\r\n const passed = results.reduce((sum, tr) => sum + tr.passed, 0);\r\n const failed = results.reduce((sum, tr) => sum + tr.failed, 0);\r\n const skipped = results.reduce((sum, tr) => sum + tr.skipped, 0);\r\n const time = results.reduce((sum, tr) => sum + tr.time, 0);\r\n core.setOutput('conclusion', conclusion);\r\n core.setOutput('passed', passed);\r\n core.setOutput('failed', failed);\r\n core.setOutput('skipped', skipped);\r\n core.setOutput('time', time);\r\n if (failOnError && isFailed) {\r\n core.setFailed(`Failed test has been found and 'fail-on-error' option is set to ${failOnError}`);\r\n }\r\n}\r\nfunction getParser(reporter, options) {\r\n switch (reporter) {\r\n case 'dart-json':\r\n return new dart_json_parser_1.DartJsonParser(options, 'dart');\r\n case 'dotnet-trx':\r\n return new dotnet_trx_parser_1.DotnetTrxParser(options);\r\n case 'flutter-json':\r\n return new dart_json_parser_1.DartJsonParser(options, 'flutter');\r\n case 'jest-junit':\r\n return new jest_junit_parser_1.JestJunitParser(options);\r\n default:\r\n throw new Error(`Input variable 'reporter' is set to invalid value '${reporter}'`);\r\n }\r\n}\r\nasync function getFiles(pattern) {\r\n const tasks = pattern.split(',').map(async (pat) => fast_glob_1.default(pat, { dot: true }));\r\n const paths = await Promise.all(tasks);\r\n return paths.flat();\r\n}\r\nexports.getFiles = getFiles;\r\nrun();\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.DartJsonParser = void 0;\r\nconst file_utils_1 = require(\"../../utils/file-utils\");\r\nconst dart_json_types_1 = require(\"./dart-json-types\");\r\nconst test_results_1 = require(\"../../test-results\");\r\nclass TestRun {\r\n constructor(path, suites, success, time) {\r\n this.path = path;\r\n this.suites = suites;\r\n this.success = success;\r\n this.time = time;\r\n }\r\n}\r\nclass TestSuite {\r\n constructor(suite) {\r\n this.suite = suite;\r\n this.groups = {};\r\n }\r\n}\r\nclass TestGroup {\r\n constructor(group) {\r\n this.group = group;\r\n this.tests = [];\r\n }\r\n}\r\nclass TestCase {\r\n constructor(testStart) {\r\n this.testStart = testStart;\r\n this.print = 
[];\r\n this.groupId = testStart.test.groupIDs[testStart.test.groupIDs.length - 1];\r\n }\r\n get result() {\r\n var _a, _b, _c, _d;\r\n if ((_a = this.testDone) === null || _a === void 0 ? void 0 : _a.skipped) {\r\n return 'skipped';\r\n }\r\n if (((_b = this.testDone) === null || _b === void 0 ? void 0 : _b.result) === 'success') {\r\n return 'success';\r\n }\r\n if (((_c = this.testDone) === null || _c === void 0 ? void 0 : _c.result) === 'error' || ((_d = this.testDone) === null || _d === void 0 ? void 0 : _d.result) === 'failure') {\r\n return 'failed';\r\n }\r\n return undefined;\r\n }\r\n get time() {\r\n return this.testDone !== undefined ? this.testDone.time - this.testStart.time : 0;\r\n }\r\n}\r\nclass DartJsonParser {\r\n constructor(options, sdk) {\r\n this.options = options;\r\n this.sdk = sdk;\r\n }\r\n async parse(path, content) {\r\n const tr = this.getTestRun(path, content);\r\n const result = this.getTestRunResult(tr);\r\n return Promise.resolve(result);\r\n }\r\n getTestRun(path, content) {\r\n const lines = content.split(/\\n\\r?/g);\r\n const events = lines\r\n .map((str, i) => {\r\n if (str.trim() === '') {\r\n return null;\r\n }\r\n try {\r\n return JSON.parse(str);\r\n }\r\n catch (e) {\r\n const col = e.columnNumber !== undefined ? `:${e.columnNumber}` : '';\r\n throw new Error(`Invalid JSON at ${path}:${i + 1}${col}\\n\\n${e}`);\r\n }\r\n })\r\n .filter(evt => evt != null);\r\n let success = false;\r\n let totalTime = 0;\r\n const suites = {};\r\n const tests = {};\r\n for (const evt of events) {\r\n if (dart_json_types_1.isSuiteEvent(evt)) {\r\n suites[evt.suite.id] = new TestSuite(evt.suite);\r\n }\r\n else if (dart_json_types_1.isGroupEvent(evt)) {\r\n suites[evt.group.suiteID].groups[evt.group.id] = new TestGroup(evt.group);\r\n }\r\n else if (dart_json_types_1.isTestStartEvent(evt) && evt.test.url !== null) {\r\n const test = new TestCase(evt);\r\n const suite = suites[evt.test.suiteID];\r\n const group = suite.groups[evt.test.groupIDs[evt.test.groupIDs.length - 1]];\r\n group.tests.push(test);\r\n tests[evt.test.id] = test;\r\n }\r\n else if (dart_json_types_1.isTestDoneEvent(evt) && !evt.hidden) {\r\n tests[evt.testID].testDone = evt;\r\n }\r\n else if (dart_json_types_1.isErrorEvent(evt)) {\r\n tests[evt.testID].error = evt;\r\n }\r\n else if (dart_json_types_1.isMessageEvent(evt)) {\r\n tests[evt.testID].print.push(evt);\r\n }\r\n else if (dart_json_types_1.isDoneEvent(evt)) {\r\n success = evt.success;\r\n totalTime = evt.time;\r\n }\r\n }\r\n return new TestRun(path, Object.values(suites), success, totalTime);\r\n }\r\n getTestRunResult(tr) {\r\n const suites = tr.suites.map(s => {\r\n return new test_results_1.TestSuiteResult(this.getRelativePath(s.suite.path), this.getGroups(s));\r\n });\r\n return new test_results_1.TestRunResult(tr.path, suites, tr.time);\r\n }\r\n getGroups(suite) {\r\n const groups = Object.values(suite.groups).filter(grp => grp.tests.length > 0);\r\n groups.sort((a, b) => { var _a, _b; return ((_a = a.group.line) !== null && _a !== void 0 ? _a : 0) - ((_b = b.group.line) !== null && _b !== void 0 ? _b : 0); });\r\n return groups.map(group => {\r\n group.tests.sort((a, b) => { var _a, _b; return ((_a = a.testStart.test.line) !== null && _a !== void 0 ? _a : 0) - ((_b = b.testStart.test.line) !== null && _b !== void 0 ? 
_b : 0); });\r\n const tests = group.tests.map(tc => {\r\n const error = this.getError(suite, tc);\r\n return new test_results_1.TestCaseResult(tc.testStart.test.name, tc.result, tc.time, error);\r\n });\r\n return new test_results_1.TestGroupResult(group.group.name, tests);\r\n });\r\n }\r\n getError(testSuite, test) {\r\n var _a, _b, _c, _d, _e, _f;\r\n if (!this.options.parseErrors || !test.error) {\r\n return undefined;\r\n }\r\n const { trackedFiles } = this.options;\r\n const message = (_b = (_a = test.error) === null || _a === void 0 ? void 0 : _a.error) !== null && _b !== void 0 ? _b : '';\r\n const stackTrace = (_d = (_c = test.error) === null || _c === void 0 ? void 0 : _c.stackTrace) !== null && _d !== void 0 ? _d : '';\r\n const print = test.print\r\n .filter(p => p.messageType === 'print')\r\n .map(p => p.message)\r\n .join('\\n');\r\n const details = [print, stackTrace].filter(str => str !== '').join('\\n');\r\n const src = this.exceptionThrowSource(details, trackedFiles);\r\n let path;\r\n let line;\r\n if (src !== undefined) {\r\n path = src.path;\r\n line = src.line;\r\n }\r\n else {\r\n const testStartPath = this.getRelativePath(testSuite.suite.path);\r\n if (trackedFiles.includes(testStartPath)) {\r\n path = testStartPath;\r\n line = (_f = (_e = test.testStart.test.root_line) !== null && _e !== void 0 ? _e : test.testStart.test.line) !== null && _f !== void 0 ? _f : undefined;\r\n }\r\n }\r\n return {\r\n path,\r\n line,\r\n message,\r\n details\r\n };\r\n }\r\n exceptionThrowSource(ex, trackedFiles) {\r\n const lines = ex.split(/\\r?\\n/g);\r\n // regexp to extract file path and line number from stack trace\r\n const dartRe = /^(?!package:)(.*)\\s+(\\d+):\\d+\\s+/;\r\n const flutterRe = /^#\\d+\\s+.*\\((?!package:)(.*):(\\d+):\\d+\\)$/;\r\n const re = this.sdk === 'dart' ? 
dartRe : flutterRe;\r\n for (const str of lines) {\r\n const match = str.match(re);\r\n if (match !== null) {\r\n const [_, pathStr, lineStr] = match;\r\n const path = file_utils_1.normalizeFilePath(this.getRelativePath(pathStr));\r\n if (trackedFiles.includes(path)) {\r\n const line = parseInt(lineStr);\r\n return { path, line };\r\n }\r\n }\r\n }\r\n }\r\n getRelativePath(path) {\r\n const { workDir } = this.options;\r\n const prefix = 'file://';\r\n if (path.startsWith(prefix)) {\r\n path = path.substr(prefix.length);\r\n }\r\n if (path.startsWith(workDir)) {\r\n path = path.substr(workDir.length);\r\n }\r\n return file_utils_1.normalizeFilePath(path);\r\n }\r\n}\r\nexports.DartJsonParser = DartJsonParser;\r\n","\"use strict\";\r\n/// reflects documentation at https://github.com/dart-lang/test/blob/master/pkgs/test/doc/json_reporter.md\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.isMessageEvent = exports.isDoneEvent = exports.isErrorEvent = exports.isTestDoneEvent = exports.isTestStartEvent = exports.isGroupEvent = exports.isSuiteEvent = void 0;\r\nfunction isSuiteEvent(event) {\r\n return event.type === 'suite';\r\n}\r\nexports.isSuiteEvent = isSuiteEvent;\r\nfunction isGroupEvent(event) {\r\n return event.type === 'group';\r\n}\r\nexports.isGroupEvent = isGroupEvent;\r\nfunction isTestStartEvent(event) {\r\n return event.type === 'testStart';\r\n}\r\nexports.isTestStartEvent = isTestStartEvent;\r\nfunction isTestDoneEvent(event) {\r\n return event.type === 'testDone';\r\n}\r\nexports.isTestDoneEvent = isTestDoneEvent;\r\nfunction isErrorEvent(event) {\r\n return event.type === 'error';\r\n}\r\nexports.isErrorEvent = isErrorEvent;\r\nfunction isDoneEvent(event) {\r\n return event.type === 'done';\r\n}\r\nexports.isDoneEvent = isDoneEvent;\r\nfunction isMessageEvent(event) {\r\n return event.type === 'print';\r\n}\r\nexports.isMessageEvent = isMessageEvent;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.DotnetTrxParser = void 0;\r\nconst xml2js_1 = require(\"xml2js\");\r\nconst file_utils_1 = require(\"../../utils/file-utils\");\r\nconst parse_utils_1 = require(\"../../utils/parse-utils\");\r\nconst test_results_1 = require(\"../../test-results\");\r\nclass TestClass {\r\n constructor(name) {\r\n this.name = name;\r\n this.tests = [];\r\n }\r\n}\r\nclass Test {\r\n constructor(name, outcome, duration, error) {\r\n this.name = name;\r\n this.outcome = outcome;\r\n this.duration = duration;\r\n this.error = error;\r\n }\r\n get result() {\r\n switch (this.outcome) {\r\n case 'Passed':\r\n return 'success';\r\n case 'NotExecuted':\r\n return 'skipped';\r\n case 'Failed':\r\n return 'failed';\r\n }\r\n }\r\n}\r\nclass DotnetTrxParser {\r\n constructor(options) {\r\n this.options = options;\r\n }\r\n async parse(path, content) {\r\n const trx = await this.getTrxReport(path, content);\r\n const tc = this.getTestClasses(trx);\r\n const tr = this.getTestRunResult(path, trx, tc);\r\n return tr;\r\n }\r\n async getTrxReport(path, content) {\r\n try {\r\n return (await xml2js_1.parseStringPromise(content));\r\n }\r\n catch (e) {\r\n throw new Error(`Invalid XML at ${path}\\n\\n${e}`);\r\n }\r\n }\r\n getTestClasses(trx) {\r\n var _a;\r\n const unitTests = {};\r\n for (const td of trx.TestRun.TestDefinitions) {\r\n for (const ut of td.UnitTest) {\r\n unitTests[ut.$.id] = ut.TestMethod[0];\r\n }\r\n }\r\n const unitTestsResults = trx.TestRun.Results.flatMap(r => r.UnitTestResult).flatMap(unitTestResult => ({\r\n 
unitTestResult,\r\n testMethod: unitTests[unitTestResult.$.testId]\r\n }));\r\n const testClasses = {};\r\n for (const r of unitTestsResults) {\r\n let tc = testClasses[r.testMethod.$.className];\r\n if (tc === undefined) {\r\n tc = new TestClass(r.testMethod.$.className);\r\n testClasses[tc.name] = tc;\r\n }\r\n const output = r.unitTestResult.Output;\r\n const error = (output === null || output === void 0 ? void 0 : output.length) > 0 && ((_a = output[0].ErrorInfo) === null || _a === void 0 ? void 0 : _a.length) > 0 ? output[0].ErrorInfo[0] : undefined;\r\n const duration = parse_utils_1.parseNetDuration(r.unitTestResult.$.duration);\r\n const test = new Test(r.testMethod.$.name, r.unitTestResult.$.outcome, duration, error);\r\n tc.tests.push(test);\r\n }\r\n const result = Object.values(testClasses);\r\n result.sort((a, b) => a.name.localeCompare(b.name));\r\n for (const tc of result) {\r\n tc.tests.sort((a, b) => a.name.localeCompare(b.name));\r\n }\r\n return result;\r\n }\r\n getTestRunResult(path, trx, testClasses) {\r\n const times = trx.TestRun.Times[0].$;\r\n const totalTime = parse_utils_1.parseIsoDate(times.finish).getTime() - parse_utils_1.parseIsoDate(times.start).getTime();\r\n const suites = testClasses.map(testClass => {\r\n const tests = testClass.tests.map(test => {\r\n const error = this.getError(test);\r\n return new test_results_1.TestCaseResult(test.name, test.result, test.duration, error);\r\n });\r\n const group = new test_results_1.TestGroupResult(null, tests);\r\n return new test_results_1.TestSuiteResult(testClass.name, [group]);\r\n });\r\n return new test_results_1.TestRunResult(path, suites, totalTime);\r\n }\r\n getError(test) {\r\n if (!this.options.parseErrors || !test.error) {\r\n return undefined;\r\n }\r\n const message = test.error.Message[0];\r\n const stackTrace = test.error.StackTrace[0];\r\n let path;\r\n let line;\r\n const src = this.exceptionThrowSource(stackTrace);\r\n if (src) {\r\n path = src.path;\r\n line = src.line;\r\n }\r\n return {\r\n path,\r\n line,\r\n message,\r\n details: `${message}\\n${stackTrace}`\r\n };\r\n }\r\n exceptionThrowSource(stackTrace) {\r\n const lines = stackTrace.split(/\\r*\\n/);\r\n const re = / in (.+):line (\\d+)$/;\r\n const { workDir, trackedFiles } = this.options;\r\n for (const str of lines) {\r\n const match = str.match(re);\r\n if (match !== null) {\r\n const [_, fileStr, lineStr] = match;\r\n const filePath = file_utils_1.normalizeFilePath(fileStr);\r\n const file = filePath.startsWith(workDir) ? 
filePath.substr(workDir.length) : filePath;\r\n if (trackedFiles.includes(file)) {\r\n const line = parseInt(lineStr);\r\n return { path: file, line };\r\n }\r\n }\r\n }\r\n }\r\n}\r\nexports.DotnetTrxParser = DotnetTrxParser;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.JestJunitParser = void 0;\r\nconst xml2js_1 = require(\"xml2js\");\r\nconst file_utils_1 = require(\"../../utils/file-utils\");\r\nconst test_results_1 = require(\"../../test-results\");\r\nclass JestJunitParser {\r\n constructor(options) {\r\n this.options = options;\r\n }\r\n async parse(path, content) {\r\n const ju = await this.getJunitReport(path, content);\r\n return this.getTestRunResult(path, ju);\r\n }\r\n async getJunitReport(path, content) {\r\n try {\r\n return (await xml2js_1.parseStringPromise(content));\r\n }\r\n catch (e) {\r\n throw new Error(`Invalid XML at ${path}\\n\\n${e}`);\r\n }\r\n }\r\n getTestRunResult(path, junit) {\r\n const suites = junit.testsuites.testsuite.map(ts => {\r\n const name = ts.$.name.trim();\r\n const time = parseFloat(ts.$.time) * 1000;\r\n const sr = new test_results_1.TestSuiteResult(name, this.getGroups(ts), time);\r\n return sr;\r\n });\r\n const time = parseFloat(junit.testsuites.$.time) * 1000;\r\n return new test_results_1.TestRunResult(path, suites, time);\r\n }\r\n getGroups(suite) {\r\n const groups = [];\r\n for (const tc of suite.testcase) {\r\n let grp = groups.find(g => g.describe === tc.$.classname);\r\n if (grp === undefined) {\r\n grp = { describe: tc.$.classname, tests: [] };\r\n groups.push(grp);\r\n }\r\n grp.tests.push(tc);\r\n }\r\n return groups.map(grp => {\r\n const tests = grp.tests.map(tc => {\r\n const name = tc.$.name.trim();\r\n const result = this.getTestCaseResult(tc);\r\n const time = parseFloat(tc.$.time) * 1000;\r\n const error = this.getTestCaseError(tc);\r\n return new test_results_1.TestCaseResult(name, result, time, error);\r\n });\r\n return new test_results_1.TestGroupResult(grp.describe, tests);\r\n });\r\n }\r\n getTestCaseResult(test) {\r\n if (test.failure)\r\n return 'failed';\r\n if (test.skipped)\r\n return 'skipped';\r\n return 'success';\r\n }\r\n getTestCaseError(tc) {\r\n if (!this.options.parseErrors || !tc.failure) {\r\n return undefined;\r\n }\r\n const details = tc.failure[0];\r\n let path;\r\n let line;\r\n const src = this.exceptionThrowSource(details);\r\n if (src) {\r\n path = src.path;\r\n line = src.line;\r\n }\r\n return {\r\n path,\r\n line,\r\n details\r\n };\r\n }\r\n exceptionThrowSource(stackTrace) {\r\n const lines = stackTrace.split(/\\r?\\n/);\r\n const re = /\\((.*):(\\d+):\\d+\\)$/;\r\n const { workDir, trackedFiles } = this.options;\r\n for (const str of lines) {\r\n const match = str.match(re);\r\n if (match !== null) {\r\n const [_, fileStr, lineStr] = match;\r\n const filePath = file_utils_1.normalizeFilePath(fileStr);\r\n const path = filePath.startsWith(workDir) ? 
filePath.substr(workDir.length) : filePath;\r\n if (trackedFiles.includes(path)) {\r\n const line = parseInt(lineStr);\r\n return { path, line };\r\n }\r\n }\r\n }\r\n }\r\n}\r\nexports.JestJunitParser = JestJunitParser;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.getAnnotations = void 0;\r\nconst markdown_utils_1 = require(\"../utils/markdown-utils\");\r\nfunction getAnnotations(results, maxCount) {\r\n var _a, _b, _c, _d;\r\n if (maxCount === 0) {\r\n return [];\r\n }\r\n // Collect errors from TestRunResults\r\n // Merge duplicates if there are more test results files processed\r\n const errors = [];\r\n const mergeDup = results.length > 1;\r\n for (const tr of results) {\r\n for (const ts of tr.suites) {\r\n for (const tg of ts.groups) {\r\n for (const tc of tg.tests) {\r\n const err = tc.error;\r\n if (err === undefined) {\r\n continue;\r\n }\r\n const path = (_a = err.path) !== null && _a !== void 0 ? _a : tr.path;\r\n const line = (_b = err.line) !== null && _b !== void 0 ? _b : 0;\r\n if (mergeDup) {\r\n const dup = errors.find(e => path === e.path && line === e.line && err.details === e.details);\r\n if (dup !== undefined) {\r\n dup.testRunPaths.push(tr.path);\r\n continue;\r\n }\r\n }\r\n errors.push({\r\n testRunPaths: [tr.path],\r\n suiteName: ts.name,\r\n testName: tc.name,\r\n details: err.details,\r\n message: (_d = (_c = err.message) !== null && _c !== void 0 ? _c : getFirstNonEmptyLine(err.details)) !== null && _d !== void 0 ? _d : 'Test failed',\r\n path,\r\n line\r\n });\r\n }\r\n }\r\n }\r\n }\r\n // Limit number of created annotations\r\n errors.splice(maxCount + 1);\r\n const annotations = errors.map(e => {\r\n const message = [\r\n 'Failed test found in:',\r\n e.testRunPaths.map(p => ` ${p}`).join('\\n'),\r\n 'Error:',\r\n ident(markdown_utils_1.fixEol(e.message), ' ')\r\n ].join('\\n');\r\n return enforceCheckRunLimits({\r\n path: e.path,\r\n start_line: e.line,\r\n end_line: e.line,\r\n annotation_level: 'failure',\r\n title: `${e.suiteName} âș ${e.testName}`,\r\n raw_details: markdown_utils_1.fixEol(e.details),\r\n message\r\n });\r\n });\r\n return annotations;\r\n}\r\nexports.getAnnotations = getAnnotations;\r\nfunction enforceCheckRunLimits(err) {\r\n err.title = markdown_utils_1.ellipsis(err.title || '', 255);\r\n err.message = markdown_utils_1.ellipsis(err.message, 65535);\r\n if (err.raw_details) {\r\n err.raw_details = markdown_utils_1.ellipsis(err.raw_details, 65535);\r\n }\r\n return err;\r\n}\r\nfunction getFirstNonEmptyLine(stackTrace) {\r\n const lines = stackTrace.split(/\\r?\\n/g);\r\n return lines.find(str => !/^\\s*$/.test(str));\r\n}\r\nfunction ident(text, prefix) {\r\n return text\r\n .split(/\\n/g)\r\n .map(line => prefix + line)\r\n .join('\\n');\r\n}\r\n","\"use strict\";\r\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n}) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n}));\r\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n}) : function(o, v) {\r\n o[\"default\"] = v;\r\n});\r\nvar __importStar = (this && this.__importStar) || function (mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n};\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.getReport = void 0;\r\nconst core = __importStar(require(\"@actions/core\"));\r\nconst markdown_utils_1 = require(\"../utils/markdown-utils\");\r\nconst slugger_1 = require(\"../utils/slugger\");\r\nfunction getReport(results, options = {}) {\r\n core.info('Generating check run summary');\r\n const maxReportLength = 65535;\r\n const sections = [];\r\n applySort(results);\r\n const badge = getReportBadge(results);\r\n sections.push(badge);\r\n const runs = getTestRunsReport(results, options);\r\n sections.push(...runs);\r\n const report = sections.join('\\n');\r\n if (report.length > maxReportLength) {\r\n let msg = `**Check Run summary limit of ${maxReportLength} chars was exceed**`;\r\n if (options.listTests !== 'all') {\r\n msg += '\\n- Consider setting `list-tests` option to `only-failed` or `none`';\r\n }\r\n if (options.listSuites !== 'all') {\r\n msg += '\\n- Consider setting `list-suites` option to `only-failed`';\r\n }\r\n return `${badge}\\n${msg}`;\r\n }\r\n return report;\r\n}\r\nexports.getReport = getReport;\r\nfunction applySort(results) {\r\n results.sort((a, b) => a.path.localeCompare(b.path));\r\n for (const res of results) {\r\n res.suites.sort((a, b) => a.name.localeCompare(b.name));\r\n }\r\n}\r\nfunction getReportBadge(results) {\r\n const passed = results.reduce((sum, tr) => sum + tr.passed, 0);\r\n const skipped = results.reduce((sum, tr) => sum + tr.skipped, 0);\r\n const failed = results.reduce((sum, tr) => sum + tr.failed, 0);\r\n return getBadge(passed, failed, skipped);\r\n}\r\nfunction getBadge(passed, failed, skipped) {\r\n const text = [];\r\n if (passed > 0) {\r\n text.push(`${passed} passed`);\r\n }\r\n if (failed > 0) {\r\n text.push(`${failed} failed`);\r\n }\r\n if (skipped > 0) {\r\n text.push(`${skipped} skipped`);\r\n }\r\n const message = text.length > 0 ? text.join(', ') : 'none';\r\n let color = 'success';\r\n if (failed > 0) {\r\n color = 'critical';\r\n }\r\n else if (passed === 0 && failed === 0) {\r\n color = 'yellow';\r\n }\r\n const hint = failed > 0 ? 'Tests failed' : 'Tests passed successfully';\r\n const uri = encodeURIComponent(`tests-${message}-${color}`);\r\n return ``;\r\n}\r\nfunction getTestRunsReport(testRuns, options) {\r\n const sections = [];\r\n if (testRuns.length > 1) {\r\n const tableData = testRuns.map((tr, runIndex) => {\r\n const time = markdown_utils_1.formatTime(tr.time);\r\n const name = tr.path;\r\n const addr = makeRunSlug(runIndex).link;\r\n const nameLink = markdown_utils_1.link(name, addr);\r\n const passed = tr.passed > 0 ? `${tr.passed}${markdown_utils_1.Icon.success}` : '';\r\n const failed = tr.failed > 0 ? `${tr.failed}${markdown_utils_1.Icon.fail}` : '';\r\n const skipped = tr.skipped > 0 ? 
`${tr.skipped}${markdown_utils_1.Icon.skip}` : '';\r\n return [nameLink, passed, failed, skipped, time];\r\n });\r\n const resultsTable = markdown_utils_1.table(['Report', 'Passed', 'Failed', 'Skipped', 'Time'], [markdown_utils_1.Align.Left, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right], ...tableData);\r\n sections.push(resultsTable);\r\n }\r\n const suitesReports = testRuns.map((tr, i) => getSuitesReport(tr, i, options)).flat();\r\n sections.push(...suitesReports);\r\n return sections;\r\n}\r\nfunction getSuitesReport(tr, runIndex, options) {\r\n const sections = [];\r\n const trSlug = makeRunSlug(runIndex);\r\n const nameLink = `${tr.path}`;\r\n const icon = getResultIcon(tr.result);\r\n sections.push(`## ${nameLink} ${icon}`);\r\n const time = markdown_utils_1.formatTime(tr.time);\r\n const headingLine2 = `**${tr.tests}** tests were completed in **${time}** with **${tr.passed}** passed, **${tr.failed}** failed and **${tr.skipped}** skipped.`;\r\n sections.push(headingLine2);\r\n const suites = options.listSuites === 'failed' ? tr.failedSuites : tr.suites;\r\n if (suites.length > 0) {\r\n const suitesTable = markdown_utils_1.table(['Test suite', 'Passed', 'Failed', 'Skipped', 'Time'], [markdown_utils_1.Align.Left, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right], ...suites.map((s, suiteIndex) => {\r\n const tsTime = markdown_utils_1.formatTime(s.time);\r\n const tsName = s.name;\r\n const skipLink = options.listTests === 'none' || (options.listTests === 'failed' && s.result !== 'failed');\r\n const tsAddr = makeSuiteSlug(runIndex, suiteIndex).link;\r\n const tsNameLink = skipLink ? tsName : markdown_utils_1.link(tsName, tsAddr);\r\n const passed = s.passed > 0 ? `${s.passed}${markdown_utils_1.Icon.success}` : '';\r\n const failed = s.failed > 0 ? `${s.failed}${markdown_utils_1.Icon.fail}` : '';\r\n const skipped = s.skipped > 0 ? `${s.skipped}${markdown_utils_1.Icon.skip}` : '';\r\n return [tsNameLink, passed, failed, skipped, tsTime];\r\n }));\r\n sections.push(suitesTable);\r\n }\r\n if (options.listTests !== 'none') {\r\n const tests = suites.map((ts, suiteIndex) => getTestsReport(ts, runIndex, suiteIndex, options)).flat();\r\n if (tests.length > 1) {\r\n sections.push(...tests);\r\n }\r\n }\r\n return sections;\r\n}\r\nfunction getTestsReport(ts, runIndex, suiteIndex, options) {\r\n const groups = options.listTests === 'failed' ? ts.failedGroups : ts.groups;\r\n if (groups.length === 0) {\r\n return [];\r\n }\r\n const sections = [];\r\n const tsName = ts.name;\r\n const tsSlug = makeSuiteSlug(runIndex, suiteIndex);\r\n const tsNameLink = `${tsName}`;\r\n const icon = getResultIcon(ts.result);\r\n sections.push(`### ${tsNameLink} ${icon}`);\r\n const tsTime = markdown_utils_1.formatTime(ts.time);\r\n const headingLine2 = `**${ts.tests}** tests were completed in **${tsTime}** with **${ts.passed}** passed, **${ts.failed}** failed and **${ts.skipped}** skipped.`;\r\n sections.push(headingLine2);\r\n for (const grp of groups) {\r\n const tests = options.listTests === 'failed' ? grp.failedTests : grp.tests;\r\n if (tests.length === 0) {\r\n continue;\r\n }\r\n const grpHeader = grp.name ? 
`\\n**${grp.name}**` : '';\r\n const testsTable = markdown_utils_1.table(['Result', 'Test', 'Time'], [markdown_utils_1.Align.Center, markdown_utils_1.Align.Left, markdown_utils_1.Align.Right], ...grp.tests.map(tc => {\r\n const name = tc.name;\r\n const time = markdown_utils_1.formatTime(tc.time);\r\n const result = getResultIcon(tc.result);\r\n return [result, name, time];\r\n }));\r\n sections.push(grpHeader, testsTable);\r\n }\r\n return sections;\r\n}\r\nfunction makeRunSlug(runIndex) {\r\n // use prefix to avoid slug conflicts after escaping the paths\r\n return slugger_1.slug(`r${runIndex}`);\r\n}\r\nfunction makeSuiteSlug(runIndex, suiteIndex) {\r\n // use prefix to avoid slug conflicts after escaping the paths\r\n return slugger_1.slug(`r${runIndex}s${suiteIndex}`);\r\n}\r\nfunction getResultIcon(result) {\r\n switch (result) {\r\n case 'success':\r\n return markdown_utils_1.Icon.success;\r\n case 'skipped':\r\n return markdown_utils_1.Icon.skip;\r\n case 'failed':\r\n return markdown_utils_1.Icon.fail;\r\n default:\r\n return '';\r\n }\r\n}\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.TestCaseResult = exports.TestGroupResult = exports.TestSuiteResult = exports.TestRunResult = void 0;\r\nclass TestRunResult {\r\n constructor(path, suites, totalTime) {\r\n this.path = path;\r\n this.suites = suites;\r\n this.totalTime = totalTime;\r\n }\r\n get tests() {\r\n return this.suites.reduce((sum, g) => sum + g.tests, 0);\r\n }\r\n get passed() {\r\n return this.suites.reduce((sum, g) => sum + g.passed, 0);\r\n }\r\n get failed() {\r\n return this.suites.reduce((sum, g) => sum + g.failed, 0);\r\n }\r\n get skipped() {\r\n return this.suites.reduce((sum, g) => sum + g.skipped, 0);\r\n }\r\n get time() {\r\n var _a;\r\n return (_a = this.totalTime) !== null && _a !== void 0 ? _a : this.suites.reduce((sum, g) => sum + g.time, 0);\r\n }\r\n get result() {\r\n return this.suites.some(t => t.result === 'failed') ? 'failed' : 'success';\r\n }\r\n get failedSuites() {\r\n return this.suites.filter(s => s.result === 'failed');\r\n }\r\n}\r\nexports.TestRunResult = TestRunResult;\r\nclass TestSuiteResult {\r\n constructor(name, groups, totalTime) {\r\n this.name = name;\r\n this.groups = groups;\r\n this.totalTime = totalTime;\r\n }\r\n get tests() {\r\n return this.groups.reduce((sum, g) => sum + g.tests.length, 0);\r\n }\r\n get passed() {\r\n return this.groups.reduce((sum, g) => sum + g.passed, 0);\r\n }\r\n get failed() {\r\n return this.groups.reduce((sum, g) => sum + g.failed, 0);\r\n }\r\n get skipped() {\r\n return this.groups.reduce((sum, g) => sum + g.skipped, 0);\r\n }\r\n get time() {\r\n var _a;\r\n return (_a = this.totalTime) !== null && _a !== void 0 ? _a : this.groups.reduce((sum, g) => sum + g.time, 0);\r\n }\r\n get result() {\r\n return this.groups.some(t => t.result === 'failed') ? 'failed' : 'success';\r\n }\r\n get failedGroups() {\r\n return this.groups.filter(grp => grp.result === 'failed');\r\n }\r\n}\r\nexports.TestSuiteResult = TestSuiteResult;\r\nclass TestGroupResult {\r\n constructor(name, tests) {\r\n this.name = name;\r\n this.tests = tests;\r\n }\r\n get passed() {\r\n return this.tests.reduce((sum, t) => (t.result === 'success' ? sum + 1 : sum), 0);\r\n }\r\n get failed() {\r\n return this.tests.reduce((sum, t) => (t.result === 'failed' ? sum + 1 : sum), 0);\r\n }\r\n get skipped() {\r\n return this.tests.reduce((sum, t) => (t.result === 'skipped' ? 
sum + 1 : sum), 0);\r\n }\r\n get time() {\r\n return this.tests.reduce((sum, t) => sum + t.time, 0);\r\n }\r\n get result() {\r\n return this.tests.some(t => t.result === 'failed') ? 'failed' : 'success';\r\n }\r\n get failedTests() {\r\n return this.tests.filter(tc => tc.result === 'failed');\r\n }\r\n}\r\nexports.TestGroupResult = TestGroupResult;\r\nclass TestCaseResult {\r\n constructor(name, result, time, error) {\r\n this.name = name;\r\n this.result = result;\r\n this.time = time;\r\n this.error = error;\r\n }\r\n}\r\nexports.TestCaseResult = TestCaseResult;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst exec_1 = require(\"@actions/exec\");\r\n// Wraps original exec() function\r\n// Returns exit code and whole stdout/stderr\r\nasync function exec(commandLine, args, options) {\r\n options = options || {};\r\n let stdout = '';\r\n let stderr = '';\r\n options.listeners = {\r\n stdout: (data) => (stdout += data.toString()),\r\n stderr: (data) => (stderr += data.toString())\r\n };\r\n const code = await exec_1.exec(commandLine, args, options);\r\n return { code, stdout, stderr };\r\n}\r\nexports.default = exec;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.normalizeFilePath = exports.normalizeDirPath = void 0;\r\nfunction normalizeDirPath(path, addTrailingSlash) {\r\n if (!path) {\r\n return path;\r\n }\r\n path = normalizeFilePath(path);\r\n if (addTrailingSlash && !path.endsWith('/')) {\r\n path += '/';\r\n }\r\n return path;\r\n}\r\nexports.normalizeDirPath = normalizeDirPath;\r\nfunction normalizeFilePath(path) {\r\n if (!path) {\r\n return path;\r\n }\r\n return path.trim().replace(/\\\\/g, '/');\r\n}\r\nexports.normalizeFilePath = normalizeFilePath;\r\n","\"use strict\";\r\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n}) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n}));\r\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n}) : function(o, v) {\r\n o[\"default\"] = v;\r\n});\r\nvar __importStar = (this && this.__importStar) || function (mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n};\r\nvar __importDefault = (this && this.__importDefault) || function (mod) {\r\n return (mod && mod.__esModule) ? 
mod : { \"default\": mod };\r\n};\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.listFiles = void 0;\r\nconst core = __importStar(require(\"@actions/core\"));\r\nconst exec_1 = __importDefault(require(\"./exec\"));\r\nasync function listFiles() {\r\n core.startGroup('Listing all files tracked by git');\r\n let output = '';\r\n try {\r\n output = (await exec_1.default('git', ['ls-files', '-z'])).stdout;\r\n }\r\n finally {\r\n fixStdOutNullTermination();\r\n core.endGroup();\r\n }\r\n return output.split('\\u0000').filter(s => s.length > 0);\r\n}\r\nexports.listFiles = listFiles;\r\nfunction fixStdOutNullTermination() {\r\n // Previous command uses NULL as delimiters and output is printed to stdout.\r\n // We have to make sure next thing written to stdout will start on new line.\r\n // Otherwise things like ::set-output wouldn't work.\r\n core.info('');\r\n}\r\n","\"use strict\";\r\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n}) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n}));\r\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n}) : function(o, v) {\r\n o[\"default\"] = v;\r\n});\r\nvar __importStar = (this && this.__importStar) || function (mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n};\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.getCheckRunSha = void 0;\r\nconst github = __importStar(require(\"@actions/github\"));\r\nfunction getCheckRunSha() {\r\n if (github.context.payload.pull_request) {\r\n const pr = github.context.payload.pull_request;\r\n return pr.head.sha;\r\n }\r\n return github.context.sha;\r\n}\r\nexports.getCheckRunSha = getCheckRunSha;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.formatTime = exports.ellipsis = exports.fixEol = exports.tableEscape = exports.table = exports.link = exports.Icon = exports.Align = void 0;\r\nvar Align;\r\n(function (Align) {\r\n Align[\"Left\"] = \":---\";\r\n Align[\"Center\"] = \":---:\";\r\n Align[\"Right\"] = \"---:\";\r\n Align[\"None\"] = \"---\";\r\n})(Align = exports.Align || (exports.Align = {}));\r\nexports.Icon = {\r\n skip: 'âïž',\r\n success: 'âïž',\r\n fail: 'â' // ':x:'\r\n};\r\nfunction link(title, address) {\r\n return `[${title}](${address})`;\r\n}\r\nexports.link = link;\r\nfunction table(headers, align, ...rows) {\r\n const headerRow = `|${headers.map(tableEscape).join('|')}|`;\r\n const alignRow = `|${align.join('|')}|`;\r\n const contentRows = rows.map(row => `|${row.map(tableEscape).join('|')}|`).join('\\n');\r\n return [headerRow, alignRow, contentRows].join('\\n');\r\n}\r\nexports.table = table;\r\nfunction tableEscape(content) {\r\n return content.toString().replace('|', '\\\\|');\r\n}\r\nexports.tableEscape = tableEscape;\r\nfunction fixEol(text) {\r\n var _a;\r\n return (_a = text === null || text === void 0 ? void 0 : text.replace(/\\r/g, '')) !== null && _a !== void 0 ? 
_a : '';\r\n}\r\nexports.fixEol = fixEol;\r\nfunction ellipsis(text, maxLength) {\r\n if (text.length <= maxLength) {\r\n return text;\r\n }\r\n return text.substr(0, maxLength - 3) + '...';\r\n}\r\nexports.ellipsis = ellipsis;\r\nfunction formatTime(ms) {\r\n if (ms > 1000) {\r\n return `${(ms / 1000).toFixed(3)}s`;\r\n }\r\n return `${Math.round(ms)}ms`;\r\n}\r\nexports.formatTime = formatTime;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.parseIsoDate = exports.parseNetDuration = void 0;\r\nfunction parseNetDuration(str) {\r\n // matches dotnet duration: 00:00:00.0010000\r\n const durationRe = /^(\\d\\d):(\\d\\d):(\\d\\d\\.\\d+)$/;\r\n const durationMatch = str.match(durationRe);\r\n if (durationMatch === null) {\r\n throw new Error(`Invalid format: \"${str}\" is not NET duration`);\r\n }\r\n const [_, hourStr, minStr, secStr] = durationMatch;\r\n return (parseInt(hourStr) * 3600 + parseInt(minStr) * 60 + parseFloat(secStr)) * 1000;\r\n}\r\nexports.parseNetDuration = parseNetDuration;\r\nfunction parseIsoDate(str) {\r\n const isoDateRe = /^\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d\\.\\d+([+-][0-2]\\d:[0-5]\\d|Z)$/;\r\n if (str === undefined || !isoDateRe.test(str)) {\r\n throw new Error(`Invalid format: \"${str}\" is not ISO date`);\r\n }\r\n return new Date(str);\r\n}\r\nexports.parseIsoDate = parseIsoDate;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.slug = void 0;\r\n// Returns HTML element id and href link usable as manual anchor links\r\n// This is needed because Github in check run summary doesn't automatically\r\n// create links out of headings as it normally does for other markdown content\r\nfunction slug(name) {\r\n const slugId = name\r\n .trim()\r\n .replace(/_/g, '')\r\n .replace(/[./\\\\]/g, '-')\r\n .replace(/[^\\w-]/g, '');\r\n const id = `user-content-${slugId}`;\r\n const link = `#${slugId}`;\r\n return { id, link };\r\n}\r\nexports.slug = slug;\r\n","\"use strict\";\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\n/**\n * Commands\n *\n * Command Format:\n * ::name key=value,key=value::message\n *\n * Examples:\n * ::warning::This is the message\n * ::set-env name=MY_VAR::some value\n */\nfunction issueCommand(command, properties, message) {\n const cmd = new Command(command, properties, message);\n process.stdout.write(cmd.toString() + os.EOL);\n}\nexports.issueCommand = issueCommand;\nfunction issue(name, message = '') {\n issueCommand(name, {}, message);\n}\nexports.issue = issue;\nconst CMD_STRING = '::';\nclass Command {\n constructor(command, properties, message) {\n if (!command) {\n command = 'missing.command';\n }\n this.command = command;\n this.properties = properties;\n this.message = message;\n }\n toString() {\n let cmdStr = CMD_STRING + this.command;\n if (this.properties && Object.keys(this.properties).length > 0) {\n cmdStr += ' ';\n let first = true;\n for (const key in this.properties) {\n if (this.properties.hasOwnProperty(key)) {\n const val = this.properties[key];\n if (val) {\n if (first) {\n first = false;\n }\n else {\n cmdStr += ',';\n }\n cmdStr += 
`${key}=${escapeProperty(val)}`;\n }\n }\n }\n }\n cmdStr += `${CMD_STRING}${escapeData(this.message)}`;\n return cmdStr;\n }\n}\nfunction escapeData(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A');\n}\nfunction escapeProperty(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A')\n .replace(/:/g, '%3A')\n .replace(/,/g, '%2C');\n}\n//# sourceMappingURL=command.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst command_1 = require(\"./command\");\nconst file_command_1 = require(\"./file-command\");\nconst utils_1 = require(\"./utils\");\nconst os = __importStar(require(\"os\"));\nconst path = __importStar(require(\"path\"));\n/**\n * The code to exit an action\n */\nvar ExitCode;\n(function (ExitCode) {\n /**\n * A code indicating that the action was successful\n */\n ExitCode[ExitCode[\"Success\"] = 0] = \"Success\";\n /**\n * A code indicating that the action was a failure\n */\n ExitCode[ExitCode[\"Failure\"] = 1] = \"Failure\";\n})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));\n//-----------------------------------------------------------------------\n// Variables\n//-----------------------------------------------------------------------\n/**\n * Sets env variable for this action and future actions in the job\n * @param name the name of the variable to set\n * @param val the value of the variable. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction exportVariable(name, val) {\n const convertedVal = utils_1.toCommandValue(val);\n process.env[name] = convertedVal;\n const filePath = process.env['GITHUB_ENV'] || '';\n if (filePath) {\n const delimiter = '_GitHubActionsFileCommandDelimeter_';\n const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`;\n file_command_1.issueCommand('ENV', commandValue);\n }\n else {\n command_1.issueCommand('set-env', { name }, convertedVal);\n }\n}\nexports.exportVariable = exportVariable;\n/**\n * Registers a secret which will get masked from logs\n * @param secret value of the secret\n */\nfunction setSecret(secret) {\n command_1.issueCommand('add-mask', {}, secret);\n}\nexports.setSecret = setSecret;\n/**\n * Prepends inputPath to the PATH (for this action and future actions)\n * @param inputPath\n */\nfunction addPath(inputPath) {\n const filePath = process.env['GITHUB_PATH'] || '';\n if (filePath) {\n file_command_1.issueCommand('PATH', inputPath);\n }\n else {\n command_1.issueCommand('add-path', {}, inputPath);\n }\n process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;\n}\nexports.addPath = addPath;\n/**\n * Gets the value of an input. The value is also trimmed.\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns string\n */\nfunction getInput(name, options) {\n const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';\n if (options && options.required && !val) {\n throw new Error(`Input required and not supplied: ${name}`);\n }\n return val.trim();\n}\nexports.getInput = getInput;\n/**\n * Sets the value of an output.\n *\n * @param name name of the output to set\n * @param value value to store. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction setOutput(name, value) {\n command_1.issueCommand('set-output', { name }, value);\n}\nexports.setOutput = setOutput;\n/**\n * Enables or disables the echoing of commands into stdout for the rest of the step.\n * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.\n *\n */\nfunction setCommandEcho(enabled) {\n command_1.issue('echo', enabled ? 'on' : 'off');\n}\nexports.setCommandEcho = setCommandEcho;\n//-----------------------------------------------------------------------\n// Results\n//-----------------------------------------------------------------------\n/**\n * Sets the action status to failed.\n * When the action exits it will be with an exit code of 1\n * @param message add error issue message\n */\nfunction setFailed(message) {\n process.exitCode = ExitCode.Failure;\n error(message);\n}\nexports.setFailed = setFailed;\n//-----------------------------------------------------------------------\n// Logging Commands\n//-----------------------------------------------------------------------\n/**\n * Gets whether Actions Step Debug is on or not\n */\nfunction isDebug() {\n return process.env['RUNNER_DEBUG'] === '1';\n}\nexports.isDebug = isDebug;\n/**\n * Writes debug message to user log\n * @param message debug message\n */\nfunction debug(message) {\n command_1.issueCommand('debug', {}, message);\n}\nexports.debug = debug;\n/**\n * Adds an error issue\n * @param message error issue message. 
Errors will be converted to string via toString()\n */\nfunction error(message) {\n command_1.issue('error', message instanceof Error ? message.toString() : message);\n}\nexports.error = error;\n/**\n * Adds an warning issue\n * @param message warning issue message. Errors will be converted to string via toString()\n */\nfunction warning(message) {\n command_1.issue('warning', message instanceof Error ? message.toString() : message);\n}\nexports.warning = warning;\n/**\n * Writes info to log with console.log.\n * @param message info message\n */\nfunction info(message) {\n process.stdout.write(message + os.EOL);\n}\nexports.info = info;\n/**\n * Begin an output group.\n *\n * Output until the next `groupEnd` will be foldable in this group\n *\n * @param name The name of the output group\n */\nfunction startGroup(name) {\n command_1.issue('group', name);\n}\nexports.startGroup = startGroup;\n/**\n * End an output group.\n */\nfunction endGroup() {\n command_1.issue('endgroup');\n}\nexports.endGroup = endGroup;\n/**\n * Wrap an asynchronous function call in a group.\n *\n * Returns the same type as the function itself.\n *\n * @param name The name of the group\n * @param fn The function to wrap in the group\n */\nfunction group(name, fn) {\n return __awaiter(this, void 0, void 0, function* () {\n startGroup(name);\n let result;\n try {\n result = yield fn();\n }\n finally {\n endGroup();\n }\n return result;\n });\n}\nexports.group = group;\n//-----------------------------------------------------------------------\n// Wrapper action state\n//-----------------------------------------------------------------------\n/**\n * Saves state for current action, the state can only be retrieved by this action's post job execution.\n *\n * @param name name of the state to store\n * @param value value to store. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction saveState(name, value) {\n command_1.issueCommand('save-state', { name }, value);\n}\nexports.saveState = saveState;\n/**\n * Gets the value of an state set by this action's main execution.\n *\n * @param name name of the state to get\n * @returns string\n */\nfunction getState(name) {\n return process.env[`STATE_${name}`] || '';\n}\nexports.getState = getState;\n//# sourceMappingURL=core.js.map","\"use strict\";\n// For internal use, subject to change.\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nconst fs = __importStar(require(\"fs\"));\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\nfunction issueCommand(command, message) {\n const filePath = process.env[`GITHUB_${command}`];\n if (!filePath) {\n throw new Error(`Unable to find environment variable for file command ${command}`);\n }\n if (!fs.existsSync(filePath)) {\n throw new Error(`Missing file at path: ${filePath}`);\n }\n fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {\n encoding: 'utf8'\n });\n}\nexports.issueCommand = issueCommand;\n//# sourceMappingURL=file-command.js.map","\"use strict\";\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n/**\n * Sanitizes an input into a string so it can be passed into issueCommand safely\n * @param input input to sanitize into a string\n */\nfunction toCommandValue(input) {\n if (input === null || input === undefined) {\n return '';\n }\n else if (typeof input === 'string' || input instanceof String) {\n return input;\n }\n return JSON.stringify(input);\n}\nexports.toCommandValue = toCommandValue;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst tr = __importStar(require(\"./toolrunner\"));\n/**\n * Exec a command.\n * Output will be streamed to the live console.\n * Returns promise with return code\n *\n * @param commandLine command to execute (can include additional args). 
Must be correctly escaped.\n * @param args optional arguments for tool. Escaping is handled by the lib.\n * @param options optional exec options. See ExecOptions\n * @returns Promise exit code\n */\nfunction exec(commandLine, args, options) {\n return __awaiter(this, void 0, void 0, function* () {\n const commandArgs = tr.argStringToArray(commandLine);\n if (commandArgs.length === 0) {\n throw new Error(`Parameter 'commandLine' cannot be null or empty.`);\n }\n // Path to tool to execute should be first arg\n const toolPath = commandArgs[0];\n args = commandArgs.slice(1).concat(args || []);\n const runner = new tr.ToolRunner(toolPath, args, options);\n return runner.exec();\n });\n}\nexports.exec = exec;\n//# sourceMappingURL=exec.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst os = __importStar(require(\"os\"));\nconst events = __importStar(require(\"events\"));\nconst child = __importStar(require(\"child_process\"));\nconst path = __importStar(require(\"path\"));\nconst io = __importStar(require(\"@actions/io\"));\nconst ioUtil = __importStar(require(\"@actions/io/lib/io-util\"));\n/* eslint-disable @typescript-eslint/unbound-method */\nconst IS_WINDOWS = process.platform === 'win32';\n/*\n * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way.\n */\nclass ToolRunner extends events.EventEmitter {\n constructor(toolPath, args, options) {\n super();\n if (!toolPath) {\n throw new Error(\"Parameter 'toolPath' cannot be null or empty.\");\n }\n this.toolPath = toolPath;\n this.args = args || [];\n this.options = options || {};\n }\n _debug(message) {\n if (this.options.listeners && this.options.listeners.debug) {\n this.options.listeners.debug(message);\n }\n }\n _getCommandString(options, noPrefix) {\n const toolPath = this._getSpawnFileName();\n const args = this._getSpawnArgs(options);\n let cmd = noPrefix ? 
'' : '[command]'; // omit prefix when piped to a second tool\n if (IS_WINDOWS) {\n // Windows + cmd file\n if (this._isCmdFile()) {\n cmd += toolPath;\n for (const a of args) {\n cmd += ` ${a}`;\n }\n }\n // Windows + verbatim\n else if (options.windowsVerbatimArguments) {\n cmd += `\"${toolPath}\"`;\n for (const a of args) {\n cmd += ` ${a}`;\n }\n }\n // Windows (regular)\n else {\n cmd += this._windowsQuoteCmdArg(toolPath);\n for (const a of args) {\n cmd += ` ${this._windowsQuoteCmdArg(a)}`;\n }\n }\n }\n else {\n // OSX/Linux - this can likely be improved with some form of quoting.\n // creating processes on Unix is fundamentally different than Windows.\n // on Unix, execvp() takes an arg array.\n cmd += toolPath;\n for (const a of args) {\n cmd += ` ${a}`;\n }\n }\n return cmd;\n }\n _processLineBuffer(data, strBuffer, onLine) {\n try {\n let s = strBuffer + data.toString();\n let n = s.indexOf(os.EOL);\n while (n > -1) {\n const line = s.substring(0, n);\n onLine(line);\n // the rest of the string ...\n s = s.substring(n + os.EOL.length);\n n = s.indexOf(os.EOL);\n }\n strBuffer = s;\n }\n catch (err) {\n // streaming lines to console is best effort. Don't fail a build.\n this._debug(`error processing line. Failed with error ${err}`);\n }\n }\n _getSpawnFileName() {\n if (IS_WINDOWS) {\n if (this._isCmdFile()) {\n return process.env['COMSPEC'] || 'cmd.exe';\n }\n }\n return this.toolPath;\n }\n _getSpawnArgs(options) {\n if (IS_WINDOWS) {\n if (this._isCmdFile()) {\n let argline = `/D /S /C \"${this._windowsQuoteCmdArg(this.toolPath)}`;\n for (const a of this.args) {\n argline += ' ';\n argline += options.windowsVerbatimArguments\n ? a\n : this._windowsQuoteCmdArg(a);\n }\n argline += '\"';\n return [argline];\n }\n }\n return this.args;\n }\n _endsWith(str, end) {\n return str.endsWith(end);\n }\n _isCmdFile() {\n const upperToolPath = this.toolPath.toUpperCase();\n return (this._endsWith(upperToolPath, '.CMD') ||\n this._endsWith(upperToolPath, '.BAT'));\n }\n _windowsQuoteCmdArg(arg) {\n // for .exe, apply the normal quoting rules that libuv applies\n if (!this._isCmdFile()) {\n return this._uvQuoteCmdArg(arg);\n }\n // otherwise apply quoting rules specific to the cmd.exe command line parser.\n // the libuv rules are generic and are not designed specifically for cmd.exe\n // command line parser.\n //\n // for a detailed description of the cmd.exe command line parser, refer to\n // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912\n // need quotes for empty arg\n if (!arg) {\n return '\"\"';\n }\n // determine whether the arg needs to be quoted\n const cmdSpecialChars = [\n ' ',\n '\\t',\n '&',\n '(',\n ')',\n '[',\n ']',\n '{',\n '}',\n '^',\n '=',\n ';',\n '!',\n \"'\",\n '+',\n ',',\n '`',\n '~',\n '|',\n '<',\n '>',\n '\"'\n ];\n let needsQuotes = false;\n for (const char of arg) {\n if (cmdSpecialChars.some(x => x === char)) {\n needsQuotes = true;\n break;\n }\n }\n // short-circuit if quotes not needed\n if (!needsQuotes) {\n return arg;\n }\n // the following quoting rules are very similar to the rules that by libuv applies.\n //\n // 1) wrap the string in quotes\n //\n // 2) double-up quotes - i.e. \" => \"\"\n //\n // this is different from the libuv quoting rules. 
libuv replaces \" with \\\", which unfortunately\n // doesn't work well with a cmd.exe command line.\n //\n // note, replacing \" with \"\" also works well if the arg is passed to a downstream .NET console app.\n // for example, the command line:\n // foo.exe \"myarg:\"\"my val\"\"\"\n // is parsed by a .NET console app into an arg array:\n // [ \"myarg:\\\"my val\\\"\" ]\n // which is the same end result when applying libuv quoting rules. although the actual\n // command line from libuv quoting rules would look like:\n // foo.exe \"myarg:\\\"my val\\\"\"\n //\n // 3) double-up slashes that precede a quote,\n // e.g. hello \\world => \"hello \\world\"\n // hello\\\"world => \"hello\\\\\"\"world\"\n // hello\\\\\"world => \"hello\\\\\\\\\"\"world\"\n // hello world\\ => \"hello world\\\\\"\n //\n // technically this is not required for a cmd.exe command line, or the batch argument parser.\n // the reasons for including this as a .cmd quoting rule are:\n //\n // a) this is optimized for the scenario where the argument is passed from the .cmd file to an\n // external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule.\n //\n // b) it's what we've been doing previously (by deferring to node default behavior) and we\n // haven't heard any complaints about that aspect.\n //\n // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be\n // escaped when used on the command line directly - even though within a .cmd file % can be escaped\n // by using %%.\n //\n // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts\n // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing.\n //\n // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would\n // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the\n // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args\n // to an external program.\n //\n // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file.\n // % can be escaped within a .cmd file.\n let reverse = '\"';\n let quoteHit = true;\n for (let i = arg.length; i > 0; i--) {\n // walk the string in reverse\n reverse += arg[i - 1];\n if (quoteHit && arg[i - 1] === '\\\\') {\n reverse += '\\\\'; // double the slash\n }\n else if (arg[i - 1] === '\"') {\n quoteHit = true;\n reverse += '\"'; // double the quote\n }\n else {\n quoteHit = false;\n }\n }\n reverse += '\"';\n return reverse\n .split('')\n .reverse()\n .join('');\n }\n _uvQuoteCmdArg(arg) {\n // Tool runner wraps child_process.spawn() and needs to apply the same quoting as\n // Node in certain cases where the undocumented spawn option windowsVerbatimArguments\n // is used.\n //\n // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV,\n // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details),\n // pasting copyright notice from Node within this function:\n //\n // Copyright Joyent, Inc. and other Node contributors. 
All rights reserved.\n //\n // Permission is hereby granted, free of charge, to any person obtaining a copy\n // of this software and associated documentation files (the \"Software\"), to\n // deal in the Software without restriction, including without limitation the\n // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n // sell copies of the Software, and to permit persons to whom the Software is\n // furnished to do so, subject to the following conditions:\n //\n // The above copyright notice and this permission notice shall be included in\n // all copies or substantial portions of the Software.\n //\n // THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n // IN THE SOFTWARE.\n if (!arg) {\n // Need double quotation for empty argument\n return '\"\"';\n }\n if (!arg.includes(' ') && !arg.includes('\\t') && !arg.includes('\"')) {\n // No quotation needed\n return arg;\n }\n if (!arg.includes('\"') && !arg.includes('\\\\')) {\n // No embedded double quotes or backslashes, so I can just wrap\n // quote marks around the whole thing.\n return `\"${arg}\"`;\n }\n // Expected input/output:\n // input : hello\"world\n // output: \"hello\\\"world\"\n // input : hello\"\"world\n // output: \"hello\\\"\\\"world\"\n // input : hello\\world\n // output: hello\\world\n // input : hello\\\\world\n // output: hello\\\\world\n // input : hello\\\"world\n // output: \"hello\\\\\\\"world\"\n // input : hello\\\\\"world\n // output: \"hello\\\\\\\\\\\"world\"\n // input : hello world\\\n // output: \"hello world\\\\\" - note the comment in libuv actually reads \"hello world\\\"\n // but it appears the comment is wrong, it should be \"hello world\\\\\"\n let reverse = '\"';\n let quoteHit = true;\n for (let i = arg.length; i > 0; i--) {\n // walk the string in reverse\n reverse += arg[i - 1];\n if (quoteHit && arg[i - 1] === '\\\\') {\n reverse += '\\\\';\n }\n else if (arg[i - 1] === '\"') {\n quoteHit = true;\n reverse += '\\\\';\n }\n else {\n quoteHit = false;\n }\n }\n reverse += '\"';\n return reverse\n .split('')\n .reverse()\n .join('');\n }\n _cloneExecOptions(options) {\n options = options || {};\n const result = {\n cwd: options.cwd || process.cwd(),\n env: options.env || process.env,\n silent: options.silent || false,\n windowsVerbatimArguments: options.windowsVerbatimArguments || false,\n failOnStdErr: options.failOnStdErr || false,\n ignoreReturnCode: options.ignoreReturnCode || false,\n delay: options.delay || 10000\n };\n result.outStream = options.outStream || process.stdout;\n result.errStream = options.errStream || process.stderr;\n return result;\n }\n _getSpawnOptions(options, toolPath) {\n options = options || {};\n const result = {};\n result.cwd = options.cwd;\n result.env = options.env;\n result['windowsVerbatimArguments'] =\n options.windowsVerbatimArguments || this._isCmdFile();\n if (options.windowsVerbatimArguments) {\n result.argv0 = `\"${toolPath}\"`;\n }\n return result;\n }\n /**\n * Exec a tool.\n * Output will be streamed to the live console.\n * Returns promise with return code\n *\n * @param tool path to tool to exec\n * @param 
options optional exec options. See ExecOptions\n * @returns number\n */\n exec() {\n return __awaiter(this, void 0, void 0, function* () {\n // root the tool path if it is unrooted and contains relative pathing\n if (!ioUtil.isRooted(this.toolPath) &&\n (this.toolPath.includes('/') ||\n (IS_WINDOWS && this.toolPath.includes('\\\\')))) {\n // prefer options.cwd if it is specified, however options.cwd may also need to be rooted\n this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath);\n }\n // if the tool is only a file name, then resolve it from the PATH\n // otherwise verify it exists (add extension on Windows if necessary)\n this.toolPath = yield io.which(this.toolPath, true);\n return new Promise((resolve, reject) => {\n this._debug(`exec tool: ${this.toolPath}`);\n this._debug('arguments:');\n for (const arg of this.args) {\n this._debug(` ${arg}`);\n }\n const optionsNonNull = this._cloneExecOptions(this.options);\n if (!optionsNonNull.silent && optionsNonNull.outStream) {\n optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL);\n }\n const state = new ExecState(optionsNonNull, this.toolPath);\n state.on('debug', (message) => {\n this._debug(message);\n });\n const fileName = this._getSpawnFileName();\n const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName));\n const stdbuffer = '';\n if (cp.stdout) {\n cp.stdout.on('data', (data) => {\n if (this.options.listeners && this.options.listeners.stdout) {\n this.options.listeners.stdout(data);\n }\n if (!optionsNonNull.silent && optionsNonNull.outStream) {\n optionsNonNull.outStream.write(data);\n }\n this._processLineBuffer(data, stdbuffer, (line) => {\n if (this.options.listeners && this.options.listeners.stdline) {\n this.options.listeners.stdline(line);\n }\n });\n });\n }\n const errbuffer = '';\n if (cp.stderr) {\n cp.stderr.on('data', (data) => {\n state.processStderr = true;\n if (this.options.listeners && this.options.listeners.stderr) {\n this.options.listeners.stderr(data);\n }\n if (!optionsNonNull.silent &&\n optionsNonNull.errStream &&\n optionsNonNull.outStream) {\n const s = optionsNonNull.failOnStdErr\n ? 
optionsNonNull.errStream\n : optionsNonNull.outStream;\n s.write(data);\n }\n this._processLineBuffer(data, errbuffer, (line) => {\n if (this.options.listeners && this.options.listeners.errline) {\n this.options.listeners.errline(line);\n }\n });\n });\n }\n cp.on('error', (err) => {\n state.processError = err.message;\n state.processExited = true;\n state.processClosed = true;\n state.CheckComplete();\n });\n cp.on('exit', (code) => {\n state.processExitCode = code;\n state.processExited = true;\n this._debug(`Exit code ${code} received from tool '${this.toolPath}'`);\n state.CheckComplete();\n });\n cp.on('close', (code) => {\n state.processExitCode = code;\n state.processExited = true;\n state.processClosed = true;\n this._debug(`STDIO streams have closed for tool '${this.toolPath}'`);\n state.CheckComplete();\n });\n state.on('done', (error, exitCode) => {\n if (stdbuffer.length > 0) {\n this.emit('stdline', stdbuffer);\n }\n if (errbuffer.length > 0) {\n this.emit('errline', errbuffer);\n }\n cp.removeAllListeners();\n if (error) {\n reject(error);\n }\n else {\n resolve(exitCode);\n }\n });\n if (this.options.input) {\n if (!cp.stdin) {\n throw new Error('child process missing stdin');\n }\n cp.stdin.end(this.options.input);\n }\n });\n });\n }\n}\nexports.ToolRunner = ToolRunner;\n/**\n * Convert an arg string to an array of args. Handles escaping\n *\n * @param argString string of arguments\n * @returns string[] array of arguments\n */\nfunction argStringToArray(argString) {\n const args = [];\n let inQuotes = false;\n let escaped = false;\n let arg = '';\n function append(c) {\n // we only escape double quotes.\n if (escaped && c !== '\"') {\n arg += '\\\\';\n }\n arg += c;\n escaped = false;\n }\n for (let i = 0; i < argString.length; i++) {\n const c = argString.charAt(i);\n if (c === '\"') {\n if (!escaped) {\n inQuotes = !inQuotes;\n }\n else {\n append(c);\n }\n continue;\n }\n if (c === '\\\\' && escaped) {\n append(c);\n continue;\n }\n if (c === '\\\\' && inQuotes) {\n escaped = true;\n continue;\n }\n if (c === ' ' && !inQuotes) {\n if (arg.length > 0) {\n args.push(arg);\n arg = '';\n }\n continue;\n }\n append(c);\n }\n if (arg.length > 0) {\n args.push(arg.trim());\n }\n return args;\n}\nexports.argStringToArray = argStringToArray;\nclass ExecState extends events.EventEmitter {\n constructor(options, toolPath) {\n super();\n this.processClosed = false; // tracks whether the process has exited and stdio is closed\n this.processError = '';\n this.processExitCode = 0;\n this.processExited = false; // tracks whether the process has exited\n this.processStderr = false; // tracks whether stderr was written to\n this.delay = 10000; // 10 seconds\n this.done = false;\n this.timeout = null;\n if (!toolPath) {\n throw new Error('toolPath must not be empty');\n }\n this.options = options;\n this.toolPath = toolPath;\n if (options.delay) {\n this.delay = options.delay;\n }\n }\n CheckComplete() {\n if (this.done) {\n return;\n }\n if (this.processClosed) {\n this._setResult();\n }\n else if (this.processExited) {\n this.timeout = setTimeout(ExecState.HandleTimeout, this.delay, this);\n }\n }\n _debug(message) {\n this.emit('debug', message);\n }\n _setResult() {\n // determine whether there is an error\n let error;\n if (this.processExited) {\n if (this.processError) {\n error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. 
Error: ${this.processError}`);\n }\n else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) {\n error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`);\n }\n else if (this.processStderr && this.options.failOnStdErr) {\n error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`);\n }\n }\n // clear the timeout\n if (this.timeout) {\n clearTimeout(this.timeout);\n this.timeout = null;\n }\n this.done = true;\n this.emit('done', error, this.processExitCode);\n }\n static HandleTimeout(state) {\n if (state.done) {\n return;\n }\n if (!state.processClosed && state.processExited) {\n const message = `The STDIO streams did not close within ${state.delay /\n 1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`;\n state._debug(message);\n }\n state._setResult();\n }\n}\n//# sourceMappingURL=toolrunner.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Context = void 0;\nconst fs_1 = require(\"fs\");\nconst os_1 = require(\"os\");\nclass Context {\n /**\n * Hydrate the context from the environment\n */\n constructor() {\n this.payload = {};\n if (process.env.GITHUB_EVENT_PATH) {\n if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) {\n this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));\n }\n else {\n const path = process.env.GITHUB_EVENT_PATH;\n process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`);\n }\n }\n this.eventName = process.env.GITHUB_EVENT_NAME;\n this.sha = process.env.GITHUB_SHA;\n this.ref = process.env.GITHUB_REF;\n this.workflow = process.env.GITHUB_WORKFLOW;\n this.action = process.env.GITHUB_ACTION;\n this.actor = process.env.GITHUB_ACTOR;\n this.job = process.env.GITHUB_JOB;\n this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10);\n this.runId = parseInt(process.env.GITHUB_RUN_ID, 10);\n }\n get issue() {\n const payload = this.payload;\n return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number });\n }\n get repo() {\n if (process.env.GITHUB_REPOSITORY) {\n const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');\n return { owner, repo };\n }\n if (this.payload.repository) {\n return {\n owner: this.payload.repository.owner.login,\n repo: this.payload.repository.name\n };\n }\n throw new Error(\"context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'\");\n }\n}\nexports.Context = Context;\n//# sourceMappingURL=context.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getOctokit = exports.context = void 0;\nconst Context = __importStar(require(\"./context\"));\nconst utils_1 = require(\"./utils\");\nexports.context = new Context.Context();\n/**\n * Returns a hydrated octokit ready to use for GitHub Actions\n *\n * @param token the repo PAT or GITHUB_TOKEN\n * @param options other options to set\n */\nfunction getOctokit(token, options) {\n return new utils_1.GitHub(utils_1.getOctokitOptions(token, options));\n}\nexports.getOctokit = getOctokit;\n//# sourceMappingURL=github.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0;\nconst httpClient = __importStar(require(\"@actions/http-client\"));\nfunction getAuthString(token, options) {\n if (!token && !options.auth) {\n throw new Error('Parameter token or opts.auth is required');\n }\n else if (token && options.auth) {\n throw new Error('Parameters token and opts.auth may not both be specified');\n }\n return typeof options.auth === 'string' ? options.auth : `token ${token}`;\n}\nexports.getAuthString = getAuthString;\nfunction getProxyAgent(destinationUrl) {\n const hc = new httpClient.HttpClient();\n return hc.getAgent(destinationUrl);\n}\nexports.getProxyAgent = getProxyAgent;\nfunction getApiBaseUrl() {\n return process.env['GITHUB_API_URL'] || 'https://api.github.com';\n}\nexports.getApiBaseUrl = getApiBaseUrl;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getOctokitOptions = exports.GitHub = exports.context = void 0;\nconst Context = __importStar(require(\"./context\"));\nconst Utils = __importStar(require(\"./internal/utils\"));\n// octokit + plugins\nconst core_1 = require(\"@octokit/core\");\nconst plugin_rest_endpoint_methods_1 = require(\"@octokit/plugin-rest-endpoint-methods\");\nconst plugin_paginate_rest_1 = require(\"@octokit/plugin-paginate-rest\");\nexports.context = new Context.Context();\nconst baseUrl = Utils.getApiBaseUrl();\nconst defaults = {\n baseUrl,\n request: {\n agent: Utils.getProxyAgent(baseUrl)\n }\n};\nexports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(defaults);\n/**\n * Convience function to correctly format Octokit Options to pass into the constructor.\n *\n * @param token the repo PAT or GITHUB_TOKEN\n * @param options other options to set\n */\nfunction getOctokitOptions(token, options) {\n const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller\n // Auth\n const auth = Utils.getAuthString(token, opts);\n if (auth) {\n opts.auth = auth;\n }\n return opts;\n}\nexports.getOctokitOptions = getOctokitOptions;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst http = require(\"http\");\nconst https = require(\"https\");\nconst pm = require(\"./proxy\");\nlet tunnel;\nvar HttpCodes;\n(function (HttpCodes) {\n HttpCodes[HttpCodes[\"OK\"] = 200] = \"OK\";\n HttpCodes[HttpCodes[\"MultipleChoices\"] = 300] = \"MultipleChoices\";\n HttpCodes[HttpCodes[\"MovedPermanently\"] = 301] = \"MovedPermanently\";\n HttpCodes[HttpCodes[\"ResourceMoved\"] = 302] = \"ResourceMoved\";\n HttpCodes[HttpCodes[\"SeeOther\"] = 303] = \"SeeOther\";\n HttpCodes[HttpCodes[\"NotModified\"] = 304] = \"NotModified\";\n HttpCodes[HttpCodes[\"UseProxy\"] = 305] = \"UseProxy\";\n HttpCodes[HttpCodes[\"SwitchProxy\"] = 306] = \"SwitchProxy\";\n HttpCodes[HttpCodes[\"TemporaryRedirect\"] = 307] = \"TemporaryRedirect\";\n HttpCodes[HttpCodes[\"PermanentRedirect\"] = 308] = \"PermanentRedirect\";\n HttpCodes[HttpCodes[\"BadRequest\"] = 400] = \"BadRequest\";\n HttpCodes[HttpCodes[\"Unauthorized\"] = 401] = \"Unauthorized\";\n HttpCodes[HttpCodes[\"PaymentRequired\"] = 402] = \"PaymentRequired\";\n HttpCodes[HttpCodes[\"Forbidden\"] = 403] = \"Forbidden\";\n HttpCodes[HttpCodes[\"NotFound\"] = 404] = \"NotFound\";\n HttpCodes[HttpCodes[\"MethodNotAllowed\"] = 405] = \"MethodNotAllowed\";\n HttpCodes[HttpCodes[\"NotAcceptable\"] = 406] = \"NotAcceptable\";\n HttpCodes[HttpCodes[\"ProxyAuthenticationRequired\"] = 407] = \"ProxyAuthenticationRequired\";\n HttpCodes[HttpCodes[\"RequestTimeout\"] = 408] = \"RequestTimeout\";\n HttpCodes[HttpCodes[\"Conflict\"] = 409] = \"Conflict\";\n HttpCodes[HttpCodes[\"Gone\"] = 410] = \"Gone\";\n HttpCodes[HttpCodes[\"TooManyRequests\"] = 429] = \"TooManyRequests\";\n 
HttpCodes[HttpCodes[\"InternalServerError\"] = 500] = \"InternalServerError\";\n HttpCodes[HttpCodes[\"NotImplemented\"] = 501] = \"NotImplemented\";\n HttpCodes[HttpCodes[\"BadGateway\"] = 502] = \"BadGateway\";\n HttpCodes[HttpCodes[\"ServiceUnavailable\"] = 503] = \"ServiceUnavailable\";\n HttpCodes[HttpCodes[\"GatewayTimeout\"] = 504] = \"GatewayTimeout\";\n})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));\nvar Headers;\n(function (Headers) {\n Headers[\"Accept\"] = \"accept\";\n Headers[\"ContentType\"] = \"content-type\";\n})(Headers = exports.Headers || (exports.Headers = {}));\nvar MediaTypes;\n(function (MediaTypes) {\n MediaTypes[\"ApplicationJson\"] = \"application/json\";\n})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));\n/**\n * Returns the proxy URL, depending upon the supplied url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com\n */\nfunction getProxyUrl(serverUrl) {\n let proxyUrl = pm.getProxyUrl(new URL(serverUrl));\n return proxyUrl ? proxyUrl.href : '';\n}\nexports.getProxyUrl = getProxyUrl;\nconst HttpRedirectCodes = [\n HttpCodes.MovedPermanently,\n HttpCodes.ResourceMoved,\n HttpCodes.SeeOther,\n HttpCodes.TemporaryRedirect,\n HttpCodes.PermanentRedirect\n];\nconst HttpResponseRetryCodes = [\n HttpCodes.BadGateway,\n HttpCodes.ServiceUnavailable,\n HttpCodes.GatewayTimeout\n];\nconst RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];\nconst ExponentialBackoffCeiling = 10;\nconst ExponentialBackoffTimeSlice = 5;\nclass HttpClientError extends Error {\n constructor(message, statusCode) {\n super(message);\n this.name = 'HttpClientError';\n this.statusCode = statusCode;\n Object.setPrototypeOf(this, HttpClientError.prototype);\n }\n}\nexports.HttpClientError = HttpClientError;\nclass HttpClientResponse {\n constructor(message) {\n this.message = message;\n }\n readBody() {\n return new Promise(async (resolve, reject) => {\n let output = Buffer.alloc(0);\n this.message.on('data', (chunk) => {\n output = Buffer.concat([output, chunk]);\n });\n this.message.on('end', () => {\n resolve(output.toString());\n });\n });\n }\n}\nexports.HttpClientResponse = HttpClientResponse;\nfunction isHttps(requestUrl) {\n let parsedUrl = new URL(requestUrl);\n return parsedUrl.protocol === 'https:';\n}\nexports.isHttps = isHttps;\nclass HttpClient {\n constructor(userAgent, handlers, requestOptions) {\n this._ignoreSslError = false;\n this._allowRedirects = true;\n this._allowRedirectDowngrade = false;\n this._maxRedirects = 50;\n this._allowRetries = false;\n this._maxRetries = 1;\n this._keepAlive = false;\n this._disposed = false;\n this.userAgent = userAgent;\n this.handlers = handlers || [];\n this.requestOptions = requestOptions;\n if (requestOptions) {\n if (requestOptions.ignoreSslError != null) {\n this._ignoreSslError = requestOptions.ignoreSslError;\n }\n this._socketTimeout = requestOptions.socketTimeout;\n if (requestOptions.allowRedirects != null) {\n this._allowRedirects = requestOptions.allowRedirects;\n }\n if (requestOptions.allowRedirectDowngrade != null) {\n this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;\n }\n if (requestOptions.maxRedirects != null) {\n this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);\n }\n if (requestOptions.keepAlive != null) {\n this._keepAlive = requestOptions.keepAlive;\n }\n if (requestOptions.allowRetries != null) {\n this._allowRetries = requestOptions.allowRetries;\n }\n if 
(requestOptions.maxRetries != null) {\n this._maxRetries = requestOptions.maxRetries;\n }\n }\n }\n options(requestUrl, additionalHeaders) {\n return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});\n }\n get(requestUrl, additionalHeaders) {\n return this.request('GET', requestUrl, null, additionalHeaders || {});\n }\n del(requestUrl, additionalHeaders) {\n return this.request('DELETE', requestUrl, null, additionalHeaders || {});\n }\n post(requestUrl, data, additionalHeaders) {\n return this.request('POST', requestUrl, data, additionalHeaders || {});\n }\n patch(requestUrl, data, additionalHeaders) {\n return this.request('PATCH', requestUrl, data, additionalHeaders || {});\n }\n put(requestUrl, data, additionalHeaders) {\n return this.request('PUT', requestUrl, data, additionalHeaders || {});\n }\n head(requestUrl, additionalHeaders) {\n return this.request('HEAD', requestUrl, null, additionalHeaders || {});\n }\n sendStream(verb, requestUrl, stream, additionalHeaders) {\n return this.request(verb, requestUrl, stream, additionalHeaders);\n }\n /**\n * Gets a typed object from an endpoint\n * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise\n */\n async getJson(requestUrl, additionalHeaders = {}) {\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n let res = await this.get(requestUrl, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n }\n async postJson(requestUrl, obj, additionalHeaders = {}) {\n let data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n let res = await this.post(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n }\n async putJson(requestUrl, obj, additionalHeaders = {}) {\n let data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n let res = await this.put(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n }\n async patchJson(requestUrl, obj, additionalHeaders = {}) {\n let data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n let res = await this.patch(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n }\n /**\n * Makes a raw http request.\n * All other methods such as get, post, patch, and request ultimately call this.\n * Prefer get, del, post and patch\n */\n async request(verb, requestUrl, data, headers) {\n if (this._disposed) {\n throw new Error('Client has already been disposed.');\n }\n let parsedUrl = new URL(requestUrl);\n let info = this._prepareRequest(verb, parsedUrl, headers);\n // Only perform retries on reads since writes may not be idempotent.\n let maxTries = 
this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1\n ? this._maxRetries + 1\n : 1;\n let numTries = 0;\n let response;\n while (numTries < maxTries) {\n response = await this.requestRaw(info, data);\n // Check if it's an authentication challenge\n if (response &&\n response.message &&\n response.message.statusCode === HttpCodes.Unauthorized) {\n let authenticationHandler;\n for (let i = 0; i < this.handlers.length; i++) {\n if (this.handlers[i].canHandleAuthentication(response)) {\n authenticationHandler = this.handlers[i];\n break;\n }\n }\n if (authenticationHandler) {\n return authenticationHandler.handleAuthentication(this, info, data);\n }\n else {\n // We have received an unauthorized response but have no handlers to handle it.\n // Let the response return to the caller.\n return response;\n }\n }\n let redirectsRemaining = this._maxRedirects;\n while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 &&\n this._allowRedirects &&\n redirectsRemaining > 0) {\n const redirectUrl = response.message.headers['location'];\n if (!redirectUrl) {\n // if there's no location to redirect to, we won't\n break;\n }\n let parsedRedirectUrl = new URL(redirectUrl);\n if (parsedUrl.protocol == 'https:' &&\n parsedUrl.protocol != parsedRedirectUrl.protocol &&\n !this._allowRedirectDowngrade) {\n throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');\n }\n // we need to finish reading the response before reassigning response\n // which will leak the open socket.\n await response.readBody();\n // strip authorization header if redirected to a different hostname\n if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {\n for (let header in headers) {\n // header names are case insensitive\n if (header.toLowerCase() === 'authorization') {\n delete headers[header];\n }\n }\n }\n // let's make the request with the new redirectUrl\n info = this._prepareRequest(verb, parsedRedirectUrl, headers);\n response = await this.requestRaw(info, data);\n redirectsRemaining--;\n }\n if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) {\n // If not a retry code, return immediately instead of retrying\n return response;\n }\n numTries += 1;\n if (numTries < maxTries) {\n await response.readBody();\n await this._performExponentialBackoff(numTries);\n }\n }\n return response;\n }\n /**\n * Needs to be called if keepAlive is set to true in request options.\n */\n dispose() {\n if (this._agent) {\n this._agent.destroy();\n }\n this._disposed = true;\n }\n /**\n * Raw request.\n * @param info\n * @param data\n */\n requestRaw(info, data) {\n return new Promise((resolve, reject) => {\n let callbackForResult = function (err, res) {\n if (err) {\n reject(err);\n }\n resolve(res);\n };\n this.requestRawWithCallback(info, data, callbackForResult);\n });\n }\n /**\n * Raw request with callback.\n * @param info\n * @param data\n * @param onResult\n */\n requestRawWithCallback(info, data, onResult) {\n let socket;\n if (typeof data === 'string') {\n info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');\n }\n let callbackCalled = false;\n let handleResult = (err, res) => {\n if (!callbackCalled) {\n callbackCalled = true;\n onResult(err, res);\n }\n };\n let req = info.httpModule.request(info.options, (msg) => {\n let res = new HttpClientResponse(msg);\n handleResult(null, res);\n });\n req.on('socket', sock => {\n socket = sock;\n });\n // 
If we ever get disconnected, we want the socket to timeout eventually\n req.setTimeout(this._socketTimeout || 3 * 60000, () => {\n if (socket) {\n socket.end();\n }\n handleResult(new Error('Request timeout: ' + info.options.path), null);\n });\n req.on('error', function (err) {\n // err has statusCode property\n // res should have headers\n handleResult(err, null);\n });\n if (data && typeof data === 'string') {\n req.write(data, 'utf8');\n }\n if (data && typeof data !== 'string') {\n data.on('close', function () {\n req.end();\n });\n data.pipe(req);\n }\n else {\n req.end();\n }\n }\n /**\n * Gets an http agent. This function is useful when you need an http agent that handles\n * routing through a proxy server - depending upon the url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com\n */\n getAgent(serverUrl) {\n let parsedUrl = new URL(serverUrl);\n return this._getAgent(parsedUrl);\n }\n _prepareRequest(method, requestUrl, headers) {\n const info = {};\n info.parsedUrl = requestUrl;\n const usingSsl = info.parsedUrl.protocol === 'https:';\n info.httpModule = usingSsl ? https : http;\n const defaultPort = usingSsl ? 443 : 80;\n info.options = {};\n info.options.host = info.parsedUrl.hostname;\n info.options.port = info.parsedUrl.port\n ? parseInt(info.parsedUrl.port)\n : defaultPort;\n info.options.path =\n (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');\n info.options.method = method;\n info.options.headers = this._mergeHeaders(headers);\n if (this.userAgent != null) {\n info.options.headers['user-agent'] = this.userAgent;\n }\n info.options.agent = this._getAgent(info.parsedUrl);\n // gives handlers an opportunity to participate\n if (this.handlers) {\n this.handlers.forEach(handler => {\n handler.prepareRequest(info.options);\n });\n }\n return info;\n }\n _mergeHeaders(headers) {\n const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});\n if (this.requestOptions && this.requestOptions.headers) {\n return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers));\n }\n return lowercaseKeys(headers || {});\n }\n _getExistingOrDefaultHeader(additionalHeaders, header, _default) {\n const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});\n let clientHeader;\n if (this.requestOptions && this.requestOptions.headers) {\n clientHeader = lowercaseKeys(this.requestOptions.headers)[header];\n }\n return additionalHeaders[header] || clientHeader || _default;\n }\n _getAgent(parsedUrl) {\n let agent;\n let proxyUrl = pm.getProxyUrl(parsedUrl);\n let useProxy = proxyUrl && proxyUrl.hostname;\n if (this._keepAlive && useProxy) {\n agent = this._proxyAgent;\n }\n if (this._keepAlive && !useProxy) {\n agent = this._agent;\n }\n // if agent is already assigned use that agent.\n if (!!agent) {\n return agent;\n }\n const usingSsl = parsedUrl.protocol === 'https:';\n let maxSockets = 100;\n if (!!this.requestOptions) {\n maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;\n }\n if (useProxy) {\n // If using proxy, need tunnel\n if (!tunnel) {\n tunnel = require('tunnel');\n }\n const agentOptions = {\n maxSockets: maxSockets,\n keepAlive: this._keepAlive,\n proxy: {\n proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`,\n host: proxyUrl.hostname,\n port: proxyUrl.port\n }\n };\n let tunnelAgent;\n const overHttps = proxyUrl.protocol === 'https:';\n 
if (usingSsl) {\n tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;\n }\n else {\n tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;\n }\n agent = tunnelAgent(agentOptions);\n this._proxyAgent = agent;\n }\n // if reusing agent across request and tunneling agent isn't assigned create a new agent\n if (this._keepAlive && !agent) {\n const options = { keepAlive: this._keepAlive, maxSockets: maxSockets };\n agent = usingSsl ? new https.Agent(options) : new http.Agent(options);\n this._agent = agent;\n }\n // if not using private agent and tunnel agent isn't setup then use global agent\n if (!agent) {\n agent = usingSsl ? https.globalAgent : http.globalAgent;\n }\n if (usingSsl && this._ignoreSslError) {\n // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process\n // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options\n // we have to cast it to any and change it directly\n agent.options = Object.assign(agent.options || {}, {\n rejectUnauthorized: false\n });\n }\n return agent;\n }\n _performExponentialBackoff(retryNumber) {\n retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);\n const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);\n return new Promise(resolve => setTimeout(() => resolve(), ms));\n }\n static dateTimeDeserializer(key, value) {\n if (typeof value === 'string') {\n let a = new Date(value);\n if (!isNaN(a.valueOf())) {\n return a;\n }\n }\n return value;\n }\n async _processResponse(res, options) {\n return new Promise(async (resolve, reject) => {\n const statusCode = res.message.statusCode;\n const response = {\n statusCode: statusCode,\n result: null,\n headers: {}\n };\n // not found leads to null obj returned\n if (statusCode == HttpCodes.NotFound) {\n resolve(response);\n }\n let obj;\n let contents;\n // get the result from the body\n try {\n contents = await res.readBody();\n if (contents && contents.length > 0) {\n if (options && options.deserializeDates) {\n obj = JSON.parse(contents, HttpClient.dateTimeDeserializer);\n }\n else {\n obj = JSON.parse(contents);\n }\n response.result = obj;\n }\n response.headers = res.message.headers;\n }\n catch (err) {\n // Invalid resource (contents not json); leaving result obj null\n }\n // note that 3xx redirects are handled by the http layer.\n if (statusCode > 299) {\n let msg;\n // if exception/error in body, attempt to get better error\n if (obj && obj.message) {\n msg = obj.message;\n }\n else if (contents && contents.length > 0) {\n // it may be the case that the exception is in the body message as string\n msg = contents;\n }\n else {\n msg = 'Failed request: (' + statusCode + ')';\n }\n let err = new HttpClientError(msg, statusCode);\n err.result = response.result;\n reject(err);\n }\n else {\n resolve(response);\n }\n });\n }\n}\nexports.HttpClient = HttpClient;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nfunction getProxyUrl(reqUrl) {\n let usingSsl = reqUrl.protocol === 'https:';\n let proxyUrl;\n if (checkBypass(reqUrl)) {\n return proxyUrl;\n }\n let proxyVar;\n if (usingSsl) {\n proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY'];\n }\n else {\n proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY'];\n }\n if (proxyVar) {\n proxyUrl = new URL(proxyVar);\n }\n return proxyUrl;\n}\nexports.getProxyUrl = getProxyUrl;\nfunction checkBypass(reqUrl) {\n if (!reqUrl.hostname) {\n return false;\n }\n let noProxy = 
process.env['no_proxy'] || process.env['NO_PROXY'] || '';\n if (!noProxy) {\n return false;\n }\n // Determine the request port\n let reqPort;\n if (reqUrl.port) {\n reqPort = Number(reqUrl.port);\n }\n else if (reqUrl.protocol === 'http:') {\n reqPort = 80;\n }\n else if (reqUrl.protocol === 'https:') {\n reqPort = 443;\n }\n // Format the request hostname and hostname with port\n let upperReqHosts = [reqUrl.hostname.toUpperCase()];\n if (typeof reqPort === 'number') {\n upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);\n }\n // Compare request host against noproxy\n for (let upperNoProxyItem of noProxy\n .split(',')\n .map(x => x.trim().toUpperCase())\n .filter(x => x)) {\n if (upperReqHosts.some(x => x === upperNoProxyItem)) {\n return true;\n }\n }\n return false;\n}\nexports.checkBypass = checkBypass;\n","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar _a;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst assert_1 = require(\"assert\");\nconst fs = require(\"fs\");\nconst path = require(\"path\");\n_a = fs.promises, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink;\nexports.IS_WINDOWS = process.platform === 'win32';\nfunction exists(fsPath) {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n yield exports.stat(fsPath);\n }\n catch (err) {\n if (err.code === 'ENOENT') {\n return false;\n }\n throw err;\n }\n return true;\n });\n}\nexports.exists = exists;\nfunction isDirectory(fsPath, useStat = false) {\n return __awaiter(this, void 0, void 0, function* () {\n const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath);\n return stats.isDirectory();\n });\n}\nexports.isDirectory = isDirectory;\n/**\n * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like:\n * \\, \\hello, \\\\hello\\share, C:, and C:\\hello (and corresponding alternate separator cases).\n */\nfunction isRooted(p) {\n p = normalizeSeparators(p);\n if (!p) {\n throw new Error('isRooted() parameter \"p\" cannot be empty');\n }\n if (exports.IS_WINDOWS) {\n return (p.startsWith('\\\\') || /^[A-Z]:/i.test(p) // e.g. \\ or \\hello or \\\\hello\n ); // e.g. 
C: or C:\\hello\n }\n return p.startsWith('/');\n}\nexports.isRooted = isRooted;\n/**\n * Recursively create a directory at `fsPath`.\n *\n * This implementation is optimistic, meaning it attempts to create the full\n * path first, and backs up the path stack from there.\n *\n * @param fsPath The path to create\n * @param maxDepth The maximum recursion depth\n * @param depth The current recursion depth\n */\nfunction mkdirP(fsPath, maxDepth = 1000, depth = 1) {\n return __awaiter(this, void 0, void 0, function* () {\n assert_1.ok(fsPath, 'a path argument must be provided');\n fsPath = path.resolve(fsPath);\n if (depth >= maxDepth)\n return exports.mkdir(fsPath);\n try {\n yield exports.mkdir(fsPath);\n return;\n }\n catch (err) {\n switch (err.code) {\n case 'ENOENT': {\n yield mkdirP(path.dirname(fsPath), maxDepth, depth + 1);\n yield exports.mkdir(fsPath);\n return;\n }\n default: {\n let stats;\n try {\n stats = yield exports.stat(fsPath);\n }\n catch (err2) {\n throw err;\n }\n if (!stats.isDirectory())\n throw err;\n }\n }\n }\n });\n}\nexports.mkdirP = mkdirP;\n/**\n * Best effort attempt to determine whether a file exists and is executable.\n * @param filePath file path to check\n * @param extensions additional file extensions to try\n * @return if file exists and is executable, returns the file path. otherwise empty string.\n */\nfunction tryGetExecutablePath(filePath, extensions) {\n return __awaiter(this, void 0, void 0, function* () {\n let stats = undefined;\n try {\n // test file exists\n stats = yield exports.stat(filePath);\n }\n catch (err) {\n if (err.code !== 'ENOENT') {\n // eslint-disable-next-line no-console\n console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);\n }\n }\n if (stats && stats.isFile()) {\n if (exports.IS_WINDOWS) {\n // on Windows, test for valid extension\n const upperExt = path.extname(filePath).toUpperCase();\n if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) {\n return filePath;\n }\n }\n else {\n if (isUnixExecutable(stats)) {\n return filePath;\n }\n }\n }\n // try each extension\n const originalFilePath = filePath;\n for (const extension of extensions) {\n filePath = originalFilePath + extension;\n stats = undefined;\n try {\n stats = yield exports.stat(filePath);\n }\n catch (err) {\n if (err.code !== 'ENOENT') {\n // eslint-disable-next-line no-console\n console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);\n }\n }\n if (stats && stats.isFile()) {\n if (exports.IS_WINDOWS) {\n // preserve the case of the actual file (since an extension was appended)\n try {\n const directory = path.dirname(filePath);\n const upperName = path.basename(filePath).toUpperCase();\n for (const actualName of yield exports.readdir(directory)) {\n if (upperName === actualName.toUpperCase()) {\n filePath = path.join(directory, actualName);\n break;\n }\n }\n }\n catch (err) {\n // eslint-disable-next-line no-console\n console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`);\n }\n return filePath;\n }\n else {\n if (isUnixExecutable(stats)) {\n return filePath;\n }\n }\n }\n }\n return '';\n });\n}\nexports.tryGetExecutablePath = tryGetExecutablePath;\nfunction normalizeSeparators(p) {\n p = p || '';\n if (exports.IS_WINDOWS) {\n // convert slashes on Windows\n p = p.replace(/\\//g, '\\\\');\n // remove redundant slashes\n return p.replace(/\\\\\\\\+/g, '\\\\');\n }\n // remove redundant slashes\n 
return p.replace(/\\/\\/+/g, '/');\n}\n// on Mac/Linux, test the execute bit\n// R W X R W X R W X\n// 256 128 64 32 16 8 4 2 1\nfunction isUnixExecutable(stats) {\n return ((stats.mode & 1) > 0 ||\n ((stats.mode & 8) > 0 && stats.gid === process.getgid()) ||\n ((stats.mode & 64) > 0 && stats.uid === process.getuid()));\n}\n//# sourceMappingURL=io-util.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst childProcess = require(\"child_process\");\nconst path = require(\"path\");\nconst util_1 = require(\"util\");\nconst ioUtil = require(\"./io-util\");\nconst exec = util_1.promisify(childProcess.exec);\n/**\n * Copies a file or folder.\n * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js\n *\n * @param source source path\n * @param dest destination path\n * @param options optional. See CopyOptions.\n */\nfunction cp(source, dest, options = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const { force, recursive } = readCopyOptions(options);\n const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null;\n // Dest is an existing file, but not forcing\n if (destStat && destStat.isFile() && !force) {\n return;\n }\n // If dest is an existing directory, should copy inside.\n const newDest = destStat && destStat.isDirectory()\n ? path.join(dest, path.basename(source))\n : dest;\n if (!(yield ioUtil.exists(source))) {\n throw new Error(`no such file or directory: ${source}`);\n }\n const sourceStat = yield ioUtil.stat(source);\n if (sourceStat.isDirectory()) {\n if (!recursive) {\n throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`);\n }\n else {\n yield cpDirRecursive(source, newDest, 0, force);\n }\n }\n else {\n if (path.relative(source, newDest) === '') {\n // a file cannot be copied to itself\n throw new Error(`'${newDest}' and '${source}' are the same file`);\n }\n yield copyFile(source, newDest, force);\n }\n });\n}\nexports.cp = cp;\n/**\n * Moves a path.\n *\n * @param source source path\n * @param dest destination path\n * @param options optional. 
See MoveOptions.\n */\nfunction mv(source, dest, options = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n if (yield ioUtil.exists(dest)) {\n let destExists = true;\n if (yield ioUtil.isDirectory(dest)) {\n // If dest is directory copy src into dest\n dest = path.join(dest, path.basename(source));\n destExists = yield ioUtil.exists(dest);\n }\n if (destExists) {\n if (options.force == null || options.force) {\n yield rmRF(dest);\n }\n else {\n throw new Error('Destination already exists');\n }\n }\n }\n yield mkdirP(path.dirname(dest));\n yield ioUtil.rename(source, dest);\n });\n}\nexports.mv = mv;\n/**\n * Remove a path recursively with force\n *\n * @param inputPath path to remove\n */\nfunction rmRF(inputPath) {\n return __awaiter(this, void 0, void 0, function* () {\n if (ioUtil.IS_WINDOWS) {\n // Node doesn't provide a delete operation, only an unlink function. This means that if the file is being used by another\n // program (e.g. antivirus), it won't be deleted. To address this, we shell out the work to rd/del.\n try {\n if (yield ioUtil.isDirectory(inputPath, true)) {\n yield exec(`rd /s /q \"${inputPath}\"`);\n }\n else {\n yield exec(`del /f /a \"${inputPath}\"`);\n }\n }\n catch (err) {\n // if you try to delete a file that doesn't exist, desired result is achieved\n // other errors are valid\n if (err.code !== 'ENOENT')\n throw err;\n }\n // Shelling out fails to remove a symlink folder with missing source, this unlink catches that\n try {\n yield ioUtil.unlink(inputPath);\n }\n catch (err) {\n // if you try to delete a file that doesn't exist, desired result is achieved\n // other errors are valid\n if (err.code !== 'ENOENT')\n throw err;\n }\n }\n else {\n let isDir = false;\n try {\n isDir = yield ioUtil.isDirectory(inputPath);\n }\n catch (err) {\n // if you try to delete a file that doesn't exist, desired result is achieved\n // other errors are valid\n if (err.code !== 'ENOENT')\n throw err;\n return;\n }\n if (isDir) {\n yield exec(`rm -rf \"${inputPath}\"`);\n }\n else {\n yield ioUtil.unlink(inputPath);\n }\n }\n });\n}\nexports.rmRF = rmRF;\n/**\n * Make a directory. Creates the full path with folders in between\n * Will throw if it fails\n *\n * @param fsPath path to create\n * @returns Promise\n */\nfunction mkdirP(fsPath) {\n return __awaiter(this, void 0, void 0, function* () {\n yield ioUtil.mkdirP(fsPath);\n });\n}\nexports.mkdirP = mkdirP;\n/**\n * Returns path of a tool had the tool actually been invoked. Resolves via paths.\n * If you check and the tool does not exist, it will throw.\n *\n * @param tool name of the tool\n * @param check whether to check if tool exists\n * @returns Promise path to tool\n */\nfunction which(tool, check) {\n return __awaiter(this, void 0, void 0, function* () {\n if (!tool) {\n throw new Error(\"parameter 'tool' is required\");\n }\n // recursive when check=true\n if (check) {\n const result = yield which(tool, false);\n if (!result) {\n if (ioUtil.IS_WINDOWS) {\n throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`);\n }\n else {\n throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. 
Also check the file mode to verify the file is executable.`);\n }\n }\n }\n try {\n // build the list of extensions to try\n const extensions = [];\n if (ioUtil.IS_WINDOWS && process.env.PATHEXT) {\n for (const extension of process.env.PATHEXT.split(path.delimiter)) {\n if (extension) {\n extensions.push(extension);\n }\n }\n }\n // if it's rooted, return it if exists. otherwise return empty.\n if (ioUtil.isRooted(tool)) {\n const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions);\n if (filePath) {\n return filePath;\n }\n return '';\n }\n // if any path separators, return empty\n if (tool.includes('/') || (ioUtil.IS_WINDOWS && tool.includes('\\\\'))) {\n return '';\n }\n // build the list of directories\n //\n // Note, technically \"where\" checks the current directory on Windows. From a toolkit perspective,\n // it feels like we should not do this. Checking the current directory seems like more of a use\n // case of a shell, and the which() function exposed by the toolkit should strive for consistency\n // across platforms.\n const directories = [];\n if (process.env.PATH) {\n for (const p of process.env.PATH.split(path.delimiter)) {\n if (p) {\n directories.push(p);\n }\n }\n }\n // return the first match\n for (const directory of directories) {\n const filePath = yield ioUtil.tryGetExecutablePath(directory + path.sep + tool, extensions);\n if (filePath) {\n return filePath;\n }\n }\n return '';\n }\n catch (err) {\n throw new Error(`which failed with message ${err.message}`);\n }\n });\n}\nexports.which = which;\nfunction readCopyOptions(options) {\n const force = options.force == null ? true : options.force;\n const recursive = Boolean(options.recursive);\n return { force, recursive };\n}\nfunction cpDirRecursive(sourceDir, destDir, currentDepth, force) {\n return __awaiter(this, void 0, void 0, function* () {\n // Ensure there is not a run away recursive copy\n if (currentDepth >= 255)\n return;\n currentDepth++;\n yield mkdirP(destDir);\n const files = yield ioUtil.readdir(sourceDir);\n for (const fileName of files) {\n const srcFile = `${sourceDir}/${fileName}`;\n const destFile = `${destDir}/${fileName}`;\n const srcFileStat = yield ioUtil.lstat(srcFile);\n if (srcFileStat.isDirectory()) {\n // Recurse\n yield cpDirRecursive(srcFile, destFile, currentDepth, force);\n }\n else {\n yield copyFile(srcFile, destFile, force);\n }\n }\n // Change the mode for the newly created directory\n yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode);\n });\n}\n// Buffered file copy\nfunction copyFile(srcFile, destFile, force) {\n return __awaiter(this, void 0, void 0, function* () {\n if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) {\n // unlink/re-link it\n try {\n yield ioUtil.lstat(destFile);\n yield ioUtil.unlink(destFile);\n }\n catch (e) {\n // Try to override file permission\n if (e.code === 'EPERM') {\n yield ioUtil.chmod(destFile, '0666');\n yield ioUtil.unlink(destFile);\n }\n // other errors = it doesn't exist, no work to do\n }\n // Copy over symlink\n const symlinkFull = yield ioUtil.readlink(srcFile);\n yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 
'junction' : null);\n }\n else if (!(yield ioUtil.exists(destFile)) || force) {\n yield ioUtil.copyFile(srcFile, destFile);\n }\n });\n}\n//# sourceMappingURL=io.js.map","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0;\r\nconst fs = require(\"fs\");\r\nexports.FILE_SYSTEM_ADAPTER = {\r\n lstat: fs.lstat,\r\n stat: fs.stat,\r\n lstatSync: fs.lstatSync,\r\n statSync: fs.statSync,\r\n readdir: fs.readdir,\r\n readdirSync: fs.readdirSync\r\n};\r\nfunction createFileSystemAdapter(fsMethods) {\r\n if (fsMethods === undefined) {\r\n return exports.FILE_SYSTEM_ADAPTER;\r\n }\r\n return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods);\r\n}\r\nexports.createFileSystemAdapter = createFileSystemAdapter;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.IS_SUPPORT_READDIR_WITH_FILE_TYPES = void 0;\r\nconst NODE_PROCESS_VERSION_PARTS = process.versions.node.split('.');\r\nconst MAJOR_VERSION = parseInt(NODE_PROCESS_VERSION_PARTS[0], 10);\r\nconst MINOR_VERSION = parseInt(NODE_PROCESS_VERSION_PARTS[1], 10);\r\nconst SUPPORTED_MAJOR_VERSION = 10;\r\nconst SUPPORTED_MINOR_VERSION = 10;\r\nconst IS_MATCHED_BY_MAJOR = MAJOR_VERSION > SUPPORTED_MAJOR_VERSION;\r\nconst IS_MATCHED_BY_MAJOR_AND_MINOR = MAJOR_VERSION === SUPPORTED_MAJOR_VERSION && MINOR_VERSION >= SUPPORTED_MINOR_VERSION;\r\n/**\r\n * IS `true` for Node.js 10.10 and greater.\r\n */\r\nexports.IS_SUPPORT_READDIR_WITH_FILE_TYPES = IS_MATCHED_BY_MAJOR || IS_MATCHED_BY_MAJOR_AND_MINOR;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.Settings = exports.scandirSync = exports.scandir = void 0;\r\nconst async = require(\"./providers/async\");\r\nconst sync = require(\"./providers/sync\");\r\nconst settings_1 = require(\"./settings\");\r\nexports.Settings = settings_1.default;\r\nfunction scandir(path, optionsOrSettingsOrCallback, callback) {\r\n if (typeof optionsOrSettingsOrCallback === 'function') {\r\n return async.read(path, getSettings(), optionsOrSettingsOrCallback);\r\n }\r\n async.read(path, getSettings(optionsOrSettingsOrCallback), callback);\r\n}\r\nexports.scandir = scandir;\r\nfunction scandirSync(path, optionsOrSettings) {\r\n const settings = getSettings(optionsOrSettings);\r\n return sync.read(path, settings);\r\n}\r\nexports.scandirSync = scandirSync;\r\nfunction getSettings(settingsOrOptions = {}) {\r\n if (settingsOrOptions instanceof settings_1.default) {\r\n return settingsOrOptions;\r\n }\r\n return new settings_1.default(settingsOrOptions);\r\n}\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.readdir = exports.readdirWithFileTypes = exports.read = void 0;\r\nconst fsStat = require(\"@nodelib/fs.stat\");\r\nconst rpl = require(\"run-parallel\");\r\nconst constants_1 = require(\"../constants\");\r\nconst utils = require(\"../utils\");\r\nconst common = require(\"./common\");\r\nfunction read(directory, settings, callback) {\r\n if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) {\r\n return readdirWithFileTypes(directory, settings, callback);\r\n }\r\n return readdir(directory, settings, callback);\r\n}\r\nexports.read = read;\r\nfunction readdirWithFileTypes(directory, settings, callback) {\r\n settings.fs.readdir(directory, { withFileTypes: true }, (readdirError, dirents) => {\r\n if (readdirError !== null) {\r\n return 
callFailureCallback(callback, readdirError);\r\n }\r\n const entries = dirents.map((dirent) => ({\r\n dirent,\r\n name: dirent.name,\r\n path: common.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator)\r\n }));\r\n if (!settings.followSymbolicLinks) {\r\n return callSuccessCallback(callback, entries);\r\n }\r\n const tasks = entries.map((entry) => makeRplTaskEntry(entry, settings));\r\n rpl(tasks, (rplError, rplEntries) => {\r\n if (rplError !== null) {\r\n return callFailureCallback(callback, rplError);\r\n }\r\n callSuccessCallback(callback, rplEntries);\r\n });\r\n });\r\n}\r\nexports.readdirWithFileTypes = readdirWithFileTypes;\r\nfunction makeRplTaskEntry(entry, settings) {\r\n return (done) => {\r\n if (!entry.dirent.isSymbolicLink()) {\r\n return done(null, entry);\r\n }\r\n settings.fs.stat(entry.path, (statError, stats) => {\r\n if (statError !== null) {\r\n if (settings.throwErrorOnBrokenSymbolicLink) {\r\n return done(statError);\r\n }\r\n return done(null, entry);\r\n }\r\n entry.dirent = utils.fs.createDirentFromStats(entry.name, stats);\r\n return done(null, entry);\r\n });\r\n };\r\n}\r\nfunction readdir(directory, settings, callback) {\r\n settings.fs.readdir(directory, (readdirError, names) => {\r\n if (readdirError !== null) {\r\n return callFailureCallback(callback, readdirError);\r\n }\r\n const filepaths = names.map((name) => common.joinPathSegments(directory, name, settings.pathSegmentSeparator));\r\n const tasks = filepaths.map((filepath) => {\r\n return (done) => fsStat.stat(filepath, settings.fsStatSettings, done);\r\n });\r\n rpl(tasks, (rplError, results) => {\r\n if (rplError !== null) {\r\n return callFailureCallback(callback, rplError);\r\n }\r\n const entries = [];\r\n names.forEach((name, index) => {\r\n const stats = results[index];\r\n const entry = {\r\n name,\r\n path: filepaths[index],\r\n dirent: utils.fs.createDirentFromStats(name, stats)\r\n };\r\n if (settings.stats) {\r\n entry.stats = stats;\r\n }\r\n entries.push(entry);\r\n });\r\n callSuccessCallback(callback, entries);\r\n });\r\n });\r\n}\r\nexports.readdir = readdir;\r\nfunction callFailureCallback(callback, error) {\r\n callback(error);\r\n}\r\nfunction callSuccessCallback(callback, result) {\r\n callback(null, result);\r\n}\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.joinPathSegments = void 0;\r\nfunction joinPathSegments(a, b, separator) {\r\n /**\r\n * The correct handling of cases when the first segment is a root (`/`, `C:/`) or UNC path (`//?/C:/`).\r\n */\r\n if (a.endsWith(separator)) {\r\n return a + b;\r\n }\r\n return a + separator + b;\r\n}\r\nexports.joinPathSegments = joinPathSegments;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.readdir = exports.readdirWithFileTypes = exports.read = void 0;\r\nconst fsStat = require(\"@nodelib/fs.stat\");\r\nconst constants_1 = require(\"../constants\");\r\nconst utils = require(\"../utils\");\r\nconst common = require(\"./common\");\r\nfunction read(directory, settings) {\r\n if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) {\r\n return readdirWithFileTypes(directory, settings);\r\n }\r\n return readdir(directory, settings);\r\n}\r\nexports.read = read;\r\nfunction readdirWithFileTypes(directory, settings) {\r\n const dirents = settings.fs.readdirSync(directory, { withFileTypes: true });\r\n return dirents.map((dirent) => {\r\n const entry = {\r\n dirent,\r\n name: dirent.name,\r\n path: 
common.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator)\r\n };\r\n if (entry.dirent.isSymbolicLink() && settings.followSymbolicLinks) {\r\n try {\r\n const stats = settings.fs.statSync(entry.path);\r\n entry.dirent = utils.fs.createDirentFromStats(entry.name, stats);\r\n }\r\n catch (error) {\r\n if (settings.throwErrorOnBrokenSymbolicLink) {\r\n throw error;\r\n }\r\n }\r\n }\r\n return entry;\r\n });\r\n}\r\nexports.readdirWithFileTypes = readdirWithFileTypes;\r\nfunction readdir(directory, settings) {\r\n const names = settings.fs.readdirSync(directory);\r\n return names.map((name) => {\r\n const entryPath = common.joinPathSegments(directory, name, settings.pathSegmentSeparator);\r\n const stats = fsStat.statSync(entryPath, settings.fsStatSettings);\r\n const entry = {\r\n name,\r\n path: entryPath,\r\n dirent: utils.fs.createDirentFromStats(name, stats)\r\n };\r\n if (settings.stats) {\r\n entry.stats = stats;\r\n }\r\n return entry;\r\n });\r\n}\r\nexports.readdir = readdir;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst path = require(\"path\");\r\nconst fsStat = require(\"@nodelib/fs.stat\");\r\nconst fs = require(\"./adapters/fs\");\r\nclass Settings {\r\n constructor(_options = {}) {\r\n this._options = _options;\r\n this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, false);\r\n this.fs = fs.createFileSystemAdapter(this._options.fs);\r\n this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path.sep);\r\n this.stats = this._getValue(this._options.stats, false);\r\n this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true);\r\n this.fsStatSettings = new fsStat.Settings({\r\n followSymbolicLink: this.followSymbolicLinks,\r\n fs: this.fs,\r\n throwErrorOnBrokenSymbolicLink: this.throwErrorOnBrokenSymbolicLink\r\n });\r\n }\r\n _getValue(option, value) {\r\n return option !== null && option !== void 0 ? 
option : value;\r\n }\r\n}\r\nexports.default = Settings;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.createDirentFromStats = void 0;\r\nclass DirentFromStats {\r\n constructor(name, stats) {\r\n this.name = name;\r\n this.isBlockDevice = stats.isBlockDevice.bind(stats);\r\n this.isCharacterDevice = stats.isCharacterDevice.bind(stats);\r\n this.isDirectory = stats.isDirectory.bind(stats);\r\n this.isFIFO = stats.isFIFO.bind(stats);\r\n this.isFile = stats.isFile.bind(stats);\r\n this.isSocket = stats.isSocket.bind(stats);\r\n this.isSymbolicLink = stats.isSymbolicLink.bind(stats);\r\n }\r\n}\r\nfunction createDirentFromStats(name, stats) {\r\n return new DirentFromStats(name, stats);\r\n}\r\nexports.createDirentFromStats = createDirentFromStats;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.fs = void 0;\r\nconst fs = require(\"./fs\");\r\nexports.fs = fs;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0;\r\nconst fs = require(\"fs\");\r\nexports.FILE_SYSTEM_ADAPTER = {\r\n lstat: fs.lstat,\r\n stat: fs.stat,\r\n lstatSync: fs.lstatSync,\r\n statSync: fs.statSync\r\n};\r\nfunction createFileSystemAdapter(fsMethods) {\r\n if (fsMethods === undefined) {\r\n return exports.FILE_SYSTEM_ADAPTER;\r\n }\r\n return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods);\r\n}\r\nexports.createFileSystemAdapter = createFileSystemAdapter;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.statSync = exports.stat = exports.Settings = void 0;\r\nconst async = require(\"./providers/async\");\r\nconst sync = require(\"./providers/sync\");\r\nconst settings_1 = require(\"./settings\");\r\nexports.Settings = settings_1.default;\r\nfunction stat(path, optionsOrSettingsOrCallback, callback) {\r\n if (typeof optionsOrSettingsOrCallback === 'function') {\r\n return async.read(path, getSettings(), optionsOrSettingsOrCallback);\r\n }\r\n async.read(path, getSettings(optionsOrSettingsOrCallback), callback);\r\n}\r\nexports.stat = stat;\r\nfunction statSync(path, optionsOrSettings) {\r\n const settings = getSettings(optionsOrSettings);\r\n return sync.read(path, settings);\r\n}\r\nexports.statSync = statSync;\r\nfunction getSettings(settingsOrOptions = {}) {\r\n if (settingsOrOptions instanceof settings_1.default) {\r\n return settingsOrOptions;\r\n }\r\n return new settings_1.default(settingsOrOptions);\r\n}\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.read = void 0;\r\nfunction read(path, settings, callback) {\r\n settings.fs.lstat(path, (lstatError, lstat) => {\r\n if (lstatError !== null) {\r\n return callFailureCallback(callback, lstatError);\r\n }\r\n if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) {\r\n return callSuccessCallback(callback, lstat);\r\n }\r\n settings.fs.stat(path, (statError, stat) => {\r\n if (statError !== null) {\r\n if (settings.throwErrorOnBrokenSymbolicLink) {\r\n return callFailureCallback(callback, statError);\r\n }\r\n return callSuccessCallback(callback, lstat);\r\n }\r\n if (settings.markSymbolicLink) {\r\n stat.isSymbolicLink = () => true;\r\n }\r\n callSuccessCallback(callback, stat);\r\n });\r\n });\r\n}\r\nexports.read = read;\r\nfunction callFailureCallback(callback, error) {\r\n callback(error);\r\n}\r\nfunction 
callSuccessCallback(callback, result) {\r\n callback(null, result);\r\n}\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.read = void 0;\r\nfunction read(path, settings) {\r\n const lstat = settings.fs.lstatSync(path);\r\n if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) {\r\n return lstat;\r\n }\r\n try {\r\n const stat = settings.fs.statSync(path);\r\n if (settings.markSymbolicLink) {\r\n stat.isSymbolicLink = () => true;\r\n }\r\n return stat;\r\n }\r\n catch (error) {\r\n if (!settings.throwErrorOnBrokenSymbolicLink) {\r\n return lstat;\r\n }\r\n throw error;\r\n }\r\n}\r\nexports.read = read;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst fs = require(\"./adapters/fs\");\r\nclass Settings {\r\n constructor(_options = {}) {\r\n this._options = _options;\r\n this.followSymbolicLink = this._getValue(this._options.followSymbolicLink, true);\r\n this.fs = fs.createFileSystemAdapter(this._options.fs);\r\n this.markSymbolicLink = this._getValue(this._options.markSymbolicLink, false);\r\n this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true);\r\n }\r\n _getValue(option, value) {\r\n return option !== null && option !== void 0 ? option : value;\r\n }\r\n}\r\nexports.default = Settings;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.Settings = exports.walkStream = exports.walkSync = exports.walk = void 0;\r\nconst async_1 = require(\"./providers/async\");\r\nconst stream_1 = require(\"./providers/stream\");\r\nconst sync_1 = require(\"./providers/sync\");\r\nconst settings_1 = require(\"./settings\");\r\nexports.Settings = settings_1.default;\r\nfunction walk(directory, optionsOrSettingsOrCallback, callback) {\r\n if (typeof optionsOrSettingsOrCallback === 'function') {\r\n return new async_1.default(directory, getSettings()).read(optionsOrSettingsOrCallback);\r\n }\r\n new async_1.default(directory, getSettings(optionsOrSettingsOrCallback)).read(callback);\r\n}\r\nexports.walk = walk;\r\nfunction walkSync(directory, optionsOrSettings) {\r\n const settings = getSettings(optionsOrSettings);\r\n const provider = new sync_1.default(directory, settings);\r\n return provider.read();\r\n}\r\nexports.walkSync = walkSync;\r\nfunction walkStream(directory, optionsOrSettings) {\r\n const settings = getSettings(optionsOrSettings);\r\n const provider = new stream_1.default(directory, settings);\r\n return provider.read();\r\n}\r\nexports.walkStream = walkStream;\r\nfunction getSettings(settingsOrOptions = {}) {\r\n if (settingsOrOptions instanceof settings_1.default) {\r\n return settingsOrOptions;\r\n }\r\n return new settings_1.default(settingsOrOptions);\r\n}\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst async_1 = require(\"../readers/async\");\r\nclass AsyncProvider {\r\n constructor(_root, _settings) {\r\n this._root = _root;\r\n this._settings = _settings;\r\n this._reader = new async_1.default(this._root, this._settings);\r\n this._storage = new Set();\r\n }\r\n read(callback) {\r\n this._reader.onError((error) => {\r\n callFailureCallback(callback, error);\r\n });\r\n this._reader.onEntry((entry) => {\r\n this._storage.add(entry);\r\n });\r\n this._reader.onEnd(() => {\r\n callSuccessCallback(callback, [...this._storage]);\r\n });\r\n this._reader.read();\r\n }\r\n}\r\nexports.default = AsyncProvider;\r\nfunction 
callFailureCallback(callback, error) {\r\n callback(error);\r\n}\r\nfunction callSuccessCallback(callback, entries) {\r\n callback(null, entries);\r\n}\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst stream_1 = require(\"stream\");\r\nconst async_1 = require(\"../readers/async\");\r\nclass StreamProvider {\r\n constructor(_root, _settings) {\r\n this._root = _root;\r\n this._settings = _settings;\r\n this._reader = new async_1.default(this._root, this._settings);\r\n this._stream = new stream_1.Readable({\r\n objectMode: true,\r\n read: () => { },\r\n destroy: () => {\r\n if (!this._reader.isDestroyed) {\r\n this._reader.destroy();\r\n }\r\n }\r\n });\r\n }\r\n read() {\r\n this._reader.onError((error) => {\r\n this._stream.emit('error', error);\r\n });\r\n this._reader.onEntry((entry) => {\r\n this._stream.push(entry);\r\n });\r\n this._reader.onEnd(() => {\r\n this._stream.push(null);\r\n });\r\n this._reader.read();\r\n return this._stream;\r\n }\r\n}\r\nexports.default = StreamProvider;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst sync_1 = require(\"../readers/sync\");\r\nclass SyncProvider {\r\n constructor(_root, _settings) {\r\n this._root = _root;\r\n this._settings = _settings;\r\n this._reader = new sync_1.default(this._root, this._settings);\r\n }\r\n read() {\r\n return this._reader.read();\r\n }\r\n}\r\nexports.default = SyncProvider;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst events_1 = require(\"events\");\r\nconst fsScandir = require(\"@nodelib/fs.scandir\");\r\nconst fastq = require(\"fastq\");\r\nconst common = require(\"./common\");\r\nconst reader_1 = require(\"./reader\");\r\nclass AsyncReader extends reader_1.default {\r\n constructor(_root, _settings) {\r\n super(_root, _settings);\r\n this._settings = _settings;\r\n this._scandir = fsScandir.scandir;\r\n this._emitter = new events_1.EventEmitter();\r\n this._queue = fastq(this._worker.bind(this), this._settings.concurrency);\r\n this._isFatalError = false;\r\n this._isDestroyed = false;\r\n this._queue.drain = () => {\r\n if (!this._isFatalError) {\r\n this._emitter.emit('end');\r\n }\r\n };\r\n }\r\n read() {\r\n this._isFatalError = false;\r\n this._isDestroyed = false;\r\n setImmediate(() => {\r\n this._pushToQueue(this._root, this._settings.basePath);\r\n });\r\n return this._emitter;\r\n }\r\n get isDestroyed() {\r\n return this._isDestroyed;\r\n }\r\n destroy() {\r\n if (this._isDestroyed) {\r\n throw new Error('The reader is already destroyed');\r\n }\r\n this._isDestroyed = true;\r\n this._queue.killAndDrain();\r\n }\r\n onEntry(callback) {\r\n this._emitter.on('entry', callback);\r\n }\r\n onError(callback) {\r\n this._emitter.once('error', callback);\r\n }\r\n onEnd(callback) {\r\n this._emitter.once('end', callback);\r\n }\r\n _pushToQueue(directory, base) {\r\n const queueItem = { directory, base };\r\n this._queue.push(queueItem, (error) => {\r\n if (error !== null) {\r\n this._handleError(error);\r\n }\r\n });\r\n }\r\n _worker(item, done) {\r\n this._scandir(item.directory, this._settings.fsScandirSettings, (error, entries) => {\r\n if (error !== null) {\r\n return done(error, undefined);\r\n }\r\n for (const entry of entries) {\r\n this._handleEntry(entry, item.base);\r\n }\r\n done(null, undefined);\r\n });\r\n }\r\n _handleError(error) {\r\n if (this._isDestroyed || !common.isFatalError(this._settings, error)) {\r\n return;\r\n }\r\n 
this._isFatalError = true;\r\n this._isDestroyed = true;\r\n this._emitter.emit('error', error);\r\n }\r\n _handleEntry(entry, base) {\r\n if (this._isDestroyed || this._isFatalError) {\r\n return;\r\n }\r\n const fullpath = entry.path;\r\n if (base !== undefined) {\r\n entry.path = common.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator);\r\n }\r\n if (common.isAppliedFilter(this._settings.entryFilter, entry)) {\r\n this._emitEntry(entry);\r\n }\r\n if (entry.dirent.isDirectory() && common.isAppliedFilter(this._settings.deepFilter, entry)) {\r\n this._pushToQueue(fullpath, entry.path);\r\n }\r\n }\r\n _emitEntry(entry) {\r\n this._emitter.emit('entry', entry);\r\n }\r\n}\r\nexports.default = AsyncReader;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.joinPathSegments = exports.replacePathSegmentSeparator = exports.isAppliedFilter = exports.isFatalError = void 0;\r\nfunction isFatalError(settings, error) {\r\n if (settings.errorFilter === null) {\r\n return true;\r\n }\r\n return !settings.errorFilter(error);\r\n}\r\nexports.isFatalError = isFatalError;\r\nfunction isAppliedFilter(filter, value) {\r\n return filter === null || filter(value);\r\n}\r\nexports.isAppliedFilter = isAppliedFilter;\r\nfunction replacePathSegmentSeparator(filepath, separator) {\r\n return filepath.split(/[/\\\\]/).join(separator);\r\n}\r\nexports.replacePathSegmentSeparator = replacePathSegmentSeparator;\r\nfunction joinPathSegments(a, b, separator) {\r\n if (a === '') {\r\n return b;\r\n }\r\n /**\r\n * The correct handling of cases when the first segment is a root (`/`, `C:/`) or UNC path (`//?/C:/`).\r\n */\r\n if (a.endsWith(separator)) {\r\n return a + b;\r\n }\r\n return a + separator + b;\r\n}\r\nexports.joinPathSegments = joinPathSegments;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst common = require(\"./common\");\r\nclass Reader {\r\n constructor(_root, _settings) {\r\n this._root = _root;\r\n this._settings = _settings;\r\n this._root = common.replacePathSegmentSeparator(_root, _settings.pathSegmentSeparator);\r\n }\r\n}\r\nexports.default = Reader;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst fsScandir = require(\"@nodelib/fs.scandir\");\r\nconst common = require(\"./common\");\r\nconst reader_1 = require(\"./reader\");\r\nclass SyncReader extends reader_1.default {\r\n constructor() {\r\n super(...arguments);\r\n this._scandir = fsScandir.scandirSync;\r\n this._storage = new Set();\r\n this._queue = new Set();\r\n }\r\n read() {\r\n this._pushToQueue(this._root, this._settings.basePath);\r\n this._handleQueue();\r\n return [...this._storage];\r\n }\r\n _pushToQueue(directory, base) {\r\n this._queue.add({ directory, base });\r\n }\r\n _handleQueue() {\r\n for (const item of this._queue.values()) {\r\n this._handleDirectory(item.directory, item.base);\r\n }\r\n }\r\n _handleDirectory(directory, base) {\r\n try {\r\n const entries = this._scandir(directory, this._settings.fsScandirSettings);\r\n for (const entry of entries) {\r\n this._handleEntry(entry, base);\r\n }\r\n }\r\n catch (error) {\r\n this._handleError(error);\r\n }\r\n }\r\n _handleError(error) {\r\n if (!common.isFatalError(this._settings, error)) {\r\n return;\r\n }\r\n throw error;\r\n }\r\n _handleEntry(entry, base) {\r\n const fullpath = entry.path;\r\n if (base !== undefined) {\r\n entry.path = common.joinPathSegments(base, entry.name, 
this._settings.pathSegmentSeparator);\r\n }\r\n if (common.isAppliedFilter(this._settings.entryFilter, entry)) {\r\n this._pushToStorage(entry);\r\n }\r\n if (entry.dirent.isDirectory() && common.isAppliedFilter(this._settings.deepFilter, entry)) {\r\n this._pushToQueue(fullpath, entry.path);\r\n }\r\n }\r\n _pushToStorage(entry) {\r\n this._storage.add(entry);\r\n }\r\n}\r\nexports.default = SyncReader;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst path = require(\"path\");\r\nconst fsScandir = require(\"@nodelib/fs.scandir\");\r\nclass Settings {\r\n constructor(_options = {}) {\r\n this._options = _options;\r\n this.basePath = this._getValue(this._options.basePath, undefined);\r\n this.concurrency = this._getValue(this._options.concurrency, Infinity);\r\n this.deepFilter = this._getValue(this._options.deepFilter, null);\r\n this.entryFilter = this._getValue(this._options.entryFilter, null);\r\n this.errorFilter = this._getValue(this._options.errorFilter, null);\r\n this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path.sep);\r\n this.fsScandirSettings = new fsScandir.Settings({\r\n followSymbolicLinks: this._options.followSymbolicLinks,\r\n fs: this._options.fs,\r\n pathSegmentSeparator: this._options.pathSegmentSeparator,\r\n stats: this._options.stats,\r\n throwErrorOnBrokenSymbolicLink: this._options.throwErrorOnBrokenSymbolicLink\r\n });\r\n }\r\n _getValue(option, value) {\r\n return option !== null && option !== void 0 ? option : value;\r\n }\r\n}\r\nexports.default = Settings;\r\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nasync function auth(token) {\n const tokenType = token.split(/\\./).length === 3 ? \"app\" : /^v\\d+\\./.test(token) ? 
\"installation\" : \"oauth\";\n return {\n type: \"token\",\n token: token,\n tokenType\n };\n}\n\n/**\n * Prefix token for usage in the Authorization header\n *\n * @param token OAuth token or JSON Web Token\n */\nfunction withAuthorizationPrefix(token) {\n if (token.split(/\\./).length === 3) {\n return `bearer ${token}`;\n }\n\n return `token ${token}`;\n}\n\nasync function hook(token, request, route, parameters) {\n const endpoint = request.endpoint.merge(route, parameters);\n endpoint.headers.authorization = withAuthorizationPrefix(token);\n return request(endpoint);\n}\n\nconst createTokenAuth = function createTokenAuth(token) {\n if (!token) {\n throw new Error(\"[@octokit/auth-token] No token passed to createTokenAuth\");\n }\n\n if (typeof token !== \"string\") {\n throw new Error(\"[@octokit/auth-token] Token passed to createTokenAuth is not a string\");\n }\n\n token = token.replace(/^(token|bearer) +/i, \"\");\n return Object.assign(auth.bind(null, token), {\n hook: hook.bind(null, token)\n });\n};\n\nexports.createTokenAuth = createTokenAuth;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar universalUserAgent = require('universal-user-agent');\nvar beforeAfterHook = require('before-after-hook');\nvar request = require('@octokit/request');\nvar graphql = require('@octokit/graphql');\nvar authToken = require('@octokit/auth-token');\n\nfunction _objectWithoutPropertiesLoose(source, excluded) {\n if (source == null) return {};\n var target = {};\n var sourceKeys = Object.keys(source);\n var key, i;\n\n for (i = 0; i < sourceKeys.length; i++) {\n key = sourceKeys[i];\n if (excluded.indexOf(key) >= 0) continue;\n target[key] = source[key];\n }\n\n return target;\n}\n\nfunction _objectWithoutProperties(source, excluded) {\n if (source == null) return {};\n\n var target = _objectWithoutPropertiesLoose(source, excluded);\n\n var key, i;\n\n if (Object.getOwnPropertySymbols) {\n var sourceSymbolKeys = Object.getOwnPropertySymbols(source);\n\n for (i = 0; i < sourceSymbolKeys.length; i++) {\n key = sourceSymbolKeys[i];\n if (excluded.indexOf(key) >= 0) continue;\n if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue;\n target[key] = source[key];\n }\n }\n\n return target;\n}\n\nconst VERSION = \"3.2.1\";\n\nclass Octokit {\n constructor(options = {}) {\n const hook = new beforeAfterHook.Collection();\n const requestDefaults = {\n baseUrl: request.request.endpoint.DEFAULTS.baseUrl,\n headers: {},\n request: Object.assign({}, options.request, {\n hook: hook.bind(null, \"request\")\n }),\n mediaType: {\n previews: [],\n format: \"\"\n }\n }; // prepend default user agent with `options.userAgent` if set\n\n requestDefaults.headers[\"user-agent\"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(\" \");\n\n if (options.baseUrl) {\n requestDefaults.baseUrl = options.baseUrl;\n }\n\n if (options.previews) {\n requestDefaults.mediaType.previews = options.previews;\n }\n\n if (options.timeZone) {\n requestDefaults.headers[\"time-zone\"] = options.timeZone;\n }\n\n this.request = request.request.defaults(requestDefaults);\n this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults);\n this.log = Object.assign({\n debug: () => {},\n info: () => {},\n warn: console.warn.bind(console),\n error: console.error.bind(console)\n }, options.log);\n this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are 
set, the `octokit` instance\n // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered.\n // (2) If only `options.auth` is set, use the default token authentication strategy.\n // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance.\n // TODO: type `options.auth` based on `options.authStrategy`.\n\n if (!options.authStrategy) {\n if (!options.auth) {\n // (1)\n this.auth = async () => ({\n type: \"unauthenticated\"\n });\n } else {\n // (2)\n const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ÂŻ\\_(ă)_/ÂŻ\n\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n }\n } else {\n const {\n authStrategy\n } = options,\n otherOptions = _objectWithoutProperties(options, [\"authStrategy\"]);\n\n const auth = authStrategy(Object.assign({\n request: this.request,\n log: this.log,\n // we pass the current octokit instance as well as its constructor options\n // to allow for authentication strategies that return a new octokit instance\n // that shares the same internal state as the current one. The original\n // requirement for this was the \"event-octokit\" authentication strategy\n // of https://github.com/probot/octokit-auth-probot.\n octokit: this,\n octokitOptions: otherOptions\n }, options.auth)); // @ts-ignore ÂŻ\\_(ă)_/ÂŻ\n\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n } // apply plugins\n // https://stackoverflow.com/a/16345172\n\n\n const classConstructor = this.constructor;\n classConstructor.plugins.forEach(plugin => {\n Object.assign(this, plugin(this, options));\n });\n }\n\n static defaults(defaults) {\n const OctokitWithDefaults = class extends this {\n constructor(...args) {\n const options = args[0] || {};\n\n if (typeof defaults === \"function\") {\n super(defaults(options));\n return;\n }\n\n super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? 
{\n userAgent: `${options.userAgent} ${defaults.userAgent}`\n } : null));\n }\n\n };\n return OctokitWithDefaults;\n }\n /**\n * Attach a plugin (or many) to your Octokit instance.\n *\n * @example\n * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)\n */\n\n\n static plugin(...newPlugins) {\n var _a;\n\n const currentPlugins = this.plugins;\n const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a);\n return NewOctokit;\n }\n\n}\nOctokit.VERSION = VERSION;\nOctokit.plugins = [];\n\nexports.Octokit = Octokit;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar isPlainObject = require('is-plain-object');\nvar universalUserAgent = require('universal-user-agent');\n\nfunction lowercaseKeys(object) {\n if (!object) {\n return {};\n }\n\n return Object.keys(object).reduce((newObj, key) => {\n newObj[key.toLowerCase()] = object[key];\n return newObj;\n }, {});\n}\n\nfunction mergeDeep(defaults, options) {\n const result = Object.assign({}, defaults);\n Object.keys(options).forEach(key => {\n if (isPlainObject.isPlainObject(options[key])) {\n if (!(key in defaults)) Object.assign(result, {\n [key]: options[key]\n });else result[key] = mergeDeep(defaults[key], options[key]);\n } else {\n Object.assign(result, {\n [key]: options[key]\n });\n }\n });\n return result;\n}\n\nfunction removeUndefinedProperties(obj) {\n for (const key in obj) {\n if (obj[key] === undefined) {\n delete obj[key];\n }\n }\n\n return obj;\n}\n\nfunction merge(defaults, route, options) {\n if (typeof route === \"string\") {\n let [method, url] = route.split(\" \");\n options = Object.assign(url ? {\n method,\n url\n } : {\n url: method\n }, options);\n } else {\n options = Object.assign({}, route);\n } // lowercase header names before merging with defaults to avoid duplicates\n\n\n options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging\n\n removeUndefinedProperties(options);\n removeUndefinedProperties(options.headers);\n const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten\n\n if (defaults && defaults.mediaType.previews.length) {\n mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews);\n }\n\n mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, \"\"));\n return mergedOptions;\n}\n\nfunction addQueryParameters(url, parameters) {\n const separator = /\\?/.test(url) ? 
\"&\" : \"?\";\n const names = Object.keys(parameters);\n\n if (names.length === 0) {\n return url;\n }\n\n return url + separator + names.map(name => {\n if (name === \"q\") {\n return \"q=\" + parameters.q.split(\"+\").map(encodeURIComponent).join(\"+\");\n }\n\n return `${name}=${encodeURIComponent(parameters[name])}`;\n }).join(\"&\");\n}\n\nconst urlVariableRegex = /\\{[^}]+\\}/g;\n\nfunction removeNonChars(variableName) {\n return variableName.replace(/^\\W+|\\W+$/g, \"\").split(/,/);\n}\n\nfunction extractUrlVariableNames(url) {\n const matches = url.match(urlVariableRegex);\n\n if (!matches) {\n return [];\n }\n\n return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []);\n}\n\nfunction omit(object, keysToOmit) {\n return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => {\n obj[key] = object[key];\n return obj;\n }, {});\n}\n\n// Based on https://github.com/bramstein/url-template, licensed under BSD\n// TODO: create separate package.\n//\n// Copyright (c) 2012-2014, Bram Stein\n// All rights reserved.\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions\n// are met:\n// 1. Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n// 2. Redistributions in binary form must reproduce the above copyright\n// notice, this list of conditions and the following disclaimer in the\n// documentation and/or other materials provided with the distribution.\n// 3. The name of the author may not be used to endorse or promote products\n// derived from this software without specific prior written permission.\n// THIS SOFTWARE IS PROVIDED BY THE AUTHOR \"AS IS\" AND ANY EXPRESS OR IMPLIED\n// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF\n// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO\n// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,\n// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,\n// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY\n// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\n// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,\n// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n/* istanbul ignore file */\nfunction encodeReserved(str) {\n return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) {\n if (!/%[0-9A-Fa-f]/.test(part)) {\n part = encodeURI(part).replace(/%5B/g, \"[\").replace(/%5D/g, \"]\");\n }\n\n return part;\n }).join(\"\");\n}\n\nfunction encodeUnreserved(str) {\n return encodeURIComponent(str).replace(/[!'()*]/g, function (c) {\n return \"%\" + c.charCodeAt(0).toString(16).toUpperCase();\n });\n}\n\nfunction encodeValue(operator, value, key) {\n value = operator === \"+\" || operator === \"#\" ? 
encodeReserved(value) : encodeUnreserved(value);\n\n if (key) {\n return encodeUnreserved(key) + \"=\" + value;\n } else {\n return value;\n }\n}\n\nfunction isDefined(value) {\n return value !== undefined && value !== null;\n}\n\nfunction isKeyOperator(operator) {\n return operator === \";\" || operator === \"&\" || operator === \"?\";\n}\n\nfunction getValues(context, operator, key, modifier) {\n var value = context[key],\n result = [];\n\n if (isDefined(value) && value !== \"\") {\n if (typeof value === \"string\" || typeof value === \"number\" || typeof value === \"boolean\") {\n value = value.toString();\n\n if (modifier && modifier !== \"*\") {\n value = value.substring(0, parseInt(modifier, 10));\n }\n\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : \"\"));\n } else {\n if (modifier === \"*\") {\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : \"\"));\n });\n } else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n result.push(encodeValue(operator, value[k], k));\n }\n });\n }\n } else {\n const tmp = [];\n\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n tmp.push(encodeValue(operator, value));\n });\n } else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n tmp.push(encodeUnreserved(k));\n tmp.push(encodeValue(operator, value[k].toString()));\n }\n });\n }\n\n if (isKeyOperator(operator)) {\n result.push(encodeUnreserved(key) + \"=\" + tmp.join(\",\"));\n } else if (tmp.length !== 0) {\n result.push(tmp.join(\",\"));\n }\n }\n }\n } else {\n if (operator === \";\") {\n if (isDefined(value)) {\n result.push(encodeUnreserved(key));\n }\n } else if (value === \"\" && (operator === \"&\" || operator === \"?\")) {\n result.push(encodeUnreserved(key) + \"=\");\n } else if (value === \"\") {\n result.push(\"\");\n }\n }\n\n return result;\n}\n\nfunction parseUrl(template) {\n return {\n expand: expand.bind(null, template)\n };\n}\n\nfunction expand(template, context) {\n var operators = [\"+\", \"#\", \".\", \"/\", \";\", \"?\", \"&\"];\n return template.replace(/\\{([^\\{\\}]+)\\}|([^\\{\\}]+)/g, function (_, expression, literal) {\n if (expression) {\n let operator = \"\";\n const values = [];\n\n if (operators.indexOf(expression.charAt(0)) !== -1) {\n operator = expression.charAt(0);\n expression = expression.substr(1);\n }\n\n expression.split(/,/g).forEach(function (variable) {\n var tmp = /([^:\\*]*)(?::(\\d+)|(\\*))?/.exec(variable);\n values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));\n });\n\n if (operator && operator !== \"+\") {\n var separator = \",\";\n\n if (operator === \"?\") {\n separator = \"&\";\n } else if (operator !== \"#\") {\n separator = operator;\n }\n\n return (values.length !== 0 ? 
operator : \"\") + values.join(separator);\n } else {\n return values.join(\",\");\n }\n } else {\n return encodeReserved(literal);\n }\n });\n}\n\nfunction parse(options) {\n // https://fetch.spec.whatwg.org/#methods\n let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible\n\n let url = (options.url || \"/\").replace(/:([a-z]\\w+)/g, \"{$1}\");\n let headers = Object.assign({}, options.headers);\n let body;\n let parameters = omit(options, [\"method\", \"baseUrl\", \"url\", \"headers\", \"request\", \"mediaType\"]); // extract variable names from URL to calculate remaining variables later\n\n const urlVariableNames = extractUrlVariableNames(url);\n url = parseUrl(url).expand(parameters);\n\n if (!/^http/.test(url)) {\n url = options.baseUrl + url;\n }\n\n const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat(\"baseUrl\");\n const remainingParameters = omit(parameters, omittedParameters);\n const isBinaryRequest = /application\\/octet-stream/i.test(headers.accept);\n\n if (!isBinaryRequest) {\n if (options.mediaType.format) {\n // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw\n headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\\/vnd(\\.\\w+)(\\.v3)?(\\.\\w+)?(\\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(\",\");\n }\n\n if (options.mediaType.previews.length) {\n const previewsFromAcceptHeader = headers.accept.match(/[\\w-]+(?=-preview)/g) || [];\n headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => {\n const format = options.mediaType.format ? `.${options.mediaType.format}` : \"+json\";\n return `application/vnd.github.${preview}-preview${format}`;\n }).join(\",\");\n }\n } // for GET/HEAD requests, set URL query parameters from remaining parameters\n // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters\n\n\n if ([\"GET\", \"HEAD\"].includes(method)) {\n url = addQueryParameters(url, remainingParameters);\n } else {\n if (\"data\" in remainingParameters) {\n body = remainingParameters.data;\n } else {\n if (Object.keys(remainingParameters).length) {\n body = remainingParameters;\n } else {\n headers[\"content-length\"] = 0;\n }\n }\n } // default content-type for JSON if body is set\n\n\n if (!headers[\"content-type\"] && typeof body !== \"undefined\") {\n headers[\"content-type\"] = \"application/json; charset=utf-8\";\n } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.\n // fetch does not allow to set `content-length` header, but we can set body to an empty string\n\n\n if ([\"PATCH\", \"PUT\"].includes(method) && typeof body === \"undefined\") {\n body = \"\";\n } // Only return body/request keys if present\n\n\n return Object.assign({\n method,\n url,\n headers\n }, typeof body !== \"undefined\" ? {\n body\n } : null, options.request ? 
{\n request: options.request\n } : null);\n}\n\nfunction endpointWithDefaults(defaults, route, options) {\n return parse(merge(defaults, route, options));\n}\n\nfunction withDefaults(oldDefaults, newDefaults) {\n const DEFAULTS = merge(oldDefaults, newDefaults);\n const endpoint = endpointWithDefaults.bind(null, DEFAULTS);\n return Object.assign(endpoint, {\n DEFAULTS,\n defaults: withDefaults.bind(null, DEFAULTS),\n merge: merge.bind(null, DEFAULTS),\n parse\n });\n}\n\nconst VERSION = \"6.0.9\";\n\nconst userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url.\n// So we use RequestParameters and add method as additional required property.\n\nconst DEFAULTS = {\n method: \"GET\",\n baseUrl: \"https://api.github.com\",\n headers: {\n accept: \"application/vnd.github.v3+json\",\n \"user-agent\": userAgent\n },\n mediaType: {\n format: \"\",\n previews: []\n }\n};\n\nconst endpoint = withDefaults(null, DEFAULTS);\n\nexports.endpoint = endpoint;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\n/*!\n * is-plain-object \n *\n * Copyright (c) 2014-2017, Jon Schlinkert.\n * Released under the MIT License.\n */\n\nfunction isObject(o) {\n return Object.prototype.toString.call(o) === '[object Object]';\n}\n\nfunction isPlainObject(o) {\n var ctor,prot;\n\n if (isObject(o) === false) return false;\n\n // If has modified constructor\n ctor = o.constructor;\n if (ctor === undefined) return true;\n\n // If has modified prototype\n prot = ctor.prototype;\n if (isObject(prot) === false) return false;\n\n // If constructor does not have an Object-specific method\n if (prot.hasOwnProperty('isPrototypeOf') === false) {\n return false;\n }\n\n // Most likely a plain Object\n return true;\n}\n\nexports.isPlainObject = isPlainObject;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar request = require('@octokit/request');\nvar universalUserAgent = require('universal-user-agent');\n\nconst VERSION = \"4.5.7\";\n\nclass GraphqlError extends Error {\n constructor(request, response) {\n const message = response.data.errors[0].message;\n super(message);\n Object.assign(this, response.data);\n Object.assign(this, {\n headers: response.headers\n });\n this.name = \"GraphqlError\";\n this.request = request; // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n }\n\n}\n\nconst NON_VARIABLE_OPTIONS = [\"method\", \"baseUrl\", \"url\", \"headers\", \"request\", \"query\", \"mediaType\"];\nconst GHES_V3_SUFFIX_REGEX = /\\/api\\/v3\\/?$/;\nfunction graphql(request, query, options) {\n if (typeof query === \"string\" && options && \"query\" in options) {\n return Promise.reject(new Error(`[@octokit/graphql] \"query\" cannot be used as variable name`));\n }\n\n const parsedOptions = typeof query === \"string\" ? 
Object.assign({\n query\n }, options) : query;\n const requestOptions = Object.keys(parsedOptions).reduce((result, key) => {\n if (NON_VARIABLE_OPTIONS.includes(key)) {\n result[key] = parsedOptions[key];\n return result;\n }\n\n if (!result.variables) {\n result.variables = {};\n }\n\n result.variables[key] = parsedOptions[key];\n return result;\n }, {}); // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix\n // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451\n\n const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl;\n\n if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {\n requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, \"/api/graphql\");\n }\n\n return request(requestOptions).then(response => {\n if (response.data.errors) {\n const headers = {};\n\n for (const key of Object.keys(response.headers)) {\n headers[key] = response.headers[key];\n }\n\n throw new GraphqlError(requestOptions, {\n headers,\n data: response.data\n });\n }\n\n return response.data.data;\n });\n}\n\nfunction withDefaults(request$1, newDefaults) {\n const newRequest = request$1.defaults(newDefaults);\n\n const newApi = (query, options) => {\n return graphql(newRequest, query, options);\n };\n\n return Object.assign(newApi, {\n defaults: withDefaults.bind(null, newRequest),\n endpoint: request.request.endpoint\n });\n}\n\nconst graphql$1 = withDefaults(request.request, {\n headers: {\n \"user-agent\": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}`\n },\n method: \"POST\",\n url: \"/graphql\"\n});\nfunction withCustomRequest(customRequest) {\n return withDefaults(customRequest, {\n method: \"POST\",\n url: \"/graphql\"\n });\n}\n\nexports.graphql = graphql$1;\nexports.withCustomRequest = withCustomRequest;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nconst VERSION = \"2.6.0\";\n\n/**\n * Some âlistâ response that can be paginated have a different response structure\n *\n * They have a `total_count` key in the response (search also has `incomplete_results`,\n * /installation/repositories also has `repository_selection`), as well as a key with\n * the list of the items which name varies from endpoint to endpoint.\n *\n * Octokit normalizes these responses so that paginated results are always returned following\n * the same structure. 
One challenge is that if the list response has only one page, no Link\n * header is provided, so this header alone is not sufficient to check wether a response is\n * paginated or not.\n *\n * We check if a \"total_count\" key is present in the response data, but also make sure that\n * a \"url\" property is not, as the \"Get the combined status for a specific ref\" endpoint would\n * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref\n */\nfunction normalizePaginatedListResponse(response) {\n const responseNeedsNormalization = \"total_count\" in response.data && !(\"url\" in response.data);\n if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way\n // to retrieve the same information.\n\n const incompleteResults = response.data.incomplete_results;\n const repositorySelection = response.data.repository_selection;\n const totalCount = response.data.total_count;\n delete response.data.incomplete_results;\n delete response.data.repository_selection;\n delete response.data.total_count;\n const namespaceKey = Object.keys(response.data)[0];\n const data = response.data[namespaceKey];\n response.data = data;\n\n if (typeof incompleteResults !== \"undefined\") {\n response.data.incomplete_results = incompleteResults;\n }\n\n if (typeof repositorySelection !== \"undefined\") {\n response.data.repository_selection = repositorySelection;\n }\n\n response.data.total_count = totalCount;\n return response;\n}\n\nfunction iterator(octokit, route, parameters) {\n const options = typeof route === \"function\" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);\n const requestMethod = typeof route === \"function\" ? route : octokit.request;\n const method = options.method;\n const headers = options.headers;\n let url = options.url;\n return {\n [Symbol.asyncIterator]: () => ({\n async next() {\n if (!url) return {\n done: true\n };\n const response = await requestMethod({\n method,\n url,\n headers\n });\n const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format:\n // '; rel=\"next\", ; rel=\"last\"'\n // sets `url` to undefined if \"next\" URL is not present or `link` header is not set\n\n url = ((normalizedResponse.headers.link || \"\").match(/<([^>]+)>;\\s*rel=\"next\"/) || [])[1];\n return {\n value: normalizedResponse\n };\n }\n\n })\n };\n}\n\nfunction paginate(octokit, route, parameters, mapFn) {\n if (typeof parameters === \"function\") {\n mapFn = parameters;\n parameters = undefined;\n }\n\n return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn);\n}\n\nfunction gather(octokit, results, iterator, mapFn) {\n return iterator.next().then(result => {\n if (result.done) {\n return results;\n }\n\n let earlyExit = false;\n\n function done() {\n earlyExit = true;\n }\n\n results = results.concat(mapFn ? 
mapFn(result.value, done) : result.value.data);\n\n if (earlyExit) {\n return results;\n }\n\n return gather(octokit, results, iterator, mapFn);\n });\n}\n\nconst composePaginateRest = Object.assign(paginate, {\n iterator\n});\n\n/**\n * @param octokit Octokit instance\n * @param options Options passed to Octokit constructor\n */\n\nfunction paginateRest(octokit) {\n return {\n paginate: Object.assign(paginate.bind(null, octokit), {\n iterator: iterator.bind(null, octokit)\n })\n };\n}\npaginateRest.VERSION = VERSION;\n\nexports.composePaginateRest = composePaginateRest;\nexports.paginateRest = paginateRest;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nconst Endpoints = {\n actions: {\n addSelectedRepoToOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"],\n cancelWorkflowRun: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel\"],\n createOrUpdateOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}\"],\n createOrUpdateRepoSecret: [\"PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n createRegistrationTokenForOrg: [\"POST /orgs/{org}/actions/runners/registration-token\"],\n createRegistrationTokenForRepo: [\"POST /repos/{owner}/{repo}/actions/runners/registration-token\"],\n createRemoveTokenForOrg: [\"POST /orgs/{org}/actions/runners/remove-token\"],\n createRemoveTokenForRepo: [\"POST /repos/{owner}/{repo}/actions/runners/remove-token\"],\n createWorkflowDispatch: [\"POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches\"],\n deleteArtifact: [\"DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n deleteOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}\"],\n deleteRepoSecret: [\"DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n deleteSelfHostedRunnerFromOrg: [\"DELETE /orgs/{org}/actions/runners/{runner_id}\"],\n deleteSelfHostedRunnerFromRepo: [\"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}\"],\n deleteWorkflowRun: [\"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n deleteWorkflowRunLogs: [\"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs\"],\n downloadArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}\"],\n downloadJobLogsForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs\"],\n downloadWorkflowRunLogs: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs\"],\n getArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n getJobForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/actions/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}\"],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/actions/secrets/public-key\"],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n getSelfHostedRunnerForOrg: [\"GET /orgs/{org}/actions/runners/{runner_id}\"],\n getSelfHostedRunnerForRepo: [\"GET /repos/{owner}/{repo}/actions/runners/{runner_id}\"],\n getWorkflow: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}\"],\n getWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n getWorkflowRunUsage: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing\"],\n getWorkflowUsage: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing\"],\n listArtifactsForRepo: [\"GET /repos/{owner}/{repo}/actions/artifacts\"],\n listJobsForWorkflowRun: [\"GET 
/repos/{owner}/{repo}/actions/runs/{run_id}/jobs\"],\n listOrgSecrets: [\"GET /orgs/{org}/actions/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/actions/secrets\"],\n listRepoWorkflows: [\"GET /repos/{owner}/{repo}/actions/workflows\"],\n listRunnerApplicationsForOrg: [\"GET /orgs/{org}/actions/runners/downloads\"],\n listRunnerApplicationsForRepo: [\"GET /repos/{owner}/{repo}/actions/runners/downloads\"],\n listSelectedReposForOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\"],\n listSelfHostedRunnersForOrg: [\"GET /orgs/{org}/actions/runners\"],\n listSelfHostedRunnersForRepo: [\"GET /repos/{owner}/{repo}/actions/runners\"],\n listWorkflowRunArtifacts: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\"],\n listWorkflowRuns: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\"],\n listWorkflowRunsForRepo: [\"GET /repos/{owner}/{repo}/actions/runs\"],\n reRunWorkflow: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun\"],\n removeSelectedRepoFromOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"],\n setSelectedReposForOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories\"]\n },\n activity: {\n checkRepoIsStarredByAuthenticatedUser: [\"GET /user/starred/{owner}/{repo}\"],\n deleteRepoSubscription: [\"DELETE /repos/{owner}/{repo}/subscription\"],\n deleteThreadSubscription: [\"DELETE /notifications/threads/{thread_id}/subscription\"],\n getFeeds: [\"GET /feeds\"],\n getRepoSubscription: [\"GET /repos/{owner}/{repo}/subscription\"],\n getThread: [\"GET /notifications/threads/{thread_id}\"],\n getThreadSubscriptionForAuthenticatedUser: [\"GET /notifications/threads/{thread_id}/subscription\"],\n listEventsForAuthenticatedUser: [\"GET /users/{username}/events\"],\n listNotificationsForAuthenticatedUser: [\"GET /notifications\"],\n listOrgEventsForAuthenticatedUser: [\"GET /users/{username}/events/orgs/{org}\"],\n listPublicEvents: [\"GET /events\"],\n listPublicEventsForRepoNetwork: [\"GET /networks/{owner}/{repo}/events\"],\n listPublicEventsForUser: [\"GET /users/{username}/events/public\"],\n listPublicOrgEvents: [\"GET /orgs/{org}/events\"],\n listReceivedEventsForUser: [\"GET /users/{username}/received_events\"],\n listReceivedPublicEventsForUser: [\"GET /users/{username}/received_events/public\"],\n listRepoEvents: [\"GET /repos/{owner}/{repo}/events\"],\n listRepoNotificationsForAuthenticatedUser: [\"GET /repos/{owner}/{repo}/notifications\"],\n listReposStarredByAuthenticatedUser: [\"GET /user/starred\"],\n listReposStarredByUser: [\"GET /users/{username}/starred\"],\n listReposWatchedByUser: [\"GET /users/{username}/subscriptions\"],\n listStargazersForRepo: [\"GET /repos/{owner}/{repo}/stargazers\"],\n listWatchedReposForAuthenticatedUser: [\"GET /user/subscriptions\"],\n listWatchersForRepo: [\"GET /repos/{owner}/{repo}/subscribers\"],\n markNotificationsAsRead: [\"PUT /notifications\"],\n markRepoNotificationsAsRead: [\"PUT /repos/{owner}/{repo}/notifications\"],\n markThreadAsRead: [\"PATCH /notifications/threads/{thread_id}\"],\n setRepoSubscription: [\"PUT /repos/{owner}/{repo}/subscription\"],\n setThreadSubscription: [\"PUT /notifications/threads/{thread_id}/subscription\"],\n starRepoForAuthenticatedUser: [\"PUT /user/starred/{owner}/{repo}\"],\n unstarRepoForAuthenticatedUser: [\"DELETE /user/starred/{owner}/{repo}\"]\n },\n apps: {\n addRepoToInstallation: [\"PUT /user/installations/{installation_id}/repositories/{repository_id}\"],\n 
checkToken: [\"POST /applications/{client_id}/token\"],\n createContentAttachment: [\"POST /content_references/{content_reference_id}/attachments\", {\n mediaType: {\n previews: [\"corsair\"]\n }\n }],\n createFromManifest: [\"POST /app-manifests/{code}/conversions\"],\n createInstallationAccessToken: [\"POST /app/installations/{installation_id}/access_tokens\"],\n deleteAuthorization: [\"DELETE /applications/{client_id}/grant\"],\n deleteInstallation: [\"DELETE /app/installations/{installation_id}\"],\n deleteToken: [\"DELETE /applications/{client_id}/token\"],\n getAuthenticated: [\"GET /app\"],\n getBySlug: [\"GET /apps/{app_slug}\"],\n getInstallation: [\"GET /app/installations/{installation_id}\"],\n getOrgInstallation: [\"GET /orgs/{org}/installation\"],\n getRepoInstallation: [\"GET /repos/{owner}/{repo}/installation\"],\n getSubscriptionPlanForAccount: [\"GET /marketplace_listing/accounts/{account_id}\"],\n getSubscriptionPlanForAccountStubbed: [\"GET /marketplace_listing/stubbed/accounts/{account_id}\"],\n getUserInstallation: [\"GET /users/{username}/installation\"],\n listAccountsForPlan: [\"GET /marketplace_listing/plans/{plan_id}/accounts\"],\n listAccountsForPlanStubbed: [\"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\"],\n listInstallationReposForAuthenticatedUser: [\"GET /user/installations/{installation_id}/repositories\"],\n listInstallations: [\"GET /app/installations\"],\n listInstallationsForAuthenticatedUser: [\"GET /user/installations\"],\n listPlans: [\"GET /marketplace_listing/plans\"],\n listPlansStubbed: [\"GET /marketplace_listing/stubbed/plans\"],\n listReposAccessibleToInstallation: [\"GET /installation/repositories\"],\n listSubscriptionsForAuthenticatedUser: [\"GET /user/marketplace_purchases\"],\n listSubscriptionsForAuthenticatedUserStubbed: [\"GET /user/marketplace_purchases/stubbed\"],\n removeRepoFromInstallation: [\"DELETE /user/installations/{installation_id}/repositories/{repository_id}\"],\n resetToken: [\"PATCH /applications/{client_id}/token\"],\n revokeInstallationAccessToken: [\"DELETE /installation/token\"],\n suspendInstallation: [\"PUT /app/installations/{installation_id}/suspended\"],\n unsuspendInstallation: [\"DELETE /app/installations/{installation_id}/suspended\"]\n },\n billing: {\n getGithubActionsBillingOrg: [\"GET /orgs/{org}/settings/billing/actions\"],\n getGithubActionsBillingUser: [\"GET /users/{username}/settings/billing/actions\"],\n getGithubPackagesBillingOrg: [\"GET /orgs/{org}/settings/billing/packages\"],\n getGithubPackagesBillingUser: [\"GET /users/{username}/settings/billing/packages\"],\n getSharedStorageBillingOrg: [\"GET /orgs/{org}/settings/billing/shared-storage\"],\n getSharedStorageBillingUser: [\"GET /users/{username}/settings/billing/shared-storage\"]\n },\n checks: {\n create: [\"POST /repos/{owner}/{repo}/check-runs\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n createSuite: [\"POST /repos/{owner}/{repo}/check-suites\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n get: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n getSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n listAnnotations: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n listForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n 
listForSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n listSuitesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n rerequestSuite: [\"POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n setSuitesPreferences: [\"PATCH /repos/{owner}/{repo}/check-suites/preferences\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n update: [\"PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }]\n },\n codeScanning: {\n getAlert: [\"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\", {}, {\n renamedParameters: {\n alert_id: \"alert_number\"\n }\n }],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/code-scanning/alerts\"],\n listRecentAnalyses: [\"GET /repos/{owner}/{repo}/code-scanning/analyses\"],\n updateAlert: [\"PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\"],\n uploadSarif: [\"POST /repos/{owner}/{repo}/code-scanning/sarifs\"]\n },\n codesOfConduct: {\n getAllCodesOfConduct: [\"GET /codes_of_conduct\", {\n mediaType: {\n previews: [\"scarlet-witch\"]\n }\n }],\n getConductCode: [\"GET /codes_of_conduct/{key}\", {\n mediaType: {\n previews: [\"scarlet-witch\"]\n }\n }],\n getForRepo: [\"GET /repos/{owner}/{repo}/community/code_of_conduct\", {\n mediaType: {\n previews: [\"scarlet-witch\"]\n }\n }]\n },\n emojis: {\n get: [\"GET /emojis\"]\n },\n gists: {\n checkIsStarred: [\"GET /gists/{gist_id}/star\"],\n create: [\"POST /gists\"],\n createComment: [\"POST /gists/{gist_id}/comments\"],\n delete: [\"DELETE /gists/{gist_id}\"],\n deleteComment: [\"DELETE /gists/{gist_id}/comments/{comment_id}\"],\n fork: [\"POST /gists/{gist_id}/forks\"],\n get: [\"GET /gists/{gist_id}\"],\n getComment: [\"GET /gists/{gist_id}/comments/{comment_id}\"],\n getRevision: [\"GET /gists/{gist_id}/{sha}\"],\n list: [\"GET /gists\"],\n listComments: [\"GET /gists/{gist_id}/comments\"],\n listCommits: [\"GET /gists/{gist_id}/commits\"],\n listForUser: [\"GET /users/{username}/gists\"],\n listForks: [\"GET /gists/{gist_id}/forks\"],\n listPublic: [\"GET /gists/public\"],\n listStarred: [\"GET /gists/starred\"],\n star: [\"PUT /gists/{gist_id}/star\"],\n unstar: [\"DELETE /gists/{gist_id}/star\"],\n update: [\"PATCH /gists/{gist_id}\"],\n updateComment: [\"PATCH /gists/{gist_id}/comments/{comment_id}\"]\n },\n git: {\n createBlob: [\"POST /repos/{owner}/{repo}/git/blobs\"],\n createCommit: [\"POST /repos/{owner}/{repo}/git/commits\"],\n createRef: [\"POST /repos/{owner}/{repo}/git/refs\"],\n createTag: [\"POST /repos/{owner}/{repo}/git/tags\"],\n createTree: [\"POST /repos/{owner}/{repo}/git/trees\"],\n deleteRef: [\"DELETE /repos/{owner}/{repo}/git/refs/{ref}\"],\n getBlob: [\"GET /repos/{owner}/{repo}/git/blobs/{file_sha}\"],\n getCommit: [\"GET /repos/{owner}/{repo}/git/commits/{commit_sha}\"],\n getRef: [\"GET /repos/{owner}/{repo}/git/ref/{ref}\"],\n getTag: [\"GET /repos/{owner}/{repo}/git/tags/{tag_sha}\"],\n getTree: [\"GET /repos/{owner}/{repo}/git/trees/{tree_sha}\"],\n listMatchingRefs: [\"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\"],\n updateRef: [\"PATCH /repos/{owner}/{repo}/git/refs/{ref}\"]\n },\n gitignore: {\n getAllTemplates: [\"GET /gitignore/templates\"],\n getTemplate: [\"GET /gitignore/templates/{name}\"]\n },\n interactions: {\n getRestrictionsForOrg: [\"GET 
/orgs/{org}/interaction-limits\", {\n mediaType: {\n previews: [\"sombra\"]\n }\n }],\n getRestrictionsForRepo: [\"GET /repos/{owner}/{repo}/interaction-limits\", {\n mediaType: {\n previews: [\"sombra\"]\n }\n }],\n removeRestrictionsForOrg: [\"DELETE /orgs/{org}/interaction-limits\", {\n mediaType: {\n previews: [\"sombra\"]\n }\n }],\n removeRestrictionsForRepo: [\"DELETE /repos/{owner}/{repo}/interaction-limits\", {\n mediaType: {\n previews: [\"sombra\"]\n }\n }],\n setRestrictionsForOrg: [\"PUT /orgs/{org}/interaction-limits\", {\n mediaType: {\n previews: [\"sombra\"]\n }\n }],\n setRestrictionsForRepo: [\"PUT /repos/{owner}/{repo}/interaction-limits\", {\n mediaType: {\n previews: [\"sombra\"]\n }\n }]\n },\n issues: {\n addAssignees: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/assignees\"],\n addLabels: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n checkUserCanBeAssigned: [\"GET /repos/{owner}/{repo}/assignees/{assignee}\"],\n create: [\"POST /repos/{owner}/{repo}/issues\"],\n createComment: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n createLabel: [\"POST /repos/{owner}/{repo}/labels\"],\n createMilestone: [\"POST /repos/{owner}/{repo}/milestones\"],\n deleteComment: [\"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n deleteLabel: [\"DELETE /repos/{owner}/{repo}/labels/{name}\"],\n deleteMilestone: [\"DELETE /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n get: [\"GET /repos/{owner}/{repo}/issues/{issue_number}\"],\n getComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n getEvent: [\"GET /repos/{owner}/{repo}/issues/events/{event_id}\"],\n getLabel: [\"GET /repos/{owner}/{repo}/labels/{name}\"],\n getMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n list: [\"GET /issues\"],\n listAssignees: [\"GET /repos/{owner}/{repo}/assignees\"],\n listComments: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n listCommentsForRepo: [\"GET /repos/{owner}/{repo}/issues/comments\"],\n listEvents: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/events\"],\n listEventsForRepo: [\"GET /repos/{owner}/{repo}/issues/events\"],\n listEventsForTimeline: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\", {\n mediaType: {\n previews: [\"mockingbird\"]\n }\n }],\n listForAuthenticatedUser: [\"GET /user/issues\"],\n listForOrg: [\"GET /orgs/{org}/issues\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/issues\"],\n listLabelsForMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\"],\n listLabelsForRepo: [\"GET /repos/{owner}/{repo}/labels\"],\n listLabelsOnIssue: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n listMilestones: [\"GET /repos/{owner}/{repo}/milestones\"],\n lock: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n removeAllLabels: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n removeAssignees: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees\"],\n removeLabel: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}\"],\n setLabels: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n unlock: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n update: [\"PATCH /repos/{owner}/{repo}/issues/{issue_number}\"],\n updateComment: [\"PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n updateLabel: [\"PATCH /repos/{owner}/{repo}/labels/{name}\"],\n updateMilestone: [\"PATCH 
/repos/{owner}/{repo}/milestones/{milestone_number}\"]\n },\n licenses: {\n get: [\"GET /licenses/{license}\"],\n getAllCommonlyUsed: [\"GET /licenses\"],\n getForRepo: [\"GET /repos/{owner}/{repo}/license\"]\n },\n markdown: {\n render: [\"POST /markdown\"],\n renderRaw: [\"POST /markdown/raw\", {\n headers: {\n \"content-type\": \"text/plain; charset=utf-8\"\n }\n }]\n },\n meta: {\n get: [\"GET /meta\"]\n },\n migrations: {\n cancelImport: [\"DELETE /repos/{owner}/{repo}/import\"],\n deleteArchiveForAuthenticatedUser: [\"DELETE /user/migrations/{migration_id}/archive\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n deleteArchiveForOrg: [\"DELETE /orgs/{org}/migrations/{migration_id}/archive\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n downloadArchiveForOrg: [\"GET /orgs/{org}/migrations/{migration_id}/archive\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n getArchiveForAuthenticatedUser: [\"GET /user/migrations/{migration_id}/archive\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n getCommitAuthors: [\"GET /repos/{owner}/{repo}/import/authors\"],\n getImportStatus: [\"GET /repos/{owner}/{repo}/import\"],\n getLargeFiles: [\"GET /repos/{owner}/{repo}/import/large_files\"],\n getStatusForAuthenticatedUser: [\"GET /user/migrations/{migration_id}\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n getStatusForOrg: [\"GET /orgs/{org}/migrations/{migration_id}\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n listForAuthenticatedUser: [\"GET /user/migrations\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n listForOrg: [\"GET /orgs/{org}/migrations\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n listReposForOrg: [\"GET /orgs/{org}/migrations/{migration_id}/repositories\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n listReposForUser: [\"GET /user/migrations/{migration_id}/repositories\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n mapCommitAuthor: [\"PATCH /repos/{owner}/{repo}/import/authors/{author_id}\"],\n setLfsPreference: [\"PATCH /repos/{owner}/{repo}/import/lfs\"],\n startForAuthenticatedUser: [\"POST /user/migrations\"],\n startForOrg: [\"POST /orgs/{org}/migrations\"],\n startImport: [\"PUT /repos/{owner}/{repo}/import\"],\n unlockRepoForAuthenticatedUser: [\"DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n unlockRepoForOrg: [\"DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n updateImport: [\"PATCH /repos/{owner}/{repo}/import\"]\n },\n orgs: {\n blockUser: [\"PUT /orgs/{org}/blocks/{username}\"],\n checkBlockedUser: [\"GET /orgs/{org}/blocks/{username}\"],\n checkMembershipForUser: [\"GET /orgs/{org}/members/{username}\"],\n checkPublicMembershipForUser: [\"GET /orgs/{org}/public_members/{username}\"],\n convertMemberToOutsideCollaborator: [\"PUT /orgs/{org}/outside_collaborators/{username}\"],\n createInvitation: [\"POST /orgs/{org}/invitations\"],\n createWebhook: [\"POST /orgs/{org}/hooks\"],\n deleteWebhook: [\"DELETE /orgs/{org}/hooks/{hook_id}\"],\n get: [\"GET /orgs/{org}\"],\n getMembershipForAuthenticatedUser: [\"GET /user/memberships/orgs/{org}\"],\n getMembershipForUser: [\"GET /orgs/{org}/memberships/{username}\"],\n getWebhook: [\"GET /orgs/{org}/hooks/{hook_id}\"],\n list: [\"GET /organizations\"],\n listAppInstallations: [\"GET /orgs/{org}/installations\"],\n listBlockedUsers: [\"GET 
/orgs/{org}/blocks\"],\n listForAuthenticatedUser: [\"GET /user/orgs\"],\n listForUser: [\"GET /users/{username}/orgs\"],\n listInvitationTeams: [\"GET /orgs/{org}/invitations/{invitation_id}/teams\"],\n listMembers: [\"GET /orgs/{org}/members\"],\n listMembershipsForAuthenticatedUser: [\"GET /user/memberships/orgs\"],\n listOutsideCollaborators: [\"GET /orgs/{org}/outside_collaborators\"],\n listPendingInvitations: [\"GET /orgs/{org}/invitations\"],\n listPublicMembers: [\"GET /orgs/{org}/public_members\"],\n listWebhooks: [\"GET /orgs/{org}/hooks\"],\n pingWebhook: [\"POST /orgs/{org}/hooks/{hook_id}/pings\"],\n removeMember: [\"DELETE /orgs/{org}/members/{username}\"],\n removeMembershipForUser: [\"DELETE /orgs/{org}/memberships/{username}\"],\n removeOutsideCollaborator: [\"DELETE /orgs/{org}/outside_collaborators/{username}\"],\n removePublicMembershipForAuthenticatedUser: [\"DELETE /orgs/{org}/public_members/{username}\"],\n setMembershipForUser: [\"PUT /orgs/{org}/memberships/{username}\"],\n setPublicMembershipForAuthenticatedUser: [\"PUT /orgs/{org}/public_members/{username}\"],\n unblockUser: [\"DELETE /orgs/{org}/blocks/{username}\"],\n update: [\"PATCH /orgs/{org}\"],\n updateMembershipForAuthenticatedUser: [\"PATCH /user/memberships/orgs/{org}\"],\n updateWebhook: [\"PATCH /orgs/{org}/hooks/{hook_id}\"]\n },\n projects: {\n addCollaborator: [\"PUT /projects/{project_id}/collaborators/{username}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n createCard: [\"POST /projects/columns/{column_id}/cards\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n createColumn: [\"POST /projects/{project_id}/columns\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n createForAuthenticatedUser: [\"POST /user/projects\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n createForOrg: [\"POST /orgs/{org}/projects\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n createForRepo: [\"POST /repos/{owner}/{repo}/projects\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n delete: [\"DELETE /projects/{project_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n deleteCard: [\"DELETE /projects/columns/cards/{card_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n deleteColumn: [\"DELETE /projects/columns/{column_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n get: [\"GET /projects/{project_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n getCard: [\"GET /projects/columns/cards/{card_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n getColumn: [\"GET /projects/columns/{column_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n getPermissionForUser: [\"GET /projects/{project_id}/collaborators/{username}/permission\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listCards: [\"GET /projects/columns/{column_id}/cards\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listCollaborators: [\"GET /projects/{project_id}/collaborators\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listColumns: [\"GET /projects/{project_id}/columns\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listForOrg: [\"GET /orgs/{org}/projects\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listForRepo: [\"GET /repos/{owner}/{repo}/projects\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listForUser: [\"GET /users/{username}/projects\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n moveCard: [\"POST /projects/columns/cards/{card_id}/moves\", {\n mediaType: {\n 
previews: [\"inertia\"]\n }\n }],\n moveColumn: [\"POST /projects/columns/{column_id}/moves\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n removeCollaborator: [\"DELETE /projects/{project_id}/collaborators/{username}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n update: [\"PATCH /projects/{project_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n updateCard: [\"PATCH /projects/columns/cards/{card_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n updateColumn: [\"PATCH /projects/columns/{column_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }]\n },\n pulls: {\n checkIfMerged: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n create: [\"POST /repos/{owner}/{repo}/pulls\"],\n createReplyForReviewComment: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies\"],\n createReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n createReviewComment: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments\"],\n deletePendingReview: [\"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n deleteReviewComment: [\"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n dismissReview: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals\"],\n get: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}\"],\n getReview: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n getReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n list: [\"GET /repos/{owner}/{repo}/pulls\"],\n listCommentsForReview: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\"],\n listCommits: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\"],\n listFiles: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\"],\n listRequestedReviewers: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n listReviewComments: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\"],\n listReviewCommentsForRepo: [\"GET /repos/{owner}/{repo}/pulls/comments\"],\n listReviews: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n merge: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n removeRequestedReviewers: [\"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n requestReviewers: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n submitReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events\"],\n update: [\"PATCH /repos/{owner}/{repo}/pulls/{pull_number}\"],\n updateBranch: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch\", {\n mediaType: {\n previews: [\"lydian\"]\n }\n }],\n updateReview: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n updateReviewComment: [\"PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}\"]\n },\n rateLimit: {\n get: [\"GET /rate_limit\"]\n },\n reactions: {\n createForCommitComment: [\"POST /repos/{owner}/{repo}/comments/{comment_id}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n createForIssue: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n createForIssueComment: [\"POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n createForPullRequestReviewComment: [\"POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\", {\n mediaType: 
{\n previews: [\"squirrel-girl\"]\n }\n }],\n createForTeamDiscussionCommentInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n createForTeamDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteForCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteForIssue: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteForIssueComment: [\"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteForPullRequestComment: [\"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteForTeamDiscussion: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteForTeamDiscussionComment: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteLegacy: [\"DELETE /reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }, {\n deprecated: \"octokit.reactions.deleteLegacy() is deprecated, see https://developer.github.com/v3/reactions/#delete-a-reaction-legacy\"\n }],\n listForCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n listForIssue: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n listForIssueComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n listForPullRequestReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n listForTeamDiscussionCommentInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n listForTeamDiscussionInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }]\n },\n repos: {\n acceptInvitation: [\"PATCH /user/repository_invitations/{invitation_id}\"],\n addAppAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n addCollaborator: [\"PUT /repos/{owner}/{repo}/collaborators/{username}\"],\n addStatusCheckContexts: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n addTeamAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n addUserAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n checkCollaborator: [\"GET 
/repos/{owner}/{repo}/collaborators/{username}\"],\n checkVulnerabilityAlerts: [\"GET /repos/{owner}/{repo}/vulnerability-alerts\", {\n mediaType: {\n previews: [\"dorian\"]\n }\n }],\n compareCommits: [\"GET /repos/{owner}/{repo}/compare/{base}...{head}\"],\n createCommitComment: [\"POST /repos/{owner}/{repo}/commits/{commit_sha}/comments\"],\n createCommitSignatureProtection: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\", {\n mediaType: {\n previews: [\"zzzax\"]\n }\n }],\n createCommitStatus: [\"POST /repos/{owner}/{repo}/statuses/{sha}\"],\n createDeployKey: [\"POST /repos/{owner}/{repo}/keys\"],\n createDeployment: [\"POST /repos/{owner}/{repo}/deployments\"],\n createDeploymentStatus: [\"POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"],\n createDispatchEvent: [\"POST /repos/{owner}/{repo}/dispatches\"],\n createForAuthenticatedUser: [\"POST /user/repos\"],\n createFork: [\"POST /repos/{owner}/{repo}/forks\"],\n createInOrg: [\"POST /orgs/{org}/repos\"],\n createOrUpdateFileContents: [\"PUT /repos/{owner}/{repo}/contents/{path}\"],\n createPagesSite: [\"POST /repos/{owner}/{repo}/pages\", {\n mediaType: {\n previews: [\"switcheroo\"]\n }\n }],\n createRelease: [\"POST /repos/{owner}/{repo}/releases\"],\n createUsingTemplate: [\"POST /repos/{template_owner}/{template_repo}/generate\", {\n mediaType: {\n previews: [\"baptiste\"]\n }\n }],\n createWebhook: [\"POST /repos/{owner}/{repo}/hooks\"],\n declineInvitation: [\"DELETE /user/repository_invitations/{invitation_id}\"],\n delete: [\"DELETE /repos/{owner}/{repo}\"],\n deleteAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"],\n deleteAdminBranchProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n deleteBranchProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection\"],\n deleteCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}\"],\n deleteCommitSignatureProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\", {\n mediaType: {\n previews: [\"zzzax\"]\n }\n }],\n deleteDeployKey: [\"DELETE /repos/{owner}/{repo}/keys/{key_id}\"],\n deleteDeployment: [\"DELETE /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n deleteFile: [\"DELETE /repos/{owner}/{repo}/contents/{path}\"],\n deleteInvitation: [\"DELETE /repos/{owner}/{repo}/invitations/{invitation_id}\"],\n deletePagesSite: [\"DELETE /repos/{owner}/{repo}/pages\", {\n mediaType: {\n previews: [\"switcheroo\"]\n }\n }],\n deletePullRequestReviewProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n deleteRelease: [\"DELETE /repos/{owner}/{repo}/releases/{release_id}\"],\n deleteReleaseAsset: [\"DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n deleteWebhook: [\"DELETE /repos/{owner}/{repo}/hooks/{hook_id}\"],\n disableAutomatedSecurityFixes: [\"DELETE /repos/{owner}/{repo}/automated-security-fixes\", {\n mediaType: {\n previews: [\"london\"]\n }\n }],\n disableVulnerabilityAlerts: [\"DELETE /repos/{owner}/{repo}/vulnerability-alerts\", {\n mediaType: {\n previews: [\"dorian\"]\n }\n }],\n downloadArchive: [\"GET /repos/{owner}/{repo}/{archive_format}/{ref}\"],\n enableAutomatedSecurityFixes: [\"PUT /repos/{owner}/{repo}/automated-security-fixes\", {\n mediaType: {\n previews: [\"london\"]\n }\n }],\n enableVulnerabilityAlerts: [\"PUT /repos/{owner}/{repo}/vulnerability-alerts\", {\n mediaType: {\n previews: 
[\"dorian\"]\n }\n }],\n get: [\"GET /repos/{owner}/{repo}\"],\n getAccessRestrictions: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"],\n getAdminBranchProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n getAllStatusCheckContexts: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\"],\n getAllTopics: [\"GET /repos/{owner}/{repo}/topics\", {\n mediaType: {\n previews: [\"mercy\"]\n }\n }],\n getAppsWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\"],\n getBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}\"],\n getBranchProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection\"],\n getClones: [\"GET /repos/{owner}/{repo}/traffic/clones\"],\n getCodeFrequencyStats: [\"GET /repos/{owner}/{repo}/stats/code_frequency\"],\n getCollaboratorPermissionLevel: [\"GET /repos/{owner}/{repo}/collaborators/{username}/permission\"],\n getCombinedStatusForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/status\"],\n getCommit: [\"GET /repos/{owner}/{repo}/commits/{ref}\"],\n getCommitActivityStats: [\"GET /repos/{owner}/{repo}/stats/commit_activity\"],\n getCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}\"],\n getCommitSignatureProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\", {\n mediaType: {\n previews: [\"zzzax\"]\n }\n }],\n getCommunityProfileMetrics: [\"GET /repos/{owner}/{repo}/community/profile\", {\n mediaType: {\n previews: [\"black-panther\"]\n }\n }],\n getContent: [\"GET /repos/{owner}/{repo}/contents/{path}\"],\n getContributorsStats: [\"GET /repos/{owner}/{repo}/stats/contributors\"],\n getDeployKey: [\"GET /repos/{owner}/{repo}/keys/{key_id}\"],\n getDeployment: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n getDeploymentStatus: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}\"],\n getLatestPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/latest\"],\n getLatestRelease: [\"GET /repos/{owner}/{repo}/releases/latest\"],\n getPages: [\"GET /repos/{owner}/{repo}/pages\"],\n getPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/{build_id}\"],\n getParticipationStats: [\"GET /repos/{owner}/{repo}/stats/participation\"],\n getPullRequestReviewProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n getPunchCardStats: [\"GET /repos/{owner}/{repo}/stats/punch_card\"],\n getReadme: [\"GET /repos/{owner}/{repo}/readme\"],\n getRelease: [\"GET /repos/{owner}/{repo}/releases/{release_id}\"],\n getReleaseAsset: [\"GET /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n getReleaseByTag: [\"GET /repos/{owner}/{repo}/releases/tags/{tag}\"],\n getStatusChecksProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n getTeamsWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\"],\n getTopPaths: [\"GET /repos/{owner}/{repo}/traffic/popular/paths\"],\n getTopReferrers: [\"GET /repos/{owner}/{repo}/traffic/popular/referrers\"],\n getUsersWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\"],\n getViews: [\"GET /repos/{owner}/{repo}/traffic/views\"],\n getWebhook: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}\"],\n listBranches: [\"GET /repos/{owner}/{repo}/branches\"],\n listBranchesForHeadCommit: [\"GET 
/repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head\", {\n mediaType: {\n previews: [\"groot\"]\n }\n }],\n listCollaborators: [\"GET /repos/{owner}/{repo}/collaborators\"],\n listCommentsForCommit: [\"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\"],\n listCommitCommentsForRepo: [\"GET /repos/{owner}/{repo}/comments\"],\n listCommitStatusesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/statuses\"],\n listCommits: [\"GET /repos/{owner}/{repo}/commits\"],\n listContributors: [\"GET /repos/{owner}/{repo}/contributors\"],\n listDeployKeys: [\"GET /repos/{owner}/{repo}/keys\"],\n listDeploymentStatuses: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"],\n listDeployments: [\"GET /repos/{owner}/{repo}/deployments\"],\n listForAuthenticatedUser: [\"GET /user/repos\"],\n listForOrg: [\"GET /orgs/{org}/repos\"],\n listForUser: [\"GET /users/{username}/repos\"],\n listForks: [\"GET /repos/{owner}/{repo}/forks\"],\n listInvitations: [\"GET /repos/{owner}/{repo}/invitations\"],\n listInvitationsForAuthenticatedUser: [\"GET /user/repository_invitations\"],\n listLanguages: [\"GET /repos/{owner}/{repo}/languages\"],\n listPagesBuilds: [\"GET /repos/{owner}/{repo}/pages/builds\"],\n listPublic: [\"GET /repositories\"],\n listPullRequestsAssociatedWithCommit: [\"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\", {\n mediaType: {\n previews: [\"groot\"]\n }\n }],\n listReleaseAssets: [\"GET /repos/{owner}/{repo}/releases/{release_id}/assets\"],\n listReleases: [\"GET /repos/{owner}/{repo}/releases\"],\n listTags: [\"GET /repos/{owner}/{repo}/tags\"],\n listTeams: [\"GET /repos/{owner}/{repo}/teams\"],\n listWebhooks: [\"GET /repos/{owner}/{repo}/hooks\"],\n merge: [\"POST /repos/{owner}/{repo}/merges\"],\n pingWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/pings\"],\n removeAppAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n removeCollaborator: [\"DELETE /repos/{owner}/{repo}/collaborators/{username}\"],\n removeStatusCheckContexts: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n removeStatusCheckProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n removeTeamAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n removeUserAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n replaceAllTopics: [\"PUT /repos/{owner}/{repo}/topics\", {\n mediaType: {\n previews: [\"mercy\"]\n }\n }],\n requestPagesBuild: [\"POST /repos/{owner}/{repo}/pages/builds\"],\n setAdminBranchProtection: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n setAppAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n setStatusCheckContexts: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n setTeamAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n setUserAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n testPushWebhook: [\"POST 
/repos/{owner}/{repo}/hooks/{hook_id}/tests\"],\n transfer: [\"POST /repos/{owner}/{repo}/transfer\"],\n update: [\"PATCH /repos/{owner}/{repo}\"],\n updateBranchProtection: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection\"],\n updateCommitComment: [\"PATCH /repos/{owner}/{repo}/comments/{comment_id}\"],\n updateInformationAboutPagesSite: [\"PUT /repos/{owner}/{repo}/pages\"],\n updateInvitation: [\"PATCH /repos/{owner}/{repo}/invitations/{invitation_id}\"],\n updatePullRequestReviewProtection: [\"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n updateRelease: [\"PATCH /repos/{owner}/{repo}/releases/{release_id}\"],\n updateReleaseAsset: [\"PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n updateStatusCheckPotection: [\"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n updateWebhook: [\"PATCH /repos/{owner}/{repo}/hooks/{hook_id}\"],\n uploadReleaseAsset: [\"POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}\", {\n baseUrl: \"https://uploads.github.com\"\n }]\n },\n search: {\n code: [\"GET /search/code\"],\n commits: [\"GET /search/commits\", {\n mediaType: {\n previews: [\"cloak\"]\n }\n }],\n issuesAndPullRequests: [\"GET /search/issues\"],\n labels: [\"GET /search/labels\"],\n repos: [\"GET /search/repositories\"],\n topics: [\"GET /search/topics\", {\n mediaType: {\n previews: [\"mercy\"]\n }\n }],\n users: [\"GET /search/users\"]\n },\n teams: {\n addOrUpdateMembershipForUserInOrg: [\"PUT /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n addOrUpdateProjectPermissionsInOrg: [\"PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n addOrUpdateRepoPermissionsInOrg: [\"PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n checkPermissionsForProjectInOrg: [\"GET /orgs/{org}/teams/{team_slug}/projects/{project_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n checkPermissionsForRepoInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n create: [\"POST /orgs/{org}/teams\"],\n createDiscussionCommentInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"],\n createDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions\"],\n deleteDiscussionCommentInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n deleteDiscussionInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n deleteInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}\"],\n getByName: [\"GET /orgs/{org}/teams/{team_slug}\"],\n getDiscussionCommentInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n getDiscussionInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n getMembershipForUserInOrg: [\"GET /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n list: [\"GET /orgs/{org}/teams\"],\n listChildInOrg: [\"GET /orgs/{org}/teams/{team_slug}/teams\"],\n listDiscussionCommentsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"],\n listDiscussionsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions\"],\n listForAuthenticatedUser: [\"GET /user/teams\"],\n listMembersInOrg: [\"GET /orgs/{org}/teams/{team_slug}/members\"],\n listPendingInvitationsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/invitations\"],\n listProjectsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/projects\", 
{\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listReposInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos\"],\n removeMembershipForUserInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n removeProjectInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}\"],\n removeRepoInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n updateDiscussionCommentInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n updateDiscussionInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n updateInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}\"]\n },\n users: {\n addEmailForAuthenticated: [\"POST /user/emails\"],\n block: [\"PUT /user/blocks/{username}\"],\n checkBlocked: [\"GET /user/blocks/{username}\"],\n checkFollowingForUser: [\"GET /users/{username}/following/{target_user}\"],\n checkPersonIsFollowedByAuthenticated: [\"GET /user/following/{username}\"],\n createGpgKeyForAuthenticated: [\"POST /user/gpg_keys\"],\n createPublicSshKeyForAuthenticated: [\"POST /user/keys\"],\n deleteEmailForAuthenticated: [\"DELETE /user/emails\"],\n deleteGpgKeyForAuthenticated: [\"DELETE /user/gpg_keys/{gpg_key_id}\"],\n deletePublicSshKeyForAuthenticated: [\"DELETE /user/keys/{key_id}\"],\n follow: [\"PUT /user/following/{username}\"],\n getAuthenticated: [\"GET /user\"],\n getByUsername: [\"GET /users/{username}\"],\n getContextForUser: [\"GET /users/{username}/hovercard\"],\n getGpgKeyForAuthenticated: [\"GET /user/gpg_keys/{gpg_key_id}\"],\n getPublicSshKeyForAuthenticated: [\"GET /user/keys/{key_id}\"],\n list: [\"GET /users\"],\n listBlockedByAuthenticated: [\"GET /user/blocks\"],\n listEmailsForAuthenticated: [\"GET /user/emails\"],\n listFollowedByAuthenticated: [\"GET /user/following\"],\n listFollowersForAuthenticatedUser: [\"GET /user/followers\"],\n listFollowersForUser: [\"GET /users/{username}/followers\"],\n listFollowingForUser: [\"GET /users/{username}/following\"],\n listGpgKeysForAuthenticated: [\"GET /user/gpg_keys\"],\n listGpgKeysForUser: [\"GET /users/{username}/gpg_keys\"],\n listPublicEmailsForAuthenticated: [\"GET /user/public_emails\"],\n listPublicKeysForUser: [\"GET /users/{username}/keys\"],\n listPublicSshKeysForAuthenticated: [\"GET /user/keys\"],\n setPrimaryEmailVisibilityForAuthenticated: [\"PATCH /user/email/visibility\"],\n unblock: [\"DELETE /user/blocks/{username}\"],\n unfollow: [\"DELETE /user/following/{username}\"],\n updateAuthenticated: [\"PATCH /user\"]\n }\n};\n\nconst VERSION = \"4.2.1\";\n\nfunction endpointsToMethods(octokit, endpointsMap) {\n const newMethods = {};\n\n for (const [scope, endpoints] of Object.entries(endpointsMap)) {\n for (const [methodName, endpoint] of Object.entries(endpoints)) {\n const [route, defaults, decorations] = endpoint;\n const [method, url] = route.split(/ /);\n const endpointDefaults = Object.assign({\n method,\n url\n }, defaults);\n\n if (!newMethods[scope]) {\n newMethods[scope] = {};\n }\n\n const scopeMethods = newMethods[scope];\n\n if (decorations) {\n scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations);\n continue;\n }\n\n scopeMethods[methodName] = octokit.request.defaults(endpointDefaults);\n }\n }\n\n return newMethods;\n}\n\nfunction decorate(octokit, scope, methodName, defaults, decorations) {\n const requestWithDefaults = octokit.request.defaults(defaults);\n /* istanbul ignore next */\n\n function withDecorations(...args) {\n // @ts-ignore 
https://github.com/microsoft/TypeScript/issues/25488\n let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData`\n\n if (decorations.mapToData) {\n options = Object.assign({}, options, {\n data: options[decorations.mapToData],\n [decorations.mapToData]: undefined\n });\n return requestWithDefaults(options);\n }\n\n if (decorations.renamed) {\n const [newScope, newMethodName] = decorations.renamed;\n octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`);\n }\n\n if (decorations.deprecated) {\n octokit.log.warn(decorations.deprecated);\n }\n\n if (decorations.renamedParameters) {\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n const options = requestWithDefaults.endpoint.merge(...args);\n\n for (const [name, alias] of Object.entries(decorations.renamedParameters)) {\n if (name in options) {\n octokit.log.warn(`\"${name}\" parameter is deprecated for \"octokit.${scope}.${methodName}()\". Use \"${alias}\" instead`);\n\n if (!(alias in options)) {\n options[alias] = options[name];\n }\n\n delete options[name];\n }\n }\n\n return requestWithDefaults(options);\n } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n\n\n return requestWithDefaults(...args);\n }\n\n return Object.assign(withDecorations, requestWithDefaults);\n}\n\n/**\n * This plugin is a 1:1 copy of internal @octokit/rest plugins. The primary\n * goal is to rebuild @octokit/rest on top of @octokit/core. Once that is\n * done, we will remove the registerEndpoints methods and return the methods\n * directly as with the other plugins. At that point we will also remove the\n * legacy workarounds and deprecations.\n *\n * See the plan at\n * https://github.com/octokit/plugin-rest-endpoint-methods.js/pull/1\n */\n\nfunction restEndpointMethods(octokit) {\n return endpointsToMethods(octokit, Endpoints);\n}\nrestEndpointMethods.VERSION = VERSION;\n\nexports.restEndpointMethods = restEndpointMethods;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; }\n\nvar deprecation = require('deprecation');\nvar once = _interopDefault(require('once'));\n\nconst logOnce = once(deprecation => console.warn(deprecation));\n/**\n * Error with extra properties to help with debugging\n */\n\nclass RequestError extends Error {\n constructor(message, statusCode, options) {\n super(message); // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n\n this.name = \"HttpError\";\n this.status = statusCode;\n Object.defineProperty(this, \"code\", {\n get() {\n logOnce(new deprecation.Deprecation(\"[@octokit/request-error] `error.code` is deprecated, use `error.status`.\"));\n return statusCode;\n }\n\n });\n this.headers = options.headers || {}; // redact request credentials without mutating original request options\n\n const requestCopy = Object.assign({}, options.request);\n\n if (options.request.headers.authorization) {\n requestCopy.headers = Object.assign({}, options.request.headers, {\n authorization: options.request.headers.authorization.replace(/ .*$/, \" [REDACTED]\")\n });\n }\n\n requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit\n // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications\n .replace(/\\bclient_secret=\\w+/g, \"client_secret=[REDACTED]\") // OAuth tokens can be passed as URL query parameters, although it is not recommended\n // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header\n .replace(/\\baccess_token=\\w+/g, \"access_token=[REDACTED]\");\n this.request = requestCopy;\n }\n\n}\n\nexports.RequestError = RequestError;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; }\n\nvar endpoint = require('@octokit/endpoint');\nvar universalUserAgent = require('universal-user-agent');\nvar isPlainObject = require('is-plain-object');\nvar nodeFetch = _interopDefault(require('node-fetch'));\nvar requestError = require('@octokit/request-error');\n\nconst VERSION = \"5.4.10\";\n\nfunction getBufferResponse(response) {\n return response.arrayBuffer();\n}\n\nfunction fetchWrapper(requestOptions) {\n if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {\n requestOptions.body = JSON.stringify(requestOptions.body);\n }\n\n let headers = {};\n let status;\n let url;\n const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch;\n return fetch(requestOptions.url, Object.assign({\n method: requestOptions.method,\n body: requestOptions.body,\n headers: requestOptions.headers,\n redirect: requestOptions.redirect\n }, requestOptions.request)).then(response => {\n url = response.url;\n status = response.status;\n\n for (const keyAndValue of response.headers) {\n headers[keyAndValue[0]] = keyAndValue[1];\n }\n\n if (status === 204 || status === 205) {\n return;\n } // GitHub API returns 200 for HEAD requests\n\n\n if (requestOptions.method === \"HEAD\") {\n if (status < 400) {\n return;\n }\n\n throw new requestError.RequestError(response.statusText, status, {\n headers,\n request: requestOptions\n });\n }\n\n if (status === 304) {\n throw new requestError.RequestError(\"Not modified\", status, {\n headers,\n request: requestOptions\n });\n }\n\n if (status >= 400) {\n return response.text().then(message => {\n const error = new requestError.RequestError(message, status, {\n headers,\n request: requestOptions\n });\n\n try {\n let responseBody = JSON.parse(error.message);\n Object.assign(error, responseBody);\n let errors = responseBody.errors; // Assumption `errors` would always be in Array format\n\n error.message = error.message + \": \" + errors.map(JSON.stringify).join(\", \");\n } catch (e) {// ignore, see octokit/rest.js#684\n }\n\n throw error;\n });\n }\n\n const contentType = response.headers.get(\"content-type\");\n\n if (/application\\/json/.test(contentType)) {\n return response.json();\n }\n\n if (!contentType || /^text\\/|charset=utf-8$/.test(contentType)) {\n return response.text();\n }\n\n return getBufferResponse(response);\n }).then(data => {\n return {\n status,\n url,\n headers,\n data\n };\n }).catch(error => {\n if (error instanceof requestError.RequestError) {\n throw error;\n }\n\n throw new requestError.RequestError(error.message, 500, {\n headers,\n request: requestOptions\n });\n });\n}\n\nfunction withDefaults(oldEndpoint, newDefaults) {\n const endpoint = oldEndpoint.defaults(newDefaults);\n\n const newApi = function (route, parameters) {\n const endpointOptions = endpoint.merge(route, parameters);\n\n if (!endpointOptions.request || !endpointOptions.request.hook) {\n return fetchWrapper(endpoint.parse(endpointOptions));\n }\n\n const request = (route, parameters) => {\n return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters)));\n };\n\n Object.assign(request, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint)\n });\n return endpointOptions.request.hook(request, endpointOptions);\n };\n\n return Object.assign(newApi, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint)\n });\n}\n\nconst request = withDefaults(endpoint.endpoint, {\n headers: {\n \"user-agent\": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}`\n 
}\n});\n\nexports.request = request;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\n/*!\n * is-plain-object \n *\n * Copyright (c) 2014-2017, Jon Schlinkert.\n * Released under the MIT License.\n */\n\nfunction isObject(o) {\n return Object.prototype.toString.call(o) === '[object Object]';\n}\n\nfunction isPlainObject(o) {\n var ctor,prot;\n\n if (isObject(o) === false) return false;\n\n // If has modified constructor\n ctor = o.constructor;\n if (ctor === undefined) return true;\n\n // If has modified prototype\n prot = ctor.prototype;\n if (isObject(prot) === false) return false;\n\n // If constructor does not have an Object-specific method\n if (prot.hasOwnProperty('isPrototypeOf') === false) {\n return false;\n }\n\n // Most likely a plain Object\n return true;\n}\n\nexports.isPlainObject = isPlainObject;\n","var register = require('./lib/register')\nvar addHook = require('./lib/add')\nvar removeHook = require('./lib/remove')\n\n// bind with array of arguments: https://stackoverflow.com/a/21792913\nvar bind = Function.bind\nvar bindable = bind.bind(bind)\n\nfunction bindApi (hook, state, name) {\n var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state])\n hook.api = { remove: removeHookRef }\n hook.remove = removeHookRef\n\n ;['before', 'error', 'after', 'wrap'].forEach(function (kind) {\n var args = name ? [state, kind, name] : [state, kind]\n hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args)\n })\n}\n\nfunction HookSingular () {\n var singularHookName = 'h'\n var singularHookState = {\n registry: {}\n }\n var singularHook = register.bind(null, singularHookState, singularHookName)\n bindApi(singularHook, singularHookState, singularHookName)\n return singularHook\n}\n\nfunction HookCollection () {\n var state = {\n registry: {}\n }\n\n var hook = register.bind(null, state)\n bindApi(hook, state)\n\n return hook\n}\n\nvar collectionHookDeprecationMessageDisplayed = false\nfunction Hook () {\n if (!collectionHookDeprecationMessageDisplayed) {\n console.warn('[before-after-hook]: \"Hook()\" repurposing warning, use \"Hook.Collection()\". 
Read more: https://git.io/upgrade-before-after-hook-to-1.4')\n collectionHookDeprecationMessageDisplayed = true\n }\n return HookCollection()\n}\n\nHook.Singular = HookSingular.bind()\nHook.Collection = HookCollection.bind()\n\nmodule.exports = Hook\n// expose constructors as a named property for TypeScript\nmodule.exports.Hook = Hook\nmodule.exports.Singular = Hook.Singular\nmodule.exports.Collection = Hook.Collection\n","module.exports = addHook\n\nfunction addHook (state, kind, name, hook) {\n var orig = hook\n if (!state.registry[name]) {\n state.registry[name] = []\n }\n\n if (kind === 'before') {\n hook = function (method, options) {\n return Promise.resolve()\n .then(orig.bind(null, options))\n .then(method.bind(null, options))\n }\n }\n\n if (kind === 'after') {\n hook = function (method, options) {\n var result\n return Promise.resolve()\n .then(method.bind(null, options))\n .then(function (result_) {\n result = result_\n return orig(result, options)\n })\n .then(function () {\n return result\n })\n }\n }\n\n if (kind === 'error') {\n hook = function (method, options) {\n return Promise.resolve()\n .then(method.bind(null, options))\n .catch(function (error) {\n return orig(error, options)\n })\n }\n }\n\n state.registry[name].push({\n hook: hook,\n orig: orig\n })\n}\n","module.exports = register\n\nfunction register (state, name, method, options) {\n if (typeof method !== 'function') {\n throw new Error('method for before hook must be a function')\n }\n\n if (!options) {\n options = {}\n }\n\n if (Array.isArray(name)) {\n return name.reverse().reduce(function (callback, name) {\n return register.bind(null, state, name, callback, options)\n }, method)()\n }\n\n return Promise.resolve()\n .then(function () {\n if (!state.registry[name]) {\n return method(options)\n }\n\n return (state.registry[name]).reduce(function (method, registered) {\n return registered.hook.bind(null, method, options)\n }, method)()\n })\n}\n","module.exports = removeHook\n\nfunction removeHook (state, name, method) {\n if (!state.registry[name]) {\n return\n }\n\n var index = state.registry[name]\n .map(function (registered) { return registered.orig })\n .indexOf(method)\n\n if (index === -1) {\n return\n }\n\n state.registry[name].splice(index, 1)\n}\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nclass Deprecation extends Error {\n constructor(message) {\n super(message); // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n\n this.name = 'Deprecation';\n }\n\n}\n\nexports.Deprecation = Deprecation;\n","'use strict';\n\nconst stringify = require('./lib/stringify');\nconst compile = require('./lib/compile');\nconst expand = require('./lib/expand');\nconst parse = require('./lib/parse');\n\n/**\n * Expand the given pattern or create a regex-compatible string.\n *\n * ```js\n * const braces = require('braces');\n * console.log(braces('{a,b,c}', { compile: true })); //=> ['(a|b|c)']\n * console.log(braces('{a,b,c}')); //=> ['a', 'b', 'c']\n * ```\n * @param {String} `str`\n * @param {Object} `options`\n * @return {String}\n * @api public\n */\n\nconst braces = (input, options = {}) => {\n let output = [];\n\n if (Array.isArray(input)) {\n for (let pattern of input) {\n let result = braces.create(pattern, options);\n if (Array.isArray(result)) {\n output.push(...result);\n } else {\n output.push(result);\n }\n }\n } else {\n output = 
[].concat(braces.create(input, options));\n }\n\n if (options && options.expand === true && options.nodupes === true) {\n output = [...new Set(output)];\n }\n return output;\n};\n\n/**\n * Parse the given `str` with the given `options`.\n *\n * ```js\n * // braces.parse(pattern, [, options]);\n * const ast = braces.parse('a/{b,c}/d');\n * console.log(ast);\n * ```\n * @param {String} pattern Brace pattern to parse\n * @param {Object} options\n * @return {Object} Returns an AST\n * @api public\n */\n\nbraces.parse = (input, options = {}) => parse(input, options);\n\n/**\n * Creates a braces string from an AST, or an AST node.\n *\n * ```js\n * const braces = require('braces');\n * let ast = braces.parse('foo/{a,b}/bar');\n * console.log(stringify(ast.nodes[2])); //=> '{a,b}'\n * ```\n * @param {String} `input` Brace pattern or AST.\n * @param {Object} `options`\n * @return {Array} Returns an array of expanded values.\n * @api public\n */\n\nbraces.stringify = (input, options = {}) => {\n if (typeof input === 'string') {\n return stringify(braces.parse(input, options), options);\n }\n return stringify(input, options);\n};\n\n/**\n * Compiles a brace pattern into a regex-compatible, optimized string.\n * This method is called by the main [braces](#braces) function by default.\n *\n * ```js\n * const braces = require('braces');\n * console.log(braces.compile('a/{b,c}/d'));\n * //=> ['a/(b|c)/d']\n * ```\n * @param {String} `input` Brace pattern or AST.\n * @param {Object} `options`\n * @return {Array} Returns an array of expanded values.\n * @api public\n */\n\nbraces.compile = (input, options = {}) => {\n if (typeof input === 'string') {\n input = braces.parse(input, options);\n }\n return compile(input, options);\n};\n\n/**\n * Expands a brace pattern into an array. This method is called by the\n * main [braces](#braces) function when `options.expand` is true. Before\n * using this method it's recommended that you read the [performance notes](#performance))\n * and advantages of using [.compile](#compile) instead.\n *\n * ```js\n * const braces = require('braces');\n * console.log(braces.expand('a/{b,c}/d'));\n * //=> ['a/b/d', 'a/c/d'];\n * ```\n * @param {String} `pattern` Brace pattern\n * @param {Object} `options`\n * @return {Array} Returns an array of expanded values.\n * @api public\n */\n\nbraces.expand = (input, options = {}) => {\n if (typeof input === 'string') {\n input = braces.parse(input, options);\n }\n\n let result = expand(input, options);\n\n // filter out empty strings if specified\n if (options.noempty === true) {\n result = result.filter(Boolean);\n }\n\n // filter out duplicates if specified\n if (options.nodupes === true) {\n result = [...new Set(result)];\n }\n\n return result;\n};\n\n/**\n * Processes a brace pattern and returns either an expanded array\n * (if `options.expand` is true), a highly optimized regex-compatible string.\n * This method is called by the main [braces](#braces) function.\n *\n * ```js\n * const braces = require('braces');\n * console.log(braces.create('user-{200..300}/project-{a,b,c}-{1..10}'))\n * //=> 'user-(20[0-9]|2[1-9][0-9]|300)/project-(a|b|c)-([1-9]|10)'\n * ```\n * @param {String} `pattern` Brace pattern\n * @param {Object} `options`\n * @return {Array} Returns an array of expanded values.\n * @api public\n */\n\nbraces.create = (input, options = {}) => {\n if (input === '' || input.length < 3) {\n return [input];\n }\n\n return options.expand !== true\n ? 
braces.compile(input, options)\n : braces.expand(input, options);\n};\n\n/**\n * Expose \"braces\"\n */\n\nmodule.exports = braces;\n","'use strict';\n\nconst fill = require('fill-range');\nconst utils = require('./utils');\n\nconst compile = (ast, options = {}) => {\n let walk = (node, parent = {}) => {\n let invalidBlock = utils.isInvalidBrace(parent);\n let invalidNode = node.invalid === true && options.escapeInvalid === true;\n let invalid = invalidBlock === true || invalidNode === true;\n let prefix = options.escapeInvalid === true ? '\\\\' : '';\n let output = '';\n\n if (node.isOpen === true) {\n return prefix + node.value;\n }\n if (node.isClose === true) {\n return prefix + node.value;\n }\n\n if (node.type === 'open') {\n return invalid ? (prefix + node.value) : '(';\n }\n\n if (node.type === 'close') {\n return invalid ? (prefix + node.value) : ')';\n }\n\n if (node.type === 'comma') {\n return node.prev.type === 'comma' ? '' : (invalid ? node.value : '|');\n }\n\n if (node.value) {\n return node.value;\n }\n\n if (node.nodes && node.ranges > 0) {\n let args = utils.reduce(node.nodes);\n let range = fill(...args, { ...options, wrap: false, toRegex: true });\n\n if (range.length !== 0) {\n return args.length > 1 && range.length > 1 ? `(${range})` : range;\n }\n }\n\n if (node.nodes) {\n for (let child of node.nodes) {\n output += walk(child, node);\n }\n }\n return output;\n };\n\n return walk(ast);\n};\n\nmodule.exports = compile;\n","'use strict';\n\nmodule.exports = {\n MAX_LENGTH: 1024 * 64,\n\n // Digits\n CHAR_0: '0', /* 0 */\n CHAR_9: '9', /* 9 */\n\n // Alphabet chars.\n CHAR_UPPERCASE_A: 'A', /* A */\n CHAR_LOWERCASE_A: 'a', /* a */\n CHAR_UPPERCASE_Z: 'Z', /* Z */\n CHAR_LOWERCASE_Z: 'z', /* z */\n\n CHAR_LEFT_PARENTHESES: '(', /* ( */\n CHAR_RIGHT_PARENTHESES: ')', /* ) */\n\n CHAR_ASTERISK: '*', /* * */\n\n // Non-alphabetic chars.\n CHAR_AMPERSAND: '&', /* & */\n CHAR_AT: '@', /* @ */\n CHAR_BACKSLASH: '\\\\', /* \\ */\n CHAR_BACKTICK: '`', /* ` */\n CHAR_CARRIAGE_RETURN: '\\r', /* \\r */\n CHAR_CIRCUMFLEX_ACCENT: '^', /* ^ */\n CHAR_COLON: ':', /* : */\n CHAR_COMMA: ',', /* , */\n CHAR_DOLLAR: '$', /* . */\n CHAR_DOT: '.', /* . */\n CHAR_DOUBLE_QUOTE: '\"', /* \" */\n CHAR_EQUAL: '=', /* = */\n CHAR_EXCLAMATION_MARK: '!', /* ! */\n CHAR_FORM_FEED: '\\f', /* \\f */\n CHAR_FORWARD_SLASH: '/', /* / */\n CHAR_HASH: '#', /* # */\n CHAR_HYPHEN_MINUS: '-', /* - */\n CHAR_LEFT_ANGLE_BRACKET: '<', /* < */\n CHAR_LEFT_CURLY_BRACE: '{', /* { */\n CHAR_LEFT_SQUARE_BRACKET: '[', /* [ */\n CHAR_LINE_FEED: '\\n', /* \\n */\n CHAR_NO_BREAK_SPACE: '\\u00A0', /* \\u00A0 */\n CHAR_PERCENT: '%', /* % */\n CHAR_PLUS: '+', /* + */\n CHAR_QUESTION_MARK: '?', /* ? */\n CHAR_RIGHT_ANGLE_BRACKET: '>', /* > */\n CHAR_RIGHT_CURLY_BRACE: '}', /* } */\n CHAR_RIGHT_SQUARE_BRACKET: ']', /* ] */\n CHAR_SEMICOLON: ';', /* ; */\n CHAR_SINGLE_QUOTE: '\\'', /* ' */\n CHAR_SPACE: ' ', /* */\n CHAR_TAB: '\\t', /* \\t */\n CHAR_UNDERSCORE: '_', /* _ */\n CHAR_VERTICAL_LINE: '|', /* | */\n CHAR_ZERO_WIDTH_NOBREAK_SPACE: '\\uFEFF' /* \\uFEFF */\n};\n","'use strict';\n\nconst fill = require('fill-range');\nconst stringify = require('./stringify');\nconst utils = require('./utils');\n\nconst append = (queue = '', stash = '', enclose = false) => {\n let result = [];\n\n queue = [].concat(queue);\n stash = [].concat(stash);\n\n if (!stash.length) return queue;\n if (!queue.length) {\n return enclose ? 
utils.flatten(stash).map(ele => `{${ele}}`) : stash;\n }\n\n for (let item of queue) {\n if (Array.isArray(item)) {\n for (let value of item) {\n result.push(append(value, stash, enclose));\n }\n } else {\n for (let ele of stash) {\n if (enclose === true && typeof ele === 'string') ele = `{${ele}}`;\n result.push(Array.isArray(ele) ? append(item, ele, enclose) : (item + ele));\n }\n }\n }\n return utils.flatten(result);\n};\n\nconst expand = (ast, options = {}) => {\n let rangeLimit = options.rangeLimit === void 0 ? 1000 : options.rangeLimit;\n\n let walk = (node, parent = {}) => {\n node.queue = [];\n\n let p = parent;\n let q = parent.queue;\n\n while (p.type !== 'brace' && p.type !== 'root' && p.parent) {\n p = p.parent;\n q = p.queue;\n }\n\n if (node.invalid || node.dollar) {\n q.push(append(q.pop(), stringify(node, options)));\n return;\n }\n\n if (node.type === 'brace' && node.invalid !== true && node.nodes.length === 2) {\n q.push(append(q.pop(), ['{}']));\n return;\n }\n\n if (node.nodes && node.ranges > 0) {\n let args = utils.reduce(node.nodes);\n\n if (utils.exceedsLimit(...args, options.step, rangeLimit)) {\n throw new RangeError('expanded array length exceeds range limit. Use options.rangeLimit to increase or disable the limit.');\n }\n\n let range = fill(...args, options);\n if (range.length === 0) {\n range = stringify(node, options);\n }\n\n q.push(append(q.pop(), range));\n node.nodes = [];\n return;\n }\n\n let enclose = utils.encloseBrace(node);\n let queue = node.queue;\n let block = node;\n\n while (block.type !== 'brace' && block.type !== 'root' && block.parent) {\n block = block.parent;\n queue = block.queue;\n }\n\n for (let i = 0; i < node.nodes.length; i++) {\n let child = node.nodes[i];\n\n if (child.type === 'comma' && node.type === 'brace') {\n if (i === 1) queue.push('');\n queue.push('');\n continue;\n }\n\n if (child.type === 'close') {\n q.push(append(q.pop(), queue, enclose));\n continue;\n }\n\n if (child.value && child.type !== 'open') {\n queue.push(append(queue.pop(), child.value));\n continue;\n }\n\n if (child.nodes) {\n walk(child, node);\n }\n }\n\n return queue;\n };\n\n return utils.flatten(walk(ast));\n};\n\nmodule.exports = expand;\n","'use strict';\n\nconst stringify = require('./stringify');\n\n/**\n * Constants\n */\n\nconst {\n MAX_LENGTH,\n CHAR_BACKSLASH, /* \\ */\n CHAR_BACKTICK, /* ` */\n CHAR_COMMA, /* , */\n CHAR_DOT, /* . */\n CHAR_LEFT_PARENTHESES, /* ( */\n CHAR_RIGHT_PARENTHESES, /* ) */\n CHAR_LEFT_CURLY_BRACE, /* { */\n CHAR_RIGHT_CURLY_BRACE, /* } */\n CHAR_LEFT_SQUARE_BRACKET, /* [ */\n CHAR_RIGHT_SQUARE_BRACKET, /* ] */\n CHAR_DOUBLE_QUOTE, /* \" */\n CHAR_SINGLE_QUOTE, /* ' */\n CHAR_NO_BREAK_SPACE,\n CHAR_ZERO_WIDTH_NOBREAK_SPACE\n} = require('./constants');\n\n/**\n * parse\n */\n\nconst parse = (input, options = {}) => {\n if (typeof input !== 'string') {\n throw new TypeError('Expected a string');\n }\n\n let opts = options || {};\n let max = typeof opts.maxLength === 'number' ? 
Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH;\n if (input.length > max) {\n throw new SyntaxError(`Input length (${input.length}), exceeds max characters (${max})`);\n }\n\n let ast = { type: 'root', input, nodes: [] };\n let stack = [ast];\n let block = ast;\n let prev = ast;\n let brackets = 0;\n let length = input.length;\n let index = 0;\n let depth = 0;\n let value;\n let memo = {};\n\n /**\n * Helpers\n */\n\n const advance = () => input[index++];\n const push = node => {\n if (node.type === 'text' && prev.type === 'dot') {\n prev.type = 'text';\n }\n\n if (prev && prev.type === 'text' && node.type === 'text') {\n prev.value += node.value;\n return;\n }\n\n block.nodes.push(node);\n node.parent = block;\n node.prev = prev;\n prev = node;\n return node;\n };\n\n push({ type: 'bos' });\n\n while (index < length) {\n block = stack[stack.length - 1];\n value = advance();\n\n /**\n * Invalid chars\n */\n\n if (value === CHAR_ZERO_WIDTH_NOBREAK_SPACE || value === CHAR_NO_BREAK_SPACE) {\n continue;\n }\n\n /**\n * Escaped chars\n */\n\n if (value === CHAR_BACKSLASH) {\n push({ type: 'text', value: (options.keepEscaping ? value : '') + advance() });\n continue;\n }\n\n /**\n * Right square bracket (literal): ']'\n */\n\n if (value === CHAR_RIGHT_SQUARE_BRACKET) {\n push({ type: 'text', value: '\\\\' + value });\n continue;\n }\n\n /**\n * Left square bracket: '['\n */\n\n if (value === CHAR_LEFT_SQUARE_BRACKET) {\n brackets++;\n\n let closed = true;\n let next;\n\n while (index < length && (next = advance())) {\n value += next;\n\n if (next === CHAR_LEFT_SQUARE_BRACKET) {\n brackets++;\n continue;\n }\n\n if (next === CHAR_BACKSLASH) {\n value += advance();\n continue;\n }\n\n if (next === CHAR_RIGHT_SQUARE_BRACKET) {\n brackets--;\n\n if (brackets === 0) {\n break;\n }\n }\n }\n\n push({ type: 'text', value });\n continue;\n }\n\n /**\n * Parentheses\n */\n\n if (value === CHAR_LEFT_PARENTHESES) {\n block = push({ type: 'paren', nodes: [] });\n stack.push(block);\n push({ type: 'text', value });\n continue;\n }\n\n if (value === CHAR_RIGHT_PARENTHESES) {\n if (block.type !== 'paren') {\n push({ type: 'text', value });\n continue;\n }\n block = stack.pop();\n push({ type: 'text', value });\n block = stack[stack.length - 1];\n continue;\n }\n\n /**\n * Quotes: '|\"|`\n */\n\n if (value === CHAR_DOUBLE_QUOTE || value === CHAR_SINGLE_QUOTE || value === CHAR_BACKTICK) {\n let open = value;\n let next;\n\n if (options.keepQuotes !== true) {\n value = '';\n }\n\n while (index < length && (next = advance())) {\n if (next === CHAR_BACKSLASH) {\n value += next + advance();\n continue;\n }\n\n if (next === open) {\n if (options.keepQuotes === true) value += next;\n break;\n }\n\n value += next;\n }\n\n push({ type: 'text', value });\n continue;\n }\n\n /**\n * Left curly brace: '{'\n */\n\n if (value === CHAR_LEFT_CURLY_BRACE) {\n depth++;\n\n let dollar = prev.value && prev.value.slice(-1) === '$' || block.dollar === true;\n let brace = {\n type: 'brace',\n open: true,\n close: false,\n dollar,\n depth,\n commas: 0,\n ranges: 0,\n nodes: []\n };\n\n block = push(brace);\n stack.push(block);\n push({ type: 'open', value });\n continue;\n }\n\n /**\n * Right curly brace: '}'\n */\n\n if (value === CHAR_RIGHT_CURLY_BRACE) {\n if (block.type !== 'brace') {\n push({ type: 'text', value });\n continue;\n }\n\n let type = 'close';\n block = stack.pop();\n block.close = true;\n\n push({ type, value });\n depth--;\n\n block = stack[stack.length - 1];\n continue;\n }\n\n /**\n * Comma: ','\n */\n\n if 
(value === CHAR_COMMA && depth > 0) {\n if (block.ranges > 0) {\n block.ranges = 0;\n let open = block.nodes.shift();\n block.nodes = [open, { type: 'text', value: stringify(block) }];\n }\n\n push({ type: 'comma', value });\n block.commas++;\n continue;\n }\n\n /**\n * Dot: '.'\n */\n\n if (value === CHAR_DOT && depth > 0 && block.commas === 0) {\n let siblings = block.nodes;\n\n if (depth === 0 || siblings.length === 0) {\n push({ type: 'text', value });\n continue;\n }\n\n if (prev.type === 'dot') {\n block.range = [];\n prev.value += value;\n prev.type = 'range';\n\n if (block.nodes.length !== 3 && block.nodes.length !== 5) {\n block.invalid = true;\n block.ranges = 0;\n prev.type = 'text';\n continue;\n }\n\n block.ranges++;\n block.args = [];\n continue;\n }\n\n if (prev.type === 'range') {\n siblings.pop();\n\n let before = siblings[siblings.length - 1];\n before.value += prev.value + value;\n prev = before;\n block.ranges--;\n continue;\n }\n\n push({ type: 'dot', value });\n continue;\n }\n\n /**\n * Text\n */\n\n push({ type: 'text', value });\n }\n\n // Mark imbalanced braces and brackets as invalid\n do {\n block = stack.pop();\n\n if (block.type !== 'root') {\n block.nodes.forEach(node => {\n if (!node.nodes) {\n if (node.type === 'open') node.isOpen = true;\n if (node.type === 'close') node.isClose = true;\n if (!node.nodes) node.type = 'text';\n node.invalid = true;\n }\n });\n\n // get the location of the block on parent.nodes (block's siblings)\n let parent = stack[stack.length - 1];\n let index = parent.nodes.indexOf(block);\n // replace the (invalid) block with it's nodes\n parent.nodes.splice(index, 1, ...block.nodes);\n }\n } while (stack.length > 0);\n\n push({ type: 'eos' });\n return ast;\n};\n\nmodule.exports = parse;\n","'use strict';\n\nconst utils = require('./utils');\n\nmodule.exports = (ast, options = {}) => {\n let stringify = (node, parent = {}) => {\n let invalidBlock = options.escapeInvalid && utils.isInvalidBrace(parent);\n let invalidNode = node.invalid === true && options.escapeInvalid === true;\n let output = '';\n\n if (node.value) {\n if ((invalidBlock || invalidNode) && utils.isOpenOrClose(node)) {\n return '\\\\' + node.value;\n }\n return node.value;\n }\n\n if (node.value) {\n return node.value;\n }\n\n if (node.nodes) {\n for (let child of node.nodes) {\n output += stringify(child);\n }\n }\n return output;\n };\n\n return stringify(ast);\n};\n\n","'use strict';\n\nexports.isInteger = num => {\n if (typeof num === 'number') {\n return Number.isInteger(num);\n }\n if (typeof num === 'string' && num.trim() !== '') {\n return Number.isInteger(Number(num));\n }\n return false;\n};\n\n/**\n * Find a node of the given type\n */\n\nexports.find = (node, type) => node.nodes.find(node => node.type === type);\n\n/**\n * Find a node of the given type\n */\n\nexports.exceedsLimit = (min, max, step = 1, limit) => {\n if (limit === false) return false;\n if (!exports.isInteger(min) || !exports.isInteger(max)) return false;\n return ((Number(max) - Number(min)) / Number(step)) >= limit;\n};\n\n/**\n * Escape the given node with '\\\\' before node.value\n */\n\nexports.escapeNode = (block, n = 0, type) => {\n let node = block.nodes[n];\n if (!node) return;\n\n if ((type && node.type === type) || node.type === 'open' || node.type === 'close') {\n if (node.escaped !== true) {\n node.value = '\\\\' + node.value;\n node.escaped = true;\n }\n }\n};\n\n/**\n * Returns true if the given brace node should be enclosed in literal braces\n */\n\nexports.encloseBrace = 
node => {\n if (node.type !== 'brace') return false;\n if ((node.commas >> 0 + node.ranges >> 0) === 0) {\n node.invalid = true;\n return true;\n }\n return false;\n};\n\n/**\n * Returns true if a brace node is invalid.\n */\n\nexports.isInvalidBrace = block => {\n if (block.type !== 'brace') return false;\n if (block.invalid === true || block.dollar) return true;\n if ((block.commas >> 0 + block.ranges >> 0) === 0) {\n block.invalid = true;\n return true;\n }\n if (block.open !== true || block.close !== true) {\n block.invalid = true;\n return true;\n }\n return false;\n};\n\n/**\n * Returns true if a node is an open or close node\n */\n\nexports.isOpenOrClose = node => {\n if (node.type === 'open' || node.type === 'close') {\n return true;\n }\n return node.open === true || node.close === true;\n};\n\n/**\n * Reduce an array of text nodes.\n */\n\nexports.reduce = nodes => nodes.reduce((acc, node) => {\n if (node.type === 'text') acc.push(node.value);\n if (node.type === 'range') node.type = 'text';\n return acc;\n}, []);\n\n/**\n * Flatten an array\n */\n\nexports.flatten = (...args) => {\n const result = [];\n const flat = arr => {\n for (let i = 0; i < arr.length; i++) {\n let ele = arr[i];\n Array.isArray(ele) ? flat(ele, result) : ele !== void 0 && result.push(ele);\n }\n return result;\n };\n flat(args);\n return result;\n};\n","/*!\n * fill-range \n *\n * Copyright (c) 2014-present, Jon Schlinkert.\n * Licensed under the MIT License.\n */\n\n'use strict';\n\nconst util = require('util');\nconst toRegexRange = require('to-regex-range');\n\nconst isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val);\n\nconst transform = toNumber => {\n return value => toNumber === true ? Number(value) : String(value);\n};\n\nconst isValidValue = value => {\n return typeof value === 'number' || (typeof value === 'string' && value !== '');\n};\n\nconst isNumber = num => Number.isInteger(+num);\n\nconst zeros = input => {\n let value = `${input}`;\n let index = -1;\n if (value[0] === '-') value = value.slice(1);\n if (value === '0') return false;\n while (value[++index] === '0');\n return index > 0;\n};\n\nconst stringify = (start, end, options) => {\n if (typeof start === 'string' || typeof end === 'string') {\n return true;\n }\n return options.stringify === true;\n};\n\nconst pad = (input, maxLength, toNumber) => {\n if (maxLength > 0) {\n let dash = input[0] === '-' ? '-' : '';\n if (dash) input = input.slice(1);\n input = (dash + input.padStart(dash ? maxLength - 1 : maxLength, '0'));\n }\n if (toNumber === false) {\n return String(input);\n }\n return input;\n};\n\nconst toMaxLen = (input, maxLength) => {\n let negative = input[0] === '-' ? '-' : '';\n if (negative) {\n input = input.slice(1);\n maxLength--;\n }\n while (input.length < maxLength) input = '0' + input;\n return negative ? ('-' + input) : input;\n};\n\nconst toSequence = (parts, options) => {\n parts.negatives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0);\n parts.positives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0);\n\n let prefix = options.capture ? 
'' : '?:';\n let positives = '';\n let negatives = '';\n let result;\n\n if (parts.positives.length) {\n positives = parts.positives.join('|');\n }\n\n if (parts.negatives.length) {\n negatives = `-(${prefix}${parts.negatives.join('|')})`;\n }\n\n if (positives && negatives) {\n result = `${positives}|${negatives}`;\n } else {\n result = positives || negatives;\n }\n\n if (options.wrap) {\n return `(${prefix}${result})`;\n }\n\n return result;\n};\n\nconst toRange = (a, b, isNumbers, options) => {\n if (isNumbers) {\n return toRegexRange(a, b, { wrap: false, ...options });\n }\n\n let start = String.fromCharCode(a);\n if (a === b) return start;\n\n let stop = String.fromCharCode(b);\n return `[${start}-${stop}]`;\n};\n\nconst toRegex = (start, end, options) => {\n if (Array.isArray(start)) {\n let wrap = options.wrap === true;\n let prefix = options.capture ? '' : '?:';\n return wrap ? `(${prefix}${start.join('|')})` : start.join('|');\n }\n return toRegexRange(start, end, options);\n};\n\nconst rangeError = (...args) => {\n return new RangeError('Invalid range arguments: ' + util.inspect(...args));\n};\n\nconst invalidRange = (start, end, options) => {\n if (options.strictRanges === true) throw rangeError([start, end]);\n return [];\n};\n\nconst invalidStep = (step, options) => {\n if (options.strictRanges === true) {\n throw new TypeError(`Expected step \"${step}\" to be a number`);\n }\n return [];\n};\n\nconst fillNumbers = (start, end, step = 1, options = {}) => {\n let a = Number(start);\n let b = Number(end);\n\n if (!Number.isInteger(a) || !Number.isInteger(b)) {\n if (options.strictRanges === true) throw rangeError([start, end]);\n return [];\n }\n\n // fix negative zero\n if (a === 0) a = 0;\n if (b === 0) b = 0;\n\n let descending = a > b;\n let startString = String(start);\n let endString = String(end);\n let stepString = String(step);\n step = Math.max(Math.abs(step), 1);\n\n let padded = zeros(startString) || zeros(endString) || zeros(stepString);\n let maxLen = padded ? Math.max(startString.length, endString.length, stepString.length) : 0;\n let toNumber = padded === false && stringify(start, end, options) === false;\n let format = options.transform || transform(toNumber);\n\n if (options.toRegex && step === 1) {\n return toRange(toMaxLen(start, maxLen), toMaxLen(end, maxLen), true, options);\n }\n\n let parts = { negatives: [], positives: [] };\n let push = num => parts[num < 0 ? 'negatives' : 'positives'].push(Math.abs(num));\n let range = [];\n let index = 0;\n\n while (descending ? a >= b : a <= b) {\n if (options.toRegex === true && step > 1) {\n push(a);\n } else {\n range.push(pad(format(a, index), maxLen, toNumber));\n }\n a = descending ? a - step : a + step;\n index++;\n }\n\n if (options.toRegex === true) {\n return step > 1\n ? toSequence(parts, options)\n : toRegex(range, null, { wrap: false, ...options });\n }\n\n return range;\n};\n\nconst fillLetters = (start, end, step = 1, options = {}) => {\n if ((!isNumber(start) && start.length > 1) || (!isNumber(end) && end.length > 1)) {\n return invalidRange(start, end, options);\n }\n\n\n let format = options.transform || (val => String.fromCharCode(val));\n let a = `${start}`.charCodeAt(0);\n let b = `${end}`.charCodeAt(0);\n\n let descending = a > b;\n let min = Math.min(a, b);\n let max = Math.max(a, b);\n\n if (options.toRegex && step === 1) {\n return toRange(min, max, false, options);\n }\n\n let range = [];\n let index = 0;\n\n while (descending ? 
a >= b : a <= b) {\n range.push(format(a, index));\n a = descending ? a - step : a + step;\n index++;\n }\n\n if (options.toRegex === true) {\n return toRegex(range, null, { wrap: false, options });\n }\n\n return range;\n};\n\nconst fill = (start, end, step, options = {}) => {\n if (end == null && isValidValue(start)) {\n return [start];\n }\n\n if (!isValidValue(start) || !isValidValue(end)) {\n return invalidRange(start, end, options);\n }\n\n if (typeof step === 'function') {\n return fill(start, end, 1, { transform: step });\n }\n\n if (isObject(step)) {\n return fill(start, end, 0, step);\n }\n\n let opts = { ...options };\n if (opts.capture === true) opts.wrap = true;\n step = step || opts.step || 1;\n\n if (!isNumber(step)) {\n if (step != null && !isObject(step)) return invalidStep(step, opts);\n return fill(start, end, 1, step);\n }\n\n if (isNumber(start) && isNumber(end)) {\n return fillNumbers(start, end, step, opts);\n }\n\n return fillLetters(start, end, Math.max(Math.abs(step), 1), opts);\n};\n\nmodule.exports = fill;\n","/*!\n * is-number \n *\n * Copyright (c) 2014-present, Jon Schlinkert.\n * Released under the MIT License.\n */\n\n'use strict';\n\nmodule.exports = function(num) {\n if (typeof num === 'number') {\n return num - num === 0;\n }\n if (typeof num === 'string' && num.trim() !== '') {\n return Number.isFinite ? Number.isFinite(+num) : isFinite(+num);\n }\n return false;\n};\n","'use strict';\n\nconst util = require('util');\nconst braces = require('braces');\nconst picomatch = require('picomatch');\nconst utils = require('picomatch/lib/utils');\nconst isEmptyString = val => typeof val === 'string' && (val === '' || val === './');\n\n/**\n * Returns an array of strings that match one or more glob patterns.\n *\n * ```js\n * const mm = require('micromatch');\n * // mm(list, patterns[, options]);\n *\n * console.log(mm(['a.js', 'a.txt'], ['*.js']));\n * //=> [ 'a.js' ]\n * ```\n * @param {String|Array} list List of strings to match.\n * @param {String|Array} patterns One or more glob patterns to use for matching.\n * @param {Object} options See available [options](#options)\n * @return {Array} Returns an array of matches\n * @summary false\n * @api public\n */\n\nconst micromatch = (list, patterns, options) => {\n patterns = [].concat(patterns);\n list = [].concat(list);\n\n let omit = new Set();\n let keep = new Set();\n let items = new Set();\n let negatives = 0;\n\n let onResult = state => {\n items.add(state.output);\n if (options && options.onResult) {\n options.onResult(state);\n }\n };\n\n for (let i = 0; i < patterns.length; i++) {\n let isMatch = picomatch(String(patterns[i]), { ...options, onResult }, true);\n let negated = isMatch.state.negated || isMatch.state.negatedExtglob;\n if (negated) negatives++;\n\n for (let item of list) {\n let matched = isMatch(item, true);\n\n let match = negated ? !matched.isMatch : matched.isMatch;\n if (!match) continue;\n\n if (negated) {\n omit.add(matched.output);\n } else {\n omit.delete(matched.output);\n keep.add(matched.output);\n }\n }\n }\n\n let result = negatives === patterns.length ? [...items] : [...keep];\n let matches = result.filter(item => !omit.has(item));\n\n if (options && matches.length === 0) {\n if (options.failglob === true) {\n throw new Error(`No matches found for \"${patterns.join(', ')}\"`);\n }\n\n if (options.nonull === true || options.nullglob === true) {\n return options.unescape ? 
patterns.map(p => p.replace(/\\\\/g, '')) : patterns;\n }\n }\n\n return matches;\n};\n\n/**\n * Backwards compatibility\n */\n\nmicromatch.match = micromatch;\n\n/**\n * Returns a matcher function from the given glob `pattern` and `options`.\n * The returned function takes a string to match as its only argument and returns\n * true if the string is a match.\n *\n * ```js\n * const mm = require('micromatch');\n * // mm.matcher(pattern[, options]);\n *\n * const isMatch = mm.matcher('*.!(*a)');\n * console.log(isMatch('a.a')); //=> false\n * console.log(isMatch('a.b')); //=> true\n * ```\n * @param {String} `pattern` Glob pattern\n * @param {Object} `options`\n * @return {Function} Returns a matcher function.\n * @api public\n */\n\nmicromatch.matcher = (pattern, options) => picomatch(pattern, options);\n\n/**\n * Returns true if **any** of the given glob `patterns` match the specified `string`.\n *\n * ```js\n * const mm = require('micromatch');\n * // mm.isMatch(string, patterns[, options]);\n *\n * console.log(mm.isMatch('a.a', ['b.*', '*.a'])); //=> true\n * console.log(mm.isMatch('a.a', 'b.*')); //=> false\n * ```\n * @param {String} str The string to test.\n * @param {String|Array} patterns One or more glob patterns to use for matching.\n * @param {Object} [options] See available [options](#options).\n * @return {Boolean} Returns true if any patterns match `str`\n * @api public\n */\n\nmicromatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str);\n\n/**\n * Backwards compatibility\n */\n\nmicromatch.any = micromatch.isMatch;\n\n/**\n * Returns a list of strings that _**do not match any**_ of the given `patterns`.\n *\n * ```js\n * const mm = require('micromatch');\n * // mm.not(list, patterns[, options]);\n *\n * console.log(mm.not(['a.a', 'b.b', 'c.c'], '*.a'));\n * //=> ['b.b', 'c.c']\n * ```\n * @param {Array} `list` Array of strings to match.\n * @param {String|Array} `patterns` One or more glob pattern to use for matching.\n * @param {Object} `options` See available [options](#options) for changing how matches are performed\n * @return {Array} Returns an array of strings that **do not match** the given patterns.\n * @api public\n */\n\nmicromatch.not = (list, patterns, options = {}) => {\n patterns = [].concat(patterns).map(String);\n let result = new Set();\n let items = [];\n\n let onResult = state => {\n if (options.onResult) options.onResult(state);\n items.push(state.output);\n };\n\n let matches = micromatch(list, patterns, { ...options, onResult });\n\n for (let item of items) {\n if (!matches.includes(item)) {\n result.add(item);\n }\n }\n return [...result];\n};\n\n/**\n * Returns true if the given `string` contains the given pattern. 
Similar\n * to [.isMatch](#isMatch) but the pattern can match any part of the string.\n *\n * ```js\n * var mm = require('micromatch');\n * // mm.contains(string, pattern[, options]);\n *\n * console.log(mm.contains('aa/bb/cc', '*b'));\n * //=> true\n * console.log(mm.contains('aa/bb/cc', '*d'));\n * //=> false\n * ```\n * @param {String} `str` The string to match.\n * @param {String|Array} `patterns` Glob pattern to use for matching.\n * @param {Object} `options` See available [options](#options) for changing how matches are performed\n * @return {Boolean} Returns true if the patter matches any part of `str`.\n * @api public\n */\n\nmicromatch.contains = (str, pattern, options) => {\n if (typeof str !== 'string') {\n throw new TypeError(`Expected a string: \"${util.inspect(str)}\"`);\n }\n\n if (Array.isArray(pattern)) {\n return pattern.some(p => micromatch.contains(str, p, options));\n }\n\n if (typeof pattern === 'string') {\n if (isEmptyString(str) || isEmptyString(pattern)) {\n return false;\n }\n\n if (str.includes(pattern) || (str.startsWith('./') && str.slice(2).includes(pattern))) {\n return true;\n }\n }\n\n return micromatch.isMatch(str, pattern, { ...options, contains: true });\n};\n\n/**\n * Filter the keys of the given object with the given `glob` pattern\n * and `options`. Does not attempt to match nested keys. If you need this feature,\n * use [glob-object][] instead.\n *\n * ```js\n * const mm = require('micromatch');\n * // mm.matchKeys(object, patterns[, options]);\n *\n * const obj = { aa: 'a', ab: 'b', ac: 'c' };\n * console.log(mm.matchKeys(obj, '*b'));\n * //=> { ab: 'b' }\n * ```\n * @param {Object} `object` The object with keys to filter.\n * @param {String|Array} `patterns` One or more glob patterns to use for matching.\n * @param {Object} `options` See available [options](#options) for changing how matches are performed\n * @return {Object} Returns an object with only keys that match the given patterns.\n * @api public\n */\n\nmicromatch.matchKeys = (obj, patterns, options) => {\n if (!utils.isObject(obj)) {\n throw new TypeError('Expected the first argument to be an object');\n }\n let keys = micromatch(Object.keys(obj), patterns, options);\n let res = {};\n for (let key of keys) res[key] = obj[key];\n return res;\n};\n\n/**\n * Returns true if some of the strings in the given `list` match any of the given glob `patterns`.\n *\n * ```js\n * const mm = require('micromatch');\n * // mm.some(list, patterns[, options]);\n *\n * console.log(mm.some(['foo.js', 'bar.js'], ['*.js', '!foo.js']));\n * // true\n * console.log(mm.some(['foo.js'], ['*.js', '!foo.js']));\n * // false\n * ```\n * @param {String|Array} `list` The string or array of strings to test. 
Returns as soon as the first match is found.\n * @param {String|Array} `patterns` One or more glob patterns to use for matching.\n * @param {Object} `options` See available [options](#options) for changing how matches are performed\n * @return {Boolean} Returns true if any patterns match `str`\n * @api public\n */\n\nmicromatch.some = (list, patterns, options) => {\n let items = [].concat(list);\n\n for (let pattern of [].concat(patterns)) {\n let isMatch = picomatch(String(pattern), options);\n if (items.some(item => isMatch(item))) {\n return true;\n }\n }\n return false;\n};\n\n/**\n * Returns true if every string in the given `list` matches\n * any of the given glob `patterns`.\n *\n * ```js\n * const mm = require('micromatch');\n * // mm.every(list, patterns[, options]);\n *\n * console.log(mm.every('foo.js', ['foo.js']));\n * // true\n * console.log(mm.every(['foo.js', 'bar.js'], ['*.js']));\n * // true\n * console.log(mm.every(['foo.js', 'bar.js'], ['*.js', '!foo.js']));\n * // false\n * console.log(mm.every(['foo.js'], ['*.js', '!foo.js']));\n * // false\n * ```\n * @param {String|Array} `list` The string or array of strings to test.\n * @param {String|Array} `patterns` One or more glob patterns to use for matching.\n * @param {Object} `options` See available [options](#options) for changing how matches are performed\n * @return {Boolean} Returns true if any patterns match `str`\n * @api public\n */\n\nmicromatch.every = (list, patterns, options) => {\n let items = [].concat(list);\n\n for (let pattern of [].concat(patterns)) {\n let isMatch = picomatch(String(pattern), options);\n if (!items.every(item => isMatch(item))) {\n return false;\n }\n }\n return true;\n};\n\n/**\n * Returns true if **all** of the given `patterns` match\n * the specified string.\n *\n * ```js\n * const mm = require('micromatch');\n * // mm.all(string, patterns[, options]);\n *\n * console.log(mm.all('foo.js', ['foo.js']));\n * // true\n *\n * console.log(mm.all('foo.js', ['*.js', '!foo.js']));\n * // false\n *\n * console.log(mm.all('foo.js', ['*.js', 'foo.js']));\n * // true\n *\n * console.log(mm.all('foo.js', ['*.js', 'f*', '*o*', '*o.js']));\n * // true\n * ```\n * @param {String|Array} `str` The string to test.\n * @param {String|Array} `patterns` One or more glob patterns to use for matching.\n * @param {Object} `options` See available [options](#options) for changing how matches are performed\n * @return {Boolean} Returns true if any patterns match `str`\n * @api public\n */\n\nmicromatch.all = (str, patterns, options) => {\n if (typeof str !== 'string') {\n throw new TypeError(`Expected a string: \"${util.inspect(str)}\"`);\n }\n\n return [].concat(patterns).every(p => picomatch(p, options)(str));\n};\n\n/**\n * Returns an array of matches captured by `pattern` in `string, or `null` if the pattern did not match.\n *\n * ```js\n * const mm = require('micromatch');\n * // mm.capture(pattern, string[, options]);\n *\n * console.log(mm.capture('test/*.js', 'test/foo.js'));\n * //=> ['foo']\n * console.log(mm.capture('test/*.js', 'foo/bar.css'));\n * //=> null\n * ```\n * @param {String} `glob` Glob pattern to use for matching.\n * @param {String} `input` String to match\n * @param {Object} `options` See available [options](#options) for changing how matches are performed\n * @return {Boolean} Returns an array of captures if the input matches the glob pattern, otherwise `null`.\n * @api public\n */\n\nmicromatch.capture = (glob, input, options) => {\n let posix = utils.isWindows(options);\n let regex = 
picomatch.makeRe(String(glob), { ...options, capture: true });\n let match = regex.exec(posix ? utils.toPosixSlashes(input) : input);\n\n if (match) {\n return match.slice(1).map(v => v === void 0 ? '' : v);\n }\n};\n\n/**\n * Create a regular expression from the given glob `pattern`.\n *\n * ```js\n * const mm = require('micromatch');\n * // mm.makeRe(pattern[, options]);\n *\n * console.log(mm.makeRe('*.js'));\n * //=> /^(?:(\\.[\\\\\\/])?(?!\\.)(?=.)[^\\/]*?\\.js)$/\n * ```\n * @param {String} `pattern` A glob pattern to convert to regex.\n * @param {Object} `options`\n * @return {RegExp} Returns a regex created from the given pattern.\n * @api public\n */\n\nmicromatch.makeRe = (...args) => picomatch.makeRe(...args);\n\n/**\n * Scan a glob pattern to separate the pattern into segments. Used\n * by the [split](#split) method.\n *\n * ```js\n * const mm = require('micromatch');\n * const state = mm.scan(pattern[, options]);\n * ```\n * @param {String} `pattern`\n * @param {Object} `options`\n * @return {Object} Returns an object with\n * @api public\n */\n\nmicromatch.scan = (...args) => picomatch.scan(...args);\n\n/**\n * Parse a glob pattern to create the source string for a regular\n * expression.\n *\n * ```js\n * const mm = require('micromatch');\n * const state = mm(pattern[, options]);\n * ```\n * @param {String} `glob`\n * @param {Object} `options`\n * @return {Object} Returns an object with useful properties and output to be used as regex source string.\n * @api public\n */\n\nmicromatch.parse = (patterns, options) => {\n let res = [];\n for (let pattern of [].concat(patterns || [])) {\n for (let str of braces(String(pattern), options)) {\n res.push(picomatch.parse(str, options));\n }\n }\n return res;\n};\n\n/**\n * Process the given brace `pattern`.\n *\n * ```js\n * const { braces } = require('micromatch');\n * console.log(braces('foo/{a,b,c}/bar'));\n * //=> [ 'foo/(a|b|c)/bar' ]\n *\n * console.log(braces('foo/{a,b,c}/bar', { expand: true }));\n * //=> [ 'foo/a/bar', 'foo/b/bar', 'foo/c/bar' ]\n * ```\n * @param {String} `pattern` String with brace pattern to process.\n * @param {Object} `options` Any [options](#options) to change how expansion is performed. 
See the [braces][] library for all available options.\n * @return {Array}\n * @api public\n */\n\nmicromatch.braces = (pattern, options) => {\n if (typeof pattern !== 'string') throw new TypeError('Expected a string');\n if ((options && options.nobrace === true) || !/\\{.*\\}/.test(pattern)) {\n return [pattern];\n }\n return braces(pattern, options);\n};\n\n/**\n * Expand braces\n */\n\nmicromatch.braceExpand = (pattern, options) => {\n if (typeof pattern !== 'string') throw new TypeError('Expected a string');\n return micromatch.braces(pattern, { ...options, expand: true });\n};\n\n/**\n * Expose micromatch\n */\n\nmodule.exports = micromatch;\n","/*!\n * to-regex-range \n *\n * Copyright (c) 2015-present, Jon Schlinkert.\n * Released under the MIT License.\n */\n\n'use strict';\n\nconst isNumber = require('is-number');\n\nconst toRegexRange = (min, max, options) => {\n if (isNumber(min) === false) {\n throw new TypeError('toRegexRange: expected the first argument to be a number');\n }\n\n if (max === void 0 || min === max) {\n return String(min);\n }\n\n if (isNumber(max) === false) {\n throw new TypeError('toRegexRange: expected the second argument to be a number.');\n }\n\n let opts = { relaxZeros: true, ...options };\n if (typeof opts.strictZeros === 'boolean') {\n opts.relaxZeros = opts.strictZeros === false;\n }\n\n let relax = String(opts.relaxZeros);\n let shorthand = String(opts.shorthand);\n let capture = String(opts.capture);\n let wrap = String(opts.wrap);\n let cacheKey = min + ':' + max + '=' + relax + shorthand + capture + wrap;\n\n if (toRegexRange.cache.hasOwnProperty(cacheKey)) {\n return toRegexRange.cache[cacheKey].result;\n }\n\n let a = Math.min(min, max);\n let b = Math.max(min, max);\n\n if (Math.abs(a - b) === 1) {\n let result = min + '|' + max;\n if (opts.capture) {\n return `(${result})`;\n }\n if (opts.wrap === false) {\n return result;\n }\n return `(?:${result})`;\n }\n\n let isPadded = hasPadding(min) || hasPadding(max);\n let state = { min, max, a, b };\n let positives = [];\n let negatives = [];\n\n if (isPadded) {\n state.isPadded = isPadded;\n state.maxLen = String(state.max).length;\n }\n\n if (a < 0) {\n let newMin = b < 0 ? 
Math.abs(b) : 1;\n negatives = splitToPatterns(newMin, Math.abs(a), state, opts);\n a = state.a = 0;\n }\n\n if (b >= 0) {\n positives = splitToPatterns(a, b, state, opts);\n }\n\n state.negatives = negatives;\n state.positives = positives;\n state.result = collatePatterns(negatives, positives, opts);\n\n if (opts.capture === true) {\n state.result = `(${state.result})`;\n } else if (opts.wrap !== false && (positives.length + negatives.length) > 1) {\n state.result = `(?:${state.result})`;\n }\n\n toRegexRange.cache[cacheKey] = state;\n return state.result;\n};\n\nfunction collatePatterns(neg, pos, options) {\n let onlyNegative = filterPatterns(neg, pos, '-', false, options) || [];\n let onlyPositive = filterPatterns(pos, neg, '', false, options) || [];\n let intersected = filterPatterns(neg, pos, '-?', true, options) || [];\n let subpatterns = onlyNegative.concat(intersected).concat(onlyPositive);\n return subpatterns.join('|');\n}\n\nfunction splitToRanges(min, max) {\n let nines = 1;\n let zeros = 1;\n\n let stop = countNines(min, nines);\n let stops = new Set([max]);\n\n while (min <= stop && stop <= max) {\n stops.add(stop);\n nines += 1;\n stop = countNines(min, nines);\n }\n\n stop = countZeros(max + 1, zeros) - 1;\n\n while (min < stop && stop <= max) {\n stops.add(stop);\n zeros += 1;\n stop = countZeros(max + 1, zeros) - 1;\n }\n\n stops = [...stops];\n stops.sort(compare);\n return stops;\n}\n\n/**\n * Convert a range to a regex pattern\n * @param {Number} `start`\n * @param {Number} `stop`\n * @return {String}\n */\n\nfunction rangeToPattern(start, stop, options) {\n if (start === stop) {\n return { pattern: start, count: [], digits: 0 };\n }\n\n let zipped = zip(start, stop);\n let digits = zipped.length;\n let pattern = '';\n let count = 0;\n\n for (let i = 0; i < digits; i++) {\n let [startDigit, stopDigit] = zipped[i];\n\n if (startDigit === stopDigit) {\n pattern += startDigit;\n\n } else if (startDigit !== '0' || stopDigit !== '9') {\n pattern += toCharacterClass(startDigit, stopDigit, options);\n\n } else {\n count++;\n }\n }\n\n if (count) {\n pattern += options.shorthand === true ? 
'\\\\d' : '[0-9]';\n }\n\n return { pattern, count: [count], digits };\n}\n\nfunction splitToPatterns(min, max, tok, options) {\n let ranges = splitToRanges(min, max);\n let tokens = [];\n let start = min;\n let prev;\n\n for (let i = 0; i < ranges.length; i++) {\n let max = ranges[i];\n let obj = rangeToPattern(String(start), String(max), options);\n let zeros = '';\n\n if (!tok.isPadded && prev && prev.pattern === obj.pattern) {\n if (prev.count.length > 1) {\n prev.count.pop();\n }\n\n prev.count.push(obj.count[0]);\n prev.string = prev.pattern + toQuantifier(prev.count);\n start = max + 1;\n continue;\n }\n\n if (tok.isPadded) {\n zeros = padZeros(max, tok, options);\n }\n\n obj.string = zeros + obj.pattern + toQuantifier(obj.count);\n tokens.push(obj);\n start = max + 1;\n prev = obj;\n }\n\n return tokens;\n}\n\nfunction filterPatterns(arr, comparison, prefix, intersection, options) {\n let result = [];\n\n for (let ele of arr) {\n let { string } = ele;\n\n // only push if _both_ are negative...\n if (!intersection && !contains(comparison, 'string', string)) {\n result.push(prefix + string);\n }\n\n // or _both_ are positive\n if (intersection && contains(comparison, 'string', string)) {\n result.push(prefix + string);\n }\n }\n return result;\n}\n\n/**\n * Zip strings\n */\n\nfunction zip(a, b) {\n let arr = [];\n for (let i = 0; i < a.length; i++) arr.push([a[i], b[i]]);\n return arr;\n}\n\nfunction compare(a, b) {\n return a > b ? 1 : b > a ? -1 : 0;\n}\n\nfunction contains(arr, key, val) {\n return arr.some(ele => ele[key] === val);\n}\n\nfunction countNines(min, len) {\n return Number(String(min).slice(0, -len) + '9'.repeat(len));\n}\n\nfunction countZeros(integer, zeros) {\n return integer - (integer % Math.pow(10, zeros));\n}\n\nfunction toQuantifier(digits) {\n let [start = 0, stop = ''] = digits;\n if (stop || start > 1) {\n return `{${start + (stop ? ',' + stop : '')}}`;\n }\n return '';\n}\n\nfunction toCharacterClass(a, b, options) {\n return `[${a}${(b - a === 1) ? '' : '-'}${b}]`;\n}\n\nfunction hasPadding(str) {\n return /^-?(0+)\\d/.test(str);\n}\n\nfunction padZeros(value, tok, options) {\n if (!tok.isPadded) {\n return value;\n }\n\n let diff = Math.abs(tok.maxLen - String(value).length);\n let relax = options.relaxZeros !== false;\n\n switch (diff) {\n case 0:\n return '';\n case 1:\n return relax ? '0?' : '0';\n case 2:\n return relax ? '0{0,2}' : '00';\n default: {\n return relax ? 
`0{0,${diff}}` : `0{${diff}}`;\n }\n }\n}\n\n/**\n * Cache\n */\n\ntoRegexRange.cache = {};\ntoRegexRange.clearCache = () => (toRegexRange.cache = {});\n\n/**\n * Expose `toRegexRange`\n */\n\nmodule.exports = toRegexRange;\n","\"use strict\";\r\nconst taskManager = require(\"./managers/tasks\");\r\nconst async_1 = require(\"./providers/async\");\r\nconst stream_1 = require(\"./providers/stream\");\r\nconst sync_1 = require(\"./providers/sync\");\r\nconst settings_1 = require(\"./settings\");\r\nconst utils = require(\"./utils\");\r\nasync function FastGlob(source, options) {\r\n assertPatternsInput(source);\r\n const works = getWorks(source, async_1.default, options);\r\n const result = await Promise.all(works);\r\n return utils.array.flatten(result);\r\n}\r\n// https://github.com/typescript-eslint/typescript-eslint/issues/60\r\n// eslint-disable-next-line no-redeclare\r\n(function (FastGlob) {\r\n function sync(source, options) {\r\n assertPatternsInput(source);\r\n const works = getWorks(source, sync_1.default, options);\r\n return utils.array.flatten(works);\r\n }\r\n FastGlob.sync = sync;\r\n function stream(source, options) {\r\n assertPatternsInput(source);\r\n const works = getWorks(source, stream_1.default, options);\r\n /**\r\n * The stream returned by the provider cannot work with an asynchronous iterator.\r\n * To support asynchronous iterators, regardless of the number of tasks, we always multiplex streams.\r\n * This affects performance (+25%). I don't see best solution right now.\r\n */\r\n return utils.stream.merge(works);\r\n }\r\n FastGlob.stream = stream;\r\n function generateTasks(source, options) {\r\n assertPatternsInput(source);\r\n const patterns = [].concat(source);\r\n const settings = new settings_1.default(options);\r\n return taskManager.generate(patterns, settings);\r\n }\r\n FastGlob.generateTasks = generateTasks;\r\n function isDynamicPattern(source, options) {\r\n assertPatternsInput(source);\r\n const settings = new settings_1.default(options);\r\n return utils.pattern.isDynamicPattern(source, settings);\r\n }\r\n FastGlob.isDynamicPattern = isDynamicPattern;\r\n function escapePath(source) {\r\n assertPatternsInput(source);\r\n return utils.path.escape(source);\r\n }\r\n FastGlob.escapePath = escapePath;\r\n})(FastGlob || (FastGlob = {}));\r\nfunction getWorks(source, _Provider, options) {\r\n const patterns = [].concat(source);\r\n const settings = new settings_1.default(options);\r\n const tasks = taskManager.generate(patterns, settings);\r\n const provider = new _Provider(settings);\r\n return tasks.map(provider.read, provider);\r\n}\r\nfunction assertPatternsInput(input) {\r\n const source = [].concat(input);\r\n const isValidSource = source.every((item) => utils.string.isString(item) && !utils.string.isEmpty(item));\r\n if (!isValidSource) {\r\n throw new TypeError('Patterns must be a string (non empty) or an array of strings');\r\n }\r\n}\r\nmodule.exports = FastGlob;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.convertPatternGroupToTask = exports.convertPatternGroupsToTasks = exports.groupPatternsByBaseDirectory = exports.getNegativePatternsAsPositive = exports.getPositivePatterns = exports.convertPatternsToTasks = exports.generate = void 0;\r\nconst utils = require(\"../utils\");\r\nfunction generate(patterns, settings) {\r\n const positivePatterns = getPositivePatterns(patterns);\r\n const negativePatterns = getNegativePatternsAsPositive(patterns, settings.ignore);\r\n const staticPatterns = 
positivePatterns.filter((pattern) => utils.pattern.isStaticPattern(pattern, settings));\r\n const dynamicPatterns = positivePatterns.filter((pattern) => utils.pattern.isDynamicPattern(pattern, settings));\r\n const staticTasks = convertPatternsToTasks(staticPatterns, negativePatterns, /* dynamic */ false);\r\n const dynamicTasks = convertPatternsToTasks(dynamicPatterns, negativePatterns, /* dynamic */ true);\r\n return staticTasks.concat(dynamicTasks);\r\n}\r\nexports.generate = generate;\r\nfunction convertPatternsToTasks(positive, negative, dynamic) {\r\n const positivePatternsGroup = groupPatternsByBaseDirectory(positive);\r\n // When we have a global group â there is no reason to divide the patterns into independent tasks.\r\n // In this case, the global task covers the rest.\r\n if ('.' in positivePatternsGroup) {\r\n const task = convertPatternGroupToTask('.', positive, negative, dynamic);\r\n return [task];\r\n }\r\n return convertPatternGroupsToTasks(positivePatternsGroup, negative, dynamic);\r\n}\r\nexports.convertPatternsToTasks = convertPatternsToTasks;\r\nfunction getPositivePatterns(patterns) {\r\n return utils.pattern.getPositivePatterns(patterns);\r\n}\r\nexports.getPositivePatterns = getPositivePatterns;\r\nfunction getNegativePatternsAsPositive(patterns, ignore) {\r\n const negative = utils.pattern.getNegativePatterns(patterns).concat(ignore);\r\n const positive = negative.map(utils.pattern.convertToPositivePattern);\r\n return positive;\r\n}\r\nexports.getNegativePatternsAsPositive = getNegativePatternsAsPositive;\r\nfunction groupPatternsByBaseDirectory(patterns) {\r\n const group = {};\r\n return patterns.reduce((collection, pattern) => {\r\n const base = utils.pattern.getBaseDirectory(pattern);\r\n if (base in collection) {\r\n collection[base].push(pattern);\r\n }\r\n else {\r\n collection[base] = [pattern];\r\n }\r\n return collection;\r\n }, group);\r\n}\r\nexports.groupPatternsByBaseDirectory = groupPatternsByBaseDirectory;\r\nfunction convertPatternGroupsToTasks(positive, negative, dynamic) {\r\n return Object.keys(positive).map((base) => {\r\n return convertPatternGroupToTask(base, positive[base], negative, dynamic);\r\n });\r\n}\r\nexports.convertPatternGroupsToTasks = convertPatternGroupsToTasks;\r\nfunction convertPatternGroupToTask(base, positive, negative, dynamic) {\r\n return {\r\n dynamic,\r\n positive,\r\n negative,\r\n base,\r\n patterns: [].concat(positive, negative.map(utils.pattern.convertToNegativePattern))\r\n };\r\n}\r\nexports.convertPatternGroupToTask = convertPatternGroupToTask;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst stream_1 = require(\"../readers/stream\");\r\nconst provider_1 = require(\"./provider\");\r\nclass ProviderAsync extends provider_1.default {\r\n constructor() {\r\n super(...arguments);\r\n this._reader = new stream_1.default(this._settings);\r\n }\r\n read(task) {\r\n const root = this._getRootDirectory(task);\r\n const options = this._getReaderOptions(task);\r\n const entries = [];\r\n return new Promise((resolve, reject) => {\r\n const stream = this.api(root, task, options);\r\n stream.once('error', reject);\r\n stream.on('data', (entry) => entries.push(options.transform(entry)));\r\n stream.once('end', () => resolve(entries));\r\n });\r\n }\r\n api(root, task, options) {\r\n if (task.dynamic) {\r\n return this._reader.dynamic(root, options);\r\n }\r\n return this._reader.static(task.patterns, options);\r\n }\r\n}\r\nexports.default = ProviderAsync;\r\n","\"use 
strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst utils = require(\"../../utils\");\r\nconst partial_1 = require(\"../matchers/partial\");\r\nclass DeepFilter {\r\n constructor(_settings, _micromatchOptions) {\r\n this._settings = _settings;\r\n this._micromatchOptions = _micromatchOptions;\r\n }\r\n getFilter(basePath, positive, negative) {\r\n const matcher = this._getMatcher(positive);\r\n const negativeRe = this._getNegativePatternsRe(negative);\r\n return (entry) => this._filter(basePath, entry, matcher, negativeRe);\r\n }\r\n _getMatcher(patterns) {\r\n return new partial_1.default(patterns, this._settings, this._micromatchOptions);\r\n }\r\n _getNegativePatternsRe(patterns) {\r\n const affectDepthOfReadingPatterns = patterns.filter(utils.pattern.isAffectDepthOfReadingPattern);\r\n return utils.pattern.convertPatternsToRe(affectDepthOfReadingPatterns, this._micromatchOptions);\r\n }\r\n _filter(basePath, entry, matcher, negativeRe) {\r\n if (this._isSkippedByDeep(basePath, entry.path)) {\r\n return false;\r\n }\r\n if (this._isSkippedSymbolicLink(entry)) {\r\n return false;\r\n }\r\n const filepath = utils.path.removeLeadingDotSegment(entry.path);\r\n if (this._isSkippedByPositivePatterns(filepath, matcher)) {\r\n return false;\r\n }\r\n return this._isSkippedByNegativePatterns(filepath, negativeRe);\r\n }\r\n _isSkippedByDeep(basePath, entryPath) {\r\n /**\r\n * Avoid unnecessary depth calculations when it doesn't matter.\r\n */\r\n if (this._settings.deep === Infinity) {\r\n return false;\r\n }\r\n return this._getEntryLevel(basePath, entryPath) >= this._settings.deep;\r\n }\r\n _getEntryLevel(basePath, entryPath) {\r\n const entryPathDepth = entryPath.split('/').length;\r\n if (basePath === '') {\r\n return entryPathDepth;\r\n }\r\n const basePathDepth = basePath.split('/').length;\r\n return entryPathDepth - basePathDepth;\r\n }\r\n _isSkippedSymbolicLink(entry) {\r\n return !this._settings.followSymbolicLinks && entry.dirent.isSymbolicLink();\r\n }\r\n _isSkippedByPositivePatterns(entryPath, matcher) {\r\n return !this._settings.baseNameMatch && !matcher.match(entryPath);\r\n }\r\n _isSkippedByNegativePatterns(entryPath, patternsRe) {\r\n return !utils.pattern.matchAny(entryPath, patternsRe);\r\n }\r\n}\r\nexports.default = DeepFilter;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst utils = require(\"../../utils\");\r\nclass EntryFilter {\r\n constructor(_settings, _micromatchOptions) {\r\n this._settings = _settings;\r\n this._micromatchOptions = _micromatchOptions;\r\n this.index = new Map();\r\n }\r\n getFilter(positive, negative) {\r\n const positiveRe = utils.pattern.convertPatternsToRe(positive, this._micromatchOptions);\r\n const negativeRe = utils.pattern.convertPatternsToRe(negative, this._micromatchOptions);\r\n return (entry) => this._filter(entry, positiveRe, negativeRe);\r\n }\r\n _filter(entry, positiveRe, negativeRe) {\r\n if (this._settings.unique && this._isDuplicateEntry(entry)) {\r\n return false;\r\n }\r\n if (this._onlyFileFilter(entry) || this._onlyDirectoryFilter(entry)) {\r\n return false;\r\n }\r\n if (this._isSkippedByAbsoluteNegativePatterns(entry.path, negativeRe)) {\r\n return false;\r\n }\r\n const filepath = this._settings.baseNameMatch ? 
entry.name : entry.path;\r\n const isMatched = this._isMatchToPatterns(filepath, positiveRe) && !this._isMatchToPatterns(entry.path, negativeRe);\r\n if (this._settings.unique && isMatched) {\r\n this._createIndexRecord(entry);\r\n }\r\n return isMatched;\r\n }\r\n _isDuplicateEntry(entry) {\r\n return this.index.has(entry.path);\r\n }\r\n _createIndexRecord(entry) {\r\n this.index.set(entry.path, undefined);\r\n }\r\n _onlyFileFilter(entry) {\r\n return this._settings.onlyFiles && !entry.dirent.isFile();\r\n }\r\n _onlyDirectoryFilter(entry) {\r\n return this._settings.onlyDirectories && !entry.dirent.isDirectory();\r\n }\r\n _isSkippedByAbsoluteNegativePatterns(entryPath, patternsRe) {\r\n if (!this._settings.absolute) {\r\n return false;\r\n }\r\n const fullpath = utils.path.makeAbsolute(this._settings.cwd, entryPath);\r\n return utils.pattern.matchAny(fullpath, patternsRe);\r\n }\r\n _isMatchToPatterns(entryPath, patternsRe) {\r\n const filepath = utils.path.removeLeadingDotSegment(entryPath);\r\n return utils.pattern.matchAny(filepath, patternsRe);\r\n }\r\n}\r\nexports.default = EntryFilter;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst utils = require(\"../../utils\");\r\nclass ErrorFilter {\r\n constructor(_settings) {\r\n this._settings = _settings;\r\n }\r\n getFilter() {\r\n return (error) => this._isNonFatalError(error);\r\n }\r\n _isNonFatalError(error) {\r\n return utils.errno.isEnoentCodeError(error) || this._settings.suppressErrors;\r\n }\r\n}\r\nexports.default = ErrorFilter;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst utils = require(\"../../utils\");\r\nclass Matcher {\r\n constructor(_patterns, _settings, _micromatchOptions) {\r\n this._patterns = _patterns;\r\n this._settings = _settings;\r\n this._micromatchOptions = _micromatchOptions;\r\n this._storage = [];\r\n this._fillStorage();\r\n }\r\n _fillStorage() {\r\n /**\r\n * The original pattern may include `{,*,**,a/*}`, which will lead to problems with matching (unresolved level).\r\n * So, before expand patterns with brace expansion into separated patterns.\r\n */\r\n const patterns = utils.pattern.expandPatternsWithBraceExpansion(this._patterns);\r\n for (const pattern of patterns) {\r\n const segments = this._getPatternSegments(pattern);\r\n const sections = this._splitSegmentsIntoSections(segments);\r\n this._storage.push({\r\n complete: sections.length <= 1,\r\n pattern,\r\n segments,\r\n sections\r\n });\r\n }\r\n }\r\n _getPatternSegments(pattern) {\r\n const parts = utils.pattern.getPatternParts(pattern, this._micromatchOptions);\r\n return parts.map((part) => {\r\n const dynamic = utils.pattern.isDynamicPattern(part, this._settings);\r\n if (!dynamic) {\r\n return {\r\n dynamic: false,\r\n pattern: part\r\n };\r\n }\r\n return {\r\n dynamic: true,\r\n pattern: part,\r\n patternRe: utils.pattern.makeRe(part, this._micromatchOptions)\r\n };\r\n });\r\n }\r\n _splitSegmentsIntoSections(segments) {\r\n return utils.array.splitWhen(segments, (segment) => segment.dynamic && utils.pattern.hasGlobStar(segment.pattern));\r\n }\r\n}\r\nexports.default = Matcher;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst matcher_1 = require(\"./matcher\");\r\nclass PartialMatcher extends matcher_1.default {\r\n match(filepath) {\r\n const parts = filepath.split('/');\r\n const levels = parts.length;\r\n const patterns = this._storage.filter((info) => !info.complete || 
info.segments.length > levels);\r\n for (const pattern of patterns) {\r\n const section = pattern.sections[0];\r\n /**\r\n * In this case, the pattern has a globstar and we must read all directories unconditionally,\r\n * but only if the level has reached the end of the first group.\r\n *\r\n * fixtures/{a,b}/**\r\n * ^ true/false ^ always true\r\n */\r\n if (!pattern.complete && levels > section.length) {\r\n return true;\r\n }\r\n const match = parts.every((part, index) => {\r\n const segment = pattern.segments[index];\r\n if (segment.dynamic && segment.patternRe.test(part)) {\r\n return true;\r\n }\r\n if (!segment.dynamic && segment.pattern === part) {\r\n return true;\r\n }\r\n return false;\r\n });\r\n if (match) {\r\n return true;\r\n }\r\n }\r\n return false;\r\n }\r\n}\r\nexports.default = PartialMatcher;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst path = require(\"path\");\r\nconst deep_1 = require(\"./filters/deep\");\r\nconst entry_1 = require(\"./filters/entry\");\r\nconst error_1 = require(\"./filters/error\");\r\nconst entry_2 = require(\"./transformers/entry\");\r\nclass Provider {\r\n constructor(_settings) {\r\n this._settings = _settings;\r\n this.errorFilter = new error_1.default(this._settings);\r\n this.entryFilter = new entry_1.default(this._settings, this._getMicromatchOptions());\r\n this.deepFilter = new deep_1.default(this._settings, this._getMicromatchOptions());\r\n this.entryTransformer = new entry_2.default(this._settings);\r\n }\r\n _getRootDirectory(task) {\r\n return path.resolve(this._settings.cwd, task.base);\r\n }\r\n _getReaderOptions(task) {\r\n const basePath = task.base === '.' ? '' : task.base;\r\n return {\r\n basePath,\r\n pathSegmentSeparator: '/',\r\n concurrency: this._settings.concurrency,\r\n deepFilter: this.deepFilter.getFilter(basePath, task.positive, task.negative),\r\n entryFilter: this.entryFilter.getFilter(task.positive, task.negative),\r\n errorFilter: this.errorFilter.getFilter(),\r\n followSymbolicLinks: this._settings.followSymbolicLinks,\r\n fs: this._settings.fs,\r\n stats: this._settings.stats,\r\n throwErrorOnBrokenSymbolicLink: this._settings.throwErrorOnBrokenSymbolicLink,\r\n transform: this.entryTransformer.getTransformer()\r\n };\r\n }\r\n _getMicromatchOptions() {\r\n return {\r\n dot: this._settings.dot,\r\n matchBase: this._settings.baseNameMatch,\r\n nobrace: !this._settings.braceExpansion,\r\n nocase: !this._settings.caseSensitiveMatch,\r\n noext: !this._settings.extglob,\r\n noglobstar: !this._settings.globstar,\r\n posix: true,\r\n strictSlashes: false\r\n };\r\n }\r\n}\r\nexports.default = Provider;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst stream_1 = require(\"stream\");\r\nconst stream_2 = require(\"../readers/stream\");\r\nconst provider_1 = require(\"./provider\");\r\nclass ProviderStream extends provider_1.default {\r\n constructor() {\r\n super(...arguments);\r\n this._reader = new stream_2.default(this._settings);\r\n }\r\n read(task) {\r\n const root = this._getRootDirectory(task);\r\n const options = this._getReaderOptions(task);\r\n const source = this.api(root, task, options);\r\n const destination = new stream_1.Readable({ objectMode: true, read: () => { } });\r\n source\r\n .once('error', (error) => destination.emit('error', error))\r\n .on('data', (entry) => destination.emit('data', options.transform(entry)))\r\n .once('end', () => destination.emit('end'));\r\n destination\r\n 
.once('close', () => source.destroy());\r\n return destination;\r\n }\r\n api(root, task, options) {\r\n if (task.dynamic) {\r\n return this._reader.dynamic(root, options);\r\n }\r\n return this._reader.static(task.patterns, options);\r\n }\r\n}\r\nexports.default = ProviderStream;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst sync_1 = require(\"../readers/sync\");\r\nconst provider_1 = require(\"./provider\");\r\nclass ProviderSync extends provider_1.default {\r\n constructor() {\r\n super(...arguments);\r\n this._reader = new sync_1.default(this._settings);\r\n }\r\n read(task) {\r\n const root = this._getRootDirectory(task);\r\n const options = this._getReaderOptions(task);\r\n const entries = this.api(root, task, options);\r\n return entries.map(options.transform);\r\n }\r\n api(root, task, options) {\r\n if (task.dynamic) {\r\n return this._reader.dynamic(root, options);\r\n }\r\n return this._reader.static(task.patterns, options);\r\n }\r\n}\r\nexports.default = ProviderSync;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst utils = require(\"../../utils\");\r\nclass EntryTransformer {\r\n constructor(_settings) {\r\n this._settings = _settings;\r\n }\r\n getTransformer() {\r\n return (entry) => this._transform(entry);\r\n }\r\n _transform(entry) {\r\n let filepath = entry.path;\r\n if (this._settings.absolute) {\r\n filepath = utils.path.makeAbsolute(this._settings.cwd, filepath);\r\n filepath = utils.path.unixify(filepath);\r\n }\r\n if (this._settings.markDirectories && entry.dirent.isDirectory()) {\r\n filepath += '/';\r\n }\r\n if (!this._settings.objectMode) {\r\n return filepath;\r\n }\r\n return Object.assign(Object.assign({}, entry), { path: filepath });\r\n }\r\n}\r\nexports.default = EntryTransformer;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst path = require(\"path\");\r\nconst fsStat = require(\"@nodelib/fs.stat\");\r\nconst utils = require(\"../utils\");\r\nclass Reader {\r\n constructor(_settings) {\r\n this._settings = _settings;\r\n this._fsStatSettings = new fsStat.Settings({\r\n followSymbolicLink: this._settings.followSymbolicLinks,\r\n fs: this._settings.fs,\r\n throwErrorOnBrokenSymbolicLink: this._settings.followSymbolicLinks\r\n });\r\n }\r\n _getFullEntryPath(filepath) {\r\n return path.resolve(this._settings.cwd, filepath);\r\n }\r\n _makeEntry(stats, pattern) {\r\n const entry = {\r\n name: pattern,\r\n path: pattern,\r\n dirent: utils.fs.createDirentFromStats(pattern, stats)\r\n };\r\n if (this._settings.stats) {\r\n entry.stats = stats;\r\n }\r\n return entry;\r\n }\r\n _isFatalError(error) {\r\n return !utils.errno.isEnoentCodeError(error) && !this._settings.suppressErrors;\r\n }\r\n}\r\nexports.default = Reader;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst stream_1 = require(\"stream\");\r\nconst fsStat = require(\"@nodelib/fs.stat\");\r\nconst fsWalk = require(\"@nodelib/fs.walk\");\r\nconst reader_1 = require(\"./reader\");\r\nclass ReaderStream extends reader_1.default {\r\n constructor() {\r\n super(...arguments);\r\n this._walkStream = fsWalk.walkStream;\r\n this._stat = fsStat.stat;\r\n }\r\n dynamic(root, options) {\r\n return this._walkStream(root, options);\r\n }\r\n static(patterns, options) {\r\n const filepaths = patterns.map(this._getFullEntryPath, this);\r\n const stream = new stream_1.PassThrough({ objectMode: true });\r\n 
stream._write = (index, _enc, done) => {\r\n return this._getEntry(filepaths[index], patterns[index], options)\r\n .then((entry) => {\r\n if (entry !== null && options.entryFilter(entry)) {\r\n stream.push(entry);\r\n }\r\n if (index === filepaths.length - 1) {\r\n stream.end();\r\n }\r\n done();\r\n })\r\n .catch(done);\r\n };\r\n for (let i = 0; i < filepaths.length; i++) {\r\n stream.write(i);\r\n }\r\n return stream;\r\n }\r\n _getEntry(filepath, pattern, options) {\r\n return this._getStat(filepath)\r\n .then((stats) => this._makeEntry(stats, pattern))\r\n .catch((error) => {\r\n if (options.errorFilter(error)) {\r\n return null;\r\n }\r\n throw error;\r\n });\r\n }\r\n _getStat(filepath) {\r\n return new Promise((resolve, reject) => {\r\n this._stat(filepath, this._fsStatSettings, (error, stats) => {\r\n return error === null ? resolve(stats) : reject(error);\r\n });\r\n });\r\n }\r\n}\r\nexports.default = ReaderStream;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst fsStat = require(\"@nodelib/fs.stat\");\r\nconst fsWalk = require(\"@nodelib/fs.walk\");\r\nconst reader_1 = require(\"./reader\");\r\nclass ReaderSync extends reader_1.default {\r\n constructor() {\r\n super(...arguments);\r\n this._walkSync = fsWalk.walkSync;\r\n this._statSync = fsStat.statSync;\r\n }\r\n dynamic(root, options) {\r\n return this._walkSync(root, options);\r\n }\r\n static(patterns, options) {\r\n const entries = [];\r\n for (const pattern of patterns) {\r\n const filepath = this._getFullEntryPath(pattern);\r\n const entry = this._getEntry(filepath, pattern, options);\r\n if (entry === null || !options.entryFilter(entry)) {\r\n continue;\r\n }\r\n entries.push(entry);\r\n }\r\n return entries;\r\n }\r\n _getEntry(filepath, pattern, options) {\r\n try {\r\n const stats = this._getStat(filepath);\r\n return this._makeEntry(stats, pattern);\r\n }\r\n catch (error) {\r\n if (options.errorFilter(error)) {\r\n return null;\r\n }\r\n throw error;\r\n }\r\n }\r\n _getStat(filepath) {\r\n return this._statSync(filepath, this._fsStatSettings);\r\n }\r\n}\r\nexports.default = ReaderSync;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.DEFAULT_FILE_SYSTEM_ADAPTER = void 0;\r\nconst fs = require(\"fs\");\r\nconst os = require(\"os\");\r\n/**\r\n * The `os.cpus` method can return zero. 
We expect the number of cores to be greater than zero.\r\n * https://github.com/nodejs/node/blob/7faeddf23a98c53896f8b574a6e66589e8fb1eb8/lib/os.js#L106-L107\r\n */\r\nconst CPU_COUNT = Math.max(os.cpus().length, 1);\r\nexports.DEFAULT_FILE_SYSTEM_ADAPTER = {\r\n lstat: fs.lstat,\r\n lstatSync: fs.lstatSync,\r\n stat: fs.stat,\r\n statSync: fs.statSync,\r\n readdir: fs.readdir,\r\n readdirSync: fs.readdirSync\r\n};\r\nclass Settings {\r\n constructor(_options = {}) {\r\n this._options = _options;\r\n this.absolute = this._getValue(this._options.absolute, false);\r\n this.baseNameMatch = this._getValue(this._options.baseNameMatch, false);\r\n this.braceExpansion = this._getValue(this._options.braceExpansion, true);\r\n this.caseSensitiveMatch = this._getValue(this._options.caseSensitiveMatch, true);\r\n this.concurrency = this._getValue(this._options.concurrency, CPU_COUNT);\r\n this.cwd = this._getValue(this._options.cwd, process.cwd());\r\n this.deep = this._getValue(this._options.deep, Infinity);\r\n this.dot = this._getValue(this._options.dot, false);\r\n this.extglob = this._getValue(this._options.extglob, true);\r\n this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, true);\r\n this.fs = this._getFileSystemMethods(this._options.fs);\r\n this.globstar = this._getValue(this._options.globstar, true);\r\n this.ignore = this._getValue(this._options.ignore, []);\r\n this.markDirectories = this._getValue(this._options.markDirectories, false);\r\n this.objectMode = this._getValue(this._options.objectMode, false);\r\n this.onlyDirectories = this._getValue(this._options.onlyDirectories, false);\r\n this.onlyFiles = this._getValue(this._options.onlyFiles, true);\r\n this.stats = this._getValue(this._options.stats, false);\r\n this.suppressErrors = this._getValue(this._options.suppressErrors, false);\r\n this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, false);\r\n this.unique = this._getValue(this._options.unique, true);\r\n if (this.onlyDirectories) {\r\n this.onlyFiles = false;\r\n }\r\n if (this.stats) {\r\n this.objectMode = true;\r\n }\r\n }\r\n _getValue(option, value) {\r\n return option === undefined ? 
value : option;\r\n }\r\n _getFileSystemMethods(methods = {}) {\r\n return Object.assign(Object.assign({}, exports.DEFAULT_FILE_SYSTEM_ADAPTER), methods);\r\n }\r\n}\r\nexports.default = Settings;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.splitWhen = exports.flatten = void 0;\r\nfunction flatten(items) {\r\n return items.reduce((collection, item) => [].concat(collection, item), []);\r\n}\r\nexports.flatten = flatten;\r\nfunction splitWhen(items, predicate) {\r\n const result = [[]];\r\n let groupIndex = 0;\r\n for (const item of items) {\r\n if (predicate(item)) {\r\n groupIndex++;\r\n result[groupIndex] = [];\r\n }\r\n else {\r\n result[groupIndex].push(item);\r\n }\r\n }\r\n return result;\r\n}\r\nexports.splitWhen = splitWhen;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.isEnoentCodeError = void 0;\r\nfunction isEnoentCodeError(error) {\r\n return error.code === 'ENOENT';\r\n}\r\nexports.isEnoentCodeError = isEnoentCodeError;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.createDirentFromStats = void 0;\r\nclass DirentFromStats {\r\n constructor(name, stats) {\r\n this.name = name;\r\n this.isBlockDevice = stats.isBlockDevice.bind(stats);\r\n this.isCharacterDevice = stats.isCharacterDevice.bind(stats);\r\n this.isDirectory = stats.isDirectory.bind(stats);\r\n this.isFIFO = stats.isFIFO.bind(stats);\r\n this.isFile = stats.isFile.bind(stats);\r\n this.isSocket = stats.isSocket.bind(stats);\r\n this.isSymbolicLink = stats.isSymbolicLink.bind(stats);\r\n }\r\n}\r\nfunction createDirentFromStats(name, stats) {\r\n return new DirentFromStats(name, stats);\r\n}\r\nexports.createDirentFromStats = createDirentFromStats;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.string = exports.stream = exports.pattern = exports.path = exports.fs = exports.errno = exports.array = void 0;\r\nconst array = require(\"./array\");\r\nexports.array = array;\r\nconst errno = require(\"./errno\");\r\nexports.errno = errno;\r\nconst fs = require(\"./fs\");\r\nexports.fs = fs;\r\nconst path = require(\"./path\");\r\nexports.path = path;\r\nconst pattern = require(\"./pattern\");\r\nexports.pattern = pattern;\r\nconst stream = require(\"./stream\");\r\nexports.stream = stream;\r\nconst string = require(\"./string\");\r\nexports.string = string;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.removeLeadingDotSegment = exports.escape = exports.makeAbsolute = exports.unixify = void 0;\r\nconst path = require(\"path\");\r\nconst LEADING_DOT_SEGMENT_CHARACTERS_COUNT = 2; // ./ or .\\\\\r\nconst UNESCAPED_GLOB_SYMBOLS_RE = /(\\\\?)([()*?[\\]{|}]|^!|[!+@](?=\\())/g;\r\n/**\r\n * Designed to work only with simple paths: `dir\\\\file`.\r\n */\r\nfunction unixify(filepath) {\r\n return filepath.replace(/\\\\/g, '/');\r\n}\r\nexports.unixify = unixify;\r\nfunction makeAbsolute(cwd, filepath) {\r\n return path.resolve(cwd, filepath);\r\n}\r\nexports.makeAbsolute = makeAbsolute;\r\nfunction escape(pattern) {\r\n return pattern.replace(UNESCAPED_GLOB_SYMBOLS_RE, '\\\\$2');\r\n}\r\nexports.escape = escape;\r\nfunction removeLeadingDotSegment(entry) {\r\n // We do not use `startsWith` because this is 10x slower than current implementation for some cases.\r\n // eslint-disable-next-line @typescript-eslint/prefer-string-starts-ends-with\r\n if (entry.charAt(0) 
=== '.') {\r\n const secondCharactery = entry.charAt(1);\r\n if (secondCharactery === '/' || secondCharactery === '\\\\') {\r\n return entry.slice(LEADING_DOT_SEGMENT_CHARACTERS_COUNT);\r\n }\r\n }\r\n return entry;\r\n}\r\nexports.removeLeadingDotSegment = removeLeadingDotSegment;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.matchAny = exports.convertPatternsToRe = exports.makeRe = exports.getPatternParts = exports.expandBraceExpansion = exports.expandPatternsWithBraceExpansion = exports.isAffectDepthOfReadingPattern = exports.endsWithSlashGlobStar = exports.hasGlobStar = exports.getBaseDirectory = exports.getPositivePatterns = exports.getNegativePatterns = exports.isPositivePattern = exports.isNegativePattern = exports.convertToNegativePattern = exports.convertToPositivePattern = exports.isDynamicPattern = exports.isStaticPattern = void 0;\r\nconst path = require(\"path\");\r\nconst globParent = require(\"glob-parent\");\r\nconst micromatch = require(\"micromatch\");\r\nconst picomatch = require(\"picomatch\");\r\nconst GLOBSTAR = '**';\r\nconst ESCAPE_SYMBOL = '\\\\';\r\nconst COMMON_GLOB_SYMBOLS_RE = /[*?]|^!/;\r\nconst REGEX_CHARACTER_CLASS_SYMBOLS_RE = /\\[.*]/;\r\nconst REGEX_GROUP_SYMBOLS_RE = /(?:^|[^!*+?@])\\(.*\\|.*\\)/;\r\nconst GLOB_EXTENSION_SYMBOLS_RE = /[!*+?@]\\(.*\\)/;\r\nconst BRACE_EXPANSIONS_SYMBOLS_RE = /{.*(?:,|\\.\\.).*}/;\r\nfunction isStaticPattern(pattern, options = {}) {\r\n return !isDynamicPattern(pattern, options);\r\n}\r\nexports.isStaticPattern = isStaticPattern;\r\nfunction isDynamicPattern(pattern, options = {}) {\r\n /**\r\n * A special case with an empty string is necessary for matching patterns that start with a forward slash.\r\n * An empty string cannot be a dynamic pattern.\r\n * For example, the pattern `/lib/*` will be spread into parts: '', 'lib', '*'.\r\n */\r\n if (pattern === '') {\r\n return false;\r\n }\r\n /**\r\n * When the `caseSensitiveMatch` option is disabled, all patterns must be marked as dynamic, because we cannot check\r\n * filepath directly (without read directory).\r\n */\r\n if (options.caseSensitiveMatch === false || pattern.includes(ESCAPE_SYMBOL)) {\r\n return true;\r\n }\r\n if (COMMON_GLOB_SYMBOLS_RE.test(pattern) || REGEX_CHARACTER_CLASS_SYMBOLS_RE.test(pattern) || REGEX_GROUP_SYMBOLS_RE.test(pattern)) {\r\n return true;\r\n }\r\n if (options.extglob !== false && GLOB_EXTENSION_SYMBOLS_RE.test(pattern)) {\r\n return true;\r\n }\r\n if (options.braceExpansion !== false && BRACE_EXPANSIONS_SYMBOLS_RE.test(pattern)) {\r\n return true;\r\n }\r\n return false;\r\n}\r\nexports.isDynamicPattern = isDynamicPattern;\r\nfunction convertToPositivePattern(pattern) {\r\n return isNegativePattern(pattern) ? pattern.slice(1) : pattern;\r\n}\r\nexports.convertToPositivePattern = convertToPositivePattern;\r\nfunction convertToNegativePattern(pattern) {\r\n return '!' 
+ pattern;\r\n}\r\nexports.convertToNegativePattern = convertToNegativePattern;\r\nfunction isNegativePattern(pattern) {\r\n return pattern.startsWith('!') && pattern[1] !== '(';\r\n}\r\nexports.isNegativePattern = isNegativePattern;\r\nfunction isPositivePattern(pattern) {\r\n return !isNegativePattern(pattern);\r\n}\r\nexports.isPositivePattern = isPositivePattern;\r\nfunction getNegativePatterns(patterns) {\r\n return patterns.filter(isNegativePattern);\r\n}\r\nexports.getNegativePatterns = getNegativePatterns;\r\nfunction getPositivePatterns(patterns) {\r\n return patterns.filter(isPositivePattern);\r\n}\r\nexports.getPositivePatterns = getPositivePatterns;\r\nfunction getBaseDirectory(pattern) {\r\n return globParent(pattern, { flipBackslashes: false });\r\n}\r\nexports.getBaseDirectory = getBaseDirectory;\r\nfunction hasGlobStar(pattern) {\r\n return pattern.includes(GLOBSTAR);\r\n}\r\nexports.hasGlobStar = hasGlobStar;\r\nfunction endsWithSlashGlobStar(pattern) {\r\n return pattern.endsWith('/' + GLOBSTAR);\r\n}\r\nexports.endsWithSlashGlobStar = endsWithSlashGlobStar;\r\nfunction isAffectDepthOfReadingPattern(pattern) {\r\n const basename = path.basename(pattern);\r\n return endsWithSlashGlobStar(pattern) || isStaticPattern(basename);\r\n}\r\nexports.isAffectDepthOfReadingPattern = isAffectDepthOfReadingPattern;\r\nfunction expandPatternsWithBraceExpansion(patterns) {\r\n return patterns.reduce((collection, pattern) => {\r\n return collection.concat(expandBraceExpansion(pattern));\r\n }, []);\r\n}\r\nexports.expandPatternsWithBraceExpansion = expandPatternsWithBraceExpansion;\r\nfunction expandBraceExpansion(pattern) {\r\n return micromatch.braces(pattern, {\r\n expand: true,\r\n nodupes: true\r\n });\r\n}\r\nexports.expandBraceExpansion = expandBraceExpansion;\r\nfunction getPatternParts(pattern, options) {\r\n let { parts } = picomatch.scan(pattern, Object.assign(Object.assign({}, options), { parts: true }));\r\n /**\r\n * The scan method returns an empty array in some cases.\r\n * See micromatch/picomatch#58 for more details.\r\n */\r\n if (parts.length === 0) {\r\n parts = [pattern];\r\n }\r\n /**\r\n * The scan method does not return an empty part for the pattern with a forward slash.\r\n * This is another part of micromatch/picomatch#58.\r\n */\r\n if (parts[0].startsWith('/')) {\r\n parts[0] = parts[0].slice(1);\r\n parts.unshift('');\r\n }\r\n return parts;\r\n}\r\nexports.getPatternParts = getPatternParts;\r\nfunction makeRe(pattern, options) {\r\n return micromatch.makeRe(pattern, options);\r\n}\r\nexports.makeRe = makeRe;\r\nfunction convertPatternsToRe(patterns, options) {\r\n return patterns.map((pattern) => makeRe(pattern, options));\r\n}\r\nexports.convertPatternsToRe = convertPatternsToRe;\r\nfunction matchAny(entry, patternsRe) {\r\n return patternsRe.some((patternRe) => patternRe.test(entry));\r\n}\r\nexports.matchAny = matchAny;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.merge = void 0;\r\nconst merge2 = require(\"merge2\");\r\nfunction merge(streams) {\r\n const mergedStream = merge2(streams);\r\n streams.forEach((stream) => {\r\n stream.once('error', (error) => mergedStream.emit('error', error));\r\n });\r\n mergedStream.once('close', () => propagateCloseEventToSources(streams));\r\n mergedStream.once('end', () => propagateCloseEventToSources(streams));\r\n return mergedStream;\r\n}\r\nexports.merge = merge;\r\nfunction propagateCloseEventToSources(streams) {\r\n streams.forEach((stream) => 
stream.emit('close'));\r\n}\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.isEmpty = exports.isString = void 0;\r\nfunction isString(input) {\r\n return typeof input === 'string';\r\n}\r\nexports.isString = isString;\r\nfunction isEmpty(input) {\r\n return input === '';\r\n}\r\nexports.isEmpty = isEmpty;\r\n","'use strict'\n\nvar reusify = require('reusify')\n\nfunction fastqueue (context, worker, concurrency) {\n if (typeof context === 'function') {\n concurrency = worker\n worker = context\n context = null\n }\n\n if (concurrency < 1) {\n throw new Error('fastqueue concurrency must be greater than 1')\n }\n\n var cache = reusify(Task)\n var queueHead = null\n var queueTail = null\n var _running = 0\n var errorHandler = null\n\n var self = {\n push: push,\n drain: noop,\n saturated: noop,\n pause: pause,\n paused: false,\n concurrency: concurrency,\n running: running,\n resume: resume,\n idle: idle,\n length: length,\n getQueue: getQueue,\n unshift: unshift,\n empty: noop,\n kill: kill,\n killAndDrain: killAndDrain,\n error: error\n }\n\n return self\n\n function running () {\n return _running\n }\n\n function pause () {\n self.paused = true\n }\n\n function length () {\n var current = queueHead\n var counter = 0\n\n while (current) {\n current = current.next\n counter++\n }\n\n return counter\n }\n\n function getQueue () {\n var current = queueHead\n var tasks = []\n\n while (current) {\n tasks.push(current.value)\n current = current.next\n }\n\n return tasks\n }\n\n function resume () {\n if (!self.paused) return\n self.paused = false\n for (var i = 0; i < self.concurrency; i++) {\n _running++\n release()\n }\n }\n\n function idle () {\n return _running === 0 && self.length() === 0\n }\n\n function push (value, done) {\n var current = cache.get()\n\n current.context = context\n current.release = release\n current.value = value\n current.callback = done || noop\n current.errorHandler = errorHandler\n\n if (_running === self.concurrency || self.paused) {\n if (queueTail) {\n queueTail.next = current\n queueTail = current\n } else {\n queueHead = current\n queueTail = current\n self.saturated()\n }\n } else {\n _running++\n worker.call(context, current.value, current.worked)\n }\n }\n\n function unshift (value, done) {\n var current = cache.get()\n\n current.context = context\n current.release = release\n current.value = value\n current.callback = done || noop\n\n if (_running === self.concurrency || self.paused) {\n if (queueHead) {\n current.next = queueHead\n queueHead = current\n } else {\n queueHead = current\n queueTail = current\n self.saturated()\n }\n } else {\n _running++\n worker.call(context, current.value, current.worked)\n }\n }\n\n function release (holder) {\n if (holder) {\n cache.release(holder)\n }\n var next = queueHead\n if (next) {\n if (!self.paused) {\n if (queueTail === queueHead) {\n queueTail = null\n }\n queueHead = next.next\n next.next = null\n worker.call(context, next.value, next.worked)\n if (queueTail === null) {\n self.empty()\n }\n } else {\n _running--\n }\n } else if (--_running === 0) {\n self.drain()\n }\n }\n\n function kill () {\n queueHead = null\n queueTail = null\n self.drain = noop\n }\n\n function killAndDrain () {\n queueHead = null\n queueTail = null\n self.drain()\n self.drain = noop\n }\n\n function error (handler) {\n errorHandler = handler\n }\n}\n\nfunction noop () {}\n\nfunction Task () {\n this.value = null\n this.callback = noop\n this.next = null\n this.release = noop\n 
this.context = null\n this.errorHandler = null\n\n var self = this\n\n this.worked = function worked (err, result) {\n var callback = self.callback\n var errorHandler = self.errorHandler\n var val = self.value\n self.value = null\n self.callback = noop\n if (self.errorHandler) {\n errorHandler(err, val)\n }\n callback.call(self.context, err, result)\n self.release(self)\n }\n}\n\nmodule.exports = fastqueue\n","'use strict';\n\nvar isGlob = require('is-glob');\nvar pathPosixDirname = require('path').posix.dirname;\nvar isWin32 = require('os').platform() === 'win32';\n\nvar slash = '/';\nvar backslash = /\\\\/g;\nvar enclosure = /[\\{\\[].*[\\/]*.*[\\}\\]]$/;\nvar globby = /(^|[^\\\\])([\\{\\[]|\\([^\\)]+$)/;\nvar escaped = /\\\\([\\!\\*\\?\\|\\[\\]\\(\\)\\{\\}])/g;\n\n/**\n * @param {string} str\n * @param {Object} opts\n * @param {boolean} [opts.flipBackslashes=true]\n */\nmodule.exports = function globParent(str, opts) {\n var options = Object.assign({ flipBackslashes: true }, opts);\n\n // flip windows path separators\n if (options.flipBackslashes && isWin32 && str.indexOf(slash) < 0) {\n str = str.replace(backslash, slash);\n }\n\n // special case for strings ending in enclosure containing path separator\n if (enclosure.test(str)) {\n str += slash;\n }\n\n // preserves full path in case of trailing path separator\n str += 'a';\n\n // remove path parts that are globby\n do {\n str = pathPosixDirname(str);\n } while (isGlob(str) || globby.test(str));\n\n // remove escape chars and return result\n return str.replace(escaped, '$1');\n};\n","/*!\n * is-extglob \n *\n * Copyright (c) 2014-2016, Jon Schlinkert.\n * Licensed under the MIT License.\n */\n\nmodule.exports = function isExtglob(str) {\n if (typeof str !== 'string' || str === '') {\n return false;\n }\n\n var match;\n while ((match = /(\\\\).|([@?!+*]\\(.*\\))/g.exec(str))) {\n if (match[2]) return true;\n str = str.slice(match.index + match[0].length);\n }\n\n return false;\n};\n","/*!\n * is-glob \n *\n * Copyright (c) 2014-2017, Jon Schlinkert.\n * Released under the MIT License.\n */\n\nvar isExtglob = require('is-extglob');\nvar chars = { '{': '}', '(': ')', '[': ']'};\nvar strictRegex = /\\\\(.)|(^!|\\*|[\\].+)]\\?|\\[[^\\\\\\]]+\\]|\\{[^\\\\}]+\\}|\\(\\?[:!=][^\\\\)]+\\)|\\([^|]+\\|[^\\\\)]+\\))/;\nvar relaxedRegex = /\\\\(.)|(^!|[*?{}()[\\]]|\\(\\?)/;\n\nmodule.exports = function isGlob(str, options) {\n if (typeof str !== 'string' || str === '') {\n return false;\n }\n\n if (isExtglob(str)) {\n return true;\n }\n\n var regex = strictRegex;\n var match;\n\n // optionally relax regex\n if (options && options.strict === false) {\n regex = relaxedRegex;\n }\n\n while ((match = regex.exec(str))) {\n if (match[2]) return true;\n var idx = match.index + match[0].length;\n\n // if an open bracket/brace/paren is escaped,\n // set the index to the next closing character\n var open = match[1];\n var close = open ? 
chars[open] : null;\n if (open && close) {\n var n = str.indexOf(close, idx);\n if (n !== -1) {\n idx = n + 1;\n }\n }\n\n str = str.slice(idx);\n }\n return false;\n};\n","'use strict'\n/*\n * merge2\n * https://github.com/teambition/merge2\n *\n * Copyright (c) 2014-2020 Teambition\n * Licensed under the MIT license.\n */\nconst Stream = require('stream')\nconst PassThrough = Stream.PassThrough\nconst slice = Array.prototype.slice\n\nmodule.exports = merge2\n\nfunction merge2 () {\n const streamsQueue = []\n const args = slice.call(arguments)\n let merging = false\n let options = args[args.length - 1]\n\n if (options && !Array.isArray(options) && options.pipe == null) {\n args.pop()\n } else {\n options = {}\n }\n\n const doEnd = options.end !== false\n const doPipeError = options.pipeError === true\n if (options.objectMode == null) {\n options.objectMode = true\n }\n if (options.highWaterMark == null) {\n options.highWaterMark = 64 * 1024\n }\n const mergedStream = PassThrough(options)\n\n function addStream () {\n for (let i = 0, len = arguments.length; i < len; i++) {\n streamsQueue.push(pauseStreams(arguments[i], options))\n }\n mergeStream()\n return this\n }\n\n function mergeStream () {\n if (merging) {\n return\n }\n merging = true\n\n let streams = streamsQueue.shift()\n if (!streams) {\n process.nextTick(endStream)\n return\n }\n if (!Array.isArray(streams)) {\n streams = [streams]\n }\n\n let pipesCount = streams.length + 1\n\n function next () {\n if (--pipesCount > 0) {\n return\n }\n merging = false\n mergeStream()\n }\n\n function pipe (stream) {\n function onend () {\n stream.removeListener('merge2UnpipeEnd', onend)\n stream.removeListener('end', onend)\n if (doPipeError) {\n stream.removeListener('error', onerror)\n }\n next()\n }\n function onerror (err) {\n mergedStream.emit('error', err)\n }\n // skip ended stream\n if (stream._readableState.endEmitted) {\n return next()\n }\n\n stream.on('merge2UnpipeEnd', onend)\n stream.on('end', onend)\n\n if (doPipeError) {\n stream.on('error', onerror)\n }\n\n stream.pipe(mergedStream, { end: false })\n // compatible for old stream\n stream.resume()\n }\n\n for (let i = 0; i < streams.length; i++) {\n pipe(streams[i])\n }\n\n next()\n }\n\n function endStream () {\n merging = false\n // emit 'queueDrain' when all streams merged.\n mergedStream.emit('queueDrain')\n if (doEnd) {\n mergedStream.end()\n }\n }\n\n mergedStream.setMaxListeners(0)\n mergedStream.add = addStream\n mergedStream.on('unpipe', function (stream) {\n stream.emit('merge2UnpipeEnd')\n })\n\n if (args.length) {\n addStream.apply(null, args)\n }\n return mergedStream\n}\n\n// check and pause streams for pipe.\nfunction pauseStreams (streams, options) {\n if (!Array.isArray(streams)) {\n // Backwards-compat with old-style streams\n if (!streams._readableState && streams.pipe) {\n streams = streams.pipe(PassThrough(options))\n }\n if (!streams._readableState || !streams.pause || !streams.pipe) {\n throw new Error('Only readable stream can be merged.')\n }\n streams.pause()\n } else {\n for (let i = 0, len = streams.length; i < len; i++) {\n streams[i] = pauseStreams(streams[i], options)\n }\n }\n return streams\n}\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; }\n\nvar Stream = _interopDefault(require('stream'));\nvar http = _interopDefault(require('http'));\nvar Url = _interopDefault(require('url'));\nvar https = _interopDefault(require('https'));\nvar zlib = _interopDefault(require('zlib'));\n\n// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js\n\n// fix for \"Readable\" isn't a named export issue\nconst Readable = Stream.Readable;\n\nconst BUFFER = Symbol('buffer');\nconst TYPE = Symbol('type');\n\nclass Blob {\n\tconstructor() {\n\t\tthis[TYPE] = '';\n\n\t\tconst blobParts = arguments[0];\n\t\tconst options = arguments[1];\n\n\t\tconst buffers = [];\n\t\tlet size = 0;\n\n\t\tif (blobParts) {\n\t\t\tconst a = blobParts;\n\t\t\tconst length = Number(a.length);\n\t\t\tfor (let i = 0; i < length; i++) {\n\t\t\t\tconst element = a[i];\n\t\t\t\tlet buffer;\n\t\t\t\tif (element instanceof Buffer) {\n\t\t\t\t\tbuffer = element;\n\t\t\t\t} else if (ArrayBuffer.isView(element)) {\n\t\t\t\t\tbuffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);\n\t\t\t\t} else if (element instanceof ArrayBuffer) {\n\t\t\t\t\tbuffer = Buffer.from(element);\n\t\t\t\t} else if (element instanceof Blob) {\n\t\t\t\t\tbuffer = element[BUFFER];\n\t\t\t\t} else {\n\t\t\t\t\tbuffer = Buffer.from(typeof element === 'string' ? element : String(element));\n\t\t\t\t}\n\t\t\t\tsize += buffer.length;\n\t\t\t\tbuffers.push(buffer);\n\t\t\t}\n\t\t}\n\n\t\tthis[BUFFER] = Buffer.concat(buffers);\n\n\t\tlet type = options && options.type !== undefined && String(options.type).toLowerCase();\n\t\tif (type && !/[^\\u0020-\\u007E]/.test(type)) {\n\t\t\tthis[TYPE] = type;\n\t\t}\n\t}\n\tget size() {\n\t\treturn this[BUFFER].length;\n\t}\n\tget type() {\n\t\treturn this[TYPE];\n\t}\n\ttext() {\n\t\treturn Promise.resolve(this[BUFFER].toString());\n\t}\n\tarrayBuffer() {\n\t\tconst buf = this[BUFFER];\n\t\tconst ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\treturn Promise.resolve(ab);\n\t}\n\tstream() {\n\t\tconst readable = new Readable();\n\t\treadable._read = function () {};\n\t\treadable.push(this[BUFFER]);\n\t\treadable.push(null);\n\t\treturn readable;\n\t}\n\ttoString() {\n\t\treturn '[object Blob]';\n\t}\n\tslice() {\n\t\tconst size = this.size;\n\n\t\tconst start = arguments[0];\n\t\tconst end = arguments[1];\n\t\tlet relativeStart, relativeEnd;\n\t\tif (start === undefined) {\n\t\t\trelativeStart = 0;\n\t\t} else if (start < 0) {\n\t\t\trelativeStart = Math.max(size + start, 0);\n\t\t} else {\n\t\t\trelativeStart = Math.min(start, size);\n\t\t}\n\t\tif (end === undefined) {\n\t\t\trelativeEnd = size;\n\t\t} else if (end < 0) {\n\t\t\trelativeEnd = Math.max(size + end, 0);\n\t\t} else {\n\t\t\trelativeEnd = Math.min(end, size);\n\t\t}\n\t\tconst span = Math.max(relativeEnd - relativeStart, 0);\n\n\t\tconst buffer = this[BUFFER];\n\t\tconst slicedBuffer = buffer.slice(relativeStart, relativeStart + span);\n\t\tconst blob = new Blob([], { type: arguments[2] });\n\t\tblob[BUFFER] = slicedBuffer;\n\t\treturn blob;\n\t}\n}\n\nObject.defineProperties(Blob.prototype, {\n\tsize: { enumerable: true },\n\ttype: { enumerable: true },\n\tslice: { enumerable: true }\n});\n\nObject.defineProperty(Blob.prototype, Symbol.toStringTag, {\n\tvalue: 'Blob',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * fetch-error.js\n *\n * FetchError interface for operational errors\n */\n\n/**\n * Create FetchError instance\n *\n * @param String 
message Error message for human\n * @param String type Error type for machine\n * @param String systemError For Node.js system error\n * @return FetchError\n */\nfunction FetchError(message, type, systemError) {\n Error.call(this, message);\n\n this.message = message;\n this.type = type;\n\n // when err.type is `system`, err.code contains system error code\n if (systemError) {\n this.code = this.errno = systemError.code;\n }\n\n // hide custom error implementation details from end-users\n Error.captureStackTrace(this, this.constructor);\n}\n\nFetchError.prototype = Object.create(Error.prototype);\nFetchError.prototype.constructor = FetchError;\nFetchError.prototype.name = 'FetchError';\n\nlet convert;\ntry {\n\tconvert = require('encoding').convert;\n} catch (e) {}\n\nconst INTERNALS = Symbol('Body internals');\n\n// fix an issue where \"PassThrough\" isn't a named export for node <10\nconst PassThrough = Stream.PassThrough;\n\n/**\n * Body mixin\n *\n * Ref: https://fetch.spec.whatwg.org/#body\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nfunction Body(body) {\n\tvar _this = this;\n\n\tvar _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},\n\t _ref$size = _ref.size;\n\n\tlet size = _ref$size === undefined ? 0 : _ref$size;\n\tvar _ref$timeout = _ref.timeout;\n\tlet timeout = _ref$timeout === undefined ? 0 : _ref$timeout;\n\n\tif (body == null) {\n\t\t// body is undefined or null\n\t\tbody = null;\n\t} else if (isURLSearchParams(body)) {\n\t\t// body is a URLSearchParams\n\t\tbody = Buffer.from(body.toString());\n\t} else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {\n\t\t// body is ArrayBuffer\n\t\tbody = Buffer.from(body);\n\t} else if (ArrayBuffer.isView(body)) {\n\t\t// body is ArrayBufferView\n\t\tbody = Buffer.from(body.buffer, body.byteOffset, body.byteLength);\n\t} else if (body instanceof Stream) ; else {\n\t\t// none of the above\n\t\t// coerce to string then buffer\n\t\tbody = Buffer.from(String(body));\n\t}\n\tthis[INTERNALS] = {\n\t\tbody,\n\t\tdisturbed: false,\n\t\terror: null\n\t};\n\tthis.size = size;\n\tthis.timeout = timeout;\n\n\tif (body instanceof Stream) {\n\t\tbody.on('error', function (err) {\n\t\t\tconst error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);\n\t\t\t_this[INTERNALS].error = error;\n\t\t});\n\t}\n}\n\nBody.prototype = {\n\tget body() {\n\t\treturn this[INTERNALS].body;\n\t},\n\n\tget bodyUsed() {\n\t\treturn this[INTERNALS].disturbed;\n\t},\n\n\t/**\n * Decode response as ArrayBuffer\n *\n * @return Promise\n */\n\tarrayBuffer() {\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\t});\n\t},\n\n\t/**\n * Return raw response as Blob\n *\n * @return Promise\n */\n\tblob() {\n\t\tlet ct = this.headers && this.headers.get('content-type') || '';\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn Object.assign(\n\t\t\t// Prevent copying\n\t\t\tnew Blob([], {\n\t\t\t\ttype: ct.toLowerCase()\n\t\t\t}), {\n\t\t\t\t[BUFFER]: buf\n\t\t\t});\n\t\t});\n\t},\n\n\t/**\n * Decode response as json\n *\n * @return Promise\n */\n\tjson() {\n\t\tvar _this2 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\ttry {\n\t\t\t\treturn JSON.parse(buffer.toString());\n\t\t\t} catch (err) {\n\t\t\t\treturn Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));\n\t\t\t}\n\t\t});\n\t},\n\n\t/**\n * Decode response as text\n *\n * @return Promise\n */\n\ttext() {\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn buffer.toString();\n\t\t});\n\t},\n\n\t/**\n * Decode response as buffer (non-spec api)\n *\n * @return Promise\n */\n\tbuffer() {\n\t\treturn consumeBody.call(this);\n\t},\n\n\t/**\n * Decode response as text, while automatically detecting the encoding and\n * trying to decode to UTF-8 (non-spec api)\n *\n * @return Promise\n */\n\ttextConverted() {\n\t\tvar _this3 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn convertBody(buffer, _this3.headers);\n\t\t});\n\t}\n};\n\n// In browsers, all properties are enumerable.\nObject.defineProperties(Body.prototype, {\n\tbody: { enumerable: true },\n\tbodyUsed: { enumerable: true },\n\tarrayBuffer: { enumerable: true },\n\tblob: { enumerable: true },\n\tjson: { enumerable: true },\n\ttext: { enumerable: true }\n});\n\nBody.mixIn = function (proto) {\n\tfor (const name of Object.getOwnPropertyNames(Body.prototype)) {\n\t\t// istanbul ignore else: future proof\n\t\tif (!(name in proto)) {\n\t\t\tconst desc = Object.getOwnPropertyDescriptor(Body.prototype, name);\n\t\t\tObject.defineProperty(proto, name, desc);\n\t\t}\n\t}\n};\n\n/**\n * Consume and convert an entire Body to a Buffer.\n *\n * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body\n *\n * @return Promise\n */\nfunction consumeBody() {\n\tvar _this4 = this;\n\n\tif (this[INTERNALS].disturbed) {\n\t\treturn Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));\n\t}\n\n\tthis[INTERNALS].disturbed = true;\n\n\tif (this[INTERNALS].error) {\n\t\treturn Body.Promise.reject(this[INTERNALS].error);\n\t}\n\n\tlet body = this.body;\n\n\t// body is null\n\tif (body === null) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is blob\n\tif (isBlob(body)) {\n\t\tbody = body.stream();\n\t}\n\n\t// body is buffer\n\tif (Buffer.isBuffer(body)) {\n\t\treturn Body.Promise.resolve(body);\n\t}\n\n\t// istanbul ignore if: should never happen\n\tif (!(body instanceof Stream)) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is 
stream\n\t// get ready to actually consume the body\n\tlet accum = [];\n\tlet accumBytes = 0;\n\tlet abort = false;\n\n\treturn new Body.Promise(function (resolve, reject) {\n\t\tlet resTimeout;\n\n\t\t// allow timeout on slow response body\n\t\tif (_this4.timeout) {\n\t\t\tresTimeout = setTimeout(function () {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));\n\t\t\t}, _this4.timeout);\n\t\t}\n\n\t\t// handle stream errors\n\t\tbody.on('error', function (err) {\n\t\t\tif (err.name === 'AbortError') {\n\t\t\t\t// if the request was aborted, reject with this Error\n\t\t\t\tabort = true;\n\t\t\t\treject(err);\n\t\t\t} else {\n\t\t\t\t// other errors, such as incorrect content-encoding\n\t\t\t\treject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\n\t\tbody.on('data', function (chunk) {\n\t\t\tif (abort || chunk === null) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tif (_this4.size && accumBytes + chunk.length > _this4.size) {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\taccumBytes += chunk.length;\n\t\t\taccum.push(chunk);\n\t\t});\n\n\t\tbody.on('end', function () {\n\t\t\tif (abort) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tclearTimeout(resTimeout);\n\n\t\t\ttry {\n\t\t\t\tresolve(Buffer.concat(accum, accumBytes));\n\t\t\t} catch (err) {\n\t\t\t\t// handle streams that have accumulated too much data (issue #414)\n\t\t\t\treject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\t});\n}\n\n/**\n * Detect buffer encoding and convert to target encoding\n * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding\n *\n * @param Buffer buffer Incoming buffer\n * @param String encoding Target encoding\n * @return String\n */\nfunction convertBody(buffer, headers) {\n\tif (typeof convert !== 'function') {\n\t\tthrow new Error('The package `encoding` must be installed to use the textConverted() function');\n\t}\n\n\tconst ct = headers.get('content-type');\n\tlet charset = 'utf-8';\n\tlet res, str;\n\n\t// header\n\tif (ct) {\n\t\tres = /charset=([^;]*)/i.exec(ct);\n\t}\n\n\t// no charset in content type, peek at response body for at most 1024 bytes\n\tstr = buffer.slice(0, 1024).toString();\n\n\t// html5\n\tif (!res && str) {\n\t\tres = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined;\n\n\t\tthis[MAP] = Object.create(null);\n\n\t\tif (init instanceof Headers) {\n\t\t\tconst rawHeaders = init.raw();\n\t\t\tconst headerNames = Object.keys(rawHeaders);\n\n\t\t\tfor (const headerName of headerNames) {\n\t\t\t\tfor (const value of rawHeaders[headerName]) {\n\t\t\t\t\tthis.append(headerName, value);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\treturn;\n\t\t}\n\n\t\t// We don't worry about converting prop to ByteString here as append()\n\t\t// will handle it.\n\t\tif (init == null) ; else if (typeof init === 'object') {\n\t\t\tconst method = init[Symbol.iterator];\n\t\t\tif (method != null) {\n\t\t\t\tif (typeof method !== 'function') {\n\t\t\t\t\tthrow new TypeError('Header pairs must be iterable');\n\t\t\t\t}\n\n\t\t\t\t// sequence>\n\t\t\t\t// Note: per spec we have to first exhaust the lists then process them\n\t\t\t\tconst pairs = [];\n\t\t\t\tfor (const pair of init) {\n\t\t\t\t\tif (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be iterable');\n\t\t\t\t\t}\n\t\t\t\t\tpairs.push(Array.from(pair));\n\t\t\t\t}\n\n\t\t\t\tfor (const pair of pairs) {\n\t\t\t\t\tif (pair.length !== 2) {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be a name/value tuple');\n\t\t\t\t\t}\n\t\t\t\t\tthis.append(pair[0], pair[1]);\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\t// record\n\t\t\t\tfor (const key of Object.keys(init)) {\n\t\t\t\t\tconst value = init[key];\n\t\t\t\t\tthis.append(key, value);\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tthrow new TypeError('Provided initializer must be an object');\n\t\t}\n\t}\n\n\t/**\n * Return combined header value given name\n *\n * @param String name Header name\n * @return Mixed\n */\n\tget(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key === undefined) {\n\t\t\treturn null;\n\t\t}\n\n\t\treturn this[MAP][key].join(', ');\n\t}\n\n\t/**\n * Iterate over all headers\n *\n * @param Function callback Executed for each item with parameters (value, name, thisArg)\n * @param Boolean thisArg `this` context for callback function\n * @return Void\n */\n\tforEach(callback) {\n\t\tlet thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;\n\n\t\tlet pairs = getHeaders(this);\n\t\tlet i = 0;\n\t\twhile (i < pairs.length) {\n\t\t\tvar _pairs$i = pairs[i];\n\t\t\tconst name = _pairs$i[0],\n\t\t\t value = _pairs$i[1];\n\n\t\t\tcallback.call(thisArg, value, name, this);\n\t\t\tpairs = getHeaders(this);\n\t\t\ti++;\n\t\t}\n\t}\n\n\t/**\n * Overwrite header values given name\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tset(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tthis[MAP][key !== undefined ? 
key : name] = [value];\n\t}\n\n\t/**\n * Append a value onto existing header\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tappend(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tthis[MAP][key].push(value);\n\t\t} else {\n\t\t\tthis[MAP][name] = [value];\n\t\t}\n\t}\n\n\t/**\n * Check for header name existence\n *\n * @param String name Header name\n * @return Boolean\n */\n\thas(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\treturn find(this[MAP], name) !== undefined;\n\t}\n\n\t/**\n * Delete all header values given name\n *\n * @param String name Header name\n * @return Void\n */\n\tdelete(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tdelete this[MAP][key];\n\t\t}\n\t}\n\n\t/**\n * Return raw headers (non-spec api)\n *\n * @return Object\n */\n\traw() {\n\t\treturn this[MAP];\n\t}\n\n\t/**\n * Get an iterator on keys.\n *\n * @return Iterator\n */\n\tkeys() {\n\t\treturn createHeadersIterator(this, 'key');\n\t}\n\n\t/**\n * Get an iterator on values.\n *\n * @return Iterator\n */\n\tvalues() {\n\t\treturn createHeadersIterator(this, 'value');\n\t}\n\n\t/**\n * Get an iterator on entries.\n *\n * This is the default iterator of the Headers object.\n *\n * @return Iterator\n */\n\t[Symbol.iterator]() {\n\t\treturn createHeadersIterator(this, 'key+value');\n\t}\n}\nHeaders.prototype.entries = Headers.prototype[Symbol.iterator];\n\nObject.defineProperty(Headers.prototype, Symbol.toStringTag, {\n\tvalue: 'Headers',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Headers.prototype, {\n\tget: { enumerable: true },\n\tforEach: { enumerable: true },\n\tset: { enumerable: true },\n\tappend: { enumerable: true },\n\thas: { enumerable: true },\n\tdelete: { enumerable: true },\n\tkeys: { enumerable: true },\n\tvalues: { enumerable: true },\n\tentries: { enumerable: true }\n});\n\nfunction getHeaders(headers) {\n\tlet kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';\n\n\tconst keys = Object.keys(headers[MAP]).sort();\n\treturn keys.map(kind === 'key' ? function (k) {\n\t\treturn k.toLowerCase();\n\t} : kind === 'value' ? 
function (k) {\n\t\treturn headers[MAP][k].join(', ');\n\t} : function (k) {\n\t\treturn [k.toLowerCase(), headers[MAP][k].join(', ')];\n\t});\n}\n\nconst INTERNAL = Symbol('internal');\n\nfunction createHeadersIterator(target, kind) {\n\tconst iterator = Object.create(HeadersIteratorPrototype);\n\titerator[INTERNAL] = {\n\t\ttarget,\n\t\tkind,\n\t\tindex: 0\n\t};\n\treturn iterator;\n}\n\nconst HeadersIteratorPrototype = Object.setPrototypeOf({\n\tnext() {\n\t\t// istanbul ignore if\n\t\tif (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {\n\t\t\tthrow new TypeError('Value of `this` is not a HeadersIterator');\n\t\t}\n\n\t\tvar _INTERNAL = this[INTERNAL];\n\t\tconst target = _INTERNAL.target,\n\t\t kind = _INTERNAL.kind,\n\t\t index = _INTERNAL.index;\n\n\t\tconst values = getHeaders(target, kind);\n\t\tconst len = values.length;\n\t\tif (index >= len) {\n\t\t\treturn {\n\t\t\t\tvalue: undefined,\n\t\t\t\tdone: true\n\t\t\t};\n\t\t}\n\n\t\tthis[INTERNAL].index = index + 1;\n\n\t\treturn {\n\t\t\tvalue: values[index],\n\t\t\tdone: false\n\t\t};\n\t}\n}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));\n\nObject.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {\n\tvalue: 'HeadersIterator',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * Export the Headers object in a form that Node.js can consume.\n *\n * @param Headers headers\n * @return Object\n */\nfunction exportNodeCompatibleHeaders(headers) {\n\tconst obj = Object.assign({ __proto__: null }, headers[MAP]);\n\n\t// http.request() only supports string as Host header. This hack makes\n\t// specifying custom Host header possible.\n\tconst hostHeaderKey = find(headers[MAP], 'Host');\n\tif (hostHeaderKey !== undefined) {\n\t\tobj[hostHeaderKey] = obj[hostHeaderKey][0];\n\t}\n\n\treturn obj;\n}\n\n/**\n * Create a Headers object from an object of headers, ignoring those that do\n * not conform to HTTP grammar productions.\n *\n * @param Object obj Object of headers\n * @return Headers\n */\nfunction createHeadersLenient(obj) {\n\tconst headers = new Headers();\n\tfor (const name of Object.keys(obj)) {\n\t\tif (invalidTokenRegex.test(name)) {\n\t\t\tcontinue;\n\t\t}\n\t\tif (Array.isArray(obj[name])) {\n\t\t\tfor (const val of obj[name]) {\n\t\t\t\tif (invalidHeaderCharRegex.test(val)) {\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\tif (headers[MAP][name] === undefined) {\n\t\t\t\t\theaders[MAP][name] = [val];\n\t\t\t\t} else {\n\t\t\t\t\theaders[MAP][name].push(val);\n\t\t\t\t}\n\t\t\t}\n\t\t} else if (!invalidHeaderCharRegex.test(obj[name])) {\n\t\t\theaders[MAP][name] = [obj[name]];\n\t\t}\n\t}\n\treturn headers;\n}\n\nconst INTERNALS$1 = Symbol('Response internals');\n\n// fix an issue where \"STATUS_CODES\" aren't a named export for node <10\nconst STATUS_CODES = http.STATUS_CODES;\n\n/**\n * Response class\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nclass Response {\n\tconstructor() {\n\t\tlet body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;\n\t\tlet opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tBody.call(this, body, opts);\n\n\t\tconst status = opts.status || 200;\n\t\tconst headers = new Headers(opts.headers);\n\n\t\tif (body != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(body);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tthis[INTERNALS$1] = {\n\t\t\turl: opts.url,\n\t\t\tstatus,\n\t\t\tstatusText: opts.statusText || STATUS_CODES[status],\n\t\t\theaders,\n\t\t\tcounter: opts.counter\n\t\t};\n\t}\n\n\tget url() {\n\t\treturn this[INTERNALS$1].url || '';\n\t}\n\n\tget status() {\n\t\treturn this[INTERNALS$1].status;\n\t}\n\n\t/**\n * Convenience property representing if the request ended normally\n */\n\tget ok() {\n\t\treturn this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;\n\t}\n\n\tget redirected() {\n\t\treturn this[INTERNALS$1].counter > 0;\n\t}\n\n\tget statusText() {\n\t\treturn this[INTERNALS$1].statusText;\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$1].headers;\n\t}\n\n\t/**\n * Clone this response\n *\n * @return Response\n */\n\tclone() {\n\t\treturn new Response(clone(this), {\n\t\t\turl: this.url,\n\t\t\tstatus: this.status,\n\t\t\tstatusText: this.statusText,\n\t\t\theaders: this.headers,\n\t\t\tok: this.ok,\n\t\t\tredirected: this.redirected\n\t\t});\n\t}\n}\n\nBody.mixIn(Response.prototype);\n\nObject.defineProperties(Response.prototype, {\n\turl: { enumerable: true },\n\tstatus: { enumerable: true },\n\tok: { enumerable: true },\n\tredirected: { enumerable: true },\n\tstatusText: { enumerable: true },\n\theaders: { enumerable: true },\n\tclone: { enumerable: true }\n});\n\nObject.defineProperty(Response.prototype, Symbol.toStringTag, {\n\tvalue: 'Response',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nconst INTERNALS$2 = Symbol('Request internals');\n\n// fix an issue where \"format\", \"parse\" aren't a named export for node <10\nconst parse_url = Url.parse;\nconst format_url = Url.format;\n\nconst streamDestructionSupported = 'destroy' in Stream.Readable.prototype;\n\n/**\n * Check if a value is an instance of Request.\n *\n * @param Mixed input\n * @return Boolean\n */\nfunction isRequest(input) {\n\treturn typeof input === 'object' && typeof input[INTERNALS$2] === 'object';\n}\n\nfunction isAbortSignal(signal) {\n\tconst proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal);\n\treturn !!(proto && proto.constructor.name === 'AbortSignal');\n}\n\n/**\n * Request class\n *\n * @param Mixed input Url or Request instance\n * @param Object init Custom options\n * @return Void\n */\nclass Request {\n\tconstructor(input) {\n\t\tlet init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tlet parsedURL;\n\n\t\t// normalize input\n\t\tif (!isRequest(input)) {\n\t\t\tif (input && input.href) {\n\t\t\t\t// in order to support Node.js' Url objects; though WHATWG's URL objects\n\t\t\t\t// will fall into this branch also (since their `toString()` will return\n\t\t\t\t// `href` property anyway)\n\t\t\t\tparsedURL = parse_url(input.href);\n\t\t\t} else {\n\t\t\t\t// coerce input to a string before attempting to parse\n\t\t\t\tparsedURL = parse_url(`${input}`);\n\t\t\t}\n\t\t\tinput = {};\n\t\t} else {\n\t\t\tparsedURL = parse_url(input.url);\n\t\t}\n\n\t\tlet method = init.method || input.method || 'GET';\n\t\tmethod = method.toUpperCase();\n\n\t\tif ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {\n\t\t\tthrow new TypeError('Request with GET/HEAD method cannot have body');\n\t\t}\n\n\t\tlet inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;\n\n\t\tBody.call(this, inputBody, {\n\t\t\ttimeout: init.timeout || input.timeout || 0,\n\t\t\tsize: init.size || input.size || 0\n\t\t});\n\n\t\tconst headers = new Headers(init.headers || input.headers || {});\n\n\t\tif (inputBody != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(inputBody);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tlet signal = isRequest(input) ? input.signal : null;\n\t\tif ('signal' in init) signal = init.signal;\n\n\t\tif (signal != null && !isAbortSignal(signal)) {\n\t\t\tthrow new TypeError('Expected signal to be an instanceof AbortSignal');\n\t\t}\n\n\t\tthis[INTERNALS$2] = {\n\t\t\tmethod,\n\t\t\tredirect: init.redirect || input.redirect || 'follow',\n\t\t\theaders,\n\t\t\tparsedURL,\n\t\t\tsignal\n\t\t};\n\n\t\t// node-fetch-only options\n\t\tthis.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;\n\t\tthis.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? 
input.compress : true;\n\t\tthis.counter = init.counter || input.counter || 0;\n\t\tthis.agent = init.agent || input.agent;\n\t}\n\n\tget method() {\n\t\treturn this[INTERNALS$2].method;\n\t}\n\n\tget url() {\n\t\treturn format_url(this[INTERNALS$2].parsedURL);\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$2].headers;\n\t}\n\n\tget redirect() {\n\t\treturn this[INTERNALS$2].redirect;\n\t}\n\n\tget signal() {\n\t\treturn this[INTERNALS$2].signal;\n\t}\n\n\t/**\n * Clone this request\n *\n * @return Request\n */\n\tclone() {\n\t\treturn new Request(this);\n\t}\n}\n\nBody.mixIn(Request.prototype);\n\nObject.defineProperty(Request.prototype, Symbol.toStringTag, {\n\tvalue: 'Request',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Request.prototype, {\n\tmethod: { enumerable: true },\n\turl: { enumerable: true },\n\theaders: { enumerable: true },\n\tredirect: { enumerable: true },\n\tclone: { enumerable: true },\n\tsignal: { enumerable: true }\n});\n\n/**\n * Convert a Request to Node.js http request options.\n *\n * @param Request A Request instance\n * @return Object The options object to be passed to http.request\n */\nfunction getNodeRequestOptions(request) {\n\tconst parsedURL = request[INTERNALS$2].parsedURL;\n\tconst headers = new Headers(request[INTERNALS$2].headers);\n\n\t// fetch step 1.3\n\tif (!headers.has('Accept')) {\n\t\theaders.set('Accept', '*/*');\n\t}\n\n\t// Basic fetch\n\tif (!parsedURL.protocol || !parsedURL.hostname) {\n\t\tthrow new TypeError('Only absolute URLs are supported');\n\t}\n\n\tif (!/^https?:$/.test(parsedURL.protocol)) {\n\t\tthrow new TypeError('Only HTTP(S) protocols are supported');\n\t}\n\n\tif (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {\n\t\tthrow new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');\n\t}\n\n\t// HTTP-network-or-cache fetch steps 2.4-2.7\n\tlet contentLengthValue = null;\n\tif (request.body == null && /^(POST|PUT)$/i.test(request.method)) {\n\t\tcontentLengthValue = '0';\n\t}\n\tif (request.body != null) {\n\t\tconst totalBytes = getTotalBytes(request);\n\t\tif (typeof totalBytes === 'number') {\n\t\t\tcontentLengthValue = String(totalBytes);\n\t\t}\n\t}\n\tif (contentLengthValue) {\n\t\theaders.set('Content-Length', contentLengthValue);\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.11\n\tif (!headers.has('User-Agent')) {\n\t\theaders.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.15\n\tif (request.compress && !headers.has('Accept-Encoding')) {\n\t\theaders.set('Accept-Encoding', 'gzip,deflate');\n\t}\n\n\tlet agent = request.agent;\n\tif (typeof agent === 'function') {\n\t\tagent = agent(parsedURL);\n\t}\n\n\tif (!headers.has('Connection') && !agent) {\n\t\theaders.set('Connection', 'close');\n\t}\n\n\t// HTTP-network fetch step 4.2\n\t// chunked encoding is handled by Node.js\n\n\treturn Object.assign({}, parsedURL, {\n\t\tmethod: request.method,\n\t\theaders: exportNodeCompatibleHeaders(headers),\n\t\tagent\n\t});\n}\n\n/**\n * abort-error.js\n *\n * AbortError interface for cancelled requests\n */\n\n/**\n * Create AbortError instance\n *\n * @param String message Error message for human\n * @return AbortError\n */\nfunction AbortError(message) {\n Error.call(this, message);\n\n this.type = 'aborted';\n this.message = message;\n\n // hide custom error implementation details from end-users\n 
Error.captureStackTrace(this, this.constructor);\n}\n\nAbortError.prototype = Object.create(Error.prototype);\nAbortError.prototype.constructor = AbortError;\nAbortError.prototype.name = 'AbortError';\n\n// fix an issue where \"PassThrough\", \"resolve\" aren't a named export for node <10\nconst PassThrough$1 = Stream.PassThrough;\nconst resolve_url = Url.resolve;\n\n/**\n * Fetch function\n *\n * @param Mixed url Absolute url or Request instance\n * @param Object opts Fetch options\n * @return Promise\n */\nfunction fetch(url, opts) {\n\n\t// allow custom promise\n\tif (!fetch.Promise) {\n\t\tthrow new Error('native promise missing, set fetch.Promise to your favorite alternative');\n\t}\n\n\tBody.Promise = fetch.Promise;\n\n\t// wrap http.request into fetch\n\treturn new fetch.Promise(function (resolve, reject) {\n\t\t// build request object\n\t\tconst request = new Request(url, opts);\n\t\tconst options = getNodeRequestOptions(request);\n\n\t\tconst send = (options.protocol === 'https:' ? https : http).request;\n\t\tconst signal = request.signal;\n\n\t\tlet response = null;\n\n\t\tconst abort = function abort() {\n\t\t\tlet error = new AbortError('The user aborted a request.');\n\t\t\treject(error);\n\t\t\tif (request.body && request.body instanceof Stream.Readable) {\n\t\t\t\trequest.body.destroy(error);\n\t\t\t}\n\t\t\tif (!response || !response.body) return;\n\t\t\tresponse.body.emit('error', error);\n\t\t};\n\n\t\tif (signal && signal.aborted) {\n\t\t\tabort();\n\t\t\treturn;\n\t\t}\n\n\t\tconst abortAndFinalize = function abortAndFinalize() {\n\t\t\tabort();\n\t\t\tfinalize();\n\t\t};\n\n\t\t// send request\n\t\tconst req = send(options);\n\t\tlet reqTimeout;\n\n\t\tif (signal) {\n\t\t\tsignal.addEventListener('abort', abortAndFinalize);\n\t\t}\n\n\t\tfunction finalize() {\n\t\t\treq.abort();\n\t\t\tif (signal) signal.removeEventListener('abort', abortAndFinalize);\n\t\t\tclearTimeout(reqTimeout);\n\t\t}\n\n\t\tif (request.timeout) {\n\t\t\treq.once('socket', function (socket) {\n\t\t\t\treqTimeout = setTimeout(function () {\n\t\t\t\t\treject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));\n\t\t\t\t\tfinalize();\n\t\t\t\t}, request.timeout);\n\t\t\t});\n\t\t}\n\n\t\treq.on('error', function (err) {\n\t\t\treject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));\n\t\t\tfinalize();\n\t\t});\n\n\t\treq.on('response', function (res) {\n\t\t\tclearTimeout(reqTimeout);\n\n\t\t\tconst headers = createHeadersLenient(res.headers);\n\n\t\t\t// HTTP fetch step 5\n\t\t\tif (fetch.isRedirect(res.statusCode)) {\n\t\t\t\t// HTTP fetch step 5.2\n\t\t\t\tconst location = headers.get('Location');\n\n\t\t\t\t// HTTP fetch step 5.3\n\t\t\t\tconst locationURL = location === null ? 
null : resolve_url(request.url, location);\n\n\t\t\t\t// HTTP fetch step 5.5\n\t\t\t\tswitch (request.redirect) {\n\t\t\t\t\tcase 'error':\n\t\t\t\t\t\treject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\tcase 'manual':\n\t\t\t\t\t\t// node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.\n\t\t\t\t\t\tif (locationURL !== null) {\n\t\t\t\t\t\t\t// handle corrupted header\n\t\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\t\theaders.set('Location', locationURL);\n\t\t\t\t\t\t\t} catch (err) {\n\t\t\t\t\t\t\t\t// istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request\n\t\t\t\t\t\t\t\treject(err);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase 'follow':\n\t\t\t\t\t\t// HTTP-redirect fetch step 2\n\t\t\t\t\t\tif (locationURL === null) {\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 5\n\t\t\t\t\t\tif (request.counter >= request.follow) {\n\t\t\t\t\t\t\treject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 6 (counter increment)\n\t\t\t\t\t\t// Create a new Request object.\n\t\t\t\t\t\tconst requestOpts = {\n\t\t\t\t\t\t\theaders: new Headers(request.headers),\n\t\t\t\t\t\t\tfollow: request.follow,\n\t\t\t\t\t\t\tcounter: request.counter + 1,\n\t\t\t\t\t\t\tagent: request.agent,\n\t\t\t\t\t\t\tcompress: request.compress,\n\t\t\t\t\t\t\tmethod: request.method,\n\t\t\t\t\t\t\tbody: request.body,\n\t\t\t\t\t\t\tsignal: request.signal,\n\t\t\t\t\t\t\ttimeout: request.timeout,\n\t\t\t\t\t\t\tsize: request.size\n\t\t\t\t\t\t};\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 9\n\t\t\t\t\t\tif (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {\n\t\t\t\t\t\t\treject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 11\n\t\t\t\t\t\tif (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {\n\t\t\t\t\t\t\trequestOpts.method = 'GET';\n\t\t\t\t\t\t\trequestOpts.body = undefined;\n\t\t\t\t\t\t\trequestOpts.headers.delete('content-length');\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 15\n\t\t\t\t\t\tresolve(fetch(new Request(locationURL, requestOpts)));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// prepare response\n\t\t\tres.once('end', function () {\n\t\t\t\tif (signal) signal.removeEventListener('abort', abortAndFinalize);\n\t\t\t});\n\t\t\tlet body = res.pipe(new PassThrough$1());\n\n\t\t\tconst response_options = {\n\t\t\t\turl: request.url,\n\t\t\t\tstatus: res.statusCode,\n\t\t\t\tstatusText: res.statusMessage,\n\t\t\t\theaders: headers,\n\t\t\t\tsize: request.size,\n\t\t\t\ttimeout: request.timeout,\n\t\t\t\tcounter: request.counter\n\t\t\t};\n\n\t\t\t// HTTP-network fetch step 12.1.1.3\n\t\t\tconst codings = headers.get('Content-Encoding');\n\n\t\t\t// HTTP-network fetch step 12.1.1.4: handle content codings\n\n\t\t\t// in following scenarios we ignore compression support\n\t\t\t// 1. compression support is disabled\n\t\t\t// 2. HEAD request\n\t\t\t// 3. no Content-Encoding header\n\t\t\t// 4. 
no content response (204)\n\t\t\t// 5. content not modified response (304)\n\t\t\tif (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// For Node v6+\n\t\t\t// Be less strict when decoding compressed responses, since sometimes\n\t\t\t// servers send slightly invalid responses that are still accepted\n\t\t\t// by common browsers.\n\t\t\t// Always using Z_SYNC_FLUSH is what cURL does.\n\t\t\tconst zlibOptions = {\n\t\t\t\tflush: zlib.Z_SYNC_FLUSH,\n\t\t\t\tfinishFlush: zlib.Z_SYNC_FLUSH\n\t\t\t};\n\n\t\t\t// for gzip\n\t\t\tif (codings == 'gzip' || codings == 'x-gzip') {\n\t\t\t\tbody = body.pipe(zlib.createGunzip(zlibOptions));\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for deflate\n\t\t\tif (codings == 'deflate' || codings == 'x-deflate') {\n\t\t\t\t// handle the infamous raw deflate response from old servers\n\t\t\t\t// a hack for old IIS and Apache servers\n\t\t\t\tconst raw = res.pipe(new PassThrough$1());\n\t\t\t\traw.once('data', function (chunk) {\n\t\t\t\t\t// see http://stackoverflow.com/questions/37519828\n\t\t\t\t\tif ((chunk[0] & 0x0F) === 0x08) {\n\t\t\t\t\t\tbody = body.pipe(zlib.createInflate());\n\t\t\t\t\t} else {\n\t\t\t\t\t\tbody = body.pipe(zlib.createInflateRaw());\n\t\t\t\t\t}\n\t\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\t\tresolve(response);\n\t\t\t\t});\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for br\n\t\t\tif (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {\n\t\t\t\tbody = body.pipe(zlib.createBrotliDecompress());\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// otherwise, use response as-is\n\t\t\tresponse = new Response(body, response_options);\n\t\t\tresolve(response);\n\t\t});\n\n\t\twriteToStream(req, request);\n\t});\n}\n/**\n * Redirect code matching\n *\n * @param Number code Status code\n * @return Boolean\n */\nfetch.isRedirect = function (code) {\n\treturn code === 301 || code === 302 || code === 303 || code === 307 || code === 308;\n};\n\n// expose Promise\nfetch.Promise = global.Promise;\n\nmodule.exports = exports = fetch;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.default = exports;\nexports.Headers = Headers;\nexports.Request = Request;\nexports.Response = Response;\nexports.FetchError = FetchError;\n","var wrappy = require('wrappy')\nmodule.exports = wrappy(once)\nmodule.exports.strict = wrappy(onceStrict)\n\nonce.proto = once(function () {\n Object.defineProperty(Function.prototype, 'once', {\n value: function () {\n return once(this)\n },\n configurable: true\n })\n\n Object.defineProperty(Function.prototype, 'onceStrict', {\n value: function () {\n return onceStrict(this)\n },\n configurable: true\n })\n})\n\nfunction once (fn) {\n var f = function () {\n if (f.called) return f.value\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n f.called = false\n return f\n}\n\nfunction onceStrict (fn) {\n var f = function () {\n if (f.called)\n throw new Error(f.onceError)\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n var name = fn.name || 'Function wrapped with `once`'\n f.onceError = name + \" shouldn't be called more than once\"\n f.called = false\n return f\n}\n","'use 
strict';\n\nmodule.exports = require('./lib/picomatch');\n","'use strict';\n\nconst path = require('path');\nconst WIN_SLASH = '\\\\\\\\/';\nconst WIN_NO_SLASH = `[^${WIN_SLASH}]`;\n\n/**\n * Posix glob regex\n */\n\nconst DOT_LITERAL = '\\\\.';\nconst PLUS_LITERAL = '\\\\+';\nconst QMARK_LITERAL = '\\\\?';\nconst SLASH_LITERAL = '\\\\/';\nconst ONE_CHAR = '(?=.)';\nconst QMARK = '[^/]';\nconst END_ANCHOR = `(?:${SLASH_LITERAL}|$)`;\nconst START_ANCHOR = `(?:^|${SLASH_LITERAL})`;\nconst DOTS_SLASH = `${DOT_LITERAL}{1,2}${END_ANCHOR}`;\nconst NO_DOT = `(?!${DOT_LITERAL})`;\nconst NO_DOTS = `(?!${START_ANCHOR}${DOTS_SLASH})`;\nconst NO_DOT_SLASH = `(?!${DOT_LITERAL}{0,1}${END_ANCHOR})`;\nconst NO_DOTS_SLASH = `(?!${DOTS_SLASH})`;\nconst QMARK_NO_DOT = `[^.${SLASH_LITERAL}]`;\nconst STAR = `${QMARK}*?`;\n\nconst POSIX_CHARS = {\n DOT_LITERAL,\n PLUS_LITERAL,\n QMARK_LITERAL,\n SLASH_LITERAL,\n ONE_CHAR,\n QMARK,\n END_ANCHOR,\n DOTS_SLASH,\n NO_DOT,\n NO_DOTS,\n NO_DOT_SLASH,\n NO_DOTS_SLASH,\n QMARK_NO_DOT,\n STAR,\n START_ANCHOR\n};\n\n/**\n * Windows glob regex\n */\n\nconst WINDOWS_CHARS = {\n ...POSIX_CHARS,\n\n SLASH_LITERAL: `[${WIN_SLASH}]`,\n QMARK: WIN_NO_SLASH,\n STAR: `${WIN_NO_SLASH}*?`,\n DOTS_SLASH: `${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$)`,\n NO_DOT: `(?!${DOT_LITERAL})`,\n NO_DOTS: `(?!(?:^|[${WIN_SLASH}])${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`,\n NO_DOT_SLASH: `(?!${DOT_LITERAL}{0,1}(?:[${WIN_SLASH}]|$))`,\n NO_DOTS_SLASH: `(?!${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`,\n QMARK_NO_DOT: `[^.${WIN_SLASH}]`,\n START_ANCHOR: `(?:^|[${WIN_SLASH}])`,\n END_ANCHOR: `(?:[${WIN_SLASH}]|$)`\n};\n\n/**\n * POSIX Bracket Regex\n */\n\nconst POSIX_REGEX_SOURCE = {\n alnum: 'a-zA-Z0-9',\n alpha: 'a-zA-Z',\n ascii: '\\\\x00-\\\\x7F',\n blank: ' \\\\t',\n cntrl: '\\\\x00-\\\\x1F\\\\x7F',\n digit: '0-9',\n graph: '\\\\x21-\\\\x7E',\n lower: 'a-z',\n print: '\\\\x20-\\\\x7E ',\n punct: '\\\\-!\"#$%&\\'()\\\\*+,./:;<=>?@[\\\\]^_`{|}~',\n space: ' \\\\t\\\\r\\\\n\\\\v\\\\f',\n upper: 'A-Z',\n word: 'A-Za-z0-9_',\n xdigit: 'A-Fa-f0-9'\n};\n\nmodule.exports = {\n MAX_LENGTH: 1024 * 64,\n POSIX_REGEX_SOURCE,\n\n // regular expressions\n REGEX_BACKSLASH: /\\\\(?![*+?^${}(|)[\\]])/g,\n REGEX_NON_SPECIAL_CHARS: /^[^@![\\].,$*+?^{}()|\\\\/]+/,\n REGEX_SPECIAL_CHARS: /[-*+?.^${}(|)[\\]]/,\n REGEX_SPECIAL_CHARS_BACKREF: /(\\\\?)((\\W)(\\3*))/g,\n REGEX_SPECIAL_CHARS_GLOBAL: /([-*+?.^${}(|)[\\]])/g,\n REGEX_REMOVE_BACKSLASH: /(?:\\[.*?[^\\\\]\\]|\\\\(?=.))/g,\n\n // Replace globs with equivalent patterns to reduce parsing time.\n REPLACEMENTS: {\n '***': '*',\n '**/**': '**',\n '**/**/**': '**'\n },\n\n // Digits\n CHAR_0: 48, /* 0 */\n CHAR_9: 57, /* 9 */\n\n // Alphabet chars.\n CHAR_UPPERCASE_A: 65, /* A */\n CHAR_LOWERCASE_A: 97, /* a */\n CHAR_UPPERCASE_Z: 90, /* Z */\n CHAR_LOWERCASE_Z: 122, /* z */\n\n CHAR_LEFT_PARENTHESES: 40, /* ( */\n CHAR_RIGHT_PARENTHESES: 41, /* ) */\n\n CHAR_ASTERISK: 42, /* * */\n\n // Non-alphabetic chars.\n CHAR_AMPERSAND: 38, /* & */\n CHAR_AT: 64, /* @ */\n CHAR_BACKWARD_SLASH: 92, /* \\ */\n CHAR_CARRIAGE_RETURN: 13, /* \\r */\n CHAR_CIRCUMFLEX_ACCENT: 94, /* ^ */\n CHAR_COLON: 58, /* : */\n CHAR_COMMA: 44, /* , */\n CHAR_DOT: 46, /* . */\n CHAR_DOUBLE_QUOTE: 34, /* \" */\n CHAR_EQUAL: 61, /* = */\n CHAR_EXCLAMATION_MARK: 33, /* ! 
*/\n CHAR_FORM_FEED: 12, /* \\f */\n CHAR_FORWARD_SLASH: 47, /* / */\n CHAR_GRAVE_ACCENT: 96, /* ` */\n CHAR_HASH: 35, /* # */\n CHAR_HYPHEN_MINUS: 45, /* - */\n CHAR_LEFT_ANGLE_BRACKET: 60, /* < */\n CHAR_LEFT_CURLY_BRACE: 123, /* { */\n CHAR_LEFT_SQUARE_BRACKET: 91, /* [ */\n CHAR_LINE_FEED: 10, /* \\n */\n CHAR_NO_BREAK_SPACE: 160, /* \\u00A0 */\n CHAR_PERCENT: 37, /* % */\n CHAR_PLUS: 43, /* + */\n CHAR_QUESTION_MARK: 63, /* ? */\n CHAR_RIGHT_ANGLE_BRACKET: 62, /* > */\n CHAR_RIGHT_CURLY_BRACE: 125, /* } */\n CHAR_RIGHT_SQUARE_BRACKET: 93, /* ] */\n CHAR_SEMICOLON: 59, /* ; */\n CHAR_SINGLE_QUOTE: 39, /* ' */\n CHAR_SPACE: 32, /* */\n CHAR_TAB: 9, /* \\t */\n CHAR_UNDERSCORE: 95, /* _ */\n CHAR_VERTICAL_LINE: 124, /* | */\n CHAR_ZERO_WIDTH_NOBREAK_SPACE: 65279, /* \\uFEFF */\n\n SEP: path.sep,\n\n /**\n * Create EXTGLOB_CHARS\n */\n\n extglobChars(chars) {\n return {\n '!': { type: 'negate', open: '(?:(?!(?:', close: `))${chars.STAR})` },\n '?': { type: 'qmark', open: '(?:', close: ')?' },\n '+': { type: 'plus', open: '(?:', close: ')+' },\n '*': { type: 'star', open: '(?:', close: ')*' },\n '@': { type: 'at', open: '(?:', close: ')' }\n };\n },\n\n /**\n * Create GLOB_CHARS\n */\n\n globChars(win32) {\n return win32 === true ? WINDOWS_CHARS : POSIX_CHARS;\n }\n};\n","'use strict';\n\nconst constants = require('./constants');\nconst utils = require('./utils');\n\n/**\n * Constants\n */\n\nconst {\n MAX_LENGTH,\n POSIX_REGEX_SOURCE,\n REGEX_NON_SPECIAL_CHARS,\n REGEX_SPECIAL_CHARS_BACKREF,\n REPLACEMENTS\n} = constants;\n\n/**\n * Helpers\n */\n\nconst expandRange = (args, options) => {\n if (typeof options.expandRange === 'function') {\n return options.expandRange(...args, options);\n }\n\n args.sort();\n const value = `[${args.join('-')}]`;\n\n try {\n /* eslint-disable-next-line no-new */\n new RegExp(value);\n } catch (ex) {\n return args.map(v => utils.escapeRegex(v)).join('..');\n }\n\n return value;\n};\n\n/**\n * Create the message for a syntax error\n */\n\nconst syntaxError = (type, char) => {\n return `Missing ${type}: \"${char}\" - use \"\\\\\\\\${char}\" to match literal characters`;\n};\n\n/**\n * Parse the given input string.\n * @param {String} input\n * @param {Object} options\n * @return {Object}\n */\n\nconst parse = (input, options) => {\n if (typeof input !== 'string') {\n throw new TypeError('Expected a string');\n }\n\n input = REPLACEMENTS[input] || input;\n\n const opts = { ...options };\n const max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH;\n\n let len = input.length;\n if (len > max) {\n throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`);\n }\n\n const bos = { type: 'bos', value: '', output: opts.prepend || '' };\n const tokens = [bos];\n\n const capture = opts.capture ? '' : '?:';\n const win32 = utils.isWindows(options);\n\n // create constants based on platform, for windows or posix\n const PLATFORM_CHARS = constants.globChars(win32);\n const EXTGLOB_CHARS = constants.extglobChars(PLATFORM_CHARS);\n\n const {\n DOT_LITERAL,\n PLUS_LITERAL,\n SLASH_LITERAL,\n ONE_CHAR,\n DOTS_SLASH,\n NO_DOT,\n NO_DOT_SLASH,\n NO_DOTS_SLASH,\n QMARK,\n QMARK_NO_DOT,\n STAR,\n START_ANCHOR\n } = PLATFORM_CHARS;\n\n const globstar = (opts) => {\n return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`;\n };\n\n const nodot = opts.dot ? '' : NO_DOT;\n const qmarkNoDot = opts.dot ? QMARK : QMARK_NO_DOT;\n let star = opts.bash === true ? 
globstar(opts) : STAR;\n\n if (opts.capture) {\n star = `(${star})`;\n }\n\n // minimatch options support\n if (typeof opts.noext === 'boolean') {\n opts.noextglob = opts.noext;\n }\n\n const state = {\n input,\n index: -1,\n start: 0,\n dot: opts.dot === true,\n consumed: '',\n output: '',\n prefix: '',\n backtrack: false,\n negated: false,\n brackets: 0,\n braces: 0,\n parens: 0,\n quotes: 0,\n globstar: false,\n tokens\n };\n\n input = utils.removePrefix(input, state);\n len = input.length;\n\n const extglobs = [];\n const braces = [];\n const stack = [];\n let prev = bos;\n let value;\n\n /**\n * Tokenizing helpers\n */\n\n const eos = () => state.index === len - 1;\n const peek = state.peek = (n = 1) => input[state.index + n];\n const advance = state.advance = () => input[++state.index];\n const remaining = () => input.slice(state.index + 1);\n const consume = (value = '', num = 0) => {\n state.consumed += value;\n state.index += num;\n };\n const append = token => {\n state.output += token.output != null ? token.output : token.value;\n consume(token.value);\n };\n\n const negate = () => {\n let count = 1;\n\n while (peek() === '!' && (peek(2) !== '(' || peek(3) === '?')) {\n advance();\n state.start++;\n count++;\n }\n\n if (count % 2 === 0) {\n return false;\n }\n\n state.negated = true;\n state.start++;\n return true;\n };\n\n const increment = type => {\n state[type]++;\n stack.push(type);\n };\n\n const decrement = type => {\n state[type]--;\n stack.pop();\n };\n\n /**\n * Push tokens onto the tokens array. This helper speeds up\n * tokenizing by 1) helping us avoid backtracking as much as possible,\n * and 2) helping us avoid creating extra tokens when consecutive\n * characters are plain text. This improves performance and simplifies\n * lookbehinds.\n */\n\n const push = tok => {\n if (prev.type === 'globstar') {\n const isBrace = state.braces > 0 && (tok.type === 'comma' || tok.type === 'brace');\n const isExtglob = tok.extglob === true || (extglobs.length && (tok.type === 'pipe' || tok.type === 'paren'));\n\n if (tok.type !== 'slash' && tok.type !== 'paren' && !isBrace && !isExtglob) {\n state.output = state.output.slice(0, -prev.output.length);\n prev.type = 'star';\n prev.value = '*';\n prev.output = star;\n state.output += prev.output;\n }\n }\n\n if (extglobs.length && tok.type !== 'paren' && !EXTGLOB_CHARS[tok.value]) {\n extglobs[extglobs.length - 1].inner += tok.value;\n }\n\n if (tok.value || tok.output) append(tok);\n if (prev && prev.type === 'text' && tok.type === 'text') {\n prev.value += tok.value;\n prev.output = (prev.output || '') + tok.value;\n return;\n }\n\n tok.prev = prev;\n tokens.push(tok);\n prev = tok;\n };\n\n const extglobOpen = (type, value) => {\n const token = { ...EXTGLOB_CHARS[value], conditions: 1, inner: '' };\n\n token.prev = prev;\n token.parens = state.parens;\n token.output = state.output;\n const output = (opts.capture ? '(' : '') + token.open;\n\n increment('parens');\n push({ type, value, output: state.output ? '' : ONE_CHAR });\n push({ type: 'paren', extglob: true, value: advance(), output });\n extglobs.push(token);\n };\n\n const extglobClose = token => {\n let output = token.close + (opts.capture ? 
')' : '');\n\n if (token.type === 'negate') {\n let extglobStar = star;\n\n if (token.inner && token.inner.length > 1 && token.inner.includes('/')) {\n extglobStar = globstar(opts);\n }\n\n if (extglobStar !== star || eos() || /^\\)+$/.test(remaining())) {\n output = token.close = `)$))${extglobStar}`;\n }\n\n if (token.prev.type === 'bos' && eos()) {\n state.negatedExtglob = true;\n }\n }\n\n push({ type: 'paren', extglob: true, value, output });\n decrement('parens');\n };\n\n /**\n * Fast paths\n */\n\n if (opts.fastpaths !== false && !/(^[*!]|[/()[\\]{}\"])/.test(input)) {\n let backslashes = false;\n\n let output = input.replace(REGEX_SPECIAL_CHARS_BACKREF, (m, esc, chars, first, rest, index) => {\n if (first === '\\\\') {\n backslashes = true;\n return m;\n }\n\n if (first === '?') {\n if (esc) {\n return esc + first + (rest ? QMARK.repeat(rest.length) : '');\n }\n if (index === 0) {\n return qmarkNoDot + (rest ? QMARK.repeat(rest.length) : '');\n }\n return QMARK.repeat(chars.length);\n }\n\n if (first === '.') {\n return DOT_LITERAL.repeat(chars.length);\n }\n\n if (first === '*') {\n if (esc) {\n return esc + first + (rest ? star : '');\n }\n return star;\n }\n return esc ? m : `\\\\${m}`;\n });\n\n if (backslashes === true) {\n if (opts.unescape === true) {\n output = output.replace(/\\\\/g, '');\n } else {\n output = output.replace(/\\\\+/g, m => {\n return m.length % 2 === 0 ? '\\\\\\\\' : (m ? '\\\\' : '');\n });\n }\n }\n\n if (output === input && opts.contains === true) {\n state.output = input;\n return state;\n }\n\n state.output = utils.wrapOutput(output, state, options);\n return state;\n }\n\n /**\n * Tokenize input until we reach end-of-string\n */\n\n while (!eos()) {\n value = advance();\n\n if (value === '\\u0000') {\n continue;\n }\n\n /**\n * Escaped characters\n */\n\n if (value === '\\\\') {\n const next = peek();\n\n if (next === '/' && opts.bash !== true) {\n continue;\n }\n\n if (next === '.' || next === ';') {\n continue;\n }\n\n if (!next) {\n value += '\\\\';\n push({ type: 'text', value });\n continue;\n }\n\n // collapse slashes to reduce potential for exploits\n const match = /^\\\\+/.exec(remaining());\n let slashes = 0;\n\n if (match && match[0].length > 2) {\n slashes = match[0].length;\n state.index += slashes;\n if (slashes % 2 !== 0) {\n value += '\\\\';\n }\n }\n\n if (opts.unescape === true) {\n value = advance() || '';\n } else {\n value += advance() || '';\n }\n\n if (state.brackets === 0) {\n push({ type: 'text', value });\n continue;\n }\n }\n\n /**\n * If we're inside a regex character class, continue\n * until we reach the closing bracket.\n */\n\n if (state.brackets > 0 && (value !== ']' || prev.value === '[' || prev.value === '[^')) {\n if (opts.posix !== false && value === ':') {\n const inner = prev.value.slice(1);\n if (inner.includes('[')) {\n prev.posix = true;\n\n if (inner.includes(':')) {\n const idx = prev.value.lastIndexOf('[');\n const pre = prev.value.slice(0, idx);\n const rest = prev.value.slice(idx + 2);\n const posix = POSIX_REGEX_SOURCE[rest];\n if (posix) {\n prev.value = pre + posix;\n state.backtrack = true;\n advance();\n\n if (!bos.output && tokens.indexOf(prev) === 1) {\n bos.output = ONE_CHAR;\n }\n continue;\n }\n }\n }\n }\n\n if ((value === '[' && peek() !== ':') || (value === '-' && peek() === ']')) {\n value = `\\\\${value}`;\n }\n\n if (value === ']' && (prev.value === '[' || prev.value === '[^')) {\n value = `\\\\${value}`;\n }\n\n if (opts.posix === true && value === '!' 
&& prev.value === '[') {\n value = '^';\n }\n\n prev.value += value;\n append({ value });\n continue;\n }\n\n /**\n * If we're inside a quoted string, continue\n * until we reach the closing double quote.\n */\n\n if (state.quotes === 1 && value !== '\"') {\n value = utils.escapeRegex(value);\n prev.value += value;\n append({ value });\n continue;\n }\n\n /**\n * Double quotes\n */\n\n if (value === '\"') {\n state.quotes = state.quotes === 1 ? 0 : 1;\n if (opts.keepQuotes === true) {\n push({ type: 'text', value });\n }\n continue;\n }\n\n /**\n * Parentheses\n */\n\n if (value === '(') {\n increment('parens');\n push({ type: 'paren', value });\n continue;\n }\n\n if (value === ')') {\n if (state.parens === 0 && opts.strictBrackets === true) {\n throw new SyntaxError(syntaxError('opening', '('));\n }\n\n const extglob = extglobs[extglobs.length - 1];\n if (extglob && state.parens === extglob.parens + 1) {\n extglobClose(extglobs.pop());\n continue;\n }\n\n push({ type: 'paren', value, output: state.parens ? ')' : '\\\\)' });\n decrement('parens');\n continue;\n }\n\n /**\n * Square brackets\n */\n\n if (value === '[') {\n if (opts.nobracket === true || !remaining().includes(']')) {\n if (opts.nobracket !== true && opts.strictBrackets === true) {\n throw new SyntaxError(syntaxError('closing', ']'));\n }\n\n value = `\\\\${value}`;\n } else {\n increment('brackets');\n }\n\n push({ type: 'bracket', value });\n continue;\n }\n\n if (value === ']') {\n if (opts.nobracket === true || (prev && prev.type === 'bracket' && prev.value.length === 1)) {\n push({ type: 'text', value, output: `\\\\${value}` });\n continue;\n }\n\n if (state.brackets === 0) {\n if (opts.strictBrackets === true) {\n throw new SyntaxError(syntaxError('opening', '['));\n }\n\n push({ type: 'text', value, output: `\\\\${value}` });\n continue;\n }\n\n decrement('brackets');\n\n const prevValue = prev.value.slice(1);\n if (prev.posix !== true && prevValue[0] === '^' && !prevValue.includes('/')) {\n value = `/${value}`;\n }\n\n prev.value += value;\n append({ value });\n\n // when literal brackets are explicitly disabled\n // assume we should match with a regex character class\n if (opts.literalBrackets === false || utils.hasRegexChars(prevValue)) {\n continue;\n }\n\n const escaped = utils.escapeRegex(prev.value);\n state.output = state.output.slice(0, -prev.value.length);\n\n // when literal brackets are explicitly enabled\n // assume we should escape the brackets to match literal characters\n if (opts.literalBrackets === true) {\n state.output += escaped;\n prev.value = escaped;\n continue;\n }\n\n // when the user specifies nothing, try to match both\n prev.value = `(${capture}${escaped}|${prev.value})`;\n state.output += prev.value;\n continue;\n }\n\n /**\n * Braces\n */\n\n if (value === '{' && opts.nobrace !== true) {\n increment('braces');\n\n const open = {\n type: 'brace',\n value,\n output: '(',\n outputIndex: state.output.length,\n tokensIndex: state.tokens.length\n };\n\n braces.push(open);\n push(open);\n continue;\n }\n\n if (value === '}') {\n const brace = braces[braces.length - 1];\n\n if (opts.nobrace === true || !brace) {\n push({ type: 'text', value, output: value });\n continue;\n }\n\n let output = ')';\n\n if (brace.dots === true) {\n const arr = tokens.slice();\n const range = [];\n\n for (let i = arr.length - 1; i >= 0; i--) {\n tokens.pop();\n if (arr[i].type === 'brace') {\n break;\n }\n if (arr[i].type !== 'dots') {\n range.unshift(arr[i].value);\n }\n }\n\n output = expandRange(range, opts);\n 
state.backtrack = true;\n }\n\n if (brace.comma !== true && brace.dots !== true) {\n const out = state.output.slice(0, brace.outputIndex);\n const toks = state.tokens.slice(brace.tokensIndex);\n brace.value = brace.output = '\\\\{';\n value = output = '\\\\}';\n state.output = out;\n for (const t of toks) {\n state.output += (t.output || t.value);\n }\n }\n\n push({ type: 'brace', value, output });\n decrement('braces');\n braces.pop();\n continue;\n }\n\n /**\n * Pipes\n */\n\n if (value === '|') {\n if (extglobs.length > 0) {\n extglobs[extglobs.length - 1].conditions++;\n }\n push({ type: 'text', value });\n continue;\n }\n\n /**\n * Commas\n */\n\n if (value === ',') {\n let output = value;\n\n const brace = braces[braces.length - 1];\n if (brace && stack[stack.length - 1] === 'braces') {\n brace.comma = true;\n output = '|';\n }\n\n push({ type: 'comma', value, output });\n continue;\n }\n\n /**\n * Slashes\n */\n\n if (value === '/') {\n // if the beginning of the glob is \"./\", advance the start\n // to the current index, and don't add the \"./\" characters\n // to the state. This greatly simplifies lookbehinds when\n // checking for BOS characters like \"!\" and \".\" (not \"./\")\n if (prev.type === 'dot' && state.index === state.start + 1) {\n state.start = state.index + 1;\n state.consumed = '';\n state.output = '';\n tokens.pop();\n prev = bos; // reset \"prev\" to the first token\n continue;\n }\n\n push({ type: 'slash', value, output: SLASH_LITERAL });\n continue;\n }\n\n /**\n * Dots\n */\n\n if (value === '.') {\n if (state.braces > 0 && prev.type === 'dot') {\n if (prev.value === '.') prev.output = DOT_LITERAL;\n const brace = braces[braces.length - 1];\n prev.type = 'dots';\n prev.output += value;\n prev.value += value;\n brace.dots = true;\n continue;\n }\n\n if ((state.braces + state.parens) === 0 && prev.type !== 'bos' && prev.type !== 'slash') {\n push({ type: 'text', value, output: DOT_LITERAL });\n continue;\n }\n\n push({ type: 'dot', value, output: DOT_LITERAL });\n continue;\n }\n\n /**\n * Question marks\n */\n\n if (value === '?') {\n const isGroup = prev && prev.value === '(';\n if (!isGroup && opts.noextglob !== true && peek() === '(' && peek(2) !== '?') {\n extglobOpen('qmark', value);\n continue;\n }\n\n if (prev && prev.type === 'paren') {\n const next = peek();\n let output = value;\n\n if (next === '<' && !utils.supportsLookbehinds()) {\n throw new Error('Node.js v10 or higher is required for regex lookbehinds');\n }\n\n if ((prev.value === '(' && !/[!=<:]/.test(next)) || (next === '<' && !/<([!=]|\\w+>)/.test(remaining()))) {\n output = `\\\\${value}`;\n }\n\n push({ type: 'text', value, output });\n continue;\n }\n\n if (opts.dot !== true && (prev.type === 'slash' || prev.type === 'bos')) {\n push({ type: 'qmark', value, output: QMARK_NO_DOT });\n continue;\n }\n\n push({ type: 'qmark', value, output: QMARK });\n continue;\n }\n\n /**\n * Exclamation\n */\n\n if (value === '!') {\n if (opts.noextglob !== true && peek() === '(') {\n if (peek(2) !== '?' 
|| !/[!=<:]/.test(peek(3))) {\n extglobOpen('negate', value);\n continue;\n }\n }\n\n if (opts.nonegate !== true && state.index === 0) {\n negate();\n continue;\n }\n }\n\n /**\n * Plus\n */\n\n if (value === '+') {\n if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') {\n extglobOpen('plus', value);\n continue;\n }\n\n if ((prev && prev.value === '(') || opts.regex === false) {\n push({ type: 'plus', value, output: PLUS_LITERAL });\n continue;\n }\n\n if ((prev && (prev.type === 'bracket' || prev.type === 'paren' || prev.type === 'brace')) || state.parens > 0) {\n push({ type: 'plus', value });\n continue;\n }\n\n push({ type: 'plus', value: PLUS_LITERAL });\n continue;\n }\n\n /**\n * Plain text\n */\n\n if (value === '@') {\n if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') {\n push({ type: 'at', extglob: true, value, output: '' });\n continue;\n }\n\n push({ type: 'text', value });\n continue;\n }\n\n /**\n * Plain text\n */\n\n if (value !== '*') {\n if (value === '$' || value === '^') {\n value = `\\\\${value}`;\n }\n\n const match = REGEX_NON_SPECIAL_CHARS.exec(remaining());\n if (match) {\n value += match[0];\n state.index += match[0].length;\n }\n\n push({ type: 'text', value });\n continue;\n }\n\n /**\n * Stars\n */\n\n if (prev && (prev.type === 'globstar' || prev.star === true)) {\n prev.type = 'star';\n prev.star = true;\n prev.value += value;\n prev.output = star;\n state.backtrack = true;\n state.globstar = true;\n consume(value);\n continue;\n }\n\n let rest = remaining();\n if (opts.noextglob !== true && /^\\([^?]/.test(rest)) {\n extglobOpen('star', value);\n continue;\n }\n\n if (prev.type === 'star') {\n if (opts.noglobstar === true) {\n consume(value);\n continue;\n }\n\n const prior = prev.prev;\n const before = prior.prev;\n const isStart = prior.type === 'slash' || prior.type === 'bos';\n const afterStar = before && (before.type === 'star' || before.type === 'globstar');\n\n if (opts.bash === true && (!isStart || (rest[0] && rest[0] !== '/'))) {\n push({ type: 'star', value, output: '' });\n continue;\n }\n\n const isBrace = state.braces > 0 && (prior.type === 'comma' || prior.type === 'brace');\n const isExtglob = extglobs.length && (prior.type === 'pipe' || prior.type === 'paren');\n if (!isStart && prior.type !== 'paren' && !isBrace && !isExtglob) {\n push({ type: 'star', value, output: '' });\n continue;\n }\n\n // strip consecutive `/**/`\n while (rest.slice(0, 3) === '/**') {\n const after = input[state.index + 4];\n if (after && after !== '/') {\n break;\n }\n rest = rest.slice(3);\n consume('/**', 3);\n }\n\n if (prior.type === 'bos' && eos()) {\n prev.type = 'globstar';\n prev.value += value;\n prev.output = globstar(opts);\n state.output = prev.output;\n state.globstar = true;\n consume(value);\n continue;\n }\n\n if (prior.type === 'slash' && prior.prev.type !== 'bos' && !afterStar && eos()) {\n state.output = state.output.slice(0, -(prior.output + prev.output).length);\n prior.output = `(?:${prior.output}`;\n\n prev.type = 'globstar';\n prev.output = globstar(opts) + (opts.strictSlashes ? ')' : '|$)');\n prev.value += value;\n state.globstar = true;\n state.output += prior.output + prev.output;\n consume(value);\n continue;\n }\n\n if (prior.type === 'slash' && prior.prev.type !== 'bos' && rest[0] === '/') {\n const end = rest[1] !== void 0 ? 
'|$' : '';\n\n state.output = state.output.slice(0, -(prior.output + prev.output).length);\n prior.output = `(?:${prior.output}`;\n\n prev.type = 'globstar';\n prev.output = `${globstar(opts)}${SLASH_LITERAL}|${SLASH_LITERAL}${end})`;\n prev.value += value;\n\n state.output += prior.output + prev.output;\n state.globstar = true;\n\n consume(value + advance());\n\n push({ type: 'slash', value: '/', output: '' });\n continue;\n }\n\n if (prior.type === 'bos' && rest[0] === '/') {\n prev.type = 'globstar';\n prev.value += value;\n prev.output = `(?:^|${SLASH_LITERAL}|${globstar(opts)}${SLASH_LITERAL})`;\n state.output = prev.output;\n state.globstar = true;\n consume(value + advance());\n push({ type: 'slash', value: '/', output: '' });\n continue;\n }\n\n // remove single star from output\n state.output = state.output.slice(0, -prev.output.length);\n\n // reset previous token to globstar\n prev.type = 'globstar';\n prev.output = globstar(opts);\n prev.value += value;\n\n // reset output with globstar\n state.output += prev.output;\n state.globstar = true;\n consume(value);\n continue;\n }\n\n const token = { type: 'star', value, output: star };\n\n if (opts.bash === true) {\n token.output = '.*?';\n if (prev.type === 'bos' || prev.type === 'slash') {\n token.output = nodot + token.output;\n }\n push(token);\n continue;\n }\n\n if (prev && (prev.type === 'bracket' || prev.type === 'paren') && opts.regex === true) {\n token.output = value;\n push(token);\n continue;\n }\n\n if (state.index === state.start || prev.type === 'slash' || prev.type === 'dot') {\n if (prev.type === 'dot') {\n state.output += NO_DOT_SLASH;\n prev.output += NO_DOT_SLASH;\n\n } else if (opts.dot === true) {\n state.output += NO_DOTS_SLASH;\n prev.output += NO_DOTS_SLASH;\n\n } else {\n state.output += nodot;\n prev.output += nodot;\n }\n\n if (peek() !== '*') {\n state.output += ONE_CHAR;\n prev.output += ONE_CHAR;\n }\n }\n\n push(token);\n }\n\n while (state.brackets > 0) {\n if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ']'));\n state.output = utils.escapeLast(state.output, '[');\n decrement('brackets');\n }\n\n while (state.parens > 0) {\n if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ')'));\n state.output = utils.escapeLast(state.output, '(');\n decrement('parens');\n }\n\n while (state.braces > 0) {\n if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', '}'));\n state.output = utils.escapeLast(state.output, '{');\n decrement('braces');\n }\n\n if (opts.strictSlashes !== true && (prev.type === 'star' || prev.type === 'bracket')) {\n push({ type: 'maybe_slash', value: '', output: `${SLASH_LITERAL}?` });\n }\n\n // rebuild the output if we had to backtrack at any point\n if (state.backtrack === true) {\n state.output = '';\n\n for (const token of state.tokens) {\n state.output += token.output != null ? token.output : token.value;\n\n if (token.suffix) {\n state.output += token.suffix;\n }\n }\n }\n\n return state;\n};\n\n/**\n * Fast paths for creating regular expressions for common glob patterns.\n * This can significantly speed up processing and has very little downside\n * impact when none of the fast paths match.\n */\n\nparse.fastpaths = (input, options) => {\n const opts = { ...options };\n const max = typeof opts.maxLength === 'number' ? 
Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH;\n const len = input.length;\n if (len > max) {\n throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`);\n }\n\n input = REPLACEMENTS[input] || input;\n const win32 = utils.isWindows(options);\n\n // create constants based on platform, for windows or posix\n const {\n DOT_LITERAL,\n SLASH_LITERAL,\n ONE_CHAR,\n DOTS_SLASH,\n NO_DOT,\n NO_DOTS,\n NO_DOTS_SLASH,\n STAR,\n START_ANCHOR\n } = constants.globChars(win32);\n\n const nodot = opts.dot ? NO_DOTS : NO_DOT;\n const slashDot = opts.dot ? NO_DOTS_SLASH : NO_DOT;\n const capture = opts.capture ? '' : '?:';\n const state = { negated: false, prefix: '' };\n let star = opts.bash === true ? '.*?' : STAR;\n\n if (opts.capture) {\n star = `(${star})`;\n }\n\n const globstar = (opts) => {\n if (opts.noglobstar === true) return star;\n return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`;\n };\n\n const create = str => {\n switch (str) {\n case '*':\n return `${nodot}${ONE_CHAR}${star}`;\n\n case '.*':\n return `${DOT_LITERAL}${ONE_CHAR}${star}`;\n\n case '*.*':\n return `${nodot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`;\n\n case '*/*':\n return `${nodot}${star}${SLASH_LITERAL}${ONE_CHAR}${slashDot}${star}`;\n\n case '**':\n return nodot + globstar(opts);\n\n case '**/*':\n return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${ONE_CHAR}${star}`;\n\n case '**/*.*':\n return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`;\n\n case '**/.*':\n return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${DOT_LITERAL}${ONE_CHAR}${star}`;\n\n default: {\n const match = /^(.*?)\\.(\\w+)$/.exec(str);\n if (!match) return;\n\n const source = create(match[1]);\n if (!source) return;\n\n return source + DOT_LITERAL + match[2];\n }\n }\n };\n\n const output = utils.removePrefix(input, state);\n let source = create(output);\n\n if (source && opts.strictSlashes !== true) {\n source += `${SLASH_LITERAL}?`;\n }\n\n return source;\n};\n\nmodule.exports = parse;\n","'use strict';\n\nconst path = require('path');\nconst scan = require('./scan');\nconst parse = require('./parse');\nconst utils = require('./utils');\nconst constants = require('./constants');\nconst isObject = val => val && typeof val === 'object' && !Array.isArray(val);\n\n/**\n * Creates a matcher function from one or more glob patterns. The\n * returned function takes a string to match as its first argument,\n * and returns true if the string is a match. 
The returned matcher\n * function also takes a boolean as the second argument that, when true,\n * returns an object with additional information.\n *\n * ```js\n * const picomatch = require('picomatch');\n * // picomatch(glob[, options]);\n *\n * const isMatch = picomatch('*.!(*a)');\n * console.log(isMatch('a.a')); //=> false\n * console.log(isMatch('a.b')); //=> true\n * ```\n * @name picomatch\n * @param {String|Array} `globs` One or more glob patterns.\n * @param {Object=} `options`\n * @return {Function=} Returns a matcher function.\n * @api public\n */\n\nconst picomatch = (glob, options, returnState = false) => {\n if (Array.isArray(glob)) {\n const fns = glob.map(input => picomatch(input, options, returnState));\n const arrayMatcher = str => {\n for (const isMatch of fns) {\n const state = isMatch(str);\n if (state) return state;\n }\n return false;\n };\n return arrayMatcher;\n }\n\n const isState = isObject(glob) && glob.tokens && glob.input;\n\n if (glob === '' || (typeof glob !== 'string' && !isState)) {\n throw new TypeError('Expected pattern to be a non-empty string');\n }\n\n const opts = options || {};\n const posix = utils.isWindows(options);\n const regex = isState\n ? picomatch.compileRe(glob, options)\n : picomatch.makeRe(glob, options, false, true);\n\n const state = regex.state;\n delete regex.state;\n\n let isIgnored = () => false;\n if (opts.ignore) {\n const ignoreOpts = { ...options, ignore: null, onMatch: null, onResult: null };\n isIgnored = picomatch(opts.ignore, ignoreOpts, returnState);\n }\n\n const matcher = (input, returnObject = false) => {\n const { isMatch, match, output } = picomatch.test(input, regex, options, { glob, posix });\n const result = { glob, state, regex, posix, input, output, match, isMatch };\n\n if (typeof opts.onResult === 'function') {\n opts.onResult(result);\n }\n\n if (isMatch === false) {\n result.isMatch = false;\n return returnObject ? result : false;\n }\n\n if (isIgnored(input)) {\n if (typeof opts.onIgnore === 'function') {\n opts.onIgnore(result);\n }\n result.isMatch = false;\n return returnObject ? result : false;\n }\n\n if (typeof opts.onMatch === 'function') {\n opts.onMatch(result);\n }\n return returnObject ? result : true;\n };\n\n if (returnState) {\n matcher.state = state;\n }\n\n return matcher;\n};\n\n/**\n * Test `input` with the given `regex`. This is used by the main\n * `picomatch()` function to test the input string.\n *\n * ```js\n * const picomatch = require('picomatch');\n * // picomatch.test(input, regex[, options]);\n *\n * console.log(picomatch.test('foo/bar', /^(?:([^/]*?)\\/([^/]*?))$/));\n * // { isMatch: true, match: [ 'foo/', 'foo', 'bar' ], output: 'foo/bar' }\n * ```\n * @param {String} `input` String to test.\n * @param {RegExp} `regex`\n * @return {Object} Returns an object with matching info.\n * @api public\n */\n\npicomatch.test = (input, regex, options, { glob, posix } = {}) => {\n if (typeof input !== 'string') {\n throw new TypeError('Expected input to be a string');\n }\n\n if (input === '') {\n return { isMatch: false, output: '' };\n }\n\n const opts = options || {};\n const format = opts.format || (posix ? utils.toPosixSlashes : null);\n let match = input === glob;\n let output = (match && format) ? format(input) : input;\n\n if (match === false) {\n output = format ? 
format(input) : input;\n match = output === glob;\n }\n\n if (match === false || opts.capture === true) {\n if (opts.matchBase === true || opts.basename === true) {\n match = picomatch.matchBase(input, regex, options, posix);\n } else {\n match = regex.exec(output);\n }\n }\n\n return { isMatch: Boolean(match), match, output };\n};\n\n/**\n * Match the basename of a filepath.\n *\n * ```js\n * const picomatch = require('picomatch');\n * // picomatch.matchBase(input, glob[, options]);\n * console.log(picomatch.matchBase('foo/bar.js', '*.js'); // true\n * ```\n * @param {String} `input` String to test.\n * @param {RegExp|String} `glob` Glob pattern or regex created by [.makeRe](#makeRe).\n * @return {Boolean}\n * @api public\n */\n\npicomatch.matchBase = (input, glob, options, posix = utils.isWindows(options)) => {\n const regex = glob instanceof RegExp ? glob : picomatch.makeRe(glob, options);\n return regex.test(path.basename(input));\n};\n\n/**\n * Returns true if **any** of the given glob `patterns` match the specified `string`.\n *\n * ```js\n * const picomatch = require('picomatch');\n * // picomatch.isMatch(string, patterns[, options]);\n *\n * console.log(picomatch.isMatch('a.a', ['b.*', '*.a'])); //=> true\n * console.log(picomatch.isMatch('a.a', 'b.*')); //=> false\n * ```\n * @param {String|Array} str The string to test.\n * @param {String|Array} patterns One or more glob patterns to use for matching.\n * @param {Object} [options] See available [options](#options).\n * @return {Boolean} Returns true if any patterns match `str`\n * @api public\n */\n\npicomatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str);\n\n/**\n * Parse a glob pattern to create the source string for a regular\n * expression.\n *\n * ```js\n * const picomatch = require('picomatch');\n * const result = picomatch.parse(pattern[, options]);\n * ```\n * @param {String} `pattern`\n * @param {Object} `options`\n * @return {Object} Returns an object with useful properties and output to be used as a regex source string.\n * @api public\n */\n\npicomatch.parse = (pattern, options) => {\n if (Array.isArray(pattern)) return pattern.map(p => picomatch.parse(p, options));\n return parse(pattern, { ...options, fastpaths: false });\n};\n\n/**\n * Scan a glob pattern to separate the pattern into segments.\n *\n * ```js\n * const picomatch = require('picomatch');\n * // picomatch.scan(input[, options]);\n *\n * const result = picomatch.scan('!./foo/*.js');\n * console.log(result);\n * { prefix: '!./',\n * input: '!./foo/*.js',\n * start: 3,\n * base: 'foo',\n * glob: '*.js',\n * isBrace: false,\n * isBracket: false,\n * isGlob: true,\n * isExtglob: false,\n * isGlobstar: false,\n * negated: true }\n * ```\n * @param {String} `input` Glob pattern to scan.\n * @param {Object} `options`\n * @return {Object} Returns an object with\n * @api public\n */\n\npicomatch.scan = (input, options) => scan(input, options);\n\n/**\n * Create a regular expression from a parsed glob pattern.\n *\n * ```js\n * const picomatch = require('picomatch');\n * const state = picomatch.parse('*.js');\n * // picomatch.compileRe(state[, options]);\n *\n * console.log(picomatch.compileRe(state));\n * //=> /^(?:(?!\\.)(?=.)[^/]*?\\.js)$/\n * ```\n * @param {String} `state` The object returned from the `.parse` method.\n * @param {Object} `options`\n * @return {RegExp} Returns a regex created from the given pattern.\n * @api public\n */\n\npicomatch.compileRe = (parsed, options, returnOutput = false, returnState = false) => {\n if 
(returnOutput === true) {\n return parsed.output;\n }\n\n const opts = options || {};\n const prepend = opts.contains ? '' : '^';\n const append = opts.contains ? '' : '$';\n\n let source = `${prepend}(?:${parsed.output})${append}`;\n if (parsed && parsed.negated === true) {\n source = `^(?!${source}).*$`;\n }\n\n const regex = picomatch.toRegex(source, options);\n if (returnState === true) {\n regex.state = parsed;\n }\n\n return regex;\n};\n\npicomatch.makeRe = (input, options, returnOutput = false, returnState = false) => {\n if (!input || typeof input !== 'string') {\n throw new TypeError('Expected a non-empty string');\n }\n\n const opts = options || {};\n let parsed = { negated: false, fastpaths: true };\n let prefix = '';\n let output;\n\n if (input.startsWith('./')) {\n input = input.slice(2);\n prefix = parsed.prefix = './';\n }\n\n if (opts.fastpaths !== false && (input[0] === '.' || input[0] === '*')) {\n output = parse.fastpaths(input, options);\n }\n\n if (output === undefined) {\n parsed = parse(input, options);\n parsed.prefix = prefix + (parsed.prefix || '');\n } else {\n parsed.output = output;\n }\n\n return picomatch.compileRe(parsed, options, returnOutput, returnState);\n};\n\n/**\n * Create a regular expression from the given regex source string.\n *\n * ```js\n * const picomatch = require('picomatch');\n * // picomatch.toRegex(source[, options]);\n *\n * const { output } = picomatch.parse('*.js');\n * console.log(picomatch.toRegex(output));\n * //=> /^(?:(?!\\.)(?=.)[^/]*?\\.js)$/\n * ```\n * @param {String} `source` Regular expression source string.\n * @param {Object} `options`\n * @return {RegExp}\n * @api public\n */\n\npicomatch.toRegex = (source, options) => {\n try {\n const opts = options || {};\n return new RegExp(source, opts.flags || (opts.nocase ? 'i' : ''));\n } catch (err) {\n if (options && options.debug === true) throw err;\n return /$^/;\n }\n};\n\n/**\n * Picomatch constants.\n * @return {Object}\n */\n\npicomatch.constants = constants;\n\n/**\n * Expose \"picomatch\"\n */\n\nmodule.exports = picomatch;\n","'use strict';\n\nconst utils = require('./utils');\nconst {\n CHAR_ASTERISK, /* * */\n CHAR_AT, /* @ */\n CHAR_BACKWARD_SLASH, /* \\ */\n CHAR_COMMA, /* , */\n CHAR_DOT, /* . */\n CHAR_EXCLAMATION_MARK, /* ! */\n CHAR_FORWARD_SLASH, /* / */\n CHAR_LEFT_CURLY_BRACE, /* { */\n CHAR_LEFT_PARENTHESES, /* ( */\n CHAR_LEFT_SQUARE_BRACKET, /* [ */\n CHAR_PLUS, /* + */\n CHAR_QUESTION_MARK, /* ? */\n CHAR_RIGHT_CURLY_BRACE, /* } */\n CHAR_RIGHT_PARENTHESES, /* ) */\n CHAR_RIGHT_SQUARE_BRACKET /* ] */\n} = require('./constants');\n\nconst isPathSeparator = code => {\n return code === CHAR_FORWARD_SLASH || code === CHAR_BACKWARD_SLASH;\n};\n\nconst depth = token => {\n if (token.isPrefix !== true) {\n token.depth = token.isGlobstar ? 
Infinity : 1;\n }\n};\n\n/**\n * Quickly scans a glob pattern and returns an object with a handful of\n * useful properties, like `isGlob`, `path` (the leading non-glob, if it exists),\n * `glob` (the actual pattern), and `negated` (true if the path starts with `!`).\n *\n * ```js\n * const pm = require('picomatch');\n * console.log(pm.scan('foo/bar/*.js'));\n * { isGlob: true, input: 'foo/bar/*.js', base: 'foo/bar', glob: '*.js' }\n * ```\n * @param {String} `str`\n * @param {Object} `options`\n * @return {Object} Returns an object with tokens and regex source string.\n * @api public\n */\n\nconst scan = (input, options) => {\n const opts = options || {};\n\n const length = input.length - 1;\n const scanToEnd = opts.parts === true || opts.scanToEnd === true;\n const slashes = [];\n const tokens = [];\n const parts = [];\n\n let str = input;\n let index = -1;\n let start = 0;\n let lastIndex = 0;\n let isBrace = false;\n let isBracket = false;\n let isGlob = false;\n let isExtglob = false;\n let isGlobstar = false;\n let braceEscaped = false;\n let backslashes = false;\n let negated = false;\n let finished = false;\n let braces = 0;\n let prev;\n let code;\n let token = { value: '', depth: 0, isGlob: false };\n\n const eos = () => index >= length;\n const peek = () => str.charCodeAt(index + 1);\n const advance = () => {\n prev = code;\n return str.charCodeAt(++index);\n };\n\n while (index < length) {\n code = advance();\n let next;\n\n if (code === CHAR_BACKWARD_SLASH) {\n backslashes = token.backslashes = true;\n code = advance();\n\n if (code === CHAR_LEFT_CURLY_BRACE) {\n braceEscaped = true;\n }\n continue;\n }\n\n if (braceEscaped === true || code === CHAR_LEFT_CURLY_BRACE) {\n braces++;\n\n while (eos() !== true && (code = advance())) {\n if (code === CHAR_BACKWARD_SLASH) {\n backslashes = token.backslashes = true;\n advance();\n continue;\n }\n\n if (code === CHAR_LEFT_CURLY_BRACE) {\n braces++;\n continue;\n }\n\n if (braceEscaped !== true && code === CHAR_DOT && (code = advance()) === CHAR_DOT) {\n isBrace = token.isBrace = true;\n isGlob = token.isGlob = true;\n finished = true;\n\n if (scanToEnd === true) {\n continue;\n }\n\n break;\n }\n\n if (braceEscaped !== true && code === CHAR_COMMA) {\n isBrace = token.isBrace = true;\n isGlob = token.isGlob = true;\n finished = true;\n\n if (scanToEnd === true) {\n continue;\n }\n\n break;\n }\n\n if (code === CHAR_RIGHT_CURLY_BRACE) {\n braces--;\n\n if (braces === 0) {\n braceEscaped = false;\n isBrace = token.isBrace = true;\n finished = true;\n break;\n }\n }\n }\n\n if (scanToEnd === true) {\n continue;\n }\n\n break;\n }\n\n if (code === CHAR_FORWARD_SLASH) {\n slashes.push(index);\n tokens.push(token);\n token = { value: '', depth: 0, isGlob: false };\n\n if (finished === true) continue;\n if (prev === CHAR_DOT && index === (start + 1)) {\n start += 2;\n continue;\n }\n\n lastIndex = index + 1;\n continue;\n }\n\n if (opts.noext !== true) {\n const isExtglobChar = code === CHAR_PLUS\n || code === CHAR_AT\n || code === CHAR_ASTERISK\n || code === CHAR_QUESTION_MARK\n || code === CHAR_EXCLAMATION_MARK;\n\n if (isExtglobChar === true && peek() === CHAR_LEFT_PARENTHESES) {\n isGlob = token.isGlob = true;\n isExtglob = token.isExtglob = true;\n finished = true;\n\n if (scanToEnd === true) {\n while (eos() !== true && (code = advance())) {\n if (code === CHAR_BACKWARD_SLASH) {\n backslashes = token.backslashes = true;\n code = advance();\n continue;\n }\n\n if (code === CHAR_RIGHT_PARENTHESES) {\n isGlob = token.isGlob = true;\n 
finished = true;\n break;\n }\n }\n continue;\n }\n break;\n }\n }\n\n if (code === CHAR_ASTERISK) {\n if (prev === CHAR_ASTERISK) isGlobstar = token.isGlobstar = true;\n isGlob = token.isGlob = true;\n finished = true;\n\n if (scanToEnd === true) {\n continue;\n }\n break;\n }\n\n if (code === CHAR_QUESTION_MARK) {\n isGlob = token.isGlob = true;\n finished = true;\n\n if (scanToEnd === true) {\n continue;\n }\n break;\n }\n\n if (code === CHAR_LEFT_SQUARE_BRACKET) {\n while (eos() !== true && (next = advance())) {\n if (next === CHAR_BACKWARD_SLASH) {\n backslashes = token.backslashes = true;\n advance();\n continue;\n }\n\n if (next === CHAR_RIGHT_SQUARE_BRACKET) {\n isBracket = token.isBracket = true;\n isGlob = token.isGlob = true;\n finished = true;\n\n if (scanToEnd === true) {\n continue;\n }\n break;\n }\n }\n }\n\n if (opts.nonegate !== true && code === CHAR_EXCLAMATION_MARK && index === start) {\n negated = token.negated = true;\n start++;\n continue;\n }\n\n if (opts.noparen !== true && code === CHAR_LEFT_PARENTHESES) {\n isGlob = token.isGlob = true;\n\n if (scanToEnd === true) {\n while (eos() !== true && (code = advance())) {\n if (code === CHAR_LEFT_PARENTHESES) {\n backslashes = token.backslashes = true;\n code = advance();\n continue;\n }\n\n if (code === CHAR_RIGHT_PARENTHESES) {\n finished = true;\n break;\n }\n }\n continue;\n }\n break;\n }\n\n if (isGlob === true) {\n finished = true;\n\n if (scanToEnd === true) {\n continue;\n }\n\n break;\n }\n }\n\n if (opts.noext === true) {\n isExtglob = false;\n isGlob = false;\n }\n\n let base = str;\n let prefix = '';\n let glob = '';\n\n if (start > 0) {\n prefix = str.slice(0, start);\n str = str.slice(start);\n lastIndex -= start;\n }\n\n if (base && isGlob === true && lastIndex > 0) {\n base = str.slice(0, lastIndex);\n glob = str.slice(lastIndex);\n } else if (isGlob === true) {\n base = '';\n glob = str;\n } else {\n base = str;\n }\n\n if (base && base !== '' && base !== '/' && base !== str) {\n if (isPathSeparator(base.charCodeAt(base.length - 1))) {\n base = base.slice(0, -1);\n }\n }\n\n if (opts.unescape === true) {\n if (glob) glob = utils.removeBackslashes(glob);\n\n if (base && backslashes === true) {\n base = utils.removeBackslashes(base);\n }\n }\n\n const state = {\n prefix,\n input,\n start,\n base,\n glob,\n isBrace,\n isBracket,\n isGlob,\n isExtglob,\n isGlobstar,\n negated\n };\n\n if (opts.tokens === true) {\n state.maxDepth = 0;\n if (!isPathSeparator(code)) {\n tokens.push(token);\n }\n state.tokens = tokens;\n }\n\n if (opts.parts === true || opts.tokens === true) {\n let prevIndex;\n\n for (let idx = 0; idx < slashes.length; idx++) {\n const n = prevIndex ? 
prevIndex + 1 : start;\n const i = slashes[idx];\n const value = input.slice(n, i);\n if (opts.tokens) {\n if (idx === 0 && start !== 0) {\n tokens[idx].isPrefix = true;\n tokens[idx].value = prefix;\n } else {\n tokens[idx].value = value;\n }\n depth(tokens[idx]);\n state.maxDepth += tokens[idx].depth;\n }\n if (idx !== 0 || value !== '') {\n parts.push(value);\n }\n prevIndex = i;\n }\n\n if (prevIndex && prevIndex + 1 < input.length) {\n const value = input.slice(prevIndex + 1);\n parts.push(value);\n\n if (opts.tokens) {\n tokens[tokens.length - 1].value = value;\n depth(tokens[tokens.length - 1]);\n state.maxDepth += tokens[tokens.length - 1].depth;\n }\n }\n\n state.slashes = slashes;\n state.parts = parts;\n }\n\n return state;\n};\n\nmodule.exports = scan;\n","'use strict';\n\nconst path = require('path');\nconst win32 = process.platform === 'win32';\nconst {\n REGEX_BACKSLASH,\n REGEX_REMOVE_BACKSLASH,\n REGEX_SPECIAL_CHARS,\n REGEX_SPECIAL_CHARS_GLOBAL\n} = require('./constants');\n\nexports.isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val);\nexports.hasRegexChars = str => REGEX_SPECIAL_CHARS.test(str);\nexports.isRegexChar = str => str.length === 1 && exports.hasRegexChars(str);\nexports.escapeRegex = str => str.replace(REGEX_SPECIAL_CHARS_GLOBAL, '\\\\$1');\nexports.toPosixSlashes = str => str.replace(REGEX_BACKSLASH, '/');\n\nexports.removeBackslashes = str => {\n return str.replace(REGEX_REMOVE_BACKSLASH, match => {\n return match === '\\\\' ? '' : match;\n });\n};\n\nexports.supportsLookbehinds = () => {\n const segs = process.version.slice(1).split('.').map(Number);\n if (segs.length === 3 && segs[0] >= 9 || (segs[0] === 8 && segs[1] >= 10)) {\n return true;\n }\n return false;\n};\n\nexports.isWindows = options => {\n if (options && typeof options.windows === 'boolean') {\n return options.windows;\n }\n return win32 === true || path.sep === '\\\\';\n};\n\nexports.escapeLast = (input, char, lastIdx) => {\n const idx = input.lastIndexOf(char, lastIdx);\n if (idx === -1) return input;\n if (input[idx - 1] === '\\\\') return exports.escapeLast(input, char, idx - 1);\n return `${input.slice(0, idx)}\\\\${input.slice(idx)}`;\n};\n\nexports.removePrefix = (input, state = {}) => {\n let output = input;\n if (output.startsWith('./')) {\n output = output.slice(2);\n state.prefix = './';\n }\n return output;\n};\n\nexports.wrapOutput = (input, state = {}, options = {}) => {\n const prepend = options.contains ? '' : '^';\n const append = options.contains ? '' : '$';\n\n let output = `${prepend}(?:${input})${append}`;\n if (state.negated === true) {\n output = `(?:^(?!${output}).*$)`;\n }\n return output;\n};\n","'use strict'\n\nfunction reusify (Constructor) {\n var head = new Constructor()\n var tail = head\n\n function get () {\n var current = head\n\n if (current.next) {\n head = current.next\n } else {\n head = new Constructor()\n tail = head\n }\n\n current.next = null\n\n return current\n }\n\n function release (obj) {\n tail.next = obj\n tail = obj\n }\n\n return {\n get: get,\n release: release\n }\n}\n\nmodule.exports = reusify\n","/*! run-parallel. MIT License. 
Feross Aboukhadijeh */\nmodule.exports = runParallel\n\nfunction runParallel (tasks, cb) {\n var results, pending, keys\n var isSync = true\n\n if (Array.isArray(tasks)) {\n results = []\n pending = tasks.length\n } else {\n keys = Object.keys(tasks)\n results = {}\n pending = keys.length\n }\n\n function done (err) {\n function end () {\n if (cb) cb(err, results)\n cb = null\n }\n if (isSync) process.nextTick(end)\n else end()\n }\n\n function each (i, err, result) {\n results[i] = result\n if (--pending === 0 || err) {\n done(err)\n }\n }\n\n if (!pending) {\n // empty\n done(null)\n } else if (keys) {\n // object\n keys.forEach(function (key) {\n tasks[key](function (err, result) { each(key, err, result) })\n })\n } else {\n // array\n tasks.forEach(function (task, i) {\n task(function (err, result) { each(i, err, result) })\n })\n }\n\n isSync = false\n}\n",";(function (sax) { // wrapper for non-node envs\n sax.parser = function (strict, opt) { return new SAXParser(strict, opt) }\n sax.SAXParser = SAXParser\n sax.SAXStream = SAXStream\n sax.createStream = createStream\n\n // When we pass the MAX_BUFFER_LENGTH position, start checking for buffer overruns.\n // When we check, schedule the next check for MAX_BUFFER_LENGTH - (max(buffer lengths)),\n // since that's the earliest that a buffer overrun could occur. This way, checks are\n // as rare as required, but as often as necessary to ensure never crossing this bound.\n // Furthermore, buffers are only tested at most once per write(), so passing a very\n // large string into write() might have undesirable effects, but this is manageable by\n // the caller, so it is assumed to be safe. Thus, a call to write() may, in the extreme\n // edge case, result in creating at most one complete copy of the string passed in.\n // Set to Infinity to have unlimited buffers.\n sax.MAX_BUFFER_LENGTH = 64 * 1024\n\n var buffers = [\n 'comment', 'sgmlDecl', 'textNode', 'tagName', 'doctype',\n 'procInstName', 'procInstBody', 'entity', 'attribName',\n 'attribValue', 'cdata', 'script'\n ]\n\n sax.EVENTS = [\n 'text',\n 'processinginstruction',\n 'sgmldeclaration',\n 'doctype',\n 'comment',\n 'opentagstart',\n 'attribute',\n 'opentag',\n 'closetag',\n 'opencdata',\n 'cdata',\n 'closecdata',\n 'error',\n 'end',\n 'ready',\n 'script',\n 'opennamespace',\n 'closenamespace'\n ]\n\n function SAXParser (strict, opt) {\n if (!(this instanceof SAXParser)) {\n return new SAXParser(strict, opt)\n }\n\n var parser = this\n clearBuffers(parser)\n parser.q = parser.c = ''\n parser.bufferCheckPosition = sax.MAX_BUFFER_LENGTH\n parser.opt = opt || {}\n parser.opt.lowercase = parser.opt.lowercase || parser.opt.lowercasetags\n parser.looseCase = parser.opt.lowercase ? 'toLowerCase' : 'toUpperCase'\n parser.tags = []\n parser.closed = parser.closedRoot = parser.sawRoot = false\n parser.tag = parser.error = null\n parser.strict = !!strict\n parser.noscript = !!(strict || parser.opt.noscript)\n parser.state = S.BEGIN\n parser.strictEntities = parser.opt.strictEntities\n parser.ENTITIES = parser.strictEntities ? 
Object.create(sax.XML_ENTITIES) : Object.create(sax.ENTITIES)\n parser.attribList = []\n\n // namespaces form a prototype chain.\n // it always points at the current tag,\n // which protos to its parent tag.\n if (parser.opt.xmlns) {\n parser.ns = Object.create(rootNS)\n }\n\n // mostly just for error reporting\n parser.trackPosition = parser.opt.position !== false\n if (parser.trackPosition) {\n parser.position = parser.line = parser.column = 0\n }\n emit(parser, 'onready')\n }\n\n if (!Object.create) {\n Object.create = function (o) {\n function F () {}\n F.prototype = o\n var newf = new F()\n return newf\n }\n }\n\n if (!Object.keys) {\n Object.keys = function (o) {\n var a = []\n for (var i in o) if (o.hasOwnProperty(i)) a.push(i)\n return a\n }\n }\n\n function checkBufferLength (parser) {\n var maxAllowed = Math.max(sax.MAX_BUFFER_LENGTH, 10)\n var maxActual = 0\n for (var i = 0, l = buffers.length; i < l; i++) {\n var len = parser[buffers[i]].length\n if (len > maxAllowed) {\n // Text/cdata nodes can get big, and since they're buffered,\n // we can get here under normal conditions.\n // Avoid issues by emitting the text node now,\n // so at least it won't get any bigger.\n switch (buffers[i]) {\n case 'textNode':\n closeText(parser)\n break\n\n case 'cdata':\n emitNode(parser, 'oncdata', parser.cdata)\n parser.cdata = ''\n break\n\n case 'script':\n emitNode(parser, 'onscript', parser.script)\n parser.script = ''\n break\n\n default:\n error(parser, 'Max buffer length exceeded: ' + buffers[i])\n }\n }\n maxActual = Math.max(maxActual, len)\n }\n // schedule the next check for the earliest possible buffer overrun.\n var m = sax.MAX_BUFFER_LENGTH - maxActual\n parser.bufferCheckPosition = m + parser.position\n }\n\n function clearBuffers (parser) {\n for (var i = 0, l = buffers.length; i < l; i++) {\n parser[buffers[i]] = ''\n }\n }\n\n function flushBuffers (parser) {\n closeText(parser)\n if (parser.cdata !== '') {\n emitNode(parser, 'oncdata', parser.cdata)\n parser.cdata = ''\n }\n if (parser.script !== '') {\n emitNode(parser, 'onscript', parser.script)\n parser.script = ''\n }\n }\n\n SAXParser.prototype = {\n end: function () { end(this) },\n write: write,\n resume: function () { this.error = null; return this },\n close: function () { return this.write(null) },\n flush: function () { flushBuffers(this) }\n }\n\n var Stream\n try {\n Stream = require('stream').Stream\n } catch (ex) {\n Stream = function () {}\n }\n\n var streamWraps = sax.EVENTS.filter(function (ev) {\n return ev !== 'error' && ev !== 'end'\n })\n\n function createStream (strict, opt) {\n return new SAXStream(strict, opt)\n }\n\n function SAXStream (strict, opt) {\n if (!(this instanceof SAXStream)) {\n return new SAXStream(strict, opt)\n }\n\n Stream.apply(this)\n\n this._parser = new SAXParser(strict, opt)\n this.writable = true\n this.readable = true\n\n var me = this\n\n this._parser.onend = function () {\n me.emit('end')\n }\n\n this._parser.onerror = function (er) {\n me.emit('error', er)\n\n // if didn't throw, then means error was handled.\n // go ahead and clear error, so we can write again.\n me._parser.error = null\n }\n\n this._decoder = null\n\n streamWraps.forEach(function (ev) {\n Object.defineProperty(me, 'on' + ev, {\n get: function () {\n return me._parser['on' + ev]\n },\n set: function (h) {\n if (!h) {\n me.removeAllListeners(ev)\n me._parser['on' + ev] = h\n return h\n }\n me.on(ev, h)\n },\n enumerable: true,\n configurable: false\n })\n })\n }\n\n SAXStream.prototype = 
Object.create(Stream.prototype, {\n constructor: {\n value: SAXStream\n }\n })\n\n SAXStream.prototype.write = function (data) {\n if (typeof Buffer === 'function' &&\n typeof Buffer.isBuffer === 'function' &&\n Buffer.isBuffer(data)) {\n if (!this._decoder) {\n var SD = require('string_decoder').StringDecoder\n this._decoder = new SD('utf8')\n }\n data = this._decoder.write(data)\n }\n\n this._parser.write(data.toString())\n this.emit('data', data)\n return true\n }\n\n SAXStream.prototype.end = function (chunk) {\n if (chunk && chunk.length) {\n this.write(chunk)\n }\n this._parser.end()\n return true\n }\n\n SAXStream.prototype.on = function (ev, handler) {\n var me = this\n if (!me._parser['on' + ev] && streamWraps.indexOf(ev) !== -1) {\n me._parser['on' + ev] = function () {\n var args = arguments.length === 1 ? [arguments[0]] : Array.apply(null, arguments)\n args.splice(0, 0, ev)\n me.emit.apply(me, args)\n }\n }\n\n return Stream.prototype.on.call(me, ev, handler)\n }\n\n // this really needs to be replaced with character classes.\n // XML allows all manner of ridiculous numbers and digits.\n var CDATA = '[CDATA['\n var DOCTYPE = 'DOCTYPE'\n var XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace'\n var XMLNS_NAMESPACE = 'http://www.w3.org/2000/xmlns/'\n var rootNS = { xml: XML_NAMESPACE, xmlns: XMLNS_NAMESPACE }\n\n // http://www.w3.org/TR/REC-xml/#NT-NameStartChar\n // This implementation works on strings, a single character at a time\n // as such, it cannot ever support astral-plane characters (10000-EFFFF)\n // without a significant breaking change to either this parser, or the\n // JavaScript language. Implementation of an emoji-capable xml parser\n // is left as an exercise for the reader.\n var nameStart = /[:_A-Za-z\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD]/\n\n var nameBody = /[:_A-Za-z\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD\\u00B7\\u0300-\\u036F\\u203F-\\u2040.\\d-]/\n\n var entityStart = /[#:_A-Za-z\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD]/\n var entityBody = /[#:_A-Za-z\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD\\u00B7\\u0300-\\u036F\\u203F-\\u2040.\\d-]/\n\n function isWhitespace (c) {\n return c === ' ' || c === '\\n' || c === '\\r' || c === '\\t'\n }\n\n function isQuote (c) {\n return c === '\"' || c === '\\''\n }\n\n function isAttribEnd (c) {\n return c === '>' || isWhitespace(c)\n }\n\n function isMatch (regex, c) {\n return regex.test(c)\n }\n\n function notMatch (regex, c) {\n return !isMatch(regex, c)\n }\n\n var S = 0\n sax.STATE = {\n BEGIN: S++, // leading byte order mark or whitespace\n BEGIN_WHITESPACE: S++, // leading whitespace\n TEXT: S++, // general stuff\n TEXT_ENTITY: S++, // & and such.\n OPEN_WAKA: S++, // <\n SGML_DECL: S++, // \n SCRIPT: S++, //