diff --git a/LICENSE b/LICENSE
index d81689a..4f87bd8 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,7 +1,7 @@
The MIT License (MIT)
-Copyright (c) 2020 Michal Dorner and contributors
+Copyright (c) 2021 Michal Dorner and contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
@@ -19,4 +19,4 @@ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
\ No newline at end of file
+THE SOFTWARE.
diff --git a/README.md b/README.md
index 1da73ab..862cb8f 100644
--- a/README.md
+++ b/README.md
@@ -1,12 +1,180 @@
-**WIP**
+# Test Reporter
-# Test Check Report
+This [GitHub Action](https://github.com/features/actions) displays test results from popular testing frameworks directly in GitHub.
+- [x] Parses test results in XML or JSON format and creates a nice report (GitHub Check Run)
+- [x] Annotates code where it failed, based on the message and stack trace captured during test execution
+- [x] Sets the output variable `conclusion` to `success` if all tests passed or `failure` if any test failed
-Goal of this project is to create [Github Action](https://github.com/features/actions)
-that could presents test results from popular testing frameworks as Github check run with code annotation in places where test failed.
+**Supported languages / frameworks:**
+- .NET / [xUnit](https://xunit.net/) / [NUnit](https://nunit.org/) / [MSTest](https://github.com/Microsoft/testfx-docs)
+- Dart / [test](https://pub.dev/packages/test)
+- Flutter / [test](https://pub.dev/packages/test)
+- JavaScript / [JEST](https://jestjs.io/)
-Support for following test reports are planned for initial release:
-- [ ] dart-json: [`dart test --file-reporter=\"json:test-results.json`](https://pub.dev/packages/test)
-- [ ] dotnet-trx: [`dotnet test --logger "trx;LogFileName=test-results.trx"`](https://docs.microsoft.com/en-us/dotnet/core/tools/dotnet-test#examples)
-- [ ] flutter-machine: [`flutter test --machine > test-results.json`](https://flutter.dev/docs/cookbook/testing/unit/introduction)
-- [ ] jest-junit: [`jest --ci --reporters=jest-junit`](https://github.com/jest-community/jest-junit#readme)
+For more information see the [Supported formats](#supported-formats) section.
+
+**Support is planned for:**
+- Java / [JUnit 5](https://junit.org/junit5/)
+
+Missing support for your favorite language or framework?
+Please create an [issue](https://github.com/dorny/test-reporter/issues/new) or contribute a PR.
+
+## Example
+
+```yaml
+jobs:
+  build-test:
+    name: 'Build & Test'
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2        # checkout the repo
+      - run: npm ci                      # install packages
+      - run: npm test                    # run tests (configured to use jest-junit reporter)
+
+      - name: 'Test Report'
+        uses: dorny/test-reporter@v1
+        if: always()                     # run this step even if previous step failed
+        with:
+          name: 'JEST Tests'             # Name of the check run which will be created
+          path: '__reports__/jest-*.xml' # Path to test report
+          reporter: 'jest-junit'         # Format of test report
+```
+
+## Usage
+
+```yaml
+- uses: dorny/test-reporter@v1
+  with:
+
+    # Name of the Check Run which will be created
+    name: ''
+
+    # Path to test report
+    # Supports wildcards via [fast-glob](https://github.com/mrmlnc/fast-glob)
+    # Path may match multiple result files of the same format
+    path: ''
+
+    # Format of test report. Supported options:
+    #   dart-json
+    #   dotnet-trx
+    #   flutter-machine
+    #   jest-junit
+    reporter: ''
+
+    # Enables code annotations with error message and stack trace
+    annotations: 'true'
+
+    # Set action as failed if the test report contains any failed test
+    fail-on-error: 'true'
+
+    # Relative path under $GITHUB_WORKSPACE where the repository was checked out
+    working-directory: ''
+
+    # Personal access token used to interact with the GitHub API
+    # Default: ${{ github.token }}
+    token: ''
+```
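+
+The action also sets a `conclusion` output (`success` or `failure`). A minimal sketch of consuming it in a later step, assuming a hypothetical step id `test-report`:
+
+```yaml
+- name: 'Test Report'
+  id: test-report                        # hypothetical id used to reference the output
+  uses: dorny/test-reporter@v1
+  with:
+    name: 'JEST Tests'
+    path: '__reports__/jest-*.xml'
+    reporter: 'jest-junit'
+
+- name: 'Print conclusion'
+  run: echo "Test conclusion: ${{ steps.test-report.outputs.conclusion }}"
+```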
+
+## Supported formats
+
+### dart-json
+
+The test run must be configured to use the [JSON](https://github.com/dart-lang/test/blob/master/pkgs/test/doc/configuration.md#reporter) reporter.
+You can configure it in `dart_test.yaml`:
+
+```yml
+file_reporters:
+ json: reports/test-results.json
+```
+
+Or with CLI arguments:
+
+[`dart test --file-reporter="json:test-results.json"`](https://pub.dev/packages/test)
+
+For more information see:
+- [test package](https://pub.dev/packages/test)
+- [test configuration](https://github.com/dart-lang/test/blob/master/pkgs/test/doc/configuration.md)
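+
+A workflow step wiring this into the action might look as follows (a sketch; the `path` must match wherever your `file_reporters` configuration writes the JSON file):
+
+```yaml
+- name: 'Dart Tests'
+  uses: dorny/test-reporter@v1
+  if: always()
+  with:
+    name: 'Dart Tests'
+    path: 'reports/test-results.json'   # matches the file_reporters path above
+    reporter: 'dart-json'
+```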
+
+### dotnet-trx
+
+Test execution must be configured to produce *Visual Studio Test Results* files (TRX).
+To get test results in TRX format, you can run your tests with the following CLI arguments:
+
+`dotnet test --logger "trx;LogFileName=test-results.trx"`
+
+Or you can configure TRX test output in `*.csproj` or `Directory.Build.props`:
+```xml
+<PropertyGroup>
+  <VSTestLogger>trx%3bLogFileName=$(MSBuildProjectName).trx</VSTestLogger>
+  <VSTestResultsDirectory>$(MSBuildThisFileDirectory)/reports</VSTestResultsDirectory>
+</PropertyGroup>
+```
+
+Supported testing frameworks:
+- [xUnit](https://xunit.net/)
+- [NUnit](https://nunit.org/)
+- [MSTest](https://github.com/Microsoft/testfx-docs)
+
+For more information see [dotnet test](https://docs.microsoft.com/en-us/dotnet/core/tools/dotnet-test#examples).
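+
+A corresponding workflow step might look like this (a sketch; `reports/**/*.trx` is an example path and must match where your TRX files are written):
+
+```yaml
+- name: '.NET Tests'
+  uses: dorny/test-reporter@v1
+  if: always()
+  with:
+    name: '.NET Tests'
+    path: 'reports/**/*.trx'   # example path, adjust to your TRX output location
+    reporter: 'dotnet-trx'
+```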
+
+### flutter-machine
+
+The test run must be configured to use the [JSON](https://github.com/dart-lang/test/blob/master/pkgs/test/doc/configuration.md#reporter) reporter.
+You can configure it in `dart_test.yaml`:
+```yml
+file_reporters:
+ json: reports/test-results.json
+```
+
+Or with an (undocumented) CLI argument:
+
+`flutter test --machine > test-results.json`
+
+
+According to the documentation, `dart_test.yaml` should be at the root of the package, next to the package's pubspec.
+This works in Dart projects, but the file is ignored by Flutter. With Flutter, it works when `dart_test.yaml` is placed inside your `test` folder.
+
+For more information see:
+- [test package](https://pub.dev/packages/test)
+- [test configuration](https://github.com/dart-lang/test/blob/master/pkgs/test/doc/configuration.md)
+- [flutter-cli](https://flutter.dev/docs/reference/flutter-cli)
+- [unit testing introduction](https://flutter.dev/docs/cookbook/testing/unit/introduction)
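+
+A workflow step for Flutter might look like this (a sketch; the `path` assumes the `file_reporters` configuration above):
+
+```yaml
+- name: 'Flutter Tests'
+  uses: dorny/test-reporter@v1
+  if: always()
+  with:
+    name: 'Flutter Tests'
+    path: 'reports/test-results.json'   # matches the file_reporters path above
+    reporter: 'flutter-machine'
+```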
+
+### jest-junit
+
+Support for the [JEST](https://jestjs.io/) testing framework requires the [jest-junit](https://github.com/jest-community/jest-junit) reporter.
+It creates test results in JUnit XML format, which can then be processed by this action.
+You can use the following example configuration in `package.json`:
+```json
+"scripts": {
+ "test": "jest --ci --reporters=default --reporters=jest-junit"
+},
+"devDependencies": {
+ "jest": "^26.5.3",
+ "jest-junit": "^12.0.0"
+},
+"jest-junit": {
+ "outputDirectory": "__reports__",
+ "outputName": "jest-junit.xml",
+ "ancestorSeparator": " › ",
+ "uniqueOutputName": "false",
+ "suiteNameTemplate": "{filepath}",
+ "classNameTemplate": "{classname}",
+ "titleTemplate": "{title}"
+}
+```
+
+The configuration of `uniqueOutputName`, `suiteNameTemplate`, `classNameTemplate` and `titleTemplate` is important for proper visualization of test results.
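+
+With the configuration above, the report is written to `__reports__/jest-junit.xml`, so the action's `path` input can point directly at it (a sketch mirroring the example at the top of this README):
+
+```yaml
+- name: 'JEST Tests'
+  uses: dorny/test-reporter@v1
+  if: always()
+  with:
+    name: 'JEST Tests'
+    path: '__reports__/jest-junit.xml'   # outputDirectory + outputName from the config above
+    reporter: 'jest-junit'
+```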
+
+
+## License
+
+The scripts and documentation in this project are released under the [MIT License](https://github.com/dorny/test-reporter/blob/master/LICENSE).
diff --git a/action.yml b/action.yml
index 5dff927..9a4c3f9 100644
--- a/action.yml
+++ b/action.yml
@@ -1,26 +1,22 @@
-name: 'Test Check Reporter'
+name: Test Reporter
description: |
- Presents test results from popular testing frameworks as Github check run.
- Supports:
- - dotnet (trx logger)
- - flutter (JSON output with '--machine' flag )
- - jest (jest-junit reporter)
-
-author: 'Michal Dorner '
+ Displays test results from popular testing frameworks directly in GitHub.
+ Supports .NET (xUnit, NUnit, MSTest), Dart, Flutter and JavaScript (JEST).
+author: Michal Dorner
inputs:
annotations:
- description: 'Annotate code where exceptions in tests were thrown'
+ description: Enables code annotations with error message and stack trace captured during test execution
required: true
default: 'true'
fail-on-error:
- description: 'Set this action as failed if test report contains any failed test'
+ description: Set this action as failed if the test report contains any failed test
required: true
default: 'true'
name:
- description: 'Name of the check run'
+ description: Name of the check run
required: true
path:
- description: 'Path to test report'
+ description: Path to test report
required: true
reporter:
description: |
@@ -31,11 +27,11 @@ inputs:
- jest-junit
required: true
token:
- description: 'GitHub Access Token'
+ description: GitHub Access Token
required: false
default: ${{ github.token }}
working-directory:
- description: 'Relative path under $GITHUB_WORKSPACE where the repository was checked out.'
+ description: Relative path under $GITHUB_WORKSPACE where the repository was checked out
required: false
outputs:
conclusion:
diff --git a/dist/index.js b/dist/index.js
index 19dc7d8..b0e7d84 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -3536,6 +3536,7 @@ function copyFile(srcFile, destFile, force) {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0;
const fs = __nccwpck_require__(5747);
exports.FILE_SYSTEM_ADAPTER = {
lstat: fs.lstat,
@@ -3562,6 +3563,7 @@ exports.createFileSystemAdapter = createFileSystemAdapter;
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.IS_SUPPORT_READDIR_WITH_FILE_TYPES = void 0;
const NODE_PROCESS_VERSION_PARTS = process.versions.node.split('.');
const MAJOR_VERSION = parseInt(NODE_PROCESS_VERSION_PARTS[0], 10);
const MINOR_VERSION = parseInt(NODE_PROCESS_VERSION_PARTS[1], 10);
@@ -3583,6 +3585,7 @@ exports.IS_SUPPORT_READDIR_WITH_FILE_TYPES = IS_MATCHED_BY_MAJOR || IS_MATCHED_B
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.Settings = exports.scandirSync = exports.scandir = void 0;
const async = __nccwpck_require__(4507);
const sync = __nccwpck_require__(9560);
const settings_1 = __nccwpck_require__(8662);
@@ -3615,10 +3618,12 @@ function getSettings(settingsOrOptions = {}) {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.readdir = exports.readdirWithFileTypes = exports.read = void 0;
const fsStat = __nccwpck_require__(109);
const rpl = __nccwpck_require__(5288);
const constants_1 = __nccwpck_require__(8838);
const utils = __nccwpck_require__(6297);
+const common = __nccwpck_require__(3847);
function read(directory, settings, callback) {
if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) {
return readdirWithFileTypes(directory, settings, callback);
@@ -3634,7 +3639,7 @@ function readdirWithFileTypes(directory, settings, callback) {
const entries = dirents.map((dirent) => ({
dirent,
name: dirent.name,
- path: `${directory}${settings.pathSegmentSeparator}${dirent.name}`
+ path: common.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator)
}));
if (!settings.followSymbolicLinks) {
return callSuccessCallback(callback, entries);
@@ -3671,7 +3676,7 @@ function readdir(directory, settings, callback) {
if (readdirError !== null) {
return callFailureCallback(callback, readdirError);
}
- const filepaths = names.map((name) => `${directory}${settings.pathSegmentSeparator}${name}`);
+ const filepaths = names.map((name) => common.joinPathSegments(directory, name, settings.pathSegmentSeparator));
const tasks = filepaths.map((filepath) => {
return (done) => fsStat.stat(filepath, settings.fsStatSettings, done);
});
@@ -3705,6 +3710,27 @@ function callSuccessCallback(callback, result) {
}
+/***/ }),
+
+/***/ 3847:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.joinPathSegments = void 0;
+function joinPathSegments(a, b, separator) {
+ /**
+ * The correct handling of cases when the first segment is a root (`/`, `C:/`) or UNC path (`//?/C:/`).
+ */
+ if (a.endsWith(separator)) {
+ return a + b;
+ }
+ return a + separator + b;
+}
+exports.joinPathSegments = joinPathSegments;
+
+
/***/ }),
/***/ 9560:
@@ -3713,9 +3739,11 @@ function callSuccessCallback(callback, result) {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.readdir = exports.readdirWithFileTypes = exports.read = void 0;
const fsStat = __nccwpck_require__(109);
const constants_1 = __nccwpck_require__(8838);
const utils = __nccwpck_require__(6297);
+const common = __nccwpck_require__(3847);
function read(directory, settings) {
if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) {
return readdirWithFileTypes(directory, settings);
@@ -3729,7 +3757,7 @@ function readdirWithFileTypes(directory, settings) {
const entry = {
dirent,
name: dirent.name,
- path: `${directory}${settings.pathSegmentSeparator}${dirent.name}`
+ path: common.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator)
};
if (entry.dirent.isSymbolicLink() && settings.followSymbolicLinks) {
try {
@@ -3749,7 +3777,7 @@ exports.readdirWithFileTypes = readdirWithFileTypes;
function readdir(directory, settings) {
const names = settings.fs.readdirSync(directory);
return names.map((name) => {
- const entryPath = `${directory}${settings.pathSegmentSeparator}${name}`;
+ const entryPath = common.joinPathSegments(directory, name, settings.pathSegmentSeparator);
const stats = fsStat.statSync(entryPath, settings.fsStatSettings);
const entry = {
name,
@@ -3791,7 +3819,7 @@ class Settings {
});
}
_getValue(option, value) {
- return option === undefined ? value : option;
+ return option !== null && option !== void 0 ? option : value;
}
}
exports.default = Settings;
@@ -3805,6 +3833,7 @@ exports.default = Settings;
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.createDirentFromStats = void 0;
class DirentFromStats {
constructor(name, stats) {
this.name = name;
@@ -3831,6 +3860,7 @@ exports.createDirentFromStats = createDirentFromStats;
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.fs = void 0;
const fs = __nccwpck_require__(883);
exports.fs = fs;
@@ -3843,6 +3873,7 @@ exports.fs = fs;
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0;
const fs = __nccwpck_require__(5747);
exports.FILE_SYSTEM_ADAPTER = {
lstat: fs.lstat,
@@ -3867,6 +3898,7 @@ exports.createFileSystemAdapter = createFileSystemAdapter;
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.statSync = exports.stat = exports.Settings = void 0;
const async = __nccwpck_require__(4147);
const sync = __nccwpck_require__(4527);
const settings_1 = __nccwpck_require__(2410);
@@ -3899,6 +3931,7 @@ function getSettings(settingsOrOptions = {}) {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.read = void 0;
function read(path, settings, callback) {
settings.fs.lstat(path, (lstatError, lstat) => {
if (lstatError !== null) {
@@ -3938,6 +3971,7 @@ function callSuccessCallback(callback, result) {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.read = void 0;
function read(path, settings) {
const lstat = settings.fs.lstatSync(path);
if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) {
@@ -3978,7 +4012,7 @@ class Settings {
this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true);
}
_getValue(option, value) {
- return option === undefined ? value : option;
+ return option !== null && option !== void 0 ? option : value;
}
}
exports.default = Settings;
@@ -3992,6 +4026,7 @@ exports.default = Settings;
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.Settings = exports.walkStream = exports.walkSync = exports.walk = void 0;
const async_1 = __nccwpck_require__(7523);
const stream_1 = __nccwpck_require__(6737);
const sync_1 = __nccwpck_require__(3068);
@@ -4080,7 +4115,11 @@ class StreamProvider {
this._stream = new stream_1.Readable({
objectMode: true,
read: () => { },
- destroy: this._reader.destroy.bind(this._reader)
+ destroy: () => {
+ if (!this._reader.isDestroyed) {
+ this._reader.destroy();
+ }
+ }
});
}
read() {
@@ -4158,6 +4197,9 @@ class AsyncReader extends reader_1.default {
});
return this._emitter;
}
+ get isDestroyed() {
+ return this._isDestroyed;
+ }
destroy() {
if (this._isDestroyed) {
throw new Error('The reader is already destroyed');
@@ -4194,7 +4236,7 @@ class AsyncReader extends reader_1.default {
});
}
_handleError(error) {
- if (!common.isFatalError(this._settings, error)) {
+ if (this._isDestroyed || !common.isFatalError(this._settings, error)) {
return;
}
this._isFatalError = true;
@@ -4231,6 +4273,7 @@ exports.default = AsyncReader;
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.joinPathSegments = exports.replacePathSegmentSeparator = exports.isAppliedFilter = exports.isFatalError = void 0;
function isFatalError(settings, error) {
if (settings.errorFilter === null) {
return true;
@@ -4243,13 +4286,19 @@ function isAppliedFilter(filter, value) {
}
exports.isAppliedFilter = isAppliedFilter;
function replacePathSegmentSeparator(filepath, separator) {
- return filepath.split(/[\\/]/).join(separator);
+ return filepath.split(/[/\\]/).join(separator);
}
exports.replacePathSegmentSeparator = replacePathSegmentSeparator;
function joinPathSegments(a, b, separator) {
if (a === '') {
return b;
}
+ /**
+ * The correct handling of cases when the first segment is a root (`/`, `C:/`) or UNC path (`//?/C:/`).
+ */
+ if (a.endsWith(separator)) {
+ return a + b;
+ }
return a + separator + b;
}
exports.joinPathSegments = joinPathSegments;
@@ -4369,7 +4418,7 @@ class Settings {
});
}
_getValue(option, value) {
- return option === undefined ? value : option;
+ return option !== null && option !== void 0 ? option : value;
}
}
exports.default = Settings;
@@ -9700,7 +9749,11 @@ Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.DEFAULT_FILE_SYSTEM_ADAPTER = void 0;
const fs = __nccwpck_require__(5747);
const os = __nccwpck_require__(2087);
-const CPU_COUNT = os.cpus().length;
+/**
+ * The `os.cpus` method can return zero. We expect the number of cores to be greater than zero.
+ * https://github.com/nodejs/node/blob/7faeddf23a98c53896f8b574a6e66589e8fb1eb8/lib/os.js#L106-L107
+ */
+const CPU_COUNT = Math.max(os.cpus().length, 1);
exports.DEFAULT_FILE_SYSTEM_ADAPTER = {
lstat: fs.lstat,
lstatSync: fs.lstatSync,
@@ -10089,10 +10142,15 @@ function fastqueue (context, worker, concurrency) {
context = null
}
+ if (concurrency < 1) {
+ throw new Error('fastqueue concurrency must be greater than 1')
+ }
+
var cache = reusify(Task)
var queueHead = null
var queueTail = null
var _running = 0
+ var errorHandler = null
var self = {
push: push,
@@ -10109,7 +10167,8 @@ function fastqueue (context, worker, concurrency) {
unshift: unshift,
empty: noop,
kill: kill,
- killAndDrain: killAndDrain
+ killAndDrain: killAndDrain,
+ error: error
}
return self
@@ -10166,6 +10225,7 @@ function fastqueue (context, worker, concurrency) {
current.release = release
current.value = value
current.callback = done || noop
+ current.errorHandler = errorHandler
if (_running === self.concurrency || self.paused) {
if (queueTail) {
@@ -10241,6 +10301,10 @@ function fastqueue (context, worker, concurrency) {
self.drain()
self.drain = noop
}
+
+ function error (handler) {
+ errorHandler = handler
+ }
}
function noop () {}
@@ -10251,13 +10315,19 @@ function Task () {
this.next = null
this.release = noop
this.context = null
+ this.errorHandler = null
var self = this
this.worked = function worked (err, result) {
var callback = self.callback
+ var errorHandler = self.errorHandler
+ var val = self.value
self.value = null
self.callback = noop
+ if (self.errorHandler) {
+ errorHandler(err, val)
+ }
callback.call(self.context, err, result)
self.release(self)
}
@@ -14395,6 +14465,7 @@ module.exports = reusify
/***/ 5288:
/***/ ((module) => {
+/*! run-parallel. MIT License. Feross Aboukhadijeh */
module.exports = runParallel
function runParallel (tasks, cb) {
diff --git a/dist/index.js.map b/dist/index.js.map
index 0dd50da..3b009c0 100644
--- a/dist/index.js.map
+++ b/dist/index.js.map
@@ -1 +1 @@
-{"version":3,"file":"index.js","sources":["../webpack://test-check/./lib/main.js","../webpack://test-check/./lib/parsers/dart-json/dart-json-parser.js","../webpack://test-check/./lib/parsers/dart-json/dart-json-types.js","../webpack://test-check/./lib/parsers/dotnet-trx/dotnet-trx-parser.js","../webpack://test-check/./lib/parsers/jest-junit/jest-junit-parser.js","../webpack://test-check/./lib/report/get-report.js","../webpack://test-check/./lib/report/test-results.js","../webpack://test-check/./lib/utils/exec.js","../webpack://test-check/./lib/utils/file-utils.js","../webpack://test-check/./lib/utils/git.js","../webpack://test-check/./lib/utils/github-utils.js","../webpack://test-check/./lib/utils/markdown-utils.js","../webpack://test-check/./lib/utils/slugger.js","../webpack://test-check/./lib/utils/xml-utils.js","../webpack://test-check/./node_modules/@actions/core/lib/command.js","../webpack://test-check/./node_modules/@actions/core/lib/core.js","../webpack://test-check/./node_modules/@actions/core/lib/file-command.js","../webpack://test-check/./node_modules/@actions/core/lib/utils.js","../webpack://test-check/./node_modules/@actions/exec/lib/exec.js","../webpack://test-check/./node_modules/@actions/exec/lib/toolrunner.js","../webpack://test-check/./node_modules/@actions/github/lib/context.js","../webpack://test-check/./node_modules/@actions/github/lib/github.js","../webpack://test-check/./node_modules/@actions/github/lib/internal/utils.js","../webpack://test-check/./node_modules/@actions/github/lib/utils.js","../webpack://test-check/./node_modules/@actions/http-client/index.js","../webpack://test-check/./node_modules/@actions/http-client/proxy.js","../webpack://test-check/./node_modules/@actions/io/lib/io-util.js","../webpack://test-check/./node_modules/@actions/io/lib/io.js","../webpack://test-check/./node_modules/@nodelib/fs.scandir/out/adapters/fs.js","../webpack://test-check/./node_modules/@nodelib/fs.scandir/out/constants.js","../webpack://test-check/./node_modules/@nodelib/fs.scandir/out/index.js","../webpack://test-check/./node_modules/@nodelib/fs.scandir/out/providers/async.js","../webpack://test-check/./node_modules/@nodelib/fs.scandir/out/providers/sync.js","../webpack://test-check/./node_modules/@nodelib/fs.scandir/out/settings.js","../webpack://test-check/./node_modules/@nodelib/fs.scandir/out/utils/fs.js","../webpack://test-check/./node_modules/@nodelib/fs.scandir/out/utils/index.js","../webpack://test-check/./node_modules/@nodelib/fs.stat/out/adapters/fs.js","../webpack://test-check/./node_modules/@nodelib/fs.stat/out/index.js","../webpack://test-check/./node_modules/@nodelib/fs.stat/out/providers/async.js","../webpack://test-check/./node_modules/@nodelib/fs.stat/out/providers/sync.js","../webpack://test-check/./node_modules/@nodelib/fs.stat/out/settings.js","../webpack://test-check/./node_modules/@nodelib/fs.walk/out/index.js","../webpack://test-check/./node_modules/@nodelib/fs.walk/out/providers/async.js","../webpack://test-check/./node_modules/@nodelib/fs.walk/out/providers/stream.js","../webpack://test-check/./node_modules/@nodelib/fs.walk/out/providers/sync.js","../webpack://test-check/./node_modules/@nodelib/fs.walk/out/readers/async.js","../webpack://test-check/./node_modules/@nodelib/fs.walk/out/readers/common.js","../webpack://test-check/./node_modules/@nodelib/fs.walk/out/readers/reader.js","../webpack://test-check/./node_modules/@nodelib/fs.walk/out/readers/sync.js","../webpack://test-check/./node_modules/@nodelib/fs.walk/out/settings.js","../webpack://test-che
ck/./node_modules/@octokit/auth-token/dist-node/index.js","../webpack://test-check/./node_modules/@octokit/core/dist-node/index.js","../webpack://test-check/./node_modules/@octokit/endpoint/dist-node/index.js","../webpack://test-check/./node_modules/@octokit/endpoint/node_modules/is-plain-object/dist/is-plain-object.js","../webpack://test-check/./node_modules/@octokit/graphql/dist-node/index.js","../webpack://test-check/./node_modules/@octokit/plugin-paginate-rest/dist-node/index.js","../webpack://test-check/./node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js","../webpack://test-check/./node_modules/@octokit/request-error/dist-node/index.js","../webpack://test-check/./node_modules/@octokit/request/dist-node/index.js","../webpack://test-check/./node_modules/@octokit/request/node_modules/is-plain-object/dist/is-plain-object.js","../webpack://test-check/./node_modules/before-after-hook/index.js","../webpack://test-check/./node_modules/before-after-hook/lib/add.js","../webpack://test-check/./node_modules/before-after-hook/lib/register.js","../webpack://test-check/./node_modules/before-after-hook/lib/remove.js","../webpack://test-check/./node_modules/deprecation/dist-node/index.js","../webpack://test-check/./node_modules/fast-glob/node_modules/braces/index.js","../webpack://test-check/./node_modules/fast-glob/node_modules/braces/lib/compile.js","../webpack://test-check/./node_modules/fast-glob/node_modules/braces/lib/constants.js","../webpack://test-check/./node_modules/fast-glob/node_modules/braces/lib/expand.js","../webpack://test-check/./node_modules/fast-glob/node_modules/braces/lib/parse.js","../webpack://test-check/./node_modules/fast-glob/node_modules/braces/lib/stringify.js","../webpack://test-check/./node_modules/fast-glob/node_modules/braces/lib/utils.js","../webpack://test-check/./node_modules/fast-glob/node_modules/fill-range/index.js","../webpack://test-check/./node_modules/fast-glob/node_modules/is-number/index.js","../webpack://test-check/./node_modules/fast-glob/node_modules/micromatch/index.js","../webpack://test-check/./node_modules/fast-glob/node_modules/to-regex-range/index.js","../webpack://test-check/./node_modules/fast-glob/out/index.js","../webpack://test-check/./node_modules/fast-glob/out/managers/tasks.js","../webpack://test-check/./node_modules/fast-glob/out/providers/async.js","../webpack://test-check/./node_modules/fast-glob/out/providers/filters/deep.js","../webpack://test-check/./node_modules/fast-glob/out/providers/filters/entry.js","../webpack://test-check/./node_modules/fast-glob/out/providers/filters/error.js","../webpack://test-check/./node_modules/fast-glob/out/providers/matchers/matcher.js","../webpack://test-check/./node_modules/fast-glob/out/providers/matchers/partial.js","../webpack://test-check/./node_modules/fast-glob/out/providers/provider.js","../webpack://test-check/./node_modules/fast-glob/out/providers/stream.js","../webpack://test-check/./node_modules/fast-glob/out/providers/sync.js","../webpack://test-check/./node_modules/fast-glob/out/providers/transformers/entry.js","../webpack://test-check/./node_modules/fast-glob/out/readers/reader.js","../webpack://test-check/./node_modules/fast-glob/out/readers/stream.js","../webpack://test-check/./node_modules/fast-glob/out/readers/sync.js","../webpack://test-check/./node_modules/fast-glob/out/settings.js","../webpack://test-check/./node_modules/fast-glob/out/utils/array.js","../webpack://test-check/./node_modules/fast-glob/out/utils/errno.js","../webpack://test-check/./node_modules/fas
t-glob/out/utils/fs.js","../webpack://test-check/./node_modules/fast-glob/out/utils/index.js","../webpack://test-check/./node_modules/fast-glob/out/utils/path.js","../webpack://test-check/./node_modules/fast-glob/out/utils/pattern.js","../webpack://test-check/./node_modules/fast-glob/out/utils/stream.js","../webpack://test-check/./node_modules/fast-glob/out/utils/string.js","../webpack://test-check/./node_modules/fastq/queue.js","../webpack://test-check/./node_modules/glob-parent/index.js","../webpack://test-check/./node_modules/is-extglob/index.js","../webpack://test-check/./node_modules/is-glob/index.js","../webpack://test-check/./node_modules/merge2/index.js","../webpack://test-check/./node_modules/node-fetch/lib/index.js","../webpack://test-check/./node_modules/once/once.js","../webpack://test-check/./node_modules/picomatch/index.js","../webpack://test-check/./node_modules/picomatch/lib/constants.js","../webpack://test-check/./node_modules/picomatch/lib/parse.js","../webpack://test-check/./node_modules/picomatch/lib/picomatch.js","../webpack://test-check/./node_modules/picomatch/lib/scan.js","../webpack://test-check/./node_modules/picomatch/lib/utils.js","../webpack://test-check/./node_modules/reusify/reusify.js","../webpack://test-check/./node_modules/run-parallel/index.js","../webpack://test-check/./node_modules/sax/lib/sax.js","../webpack://test-check/./node_modules/tunnel/index.js","../webpack://test-check/./node_modules/tunnel/lib/tunnel.js","../webpack://test-check/./node_modules/universal-user-agent/dist-node/index.js","../webpack://test-check/./node_modules/wrappy/wrappy.js","../webpack://test-check/./node_modules/xml2js/lib/bom.js","../webpack://test-check/./node_modules/xml2js/lib/builder.js","../webpack://test-check/./node_modules/xml2js/lib/defaults.js","../webpack://test-check/./node_modules/xml2js/lib/parser.js","../webpack://test-check/./node_modules/xml2js/lib/processors.js","../webpack://test-check/./node_modules/xml2js/lib/xml2js.js","../webpack://test-check/./node_modules/xmlbuilder/lib/DocumentPosition.js","../webpack://test-check/./node_modules/xmlbuilder/lib/NodeType.js","../webpack://test-check/./node_modules/xmlbuilder/lib/Utility.js","../webpack://test-check/./node_modules/xmlbuilder/lib/WriterState.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLAttribute.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLCData.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLCharacterData.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLComment.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDOMConfiguration.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDOMErrorHandler.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDOMImplementation.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDOMStringList.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDTDAttList.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDTDElement.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDTDEntity.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDTDNotation.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDeclaration.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDocType.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDocument.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDocumentCB.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDummy.js","../webpack://test-check/./node_modu
les/xmlbuilder/lib/XMLElement.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLNamedNodeMap.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLNode.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLNodeList.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLProcessingInstruction.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLRaw.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLStreamWriter.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLStringWriter.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLStringifier.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLText.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLWriterBase.js","../webpack://test-check/./node_modules/xmlbuilder/lib/index.js","../webpack://test-check/./node_modules/@vercel/ncc/dist/ncc/@@notfound.js","../webpack://test-check/external \"assert\"","../webpack://test-check/external \"child_process\"","../webpack://test-check/external \"events\"","../webpack://test-check/external \"fs\"","../webpack://test-check/external \"http\"","../webpack://test-check/external \"https\"","../webpack://test-check/external \"net\"","../webpack://test-check/external \"os\"","../webpack://test-check/external \"path\"","../webpack://test-check/external \"stream\"","../webpack://test-check/external \"string_decoder\"","../webpack://test-check/external \"timers\"","../webpack://test-check/external \"tls\"","../webpack://test-check/external \"url\"","../webpack://test-check/external \"util\"","../webpack://test-check/external \"zlib\"","../webpack://test-check/webpack/bootstrap","../webpack://test-check/webpack/runtime/compat","../webpack://test-check/webpack/startup"],"sourcesContent":["\"use strict\";\r\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n}) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n}));\r\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n}) : function(o, v) {\r\n o[\"default\"] = v;\r\n});\r\nvar __importStar = (this && this.__importStar) || function (mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n};\r\nvar __importDefault = (this && this.__importDefault) || function (mod) {\r\n return (mod && mod.__esModule) ? 
mod : { \"default\": mod };\r\n};\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.getFiles = void 0;\r\nconst core = __importStar(require(\"@actions/core\"));\r\nconst github = __importStar(require(\"@actions/github\"));\r\nconst fs = __importStar(require(\"fs\"));\r\nconst fast_glob_1 = __importDefault(require(\"fast-glob\"));\r\nconst dart_json_parser_1 = require(\"./parsers/dart-json/dart-json-parser\");\r\nconst dotnet_trx_parser_1 = require(\"./parsers/dotnet-trx/dotnet-trx-parser\");\r\nconst jest_junit_parser_1 = require(\"./parsers/jest-junit/jest-junit-parser\");\r\nconst file_utils_1 = require(\"./utils/file-utils\");\r\nconst git_1 = require(\"./utils/git\");\r\nconst github_utils_1 = require(\"./utils/github-utils\");\r\nasync function run() {\r\n try {\r\n await main();\r\n }\r\n catch (error) {\r\n core.setFailed(error.message);\r\n }\r\n}\r\nasync function main() {\r\n const annotations = core.getInput('annotations', { required: true }) === 'true';\r\n const failOnError = core.getInput('fail-on-error', { required: true }) === 'true';\r\n const name = core.getInput('name', { required: true });\r\n const path = core.getInput('path', { required: true });\r\n const reporter = core.getInput('reporter', { required: true });\r\n const token = core.getInput('token', { required: true });\r\n const workDirInput = core.getInput('working-directory', { required: false });\r\n if (workDirInput) {\r\n core.info(`Changing directory to ${workDirInput}`);\r\n process.chdir(workDirInput);\r\n }\r\n const workDir = file_utils_1.normalizeDirPath(process.cwd(), true);\r\n const octokit = github.getOctokit(token);\r\n const sha = github_utils_1.getCheckRunSha();\r\n // We won't need tracked files if we are not going to create annotations\r\n const trackedFiles = annotations ? await git_1.listFiles() : [];\r\n const opts = {\r\n name,\r\n annotations,\r\n trackedFiles,\r\n workDir\r\n };\r\n const parser = getParser(reporter);\r\n const files = await getFiles(path);\r\n if (files.length === 0) {\r\n core.setFailed(`No file matches path '${path}'`);\r\n return;\r\n }\r\n core.info(`Using test report parser '${reporter}'`);\r\n const result = await parser(files, opts);\r\n const conclusion = result.success ? 
'success' : 'failure';\r\n core.info(`Creating check run '${name}' with conclusion '${conclusion}'`);\r\n await octokit.checks.create({\r\n head_sha: sha,\r\n name,\r\n conclusion,\r\n status: 'completed',\r\n output: result.output,\r\n ...github.context.repo\r\n });\r\n core.setOutput('conclusion', conclusion);\r\n if (failOnError && !result.success) {\r\n core.setFailed(`Failed test has been found and 'fail-on-error' option is set to ${failOnError}`);\r\n }\r\n}\r\nfunction getParser(reporter) {\r\n switch (reporter) {\r\n case 'dart-json':\r\n return dart_json_parser_1.parseDartJson;\r\n case 'dotnet-trx':\r\n return dotnet_trx_parser_1.parseDotnetTrx;\r\n case 'flutter-machine':\r\n return dart_json_parser_1.parseDartJson;\r\n case 'jest-junit':\r\n return jest_junit_parser_1.parseJestJunit;\r\n default:\r\n throw new Error(`Input variable 'reporter' is set to invalid value '${reporter}'`);\r\n }\r\n}\r\nasync function getFiles(pattern) {\r\n const paths = await fast_glob_1.default(pattern, { dot: true });\r\n return Promise.all(paths.map(async (path) => {\r\n core.info(`Reading test report '${path}'`);\r\n const content = await fs.promises.readFile(path, { encoding: 'utf8' });\r\n return { path, content };\r\n }));\r\n}\r\nexports.getFiles = getFiles;\r\nrun();\r\n","\"use strict\";\r\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n}) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n}));\r\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n}) : function(o, v) {\r\n o[\"default\"] = v;\r\n});\r\nvar __importStar = (this && this.__importStar) || function (mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n};\r\nvar __importDefault = (this && this.__importDefault) || function (mod) {\r\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\r\n};\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.parseDartJson = void 0;\r\nconst core = __importStar(require(\"@actions/core\"));\r\nconst get_report_1 = __importDefault(require(\"../../report/get-report\"));\r\nconst file_utils_1 = require(\"../../utils/file-utils\");\r\nconst markdown_utils_1 = require(\"../../utils/markdown-utils\");\r\nconst dart_json_types_1 = require(\"./dart-json-types\");\r\nconst test_results_1 = require(\"../../report/test-results\");\r\nclass TestRun {\r\n constructor(path, suites, success, time) {\r\n this.path = path;\r\n this.suites = suites;\r\n this.success = success;\r\n this.time = time;\r\n }\r\n}\r\nclass TestSuite {\r\n constructor(suite) {\r\n this.suite = suite;\r\n this.groups = {};\r\n }\r\n}\r\nclass TestGroup {\r\n constructor(group) {\r\n this.group = group;\r\n this.tests = [];\r\n }\r\n}\r\nclass TestCase {\r\n constructor(testStart) {\r\n this.testStart = testStart;\r\n this.groupId = testStart.test.groupIDs[testStart.test.groupIDs.length - 1];\r\n }\r\n get result() {\r\n var _a, _b, _c, _d;\r\n if ((_a = this.testDone) === null || _a === void 0 ? 
void 0 : _a.skipped) {\r\n return 'skipped';\r\n }\r\n if (((_b = this.testDone) === null || _b === void 0 ? void 0 : _b.result) === 'success') {\r\n return 'success';\r\n }\r\n if (((_c = this.testDone) === null || _c === void 0 ? void 0 : _c.result) === 'error' || ((_d = this.testDone) === null || _d === void 0 ? void 0 : _d.result) === 'failure') {\r\n return 'failed';\r\n }\r\n return undefined;\r\n }\r\n get time() {\r\n return this.testDone !== undefined ? this.testDone.time - this.testStart.time : 0;\r\n }\r\n}\r\nasync function parseDartJson(files, options) {\r\n const testRuns = files.map(f => getTestRun(f.path, f.content));\r\n const testRunsResults = testRuns.map(getTestRunResult);\r\n const success = testRuns.every(tr => tr.success);\r\n const icon = success ? markdown_utils_1.Icon.success : markdown_utils_1.Icon.fail;\r\n return {\r\n success,\r\n output: {\r\n title: `${options.name.trim()} ${icon}`,\r\n summary: get_report_1.default(testRunsResults),\r\n annotations: options.annotations ? getAnnotations(testRuns, options.workDir, options.trackedFiles) : undefined\r\n }\r\n };\r\n}\r\nexports.parseDartJson = parseDartJson;\r\nfunction getTestRun(path, content) {\r\n core.info(`Parsing content of '${path}'`);\r\n const lines = content.split(/\\n\\r?/g);\r\n const events = lines\r\n .map((str, i) => {\r\n if (str.trim() === '') {\r\n return null;\r\n }\r\n try {\r\n return JSON.parse(str);\r\n }\r\n catch (e) {\r\n const col = e.columnNumber !== undefined ? `:${e.columnNumber}` : '';\r\n new Error(`Invalid JSON at ${path}:${i + 1}${col}\\n\\n${e}`);\r\n }\r\n })\r\n .filter(evt => evt != null);\r\n let success = false;\r\n let totalTime = 0;\r\n const suites = {};\r\n const tests = {};\r\n for (const evt of events) {\r\n if (dart_json_types_1.isSuiteEvent(evt)) {\r\n suites[evt.suite.id] = new TestSuite(evt.suite);\r\n }\r\n else if (dart_json_types_1.isGroupEvent(evt)) {\r\n suites[evt.group.suiteID].groups[evt.group.id] = new TestGroup(evt.group);\r\n }\r\n else if (dart_json_types_1.isTestStartEvent(evt) && evt.test.url !== null) {\r\n const test = new TestCase(evt);\r\n const suite = suites[evt.test.suiteID];\r\n const group = suite.groups[evt.test.groupIDs[evt.test.groupIDs.length - 1]];\r\n group.tests.push(test);\r\n tests[evt.test.id] = test;\r\n }\r\n else if (dart_json_types_1.isTestDoneEvent(evt) && !evt.hidden) {\r\n tests[evt.testID].testDone = evt;\r\n }\r\n else if (dart_json_types_1.isErrorEvent(evt)) {\r\n tests[evt.testID].error = evt;\r\n }\r\n else if (dart_json_types_1.isDoneEvent(evt)) {\r\n success = evt.success;\r\n totalTime = evt.time;\r\n }\r\n }\r\n return new TestRun(path, Object.values(suites), success, totalTime);\r\n}\r\nfunction getTestRunResult(tr) {\r\n const suites = tr.suites.map(s => {\r\n return new test_results_1.TestSuiteResult(s.suite.path, getGroups(s));\r\n });\r\n return new test_results_1.TestRunResult(tr.path, suites, tr.time);\r\n}\r\nfunction getGroups(suite) {\r\n const groups = Object.values(suite.groups).filter(grp => grp.tests.length > 0);\r\n groups.sort((a, b) => { var _a, _b; return ((_a = a.group.line) !== null && _a !== void 0 ? _a : 0) - ((_b = b.group.line) !== null && _b !== void 0 ? _b : 0); });\r\n return groups.map(group => {\r\n group.tests.sort((a, b) => { var _a, _b; return ((_a = a.testStart.test.line) !== null && _a !== void 0 ? _a : 0) - ((_b = b.testStart.test.line) !== null && _b !== void 0 ? 
_b : 0); });\r\n const tests = group.tests.map(t => new test_results_1.TestCaseResult(t.testStart.test.name, t.result, t.time));\r\n return new test_results_1.TestGroupResult(group.group.name, tests);\r\n });\r\n}\r\nfunction getAnnotations(testRuns, workDir, trackedFiles) {\r\n const annotations = [];\r\n for (const tr of testRuns) {\r\n for (const suite of tr.suites) {\r\n for (const group of Object.values(suite.groups)) {\r\n for (const test of group.tests) {\r\n if (test.error) {\r\n const err = getAnnotation(test, suite, workDir, trackedFiles);\r\n if (err !== null) {\r\n annotations.push(err);\r\n }\r\n }\r\n }\r\n }\r\n }\r\n }\r\n return annotations;\r\n}\r\nfunction getAnnotation(test, testSuite, workDir, trackedFiles) {\r\n var _a, _b, _c, _d, _e, _f;\r\n const stack = (_b = (_a = test.error) === null || _a === void 0 ? void 0 : _a.stackTrace) !== null && _b !== void 0 ? _b : '';\r\n let src = exceptionThrowSource(stack, trackedFiles);\r\n if (src === null) {\r\n const file = getRelativePathFromUrl((_c = test.testStart.test.url) !== null && _c !== void 0 ? _c : '', workDir);\r\n if (!trackedFiles.includes(file)) {\r\n return null;\r\n }\r\n src = {\r\n file,\r\n line: (_d = test.testStart.test.line) !== null && _d !== void 0 ? _d : 0\r\n };\r\n }\r\n return {\r\n annotation_level: 'failure',\r\n start_line: src.line,\r\n end_line: src.line,\r\n path: src.file,\r\n message: `${markdown_utils_1.fixEol((_e = test.error) === null || _e === void 0 ? void 0 : _e.error)}\\n\\n${markdown_utils_1.fixEol((_f = test.error) === null || _f === void 0 ? void 0 : _f.stackTrace)}`,\r\n title: `[${testSuite.suite.path}] ${test.testStart.test.name}`\r\n };\r\n}\r\nfunction exceptionThrowSource(ex, trackedFiles) {\r\n // imports from package which is tested are listed in stack traces as 'package:xyz/' which maps to relative path 'lib/'\r\n const packageRe = /^package:[a-zA-z0-9_$]+\\//;\r\n const lines = ex.split(/\\r?\\n/).map(str => str.replace(packageRe, 'lib/'));\r\n // regexp to extract file path and line number from stack trace\r\n const re = /^(.*)\\s+(\\d+):\\d+\\s+/;\r\n for (const str of lines) {\r\n const match = str.match(re);\r\n if (match !== null) {\r\n const [_, fileStr, lineStr] = match;\r\n const file = file_utils_1.normalizeFilePath(fileStr);\r\n if (trackedFiles.includes(file)) {\r\n const line = parseInt(lineStr);\r\n return { file, line };\r\n }\r\n }\r\n }\r\n return null;\r\n}\r\nfunction getRelativePathFromUrl(file, workdir) {\r\n const prefix = 'file:///';\r\n if (file.startsWith(prefix)) {\r\n file = file.substr(prefix.length);\r\n }\r\n if (file.startsWith(workdir)) {\r\n file = file.substr(workdir.length);\r\n }\r\n return file;\r\n}\r\n","\"use strict\";\r\n/// reflects documentation at https://github.com/dart-lang/test/blob/master/pkgs/test/doc/json_reporter.md\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.isDoneEvent = exports.isErrorEvent = exports.isTestDoneEvent = exports.isTestStartEvent = exports.isGroupEvent = exports.isSuiteEvent = void 0;\r\nfunction isSuiteEvent(event) {\r\n return event.type === 'suite';\r\n}\r\nexports.isSuiteEvent = isSuiteEvent;\r\nfunction isGroupEvent(event) {\r\n return event.type === 'group';\r\n}\r\nexports.isGroupEvent = isGroupEvent;\r\nfunction isTestStartEvent(event) {\r\n return event.type === 'testStart';\r\n}\r\nexports.isTestStartEvent = isTestStartEvent;\r\nfunction isTestDoneEvent(event) {\r\n return event.type === 'testDone';\r\n}\r\nexports.isTestDoneEvent = isTestDoneEvent;\r\nfunction 
isErrorEvent(event) {\r\n return event.type === 'error';\r\n}\r\nexports.isErrorEvent = isErrorEvent;\r\nfunction isDoneEvent(event) {\r\n return event.type === 'done';\r\n}\r\nexports.isDoneEvent = isDoneEvent;\r\n","\"use strict\";\r\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n}) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n}));\r\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n}) : function(o, v) {\r\n o[\"default\"] = v;\r\n});\r\nvar __importStar = (this && this.__importStar) || function (mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n};\r\nvar __importDefault = (this && this.__importDefault) || function (mod) {\r\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\r\n};\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.exceptionThrowSource = exports.parseDotnetTrx = void 0;\r\nconst core = __importStar(require(\"@actions/core\"));\r\nconst xml2js_1 = require(\"xml2js\");\r\nconst file_utils_1 = require(\"../../utils/file-utils\");\r\nconst xml_utils_1 = require(\"../../utils/xml-utils\");\r\nconst markdown_utils_1 = require(\"../../utils/markdown-utils\");\r\nconst test_results_1 = require(\"../../report/test-results\");\r\nconst get_report_1 = __importDefault(require(\"../../report/get-report\"));\r\nclass TestClass {\r\n constructor(name) {\r\n this.name = name;\r\n this.tests = [];\r\n }\r\n}\r\nclass Test {\r\n constructor(name, outcome, duration, error) {\r\n this.name = name;\r\n this.outcome = outcome;\r\n this.duration = duration;\r\n this.error = error;\r\n }\r\n get result() {\r\n switch (this.outcome) {\r\n case 'Passed':\r\n return 'success';\r\n case 'NotExecuted':\r\n return 'skipped';\r\n case 'Failed':\r\n return 'failed';\r\n }\r\n }\r\n}\r\nasync function parseDotnetTrx(files, options) {\r\n const testRuns = [];\r\n const testClasses = [];\r\n for (const file of files) {\r\n const trx = await getTrxReport(file);\r\n const tc = getTestClasses(trx);\r\n const tr = getTestRunResult(file.path, trx, tc);\r\n testRuns.push(tr);\r\n testClasses.push(...tc);\r\n }\r\n const success = testRuns.every(tr => tr.result === 'success');\r\n const icon = success ? markdown_utils_1.Icon.success : markdown_utils_1.Icon.fail;\r\n return {\r\n success,\r\n output: {\r\n title: `${options.name.trim()} ${icon}`,\r\n summary: get_report_1.default(testRuns),\r\n annotations: options.annotations ? 
getAnnotations(testClasses, options.workDir, options.trackedFiles) : undefined\r\n }\r\n };\r\n}\r\nexports.parseDotnetTrx = parseDotnetTrx;\r\nasync function getTrxReport(file) {\r\n core.info(`Parsing content of '${file.path}'`);\r\n try {\r\n return (await xml2js_1.parseStringPromise(file.content, {\r\n attrValueProcessors: [xml_utils_1.parseAttribute]\r\n }));\r\n }\r\n catch (e) {\r\n throw new Error(`Invalid XML at ${file.path}\\n\\n${e}`);\r\n }\r\n}\r\nfunction getTestRunResult(path, trx, testClasses) {\r\n const times = trx.TestRun.Times[0].$;\r\n const totalTime = times.finish.getTime() - times.start.getTime();\r\n const suites = testClasses.map(tc => {\r\n const tests = tc.tests.map(t => new test_results_1.TestCaseResult(t.name, t.result, t.duration));\r\n const group = new test_results_1.TestGroupResult(null, tests);\r\n return new test_results_1.TestSuiteResult(tc.name, [group]);\r\n });\r\n return new test_results_1.TestRunResult(path, suites, totalTime);\r\n}\r\nfunction getTestClasses(trx) {\r\n var _a;\r\n const unitTests = {};\r\n for (const td of trx.TestRun.TestDefinitions) {\r\n for (const ut of td.UnitTest) {\r\n unitTests[ut.$.id] = ut.TestMethod[0];\r\n }\r\n }\r\n const unitTestsResults = trx.TestRun.Results.flatMap(r => r.UnitTestResult).flatMap(unitTestResult => ({\r\n unitTestResult,\r\n testMethod: unitTests[unitTestResult.$.testId]\r\n }));\r\n const testClasses = {};\r\n for (const r of unitTestsResults) {\r\n let tc = testClasses[r.testMethod.$.className];\r\n if (tc === undefined) {\r\n tc = new TestClass(r.testMethod.$.className);\r\n testClasses[tc.name] = tc;\r\n }\r\n const output = r.unitTestResult.Output;\r\n const error = (output === null || output === void 0 ? void 0 : output.length) > 0 && ((_a = output[0].ErrorInfo) === null || _a === void 0 ? void 0 : _a.length) > 0 ? output[0].ErrorInfo[0] : undefined;\r\n const test = new Test(r.testMethod.$.name, r.unitTestResult.$.outcome, r.unitTestResult.$.duration, error);\r\n tc.tests.push(test);\r\n }\r\n const result = Object.values(testClasses);\r\n result.sort((a, b) => a.name.localeCompare(b.name));\r\n for (const tc of result) {\r\n tc.tests.sort((a, b) => a.name.localeCompare(b.name));\r\n }\r\n return result;\r\n}\r\nfunction getAnnotations(testClasses, workDir, trackedFiles) {\r\n const annotations = [];\r\n for (const tc of testClasses) {\r\n for (const t of tc.tests) {\r\n if (t.error) {\r\n const src = exceptionThrowSource(t.error.StackTrace[0], workDir, trackedFiles);\r\n if (src === null) {\r\n continue;\r\n }\r\n annotations.push({\r\n annotation_level: 'failure',\r\n start_line: src.line,\r\n end_line: src.line,\r\n path: src.file,\r\n message: markdown_utils_1.fixEol(t.error.Message[0]),\r\n title: `[${tc.name}] ${t.name}`\r\n });\r\n }\r\n }\r\n }\r\n return annotations;\r\n}\r\nfunction exceptionThrowSource(ex, workDir, trackedFiles) {\r\n const lines = ex.split(/\\r*\\n/);\r\n const re = / in (.+):line (\\d+)$/;\r\n for (const str of lines) {\r\n const match = str.match(re);\r\n if (match !== null) {\r\n const [_, fileStr, lineStr] = match;\r\n const filePath = file_utils_1.normalizeFilePath(fileStr);\r\n const file = filePath.startsWith(workDir) ? filePath.substr(workDir.length) : filePath;\r\n if (trackedFiles.includes(file)) {\r\n const line = parseInt(lineStr);\r\n return { file, line };\r\n }\r\n }\r\n }\r\n return null;\r\n}\r\nexports.exceptionThrowSource = exceptionThrowSource;\r\n","\"use strict\";\r\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n}) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n}));\r\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n}) : function(o, v) {\r\n o[\"default\"] = v;\r\n});\r\nvar __importStar = (this && this.__importStar) || function (mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n};\r\nvar __importDefault = (this && this.__importDefault) || function (mod) {\r\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\r\n};\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.exceptionThrowSource = exports.parseJestJunit = void 0;\r\nconst core = __importStar(require(\"@actions/core\"));\r\nconst xml2js_1 = require(\"xml2js\");\r\nconst markdown_utils_1 = require(\"../../utils/markdown-utils\");\r\nconst file_utils_1 = require(\"../../utils/file-utils\");\r\nconst xml_utils_1 = require(\"../../utils/xml-utils\");\r\nconst test_results_1 = require(\"../../report/test-results\");\r\nconst get_report_1 = __importDefault(require(\"../../report/get-report\"));\r\nasync function parseJestJunit(files, options) {\r\n const junit = [];\r\n const testRuns = [];\r\n for (const file of files) {\r\n const ju = await getJunitReport(file);\r\n const tr = getTestRunResult(file.path, ju);\r\n junit.push(ju);\r\n testRuns.push(tr);\r\n }\r\n const success = testRuns.every(tr => tr.result === 'success');\r\n const icon = success ? markdown_utils_1.Icon.success : markdown_utils_1.Icon.fail;\r\n return {\r\n success,\r\n output: {\r\n title: `${options.name.trim()} ${icon}`,\r\n summary: get_report_1.default(testRuns),\r\n annotations: options.annotations ? 
getAnnotations(junit, options.workDir, options.trackedFiles) : undefined\r\n }\r\n };\r\n}\r\nexports.parseJestJunit = parseJestJunit;\r\nasync function getJunitReport(file) {\r\n core.info(`Parsing content of '${file.path}'`);\r\n try {\r\n return (await xml2js_1.parseStringPromise(file.content, {\r\n attrValueProcessors: [xml_utils_1.parseAttribute]\r\n }));\r\n }\r\n catch (e) {\r\n throw new Error(`Invalid XML at ${file.path}\\n\\n${e}`);\r\n }\r\n}\r\nfunction getTestRunResult(path, junit) {\r\n const suites = junit.testsuites.testsuite.map(ts => {\r\n const name = ts.$.name.trim();\r\n const time = ts.$.time * 1000;\r\n const sr = new test_results_1.TestSuiteResult(name, getGroups(ts), time);\r\n return sr;\r\n });\r\n const time = junit.testsuites.$.time * 1000;\r\n return new test_results_1.TestRunResult(path, suites, time);\r\n}\r\nfunction getGroups(suite) {\r\n const groups = [];\r\n for (const tc of suite.testcase) {\r\n let grp = groups.find(g => g.describe === tc.$.classname);\r\n if (grp === undefined) {\r\n grp = { describe: tc.$.classname, tests: [] };\r\n groups.push(grp);\r\n }\r\n grp.tests.push(tc);\r\n }\r\n return groups.map(grp => {\r\n const tests = grp.tests.map(tc => {\r\n const name = tc.$.name.trim();\r\n const result = getTestCaseResult(tc);\r\n const time = tc.$.time * 1000;\r\n return new test_results_1.TestCaseResult(name, result, time);\r\n });\r\n return new test_results_1.TestGroupResult(grp.describe, tests);\r\n });\r\n}\r\nfunction getTestCaseResult(test) {\r\n if (test.failure)\r\n return 'failed';\r\n if (test.skipped)\r\n return 'skipped';\r\n return 'success';\r\n}\r\nfunction getAnnotations(junitReports, workDir, trackedFiles) {\r\n const annotations = [];\r\n for (const junit of junitReports) {\r\n for (const suite of junit.testsuites.testsuite) {\r\n for (const tc of suite.testcase) {\r\n if (!tc.failure) {\r\n continue;\r\n }\r\n for (const ex of tc.failure) {\r\n const src = exceptionThrowSource(ex, workDir, trackedFiles);\r\n if (src === null) {\r\n continue;\r\n }\r\n annotations.push({\r\n annotation_level: 'failure',\r\n start_line: src.line,\r\n end_line: src.line,\r\n path: src.file,\r\n message: markdown_utils_1.fixEol(ex),\r\n title: `[${suite.$.name}] ${tc.$.name.trim()}`\r\n });\r\n }\r\n }\r\n }\r\n }\r\n return annotations;\r\n}\r\nfunction exceptionThrowSource(ex, workDir, trackedFiles) {\r\n const lines = ex.split(/\\r?\\n/);\r\n const re = /\\((.*):(\\d+):(\\d+)\\)$/;\r\n for (const str of lines) {\r\n const match = str.match(re);\r\n if (match !== null) {\r\n const [_, fileStr, lineStr, colStr] = match;\r\n const filePath = file_utils_1.normalizeFilePath(fileStr);\r\n const file = filePath.startsWith(workDir) ? filePath.substr(workDir.length) : filePath;\r\n if (trackedFiles.includes(file)) {\r\n const line = parseInt(lineStr);\r\n const column = parseInt(colStr);\r\n return { file, line, column };\r\n }\r\n }\r\n }\r\n return null;\r\n}\r\nexports.exceptionThrowSource = exceptionThrowSource;\r\n","\"use strict\";\r\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n}) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n}));\r\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n}) : function(o, v) {\r\n o[\"default\"] = v;\r\n});\r\nvar __importStar = (this && this.__importStar) || function (mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n};\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst core = __importStar(require(\"@actions/core\"));\r\nconst markdown_utils_1 = require(\"../utils/markdown-utils\");\r\nconst slugger_1 = require(\"../utils/slugger\");\r\nfunction getReport(results) {\r\n const runsSummary = results.map(getRunSummary).join('\\n\\n');\r\n const suites = results\r\n .flatMap(tr => tr.suites)\r\n .map((ts, i) => getSuiteSummary(ts, i))\r\n .join('\\n');\r\n const suitesSection = `# Test Suites\\n\\n${suites}`;\r\n return [runsSummary, suitesSection].join('\\n\\n');\r\n}\r\nexports.default = getReport;\r\nfunction getRunSummary(tr) {\r\n core.info('Generating check run summary');\r\n const time = `${(tr.time / 1000).toFixed(3)}s`;\r\n const headingLine1 = `### ${tr.path}`;\r\n const headingLine2 = `**${tr.tests}** tests were completed in **${time}** with **${tr.passed}** passed, **${tr.skipped}** skipped and **${tr.failed}** failed.`;\r\n const suitesSummary = tr.suites.map((s, i) => {\r\n const icon = getResultIcon(s.result);\r\n const tsTime = `${s.time}ms`;\r\n const tsName = s.name;\r\n const tsAddr = makeSuiteSlug(i, tsName).link;\r\n const tsNameLink = markdown_utils_1.link(tsName, tsAddr);\r\n return [icon, tsNameLink, s.tests, tsTime, s.passed, s.skipped, s.failed];\r\n });\r\n const summary = markdown_utils_1.table(['Result', 'Suite', 'Tests', 'Time', `Passed ${markdown_utils_1.Icon.success}`, `Skipped ${markdown_utils_1.Icon.skip}`, `Failed ${markdown_utils_1.Icon.fail}`], [markdown_utils_1.Align.Center, markdown_utils_1.Align.Left, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right], ...suitesSummary);\r\n return [headingLine1, headingLine2, summary].join('\\n\\n');\r\n}\r\nfunction getSuiteSummary(ts, index) {\r\n const icon = getResultIcon(ts.result);\r\n const content = ts.groups\r\n .map(grp => {\r\n const header = grp.name ? 
`### ${grp.name}\\n\\n` : '';\r\n const tests = markdown_utils_1.table(['Result', 'Test', 'Time'], [markdown_utils_1.Align.Center, markdown_utils_1.Align.Left, markdown_utils_1.Align.Right], ...grp.tests.map(tc => {\r\n const name = tc.name;\r\n const time = `${tc.time}ms`;\r\n const result = getResultIcon(tc.result);\r\n return [result, name, time];\r\n }));\r\n return `${header}${tests}\\n`;\r\n })\r\n .join('\\n');\r\n const tsName = ts.name;\r\n const tsSlug = makeSuiteSlug(index, tsName);\r\n const tsNameLink = `${tsName}`;\r\n return `## ${tsNameLink} ${icon}\\n\\n${content}`;\r\n}\r\nfunction makeSuiteSlug(index, name) {\r\n // use \"ts-$index-\" as prefix to avoid slug conflicts after escaping the paths\r\n return slugger_1.slug(`ts-${index}-${name}`);\r\n}\r\nfunction getResultIcon(result) {\r\n switch (result) {\r\n case 'success':\r\n return markdown_utils_1.Icon.success;\r\n case 'skipped':\r\n return markdown_utils_1.Icon.skip;\r\n case 'failed':\r\n return markdown_utils_1.Icon.fail;\r\n default:\r\n return '';\r\n }\r\n}\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.TestCaseResult = exports.TestGroupResult = exports.TestSuiteResult = exports.TestRunResult = void 0;\r\nclass TestRunResult {\r\n constructor(path, suites, totalTime) {\r\n this.path = path;\r\n this.suites = suites;\r\n this.totalTime = totalTime;\r\n }\r\n get tests() {\r\n return this.suites.reduce((sum, g) => sum + g.tests, 0);\r\n }\r\n get passed() {\r\n return this.suites.reduce((sum, g) => sum + g.passed, 0);\r\n }\r\n get failed() {\r\n return this.suites.reduce((sum, g) => sum + g.failed, 0);\r\n }\r\n get skipped() {\r\n return this.suites.reduce((sum, g) => sum + g.skipped, 0);\r\n }\r\n get time() {\r\n var _a;\r\n return (_a = this.totalTime) !== null && _a !== void 0 ? _a : this.suites.reduce((sum, g) => sum + g.time, 0);\r\n }\r\n get result() {\r\n return this.suites.some(t => t.result === 'failed') ? 'failed' : 'success';\r\n }\r\n}\r\nexports.TestRunResult = TestRunResult;\r\nclass TestSuiteResult {\r\n constructor(name, groups, totalTime) {\r\n this.name = name;\r\n this.groups = groups;\r\n this.totalTime = totalTime;\r\n }\r\n get tests() {\r\n return this.groups.reduce((sum, g) => sum + g.tests.length, 0);\r\n }\r\n get passed() {\r\n return this.groups.reduce((sum, g) => sum + g.passed, 0);\r\n }\r\n get failed() {\r\n return this.groups.reduce((sum, g) => sum + g.failed, 0);\r\n }\r\n get skipped() {\r\n return this.groups.reduce((sum, g) => sum + g.skipped, 0);\r\n }\r\n get time() {\r\n var _a;\r\n return (_a = this.totalTime) !== null && _a !== void 0 ? _a : this.groups.reduce((sum, g) => sum + g.time, 0);\r\n }\r\n get result() {\r\n return this.groups.some(t => t.result === 'failed') ? 'failed' : 'success';\r\n }\r\n}\r\nexports.TestSuiteResult = TestSuiteResult;\r\nclass TestGroupResult {\r\n constructor(name, tests) {\r\n this.name = name;\r\n this.tests = tests;\r\n }\r\n get passed() {\r\n return this.tests.reduce((sum, t) => (t.result === 'success' ? sum + 1 : sum), 0);\r\n }\r\n get failed() {\r\n return this.tests.reduce((sum, t) => (t.result === 'failed' ? sum + 1 : sum), 0);\r\n }\r\n get skipped() {\r\n return this.tests.reduce((sum, t) => (t.result === 'skipped' ? sum + 1 : sum), 0);\r\n }\r\n get time() {\r\n return this.tests.reduce((sum, t) => sum + t.time, 0);\r\n }\r\n get result() {\r\n return this.tests.some(t => t.result === 'failed') ? 
'failed' : 'success';\r\n }\r\n}\r\nexports.TestGroupResult = TestGroupResult;\r\nclass TestCaseResult {\r\n constructor(name, result, time) {\r\n this.name = name;\r\n this.result = result;\r\n this.time = time;\r\n }\r\n}\r\nexports.TestCaseResult = TestCaseResult;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst exec_1 = require(\"@actions/exec\");\r\n// Wraps original exec() function\r\n// Returns exit code and whole stdout/stderr\r\nasync function exec(commandLine, args, options) {\r\n options = options || {};\r\n let stdout = '';\r\n let stderr = '';\r\n options.listeners = {\r\n stdout: (data) => (stdout += data.toString()),\r\n stderr: (data) => (stderr += data.toString())\r\n };\r\n const code = await exec_1.exec(commandLine, args, options);\r\n return { code, stdout, stderr };\r\n}\r\nexports.default = exec;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.normalizeFilePath = exports.normalizeDirPath = void 0;\r\nfunction normalizeDirPath(path, addTrailingSlash) {\r\n if (!path) {\r\n return path;\r\n }\r\n path = normalizeFilePath(path);\r\n if (addTrailingSlash && !path.endsWith('/')) {\r\n path += '/';\r\n }\r\n return path;\r\n}\r\nexports.normalizeDirPath = normalizeDirPath;\r\nfunction normalizeFilePath(path) {\r\n if (!path) {\r\n return path;\r\n }\r\n return path.trim().replace(/\\\\/g, '/');\r\n}\r\nexports.normalizeFilePath = normalizeFilePath;\r\n","\"use strict\";\r\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n}) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n}));\r\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n}) : function(o, v) {\r\n o[\"default\"] = v;\r\n});\r\nvar __importStar = (this && this.__importStar) || function (mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n};\r\nvar __importDefault = (this && this.__importDefault) || function (mod) {\r\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\r\n};\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.listFiles = void 0;\r\nconst core = __importStar(require(\"@actions/core\"));\r\nconst exec_1 = __importDefault(require(\"./exec\"));\r\nasync function listFiles() {\r\n core.startGroup('Listing all files tracked by git');\r\n let output = '';\r\n try {\r\n output = (await exec_1.default('git', ['ls-files', '-z'])).stdout;\r\n }\r\n finally {\r\n fixStdOutNullTermination();\r\n core.endGroup();\r\n }\r\n return output.split('\\u0000').filter(s => s.length > 0);\r\n}\r\nexports.listFiles = listFiles;\r\nfunction fixStdOutNullTermination() {\r\n // Previous command uses NULL as delimiters and output is printed to stdout.\r\n // We have to make sure next thing written to stdout will start on new line.\r\n // Otherwise things like ::set-output wouldn't work.\r\n core.info('');\r\n}\r\n","\"use strict\";\r\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n}) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n}));\r\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n}) : function(o, v) {\r\n o[\"default\"] = v;\r\n});\r\nvar __importStar = (this && this.__importStar) || function (mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n};\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.getCheckRunSha = void 0;\r\nconst github = __importStar(require(\"@actions/github\"));\r\nfunction getCheckRunSha() {\r\n if (github.context.payload.pull_request) {\r\n const pr = github.context.payload.pull_request;\r\n return pr.head.sha;\r\n }\r\n return github.context.sha;\r\n}\r\nexports.getCheckRunSha = getCheckRunSha;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.fixEol = exports.tableEscape = exports.table = exports.link = exports.details = exports.Icon = exports.Align = void 0;\r\nvar Align;\r\n(function (Align) {\r\n Align[\"Left\"] = \":---\";\r\n Align[\"Center\"] = \":---:\";\r\n Align[\"Right\"] = \"---:\";\r\n Align[\"None\"] = \"---\";\r\n})(Align = exports.Align || (exports.Align = {}));\r\nexports.Icon = {\r\n skip: '✖️',\r\n success: '✔️',\r\n fail: '❌' // ':x:'\r\n};\r\nfunction details(summary, content) {\r\n return `${summary}
${content} `;\r\n}\r\nexports.details = details;\r\nfunction link(title, address) {\r\n return `[${title}](${address})`;\r\n}\r\nexports.link = link;\r\nfunction table(headers, align, ...rows) {\r\n const headerRow = `| ${headers.map(tableEscape).join(' | ')} |`;\r\n const alignRow = `| ${align.join(' | ')} |`;\r\n const contentRows = rows.map(row => `| ${row.map(tableEscape).join(' | ')} |`).join('\\n');\r\n return [headerRow, alignRow, contentRows].join('\\n');\r\n}\r\nexports.table = table;\r\nfunction tableEscape(content) {\r\n return content.toString().replace('|', '\\\\|');\r\n}\r\nexports.tableEscape = tableEscape;\r\nfunction fixEol(text) {\r\n var _a;\r\n return (_a = text === null || text === void 0 ? void 0 : text.replace(/\\r/g, '')) !== null && _a !== void 0 ? _a : '';\r\n}\r\nexports.fixEol = fixEol;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.slug = void 0;\r\n// Returns HTML element id and href link usable as manual anchor links\r\n// This is needed because Github in check run summary doesn't automatically\r\n// create links out of headings as it normally does for other markdown content\r\nfunction slug(name) {\r\n const slugId = name\r\n .trim()\r\n .replace(/_/g, '')\r\n .replace(/[./\\\\]/g, '-')\r\n .replace(/[^\\w-]/g, '');\r\n const id = `user-content-${slugId}`;\r\n const link = `#${slugId}`;\r\n return { id, link };\r\n}\r\nexports.slug = slug;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.parseAttribute = void 0;\r\nconst isoDateRe = /^\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d\\.\\d+([+-][0-2]\\d:[0-5]\\d|Z)$/;\r\n// matches dotnet duration: 00:00:00.0010000\r\nconst durationRe = /^(\\d\\d):(\\d\\d):(\\d\\d\\.\\d+)$/;\r\nfunction parseAttribute(str) {\r\n if (str === '' || str === undefined) {\r\n return str;\r\n }\r\n if (isoDateRe.test(str)) {\r\n return new Date(str);\r\n }\r\n const durationMatch = str.match(durationRe);\r\n if (durationMatch !== null) {\r\n const [_, hourStr, minStr, secStr] = durationMatch;\r\n return (parseInt(hourStr) * 3600 + parseInt(minStr) * 60 + parseFloat(secStr)) * 1000;\r\n }\r\n const num = parseFloat(str);\r\n if (isNaN(num)) {\r\n return str;\r\n }\r\n return num;\r\n}\r\nexports.parseAttribute = parseAttribute;\r\n","\"use strict\";\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\n/**\n * Commands\n *\n * Command Format:\n * ::name key=value,key=value::message\n *\n * Examples:\n * ::warning::This is the message\n * ::set-env name=MY_VAR::some value\n */\nfunction issueCommand(command, properties, message) {\n const cmd = new Command(command, properties, message);\n process.stdout.write(cmd.toString() + os.EOL);\n}\nexports.issueCommand = issueCommand;\nfunction issue(name, message = '') {\n issueCommand(name, {}, message);\n}\nexports.issue = issue;\nconst CMD_STRING = '::';\nclass Command {\n constructor(command, properties, message) {\n if (!command) {\n command = 'missing.command';\n }\n this.command = command;\n this.properties = properties;\n this.message = message;\n }\n toString() {\n let cmdStr = CMD_STRING + this.command;\n if 
(this.properties && Object.keys(this.properties).length > 0) {\n cmdStr += ' ';\n let first = true;\n for (const key in this.properties) {\n if (this.properties.hasOwnProperty(key)) {\n const val = this.properties[key];\n if (val) {\n if (first) {\n first = false;\n }\n else {\n cmdStr += ',';\n }\n cmdStr += `${key}=${escapeProperty(val)}`;\n }\n }\n }\n }\n cmdStr += `${CMD_STRING}${escapeData(this.message)}`;\n return cmdStr;\n }\n}\nfunction escapeData(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A');\n}\nfunction escapeProperty(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A')\n .replace(/:/g, '%3A')\n .replace(/,/g, '%2C');\n}\n//# sourceMappingURL=command.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst command_1 = require(\"./command\");\nconst file_command_1 = require(\"./file-command\");\nconst utils_1 = require(\"./utils\");\nconst os = __importStar(require(\"os\"));\nconst path = __importStar(require(\"path\"));\n/**\n * The code to exit an action\n */\nvar ExitCode;\n(function (ExitCode) {\n /**\n * A code indicating that the action was successful\n */\n ExitCode[ExitCode[\"Success\"] = 0] = \"Success\";\n /**\n * A code indicating that the action was a failure\n */\n ExitCode[ExitCode[\"Failure\"] = 1] = \"Failure\";\n})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));\n//-----------------------------------------------------------------------\n// Variables\n//-----------------------------------------------------------------------\n/**\n * Sets env variable for this action and future actions in the job\n * @param name the name of the variable to set\n * @param val the value of the variable. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction exportVariable(name, val) {\n const convertedVal = utils_1.toCommandValue(val);\n process.env[name] = convertedVal;\n const filePath = process.env['GITHUB_ENV'] || '';\n if (filePath) {\n const delimiter = '_GitHubActionsFileCommandDelimeter_';\n const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`;\n file_command_1.issueCommand('ENV', commandValue);\n }\n else {\n command_1.issueCommand('set-env', { name }, convertedVal);\n }\n}\nexports.exportVariable = exportVariable;\n/**\n * Registers a secret which will get masked from logs\n * @param secret value of the secret\n */\nfunction setSecret(secret) {\n command_1.issueCommand('add-mask', {}, secret);\n}\nexports.setSecret = setSecret;\n/**\n * Prepends inputPath to the PATH (for this action and future actions)\n * @param inputPath\n */\nfunction addPath(inputPath) {\n const filePath = process.env['GITHUB_PATH'] || '';\n if (filePath) {\n file_command_1.issueCommand('PATH', inputPath);\n }\n else {\n command_1.issueCommand('add-path', {}, inputPath);\n }\n process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;\n}\nexports.addPath = addPath;\n/**\n * Gets the value of an input. The value is also trimmed.\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns string\n */\nfunction getInput(name, options) {\n const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';\n if (options && options.required && !val) {\n throw new Error(`Input required and not supplied: ${name}`);\n }\n return val.trim();\n}\nexports.getInput = getInput;\n/**\n * Sets the value of an output.\n *\n * @param name name of the output to set\n * @param value value to store. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction setOutput(name, value) {\n command_1.issueCommand('set-output', { name }, value);\n}\nexports.setOutput = setOutput;\n/**\n * Enables or disables the echoing of commands into stdout for the rest of the step.\n * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.\n *\n */\nfunction setCommandEcho(enabled) {\n command_1.issue('echo', enabled ? 'on' : 'off');\n}\nexports.setCommandEcho = setCommandEcho;\n//-----------------------------------------------------------------------\n// Results\n//-----------------------------------------------------------------------\n/**\n * Sets the action status to failed.\n * When the action exits it will be with an exit code of 1\n * @param message add error issue message\n */\nfunction setFailed(message) {\n process.exitCode = ExitCode.Failure;\n error(message);\n}\nexports.setFailed = setFailed;\n//-----------------------------------------------------------------------\n// Logging Commands\n//-----------------------------------------------------------------------\n/**\n * Gets whether Actions Step Debug is on or not\n */\nfunction isDebug() {\n return process.env['RUNNER_DEBUG'] === '1';\n}\nexports.isDebug = isDebug;\n/**\n * Writes debug message to user log\n * @param message debug message\n */\nfunction debug(message) {\n command_1.issueCommand('debug', {}, message);\n}\nexports.debug = debug;\n/**\n * Adds an error issue\n * @param message error issue message. 
Errors will be converted to string via toString()\n */\nfunction error(message) {\n command_1.issue('error', message instanceof Error ? message.toString() : message);\n}\nexports.error = error;\n/**\n * Adds an warning issue\n * @param message warning issue message. Errors will be converted to string via toString()\n */\nfunction warning(message) {\n command_1.issue('warning', message instanceof Error ? message.toString() : message);\n}\nexports.warning = warning;\n/**\n * Writes info to log with console.log.\n * @param message info message\n */\nfunction info(message) {\n process.stdout.write(message + os.EOL);\n}\nexports.info = info;\n/**\n * Begin an output group.\n *\n * Output until the next `groupEnd` will be foldable in this group\n *\n * @param name The name of the output group\n */\nfunction startGroup(name) {\n command_1.issue('group', name);\n}\nexports.startGroup = startGroup;\n/**\n * End an output group.\n */\nfunction endGroup() {\n command_1.issue('endgroup');\n}\nexports.endGroup = endGroup;\n/**\n * Wrap an asynchronous function call in a group.\n *\n * Returns the same type as the function itself.\n *\n * @param name The name of the group\n * @param fn The function to wrap in the group\n */\nfunction group(name, fn) {\n return __awaiter(this, void 0, void 0, function* () {\n startGroup(name);\n let result;\n try {\n result = yield fn();\n }\n finally {\n endGroup();\n }\n return result;\n });\n}\nexports.group = group;\n//-----------------------------------------------------------------------\n// Wrapper action state\n//-----------------------------------------------------------------------\n/**\n * Saves state for current action, the state can only be retrieved by this action's post job execution.\n *\n * @param name name of the state to store\n * @param value value to store. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction saveState(name, value) {\n command_1.issueCommand('save-state', { name }, value);\n}\nexports.saveState = saveState;\n/**\n * Gets the value of an state set by this action's main execution.\n *\n * @param name name of the state to get\n * @returns string\n */\nfunction getState(name) {\n return process.env[`STATE_${name}`] || '';\n}\nexports.getState = getState;\n//# sourceMappingURL=core.js.map","\"use strict\";\n// For internal use, subject to change.\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nconst fs = __importStar(require(\"fs\"));\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\nfunction issueCommand(command, message) {\n const filePath = process.env[`GITHUB_${command}`];\n if (!filePath) {\n throw new Error(`Unable to find environment variable for file command ${command}`);\n }\n if (!fs.existsSync(filePath)) {\n throw new Error(`Missing file at path: ${filePath}`);\n }\n fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {\n encoding: 'utf8'\n });\n}\nexports.issueCommand = issueCommand;\n//# sourceMappingURL=file-command.js.map","\"use strict\";\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n/**\n * Sanitizes an input into a string so it can be passed into issueCommand safely\n * @param input input to sanitize into a string\n */\nfunction toCommandValue(input) {\n if (input === null || input === undefined) {\n return '';\n }\n else if (typeof input === 'string' || input instanceof String) {\n return input;\n }\n return JSON.stringify(input);\n}\nexports.toCommandValue = toCommandValue;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst tr = __importStar(require(\"./toolrunner\"));\n/**\n * Exec a command.\n * Output will be streamed to the live console.\n * Returns promise with return code\n *\n * @param commandLine command to execute (can include additional args). 
Must be correctly escaped.\n * @param args optional arguments for tool. Escaping is handled by the lib.\n * @param options optional exec options. See ExecOptions\n * @returns Promise exit code\n */\nfunction exec(commandLine, args, options) {\n return __awaiter(this, void 0, void 0, function* () {\n const commandArgs = tr.argStringToArray(commandLine);\n if (commandArgs.length === 0) {\n throw new Error(`Parameter 'commandLine' cannot be null or empty.`);\n }\n // Path to tool to execute should be first arg\n const toolPath = commandArgs[0];\n args = commandArgs.slice(1).concat(args || []);\n const runner = new tr.ToolRunner(toolPath, args, options);\n return runner.exec();\n });\n}\nexports.exec = exec;\n//# sourceMappingURL=exec.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst os = __importStar(require(\"os\"));\nconst events = __importStar(require(\"events\"));\nconst child = __importStar(require(\"child_process\"));\nconst path = __importStar(require(\"path\"));\nconst io = __importStar(require(\"@actions/io\"));\nconst ioUtil = __importStar(require(\"@actions/io/lib/io-util\"));\n/* eslint-disable @typescript-eslint/unbound-method */\nconst IS_WINDOWS = process.platform === 'win32';\n/*\n * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way.\n */\nclass ToolRunner extends events.EventEmitter {\n constructor(toolPath, args, options) {\n super();\n if (!toolPath) {\n throw new Error(\"Parameter 'toolPath' cannot be null or empty.\");\n }\n this.toolPath = toolPath;\n this.args = args || [];\n this.options = options || {};\n }\n _debug(message) {\n if (this.options.listeners && this.options.listeners.debug) {\n this.options.listeners.debug(message);\n }\n }\n _getCommandString(options, noPrefix) {\n const toolPath = this._getSpawnFileName();\n const args = this._getSpawnArgs(options);\n let cmd = noPrefix ? 
'' : '[command]'; // omit prefix when piped to a second tool\n if (IS_WINDOWS) {\n // Windows + cmd file\n if (this._isCmdFile()) {\n cmd += toolPath;\n for (const a of args) {\n cmd += ` ${a}`;\n }\n }\n // Windows + verbatim\n else if (options.windowsVerbatimArguments) {\n cmd += `\"${toolPath}\"`;\n for (const a of args) {\n cmd += ` ${a}`;\n }\n }\n // Windows (regular)\n else {\n cmd += this._windowsQuoteCmdArg(toolPath);\n for (const a of args) {\n cmd += ` ${this._windowsQuoteCmdArg(a)}`;\n }\n }\n }\n else {\n // OSX/Linux - this can likely be improved with some form of quoting.\n // creating processes on Unix is fundamentally different than Windows.\n // on Unix, execvp() takes an arg array.\n cmd += toolPath;\n for (const a of args) {\n cmd += ` ${a}`;\n }\n }\n return cmd;\n }\n _processLineBuffer(data, strBuffer, onLine) {\n try {\n let s = strBuffer + data.toString();\n let n = s.indexOf(os.EOL);\n while (n > -1) {\n const line = s.substring(0, n);\n onLine(line);\n // the rest of the string ...\n s = s.substring(n + os.EOL.length);\n n = s.indexOf(os.EOL);\n }\n strBuffer = s;\n }\n catch (err) {\n // streaming lines to console is best effort. Don't fail a build.\n this._debug(`error processing line. Failed with error ${err}`);\n }\n }\n _getSpawnFileName() {\n if (IS_WINDOWS) {\n if (this._isCmdFile()) {\n return process.env['COMSPEC'] || 'cmd.exe';\n }\n }\n return this.toolPath;\n }\n _getSpawnArgs(options) {\n if (IS_WINDOWS) {\n if (this._isCmdFile()) {\n let argline = `/D /S /C \"${this._windowsQuoteCmdArg(this.toolPath)}`;\n for (const a of this.args) {\n argline += ' ';\n argline += options.windowsVerbatimArguments\n ? a\n : this._windowsQuoteCmdArg(a);\n }\n argline += '\"';\n return [argline];\n }\n }\n return this.args;\n }\n _endsWith(str, end) {\n return str.endsWith(end);\n }\n _isCmdFile() {\n const upperToolPath = this.toolPath.toUpperCase();\n return (this._endsWith(upperToolPath, '.CMD') ||\n this._endsWith(upperToolPath, '.BAT'));\n }\n _windowsQuoteCmdArg(arg) {\n // for .exe, apply the normal quoting rules that libuv applies\n if (!this._isCmdFile()) {\n return this._uvQuoteCmdArg(arg);\n }\n // otherwise apply quoting rules specific to the cmd.exe command line parser.\n // the libuv rules are generic and are not designed specifically for cmd.exe\n // command line parser.\n //\n // for a detailed description of the cmd.exe command line parser, refer to\n // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912\n // need quotes for empty arg\n if (!arg) {\n return '\"\"';\n }\n // determine whether the arg needs to be quoted\n const cmdSpecialChars = [\n ' ',\n '\\t',\n '&',\n '(',\n ')',\n '[',\n ']',\n '{',\n '}',\n '^',\n '=',\n ';',\n '!',\n \"'\",\n '+',\n ',',\n '`',\n '~',\n '|',\n '<',\n '>',\n '\"'\n ];\n let needsQuotes = false;\n for (const char of arg) {\n if (cmdSpecialChars.some(x => x === char)) {\n needsQuotes = true;\n break;\n }\n }\n // short-circuit if quotes not needed\n if (!needsQuotes) {\n return arg;\n }\n // the following quoting rules are very similar to the rules that by libuv applies.\n //\n // 1) wrap the string in quotes\n //\n // 2) double-up quotes - i.e. \" => \"\"\n //\n // this is different from the libuv quoting rules. 
libuv replaces \" with \\\", which unfortunately\n // doesn't work well with a cmd.exe command line.\n //\n // note, replacing \" with \"\" also works well if the arg is passed to a downstream .NET console app.\n // for example, the command line:\n // foo.exe \"myarg:\"\"my val\"\"\"\n // is parsed by a .NET console app into an arg array:\n // [ \"myarg:\\\"my val\\\"\" ]\n // which is the same end result when applying libuv quoting rules. although the actual\n // command line from libuv quoting rules would look like:\n // foo.exe \"myarg:\\\"my val\\\"\"\n //\n // 3) double-up slashes that precede a quote,\n // e.g. hello \\world => \"hello \\world\"\n // hello\\\"world => \"hello\\\\\"\"world\"\n // hello\\\\\"world => \"hello\\\\\\\\\"\"world\"\n // hello world\\ => \"hello world\\\\\"\n //\n // technically this is not required for a cmd.exe command line, or the batch argument parser.\n // the reasons for including this as a .cmd quoting rule are:\n //\n // a) this is optimized for the scenario where the argument is passed from the .cmd file to an\n // external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule.\n //\n // b) it's what we've been doing previously (by deferring to node default behavior) and we\n // haven't heard any complaints about that aspect.\n //\n // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be\n // escaped when used on the command line directly - even though within a .cmd file % can be escaped\n // by using %%.\n //\n // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts\n // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing.\n //\n // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would\n // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the\n // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args\n // to an external program.\n //\n // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file.\n // % can be escaped within a .cmd file.\n let reverse = '\"';\n let quoteHit = true;\n for (let i = arg.length; i > 0; i--) {\n // walk the string in reverse\n reverse += arg[i - 1];\n if (quoteHit && arg[i - 1] === '\\\\') {\n reverse += '\\\\'; // double the slash\n }\n else if (arg[i - 1] === '\"') {\n quoteHit = true;\n reverse += '\"'; // double the quote\n }\n else {\n quoteHit = false;\n }\n }\n reverse += '\"';\n return reverse\n .split('')\n .reverse()\n .join('');\n }\n _uvQuoteCmdArg(arg) {\n // Tool runner wraps child_process.spawn() and needs to apply the same quoting as\n // Node in certain cases where the undocumented spawn option windowsVerbatimArguments\n // is used.\n //\n // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV,\n // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details),\n // pasting copyright notice from Node within this function:\n //\n // Copyright Joyent, Inc. and other Node contributors. 
All rights reserved.\n //\n // Permission is hereby granted, free of charge, to any person obtaining a copy\n // of this software and associated documentation files (the \"Software\"), to\n // deal in the Software without restriction, including without limitation the\n // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n // sell copies of the Software, and to permit persons to whom the Software is\n // furnished to do so, subject to the following conditions:\n //\n // The above copyright notice and this permission notice shall be included in\n // all copies or substantial portions of the Software.\n //\n // THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n // IN THE SOFTWARE.\n if (!arg) {\n // Need double quotation for empty argument\n return '\"\"';\n }\n if (!arg.includes(' ') && !arg.includes('\\t') && !arg.includes('\"')) {\n // No quotation needed\n return arg;\n }\n if (!arg.includes('\"') && !arg.includes('\\\\')) {\n // No embedded double quotes or backslashes, so I can just wrap\n // quote marks around the whole thing.\n return `\"${arg}\"`;\n }\n // Expected input/output:\n // input : hello\"world\n // output: \"hello\\\"world\"\n // input : hello\"\"world\n // output: \"hello\\\"\\\"world\"\n // input : hello\\world\n // output: hello\\world\n // input : hello\\\\world\n // output: hello\\\\world\n // input : hello\\\"world\n // output: \"hello\\\\\\\"world\"\n // input : hello\\\\\"world\n // output: \"hello\\\\\\\\\\\"world\"\n // input : hello world\\\n // output: \"hello world\\\\\" - note the comment in libuv actually reads \"hello world\\\"\n // but it appears the comment is wrong, it should be \"hello world\\\\\"\n let reverse = '\"';\n let quoteHit = true;\n for (let i = arg.length; i > 0; i--) {\n // walk the string in reverse\n reverse += arg[i - 1];\n if (quoteHit && arg[i - 1] === '\\\\') {\n reverse += '\\\\';\n }\n else if (arg[i - 1] === '\"') {\n quoteHit = true;\n reverse += '\\\\';\n }\n else {\n quoteHit = false;\n }\n }\n reverse += '\"';\n return reverse\n .split('')\n .reverse()\n .join('');\n }\n _cloneExecOptions(options) {\n options = options || {};\n const result = {\n cwd: options.cwd || process.cwd(),\n env: options.env || process.env,\n silent: options.silent || false,\n windowsVerbatimArguments: options.windowsVerbatimArguments || false,\n failOnStdErr: options.failOnStdErr || false,\n ignoreReturnCode: options.ignoreReturnCode || false,\n delay: options.delay || 10000\n };\n result.outStream = options.outStream || process.stdout;\n result.errStream = options.errStream || process.stderr;\n return result;\n }\n _getSpawnOptions(options, toolPath) {\n options = options || {};\n const result = {};\n result.cwd = options.cwd;\n result.env = options.env;\n result['windowsVerbatimArguments'] =\n options.windowsVerbatimArguments || this._isCmdFile();\n if (options.windowsVerbatimArguments) {\n result.argv0 = `\"${toolPath}\"`;\n }\n return result;\n }\n /**\n * Exec a tool.\n * Output will be streamed to the live console.\n * Returns promise with return code\n *\n * @param tool path to tool to exec\n * @param 
options optional exec options. See ExecOptions\n * @returns number\n */\n exec() {\n return __awaiter(this, void 0, void 0, function* () {\n // root the tool path if it is unrooted and contains relative pathing\n if (!ioUtil.isRooted(this.toolPath) &&\n (this.toolPath.includes('/') ||\n (IS_WINDOWS && this.toolPath.includes('\\\\')))) {\n // prefer options.cwd if it is specified, however options.cwd may also need to be rooted\n this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath);\n }\n // if the tool is only a file name, then resolve it from the PATH\n // otherwise verify it exists (add extension on Windows if necessary)\n this.toolPath = yield io.which(this.toolPath, true);\n return new Promise((resolve, reject) => {\n this._debug(`exec tool: ${this.toolPath}`);\n this._debug('arguments:');\n for (const arg of this.args) {\n this._debug(` ${arg}`);\n }\n const optionsNonNull = this._cloneExecOptions(this.options);\n if (!optionsNonNull.silent && optionsNonNull.outStream) {\n optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL);\n }\n const state = new ExecState(optionsNonNull, this.toolPath);\n state.on('debug', (message) => {\n this._debug(message);\n });\n const fileName = this._getSpawnFileName();\n const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName));\n const stdbuffer = '';\n if (cp.stdout) {\n cp.stdout.on('data', (data) => {\n if (this.options.listeners && this.options.listeners.stdout) {\n this.options.listeners.stdout(data);\n }\n if (!optionsNonNull.silent && optionsNonNull.outStream) {\n optionsNonNull.outStream.write(data);\n }\n this._processLineBuffer(data, stdbuffer, (line) => {\n if (this.options.listeners && this.options.listeners.stdline) {\n this.options.listeners.stdline(line);\n }\n });\n });\n }\n const errbuffer = '';\n if (cp.stderr) {\n cp.stderr.on('data', (data) => {\n state.processStderr = true;\n if (this.options.listeners && this.options.listeners.stderr) {\n this.options.listeners.stderr(data);\n }\n if (!optionsNonNull.silent &&\n optionsNonNull.errStream &&\n optionsNonNull.outStream) {\n const s = optionsNonNull.failOnStdErr\n ? 
optionsNonNull.errStream\n : optionsNonNull.outStream;\n s.write(data);\n }\n this._processLineBuffer(data, errbuffer, (line) => {\n if (this.options.listeners && this.options.listeners.errline) {\n this.options.listeners.errline(line);\n }\n });\n });\n }\n cp.on('error', (err) => {\n state.processError = err.message;\n state.processExited = true;\n state.processClosed = true;\n state.CheckComplete();\n });\n cp.on('exit', (code) => {\n state.processExitCode = code;\n state.processExited = true;\n this._debug(`Exit code ${code} received from tool '${this.toolPath}'`);\n state.CheckComplete();\n });\n cp.on('close', (code) => {\n state.processExitCode = code;\n state.processExited = true;\n state.processClosed = true;\n this._debug(`STDIO streams have closed for tool '${this.toolPath}'`);\n state.CheckComplete();\n });\n state.on('done', (error, exitCode) => {\n if (stdbuffer.length > 0) {\n this.emit('stdline', stdbuffer);\n }\n if (errbuffer.length > 0) {\n this.emit('errline', errbuffer);\n }\n cp.removeAllListeners();\n if (error) {\n reject(error);\n }\n else {\n resolve(exitCode);\n }\n });\n if (this.options.input) {\n if (!cp.stdin) {\n throw new Error('child process missing stdin');\n }\n cp.stdin.end(this.options.input);\n }\n });\n });\n }\n}\nexports.ToolRunner = ToolRunner;\n/**\n * Convert an arg string to an array of args. Handles escaping\n *\n * @param argString string of arguments\n * @returns string[] array of arguments\n */\nfunction argStringToArray(argString) {\n const args = [];\n let inQuotes = false;\n let escaped = false;\n let arg = '';\n function append(c) {\n // we only escape double quotes.\n if (escaped && c !== '\"') {\n arg += '\\\\';\n }\n arg += c;\n escaped = false;\n }\n for (let i = 0; i < argString.length; i++) {\n const c = argString.charAt(i);\n if (c === '\"') {\n if (!escaped) {\n inQuotes = !inQuotes;\n }\n else {\n append(c);\n }\n continue;\n }\n if (c === '\\\\' && escaped) {\n append(c);\n continue;\n }\n if (c === '\\\\' && inQuotes) {\n escaped = true;\n continue;\n }\n if (c === ' ' && !inQuotes) {\n if (arg.length > 0) {\n args.push(arg);\n arg = '';\n }\n continue;\n }\n append(c);\n }\n if (arg.length > 0) {\n args.push(arg.trim());\n }\n return args;\n}\nexports.argStringToArray = argStringToArray;\nclass ExecState extends events.EventEmitter {\n constructor(options, toolPath) {\n super();\n this.processClosed = false; // tracks whether the process has exited and stdio is closed\n this.processError = '';\n this.processExitCode = 0;\n this.processExited = false; // tracks whether the process has exited\n this.processStderr = false; // tracks whether stderr was written to\n this.delay = 10000; // 10 seconds\n this.done = false;\n this.timeout = null;\n if (!toolPath) {\n throw new Error('toolPath must not be empty');\n }\n this.options = options;\n this.toolPath = toolPath;\n if (options.delay) {\n this.delay = options.delay;\n }\n }\n CheckComplete() {\n if (this.done) {\n return;\n }\n if (this.processClosed) {\n this._setResult();\n }\n else if (this.processExited) {\n this.timeout = setTimeout(ExecState.HandleTimeout, this.delay, this);\n }\n }\n _debug(message) {\n this.emit('debug', message);\n }\n _setResult() {\n // determine whether there is an error\n let error;\n if (this.processExited) {\n if (this.processError) {\n error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. 
Error: ${this.processError}`);\n }\n else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) {\n error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`);\n }\n else if (this.processStderr && this.options.failOnStdErr) {\n error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`);\n }\n }\n // clear the timeout\n if (this.timeout) {\n clearTimeout(this.timeout);\n this.timeout = null;\n }\n this.done = true;\n this.emit('done', error, this.processExitCode);\n }\n static HandleTimeout(state) {\n if (state.done) {\n return;\n }\n if (!state.processClosed && state.processExited) {\n const message = `The STDIO streams did not close within ${state.delay /\n 1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`;\n state._debug(message);\n }\n state._setResult();\n }\n}\n//# sourceMappingURL=toolrunner.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Context = void 0;\nconst fs_1 = require(\"fs\");\nconst os_1 = require(\"os\");\nclass Context {\n /**\n * Hydrate the context from the environment\n */\n constructor() {\n this.payload = {};\n if (process.env.GITHUB_EVENT_PATH) {\n if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) {\n this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));\n }\n else {\n const path = process.env.GITHUB_EVENT_PATH;\n process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`);\n }\n }\n this.eventName = process.env.GITHUB_EVENT_NAME;\n this.sha = process.env.GITHUB_SHA;\n this.ref = process.env.GITHUB_REF;\n this.workflow = process.env.GITHUB_WORKFLOW;\n this.action = process.env.GITHUB_ACTION;\n this.actor = process.env.GITHUB_ACTOR;\n this.job = process.env.GITHUB_JOB;\n this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10);\n this.runId = parseInt(process.env.GITHUB_RUN_ID, 10);\n }\n get issue() {\n const payload = this.payload;\n return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number });\n }\n get repo() {\n if (process.env.GITHUB_REPOSITORY) {\n const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');\n return { owner, repo };\n }\n if (this.payload.repository) {\n return {\n owner: this.payload.repository.owner.login,\n repo: this.payload.repository.name\n };\n }\n throw new Error(\"context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'\");\n }\n}\nexports.Context = Context;\n//# sourceMappingURL=context.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getOctokit = exports.context = void 0;\nconst Context = __importStar(require(\"./context\"));\nconst utils_1 = require(\"./utils\");\nexports.context = new Context.Context();\n/**\n * Returns a hydrated octokit ready to use for GitHub Actions\n *\n * @param token the repo PAT or GITHUB_TOKEN\n * @param options other options to set\n */\nfunction getOctokit(token, options) {\n return new utils_1.GitHub(utils_1.getOctokitOptions(token, options));\n}\nexports.getOctokit = getOctokit;\n//# sourceMappingURL=github.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0;\nconst httpClient = __importStar(require(\"@actions/http-client\"));\nfunction getAuthString(token, options) {\n if (!token && !options.auth) {\n throw new Error('Parameter token or opts.auth is required');\n }\n else if (token && options.auth) {\n throw new Error('Parameters token and opts.auth may not both be specified');\n }\n return typeof options.auth === 'string' ? options.auth : `token ${token}`;\n}\nexports.getAuthString = getAuthString;\nfunction getProxyAgent(destinationUrl) {\n const hc = new httpClient.HttpClient();\n return hc.getAgent(destinationUrl);\n}\nexports.getProxyAgent = getProxyAgent;\nfunction getApiBaseUrl() {\n return process.env['GITHUB_API_URL'] || 'https://api.github.com';\n}\nexports.getApiBaseUrl = getApiBaseUrl;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getOctokitOptions = exports.GitHub = exports.context = void 0;\nconst Context = __importStar(require(\"./context\"));\nconst Utils = __importStar(require(\"./internal/utils\"));\n// octokit + plugins\nconst core_1 = require(\"@octokit/core\");\nconst plugin_rest_endpoint_methods_1 = require(\"@octokit/plugin-rest-endpoint-methods\");\nconst plugin_paginate_rest_1 = require(\"@octokit/plugin-paginate-rest\");\nexports.context = new Context.Context();\nconst baseUrl = Utils.getApiBaseUrl();\nconst defaults = {\n baseUrl,\n request: {\n agent: Utils.getProxyAgent(baseUrl)\n }\n};\nexports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(defaults);\n/**\n * Convience function to correctly format Octokit Options to pass into the constructor.\n *\n * @param token the repo PAT or GITHUB_TOKEN\n * @param options other options to set\n */\nfunction getOctokitOptions(token, options) {\n const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller\n // Auth\n const auth = Utils.getAuthString(token, opts);\n if (auth) {\n opts.auth = auth;\n }\n return opts;\n}\nexports.getOctokitOptions = getOctokitOptions;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst http = require(\"http\");\nconst https = require(\"https\");\nconst pm = require(\"./proxy\");\nlet tunnel;\nvar HttpCodes;\n(function (HttpCodes) {\n HttpCodes[HttpCodes[\"OK\"] = 200] = \"OK\";\n HttpCodes[HttpCodes[\"MultipleChoices\"] = 300] = \"MultipleChoices\";\n HttpCodes[HttpCodes[\"MovedPermanently\"] = 301] = \"MovedPermanently\";\n HttpCodes[HttpCodes[\"ResourceMoved\"] = 302] = \"ResourceMoved\";\n HttpCodes[HttpCodes[\"SeeOther\"] = 303] = \"SeeOther\";\n HttpCodes[HttpCodes[\"NotModified\"] = 304] = \"NotModified\";\n HttpCodes[HttpCodes[\"UseProxy\"] = 305] = \"UseProxy\";\n HttpCodes[HttpCodes[\"SwitchProxy\"] = 306] = \"SwitchProxy\";\n HttpCodes[HttpCodes[\"TemporaryRedirect\"] = 307] = \"TemporaryRedirect\";\n HttpCodes[HttpCodes[\"PermanentRedirect\"] = 308] = \"PermanentRedirect\";\n HttpCodes[HttpCodes[\"BadRequest\"] = 400] = \"BadRequest\";\n HttpCodes[HttpCodes[\"Unauthorized\"] = 401] = \"Unauthorized\";\n HttpCodes[HttpCodes[\"PaymentRequired\"] = 402] = \"PaymentRequired\";\n HttpCodes[HttpCodes[\"Forbidden\"] = 403] = \"Forbidden\";\n HttpCodes[HttpCodes[\"NotFound\"] = 404] = \"NotFound\";\n HttpCodes[HttpCodes[\"MethodNotAllowed\"] = 405] = \"MethodNotAllowed\";\n HttpCodes[HttpCodes[\"NotAcceptable\"] = 406] = \"NotAcceptable\";\n HttpCodes[HttpCodes[\"ProxyAuthenticationRequired\"] = 407] = \"ProxyAuthenticationRequired\";\n HttpCodes[HttpCodes[\"RequestTimeout\"] = 408] = \"RequestTimeout\";\n HttpCodes[HttpCodes[\"Conflict\"] = 409] = \"Conflict\";\n HttpCodes[HttpCodes[\"Gone\"] = 410] = \"Gone\";\n HttpCodes[HttpCodes[\"TooManyRequests\"] = 429] = \"TooManyRequests\";\n 
HttpCodes[HttpCodes[\"InternalServerError\"] = 500] = \"InternalServerError\";\n HttpCodes[HttpCodes[\"NotImplemented\"] = 501] = \"NotImplemented\";\n HttpCodes[HttpCodes[\"BadGateway\"] = 502] = \"BadGateway\";\n HttpCodes[HttpCodes[\"ServiceUnavailable\"] = 503] = \"ServiceUnavailable\";\n HttpCodes[HttpCodes[\"GatewayTimeout\"] = 504] = \"GatewayTimeout\";\n})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));\nvar Headers;\n(function (Headers) {\n Headers[\"Accept\"] = \"accept\";\n Headers[\"ContentType\"] = \"content-type\";\n})(Headers = exports.Headers || (exports.Headers = {}));\nvar MediaTypes;\n(function (MediaTypes) {\n MediaTypes[\"ApplicationJson\"] = \"application/json\";\n})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));\n/**\n * Returns the proxy URL, depending upon the supplied url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com\n */\nfunction getProxyUrl(serverUrl) {\n let proxyUrl = pm.getProxyUrl(new URL(serverUrl));\n return proxyUrl ? proxyUrl.href : '';\n}\nexports.getProxyUrl = getProxyUrl;\nconst HttpRedirectCodes = [\n HttpCodes.MovedPermanently,\n HttpCodes.ResourceMoved,\n HttpCodes.SeeOther,\n HttpCodes.TemporaryRedirect,\n HttpCodes.PermanentRedirect\n];\nconst HttpResponseRetryCodes = [\n HttpCodes.BadGateway,\n HttpCodes.ServiceUnavailable,\n HttpCodes.GatewayTimeout\n];\nconst RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];\nconst ExponentialBackoffCeiling = 10;\nconst ExponentialBackoffTimeSlice = 5;\nclass HttpClientError extends Error {\n constructor(message, statusCode) {\n super(message);\n this.name = 'HttpClientError';\n this.statusCode = statusCode;\n Object.setPrototypeOf(this, HttpClientError.prototype);\n }\n}\nexports.HttpClientError = HttpClientError;\nclass HttpClientResponse {\n constructor(message) {\n this.message = message;\n }\n readBody() {\n return new Promise(async (resolve, reject) => {\n let output = Buffer.alloc(0);\n this.message.on('data', (chunk) => {\n output = Buffer.concat([output, chunk]);\n });\n this.message.on('end', () => {\n resolve(output.toString());\n });\n });\n }\n}\nexports.HttpClientResponse = HttpClientResponse;\nfunction isHttps(requestUrl) {\n let parsedUrl = new URL(requestUrl);\n return parsedUrl.protocol === 'https:';\n}\nexports.isHttps = isHttps;\nclass HttpClient {\n constructor(userAgent, handlers, requestOptions) {\n this._ignoreSslError = false;\n this._allowRedirects = true;\n this._allowRedirectDowngrade = false;\n this._maxRedirects = 50;\n this._allowRetries = false;\n this._maxRetries = 1;\n this._keepAlive = false;\n this._disposed = false;\n this.userAgent = userAgent;\n this.handlers = handlers || [];\n this.requestOptions = requestOptions;\n if (requestOptions) {\n if (requestOptions.ignoreSslError != null) {\n this._ignoreSslError = requestOptions.ignoreSslError;\n }\n this._socketTimeout = requestOptions.socketTimeout;\n if (requestOptions.allowRedirects != null) {\n this._allowRedirects = requestOptions.allowRedirects;\n }\n if (requestOptions.allowRedirectDowngrade != null) {\n this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;\n }\n if (requestOptions.maxRedirects != null) {\n this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);\n }\n if (requestOptions.keepAlive != null) {\n this._keepAlive = requestOptions.keepAlive;\n }\n if (requestOptions.allowRetries != null) {\n this._allowRetries = requestOptions.allowRetries;\n }\n if 
(requestOptions.maxRetries != null) {\n this._maxRetries = requestOptions.maxRetries;\n }\n }\n }\n options(requestUrl, additionalHeaders) {\n return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});\n }\n get(requestUrl, additionalHeaders) {\n return this.request('GET', requestUrl, null, additionalHeaders || {});\n }\n del(requestUrl, additionalHeaders) {\n return this.request('DELETE', requestUrl, null, additionalHeaders || {});\n }\n post(requestUrl, data, additionalHeaders) {\n return this.request('POST', requestUrl, data, additionalHeaders || {});\n }\n patch(requestUrl, data, additionalHeaders) {\n return this.request('PATCH', requestUrl, data, additionalHeaders || {});\n }\n put(requestUrl, data, additionalHeaders) {\n return this.request('PUT', requestUrl, data, additionalHeaders || {});\n }\n head(requestUrl, additionalHeaders) {\n return this.request('HEAD', requestUrl, null, additionalHeaders || {});\n }\n sendStream(verb, requestUrl, stream, additionalHeaders) {\n return this.request(verb, requestUrl, stream, additionalHeaders);\n }\n /**\n * Gets a typed object from an endpoint\n * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise\n */\n async getJson(requestUrl, additionalHeaders = {}) {\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n let res = await this.get(requestUrl, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n }\n async postJson(requestUrl, obj, additionalHeaders = {}) {\n let data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n let res = await this.post(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n }\n async putJson(requestUrl, obj, additionalHeaders = {}) {\n let data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n let res = await this.put(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n }\n async patchJson(requestUrl, obj, additionalHeaders = {}) {\n let data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n let res = await this.patch(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n }\n /**\n * Makes a raw http request.\n * All other methods such as get, post, patch, and request ultimately call this.\n * Prefer get, del, post and patch\n */\n async request(verb, requestUrl, data, headers) {\n if (this._disposed) {\n throw new Error('Client has already been disposed.');\n }\n let parsedUrl = new URL(requestUrl);\n let info = this._prepareRequest(verb, parsedUrl, headers);\n // Only perform retries on reads since writes may not be idempotent.\n let maxTries = 
this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1\n ? this._maxRetries + 1\n : 1;\n let numTries = 0;\n let response;\n while (numTries < maxTries) {\n response = await this.requestRaw(info, data);\n // Check if it's an authentication challenge\n if (response &&\n response.message &&\n response.message.statusCode === HttpCodes.Unauthorized) {\n let authenticationHandler;\n for (let i = 0; i < this.handlers.length; i++) {\n if (this.handlers[i].canHandleAuthentication(response)) {\n authenticationHandler = this.handlers[i];\n break;\n }\n }\n if (authenticationHandler) {\n return authenticationHandler.handleAuthentication(this, info, data);\n }\n else {\n // We have received an unauthorized response but have no handlers to handle it.\n // Let the response return to the caller.\n return response;\n }\n }\n let redirectsRemaining = this._maxRedirects;\n while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 &&\n this._allowRedirects &&\n redirectsRemaining > 0) {\n const redirectUrl = response.message.headers['location'];\n if (!redirectUrl) {\n // if there's no location to redirect to, we won't\n break;\n }\n let parsedRedirectUrl = new URL(redirectUrl);\n if (parsedUrl.protocol == 'https:' &&\n parsedUrl.protocol != parsedRedirectUrl.protocol &&\n !this._allowRedirectDowngrade) {\n throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');\n }\n // we need to finish reading the response before reassigning response\n // which will leak the open socket.\n await response.readBody();\n // strip authorization header if redirected to a different hostname\n if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {\n for (let header in headers) {\n // header names are case insensitive\n if (header.toLowerCase() === 'authorization') {\n delete headers[header];\n }\n }\n }\n // let's make the request with the new redirectUrl\n info = this._prepareRequest(verb, parsedRedirectUrl, headers);\n response = await this.requestRaw(info, data);\n redirectsRemaining--;\n }\n if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) {\n // If not a retry code, return immediately instead of retrying\n return response;\n }\n numTries += 1;\n if (numTries < maxTries) {\n await response.readBody();\n await this._performExponentialBackoff(numTries);\n }\n }\n return response;\n }\n /**\n * Needs to be called if keepAlive is set to true in request options.\n */\n dispose() {\n if (this._agent) {\n this._agent.destroy();\n }\n this._disposed = true;\n }\n /**\n * Raw request.\n * @param info\n * @param data\n */\n requestRaw(info, data) {\n return new Promise((resolve, reject) => {\n let callbackForResult = function (err, res) {\n if (err) {\n reject(err);\n }\n resolve(res);\n };\n this.requestRawWithCallback(info, data, callbackForResult);\n });\n }\n /**\n * Raw request with callback.\n * @param info\n * @param data\n * @param onResult\n */\n requestRawWithCallback(info, data, onResult) {\n let socket;\n if (typeof data === 'string') {\n info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');\n }\n let callbackCalled = false;\n let handleResult = (err, res) => {\n if (!callbackCalled) {\n callbackCalled = true;\n onResult(err, res);\n }\n };\n let req = info.httpModule.request(info.options, (msg) => {\n let res = new HttpClientResponse(msg);\n handleResult(null, res);\n });\n req.on('socket', sock => {\n socket = sock;\n });\n // 
If we ever get disconnected, we want the socket to timeout eventually\n req.setTimeout(this._socketTimeout || 3 * 60000, () => {\n if (socket) {\n socket.end();\n }\n handleResult(new Error('Request timeout: ' + info.options.path), null);\n });\n req.on('error', function (err) {\n // err has statusCode property\n // res should have headers\n handleResult(err, null);\n });\n if (data && typeof data === 'string') {\n req.write(data, 'utf8');\n }\n if (data && typeof data !== 'string') {\n data.on('close', function () {\n req.end();\n });\n data.pipe(req);\n }\n else {\n req.end();\n }\n }\n /**\n * Gets an http agent. This function is useful when you need an http agent that handles\n * routing through a proxy server - depending upon the url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com\n */\n getAgent(serverUrl) {\n let parsedUrl = new URL(serverUrl);\n return this._getAgent(parsedUrl);\n }\n _prepareRequest(method, requestUrl, headers) {\n const info = {};\n info.parsedUrl = requestUrl;\n const usingSsl = info.parsedUrl.protocol === 'https:';\n info.httpModule = usingSsl ? https : http;\n const defaultPort = usingSsl ? 443 : 80;\n info.options = {};\n info.options.host = info.parsedUrl.hostname;\n info.options.port = info.parsedUrl.port\n ? parseInt(info.parsedUrl.port)\n : defaultPort;\n info.options.path =\n (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');\n info.options.method = method;\n info.options.headers = this._mergeHeaders(headers);\n if (this.userAgent != null) {\n info.options.headers['user-agent'] = this.userAgent;\n }\n info.options.agent = this._getAgent(info.parsedUrl);\n // gives handlers an opportunity to participate\n if (this.handlers) {\n this.handlers.forEach(handler => {\n handler.prepareRequest(info.options);\n });\n }\n return info;\n }\n _mergeHeaders(headers) {\n const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});\n if (this.requestOptions && this.requestOptions.headers) {\n return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers));\n }\n return lowercaseKeys(headers || {});\n }\n _getExistingOrDefaultHeader(additionalHeaders, header, _default) {\n const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});\n let clientHeader;\n if (this.requestOptions && this.requestOptions.headers) {\n clientHeader = lowercaseKeys(this.requestOptions.headers)[header];\n }\n return additionalHeaders[header] || clientHeader || _default;\n }\n _getAgent(parsedUrl) {\n let agent;\n let proxyUrl = pm.getProxyUrl(parsedUrl);\n let useProxy = proxyUrl && proxyUrl.hostname;\n if (this._keepAlive && useProxy) {\n agent = this._proxyAgent;\n }\n if (this._keepAlive && !useProxy) {\n agent = this._agent;\n }\n // if agent is already assigned use that agent.\n if (!!agent) {\n return agent;\n }\n const usingSsl = parsedUrl.protocol === 'https:';\n let maxSockets = 100;\n if (!!this.requestOptions) {\n maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;\n }\n if (useProxy) {\n // If using proxy, need tunnel\n if (!tunnel) {\n tunnel = require('tunnel');\n }\n const agentOptions = {\n maxSockets: maxSockets,\n keepAlive: this._keepAlive,\n proxy: {\n proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`,\n host: proxyUrl.hostname,\n port: proxyUrl.port\n }\n };\n let tunnelAgent;\n const overHttps = proxyUrl.protocol === 'https:';\n 
if (usingSsl) {\n tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;\n }\n else {\n tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;\n }\n agent = tunnelAgent(agentOptions);\n this._proxyAgent = agent;\n }\n // if reusing agent across request and tunneling agent isn't assigned create a new agent\n if (this._keepAlive && !agent) {\n const options = { keepAlive: this._keepAlive, maxSockets: maxSockets };\n agent = usingSsl ? new https.Agent(options) : new http.Agent(options);\n this._agent = agent;\n }\n // if not using private agent and tunnel agent isn't setup then use global agent\n if (!agent) {\n agent = usingSsl ? https.globalAgent : http.globalAgent;\n }\n if (usingSsl && this._ignoreSslError) {\n // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process\n // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options\n // we have to cast it to any and change it directly\n agent.options = Object.assign(agent.options || {}, {\n rejectUnauthorized: false\n });\n }\n return agent;\n }\n _performExponentialBackoff(retryNumber) {\n retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);\n const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);\n return new Promise(resolve => setTimeout(() => resolve(), ms));\n }\n static dateTimeDeserializer(key, value) {\n if (typeof value === 'string') {\n let a = new Date(value);\n if (!isNaN(a.valueOf())) {\n return a;\n }\n }\n return value;\n }\n async _processResponse(res, options) {\n return new Promise(async (resolve, reject) => {\n const statusCode = res.message.statusCode;\n const response = {\n statusCode: statusCode,\n result: null,\n headers: {}\n };\n // not found leads to null obj returned\n if (statusCode == HttpCodes.NotFound) {\n resolve(response);\n }\n let obj;\n let contents;\n // get the result from the body\n try {\n contents = await res.readBody();\n if (contents && contents.length > 0) {\n if (options && options.deserializeDates) {\n obj = JSON.parse(contents, HttpClient.dateTimeDeserializer);\n }\n else {\n obj = JSON.parse(contents);\n }\n response.result = obj;\n }\n response.headers = res.message.headers;\n }\n catch (err) {\n // Invalid resource (contents not json); leaving result obj null\n }\n // note that 3xx redirects are handled by the http layer.\n if (statusCode > 299) {\n let msg;\n // if exception/error in body, attempt to get better error\n if (obj && obj.message) {\n msg = obj.message;\n }\n else if (contents && contents.length > 0) {\n // it may be the case that the exception is in the body message as string\n msg = contents;\n }\n else {\n msg = 'Failed request: (' + statusCode + ')';\n }\n let err = new HttpClientError(msg, statusCode);\n err.result = response.result;\n reject(err);\n }\n else {\n resolve(response);\n }\n });\n }\n}\nexports.HttpClient = HttpClient;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nfunction getProxyUrl(reqUrl) {\n let usingSsl = reqUrl.protocol === 'https:';\n let proxyUrl;\n if (checkBypass(reqUrl)) {\n return proxyUrl;\n }\n let proxyVar;\n if (usingSsl) {\n proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY'];\n }\n else {\n proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY'];\n }\n if (proxyVar) {\n proxyUrl = new URL(proxyVar);\n }\n return proxyUrl;\n}\nexports.getProxyUrl = getProxyUrl;\nfunction checkBypass(reqUrl) {\n if (!reqUrl.hostname) {\n return false;\n }\n let noProxy = 
process.env['no_proxy'] || process.env['NO_PROXY'] || '';\n if (!noProxy) {\n return false;\n }\n // Determine the request port\n let reqPort;\n if (reqUrl.port) {\n reqPort = Number(reqUrl.port);\n }\n else if (reqUrl.protocol === 'http:') {\n reqPort = 80;\n }\n else if (reqUrl.protocol === 'https:') {\n reqPort = 443;\n }\n // Format the request hostname and hostname with port\n let upperReqHosts = [reqUrl.hostname.toUpperCase()];\n if (typeof reqPort === 'number') {\n upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);\n }\n // Compare request host against noproxy\n for (let upperNoProxyItem of noProxy\n .split(',')\n .map(x => x.trim().toUpperCase())\n .filter(x => x)) {\n if (upperReqHosts.some(x => x === upperNoProxyItem)) {\n return true;\n }\n }\n return false;\n}\nexports.checkBypass = checkBypass;\n","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar _a;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst assert_1 = require(\"assert\");\nconst fs = require(\"fs\");\nconst path = require(\"path\");\n_a = fs.promises, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink;\nexports.IS_WINDOWS = process.platform === 'win32';\nfunction exists(fsPath) {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n yield exports.stat(fsPath);\n }\n catch (err) {\n if (err.code === 'ENOENT') {\n return false;\n }\n throw err;\n }\n return true;\n });\n}\nexports.exists = exists;\nfunction isDirectory(fsPath, useStat = false) {\n return __awaiter(this, void 0, void 0, function* () {\n const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath);\n return stats.isDirectory();\n });\n}\nexports.isDirectory = isDirectory;\n/**\n * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like:\n * \\, \\hello, \\\\hello\\share, C:, and C:\\hello (and corresponding alternate separator cases).\n */\nfunction isRooted(p) {\n p = normalizeSeparators(p);\n if (!p) {\n throw new Error('isRooted() parameter \"p\" cannot be empty');\n }\n if (exports.IS_WINDOWS) {\n return (p.startsWith('\\\\') || /^[A-Z]:/i.test(p) // e.g. \\ or \\hello or \\\\hello\n ); // e.g. 
C: or C:\\hello\n }\n return p.startsWith('/');\n}\nexports.isRooted = isRooted;\n/**\n * Recursively create a directory at `fsPath`.\n *\n * This implementation is optimistic, meaning it attempts to create the full\n * path first, and backs up the path stack from there.\n *\n * @param fsPath The path to create\n * @param maxDepth The maximum recursion depth\n * @param depth The current recursion depth\n */\nfunction mkdirP(fsPath, maxDepth = 1000, depth = 1) {\n return __awaiter(this, void 0, void 0, function* () {\n assert_1.ok(fsPath, 'a path argument must be provided');\n fsPath = path.resolve(fsPath);\n if (depth >= maxDepth)\n return exports.mkdir(fsPath);\n try {\n yield exports.mkdir(fsPath);\n return;\n }\n catch (err) {\n switch (err.code) {\n case 'ENOENT': {\n yield mkdirP(path.dirname(fsPath), maxDepth, depth + 1);\n yield exports.mkdir(fsPath);\n return;\n }\n default: {\n let stats;\n try {\n stats = yield exports.stat(fsPath);\n }\n catch (err2) {\n throw err;\n }\n if (!stats.isDirectory())\n throw err;\n }\n }\n }\n });\n}\nexports.mkdirP = mkdirP;\n/**\n * Best effort attempt to determine whether a file exists and is executable.\n * @param filePath file path to check\n * @param extensions additional file extensions to try\n * @return if file exists and is executable, returns the file path. otherwise empty string.\n */\nfunction tryGetExecutablePath(filePath, extensions) {\n return __awaiter(this, void 0, void 0, function* () {\n let stats = undefined;\n try {\n // test file exists\n stats = yield exports.stat(filePath);\n }\n catch (err) {\n if (err.code !== 'ENOENT') {\n // eslint-disable-next-line no-console\n console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);\n }\n }\n if (stats && stats.isFile()) {\n if (exports.IS_WINDOWS) {\n // on Windows, test for valid extension\n const upperExt = path.extname(filePath).toUpperCase();\n if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) {\n return filePath;\n }\n }\n else {\n if (isUnixExecutable(stats)) {\n return filePath;\n }\n }\n }\n // try each extension\n const originalFilePath = filePath;\n for (const extension of extensions) {\n filePath = originalFilePath + extension;\n stats = undefined;\n try {\n stats = yield exports.stat(filePath);\n }\n catch (err) {\n if (err.code !== 'ENOENT') {\n // eslint-disable-next-line no-console\n console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);\n }\n }\n if (stats && stats.isFile()) {\n if (exports.IS_WINDOWS) {\n // preserve the case of the actual file (since an extension was appended)\n try {\n const directory = path.dirname(filePath);\n const upperName = path.basename(filePath).toUpperCase();\n for (const actualName of yield exports.readdir(directory)) {\n if (upperName === actualName.toUpperCase()) {\n filePath = path.join(directory, actualName);\n break;\n }\n }\n }\n catch (err) {\n // eslint-disable-next-line no-console\n console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`);\n }\n return filePath;\n }\n else {\n if (isUnixExecutable(stats)) {\n return filePath;\n }\n }\n }\n }\n return '';\n });\n}\nexports.tryGetExecutablePath = tryGetExecutablePath;\nfunction normalizeSeparators(p) {\n p = p || '';\n if (exports.IS_WINDOWS) {\n // convert slashes on Windows\n p = p.replace(/\\//g, '\\\\');\n // remove redundant slashes\n return p.replace(/\\\\\\\\+/g, '\\\\');\n }\n // remove redundant slashes\n 
return p.replace(/\\/\\/+/g, '/');\n}\n// on Mac/Linux, test the execute bit\n// R W X R W X R W X\n// 256 128 64 32 16 8 4 2 1\nfunction isUnixExecutable(stats) {\n return ((stats.mode & 1) > 0 ||\n ((stats.mode & 8) > 0 && stats.gid === process.getgid()) ||\n ((stats.mode & 64) > 0 && stats.uid === process.getuid()));\n}\n//# sourceMappingURL=io-util.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst childProcess = require(\"child_process\");\nconst path = require(\"path\");\nconst util_1 = require(\"util\");\nconst ioUtil = require(\"./io-util\");\nconst exec = util_1.promisify(childProcess.exec);\n/**\n * Copies a file or folder.\n * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js\n *\n * @param source source path\n * @param dest destination path\n * @param options optional. See CopyOptions.\n */\nfunction cp(source, dest, options = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const { force, recursive } = readCopyOptions(options);\n const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null;\n // Dest is an existing file, but not forcing\n if (destStat && destStat.isFile() && !force) {\n return;\n }\n // If dest is an existing directory, should copy inside.\n const newDest = destStat && destStat.isDirectory()\n ? path.join(dest, path.basename(source))\n : dest;\n if (!(yield ioUtil.exists(source))) {\n throw new Error(`no such file or directory: ${source}`);\n }\n const sourceStat = yield ioUtil.stat(source);\n if (sourceStat.isDirectory()) {\n if (!recursive) {\n throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`);\n }\n else {\n yield cpDirRecursive(source, newDest, 0, force);\n }\n }\n else {\n if (path.relative(source, newDest) === '') {\n // a file cannot be copied to itself\n throw new Error(`'${newDest}' and '${source}' are the same file`);\n }\n yield copyFile(source, newDest, force);\n }\n });\n}\nexports.cp = cp;\n/**\n * Moves a path.\n *\n * @param source source path\n * @param dest destination path\n * @param options optional. 
See MoveOptions.\n */\nfunction mv(source, dest, options = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n if (yield ioUtil.exists(dest)) {\n let destExists = true;\n if (yield ioUtil.isDirectory(dest)) {\n // If dest is directory copy src into dest\n dest = path.join(dest, path.basename(source));\n destExists = yield ioUtil.exists(dest);\n }\n if (destExists) {\n if (options.force == null || options.force) {\n yield rmRF(dest);\n }\n else {\n throw new Error('Destination already exists');\n }\n }\n }\n yield mkdirP(path.dirname(dest));\n yield ioUtil.rename(source, dest);\n });\n}\nexports.mv = mv;\n/**\n * Remove a path recursively with force\n *\n * @param inputPath path to remove\n */\nfunction rmRF(inputPath) {\n return __awaiter(this, void 0, void 0, function* () {\n if (ioUtil.IS_WINDOWS) {\n // Node doesn't provide a delete operation, only an unlink function. This means that if the file is being used by another\n // program (e.g. antivirus), it won't be deleted. To address this, we shell out the work to rd/del.\n try {\n if (yield ioUtil.isDirectory(inputPath, true)) {\n yield exec(`rd /s /q \"${inputPath}\"`);\n }\n else {\n yield exec(`del /f /a \"${inputPath}\"`);\n }\n }\n catch (err) {\n // if you try to delete a file that doesn't exist, desired result is achieved\n // other errors are valid\n if (err.code !== 'ENOENT')\n throw err;\n }\n // Shelling out fails to remove a symlink folder with missing source, this unlink catches that\n try {\n yield ioUtil.unlink(inputPath);\n }\n catch (err) {\n // if you try to delete a file that doesn't exist, desired result is achieved\n // other errors are valid\n if (err.code !== 'ENOENT')\n throw err;\n }\n }\n else {\n let isDir = false;\n try {\n isDir = yield ioUtil.isDirectory(inputPath);\n }\n catch (err) {\n // if you try to delete a file that doesn't exist, desired result is achieved\n // other errors are valid\n if (err.code !== 'ENOENT')\n throw err;\n return;\n }\n if (isDir) {\n yield exec(`rm -rf \"${inputPath}\"`);\n }\n else {\n yield ioUtil.unlink(inputPath);\n }\n }\n });\n}\nexports.rmRF = rmRF;\n/**\n * Make a directory. Creates the full path with folders in between\n * Will throw if it fails\n *\n * @param fsPath path to create\n * @returns Promise\n */\nfunction mkdirP(fsPath) {\n return __awaiter(this, void 0, void 0, function* () {\n yield ioUtil.mkdirP(fsPath);\n });\n}\nexports.mkdirP = mkdirP;\n/**\n * Returns path of a tool had the tool actually been invoked. Resolves via paths.\n * If you check and the tool does not exist, it will throw.\n *\n * @param tool name of the tool\n * @param check whether to check if tool exists\n * @returns Promise path to tool\n */\nfunction which(tool, check) {\n return __awaiter(this, void 0, void 0, function* () {\n if (!tool) {\n throw new Error(\"parameter 'tool' is required\");\n }\n // recursive when check=true\n if (check) {\n const result = yield which(tool, false);\n if (!result) {\n if (ioUtil.IS_WINDOWS) {\n throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`);\n }\n else {\n throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. 
Also check the file mode to verify the file is executable.`);\n }\n }\n }\n try {\n // build the list of extensions to try\n const extensions = [];\n if (ioUtil.IS_WINDOWS && process.env.PATHEXT) {\n for (const extension of process.env.PATHEXT.split(path.delimiter)) {\n if (extension) {\n extensions.push(extension);\n }\n }\n }\n // if it's rooted, return it if exists. otherwise return empty.\n if (ioUtil.isRooted(tool)) {\n const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions);\n if (filePath) {\n return filePath;\n }\n return '';\n }\n // if any path separators, return empty\n if (tool.includes('/') || (ioUtil.IS_WINDOWS && tool.includes('\\\\'))) {\n return '';\n }\n // build the list of directories\n //\n // Note, technically \"where\" checks the current directory on Windows. From a toolkit perspective,\n // it feels like we should not do this. Checking the current directory seems like more of a use\n // case of a shell, and the which() function exposed by the toolkit should strive for consistency\n // across platforms.\n const directories = [];\n if (process.env.PATH) {\n for (const p of process.env.PATH.split(path.delimiter)) {\n if (p) {\n directories.push(p);\n }\n }\n }\n // return the first match\n for (const directory of directories) {\n const filePath = yield ioUtil.tryGetExecutablePath(directory + path.sep + tool, extensions);\n if (filePath) {\n return filePath;\n }\n }\n return '';\n }\n catch (err) {\n throw new Error(`which failed with message ${err.message}`);\n }\n });\n}\nexports.which = which;\nfunction readCopyOptions(options) {\n const force = options.force == null ? true : options.force;\n const recursive = Boolean(options.recursive);\n return { force, recursive };\n}\nfunction cpDirRecursive(sourceDir, destDir, currentDepth, force) {\n return __awaiter(this, void 0, void 0, function* () {\n // Ensure there is not a run away recursive copy\n if (currentDepth >= 255)\n return;\n currentDepth++;\n yield mkdirP(destDir);\n const files = yield ioUtil.readdir(sourceDir);\n for (const fileName of files) {\n const srcFile = `${sourceDir}/${fileName}`;\n const destFile = `${destDir}/${fileName}`;\n const srcFileStat = yield ioUtil.lstat(srcFile);\n if (srcFileStat.isDirectory()) {\n // Recurse\n yield cpDirRecursive(srcFile, destFile, currentDepth, force);\n }\n else {\n yield copyFile(srcFile, destFile, force);\n }\n }\n // Change the mode for the newly created directory\n yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode);\n });\n}\n// Buffered file copy\nfunction copyFile(srcFile, destFile, force) {\n return __awaiter(this, void 0, void 0, function* () {\n if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) {\n // unlink/re-link it\n try {\n yield ioUtil.lstat(destFile);\n yield ioUtil.unlink(destFile);\n }\n catch (e) {\n // Try to override file permission\n if (e.code === 'EPERM') {\n yield ioUtil.chmod(destFile, '0666');\n yield ioUtil.unlink(destFile);\n }\n // other errors = it doesn't exist, no work to do\n }\n // Copy over symlink\n const symlinkFull = yield ioUtil.readlink(srcFile);\n yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 
'junction' : null);\n }\n else if (!(yield ioUtil.exists(destFile)) || force) {\n yield ioUtil.copyFile(srcFile, destFile);\n }\n });\n}\n//# sourceMappingURL=io.js.map","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst fs = require(\"fs\");\r\nexports.FILE_SYSTEM_ADAPTER = {\r\n lstat: fs.lstat,\r\n stat: fs.stat,\r\n lstatSync: fs.lstatSync,\r\n statSync: fs.statSync,\r\n readdir: fs.readdir,\r\n readdirSync: fs.readdirSync\r\n};\r\nfunction createFileSystemAdapter(fsMethods) {\r\n if (fsMethods === undefined) {\r\n return exports.FILE_SYSTEM_ADAPTER;\r\n }\r\n return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods);\r\n}\r\nexports.createFileSystemAdapter = createFileSystemAdapter;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst NODE_PROCESS_VERSION_PARTS = process.versions.node.split('.');\r\nconst MAJOR_VERSION = parseInt(NODE_PROCESS_VERSION_PARTS[0], 10);\r\nconst MINOR_VERSION = parseInt(NODE_PROCESS_VERSION_PARTS[1], 10);\r\nconst SUPPORTED_MAJOR_VERSION = 10;\r\nconst SUPPORTED_MINOR_VERSION = 10;\r\nconst IS_MATCHED_BY_MAJOR = MAJOR_VERSION > SUPPORTED_MAJOR_VERSION;\r\nconst IS_MATCHED_BY_MAJOR_AND_MINOR = MAJOR_VERSION === SUPPORTED_MAJOR_VERSION && MINOR_VERSION >= SUPPORTED_MINOR_VERSION;\r\n/**\r\n * IS `true` for Node.js 10.10 and greater.\r\n */\r\nexports.IS_SUPPORT_READDIR_WITH_FILE_TYPES = IS_MATCHED_BY_MAJOR || IS_MATCHED_BY_MAJOR_AND_MINOR;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst async = require(\"./providers/async\");\r\nconst sync = require(\"./providers/sync\");\r\nconst settings_1 = require(\"./settings\");\r\nexports.Settings = settings_1.default;\r\nfunction scandir(path, optionsOrSettingsOrCallback, callback) {\r\n if (typeof optionsOrSettingsOrCallback === 'function') {\r\n return async.read(path, getSettings(), optionsOrSettingsOrCallback);\r\n }\r\n async.read(path, getSettings(optionsOrSettingsOrCallback), callback);\r\n}\r\nexports.scandir = scandir;\r\nfunction scandirSync(path, optionsOrSettings) {\r\n const settings = getSettings(optionsOrSettings);\r\n return sync.read(path, settings);\r\n}\r\nexports.scandirSync = scandirSync;\r\nfunction getSettings(settingsOrOptions = {}) {\r\n if (settingsOrOptions instanceof settings_1.default) {\r\n return settingsOrOptions;\r\n }\r\n return new settings_1.default(settingsOrOptions);\r\n}\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst fsStat = require(\"@nodelib/fs.stat\");\r\nconst rpl = require(\"run-parallel\");\r\nconst constants_1 = require(\"../constants\");\r\nconst utils = require(\"../utils\");\r\nfunction read(directory, settings, callback) {\r\n if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) {\r\n return readdirWithFileTypes(directory, settings, callback);\r\n }\r\n return readdir(directory, settings, callback);\r\n}\r\nexports.read = read;\r\nfunction readdirWithFileTypes(directory, settings, callback) {\r\n settings.fs.readdir(directory, { withFileTypes: true }, (readdirError, dirents) => {\r\n if (readdirError !== null) {\r\n return callFailureCallback(callback, readdirError);\r\n }\r\n const entries = dirents.map((dirent) => ({\r\n dirent,\r\n name: dirent.name,\r\n path: `${directory}${settings.pathSegmentSeparator}${dirent.name}`\r\n }));\r\n if (!settings.followSymbolicLinks) {\r\n return callSuccessCallback(callback, entries);\r\n }\r\n const 
tasks = entries.map((entry) => makeRplTaskEntry(entry, settings));\r\n rpl(tasks, (rplError, rplEntries) => {\r\n if (rplError !== null) {\r\n return callFailureCallback(callback, rplError);\r\n }\r\n callSuccessCallback(callback, rplEntries);\r\n });\r\n });\r\n}\r\nexports.readdirWithFileTypes = readdirWithFileTypes;\r\nfunction makeRplTaskEntry(entry, settings) {\r\n return (done) => {\r\n if (!entry.dirent.isSymbolicLink()) {\r\n return done(null, entry);\r\n }\r\n settings.fs.stat(entry.path, (statError, stats) => {\r\n if (statError !== null) {\r\n if (settings.throwErrorOnBrokenSymbolicLink) {\r\n return done(statError);\r\n }\r\n return done(null, entry);\r\n }\r\n entry.dirent = utils.fs.createDirentFromStats(entry.name, stats);\r\n return done(null, entry);\r\n });\r\n };\r\n}\r\nfunction readdir(directory, settings, callback) {\r\n settings.fs.readdir(directory, (readdirError, names) => {\r\n if (readdirError !== null) {\r\n return callFailureCallback(callback, readdirError);\r\n }\r\n const filepaths = names.map((name) => `${directory}${settings.pathSegmentSeparator}${name}`);\r\n const tasks = filepaths.map((filepath) => {\r\n return (done) => fsStat.stat(filepath, settings.fsStatSettings, done);\r\n });\r\n rpl(tasks, (rplError, results) => {\r\n if (rplError !== null) {\r\n return callFailureCallback(callback, rplError);\r\n }\r\n const entries = [];\r\n names.forEach((name, index) => {\r\n const stats = results[index];\r\n const entry = {\r\n name,\r\n path: filepaths[index],\r\n dirent: utils.fs.createDirentFromStats(name, stats)\r\n };\r\n if (settings.stats) {\r\n entry.stats = stats;\r\n }\r\n entries.push(entry);\r\n });\r\n callSuccessCallback(callback, entries);\r\n });\r\n });\r\n}\r\nexports.readdir = readdir;\r\nfunction callFailureCallback(callback, error) {\r\n callback(error);\r\n}\r\nfunction callSuccessCallback(callback, result) {\r\n callback(null, result);\r\n}\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst fsStat = require(\"@nodelib/fs.stat\");\r\nconst constants_1 = require(\"../constants\");\r\nconst utils = require(\"../utils\");\r\nfunction read(directory, settings) {\r\n if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) {\r\n return readdirWithFileTypes(directory, settings);\r\n }\r\n return readdir(directory, settings);\r\n}\r\nexports.read = read;\r\nfunction readdirWithFileTypes(directory, settings) {\r\n const dirents = settings.fs.readdirSync(directory, { withFileTypes: true });\r\n return dirents.map((dirent) => {\r\n const entry = {\r\n dirent,\r\n name: dirent.name,\r\n path: `${directory}${settings.pathSegmentSeparator}${dirent.name}`\r\n };\r\n if (entry.dirent.isSymbolicLink() && settings.followSymbolicLinks) {\r\n try {\r\n const stats = settings.fs.statSync(entry.path);\r\n entry.dirent = utils.fs.createDirentFromStats(entry.name, stats);\r\n }\r\n catch (error) {\r\n if (settings.throwErrorOnBrokenSymbolicLink) {\r\n throw error;\r\n }\r\n }\r\n }\r\n return entry;\r\n });\r\n}\r\nexports.readdirWithFileTypes = readdirWithFileTypes;\r\nfunction readdir(directory, settings) {\r\n const names = settings.fs.readdirSync(directory);\r\n return names.map((name) => {\r\n const entryPath = `${directory}${settings.pathSegmentSeparator}${name}`;\r\n const stats = fsStat.statSync(entryPath, settings.fsStatSettings);\r\n const entry = {\r\n name,\r\n path: entryPath,\r\n dirent: utils.fs.createDirentFromStats(name, stats)\r\n };\r\n if (settings.stats) {\r\n entry.stats = 
stats;\r\n }\r\n return entry;\r\n });\r\n}\r\nexports.readdir = readdir;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst path = require(\"path\");\r\nconst fsStat = require(\"@nodelib/fs.stat\");\r\nconst fs = require(\"./adapters/fs\");\r\nclass Settings {\r\n constructor(_options = {}) {\r\n this._options = _options;\r\n this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, false);\r\n this.fs = fs.createFileSystemAdapter(this._options.fs);\r\n this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path.sep);\r\n this.stats = this._getValue(this._options.stats, false);\r\n this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true);\r\n this.fsStatSettings = new fsStat.Settings({\r\n followSymbolicLink: this.followSymbolicLinks,\r\n fs: this.fs,\r\n throwErrorOnBrokenSymbolicLink: this.throwErrorOnBrokenSymbolicLink\r\n });\r\n }\r\n _getValue(option, value) {\r\n return option === undefined ? value : option;\r\n }\r\n}\r\nexports.default = Settings;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nclass DirentFromStats {\r\n constructor(name, stats) {\r\n this.name = name;\r\n this.isBlockDevice = stats.isBlockDevice.bind(stats);\r\n this.isCharacterDevice = stats.isCharacterDevice.bind(stats);\r\n this.isDirectory = stats.isDirectory.bind(stats);\r\n this.isFIFO = stats.isFIFO.bind(stats);\r\n this.isFile = stats.isFile.bind(stats);\r\n this.isSocket = stats.isSocket.bind(stats);\r\n this.isSymbolicLink = stats.isSymbolicLink.bind(stats);\r\n }\r\n}\r\nfunction createDirentFromStats(name, stats) {\r\n return new DirentFromStats(name, stats);\r\n}\r\nexports.createDirentFromStats = createDirentFromStats;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst fs = require(\"./fs\");\r\nexports.fs = fs;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst fs = require(\"fs\");\r\nexports.FILE_SYSTEM_ADAPTER = {\r\n lstat: fs.lstat,\r\n stat: fs.stat,\r\n lstatSync: fs.lstatSync,\r\n statSync: fs.statSync\r\n};\r\nfunction createFileSystemAdapter(fsMethods) {\r\n if (fsMethods === undefined) {\r\n return exports.FILE_SYSTEM_ADAPTER;\r\n }\r\n return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods);\r\n}\r\nexports.createFileSystemAdapter = createFileSystemAdapter;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst async = require(\"./providers/async\");\r\nconst sync = require(\"./providers/sync\");\r\nconst settings_1 = require(\"./settings\");\r\nexports.Settings = settings_1.default;\r\nfunction stat(path, optionsOrSettingsOrCallback, callback) {\r\n if (typeof optionsOrSettingsOrCallback === 'function') {\r\n return async.read(path, getSettings(), optionsOrSettingsOrCallback);\r\n }\r\n async.read(path, getSettings(optionsOrSettingsOrCallback), callback);\r\n}\r\nexports.stat = stat;\r\nfunction statSync(path, optionsOrSettings) {\r\n const settings = getSettings(optionsOrSettings);\r\n return sync.read(path, settings);\r\n}\r\nexports.statSync = statSync;\r\nfunction getSettings(settingsOrOptions = {}) {\r\n if (settingsOrOptions instanceof settings_1.default) {\r\n return settingsOrOptions;\r\n }\r\n return new settings_1.default(settingsOrOptions);\r\n}\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true 
});\r\nfunction read(path, settings, callback) {\r\n settings.fs.lstat(path, (lstatError, lstat) => {\r\n if (lstatError !== null) {\r\n return callFailureCallback(callback, lstatError);\r\n }\r\n if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) {\r\n return callSuccessCallback(callback, lstat);\r\n }\r\n settings.fs.stat(path, (statError, stat) => {\r\n if (statError !== null) {\r\n if (settings.throwErrorOnBrokenSymbolicLink) {\r\n return callFailureCallback(callback, statError);\r\n }\r\n return callSuccessCallback(callback, lstat);\r\n }\r\n if (settings.markSymbolicLink) {\r\n stat.isSymbolicLink = () => true;\r\n }\r\n callSuccessCallback(callback, stat);\r\n });\r\n });\r\n}\r\nexports.read = read;\r\nfunction callFailureCallback(callback, error) {\r\n callback(error);\r\n}\r\nfunction callSuccessCallback(callback, result) {\r\n callback(null, result);\r\n}\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nfunction read(path, settings) {\r\n const lstat = settings.fs.lstatSync(path);\r\n if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) {\r\n return lstat;\r\n }\r\n try {\r\n const stat = settings.fs.statSync(path);\r\n if (settings.markSymbolicLink) {\r\n stat.isSymbolicLink = () => true;\r\n }\r\n return stat;\r\n }\r\n catch (error) {\r\n if (!settings.throwErrorOnBrokenSymbolicLink) {\r\n return lstat;\r\n }\r\n throw error;\r\n }\r\n}\r\nexports.read = read;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst fs = require(\"./adapters/fs\");\r\nclass Settings {\r\n constructor(_options = {}) {\r\n this._options = _options;\r\n this.followSymbolicLink = this._getValue(this._options.followSymbolicLink, true);\r\n this.fs = fs.createFileSystemAdapter(this._options.fs);\r\n this.markSymbolicLink = this._getValue(this._options.markSymbolicLink, false);\r\n this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true);\r\n }\r\n _getValue(option, value) {\r\n return option === undefined ? 
value : option;\r\n }\r\n}\r\nexports.default = Settings;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst async_1 = require(\"./providers/async\");\r\nconst stream_1 = require(\"./providers/stream\");\r\nconst sync_1 = require(\"./providers/sync\");\r\nconst settings_1 = require(\"./settings\");\r\nexports.Settings = settings_1.default;\r\nfunction walk(directory, optionsOrSettingsOrCallback, callback) {\r\n if (typeof optionsOrSettingsOrCallback === 'function') {\r\n return new async_1.default(directory, getSettings()).read(optionsOrSettingsOrCallback);\r\n }\r\n new async_1.default(directory, getSettings(optionsOrSettingsOrCallback)).read(callback);\r\n}\r\nexports.walk = walk;\r\nfunction walkSync(directory, optionsOrSettings) {\r\n const settings = getSettings(optionsOrSettings);\r\n const provider = new sync_1.default(directory, settings);\r\n return provider.read();\r\n}\r\nexports.walkSync = walkSync;\r\nfunction walkStream(directory, optionsOrSettings) {\r\n const settings = getSettings(optionsOrSettings);\r\n const provider = new stream_1.default(directory, settings);\r\n return provider.read();\r\n}\r\nexports.walkStream = walkStream;\r\nfunction getSettings(settingsOrOptions = {}) {\r\n if (settingsOrOptions instanceof settings_1.default) {\r\n return settingsOrOptions;\r\n }\r\n return new settings_1.default(settingsOrOptions);\r\n}\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst async_1 = require(\"../readers/async\");\r\nclass AsyncProvider {\r\n constructor(_root, _settings) {\r\n this._root = _root;\r\n this._settings = _settings;\r\n this._reader = new async_1.default(this._root, this._settings);\r\n this._storage = new Set();\r\n }\r\n read(callback) {\r\n this._reader.onError((error) => {\r\n callFailureCallback(callback, error);\r\n });\r\n this._reader.onEntry((entry) => {\r\n this._storage.add(entry);\r\n });\r\n this._reader.onEnd(() => {\r\n callSuccessCallback(callback, [...this._storage]);\r\n });\r\n this._reader.read();\r\n }\r\n}\r\nexports.default = AsyncProvider;\r\nfunction callFailureCallback(callback, error) {\r\n callback(error);\r\n}\r\nfunction callSuccessCallback(callback, entries) {\r\n callback(null, entries);\r\n}\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst stream_1 = require(\"stream\");\r\nconst async_1 = require(\"../readers/async\");\r\nclass StreamProvider {\r\n constructor(_root, _settings) {\r\n this._root = _root;\r\n this._settings = _settings;\r\n this._reader = new async_1.default(this._root, this._settings);\r\n this._stream = new stream_1.Readable({\r\n objectMode: true,\r\n read: () => { },\r\n destroy: this._reader.destroy.bind(this._reader)\r\n });\r\n }\r\n read() {\r\n this._reader.onError((error) => {\r\n this._stream.emit('error', error);\r\n });\r\n this._reader.onEntry((entry) => {\r\n this._stream.push(entry);\r\n });\r\n this._reader.onEnd(() => {\r\n this._stream.push(null);\r\n });\r\n this._reader.read();\r\n return this._stream;\r\n }\r\n}\r\nexports.default = StreamProvider;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst sync_1 = require(\"../readers/sync\");\r\nclass SyncProvider {\r\n constructor(_root, _settings) {\r\n this._root = _root;\r\n this._settings = _settings;\r\n this._reader = new sync_1.default(this._root, this._settings);\r\n }\r\n read() {\r\n return this._reader.read();\r\n 
}\r\n}\r\nexports.default = SyncProvider;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst events_1 = require(\"events\");\r\nconst fsScandir = require(\"@nodelib/fs.scandir\");\r\nconst fastq = require(\"fastq\");\r\nconst common = require(\"./common\");\r\nconst reader_1 = require(\"./reader\");\r\nclass AsyncReader extends reader_1.default {\r\n constructor(_root, _settings) {\r\n super(_root, _settings);\r\n this._settings = _settings;\r\n this._scandir = fsScandir.scandir;\r\n this._emitter = new events_1.EventEmitter();\r\n this._queue = fastq(this._worker.bind(this), this._settings.concurrency);\r\n this._isFatalError = false;\r\n this._isDestroyed = false;\r\n this._queue.drain = () => {\r\n if (!this._isFatalError) {\r\n this._emitter.emit('end');\r\n }\r\n };\r\n }\r\n read() {\r\n this._isFatalError = false;\r\n this._isDestroyed = false;\r\n setImmediate(() => {\r\n this._pushToQueue(this._root, this._settings.basePath);\r\n });\r\n return this._emitter;\r\n }\r\n destroy() {\r\n if (this._isDestroyed) {\r\n throw new Error('The reader is already destroyed');\r\n }\r\n this._isDestroyed = true;\r\n this._queue.killAndDrain();\r\n }\r\n onEntry(callback) {\r\n this._emitter.on('entry', callback);\r\n }\r\n onError(callback) {\r\n this._emitter.once('error', callback);\r\n }\r\n onEnd(callback) {\r\n this._emitter.once('end', callback);\r\n }\r\n _pushToQueue(directory, base) {\r\n const queueItem = { directory, base };\r\n this._queue.push(queueItem, (error) => {\r\n if (error !== null) {\r\n this._handleError(error);\r\n }\r\n });\r\n }\r\n _worker(item, done) {\r\n this._scandir(item.directory, this._settings.fsScandirSettings, (error, entries) => {\r\n if (error !== null) {\r\n return done(error, undefined);\r\n }\r\n for (const entry of entries) {\r\n this._handleEntry(entry, item.base);\r\n }\r\n done(null, undefined);\r\n });\r\n }\r\n _handleError(error) {\r\n if (!common.isFatalError(this._settings, error)) {\r\n return;\r\n }\r\n this._isFatalError = true;\r\n this._isDestroyed = true;\r\n this._emitter.emit('error', error);\r\n }\r\n _handleEntry(entry, base) {\r\n if (this._isDestroyed || this._isFatalError) {\r\n return;\r\n }\r\n const fullpath = entry.path;\r\n if (base !== undefined) {\r\n entry.path = common.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator);\r\n }\r\n if (common.isAppliedFilter(this._settings.entryFilter, entry)) {\r\n this._emitEntry(entry);\r\n }\r\n if (entry.dirent.isDirectory() && common.isAppliedFilter(this._settings.deepFilter, entry)) {\r\n this._pushToQueue(fullpath, entry.path);\r\n }\r\n }\r\n _emitEntry(entry) {\r\n this._emitter.emit('entry', entry);\r\n }\r\n}\r\nexports.default = AsyncReader;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nfunction isFatalError(settings, error) {\r\n if (settings.errorFilter === null) {\r\n return true;\r\n }\r\n return !settings.errorFilter(error);\r\n}\r\nexports.isFatalError = isFatalError;\r\nfunction isAppliedFilter(filter, value) {\r\n return filter === null || filter(value);\r\n}\r\nexports.isAppliedFilter = isAppliedFilter;\r\nfunction replacePathSegmentSeparator(filepath, separator) {\r\n return filepath.split(/[\\\\/]/).join(separator);\r\n}\r\nexports.replacePathSegmentSeparator = replacePathSegmentSeparator;\r\nfunction joinPathSegments(a, b, separator) {\r\n if (a === '') {\r\n return b;\r\n }\r\n return a + separator + b;\r\n}\r\nexports.joinPathSegments = 
joinPathSegments;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst common = require(\"./common\");\r\nclass Reader {\r\n constructor(_root, _settings) {\r\n this._root = _root;\r\n this._settings = _settings;\r\n this._root = common.replacePathSegmentSeparator(_root, _settings.pathSegmentSeparator);\r\n }\r\n}\r\nexports.default = Reader;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst fsScandir = require(\"@nodelib/fs.scandir\");\r\nconst common = require(\"./common\");\r\nconst reader_1 = require(\"./reader\");\r\nclass SyncReader extends reader_1.default {\r\n constructor() {\r\n super(...arguments);\r\n this._scandir = fsScandir.scandirSync;\r\n this._storage = new Set();\r\n this._queue = new Set();\r\n }\r\n read() {\r\n this._pushToQueue(this._root, this._settings.basePath);\r\n this._handleQueue();\r\n return [...this._storage];\r\n }\r\n _pushToQueue(directory, base) {\r\n this._queue.add({ directory, base });\r\n }\r\n _handleQueue() {\r\n for (const item of this._queue.values()) {\r\n this._handleDirectory(item.directory, item.base);\r\n }\r\n }\r\n _handleDirectory(directory, base) {\r\n try {\r\n const entries = this._scandir(directory, this._settings.fsScandirSettings);\r\n for (const entry of entries) {\r\n this._handleEntry(entry, base);\r\n }\r\n }\r\n catch (error) {\r\n this._handleError(error);\r\n }\r\n }\r\n _handleError(error) {\r\n if (!common.isFatalError(this._settings, error)) {\r\n return;\r\n }\r\n throw error;\r\n }\r\n _handleEntry(entry, base) {\r\n const fullpath = entry.path;\r\n if (base !== undefined) {\r\n entry.path = common.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator);\r\n }\r\n if (common.isAppliedFilter(this._settings.entryFilter, entry)) {\r\n this._pushToStorage(entry);\r\n }\r\n if (entry.dirent.isDirectory() && common.isAppliedFilter(this._settings.deepFilter, entry)) {\r\n this._pushToQueue(fullpath, entry.path);\r\n }\r\n }\r\n _pushToStorage(entry) {\r\n this._storage.add(entry);\r\n }\r\n}\r\nexports.default = SyncReader;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst path = require(\"path\");\r\nconst fsScandir = require(\"@nodelib/fs.scandir\");\r\nclass Settings {\r\n constructor(_options = {}) {\r\n this._options = _options;\r\n this.basePath = this._getValue(this._options.basePath, undefined);\r\n this.concurrency = this._getValue(this._options.concurrency, Infinity);\r\n this.deepFilter = this._getValue(this._options.deepFilter, null);\r\n this.entryFilter = this._getValue(this._options.entryFilter, null);\r\n this.errorFilter = this._getValue(this._options.errorFilter, null);\r\n this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path.sep);\r\n this.fsScandirSettings = new fsScandir.Settings({\r\n followSymbolicLinks: this._options.followSymbolicLinks,\r\n fs: this._options.fs,\r\n pathSegmentSeparator: this._options.pathSegmentSeparator,\r\n stats: this._options.stats,\r\n throwErrorOnBrokenSymbolicLink: this._options.throwErrorOnBrokenSymbolicLink\r\n });\r\n }\r\n _getValue(option, value) {\r\n return option === undefined ? value : option;\r\n }\r\n}\r\nexports.default = Settings;\r\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nasync function auth(token) {\n const tokenType = token.split(/\\./).length === 3 ? \"app\" : /^v\\d+\\./.test(token) ? 
\"installation\" : \"oauth\";\n return {\n type: \"token\",\n token: token,\n tokenType\n };\n}\n\n/**\n * Prefix token for usage in the Authorization header\n *\n * @param token OAuth token or JSON Web Token\n */\nfunction withAuthorizationPrefix(token) {\n if (token.split(/\\./).length === 3) {\n return `bearer ${token}`;\n }\n\n return `token ${token}`;\n}\n\nasync function hook(token, request, route, parameters) {\n const endpoint = request.endpoint.merge(route, parameters);\n endpoint.headers.authorization = withAuthorizationPrefix(token);\n return request(endpoint);\n}\n\nconst createTokenAuth = function createTokenAuth(token) {\n if (!token) {\n throw new Error(\"[@octokit/auth-token] No token passed to createTokenAuth\");\n }\n\n if (typeof token !== \"string\") {\n throw new Error(\"[@octokit/auth-token] Token passed to createTokenAuth is not a string\");\n }\n\n token = token.replace(/^(token|bearer) +/i, \"\");\n return Object.assign(auth.bind(null, token), {\n hook: hook.bind(null, token)\n });\n};\n\nexports.createTokenAuth = createTokenAuth;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar universalUserAgent = require('universal-user-agent');\nvar beforeAfterHook = require('before-after-hook');\nvar request = require('@octokit/request');\nvar graphql = require('@octokit/graphql');\nvar authToken = require('@octokit/auth-token');\n\nfunction _objectWithoutPropertiesLoose(source, excluded) {\n if (source == null) return {};\n var target = {};\n var sourceKeys = Object.keys(source);\n var key, i;\n\n for (i = 0; i < sourceKeys.length; i++) {\n key = sourceKeys[i];\n if (excluded.indexOf(key) >= 0) continue;\n target[key] = source[key];\n }\n\n return target;\n}\n\nfunction _objectWithoutProperties(source, excluded) {\n if (source == null) return {};\n\n var target = _objectWithoutPropertiesLoose(source, excluded);\n\n var key, i;\n\n if (Object.getOwnPropertySymbols) {\n var sourceSymbolKeys = Object.getOwnPropertySymbols(source);\n\n for (i = 0; i < sourceSymbolKeys.length; i++) {\n key = sourceSymbolKeys[i];\n if (excluded.indexOf(key) >= 0) continue;\n if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue;\n target[key] = source[key];\n }\n }\n\n return target;\n}\n\nconst VERSION = \"3.2.1\";\n\nclass Octokit {\n constructor(options = {}) {\n const hook = new beforeAfterHook.Collection();\n const requestDefaults = {\n baseUrl: request.request.endpoint.DEFAULTS.baseUrl,\n headers: {},\n request: Object.assign({}, options.request, {\n hook: hook.bind(null, \"request\")\n }),\n mediaType: {\n previews: [],\n format: \"\"\n }\n }; // prepend default user agent with `options.userAgent` if set\n\n requestDefaults.headers[\"user-agent\"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(\" \");\n\n if (options.baseUrl) {\n requestDefaults.baseUrl = options.baseUrl;\n }\n\n if (options.previews) {\n requestDefaults.mediaType.previews = options.previews;\n }\n\n if (options.timeZone) {\n requestDefaults.headers[\"time-zone\"] = options.timeZone;\n }\n\n this.request = request.request.defaults(requestDefaults);\n this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults);\n this.log = Object.assign({\n debug: () => {},\n info: () => {},\n warn: console.warn.bind(console),\n error: console.error.bind(console)\n }, options.log);\n this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are 
set, the `octokit` instance\n // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered.\n // (2) If only `options.auth` is set, use the default token authentication strategy.\n // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance.\n // TODO: type `options.auth` based on `options.authStrategy`.\n\n if (!options.authStrategy) {\n if (!options.auth) {\n // (1)\n this.auth = async () => ({\n type: \"unauthenticated\"\n });\n } else {\n // (2)\n const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\\_(ツ)_/¯\n\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n }\n } else {\n const {\n authStrategy\n } = options,\n otherOptions = _objectWithoutProperties(options, [\"authStrategy\"]);\n\n const auth = authStrategy(Object.assign({\n request: this.request,\n log: this.log,\n // we pass the current octokit instance as well as its constructor options\n // to allow for authentication strategies that return a new octokit instance\n // that shares the same internal state as the current one. The original\n // requirement for this was the \"event-octokit\" authentication strategy\n // of https://github.com/probot/octokit-auth-probot.\n octokit: this,\n octokitOptions: otherOptions\n }, options.auth)); // @ts-ignore ¯\\_(ツ)_/¯\n\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n } // apply plugins\n // https://stackoverflow.com/a/16345172\n\n\n const classConstructor = this.constructor;\n classConstructor.plugins.forEach(plugin => {\n Object.assign(this, plugin(this, options));\n });\n }\n\n static defaults(defaults) {\n const OctokitWithDefaults = class extends this {\n constructor(...args) {\n const options = args[0] || {};\n\n if (typeof defaults === \"function\") {\n super(defaults(options));\n return;\n }\n\n super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? 
{\n userAgent: `${options.userAgent} ${defaults.userAgent}`\n } : null));\n }\n\n };\n return OctokitWithDefaults;\n }\n /**\n * Attach a plugin (or many) to your Octokit instance.\n *\n * @example\n * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)\n */\n\n\n static plugin(...newPlugins) {\n var _a;\n\n const currentPlugins = this.plugins;\n const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a);\n return NewOctokit;\n }\n\n}\nOctokit.VERSION = VERSION;\nOctokit.plugins = [];\n\nexports.Octokit = Octokit;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar isPlainObject = require('is-plain-object');\nvar universalUserAgent = require('universal-user-agent');\n\nfunction lowercaseKeys(object) {\n if (!object) {\n return {};\n }\n\n return Object.keys(object).reduce((newObj, key) => {\n newObj[key.toLowerCase()] = object[key];\n return newObj;\n }, {});\n}\n\nfunction mergeDeep(defaults, options) {\n const result = Object.assign({}, defaults);\n Object.keys(options).forEach(key => {\n if (isPlainObject.isPlainObject(options[key])) {\n if (!(key in defaults)) Object.assign(result, {\n [key]: options[key]\n });else result[key] = mergeDeep(defaults[key], options[key]);\n } else {\n Object.assign(result, {\n [key]: options[key]\n });\n }\n });\n return result;\n}\n\nfunction removeUndefinedProperties(obj) {\n for (const key in obj) {\n if (obj[key] === undefined) {\n delete obj[key];\n }\n }\n\n return obj;\n}\n\nfunction merge(defaults, route, options) {\n if (typeof route === \"string\") {\n let [method, url] = route.split(\" \");\n options = Object.assign(url ? {\n method,\n url\n } : {\n url: method\n }, options);\n } else {\n options = Object.assign({}, route);\n } // lowercase header names before merging with defaults to avoid duplicates\n\n\n options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging\n\n removeUndefinedProperties(options);\n removeUndefinedProperties(options.headers);\n const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten\n\n if (defaults && defaults.mediaType.previews.length) {\n mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews);\n }\n\n mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, \"\"));\n return mergedOptions;\n}\n\nfunction addQueryParameters(url, parameters) {\n const separator = /\\?/.test(url) ? 
\"&\" : \"?\";\n const names = Object.keys(parameters);\n\n if (names.length === 0) {\n return url;\n }\n\n return url + separator + names.map(name => {\n if (name === \"q\") {\n return \"q=\" + parameters.q.split(\"+\").map(encodeURIComponent).join(\"+\");\n }\n\n return `${name}=${encodeURIComponent(parameters[name])}`;\n }).join(\"&\");\n}\n\nconst urlVariableRegex = /\\{[^}]+\\}/g;\n\nfunction removeNonChars(variableName) {\n return variableName.replace(/^\\W+|\\W+$/g, \"\").split(/,/);\n}\n\nfunction extractUrlVariableNames(url) {\n const matches = url.match(urlVariableRegex);\n\n if (!matches) {\n return [];\n }\n\n return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []);\n}\n\nfunction omit(object, keysToOmit) {\n return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => {\n obj[key] = object[key];\n return obj;\n }, {});\n}\n\n// Based on https://github.com/bramstein/url-template, licensed under BSD\n// TODO: create separate package.\n//\n// Copyright (c) 2012-2014, Bram Stein\n// All rights reserved.\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions\n// are met:\n// 1. Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n// 2. Redistributions in binary form must reproduce the above copyright\n// notice, this list of conditions and the following disclaimer in the\n// documentation and/or other materials provided with the distribution.\n// 3. The name of the author may not be used to endorse or promote products\n// derived from this software without specific prior written permission.\n// THIS SOFTWARE IS PROVIDED BY THE AUTHOR \"AS IS\" AND ANY EXPRESS OR IMPLIED\n// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF\n// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO\n// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,\n// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,\n// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY\n// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\n// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,\n// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n/* istanbul ignore file */\nfunction encodeReserved(str) {\n return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) {\n if (!/%[0-9A-Fa-f]/.test(part)) {\n part = encodeURI(part).replace(/%5B/g, \"[\").replace(/%5D/g, \"]\");\n }\n\n return part;\n }).join(\"\");\n}\n\nfunction encodeUnreserved(str) {\n return encodeURIComponent(str).replace(/[!'()*]/g, function (c) {\n return \"%\" + c.charCodeAt(0).toString(16).toUpperCase();\n });\n}\n\nfunction encodeValue(operator, value, key) {\n value = operator === \"+\" || operator === \"#\" ? 
encodeReserved(value) : encodeUnreserved(value);\n\n if (key) {\n return encodeUnreserved(key) + \"=\" + value;\n } else {\n return value;\n }\n}\n\nfunction isDefined(value) {\n return value !== undefined && value !== null;\n}\n\nfunction isKeyOperator(operator) {\n return operator === \";\" || operator === \"&\" || operator === \"?\";\n}\n\nfunction getValues(context, operator, key, modifier) {\n var value = context[key],\n result = [];\n\n if (isDefined(value) && value !== \"\") {\n if (typeof value === \"string\" || typeof value === \"number\" || typeof value === \"boolean\") {\n value = value.toString();\n\n if (modifier && modifier !== \"*\") {\n value = value.substring(0, parseInt(modifier, 10));\n }\n\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : \"\"));\n } else {\n if (modifier === \"*\") {\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : \"\"));\n });\n } else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n result.push(encodeValue(operator, value[k], k));\n }\n });\n }\n } else {\n const tmp = [];\n\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n tmp.push(encodeValue(operator, value));\n });\n } else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n tmp.push(encodeUnreserved(k));\n tmp.push(encodeValue(operator, value[k].toString()));\n }\n });\n }\n\n if (isKeyOperator(operator)) {\n result.push(encodeUnreserved(key) + \"=\" + tmp.join(\",\"));\n } else if (tmp.length !== 0) {\n result.push(tmp.join(\",\"));\n }\n }\n }\n } else {\n if (operator === \";\") {\n if (isDefined(value)) {\n result.push(encodeUnreserved(key));\n }\n } else if (value === \"\" && (operator === \"&\" || operator === \"?\")) {\n result.push(encodeUnreserved(key) + \"=\");\n } else if (value === \"\") {\n result.push(\"\");\n }\n }\n\n return result;\n}\n\nfunction parseUrl(template) {\n return {\n expand: expand.bind(null, template)\n };\n}\n\nfunction expand(template, context) {\n var operators = [\"+\", \"#\", \".\", \"/\", \";\", \"?\", \"&\"];\n return template.replace(/\\{([^\\{\\}]+)\\}|([^\\{\\}]+)/g, function (_, expression, literal) {\n if (expression) {\n let operator = \"\";\n const values = [];\n\n if (operators.indexOf(expression.charAt(0)) !== -1) {\n operator = expression.charAt(0);\n expression = expression.substr(1);\n }\n\n expression.split(/,/g).forEach(function (variable) {\n var tmp = /([^:\\*]*)(?::(\\d+)|(\\*))?/.exec(variable);\n values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));\n });\n\n if (operator && operator !== \"+\") {\n var separator = \",\";\n\n if (operator === \"?\") {\n separator = \"&\";\n } else if (operator !== \"#\") {\n separator = operator;\n }\n\n return (values.length !== 0 ? 
operator : \"\") + values.join(separator);\n } else {\n return values.join(\",\");\n }\n } else {\n return encodeReserved(literal);\n }\n });\n}\n\nfunction parse(options) {\n // https://fetch.spec.whatwg.org/#methods\n let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible\n\n let url = (options.url || \"/\").replace(/:([a-z]\\w+)/g, \"{$1}\");\n let headers = Object.assign({}, options.headers);\n let body;\n let parameters = omit(options, [\"method\", \"baseUrl\", \"url\", \"headers\", \"request\", \"mediaType\"]); // extract variable names from URL to calculate remaining variables later\n\n const urlVariableNames = extractUrlVariableNames(url);\n url = parseUrl(url).expand(parameters);\n\n if (!/^http/.test(url)) {\n url = options.baseUrl + url;\n }\n\n const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat(\"baseUrl\");\n const remainingParameters = omit(parameters, omittedParameters);\n const isBinaryRequest = /application\\/octet-stream/i.test(headers.accept);\n\n if (!isBinaryRequest) {\n if (options.mediaType.format) {\n // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw\n headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\\/vnd(\\.\\w+)(\\.v3)?(\\.\\w+)?(\\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(\",\");\n }\n\n if (options.mediaType.previews.length) {\n const previewsFromAcceptHeader = headers.accept.match(/[\\w-]+(?=-preview)/g) || [];\n headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => {\n const format = options.mediaType.format ? `.${options.mediaType.format}` : \"+json\";\n return `application/vnd.github.${preview}-preview${format}`;\n }).join(\",\");\n }\n } // for GET/HEAD requests, set URL query parameters from remaining parameters\n // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters\n\n\n if ([\"GET\", \"HEAD\"].includes(method)) {\n url = addQueryParameters(url, remainingParameters);\n } else {\n if (\"data\" in remainingParameters) {\n body = remainingParameters.data;\n } else {\n if (Object.keys(remainingParameters).length) {\n body = remainingParameters;\n } else {\n headers[\"content-length\"] = 0;\n }\n }\n } // default content-type for JSON if body is set\n\n\n if (!headers[\"content-type\"] && typeof body !== \"undefined\") {\n headers[\"content-type\"] = \"application/json; charset=utf-8\";\n } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.\n // fetch does not allow to set `content-length` header, but we can set body to an empty string\n\n\n if ([\"PATCH\", \"PUT\"].includes(method) && typeof body === \"undefined\") {\n body = \"\";\n } // Only return body/request keys if present\n\n\n return Object.assign({\n method,\n url,\n headers\n }, typeof body !== \"undefined\" ? {\n body\n } : null, options.request ? 
{\n request: options.request\n } : null);\n}\n\nfunction endpointWithDefaults(defaults, route, options) {\n return parse(merge(defaults, route, options));\n}\n\nfunction withDefaults(oldDefaults, newDefaults) {\n const DEFAULTS = merge(oldDefaults, newDefaults);\n const endpoint = endpointWithDefaults.bind(null, DEFAULTS);\n return Object.assign(endpoint, {\n DEFAULTS,\n defaults: withDefaults.bind(null, DEFAULTS),\n merge: merge.bind(null, DEFAULTS),\n parse\n });\n}\n\nconst VERSION = \"6.0.9\";\n\nconst userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url.\n// So we use RequestParameters and add method as additional required property.\n\nconst DEFAULTS = {\n method: \"GET\",\n baseUrl: \"https://api.github.com\",\n headers: {\n accept: \"application/vnd.github.v3+json\",\n \"user-agent\": userAgent\n },\n mediaType: {\n format: \"\",\n previews: []\n }\n};\n\nconst endpoint = withDefaults(null, DEFAULTS);\n\nexports.endpoint = endpoint;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\n/*!\n * is-plain-object \n *\n * Copyright (c) 2014-2017, Jon Schlinkert.\n * Released under the MIT License.\n */\n\nfunction isObject(o) {\n return Object.prototype.toString.call(o) === '[object Object]';\n}\n\nfunction isPlainObject(o) {\n var ctor,prot;\n\n if (isObject(o) === false) return false;\n\n // If has modified constructor\n ctor = o.constructor;\n if (ctor === undefined) return true;\n\n // If has modified prototype\n prot = ctor.prototype;\n if (isObject(prot) === false) return false;\n\n // If constructor does not have an Object-specific method\n if (prot.hasOwnProperty('isPrototypeOf') === false) {\n return false;\n }\n\n // Most likely a plain Object\n return true;\n}\n\nexports.isPlainObject = isPlainObject;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar request = require('@octokit/request');\nvar universalUserAgent = require('universal-user-agent');\n\nconst VERSION = \"4.5.7\";\n\nclass GraphqlError extends Error {\n constructor(request, response) {\n const message = response.data.errors[0].message;\n super(message);\n Object.assign(this, response.data);\n Object.assign(this, {\n headers: response.headers\n });\n this.name = \"GraphqlError\";\n this.request = request; // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n }\n\n}\n\nconst NON_VARIABLE_OPTIONS = [\"method\", \"baseUrl\", \"url\", \"headers\", \"request\", \"query\", \"mediaType\"];\nconst GHES_V3_SUFFIX_REGEX = /\\/api\\/v3\\/?$/;\nfunction graphql(request, query, options) {\n if (typeof query === \"string\" && options && \"query\" in options) {\n return Promise.reject(new Error(`[@octokit/graphql] \"query\" cannot be used as variable name`));\n }\n\n const parsedOptions = typeof query === \"string\" ? 
Object.assign({\n query\n }, options) : query;\n const requestOptions = Object.keys(parsedOptions).reduce((result, key) => {\n if (NON_VARIABLE_OPTIONS.includes(key)) {\n result[key] = parsedOptions[key];\n return result;\n }\n\n if (!result.variables) {\n result.variables = {};\n }\n\n result.variables[key] = parsedOptions[key];\n return result;\n }, {}); // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix\n // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451\n\n const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl;\n\n if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {\n requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, \"/api/graphql\");\n }\n\n return request(requestOptions).then(response => {\n if (response.data.errors) {\n const headers = {};\n\n for (const key of Object.keys(response.headers)) {\n headers[key] = response.headers[key];\n }\n\n throw new GraphqlError(requestOptions, {\n headers,\n data: response.data\n });\n }\n\n return response.data.data;\n });\n}\n\nfunction withDefaults(request$1, newDefaults) {\n const newRequest = request$1.defaults(newDefaults);\n\n const newApi = (query, options) => {\n return graphql(newRequest, query, options);\n };\n\n return Object.assign(newApi, {\n defaults: withDefaults.bind(null, newRequest),\n endpoint: request.request.endpoint\n });\n}\n\nconst graphql$1 = withDefaults(request.request, {\n headers: {\n \"user-agent\": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}`\n },\n method: \"POST\",\n url: \"/graphql\"\n});\nfunction withCustomRequest(customRequest) {\n return withDefaults(customRequest, {\n method: \"POST\",\n url: \"/graphql\"\n });\n}\n\nexports.graphql = graphql$1;\nexports.withCustomRequest = withCustomRequest;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nconst VERSION = \"2.6.0\";\n\n/**\n * Some “list” response that can be paginated have a different response structure\n *\n * They have a `total_count` key in the response (search also has `incomplete_results`,\n * /installation/repositories also has `repository_selection`), as well as a key with\n * the list of the items which name varies from endpoint to endpoint.\n *\n * Octokit normalizes these responses so that paginated results are always returned following\n * the same structure. 
One challenge is that if the list response has only one page, no Link\n * header is provided, so this header alone is not sufficient to check wether a response is\n * paginated or not.\n *\n * We check if a \"total_count\" key is present in the response data, but also make sure that\n * a \"url\" property is not, as the \"Get the combined status for a specific ref\" endpoint would\n * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref\n */\nfunction normalizePaginatedListResponse(response) {\n const responseNeedsNormalization = \"total_count\" in response.data && !(\"url\" in response.data);\n if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way\n // to retrieve the same information.\n\n const incompleteResults = response.data.incomplete_results;\n const repositorySelection = response.data.repository_selection;\n const totalCount = response.data.total_count;\n delete response.data.incomplete_results;\n delete response.data.repository_selection;\n delete response.data.total_count;\n const namespaceKey = Object.keys(response.data)[0];\n const data = response.data[namespaceKey];\n response.data = data;\n\n if (typeof incompleteResults !== \"undefined\") {\n response.data.incomplete_results = incompleteResults;\n }\n\n if (typeof repositorySelection !== \"undefined\") {\n response.data.repository_selection = repositorySelection;\n }\n\n response.data.total_count = totalCount;\n return response;\n}\n\nfunction iterator(octokit, route, parameters) {\n const options = typeof route === \"function\" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);\n const requestMethod = typeof route === \"function\" ? route : octokit.request;\n const method = options.method;\n const headers = options.headers;\n let url = options.url;\n return {\n [Symbol.asyncIterator]: () => ({\n async next() {\n if (!url) return {\n done: true\n };\n const response = await requestMethod({\n method,\n url,\n headers\n });\n const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format:\n // '; rel=\"next\", ; rel=\"last\"'\n // sets `url` to undefined if \"next\" URL is not present or `link` header is not set\n\n url = ((normalizedResponse.headers.link || \"\").match(/<([^>]+)>;\\s*rel=\"next\"/) || [])[1];\n return {\n value: normalizedResponse\n };\n }\n\n })\n };\n}\n\nfunction paginate(octokit, route, parameters, mapFn) {\n if (typeof parameters === \"function\") {\n mapFn = parameters;\n parameters = undefined;\n }\n\n return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn);\n}\n\nfunction gather(octokit, results, iterator, mapFn) {\n return iterator.next().then(result => {\n if (result.done) {\n return results;\n }\n\n let earlyExit = false;\n\n function done() {\n earlyExit = true;\n }\n\n results = results.concat(mapFn ? 
mapFn(result.value, done) : result.value.data);\n\n if (earlyExit) {\n return results;\n }\n\n return gather(octokit, results, iterator, mapFn);\n });\n}\n\nconst composePaginateRest = Object.assign(paginate, {\n iterator\n});\n\n/**\n * @param octokit Octokit instance\n * @param options Options passed to Octokit constructor\n */\n\nfunction paginateRest(octokit) {\n return {\n paginate: Object.assign(paginate.bind(null, octokit), {\n iterator: iterator.bind(null, octokit)\n })\n };\n}\npaginateRest.VERSION = VERSION;\n\nexports.composePaginateRest = composePaginateRest;\nexports.paginateRest = paginateRest;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nconst Endpoints = {\n actions: {\n addSelectedRepoToOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"],\n cancelWorkflowRun: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel\"],\n createOrUpdateOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}\"],\n createOrUpdateRepoSecret: [\"PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n createRegistrationTokenForOrg: [\"POST /orgs/{org}/actions/runners/registration-token\"],\n createRegistrationTokenForRepo: [\"POST /repos/{owner}/{repo}/actions/runners/registration-token\"],\n createRemoveTokenForOrg: [\"POST /orgs/{org}/actions/runners/remove-token\"],\n createRemoveTokenForRepo: [\"POST /repos/{owner}/{repo}/actions/runners/remove-token\"],\n createWorkflowDispatch: [\"POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches\"],\n deleteArtifact: [\"DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n deleteOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}\"],\n deleteRepoSecret: [\"DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n deleteSelfHostedRunnerFromOrg: [\"DELETE /orgs/{org}/actions/runners/{runner_id}\"],\n deleteSelfHostedRunnerFromRepo: [\"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}\"],\n deleteWorkflowRun: [\"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n deleteWorkflowRunLogs: [\"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs\"],\n downloadArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}\"],\n downloadJobLogsForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs\"],\n downloadWorkflowRunLogs: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs\"],\n getArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n getJobForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/actions/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}\"],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/actions/secrets/public-key\"],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n getSelfHostedRunnerForOrg: [\"GET /orgs/{org}/actions/runners/{runner_id}\"],\n getSelfHostedRunnerForRepo: [\"GET /repos/{owner}/{repo}/actions/runners/{runner_id}\"],\n getWorkflow: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}\"],\n getWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n getWorkflowRunUsage: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing\"],\n getWorkflowUsage: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing\"],\n listArtifactsForRepo: [\"GET /repos/{owner}/{repo}/actions/artifacts\"],\n listJobsForWorkflowRun: [\"GET 
/repos/{owner}/{repo}/actions/runs/{run_id}/jobs\"],\n listOrgSecrets: [\"GET /orgs/{org}/actions/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/actions/secrets\"],\n listRepoWorkflows: [\"GET /repos/{owner}/{repo}/actions/workflows\"],\n listRunnerApplicationsForOrg: [\"GET /orgs/{org}/actions/runners/downloads\"],\n listRunnerApplicationsForRepo: [\"GET /repos/{owner}/{repo}/actions/runners/downloads\"],\n listSelectedReposForOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\"],\n listSelfHostedRunnersForOrg: [\"GET /orgs/{org}/actions/runners\"],\n listSelfHostedRunnersForRepo: [\"GET /repos/{owner}/{repo}/actions/runners\"],\n listWorkflowRunArtifacts: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\"],\n listWorkflowRuns: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\"],\n listWorkflowRunsForRepo: [\"GET /repos/{owner}/{repo}/actions/runs\"],\n reRunWorkflow: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun\"],\n removeSelectedRepoFromOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"],\n setSelectedReposForOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories\"]\n },\n activity: {\n checkRepoIsStarredByAuthenticatedUser: [\"GET /user/starred/{owner}/{repo}\"],\n deleteRepoSubscription: [\"DELETE /repos/{owner}/{repo}/subscription\"],\n deleteThreadSubscription: [\"DELETE /notifications/threads/{thread_id}/subscription\"],\n getFeeds: [\"GET /feeds\"],\n getRepoSubscription: [\"GET /repos/{owner}/{repo}/subscription\"],\n getThread: [\"GET /notifications/threads/{thread_id}\"],\n getThreadSubscriptionForAuthenticatedUser: [\"GET /notifications/threads/{thread_id}/subscription\"],\n listEventsForAuthenticatedUser: [\"GET /users/{username}/events\"],\n listNotificationsForAuthenticatedUser: [\"GET /notifications\"],\n listOrgEventsForAuthenticatedUser: [\"GET /users/{username}/events/orgs/{org}\"],\n listPublicEvents: [\"GET /events\"],\n listPublicEventsForRepoNetwork: [\"GET /networks/{owner}/{repo}/events\"],\n listPublicEventsForUser: [\"GET /users/{username}/events/public\"],\n listPublicOrgEvents: [\"GET /orgs/{org}/events\"],\n listReceivedEventsForUser: [\"GET /users/{username}/received_events\"],\n listReceivedPublicEventsForUser: [\"GET /users/{username}/received_events/public\"],\n listRepoEvents: [\"GET /repos/{owner}/{repo}/events\"],\n listRepoNotificationsForAuthenticatedUser: [\"GET /repos/{owner}/{repo}/notifications\"],\n listReposStarredByAuthenticatedUser: [\"GET /user/starred\"],\n listReposStarredByUser: [\"GET /users/{username}/starred\"],\n listReposWatchedByUser: [\"GET /users/{username}/subscriptions\"],\n listStargazersForRepo: [\"GET /repos/{owner}/{repo}/stargazers\"],\n listWatchedReposForAuthenticatedUser: [\"GET /user/subscriptions\"],\n listWatchersForRepo: [\"GET /repos/{owner}/{repo}/subscribers\"],\n markNotificationsAsRead: [\"PUT /notifications\"],\n markRepoNotificationsAsRead: [\"PUT /repos/{owner}/{repo}/notifications\"],\n markThreadAsRead: [\"PATCH /notifications/threads/{thread_id}\"],\n setRepoSubscription: [\"PUT /repos/{owner}/{repo}/subscription\"],\n setThreadSubscription: [\"PUT /notifications/threads/{thread_id}/subscription\"],\n starRepoForAuthenticatedUser: [\"PUT /user/starred/{owner}/{repo}\"],\n unstarRepoForAuthenticatedUser: [\"DELETE /user/starred/{owner}/{repo}\"]\n },\n apps: {\n addRepoToInstallation: [\"PUT /user/installations/{installation_id}/repositories/{repository_id}\"],\n 
checkToken: [\"POST /applications/{client_id}/token\"],\n createContentAttachment: [\"POST /content_references/{content_reference_id}/attachments\", {\n mediaType: {\n previews: [\"corsair\"]\n }\n }],\n createFromManifest: [\"POST /app-manifests/{code}/conversions\"],\n createInstallationAccessToken: [\"POST /app/installations/{installation_id}/access_tokens\"],\n deleteAuthorization: [\"DELETE /applications/{client_id}/grant\"],\n deleteInstallation: [\"DELETE /app/installations/{installation_id}\"],\n deleteToken: [\"DELETE /applications/{client_id}/token\"],\n getAuthenticated: [\"GET /app\"],\n getBySlug: [\"GET /apps/{app_slug}\"],\n getInstallation: [\"GET /app/installations/{installation_id}\"],\n getOrgInstallation: [\"GET /orgs/{org}/installation\"],\n getRepoInstallation: [\"GET /repos/{owner}/{repo}/installation\"],\n getSubscriptionPlanForAccount: [\"GET /marketplace_listing/accounts/{account_id}\"],\n getSubscriptionPlanForAccountStubbed: [\"GET /marketplace_listing/stubbed/accounts/{account_id}\"],\n getUserInstallation: [\"GET /users/{username}/installation\"],\n listAccountsForPlan: [\"GET /marketplace_listing/plans/{plan_id}/accounts\"],\n listAccountsForPlanStubbed: [\"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\"],\n listInstallationReposForAuthenticatedUser: [\"GET /user/installations/{installation_id}/repositories\"],\n listInstallations: [\"GET /app/installations\"],\n listInstallationsForAuthenticatedUser: [\"GET /user/installations\"],\n listPlans: [\"GET /marketplace_listing/plans\"],\n listPlansStubbed: [\"GET /marketplace_listing/stubbed/plans\"],\n listReposAccessibleToInstallation: [\"GET /installation/repositories\"],\n listSubscriptionsForAuthenticatedUser: [\"GET /user/marketplace_purchases\"],\n listSubscriptionsForAuthenticatedUserStubbed: [\"GET /user/marketplace_purchases/stubbed\"],\n removeRepoFromInstallation: [\"DELETE /user/installations/{installation_id}/repositories/{repository_id}\"],\n resetToken: [\"PATCH /applications/{client_id}/token\"],\n revokeInstallationAccessToken: [\"DELETE /installation/token\"],\n suspendInstallation: [\"PUT /app/installations/{installation_id}/suspended\"],\n unsuspendInstallation: [\"DELETE /app/installations/{installation_id}/suspended\"]\n },\n billing: {\n getGithubActionsBillingOrg: [\"GET /orgs/{org}/settings/billing/actions\"],\n getGithubActionsBillingUser: [\"GET /users/{username}/settings/billing/actions\"],\n getGithubPackagesBillingOrg: [\"GET /orgs/{org}/settings/billing/packages\"],\n getGithubPackagesBillingUser: [\"GET /users/{username}/settings/billing/packages\"],\n getSharedStorageBillingOrg: [\"GET /orgs/{org}/settings/billing/shared-storage\"],\n getSharedStorageBillingUser: [\"GET /users/{username}/settings/billing/shared-storage\"]\n },\n checks: {\n create: [\"POST /repos/{owner}/{repo}/check-runs\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n createSuite: [\"POST /repos/{owner}/{repo}/check-suites\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n get: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n getSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n listAnnotations: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n listForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n 
listForSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n listSuitesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n rerequestSuite: [\"POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n setSuitesPreferences: [\"PATCH /repos/{owner}/{repo}/check-suites/preferences\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n update: [\"PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }]\n },\n codeScanning: {\n getAlert: [\"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\", {}, {\n renamedParameters: {\n alert_id: \"alert_number\"\n }\n }],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/code-scanning/alerts\"],\n listRecentAnalyses: [\"GET /repos/{owner}/{repo}/code-scanning/analyses\"],\n updateAlert: [\"PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\"],\n uploadSarif: [\"POST /repos/{owner}/{repo}/code-scanning/sarifs\"]\n },\n codesOfConduct: {\n getAllCodesOfConduct: [\"GET /codes_of_conduct\", {\n mediaType: {\n previews: [\"scarlet-witch\"]\n }\n }],\n getConductCode: [\"GET /codes_of_conduct/{key}\", {\n mediaType: {\n previews: [\"scarlet-witch\"]\n }\n }],\n getForRepo: [\"GET /repos/{owner}/{repo}/community/code_of_conduct\", {\n mediaType: {\n previews: [\"scarlet-witch\"]\n }\n }]\n },\n emojis: {\n get: [\"GET /emojis\"]\n },\n gists: {\n checkIsStarred: [\"GET /gists/{gist_id}/star\"],\n create: [\"POST /gists\"],\n createComment: [\"POST /gists/{gist_id}/comments\"],\n delete: [\"DELETE /gists/{gist_id}\"],\n deleteComment: [\"DELETE /gists/{gist_id}/comments/{comment_id}\"],\n fork: [\"POST /gists/{gist_id}/forks\"],\n get: [\"GET /gists/{gist_id}\"],\n getComment: [\"GET /gists/{gist_id}/comments/{comment_id}\"],\n getRevision: [\"GET /gists/{gist_id}/{sha}\"],\n list: [\"GET /gists\"],\n listComments: [\"GET /gists/{gist_id}/comments\"],\n listCommits: [\"GET /gists/{gist_id}/commits\"],\n listForUser: [\"GET /users/{username}/gists\"],\n listForks: [\"GET /gists/{gist_id}/forks\"],\n listPublic: [\"GET /gists/public\"],\n listStarred: [\"GET /gists/starred\"],\n star: [\"PUT /gists/{gist_id}/star\"],\n unstar: [\"DELETE /gists/{gist_id}/star\"],\n update: [\"PATCH /gists/{gist_id}\"],\n updateComment: [\"PATCH /gists/{gist_id}/comments/{comment_id}\"]\n },\n git: {\n createBlob: [\"POST /repos/{owner}/{repo}/git/blobs\"],\n createCommit: [\"POST /repos/{owner}/{repo}/git/commits\"],\n createRef: [\"POST /repos/{owner}/{repo}/git/refs\"],\n createTag: [\"POST /repos/{owner}/{repo}/git/tags\"],\n createTree: [\"POST /repos/{owner}/{repo}/git/trees\"],\n deleteRef: [\"DELETE /repos/{owner}/{repo}/git/refs/{ref}\"],\n getBlob: [\"GET /repos/{owner}/{repo}/git/blobs/{file_sha}\"],\n getCommit: [\"GET /repos/{owner}/{repo}/git/commits/{commit_sha}\"],\n getRef: [\"GET /repos/{owner}/{repo}/git/ref/{ref}\"],\n getTag: [\"GET /repos/{owner}/{repo}/git/tags/{tag_sha}\"],\n getTree: [\"GET /repos/{owner}/{repo}/git/trees/{tree_sha}\"],\n listMatchingRefs: [\"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\"],\n updateRef: [\"PATCH /repos/{owner}/{repo}/git/refs/{ref}\"]\n },\n gitignore: {\n getAllTemplates: [\"GET /gitignore/templates\"],\n getTemplate: [\"GET /gitignore/templates/{name}\"]\n },\n interactions: {\n getRestrictionsForOrg: [\"GET 
/orgs/{org}/interaction-limits\", {\n mediaType: {\n previews: [\"sombra\"]\n }\n }],\n getRestrictionsForRepo: [\"GET /repos/{owner}/{repo}/interaction-limits\", {\n mediaType: {\n previews: [\"sombra\"]\n }\n }],\n removeRestrictionsForOrg: [\"DELETE /orgs/{org}/interaction-limits\", {\n mediaType: {\n previews: [\"sombra\"]\n }\n }],\n removeRestrictionsForRepo: [\"DELETE /repos/{owner}/{repo}/interaction-limits\", {\n mediaType: {\n previews: [\"sombra\"]\n }\n }],\n setRestrictionsForOrg: [\"PUT /orgs/{org}/interaction-limits\", {\n mediaType: {\n previews: [\"sombra\"]\n }\n }],\n setRestrictionsForRepo: [\"PUT /repos/{owner}/{repo}/interaction-limits\", {\n mediaType: {\n previews: [\"sombra\"]\n }\n }]\n },\n issues: {\n addAssignees: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/assignees\"],\n addLabels: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n checkUserCanBeAssigned: [\"GET /repos/{owner}/{repo}/assignees/{assignee}\"],\n create: [\"POST /repos/{owner}/{repo}/issues\"],\n createComment: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n createLabel: [\"POST /repos/{owner}/{repo}/labels\"],\n createMilestone: [\"POST /repos/{owner}/{repo}/milestones\"],\n deleteComment: [\"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n deleteLabel: [\"DELETE /repos/{owner}/{repo}/labels/{name}\"],\n deleteMilestone: [\"DELETE /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n get: [\"GET /repos/{owner}/{repo}/issues/{issue_number}\"],\n getComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n getEvent: [\"GET /repos/{owner}/{repo}/issues/events/{event_id}\"],\n getLabel: [\"GET /repos/{owner}/{repo}/labels/{name}\"],\n getMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n list: [\"GET /issues\"],\n listAssignees: [\"GET /repos/{owner}/{repo}/assignees\"],\n listComments: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n listCommentsForRepo: [\"GET /repos/{owner}/{repo}/issues/comments\"],\n listEvents: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/events\"],\n listEventsForRepo: [\"GET /repos/{owner}/{repo}/issues/events\"],\n listEventsForTimeline: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\", {\n mediaType: {\n previews: [\"mockingbird\"]\n }\n }],\n listForAuthenticatedUser: [\"GET /user/issues\"],\n listForOrg: [\"GET /orgs/{org}/issues\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/issues\"],\n listLabelsForMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\"],\n listLabelsForRepo: [\"GET /repos/{owner}/{repo}/labels\"],\n listLabelsOnIssue: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n listMilestones: [\"GET /repos/{owner}/{repo}/milestones\"],\n lock: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n removeAllLabels: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n removeAssignees: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees\"],\n removeLabel: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}\"],\n setLabels: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n unlock: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n update: [\"PATCH /repos/{owner}/{repo}/issues/{issue_number}\"],\n updateComment: [\"PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n updateLabel: [\"PATCH /repos/{owner}/{repo}/labels/{name}\"],\n updateMilestone: [\"PATCH 
/repos/{owner}/{repo}/milestones/{milestone_number}\"]\n },\n licenses: {\n get: [\"GET /licenses/{license}\"],\n getAllCommonlyUsed: [\"GET /licenses\"],\n getForRepo: [\"GET /repos/{owner}/{repo}/license\"]\n },\n markdown: {\n render: [\"POST /markdown\"],\n renderRaw: [\"POST /markdown/raw\", {\n headers: {\n \"content-type\": \"text/plain; charset=utf-8\"\n }\n }]\n },\n meta: {\n get: [\"GET /meta\"]\n },\n migrations: {\n cancelImport: [\"DELETE /repos/{owner}/{repo}/import\"],\n deleteArchiveForAuthenticatedUser: [\"DELETE /user/migrations/{migration_id}/archive\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n deleteArchiveForOrg: [\"DELETE /orgs/{org}/migrations/{migration_id}/archive\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n downloadArchiveForOrg: [\"GET /orgs/{org}/migrations/{migration_id}/archive\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n getArchiveForAuthenticatedUser: [\"GET /user/migrations/{migration_id}/archive\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n getCommitAuthors: [\"GET /repos/{owner}/{repo}/import/authors\"],\n getImportStatus: [\"GET /repos/{owner}/{repo}/import\"],\n getLargeFiles: [\"GET /repos/{owner}/{repo}/import/large_files\"],\n getStatusForAuthenticatedUser: [\"GET /user/migrations/{migration_id}\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n getStatusForOrg: [\"GET /orgs/{org}/migrations/{migration_id}\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n listForAuthenticatedUser: [\"GET /user/migrations\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n listForOrg: [\"GET /orgs/{org}/migrations\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n listReposForOrg: [\"GET /orgs/{org}/migrations/{migration_id}/repositories\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n listReposForUser: [\"GET /user/migrations/{migration_id}/repositories\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n mapCommitAuthor: [\"PATCH /repos/{owner}/{repo}/import/authors/{author_id}\"],\n setLfsPreference: [\"PATCH /repos/{owner}/{repo}/import/lfs\"],\n startForAuthenticatedUser: [\"POST /user/migrations\"],\n startForOrg: [\"POST /orgs/{org}/migrations\"],\n startImport: [\"PUT /repos/{owner}/{repo}/import\"],\n unlockRepoForAuthenticatedUser: [\"DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n unlockRepoForOrg: [\"DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n updateImport: [\"PATCH /repos/{owner}/{repo}/import\"]\n },\n orgs: {\n blockUser: [\"PUT /orgs/{org}/blocks/{username}\"],\n checkBlockedUser: [\"GET /orgs/{org}/blocks/{username}\"],\n checkMembershipForUser: [\"GET /orgs/{org}/members/{username}\"],\n checkPublicMembershipForUser: [\"GET /orgs/{org}/public_members/{username}\"],\n convertMemberToOutsideCollaborator: [\"PUT /orgs/{org}/outside_collaborators/{username}\"],\n createInvitation: [\"POST /orgs/{org}/invitations\"],\n createWebhook: [\"POST /orgs/{org}/hooks\"],\n deleteWebhook: [\"DELETE /orgs/{org}/hooks/{hook_id}\"],\n get: [\"GET /orgs/{org}\"],\n getMembershipForAuthenticatedUser: [\"GET /user/memberships/orgs/{org}\"],\n getMembershipForUser: [\"GET /orgs/{org}/memberships/{username}\"],\n getWebhook: [\"GET /orgs/{org}/hooks/{hook_id}\"],\n list: [\"GET /organizations\"],\n listAppInstallations: [\"GET /orgs/{org}/installations\"],\n listBlockedUsers: [\"GET 
/orgs/{org}/blocks\"],\n listForAuthenticatedUser: [\"GET /user/orgs\"],\n listForUser: [\"GET /users/{username}/orgs\"],\n listInvitationTeams: [\"GET /orgs/{org}/invitations/{invitation_id}/teams\"],\n listMembers: [\"GET /orgs/{org}/members\"],\n listMembershipsForAuthenticatedUser: [\"GET /user/memberships/orgs\"],\n listOutsideCollaborators: [\"GET /orgs/{org}/outside_collaborators\"],\n listPendingInvitations: [\"GET /orgs/{org}/invitations\"],\n listPublicMembers: [\"GET /orgs/{org}/public_members\"],\n listWebhooks: [\"GET /orgs/{org}/hooks\"],\n pingWebhook: [\"POST /orgs/{org}/hooks/{hook_id}/pings\"],\n removeMember: [\"DELETE /orgs/{org}/members/{username}\"],\n removeMembershipForUser: [\"DELETE /orgs/{org}/memberships/{username}\"],\n removeOutsideCollaborator: [\"DELETE /orgs/{org}/outside_collaborators/{username}\"],\n removePublicMembershipForAuthenticatedUser: [\"DELETE /orgs/{org}/public_members/{username}\"],\n setMembershipForUser: [\"PUT /orgs/{org}/memberships/{username}\"],\n setPublicMembershipForAuthenticatedUser: [\"PUT /orgs/{org}/public_members/{username}\"],\n unblockUser: [\"DELETE /orgs/{org}/blocks/{username}\"],\n update: [\"PATCH /orgs/{org}\"],\n updateMembershipForAuthenticatedUser: [\"PATCH /user/memberships/orgs/{org}\"],\n updateWebhook: [\"PATCH /orgs/{org}/hooks/{hook_id}\"]\n },\n projects: {\n addCollaborator: [\"PUT /projects/{project_id}/collaborators/{username}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n createCard: [\"POST /projects/columns/{column_id}/cards\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n createColumn: [\"POST /projects/{project_id}/columns\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n createForAuthenticatedUser: [\"POST /user/projects\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n createForOrg: [\"POST /orgs/{org}/projects\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n createForRepo: [\"POST /repos/{owner}/{repo}/projects\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n delete: [\"DELETE /projects/{project_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n deleteCard: [\"DELETE /projects/columns/cards/{card_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n deleteColumn: [\"DELETE /projects/columns/{column_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n get: [\"GET /projects/{project_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n getCard: [\"GET /projects/columns/cards/{card_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n getColumn: [\"GET /projects/columns/{column_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n getPermissionForUser: [\"GET /projects/{project_id}/collaborators/{username}/permission\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listCards: [\"GET /projects/columns/{column_id}/cards\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listCollaborators: [\"GET /projects/{project_id}/collaborators\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listColumns: [\"GET /projects/{project_id}/columns\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listForOrg: [\"GET /orgs/{org}/projects\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listForRepo: [\"GET /repos/{owner}/{repo}/projects\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listForUser: [\"GET /users/{username}/projects\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n moveCard: [\"POST /projects/columns/cards/{card_id}/moves\", {\n mediaType: {\n 
previews: [\"inertia\"]\n }\n }],\n moveColumn: [\"POST /projects/columns/{column_id}/moves\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n removeCollaborator: [\"DELETE /projects/{project_id}/collaborators/{username}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n update: [\"PATCH /projects/{project_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n updateCard: [\"PATCH /projects/columns/cards/{card_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n updateColumn: [\"PATCH /projects/columns/{column_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }]\n },\n pulls: {\n checkIfMerged: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n create: [\"POST /repos/{owner}/{repo}/pulls\"],\n createReplyForReviewComment: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies\"],\n createReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n createReviewComment: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments\"],\n deletePendingReview: [\"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n deleteReviewComment: [\"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n dismissReview: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals\"],\n get: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}\"],\n getReview: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n getReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n list: [\"GET /repos/{owner}/{repo}/pulls\"],\n listCommentsForReview: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\"],\n listCommits: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\"],\n listFiles: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\"],\n listRequestedReviewers: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n listReviewComments: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\"],\n listReviewCommentsForRepo: [\"GET /repos/{owner}/{repo}/pulls/comments\"],\n listReviews: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n merge: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n removeRequestedReviewers: [\"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n requestReviewers: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n submitReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events\"],\n update: [\"PATCH /repos/{owner}/{repo}/pulls/{pull_number}\"],\n updateBranch: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch\", {\n mediaType: {\n previews: [\"lydian\"]\n }\n }],\n updateReview: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n updateReviewComment: [\"PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}\"]\n },\n rateLimit: {\n get: [\"GET /rate_limit\"]\n },\n reactions: {\n createForCommitComment: [\"POST /repos/{owner}/{repo}/comments/{comment_id}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n createForIssue: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n createForIssueComment: [\"POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n createForPullRequestReviewComment: [\"POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\", {\n mediaType: 
{\n previews: [\"squirrel-girl\"]\n }\n }],\n createForTeamDiscussionCommentInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n createForTeamDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteForCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteForIssue: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteForIssueComment: [\"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteForPullRequestComment: [\"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteForTeamDiscussion: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteForTeamDiscussionComment: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteLegacy: [\"DELETE /reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }, {\n deprecated: \"octokit.reactions.deleteLegacy() is deprecated, see https://developer.github.com/v3/reactions/#delete-a-reaction-legacy\"\n }],\n listForCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n listForIssue: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n listForIssueComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n listForPullRequestReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n listForTeamDiscussionCommentInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n listForTeamDiscussionInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }]\n },\n repos: {\n acceptInvitation: [\"PATCH /user/repository_invitations/{invitation_id}\"],\n addAppAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n addCollaborator: [\"PUT /repos/{owner}/{repo}/collaborators/{username}\"],\n addStatusCheckContexts: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n addTeamAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n addUserAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n checkCollaborator: [\"GET 
/repos/{owner}/{repo}/collaborators/{username}\"],\n checkVulnerabilityAlerts: [\"GET /repos/{owner}/{repo}/vulnerability-alerts\", {\n mediaType: {\n previews: [\"dorian\"]\n }\n }],\n compareCommits: [\"GET /repos/{owner}/{repo}/compare/{base}...{head}\"],\n createCommitComment: [\"POST /repos/{owner}/{repo}/commits/{commit_sha}/comments\"],\n createCommitSignatureProtection: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\", {\n mediaType: {\n previews: [\"zzzax\"]\n }\n }],\n createCommitStatus: [\"POST /repos/{owner}/{repo}/statuses/{sha}\"],\n createDeployKey: [\"POST /repos/{owner}/{repo}/keys\"],\n createDeployment: [\"POST /repos/{owner}/{repo}/deployments\"],\n createDeploymentStatus: [\"POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"],\n createDispatchEvent: [\"POST /repos/{owner}/{repo}/dispatches\"],\n createForAuthenticatedUser: [\"POST /user/repos\"],\n createFork: [\"POST /repos/{owner}/{repo}/forks\"],\n createInOrg: [\"POST /orgs/{org}/repos\"],\n createOrUpdateFileContents: [\"PUT /repos/{owner}/{repo}/contents/{path}\"],\n createPagesSite: [\"POST /repos/{owner}/{repo}/pages\", {\n mediaType: {\n previews: [\"switcheroo\"]\n }\n }],\n createRelease: [\"POST /repos/{owner}/{repo}/releases\"],\n createUsingTemplate: [\"POST /repos/{template_owner}/{template_repo}/generate\", {\n mediaType: {\n previews: [\"baptiste\"]\n }\n }],\n createWebhook: [\"POST /repos/{owner}/{repo}/hooks\"],\n declineInvitation: [\"DELETE /user/repository_invitations/{invitation_id}\"],\n delete: [\"DELETE /repos/{owner}/{repo}\"],\n deleteAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"],\n deleteAdminBranchProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n deleteBranchProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection\"],\n deleteCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}\"],\n deleteCommitSignatureProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\", {\n mediaType: {\n previews: [\"zzzax\"]\n }\n }],\n deleteDeployKey: [\"DELETE /repos/{owner}/{repo}/keys/{key_id}\"],\n deleteDeployment: [\"DELETE /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n deleteFile: [\"DELETE /repos/{owner}/{repo}/contents/{path}\"],\n deleteInvitation: [\"DELETE /repos/{owner}/{repo}/invitations/{invitation_id}\"],\n deletePagesSite: [\"DELETE /repos/{owner}/{repo}/pages\", {\n mediaType: {\n previews: [\"switcheroo\"]\n }\n }],\n deletePullRequestReviewProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n deleteRelease: [\"DELETE /repos/{owner}/{repo}/releases/{release_id}\"],\n deleteReleaseAsset: [\"DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n deleteWebhook: [\"DELETE /repos/{owner}/{repo}/hooks/{hook_id}\"],\n disableAutomatedSecurityFixes: [\"DELETE /repos/{owner}/{repo}/automated-security-fixes\", {\n mediaType: {\n previews: [\"london\"]\n }\n }],\n disableVulnerabilityAlerts: [\"DELETE /repos/{owner}/{repo}/vulnerability-alerts\", {\n mediaType: {\n previews: [\"dorian\"]\n }\n }],\n downloadArchive: [\"GET /repos/{owner}/{repo}/{archive_format}/{ref}\"],\n enableAutomatedSecurityFixes: [\"PUT /repos/{owner}/{repo}/automated-security-fixes\", {\n mediaType: {\n previews: [\"london\"]\n }\n }],\n enableVulnerabilityAlerts: [\"PUT /repos/{owner}/{repo}/vulnerability-alerts\", {\n mediaType: {\n previews: 
[\"dorian\"]\n }\n }],\n get: [\"GET /repos/{owner}/{repo}\"],\n getAccessRestrictions: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"],\n getAdminBranchProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n getAllStatusCheckContexts: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\"],\n getAllTopics: [\"GET /repos/{owner}/{repo}/topics\", {\n mediaType: {\n previews: [\"mercy\"]\n }\n }],\n getAppsWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\"],\n getBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}\"],\n getBranchProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection\"],\n getClones: [\"GET /repos/{owner}/{repo}/traffic/clones\"],\n getCodeFrequencyStats: [\"GET /repos/{owner}/{repo}/stats/code_frequency\"],\n getCollaboratorPermissionLevel: [\"GET /repos/{owner}/{repo}/collaborators/{username}/permission\"],\n getCombinedStatusForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/status\"],\n getCommit: [\"GET /repos/{owner}/{repo}/commits/{ref}\"],\n getCommitActivityStats: [\"GET /repos/{owner}/{repo}/stats/commit_activity\"],\n getCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}\"],\n getCommitSignatureProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\", {\n mediaType: {\n previews: [\"zzzax\"]\n }\n }],\n getCommunityProfileMetrics: [\"GET /repos/{owner}/{repo}/community/profile\", {\n mediaType: {\n previews: [\"black-panther\"]\n }\n }],\n getContent: [\"GET /repos/{owner}/{repo}/contents/{path}\"],\n getContributorsStats: [\"GET /repos/{owner}/{repo}/stats/contributors\"],\n getDeployKey: [\"GET /repos/{owner}/{repo}/keys/{key_id}\"],\n getDeployment: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n getDeploymentStatus: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}\"],\n getLatestPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/latest\"],\n getLatestRelease: [\"GET /repos/{owner}/{repo}/releases/latest\"],\n getPages: [\"GET /repos/{owner}/{repo}/pages\"],\n getPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/{build_id}\"],\n getParticipationStats: [\"GET /repos/{owner}/{repo}/stats/participation\"],\n getPullRequestReviewProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n getPunchCardStats: [\"GET /repos/{owner}/{repo}/stats/punch_card\"],\n getReadme: [\"GET /repos/{owner}/{repo}/readme\"],\n getRelease: [\"GET /repos/{owner}/{repo}/releases/{release_id}\"],\n getReleaseAsset: [\"GET /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n getReleaseByTag: [\"GET /repos/{owner}/{repo}/releases/tags/{tag}\"],\n getStatusChecksProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n getTeamsWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\"],\n getTopPaths: [\"GET /repos/{owner}/{repo}/traffic/popular/paths\"],\n getTopReferrers: [\"GET /repos/{owner}/{repo}/traffic/popular/referrers\"],\n getUsersWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\"],\n getViews: [\"GET /repos/{owner}/{repo}/traffic/views\"],\n getWebhook: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}\"],\n listBranches: [\"GET /repos/{owner}/{repo}/branches\"],\n listBranchesForHeadCommit: [\"GET 
/repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head\", {\n mediaType: {\n previews: [\"groot\"]\n }\n }],\n listCollaborators: [\"GET /repos/{owner}/{repo}/collaborators\"],\n listCommentsForCommit: [\"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\"],\n listCommitCommentsForRepo: [\"GET /repos/{owner}/{repo}/comments\"],\n listCommitStatusesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/statuses\"],\n listCommits: [\"GET /repos/{owner}/{repo}/commits\"],\n listContributors: [\"GET /repos/{owner}/{repo}/contributors\"],\n listDeployKeys: [\"GET /repos/{owner}/{repo}/keys\"],\n listDeploymentStatuses: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"],\n listDeployments: [\"GET /repos/{owner}/{repo}/deployments\"],\n listForAuthenticatedUser: [\"GET /user/repos\"],\n listForOrg: [\"GET /orgs/{org}/repos\"],\n listForUser: [\"GET /users/{username}/repos\"],\n listForks: [\"GET /repos/{owner}/{repo}/forks\"],\n listInvitations: [\"GET /repos/{owner}/{repo}/invitations\"],\n listInvitationsForAuthenticatedUser: [\"GET /user/repository_invitations\"],\n listLanguages: [\"GET /repos/{owner}/{repo}/languages\"],\n listPagesBuilds: [\"GET /repos/{owner}/{repo}/pages/builds\"],\n listPublic: [\"GET /repositories\"],\n listPullRequestsAssociatedWithCommit: [\"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\", {\n mediaType: {\n previews: [\"groot\"]\n }\n }],\n listReleaseAssets: [\"GET /repos/{owner}/{repo}/releases/{release_id}/assets\"],\n listReleases: [\"GET /repos/{owner}/{repo}/releases\"],\n listTags: [\"GET /repos/{owner}/{repo}/tags\"],\n listTeams: [\"GET /repos/{owner}/{repo}/teams\"],\n listWebhooks: [\"GET /repos/{owner}/{repo}/hooks\"],\n merge: [\"POST /repos/{owner}/{repo}/merges\"],\n pingWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/pings\"],\n removeAppAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n removeCollaborator: [\"DELETE /repos/{owner}/{repo}/collaborators/{username}\"],\n removeStatusCheckContexts: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n removeStatusCheckProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n removeTeamAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n removeUserAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n replaceAllTopics: [\"PUT /repos/{owner}/{repo}/topics\", {\n mediaType: {\n previews: [\"mercy\"]\n }\n }],\n requestPagesBuild: [\"POST /repos/{owner}/{repo}/pages/builds\"],\n setAdminBranchProtection: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n setAppAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n setStatusCheckContexts: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n setTeamAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n setUserAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n testPushWebhook: [\"POST 
/repos/{owner}/{repo}/hooks/{hook_id}/tests\"],\n transfer: [\"POST /repos/{owner}/{repo}/transfer\"],\n update: [\"PATCH /repos/{owner}/{repo}\"],\n updateBranchProtection: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection\"],\n updateCommitComment: [\"PATCH /repos/{owner}/{repo}/comments/{comment_id}\"],\n updateInformationAboutPagesSite: [\"PUT /repos/{owner}/{repo}/pages\"],\n updateInvitation: [\"PATCH /repos/{owner}/{repo}/invitations/{invitation_id}\"],\n updatePullRequestReviewProtection: [\"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n updateRelease: [\"PATCH /repos/{owner}/{repo}/releases/{release_id}\"],\n updateReleaseAsset: [\"PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n updateStatusCheckPotection: [\"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n updateWebhook: [\"PATCH /repos/{owner}/{repo}/hooks/{hook_id}\"],\n uploadReleaseAsset: [\"POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}\", {\n baseUrl: \"https://uploads.github.com\"\n }]\n },\n search: {\n code: [\"GET /search/code\"],\n commits: [\"GET /search/commits\", {\n mediaType: {\n previews: [\"cloak\"]\n }\n }],\n issuesAndPullRequests: [\"GET /search/issues\"],\n labels: [\"GET /search/labels\"],\n repos: [\"GET /search/repositories\"],\n topics: [\"GET /search/topics\", {\n mediaType: {\n previews: [\"mercy\"]\n }\n }],\n users: [\"GET /search/users\"]\n },\n teams: {\n addOrUpdateMembershipForUserInOrg: [\"PUT /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n addOrUpdateProjectPermissionsInOrg: [\"PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n addOrUpdateRepoPermissionsInOrg: [\"PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n checkPermissionsForProjectInOrg: [\"GET /orgs/{org}/teams/{team_slug}/projects/{project_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n checkPermissionsForRepoInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n create: [\"POST /orgs/{org}/teams\"],\n createDiscussionCommentInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"],\n createDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions\"],\n deleteDiscussionCommentInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n deleteDiscussionInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n deleteInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}\"],\n getByName: [\"GET /orgs/{org}/teams/{team_slug}\"],\n getDiscussionCommentInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n getDiscussionInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n getMembershipForUserInOrg: [\"GET /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n list: [\"GET /orgs/{org}/teams\"],\n listChildInOrg: [\"GET /orgs/{org}/teams/{team_slug}/teams\"],\n listDiscussionCommentsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"],\n listDiscussionsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions\"],\n listForAuthenticatedUser: [\"GET /user/teams\"],\n listMembersInOrg: [\"GET /orgs/{org}/teams/{team_slug}/members\"],\n listPendingInvitationsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/invitations\"],\n listProjectsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/projects\", 
{\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listReposInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos\"],\n removeMembershipForUserInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n removeProjectInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}\"],\n removeRepoInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n updateDiscussionCommentInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n updateDiscussionInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n updateInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}\"]\n },\n users: {\n addEmailForAuthenticated: [\"POST /user/emails\"],\n block: [\"PUT /user/blocks/{username}\"],\n checkBlocked: [\"GET /user/blocks/{username}\"],\n checkFollowingForUser: [\"GET /users/{username}/following/{target_user}\"],\n checkPersonIsFollowedByAuthenticated: [\"GET /user/following/{username}\"],\n createGpgKeyForAuthenticated: [\"POST /user/gpg_keys\"],\n createPublicSshKeyForAuthenticated: [\"POST /user/keys\"],\n deleteEmailForAuthenticated: [\"DELETE /user/emails\"],\n deleteGpgKeyForAuthenticated: [\"DELETE /user/gpg_keys/{gpg_key_id}\"],\n deletePublicSshKeyForAuthenticated: [\"DELETE /user/keys/{key_id}\"],\n follow: [\"PUT /user/following/{username}\"],\n getAuthenticated: [\"GET /user\"],\n getByUsername: [\"GET /users/{username}\"],\n getContextForUser: [\"GET /users/{username}/hovercard\"],\n getGpgKeyForAuthenticated: [\"GET /user/gpg_keys/{gpg_key_id}\"],\n getPublicSshKeyForAuthenticated: [\"GET /user/keys/{key_id}\"],\n list: [\"GET /users\"],\n listBlockedByAuthenticated: [\"GET /user/blocks\"],\n listEmailsForAuthenticated: [\"GET /user/emails\"],\n listFollowedByAuthenticated: [\"GET /user/following\"],\n listFollowersForAuthenticatedUser: [\"GET /user/followers\"],\n listFollowersForUser: [\"GET /users/{username}/followers\"],\n listFollowingForUser: [\"GET /users/{username}/following\"],\n listGpgKeysForAuthenticated: [\"GET /user/gpg_keys\"],\n listGpgKeysForUser: [\"GET /users/{username}/gpg_keys\"],\n listPublicEmailsForAuthenticated: [\"GET /user/public_emails\"],\n listPublicKeysForUser: [\"GET /users/{username}/keys\"],\n listPublicSshKeysForAuthenticated: [\"GET /user/keys\"],\n setPrimaryEmailVisibilityForAuthenticated: [\"PATCH /user/email/visibility\"],\n unblock: [\"DELETE /user/blocks/{username}\"],\n unfollow: [\"DELETE /user/following/{username}\"],\n updateAuthenticated: [\"PATCH /user\"]\n }\n};\n\nconst VERSION = \"4.2.1\";\n\nfunction endpointsToMethods(octokit, endpointsMap) {\n const newMethods = {};\n\n for (const [scope, endpoints] of Object.entries(endpointsMap)) {\n for (const [methodName, endpoint] of Object.entries(endpoints)) {\n const [route, defaults, decorations] = endpoint;\n const [method, url] = route.split(/ /);\n const endpointDefaults = Object.assign({\n method,\n url\n }, defaults);\n\n if (!newMethods[scope]) {\n newMethods[scope] = {};\n }\n\n const scopeMethods = newMethods[scope];\n\n if (decorations) {\n scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations);\n continue;\n }\n\n scopeMethods[methodName] = octokit.request.defaults(endpointDefaults);\n }\n }\n\n return newMethods;\n}\n\nfunction decorate(octokit, scope, methodName, defaults, decorations) {\n const requestWithDefaults = octokit.request.defaults(defaults);\n /* istanbul ignore next */\n\n function withDecorations(...args) {\n // @ts-ignore 
https://github.com/microsoft/TypeScript/issues/25488\n let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData`\n\n if (decorations.mapToData) {\n options = Object.assign({}, options, {\n data: options[decorations.mapToData],\n [decorations.mapToData]: undefined\n });\n return requestWithDefaults(options);\n }\n\n if (decorations.renamed) {\n const [newScope, newMethodName] = decorations.renamed;\n octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`);\n }\n\n if (decorations.deprecated) {\n octokit.log.warn(decorations.deprecated);\n }\n\n if (decorations.renamedParameters) {\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n const options = requestWithDefaults.endpoint.merge(...args);\n\n for (const [name, alias] of Object.entries(decorations.renamedParameters)) {\n if (name in options) {\n octokit.log.warn(`\"${name}\" parameter is deprecated for \"octokit.${scope}.${methodName}()\". Use \"${alias}\" instead`);\n\n if (!(alias in options)) {\n options[alias] = options[name];\n }\n\n delete options[name];\n }\n }\n\n return requestWithDefaults(options);\n } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n\n\n return requestWithDefaults(...args);\n }\n\n return Object.assign(withDecorations, requestWithDefaults);\n}\n\n/**\n * This plugin is a 1:1 copy of internal @octokit/rest plugins. The primary\n * goal is to rebuild @octokit/rest on top of @octokit/core. Once that is\n * done, we will remove the registerEndpoints methods and return the methods\n * directly as with the other plugins. At that point we will also remove the\n * legacy workarounds and deprecations.\n *\n * See the plan at\n * https://github.com/octokit/plugin-rest-endpoint-methods.js/pull/1\n */\n\nfunction restEndpointMethods(octokit) {\n return endpointsToMethods(octokit, Endpoints);\n}\nrestEndpointMethods.VERSION = VERSION;\n\nexports.restEndpointMethods = restEndpointMethods;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; }\n\nvar deprecation = require('deprecation');\nvar once = _interopDefault(require('once'));\n\nconst logOnce = once(deprecation => console.warn(deprecation));\n/**\n * Error with extra properties to help with debugging\n */\n\nclass RequestError extends Error {\n constructor(message, statusCode, options) {\n super(message); // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n\n this.name = \"HttpError\";\n this.status = statusCode;\n Object.defineProperty(this, \"code\", {\n get() {\n logOnce(new deprecation.Deprecation(\"[@octokit/request-error] `error.code` is deprecated, use `error.status`.\"));\n return statusCode;\n }\n\n });\n this.headers = options.headers || {}; // redact request credentials without mutating original request options\n\n const requestCopy = Object.assign({}, options.request);\n\n if (options.request.headers.authorization) {\n requestCopy.headers = Object.assign({}, options.request.headers, {\n authorization: options.request.headers.authorization.replace(/ .*$/, \" [REDACTED]\")\n });\n }\n\n requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit\n // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications\n .replace(/\\bclient_secret=\\w+/g, \"client_secret=[REDACTED]\") // OAuth tokens can be passed as URL query parameters, although it is not recommended\n // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header\n .replace(/\\baccess_token=\\w+/g, \"access_token=[REDACTED]\");\n this.request = requestCopy;\n }\n\n}\n\nexports.RequestError = RequestError;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; }\n\nvar endpoint = require('@octokit/endpoint');\nvar universalUserAgent = require('universal-user-agent');\nvar isPlainObject = require('is-plain-object');\nvar nodeFetch = _interopDefault(require('node-fetch'));\nvar requestError = require('@octokit/request-error');\n\nconst VERSION = \"5.4.10\";\n\nfunction getBufferResponse(response) {\n return response.arrayBuffer();\n}\n\nfunction fetchWrapper(requestOptions) {\n if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {\n requestOptions.body = JSON.stringify(requestOptions.body);\n }\n\n let headers = {};\n let status;\n let url;\n const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch;\n return fetch(requestOptions.url, Object.assign({\n method: requestOptions.method,\n body: requestOptions.body,\n headers: requestOptions.headers,\n redirect: requestOptions.redirect\n }, requestOptions.request)).then(response => {\n url = response.url;\n status = response.status;\n\n for (const keyAndValue of response.headers) {\n headers[keyAndValue[0]] = keyAndValue[1];\n }\n\n if (status === 204 || status === 205) {\n return;\n } // GitHub API returns 200 for HEAD requests\n\n\n if (requestOptions.method === \"HEAD\") {\n if (status < 400) {\n return;\n }\n\n throw new requestError.RequestError(response.statusText, status, {\n headers,\n request: requestOptions\n });\n }\n\n if (status === 304) {\n throw new requestError.RequestError(\"Not modified\", status, {\n headers,\n request: requestOptions\n });\n }\n\n if (status >= 400) {\n return response.text().then(message => {\n const error = new requestError.RequestError(message, status, {\n headers,\n request: requestOptions\n });\n\n try {\n let responseBody = JSON.parse(error.message);\n Object.assign(error, responseBody);\n let errors = responseBody.errors; // Assumption `errors` would always be in Array format\n\n error.message = error.message + \": \" + errors.map(JSON.stringify).join(\", \");\n } catch (e) {// ignore, see octokit/rest.js#684\n }\n\n throw error;\n });\n }\n\n const contentType = response.headers.get(\"content-type\");\n\n if (/application\\/json/.test(contentType)) {\n return response.json();\n }\n\n if (!contentType || /^text\\/|charset=utf-8$/.test(contentType)) {\n return response.text();\n }\n\n return getBufferResponse(response);\n }).then(data => {\n return {\n status,\n url,\n headers,\n data\n };\n }).catch(error => {\n if (error instanceof requestError.RequestError) {\n throw error;\n }\n\n throw new requestError.RequestError(error.message, 500, {\n headers,\n request: requestOptions\n });\n });\n}\n\nfunction withDefaults(oldEndpoint, newDefaults) {\n const endpoint = oldEndpoint.defaults(newDefaults);\n\n const newApi = function (route, parameters) {\n const endpointOptions = endpoint.merge(route, parameters);\n\n if (!endpointOptions.request || !endpointOptions.request.hook) {\n return fetchWrapper(endpoint.parse(endpointOptions));\n }\n\n const request = (route, parameters) => {\n return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters)));\n };\n\n Object.assign(request, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint)\n });\n return endpointOptions.request.hook(request, endpointOptions);\n };\n\n return Object.assign(newApi, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint)\n });\n}\n\nconst request = withDefaults(endpoint.endpoint, {\n headers: {\n \"user-agent\": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}`\n 
}\n});\n\nexports.request = request;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\n/*!\n * is-plain-object \n *\n * Copyright (c) 2014-2017, Jon Schlinkert.\n * Released under the MIT License.\n */\n\nfunction isObject(o) {\n return Object.prototype.toString.call(o) === '[object Object]';\n}\n\nfunction isPlainObject(o) {\n var ctor,prot;\n\n if (isObject(o) === false) return false;\n\n // If has modified constructor\n ctor = o.constructor;\n if (ctor === undefined) return true;\n\n // If has modified prototype\n prot = ctor.prototype;\n if (isObject(prot) === false) return false;\n\n // If constructor does not have an Object-specific method\n if (prot.hasOwnProperty('isPrototypeOf') === false) {\n return false;\n }\n\n // Most likely a plain Object\n return true;\n}\n\nexports.isPlainObject = isPlainObject;\n","var register = require('./lib/register')\nvar addHook = require('./lib/add')\nvar removeHook = require('./lib/remove')\n\n// bind with array of arguments: https://stackoverflow.com/a/21792913\nvar bind = Function.bind\nvar bindable = bind.bind(bind)\n\nfunction bindApi (hook, state, name) {\n var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state])\n hook.api = { remove: removeHookRef }\n hook.remove = removeHookRef\n\n ;['before', 'error', 'after', 'wrap'].forEach(function (kind) {\n var args = name ? [state, kind, name] : [state, kind]\n hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args)\n })\n}\n\nfunction HookSingular () {\n var singularHookName = 'h'\n var singularHookState = {\n registry: {}\n }\n var singularHook = register.bind(null, singularHookState, singularHookName)\n bindApi(singularHook, singularHookState, singularHookName)\n return singularHook\n}\n\nfunction HookCollection () {\n var state = {\n registry: {}\n }\n\n var hook = register.bind(null, state)\n bindApi(hook, state)\n\n return hook\n}\n\nvar collectionHookDeprecationMessageDisplayed = false\nfunction Hook () {\n if (!collectionHookDeprecationMessageDisplayed) {\n console.warn('[before-after-hook]: \"Hook()\" repurposing warning, use \"Hook.Collection()\". 
Read more: https://git.io/upgrade-before-after-hook-to-1.4')\n collectionHookDeprecationMessageDisplayed = true\n }\n return HookCollection()\n}\n\nHook.Singular = HookSingular.bind()\nHook.Collection = HookCollection.bind()\n\nmodule.exports = Hook\n// expose constructors as a named property for TypeScript\nmodule.exports.Hook = Hook\nmodule.exports.Singular = Hook.Singular\nmodule.exports.Collection = Hook.Collection\n","module.exports = addHook\n\nfunction addHook (state, kind, name, hook) {\n var orig = hook\n if (!state.registry[name]) {\n state.registry[name] = []\n }\n\n if (kind === 'before') {\n hook = function (method, options) {\n return Promise.resolve()\n .then(orig.bind(null, options))\n .then(method.bind(null, options))\n }\n }\n\n if (kind === 'after') {\n hook = function (method, options) {\n var result\n return Promise.resolve()\n .then(method.bind(null, options))\n .then(function (result_) {\n result = result_\n return orig(result, options)\n })\n .then(function () {\n return result\n })\n }\n }\n\n if (kind === 'error') {\n hook = function (method, options) {\n return Promise.resolve()\n .then(method.bind(null, options))\n .catch(function (error) {\n return orig(error, options)\n })\n }\n }\n\n state.registry[name].push({\n hook: hook,\n orig: orig\n })\n}\n","module.exports = register\n\nfunction register (state, name, method, options) {\n if (typeof method !== 'function') {\n throw new Error('method for before hook must be a function')\n }\n\n if (!options) {\n options = {}\n }\n\n if (Array.isArray(name)) {\n return name.reverse().reduce(function (callback, name) {\n return register.bind(null, state, name, callback, options)\n }, method)()\n }\n\n return Promise.resolve()\n .then(function () {\n if (!state.registry[name]) {\n return method(options)\n }\n\n return (state.registry[name]).reduce(function (method, registered) {\n return registered.hook.bind(null, method, options)\n }, method)()\n })\n}\n","module.exports = removeHook\n\nfunction removeHook (state, name, method) {\n if (!state.registry[name]) {\n return\n }\n\n var index = state.registry[name]\n .map(function (registered) { return registered.orig })\n .indexOf(method)\n\n if (index === -1) {\n return\n }\n\n state.registry[name].splice(index, 1)\n}\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nclass Deprecation extends Error {\n constructor(message) {\n super(message); // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n\n this.name = 'Deprecation';\n }\n\n}\n\nexports.Deprecation = Deprecation;\n","'use strict';\n\nconst stringify = require('./lib/stringify');\nconst compile = require('./lib/compile');\nconst expand = require('./lib/expand');\nconst parse = require('./lib/parse');\n\n/**\n * Expand the given pattern or create a regex-compatible string.\n *\n * ```js\n * const braces = require('braces');\n * console.log(braces('{a,b,c}', { compile: true })); //=> ['(a|b|c)']\n * console.log(braces('{a,b,c}')); //=> ['a', 'b', 'c']\n * ```\n * @param {String} `str`\n * @param {Object} `options`\n * @return {String}\n * @api public\n */\n\nconst braces = (input, options = {}) => {\n let output = [];\n\n if (Array.isArray(input)) {\n for (let pattern of input) {\n let result = braces.create(pattern, options);\n if (Array.isArray(result)) {\n output.push(...result);\n } else {\n output.push(result);\n }\n }\n } else {\n output = 
[].concat(braces.create(input, options));\n }\n\n if (options && options.expand === true && options.nodupes === true) {\n output = [...new Set(output)];\n }\n return output;\n};\n\n/**\n * Parse the given `str` with the given `options`.\n *\n * ```js\n * // braces.parse(pattern, [, options]);\n * const ast = braces.parse('a/{b,c}/d');\n * console.log(ast);\n * ```\n * @param {String} pattern Brace pattern to parse\n * @param {Object} options\n * @return {Object} Returns an AST\n * @api public\n */\n\nbraces.parse = (input, options = {}) => parse(input, options);\n\n/**\n * Creates a braces string from an AST, or an AST node.\n *\n * ```js\n * const braces = require('braces');\n * let ast = braces.parse('foo/{a,b}/bar');\n * console.log(stringify(ast.nodes[2])); //=> '{a,b}'\n * ```\n * @param {String} `input` Brace pattern or AST.\n * @param {Object} `options`\n * @return {Array} Returns an array of expanded values.\n * @api public\n */\n\nbraces.stringify = (input, options = {}) => {\n if (typeof input === 'string') {\n return stringify(braces.parse(input, options), options);\n }\n return stringify(input, options);\n};\n\n/**\n * Compiles a brace pattern into a regex-compatible, optimized string.\n * This method is called by the main [braces](#braces) function by default.\n *\n * ```js\n * const braces = require('braces');\n * console.log(braces.compile('a/{b,c}/d'));\n * //=> ['a/(b|c)/d']\n * ```\n * @param {String} `input` Brace pattern or AST.\n * @param {Object} `options`\n * @return {Array} Returns an array of expanded values.\n * @api public\n */\n\nbraces.compile = (input, options = {}) => {\n if (typeof input === 'string') {\n input = braces.parse(input, options);\n }\n return compile(input, options);\n};\n\n/**\n * Expands a brace pattern into an array. This method is called by the\n * main [braces](#braces) function when `options.expand` is true. Before\n * using this method it's recommended that you read the [performance notes](#performance))\n * and advantages of using [.compile](#compile) instead.\n *\n * ```js\n * const braces = require('braces');\n * console.log(braces.expand('a/{b,c}/d'));\n * //=> ['a/b/d', 'a/c/d'];\n * ```\n * @param {String} `pattern` Brace pattern\n * @param {Object} `options`\n * @return {Array} Returns an array of expanded values.\n * @api public\n */\n\nbraces.expand = (input, options = {}) => {\n if (typeof input === 'string') {\n input = braces.parse(input, options);\n }\n\n let result = expand(input, options);\n\n // filter out empty strings if specified\n if (options.noempty === true) {\n result = result.filter(Boolean);\n }\n\n // filter out duplicates if specified\n if (options.nodupes === true) {\n result = [...new Set(result)];\n }\n\n return result;\n};\n\n/**\n * Processes a brace pattern and returns either an expanded array\n * (if `options.expand` is true), a highly optimized regex-compatible string.\n * This method is called by the main [braces](#braces) function.\n *\n * ```js\n * const braces = require('braces');\n * console.log(braces.create('user-{200..300}/project-{a,b,c}-{1..10}'))\n * //=> 'user-(20[0-9]|2[1-9][0-9]|300)/project-(a|b|c)-([1-9]|10)'\n * ```\n * @param {String} `pattern` Brace pattern\n * @param {Object} `options`\n * @return {Array} Returns an array of expanded values.\n * @api public\n */\n\nbraces.create = (input, options = {}) => {\n if (input === '' || input.length < 3) {\n return [input];\n }\n\n return options.expand !== true\n ? 
braces.compile(input, options)\n : braces.expand(input, options);\n};\n\n/**\n * Expose \"braces\"\n */\n\nmodule.exports = braces;\n","'use strict';\n\nconst fill = require('fill-range');\nconst utils = require('./utils');\n\nconst compile = (ast, options = {}) => {\n let walk = (node, parent = {}) => {\n let invalidBlock = utils.isInvalidBrace(parent);\n let invalidNode = node.invalid === true && options.escapeInvalid === true;\n let invalid = invalidBlock === true || invalidNode === true;\n let prefix = options.escapeInvalid === true ? '\\\\' : '';\n let output = '';\n\n if (node.isOpen === true) {\n return prefix + node.value;\n }\n if (node.isClose === true) {\n return prefix + node.value;\n }\n\n if (node.type === 'open') {\n return invalid ? (prefix + node.value) : '(';\n }\n\n if (node.type === 'close') {\n return invalid ? (prefix + node.value) : ')';\n }\n\n if (node.type === 'comma') {\n return node.prev.type === 'comma' ? '' : (invalid ? node.value : '|');\n }\n\n if (node.value) {\n return node.value;\n }\n\n if (node.nodes && node.ranges > 0) {\n let args = utils.reduce(node.nodes);\n let range = fill(...args, { ...options, wrap: false, toRegex: true });\n\n if (range.length !== 0) {\n return args.length > 1 && range.length > 1 ? `(${range})` : range;\n }\n }\n\n if (node.nodes) {\n for (let child of node.nodes) {\n output += walk(child, node);\n }\n }\n return output;\n };\n\n return walk(ast);\n};\n\nmodule.exports = compile;\n","'use strict';\n\nmodule.exports = {\n MAX_LENGTH: 1024 * 64,\n\n // Digits\n CHAR_0: '0', /* 0 */\n CHAR_9: '9', /* 9 */\n\n // Alphabet chars.\n CHAR_UPPERCASE_A: 'A', /* A */\n CHAR_LOWERCASE_A: 'a', /* a */\n CHAR_UPPERCASE_Z: 'Z', /* Z */\n CHAR_LOWERCASE_Z: 'z', /* z */\n\n CHAR_LEFT_PARENTHESES: '(', /* ( */\n CHAR_RIGHT_PARENTHESES: ')', /* ) */\n\n CHAR_ASTERISK: '*', /* * */\n\n // Non-alphabetic chars.\n CHAR_AMPERSAND: '&', /* & */\n CHAR_AT: '@', /* @ */\n CHAR_BACKSLASH: '\\\\', /* \\ */\n CHAR_BACKTICK: '`', /* ` */\n CHAR_CARRIAGE_RETURN: '\\r', /* \\r */\n CHAR_CIRCUMFLEX_ACCENT: '^', /* ^ */\n CHAR_COLON: ':', /* : */\n CHAR_COMMA: ',', /* , */\n CHAR_DOLLAR: '$', /* . */\n CHAR_DOT: '.', /* . */\n CHAR_DOUBLE_QUOTE: '\"', /* \" */\n CHAR_EQUAL: '=', /* = */\n CHAR_EXCLAMATION_MARK: '!', /* ! */\n CHAR_FORM_FEED: '\\f', /* \\f */\n CHAR_FORWARD_SLASH: '/', /* / */\n CHAR_HASH: '#', /* # */\n CHAR_HYPHEN_MINUS: '-', /* - */\n CHAR_LEFT_ANGLE_BRACKET: '<', /* < */\n CHAR_LEFT_CURLY_BRACE: '{', /* { */\n CHAR_LEFT_SQUARE_BRACKET: '[', /* [ */\n CHAR_LINE_FEED: '\\n', /* \\n */\n CHAR_NO_BREAK_SPACE: '\\u00A0', /* \\u00A0 */\n CHAR_PERCENT: '%', /* % */\n CHAR_PLUS: '+', /* + */\n CHAR_QUESTION_MARK: '?', /* ? */\n CHAR_RIGHT_ANGLE_BRACKET: '>', /* > */\n CHAR_RIGHT_CURLY_BRACE: '}', /* } */\n CHAR_RIGHT_SQUARE_BRACKET: ']', /* ] */\n CHAR_SEMICOLON: ';', /* ; */\n CHAR_SINGLE_QUOTE: '\\'', /* ' */\n CHAR_SPACE: ' ', /* */\n CHAR_TAB: '\\t', /* \\t */\n CHAR_UNDERSCORE: '_', /* _ */\n CHAR_VERTICAL_LINE: '|', /* | */\n CHAR_ZERO_WIDTH_NOBREAK_SPACE: '\\uFEFF' /* \\uFEFF */\n};\n","'use strict';\n\nconst fill = require('fill-range');\nconst stringify = require('./stringify');\nconst utils = require('./utils');\n\nconst append = (queue = '', stash = '', enclose = false) => {\n let result = [];\n\n queue = [].concat(queue);\n stash = [].concat(stash);\n\n if (!stash.length) return queue;\n if (!queue.length) {\n return enclose ? 
utils.flatten(stash).map(ele => `{${ele}}`) : stash;\n }\n\n for (let item of queue) {\n if (Array.isArray(item)) {\n for (let value of item) {\n result.push(append(value, stash, enclose));\n }\n } else {\n for (let ele of stash) {\n if (enclose === true && typeof ele === 'string') ele = `{${ele}}`;\n result.push(Array.isArray(ele) ? append(item, ele, enclose) : (item + ele));\n }\n }\n }\n return utils.flatten(result);\n};\n\nconst expand = (ast, options = {}) => {\n let rangeLimit = options.rangeLimit === void 0 ? 1000 : options.rangeLimit;\n\n let walk = (node, parent = {}) => {\n node.queue = [];\n\n let p = parent;\n let q = parent.queue;\n\n while (p.type !== 'brace' && p.type !== 'root' && p.parent) {\n p = p.parent;\n q = p.queue;\n }\n\n if (node.invalid || node.dollar) {\n q.push(append(q.pop(), stringify(node, options)));\n return;\n }\n\n if (node.type === 'brace' && node.invalid !== true && node.nodes.length === 2) {\n q.push(append(q.pop(), ['{}']));\n return;\n }\n\n if (node.nodes && node.ranges > 0) {\n let args = utils.reduce(node.nodes);\n\n if (utils.exceedsLimit(...args, options.step, rangeLimit)) {\n throw new RangeError('expanded array length exceeds range limit. Use options.rangeLimit to increase or disable the limit.');\n }\n\n let range = fill(...args, options);\n if (range.length === 0) {\n range = stringify(node, options);\n }\n\n q.push(append(q.pop(), range));\n node.nodes = [];\n return;\n }\n\n let enclose = utils.encloseBrace(node);\n let queue = node.queue;\n let block = node;\n\n while (block.type !== 'brace' && block.type !== 'root' && block.parent) {\n block = block.parent;\n queue = block.queue;\n }\n\n for (let i = 0; i < node.nodes.length; i++) {\n let child = node.nodes[i];\n\n if (child.type === 'comma' && node.type === 'brace') {\n if (i === 1) queue.push('');\n queue.push('');\n continue;\n }\n\n if (child.type === 'close') {\n q.push(append(q.pop(), queue, enclose));\n continue;\n }\n\n if (child.value && child.type !== 'open') {\n queue.push(append(queue.pop(), child.value));\n continue;\n }\n\n if (child.nodes) {\n walk(child, node);\n }\n }\n\n return queue;\n };\n\n return utils.flatten(walk(ast));\n};\n\nmodule.exports = expand;\n","'use strict';\n\nconst stringify = require('./stringify');\n\n/**\n * Constants\n */\n\nconst {\n MAX_LENGTH,\n CHAR_BACKSLASH, /* \\ */\n CHAR_BACKTICK, /* ` */\n CHAR_COMMA, /* , */\n CHAR_DOT, /* . */\n CHAR_LEFT_PARENTHESES, /* ( */\n CHAR_RIGHT_PARENTHESES, /* ) */\n CHAR_LEFT_CURLY_BRACE, /* { */\n CHAR_RIGHT_CURLY_BRACE, /* } */\n CHAR_LEFT_SQUARE_BRACKET, /* [ */\n CHAR_RIGHT_SQUARE_BRACKET, /* ] */\n CHAR_DOUBLE_QUOTE, /* \" */\n CHAR_SINGLE_QUOTE, /* ' */\n CHAR_NO_BREAK_SPACE,\n CHAR_ZERO_WIDTH_NOBREAK_SPACE\n} = require('./constants');\n\n/**\n * parse\n */\n\nconst parse = (input, options = {}) => {\n if (typeof input !== 'string') {\n throw new TypeError('Expected a string');\n }\n\n let opts = options || {};\n let max = typeof opts.maxLength === 'number' ? 
Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH;\n if (input.length > max) {\n throw new SyntaxError(`Input length (${input.length}), exceeds max characters (${max})`);\n }\n\n let ast = { type: 'root', input, nodes: [] };\n let stack = [ast];\n let block = ast;\n let prev = ast;\n let brackets = 0;\n let length = input.length;\n let index = 0;\n let depth = 0;\n let value;\n let memo = {};\n\n /**\n * Helpers\n */\n\n const advance = () => input[index++];\n const push = node => {\n if (node.type === 'text' && prev.type === 'dot') {\n prev.type = 'text';\n }\n\n if (prev && prev.type === 'text' && node.type === 'text') {\n prev.value += node.value;\n return;\n }\n\n block.nodes.push(node);\n node.parent = block;\n node.prev = prev;\n prev = node;\n return node;\n };\n\n push({ type: 'bos' });\n\n while (index < length) {\n block = stack[stack.length - 1];\n value = advance();\n\n /**\n * Invalid chars\n */\n\n if (value === CHAR_ZERO_WIDTH_NOBREAK_SPACE || value === CHAR_NO_BREAK_SPACE) {\n continue;\n }\n\n /**\n * Escaped chars\n */\n\n if (value === CHAR_BACKSLASH) {\n push({ type: 'text', value: (options.keepEscaping ? value : '') + advance() });\n continue;\n }\n\n /**\n * Right square bracket (literal): ']'\n */\n\n if (value === CHAR_RIGHT_SQUARE_BRACKET) {\n push({ type: 'text', value: '\\\\' + value });\n continue;\n }\n\n /**\n * Left square bracket: '['\n */\n\n if (value === CHAR_LEFT_SQUARE_BRACKET) {\n brackets++;\n\n let closed = true;\n let next;\n\n while (index < length && (next = advance())) {\n value += next;\n\n if (next === CHAR_LEFT_SQUARE_BRACKET) {\n brackets++;\n continue;\n }\n\n if (next === CHAR_BACKSLASH) {\n value += advance();\n continue;\n }\n\n if (next === CHAR_RIGHT_SQUARE_BRACKET) {\n brackets--;\n\n if (brackets === 0) {\n break;\n }\n }\n }\n\n push({ type: 'text', value });\n continue;\n }\n\n /**\n * Parentheses\n */\n\n if (value === CHAR_LEFT_PARENTHESES) {\n block = push({ type: 'paren', nodes: [] });\n stack.push(block);\n push({ type: 'text', value });\n continue;\n }\n\n if (value === CHAR_RIGHT_PARENTHESES) {\n if (block.type !== 'paren') {\n push({ type: 'text', value });\n continue;\n }\n block = stack.pop();\n push({ type: 'text', value });\n block = stack[stack.length - 1];\n continue;\n }\n\n /**\n * Quotes: '|\"|`\n */\n\n if (value === CHAR_DOUBLE_QUOTE || value === CHAR_SINGLE_QUOTE || value === CHAR_BACKTICK) {\n let open = value;\n let next;\n\n if (options.keepQuotes !== true) {\n value = '';\n }\n\n while (index < length && (next = advance())) {\n if (next === CHAR_BACKSLASH) {\n value += next + advance();\n continue;\n }\n\n if (next === open) {\n if (options.keepQuotes === true) value += next;\n break;\n }\n\n value += next;\n }\n\n push({ type: 'text', value });\n continue;\n }\n\n /**\n * Left curly brace: '{'\n */\n\n if (value === CHAR_LEFT_CURLY_BRACE) {\n depth++;\n\n let dollar = prev.value && prev.value.slice(-1) === '$' || block.dollar === true;\n let brace = {\n type: 'brace',\n open: true,\n close: false,\n dollar,\n depth,\n commas: 0,\n ranges: 0,\n nodes: []\n };\n\n block = push(brace);\n stack.push(block);\n push({ type: 'open', value });\n continue;\n }\n\n /**\n * Right curly brace: '}'\n */\n\n if (value === CHAR_RIGHT_CURLY_BRACE) {\n if (block.type !== 'brace') {\n push({ type: 'text', value });\n continue;\n }\n\n let type = 'close';\n block = stack.pop();\n block.close = true;\n\n push({ type, value });\n depth--;\n\n block = stack[stack.length - 1];\n continue;\n }\n\n /**\n * Comma: ','\n */\n\n if 
(value === CHAR_COMMA && depth > 0) {\n if (block.ranges > 0) {\n block.ranges = 0;\n let open = block.nodes.shift();\n block.nodes = [open, { type: 'text', value: stringify(block) }];\n }\n\n push({ type: 'comma', value });\n block.commas++;\n continue;\n }\n\n /**\n * Dot: '.'\n */\n\n if (value === CHAR_DOT && depth > 0 && block.commas === 0) {\n let siblings = block.nodes;\n\n if (depth === 0 || siblings.length === 0) {\n push({ type: 'text', value });\n continue;\n }\n\n if (prev.type === 'dot') {\n block.range = [];\n prev.value += value;\n prev.type = 'range';\n\n if (block.nodes.length !== 3 && block.nodes.length !== 5) {\n block.invalid = true;\n block.ranges = 0;\n prev.type = 'text';\n continue;\n }\n\n block.ranges++;\n block.args = [];\n continue;\n }\n\n if (prev.type === 'range') {\n siblings.pop();\n\n let before = siblings[siblings.length - 1];\n before.value += prev.value + value;\n prev = before;\n block.ranges--;\n continue;\n }\n\n push({ type: 'dot', value });\n continue;\n }\n\n /**\n * Text\n */\n\n push({ type: 'text', value });\n }\n\n // Mark imbalanced braces and brackets as invalid\n do {\n block = stack.pop();\n\n if (block.type !== 'root') {\n block.nodes.forEach(node => {\n if (!node.nodes) {\n if (node.type === 'open') node.isOpen = true;\n if (node.type === 'close') node.isClose = true;\n if (!node.nodes) node.type = 'text';\n node.invalid = true;\n }\n });\n\n // get the location of the block on parent.nodes (block's siblings)\n let parent = stack[stack.length - 1];\n let index = parent.nodes.indexOf(block);\n // replace the (invalid) block with it's nodes\n parent.nodes.splice(index, 1, ...block.nodes);\n }\n } while (stack.length > 0);\n\n push({ type: 'eos' });\n return ast;\n};\n\nmodule.exports = parse;\n","'use strict';\n\nconst utils = require('./utils');\n\nmodule.exports = (ast, options = {}) => {\n let stringify = (node, parent = {}) => {\n let invalidBlock = options.escapeInvalid && utils.isInvalidBrace(parent);\n let invalidNode = node.invalid === true && options.escapeInvalid === true;\n let output = '';\n\n if (node.value) {\n if ((invalidBlock || invalidNode) && utils.isOpenOrClose(node)) {\n return '\\\\' + node.value;\n }\n return node.value;\n }\n\n if (node.value) {\n return node.value;\n }\n\n if (node.nodes) {\n for (let child of node.nodes) {\n output += stringify(child);\n }\n }\n return output;\n };\n\n return stringify(ast);\n};\n\n","'use strict';\n\nexports.isInteger = num => {\n if (typeof num === 'number') {\n return Number.isInteger(num);\n }\n if (typeof num === 'string' && num.trim() !== '') {\n return Number.isInteger(Number(num));\n }\n return false;\n};\n\n/**\n * Find a node of the given type\n */\n\nexports.find = (node, type) => node.nodes.find(node => node.type === type);\n\n/**\n * Find a node of the given type\n */\n\nexports.exceedsLimit = (min, max, step = 1, limit) => {\n if (limit === false) return false;\n if (!exports.isInteger(min) || !exports.isInteger(max)) return false;\n return ((Number(max) - Number(min)) / Number(step)) >= limit;\n};\n\n/**\n * Escape the given node with '\\\\' before node.value\n */\n\nexports.escapeNode = (block, n = 0, type) => {\n let node = block.nodes[n];\n if (!node) return;\n\n if ((type && node.type === type) || node.type === 'open' || node.type === 'close') {\n if (node.escaped !== true) {\n node.value = '\\\\' + node.value;\n node.escaped = true;\n }\n }\n};\n\n/**\n * Returns true if the given brace node should be enclosed in literal braces\n */\n\nexports.encloseBrace = 
node => {\n if (node.type !== 'brace') return false;\n if ((node.commas >> 0 + node.ranges >> 0) === 0) {\n node.invalid = true;\n return true;\n }\n return false;\n};\n\n/**\n * Returns true if a brace node is invalid.\n */\n\nexports.isInvalidBrace = block => {\n if (block.type !== 'brace') return false;\n if (block.invalid === true || block.dollar) return true;\n if ((block.commas >> 0 + block.ranges >> 0) === 0) {\n block.invalid = true;\n return true;\n }\n if (block.open !== true || block.close !== true) {\n block.invalid = true;\n return true;\n }\n return false;\n};\n\n/**\n * Returns true if a node is an open or close node\n */\n\nexports.isOpenOrClose = node => {\n if (node.type === 'open' || node.type === 'close') {\n return true;\n }\n return node.open === true || node.close === true;\n};\n\n/**\n * Reduce an array of text nodes.\n */\n\nexports.reduce = nodes => nodes.reduce((acc, node) => {\n if (node.type === 'text') acc.push(node.value);\n if (node.type === 'range') node.type = 'text';\n return acc;\n}, []);\n\n/**\n * Flatten an array\n */\n\nexports.flatten = (...args) => {\n const result = [];\n const flat = arr => {\n for (let i = 0; i < arr.length; i++) {\n let ele = arr[i];\n Array.isArray(ele) ? flat(ele, result) : ele !== void 0 && result.push(ele);\n }\n return result;\n };\n flat(args);\n return result;\n};\n","/*!\n * fill-range \n *\n * Copyright (c) 2014-present, Jon Schlinkert.\n * Licensed under the MIT License.\n */\n\n'use strict';\n\nconst util = require('util');\nconst toRegexRange = require('to-regex-range');\n\nconst isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val);\n\nconst transform = toNumber => {\n return value => toNumber === true ? Number(value) : String(value);\n};\n\nconst isValidValue = value => {\n return typeof value === 'number' || (typeof value === 'string' && value !== '');\n};\n\nconst isNumber = num => Number.isInteger(+num);\n\nconst zeros = input => {\n let value = `${input}`;\n let index = -1;\n if (value[0] === '-') value = value.slice(1);\n if (value === '0') return false;\n while (value[++index] === '0');\n return index > 0;\n};\n\nconst stringify = (start, end, options) => {\n if (typeof start === 'string' || typeof end === 'string') {\n return true;\n }\n return options.stringify === true;\n};\n\nconst pad = (input, maxLength, toNumber) => {\n if (maxLength > 0) {\n let dash = input[0] === '-' ? '-' : '';\n if (dash) input = input.slice(1);\n input = (dash + input.padStart(dash ? maxLength - 1 : maxLength, '0'));\n }\n if (toNumber === false) {\n return String(input);\n }\n return input;\n};\n\nconst toMaxLen = (input, maxLength) => {\n let negative = input[0] === '-' ? '-' : '';\n if (negative) {\n input = input.slice(1);\n maxLength--;\n }\n while (input.length < maxLength) input = '0' + input;\n return negative ? ('-' + input) : input;\n};\n\nconst toSequence = (parts, options) => {\n parts.negatives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0);\n parts.positives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0);\n\n let prefix = options.capture ? 
'' : '?:';\n let positives = '';\n let negatives = '';\n let result;\n\n if (parts.positives.length) {\n positives = parts.positives.join('|');\n }\n\n if (parts.negatives.length) {\n negatives = `-(${prefix}${parts.negatives.join('|')})`;\n }\n\n if (positives && negatives) {\n result = `${positives}|${negatives}`;\n } else {\n result = positives || negatives;\n }\n\n if (options.wrap) {\n return `(${prefix}${result})`;\n }\n\n return result;\n};\n\nconst toRange = (a, b, isNumbers, options) => {\n if (isNumbers) {\n return toRegexRange(a, b, { wrap: false, ...options });\n }\n\n let start = String.fromCharCode(a);\n if (a === b) return start;\n\n let stop = String.fromCharCode(b);\n return `[${start}-${stop}]`;\n};\n\nconst toRegex = (start, end, options) => {\n if (Array.isArray(start)) {\n let wrap = options.wrap === true;\n let prefix = options.capture ? '' : '?:';\n return wrap ? `(${prefix}${start.join('|')})` : start.join('|');\n }\n return toRegexRange(start, end, options);\n};\n\nconst rangeError = (...args) => {\n return new RangeError('Invalid range arguments: ' + util.inspect(...args));\n};\n\nconst invalidRange = (start, end, options) => {\n if (options.strictRanges === true) throw rangeError([start, end]);\n return [];\n};\n\nconst invalidStep = (step, options) => {\n if (options.strictRanges === true) {\n throw new TypeError(`Expected step \"${step}\" to be a number`);\n }\n return [];\n};\n\nconst fillNumbers = (start, end, step = 1, options = {}) => {\n let a = Number(start);\n let b = Number(end);\n\n if (!Number.isInteger(a) || !Number.isInteger(b)) {\n if (options.strictRanges === true) throw rangeError([start, end]);\n return [];\n }\n\n // fix negative zero\n if (a === 0) a = 0;\n if (b === 0) b = 0;\n\n let descending = a > b;\n let startString = String(start);\n let endString = String(end);\n let stepString = String(step);\n step = Math.max(Math.abs(step), 1);\n\n let padded = zeros(startString) || zeros(endString) || zeros(stepString);\n let maxLen = padded ? Math.max(startString.length, endString.length, stepString.length) : 0;\n let toNumber = padded === false && stringify(start, end, options) === false;\n let format = options.transform || transform(toNumber);\n\n if (options.toRegex && step === 1) {\n return toRange(toMaxLen(start, maxLen), toMaxLen(end, maxLen), true, options);\n }\n\n let parts = { negatives: [], positives: [] };\n let push = num => parts[num < 0 ? 'negatives' : 'positives'].push(Math.abs(num));\n let range = [];\n let index = 0;\n\n while (descending ? a >= b : a <= b) {\n if (options.toRegex === true && step > 1) {\n push(a);\n } else {\n range.push(pad(format(a, index), maxLen, toNumber));\n }\n a = descending ? a - step : a + step;\n index++;\n }\n\n if (options.toRegex === true) {\n return step > 1\n ? toSequence(parts, options)\n : toRegex(range, null, { wrap: false, ...options });\n }\n\n return range;\n};\n\nconst fillLetters = (start, end, step = 1, options = {}) => {\n if ((!isNumber(start) && start.length > 1) || (!isNumber(end) && end.length > 1)) {\n return invalidRange(start, end, options);\n }\n\n\n let format = options.transform || (val => String.fromCharCode(val));\n let a = `${start}`.charCodeAt(0);\n let b = `${end}`.charCodeAt(0);\n\n let descending = a > b;\n let min = Math.min(a, b);\n let max = Math.max(a, b);\n\n if (options.toRegex && step === 1) {\n return toRange(min, max, false, options);\n }\n\n let range = [];\n let index = 0;\n\n while (descending ? 
a >= b : a <= b) {\n range.push(format(a, index));\n a = descending ? a - step : a + step;\n index++;\n }\n\n if (options.toRegex === true) {\n return toRegex(range, null, { wrap: false, options });\n }\n\n return range;\n};\n\nconst fill = (start, end, step, options = {}) => {\n if (end == null && isValidValue(start)) {\n return [start];\n }\n\n if (!isValidValue(start) || !isValidValue(end)) {\n return invalidRange(start, end, options);\n }\n\n if (typeof step === 'function') {\n return fill(start, end, 1, { transform: step });\n }\n\n if (isObject(step)) {\n return fill(start, end, 0, step);\n }\n\n let opts = { ...options };\n if (opts.capture === true) opts.wrap = true;\n step = step || opts.step || 1;\n\n if (!isNumber(step)) {\n if (step != null && !isObject(step)) return invalidStep(step, opts);\n return fill(start, end, 1, step);\n }\n\n if (isNumber(start) && isNumber(end)) {\n return fillNumbers(start, end, step, opts);\n }\n\n return fillLetters(start, end, Math.max(Math.abs(step), 1), opts);\n};\n\nmodule.exports = fill;\n","/*!\n * is-number \n *\n * Copyright (c) 2014-present, Jon Schlinkert.\n * Released under the MIT License.\n */\n\n'use strict';\n\nmodule.exports = function(num) {\n if (typeof num === 'number') {\n return num - num === 0;\n }\n if (typeof num === 'string' && num.trim() !== '') {\n return Number.isFinite ? Number.isFinite(+num) : isFinite(+num);\n }\n return false;\n};\n","'use strict';\n\nconst util = require('util');\nconst braces = require('braces');\nconst picomatch = require('picomatch');\nconst utils = require('picomatch/lib/utils');\nconst isEmptyString = val => typeof val === 'string' && (val === '' || val === './');\n\n/**\n * Returns an array of strings that match one or more glob patterns.\n *\n * ```js\n * const mm = require('micromatch');\n * // mm(list, patterns[, options]);\n *\n * console.log(mm(['a.js', 'a.txt'], ['*.js']));\n * //=> [ 'a.js' ]\n * ```\n * @param {String|Array} list List of strings to match.\n * @param {String|Array} patterns One or more glob patterns to use for matching.\n * @param {Object} options See available [options](#options)\n * @return {Array} Returns an array of matches\n * @summary false\n * @api public\n */\n\nconst micromatch = (list, patterns, options) => {\n patterns = [].concat(patterns);\n list = [].concat(list);\n\n let omit = new Set();\n let keep = new Set();\n let items = new Set();\n let negatives = 0;\n\n let onResult = state => {\n items.add(state.output);\n if (options && options.onResult) {\n options.onResult(state);\n }\n };\n\n for (let i = 0; i < patterns.length; i++) {\n let isMatch = picomatch(String(patterns[i]), { ...options, onResult }, true);\n let negated = isMatch.state.negated || isMatch.state.negatedExtglob;\n if (negated) negatives++;\n\n for (let item of list) {\n let matched = isMatch(item, true);\n\n let match = negated ? !matched.isMatch : matched.isMatch;\n if (!match) continue;\n\n if (negated) {\n omit.add(matched.output);\n } else {\n omit.delete(matched.output);\n keep.add(matched.output);\n }\n }\n }\n\n let result = negatives === patterns.length ? [...items] : [...keep];\n let matches = result.filter(item => !omit.has(item));\n\n if (options && matches.length === 0) {\n if (options.failglob === true) {\n throw new Error(`No matches found for \"${patterns.join(', ')}\"`);\n }\n\n if (options.nonull === true || options.nullglob === true) {\n return options.unescape ? 
patterns.map(p => p.replace(/\\\\/g, '')) : patterns;\n }\n }\n\n return matches;\n};\n\n/**\n * Backwards compatibility\n */\n\nmicromatch.match = micromatch;\n\n/**\n * Returns a matcher function from the given glob `pattern` and `options`.\n * The returned function takes a string to match as its only argument and returns\n * true if the string is a match.\n *\n * ```js\n * const mm = require('micromatch');\n * // mm.matcher(pattern[, options]);\n *\n * const isMatch = mm.matcher('*.!(*a)');\n * console.log(isMatch('a.a')); //=> false\n * console.log(isMatch('a.b')); //=> true\n * ```\n * @param {String} `pattern` Glob pattern\n * @param {Object} `options`\n * @return {Function} Returns a matcher function.\n * @api public\n */\n\nmicromatch.matcher = (pattern, options) => picomatch(pattern, options);\n\n/**\n * Returns true if **any** of the given glob `patterns` match the specified `string`.\n *\n * ```js\n * const mm = require('micromatch');\n * // mm.isMatch(string, patterns[, options]);\n *\n * console.log(mm.isMatch('a.a', ['b.*', '*.a'])); //=> true\n * console.log(mm.isMatch('a.a', 'b.*')); //=> false\n * ```\n * @param {String} str The string to test.\n * @param {String|Array} patterns One or more glob patterns to use for matching.\n * @param {Object} [options] See available [options](#options).\n * @return {Boolean} Returns true if any patterns match `str`\n * @api public\n */\n\nmicromatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str);\n\n/**\n * Backwards compatibility\n */\n\nmicromatch.any = micromatch.isMatch;\n\n/**\n * Returns a list of strings that _**do not match any**_ of the given `patterns`.\n *\n * ```js\n * const mm = require('micromatch');\n * // mm.not(list, patterns[, options]);\n *\n * console.log(mm.not(['a.a', 'b.b', 'c.c'], '*.a'));\n * //=> ['b.b', 'c.c']\n * ```\n * @param {Array} `list` Array of strings to match.\n * @param {String|Array} `patterns` One or more glob pattern to use for matching.\n * @param {Object} `options` See available [options](#options) for changing how matches are performed\n * @return {Array} Returns an array of strings that **do not match** the given patterns.\n * @api public\n */\n\nmicromatch.not = (list, patterns, options = {}) => {\n patterns = [].concat(patterns).map(String);\n let result = new Set();\n let items = [];\n\n let onResult = state => {\n if (options.onResult) options.onResult(state);\n items.push(state.output);\n };\n\n let matches = micromatch(list, patterns, { ...options, onResult });\n\n for (let item of items) {\n if (!matches.includes(item)) {\n result.add(item);\n }\n }\n return [...result];\n};\n\n/**\n * Returns true if the given `string` contains the given pattern. 
Similar\n * to [.isMatch](#isMatch) but the pattern can match any part of the string.\n *\n * ```js\n * var mm = require('micromatch');\n * // mm.contains(string, pattern[, options]);\n *\n * console.log(mm.contains('aa/bb/cc', '*b'));\n * //=> true\n * console.log(mm.contains('aa/bb/cc', '*d'));\n * //=> false\n * ```\n * @param {String} `str` The string to match.\n * @param {String|Array} `patterns` Glob pattern to use for matching.\n * @param {Object} `options` See available [options](#options) for changing how matches are performed\n * @return {Boolean} Returns true if the patter matches any part of `str`.\n * @api public\n */\n\nmicromatch.contains = (str, pattern, options) => {\n if (typeof str !== 'string') {\n throw new TypeError(`Expected a string: \"${util.inspect(str)}\"`);\n }\n\n if (Array.isArray(pattern)) {\n return pattern.some(p => micromatch.contains(str, p, options));\n }\n\n if (typeof pattern === 'string') {\n if (isEmptyString(str) || isEmptyString(pattern)) {\n return false;\n }\n\n if (str.includes(pattern) || (str.startsWith('./') && str.slice(2).includes(pattern))) {\n return true;\n }\n }\n\n return micromatch.isMatch(str, pattern, { ...options, contains: true });\n};\n\n/**\n * Filter the keys of the given object with the given `glob` pattern\n * and `options`. Does not attempt to match nested keys. If you need this feature,\n * use [glob-object][] instead.\n *\n * ```js\n * const mm = require('micromatch');\n * // mm.matchKeys(object, patterns[, options]);\n *\n * const obj = { aa: 'a', ab: 'b', ac: 'c' };\n * console.log(mm.matchKeys(obj, '*b'));\n * //=> { ab: 'b' }\n * ```\n * @param {Object} `object` The object with keys to filter.\n * @param {String|Array} `patterns` One or more glob patterns to use for matching.\n * @param {Object} `options` See available [options](#options) for changing how matches are performed\n * @return {Object} Returns an object with only keys that match the given patterns.\n * @api public\n */\n\nmicromatch.matchKeys = (obj, patterns, options) => {\n if (!utils.isObject(obj)) {\n throw new TypeError('Expected the first argument to be an object');\n }\n let keys = micromatch(Object.keys(obj), patterns, options);\n let res = {};\n for (let key of keys) res[key] = obj[key];\n return res;\n};\n\n/**\n * Returns true if some of the strings in the given `list` match any of the given glob `patterns`.\n *\n * ```js\n * const mm = require('micromatch');\n * // mm.some(list, patterns[, options]);\n *\n * console.log(mm.some(['foo.js', 'bar.js'], ['*.js', '!foo.js']));\n * // true\n * console.log(mm.some(['foo.js'], ['*.js', '!foo.js']));\n * // false\n * ```\n * @param {String|Array} `list` The string or array of strings to test. 
Returns as soon as the first match is found.\n * @param {String|Array} `patterns` One or more glob patterns to use for matching.\n * @param {Object} `options` See available [options](#options) for changing how matches are performed\n * @return {Boolean} Returns true if any patterns match `str`\n * @api public\n */\n\nmicromatch.some = (list, patterns, options) => {\n let items = [].concat(list);\n\n for (let pattern of [].concat(patterns)) {\n let isMatch = picomatch(String(pattern), options);\n if (items.some(item => isMatch(item))) {\n return true;\n }\n }\n return false;\n};\n\n/**\n * Returns true if every string in the given `list` matches\n * any of the given glob `patterns`.\n *\n * ```js\n * const mm = require('micromatch');\n * // mm.every(list, patterns[, options]);\n *\n * console.log(mm.every('foo.js', ['foo.js']));\n * // true\n * console.log(mm.every(['foo.js', 'bar.js'], ['*.js']));\n * // true\n * console.log(mm.every(['foo.js', 'bar.js'], ['*.js', '!foo.js']));\n * // false\n * console.log(mm.every(['foo.js'], ['*.js', '!foo.js']));\n * // false\n * ```\n * @param {String|Array} `list` The string or array of strings to test.\n * @param {String|Array} `patterns` One or more glob patterns to use for matching.\n * @param {Object} `options` See available [options](#options) for changing how matches are performed\n * @return {Boolean} Returns true if any patterns match `str`\n * @api public\n */\n\nmicromatch.every = (list, patterns, options) => {\n let items = [].concat(list);\n\n for (let pattern of [].concat(patterns)) {\n let isMatch = picomatch(String(pattern), options);\n if (!items.every(item => isMatch(item))) {\n return false;\n }\n }\n return true;\n};\n\n/**\n * Returns true if **all** of the given `patterns` match\n * the specified string.\n *\n * ```js\n * const mm = require('micromatch');\n * // mm.all(string, patterns[, options]);\n *\n * console.log(mm.all('foo.js', ['foo.js']));\n * // true\n *\n * console.log(mm.all('foo.js', ['*.js', '!foo.js']));\n * // false\n *\n * console.log(mm.all('foo.js', ['*.js', 'foo.js']));\n * // true\n *\n * console.log(mm.all('foo.js', ['*.js', 'f*', '*o*', '*o.js']));\n * // true\n * ```\n * @param {String|Array} `str` The string to test.\n * @param {String|Array} `patterns` One or more glob patterns to use for matching.\n * @param {Object} `options` See available [options](#options) for changing how matches are performed\n * @return {Boolean} Returns true if any patterns match `str`\n * @api public\n */\n\nmicromatch.all = (str, patterns, options) => {\n if (typeof str !== 'string') {\n throw new TypeError(`Expected a string: \"${util.inspect(str)}\"`);\n }\n\n return [].concat(patterns).every(p => picomatch(p, options)(str));\n};\n\n/**\n * Returns an array of matches captured by `pattern` in `string, or `null` if the pattern did not match.\n *\n * ```js\n * const mm = require('micromatch');\n * // mm.capture(pattern, string[, options]);\n *\n * console.log(mm.capture('test/*.js', 'test/foo.js'));\n * //=> ['foo']\n * console.log(mm.capture('test/*.js', 'foo/bar.css'));\n * //=> null\n * ```\n * @param {String} `glob` Glob pattern to use for matching.\n * @param {String} `input` String to match\n * @param {Object} `options` See available [options](#options) for changing how matches are performed\n * @return {Boolean} Returns an array of captures if the input matches the glob pattern, otherwise `null`.\n * @api public\n */\n\nmicromatch.capture = (glob, input, options) => {\n let posix = utils.isWindows(options);\n let regex = 
picomatch.makeRe(String(glob), { ...options, capture: true });\n let match = regex.exec(posix ? utils.toPosixSlashes(input) : input);\n\n if (match) {\n return match.slice(1).map(v => v === void 0 ? '' : v);\n }\n};\n\n/**\n * Create a regular expression from the given glob `pattern`.\n *\n * ```js\n * const mm = require('micromatch');\n * // mm.makeRe(pattern[, options]);\n *\n * console.log(mm.makeRe('*.js'));\n * //=> /^(?:(\\.[\\\\\\/])?(?!\\.)(?=.)[^\\/]*?\\.js)$/\n * ```\n * @param {String} `pattern` A glob pattern to convert to regex.\n * @param {Object} `options`\n * @return {RegExp} Returns a regex created from the given pattern.\n * @api public\n */\n\nmicromatch.makeRe = (...args) => picomatch.makeRe(...args);\n\n/**\n * Scan a glob pattern to separate the pattern into segments. Used\n * by the [split](#split) method.\n *\n * ```js\n * const mm = require('micromatch');\n * const state = mm.scan(pattern[, options]);\n * ```\n * @param {String} `pattern`\n * @param {Object} `options`\n * @return {Object} Returns an object with\n * @api public\n */\n\nmicromatch.scan = (...args) => picomatch.scan(...args);\n\n/**\n * Parse a glob pattern to create the source string for a regular\n * expression.\n *\n * ```js\n * const mm = require('micromatch');\n * const state = mm(pattern[, options]);\n * ```\n * @param {String} `glob`\n * @param {Object} `options`\n * @return {Object} Returns an object with useful properties and output to be used as regex source string.\n * @api public\n */\n\nmicromatch.parse = (patterns, options) => {\n let res = [];\n for (let pattern of [].concat(patterns || [])) {\n for (let str of braces(String(pattern), options)) {\n res.push(picomatch.parse(str, options));\n }\n }\n return res;\n};\n\n/**\n * Process the given brace `pattern`.\n *\n * ```js\n * const { braces } = require('micromatch');\n * console.log(braces('foo/{a,b,c}/bar'));\n * //=> [ 'foo/(a|b|c)/bar' ]\n *\n * console.log(braces('foo/{a,b,c}/bar', { expand: true }));\n * //=> [ 'foo/a/bar', 'foo/b/bar', 'foo/c/bar' ]\n * ```\n * @param {String} `pattern` String with brace pattern to process.\n * @param {Object} `options` Any [options](#options) to change how expansion is performed. 
See the [braces][] library for all available options.\n * @return {Array}\n * @api public\n */\n\nmicromatch.braces = (pattern, options) => {\n if (typeof pattern !== 'string') throw new TypeError('Expected a string');\n if ((options && options.nobrace === true) || !/\\{.*\\}/.test(pattern)) {\n return [pattern];\n }\n return braces(pattern, options);\n};\n\n/**\n * Expand braces\n */\n\nmicromatch.braceExpand = (pattern, options) => {\n if (typeof pattern !== 'string') throw new TypeError('Expected a string');\n return micromatch.braces(pattern, { ...options, expand: true });\n};\n\n/**\n * Expose micromatch\n */\n\nmodule.exports = micromatch;\n","/*!\n * to-regex-range \n *\n * Copyright (c) 2015-present, Jon Schlinkert.\n * Released under the MIT License.\n */\n\n'use strict';\n\nconst isNumber = require('is-number');\n\nconst toRegexRange = (min, max, options) => {\n if (isNumber(min) === false) {\n throw new TypeError('toRegexRange: expected the first argument to be a number');\n }\n\n if (max === void 0 || min === max) {\n return String(min);\n }\n\n if (isNumber(max) === false) {\n throw new TypeError('toRegexRange: expected the second argument to be a number.');\n }\n\n let opts = { relaxZeros: true, ...options };\n if (typeof opts.strictZeros === 'boolean') {\n opts.relaxZeros = opts.strictZeros === false;\n }\n\n let relax = String(opts.relaxZeros);\n let shorthand = String(opts.shorthand);\n let capture = String(opts.capture);\n let wrap = String(opts.wrap);\n let cacheKey = min + ':' + max + '=' + relax + shorthand + capture + wrap;\n\n if (toRegexRange.cache.hasOwnProperty(cacheKey)) {\n return toRegexRange.cache[cacheKey].result;\n }\n\n let a = Math.min(min, max);\n let b = Math.max(min, max);\n\n if (Math.abs(a - b) === 1) {\n let result = min + '|' + max;\n if (opts.capture) {\n return `(${result})`;\n }\n if (opts.wrap === false) {\n return result;\n }\n return `(?:${result})`;\n }\n\n let isPadded = hasPadding(min) || hasPadding(max);\n let state = { min, max, a, b };\n let positives = [];\n let negatives = [];\n\n if (isPadded) {\n state.isPadded = isPadded;\n state.maxLen = String(state.max).length;\n }\n\n if (a < 0) {\n let newMin = b < 0 ? 
Math.abs(b) : 1;\n negatives = splitToPatterns(newMin, Math.abs(a), state, opts);\n a = state.a = 0;\n }\n\n if (b >= 0) {\n positives = splitToPatterns(a, b, state, opts);\n }\n\n state.negatives = negatives;\n state.positives = positives;\n state.result = collatePatterns(negatives, positives, opts);\n\n if (opts.capture === true) {\n state.result = `(${state.result})`;\n } else if (opts.wrap !== false && (positives.length + negatives.length) > 1) {\n state.result = `(?:${state.result})`;\n }\n\n toRegexRange.cache[cacheKey] = state;\n return state.result;\n};\n\nfunction collatePatterns(neg, pos, options) {\n let onlyNegative = filterPatterns(neg, pos, '-', false, options) || [];\n let onlyPositive = filterPatterns(pos, neg, '', false, options) || [];\n let intersected = filterPatterns(neg, pos, '-?', true, options) || [];\n let subpatterns = onlyNegative.concat(intersected).concat(onlyPositive);\n return subpatterns.join('|');\n}\n\nfunction splitToRanges(min, max) {\n let nines = 1;\n let zeros = 1;\n\n let stop = countNines(min, nines);\n let stops = new Set([max]);\n\n while (min <= stop && stop <= max) {\n stops.add(stop);\n nines += 1;\n stop = countNines(min, nines);\n }\n\n stop = countZeros(max + 1, zeros) - 1;\n\n while (min < stop && stop <= max) {\n stops.add(stop);\n zeros += 1;\n stop = countZeros(max + 1, zeros) - 1;\n }\n\n stops = [...stops];\n stops.sort(compare);\n return stops;\n}\n\n/**\n * Convert a range to a regex pattern\n * @param {Number} `start`\n * @param {Number} `stop`\n * @return {String}\n */\n\nfunction rangeToPattern(start, stop, options) {\n if (start === stop) {\n return { pattern: start, count: [], digits: 0 };\n }\n\n let zipped = zip(start, stop);\n let digits = zipped.length;\n let pattern = '';\n let count = 0;\n\n for (let i = 0; i < digits; i++) {\n let [startDigit, stopDigit] = zipped[i];\n\n if (startDigit === stopDigit) {\n pattern += startDigit;\n\n } else if (startDigit !== '0' || stopDigit !== '9') {\n pattern += toCharacterClass(startDigit, stopDigit, options);\n\n } else {\n count++;\n }\n }\n\n if (count) {\n pattern += options.shorthand === true ? 
'\\\\d' : '[0-9]';\n }\n\n return { pattern, count: [count], digits };\n}\n\nfunction splitToPatterns(min, max, tok, options) {\n let ranges = splitToRanges(min, max);\n let tokens = [];\n let start = min;\n let prev;\n\n for (let i = 0; i < ranges.length; i++) {\n let max = ranges[i];\n let obj = rangeToPattern(String(start), String(max), options);\n let zeros = '';\n\n if (!tok.isPadded && prev && prev.pattern === obj.pattern) {\n if (prev.count.length > 1) {\n prev.count.pop();\n }\n\n prev.count.push(obj.count[0]);\n prev.string = prev.pattern + toQuantifier(prev.count);\n start = max + 1;\n continue;\n }\n\n if (tok.isPadded) {\n zeros = padZeros(max, tok, options);\n }\n\n obj.string = zeros + obj.pattern + toQuantifier(obj.count);\n tokens.push(obj);\n start = max + 1;\n prev = obj;\n }\n\n return tokens;\n}\n\nfunction filterPatterns(arr, comparison, prefix, intersection, options) {\n let result = [];\n\n for (let ele of arr) {\n let { string } = ele;\n\n // only push if _both_ are negative...\n if (!intersection && !contains(comparison, 'string', string)) {\n result.push(prefix + string);\n }\n\n // or _both_ are positive\n if (intersection && contains(comparison, 'string', string)) {\n result.push(prefix + string);\n }\n }\n return result;\n}\n\n/**\n * Zip strings\n */\n\nfunction zip(a, b) {\n let arr = [];\n for (let i = 0; i < a.length; i++) arr.push([a[i], b[i]]);\n return arr;\n}\n\nfunction compare(a, b) {\n return a > b ? 1 : b > a ? -1 : 0;\n}\n\nfunction contains(arr, key, val) {\n return arr.some(ele => ele[key] === val);\n}\n\nfunction countNines(min, len) {\n return Number(String(min).slice(0, -len) + '9'.repeat(len));\n}\n\nfunction countZeros(integer, zeros) {\n return integer - (integer % Math.pow(10, zeros));\n}\n\nfunction toQuantifier(digits) {\n let [start = 0, stop = ''] = digits;\n if (stop || start > 1) {\n return `{${start + (stop ? ',' + stop : '')}}`;\n }\n return '';\n}\n\nfunction toCharacterClass(a, b, options) {\n return `[${a}${(b - a === 1) ? '' : '-'}${b}]`;\n}\n\nfunction hasPadding(str) {\n return /^-?(0+)\\d/.test(str);\n}\n\nfunction padZeros(value, tok, options) {\n if (!tok.isPadded) {\n return value;\n }\n\n let diff = Math.abs(tok.maxLen - String(value).length);\n let relax = options.relaxZeros !== false;\n\n switch (diff) {\n case 0:\n return '';\n case 1:\n return relax ? '0?' : '0';\n case 2:\n return relax ? '0{0,2}' : '00';\n default: {\n return relax ? 
`0{0,${diff}}` : `0{${diff}}`;\n }\n }\n}\n\n/**\n * Cache\n */\n\ntoRegexRange.cache = {};\ntoRegexRange.clearCache = () => (toRegexRange.cache = {});\n\n/**\n * Expose `toRegexRange`\n */\n\nmodule.exports = toRegexRange;\n","\"use strict\";\r\nconst taskManager = require(\"./managers/tasks\");\r\nconst async_1 = require(\"./providers/async\");\r\nconst stream_1 = require(\"./providers/stream\");\r\nconst sync_1 = require(\"./providers/sync\");\r\nconst settings_1 = require(\"./settings\");\r\nconst utils = require(\"./utils\");\r\nasync function FastGlob(source, options) {\r\n assertPatternsInput(source);\r\n const works = getWorks(source, async_1.default, options);\r\n const result = await Promise.all(works);\r\n return utils.array.flatten(result);\r\n}\r\n// https://github.com/typescript-eslint/typescript-eslint/issues/60\r\n// eslint-disable-next-line no-redeclare\r\n(function (FastGlob) {\r\n function sync(source, options) {\r\n assertPatternsInput(source);\r\n const works = getWorks(source, sync_1.default, options);\r\n return utils.array.flatten(works);\r\n }\r\n FastGlob.sync = sync;\r\n function stream(source, options) {\r\n assertPatternsInput(source);\r\n const works = getWorks(source, stream_1.default, options);\r\n /**\r\n * The stream returned by the provider cannot work with an asynchronous iterator.\r\n * To support asynchronous iterators, regardless of the number of tasks, we always multiplex streams.\r\n * This affects performance (+25%). I don't see best solution right now.\r\n */\r\n return utils.stream.merge(works);\r\n }\r\n FastGlob.stream = stream;\r\n function generateTasks(source, options) {\r\n assertPatternsInput(source);\r\n const patterns = [].concat(source);\r\n const settings = new settings_1.default(options);\r\n return taskManager.generate(patterns, settings);\r\n }\r\n FastGlob.generateTasks = generateTasks;\r\n function isDynamicPattern(source, options) {\r\n assertPatternsInput(source);\r\n const settings = new settings_1.default(options);\r\n return utils.pattern.isDynamicPattern(source, settings);\r\n }\r\n FastGlob.isDynamicPattern = isDynamicPattern;\r\n function escapePath(source) {\r\n assertPatternsInput(source);\r\n return utils.path.escape(source);\r\n }\r\n FastGlob.escapePath = escapePath;\r\n})(FastGlob || (FastGlob = {}));\r\nfunction getWorks(source, _Provider, options) {\r\n const patterns = [].concat(source);\r\n const settings = new settings_1.default(options);\r\n const tasks = taskManager.generate(patterns, settings);\r\n const provider = new _Provider(settings);\r\n return tasks.map(provider.read, provider);\r\n}\r\nfunction assertPatternsInput(input) {\r\n const source = [].concat(input);\r\n const isValidSource = source.every((item) => utils.string.isString(item) && !utils.string.isEmpty(item));\r\n if (!isValidSource) {\r\n throw new TypeError('Patterns must be a string (non empty) or an array of strings');\r\n }\r\n}\r\nmodule.exports = FastGlob;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.convertPatternGroupToTask = exports.convertPatternGroupsToTasks = exports.groupPatternsByBaseDirectory = exports.getNegativePatternsAsPositive = exports.getPositivePatterns = exports.convertPatternsToTasks = exports.generate = void 0;\r\nconst utils = require(\"../utils\");\r\nfunction generate(patterns, settings) {\r\n const positivePatterns = getPositivePatterns(patterns);\r\n const negativePatterns = getNegativePatternsAsPositive(patterns, settings.ignore);\r\n const staticPatterns = 
positivePatterns.filter((pattern) => utils.pattern.isStaticPattern(pattern, settings));\r\n const dynamicPatterns = positivePatterns.filter((pattern) => utils.pattern.isDynamicPattern(pattern, settings));\r\n const staticTasks = convertPatternsToTasks(staticPatterns, negativePatterns, /* dynamic */ false);\r\n const dynamicTasks = convertPatternsToTasks(dynamicPatterns, negativePatterns, /* dynamic */ true);\r\n return staticTasks.concat(dynamicTasks);\r\n}\r\nexports.generate = generate;\r\nfunction convertPatternsToTasks(positive, negative, dynamic) {\r\n const positivePatternsGroup = groupPatternsByBaseDirectory(positive);\r\n // When we have a global group – there is no reason to divide the patterns into independent tasks.\r\n // In this case, the global task covers the rest.\r\n if ('.' in positivePatternsGroup) {\r\n const task = convertPatternGroupToTask('.', positive, negative, dynamic);\r\n return [task];\r\n }\r\n return convertPatternGroupsToTasks(positivePatternsGroup, negative, dynamic);\r\n}\r\nexports.convertPatternsToTasks = convertPatternsToTasks;\r\nfunction getPositivePatterns(patterns) {\r\n return utils.pattern.getPositivePatterns(patterns);\r\n}\r\nexports.getPositivePatterns = getPositivePatterns;\r\nfunction getNegativePatternsAsPositive(patterns, ignore) {\r\n const negative = utils.pattern.getNegativePatterns(patterns).concat(ignore);\r\n const positive = negative.map(utils.pattern.convertToPositivePattern);\r\n return positive;\r\n}\r\nexports.getNegativePatternsAsPositive = getNegativePatternsAsPositive;\r\nfunction groupPatternsByBaseDirectory(patterns) {\r\n const group = {};\r\n return patterns.reduce((collection, pattern) => {\r\n const base = utils.pattern.getBaseDirectory(pattern);\r\n if (base in collection) {\r\n collection[base].push(pattern);\r\n }\r\n else {\r\n collection[base] = [pattern];\r\n }\r\n return collection;\r\n }, group);\r\n}\r\nexports.groupPatternsByBaseDirectory = groupPatternsByBaseDirectory;\r\nfunction convertPatternGroupsToTasks(positive, negative, dynamic) {\r\n return Object.keys(positive).map((base) => {\r\n return convertPatternGroupToTask(base, positive[base], negative, dynamic);\r\n });\r\n}\r\nexports.convertPatternGroupsToTasks = convertPatternGroupsToTasks;\r\nfunction convertPatternGroupToTask(base, positive, negative, dynamic) {\r\n return {\r\n dynamic,\r\n positive,\r\n negative,\r\n base,\r\n patterns: [].concat(positive, negative.map(utils.pattern.convertToNegativePattern))\r\n };\r\n}\r\nexports.convertPatternGroupToTask = convertPatternGroupToTask;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst stream_1 = require(\"../readers/stream\");\r\nconst provider_1 = require(\"./provider\");\r\nclass ProviderAsync extends provider_1.default {\r\n constructor() {\r\n super(...arguments);\r\n this._reader = new stream_1.default(this._settings);\r\n }\r\n read(task) {\r\n const root = this._getRootDirectory(task);\r\n const options = this._getReaderOptions(task);\r\n const entries = [];\r\n return new Promise((resolve, reject) => {\r\n const stream = this.api(root, task, options);\r\n stream.once('error', reject);\r\n stream.on('data', (entry) => entries.push(options.transform(entry)));\r\n stream.once('end', () => resolve(entries));\r\n });\r\n }\r\n api(root, task, options) {\r\n if (task.dynamic) {\r\n return this._reader.dynamic(root, options);\r\n }\r\n return this._reader.static(task.patterns, options);\r\n }\r\n}\r\nexports.default = ProviderAsync;\r\n","\"use 
strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst utils = require(\"../../utils\");\r\nconst partial_1 = require(\"../matchers/partial\");\r\nclass DeepFilter {\r\n constructor(_settings, _micromatchOptions) {\r\n this._settings = _settings;\r\n this._micromatchOptions = _micromatchOptions;\r\n }\r\n getFilter(basePath, positive, negative) {\r\n const matcher = this._getMatcher(positive);\r\n const negativeRe = this._getNegativePatternsRe(negative);\r\n return (entry) => this._filter(basePath, entry, matcher, negativeRe);\r\n }\r\n _getMatcher(patterns) {\r\n return new partial_1.default(patterns, this._settings, this._micromatchOptions);\r\n }\r\n _getNegativePatternsRe(patterns) {\r\n const affectDepthOfReadingPatterns = patterns.filter(utils.pattern.isAffectDepthOfReadingPattern);\r\n return utils.pattern.convertPatternsToRe(affectDepthOfReadingPatterns, this._micromatchOptions);\r\n }\r\n _filter(basePath, entry, matcher, negativeRe) {\r\n if (this._isSkippedByDeep(basePath, entry.path)) {\r\n return false;\r\n }\r\n if (this._isSkippedSymbolicLink(entry)) {\r\n return false;\r\n }\r\n const filepath = utils.path.removeLeadingDotSegment(entry.path);\r\n if (this._isSkippedByPositivePatterns(filepath, matcher)) {\r\n return false;\r\n }\r\n return this._isSkippedByNegativePatterns(filepath, negativeRe);\r\n }\r\n _isSkippedByDeep(basePath, entryPath) {\r\n /**\r\n * Avoid unnecessary depth calculations when it doesn't matter.\r\n */\r\n if (this._settings.deep === Infinity) {\r\n return false;\r\n }\r\n return this._getEntryLevel(basePath, entryPath) >= this._settings.deep;\r\n }\r\n _getEntryLevel(basePath, entryPath) {\r\n const entryPathDepth = entryPath.split('/').length;\r\n if (basePath === '') {\r\n return entryPathDepth;\r\n }\r\n const basePathDepth = basePath.split('/').length;\r\n return entryPathDepth - basePathDepth;\r\n }\r\n _isSkippedSymbolicLink(entry) {\r\n return !this._settings.followSymbolicLinks && entry.dirent.isSymbolicLink();\r\n }\r\n _isSkippedByPositivePatterns(entryPath, matcher) {\r\n return !this._settings.baseNameMatch && !matcher.match(entryPath);\r\n }\r\n _isSkippedByNegativePatterns(entryPath, patternsRe) {\r\n return !utils.pattern.matchAny(entryPath, patternsRe);\r\n }\r\n}\r\nexports.default = DeepFilter;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst utils = require(\"../../utils\");\r\nclass EntryFilter {\r\n constructor(_settings, _micromatchOptions) {\r\n this._settings = _settings;\r\n this._micromatchOptions = _micromatchOptions;\r\n this.index = new Map();\r\n }\r\n getFilter(positive, negative) {\r\n const positiveRe = utils.pattern.convertPatternsToRe(positive, this._micromatchOptions);\r\n const negativeRe = utils.pattern.convertPatternsToRe(negative, this._micromatchOptions);\r\n return (entry) => this._filter(entry, positiveRe, negativeRe);\r\n }\r\n _filter(entry, positiveRe, negativeRe) {\r\n if (this._settings.unique && this._isDuplicateEntry(entry)) {\r\n return false;\r\n }\r\n if (this._onlyFileFilter(entry) || this._onlyDirectoryFilter(entry)) {\r\n return false;\r\n }\r\n if (this._isSkippedByAbsoluteNegativePatterns(entry.path, negativeRe)) {\r\n return false;\r\n }\r\n const filepath = this._settings.baseNameMatch ? 
entry.name : entry.path;\r\n const isMatched = this._isMatchToPatterns(filepath, positiveRe) && !this._isMatchToPatterns(entry.path, negativeRe);\r\n if (this._settings.unique && isMatched) {\r\n this._createIndexRecord(entry);\r\n }\r\n return isMatched;\r\n }\r\n _isDuplicateEntry(entry) {\r\n return this.index.has(entry.path);\r\n }\r\n _createIndexRecord(entry) {\r\n this.index.set(entry.path, undefined);\r\n }\r\n _onlyFileFilter(entry) {\r\n return this._settings.onlyFiles && !entry.dirent.isFile();\r\n }\r\n _onlyDirectoryFilter(entry) {\r\n return this._settings.onlyDirectories && !entry.dirent.isDirectory();\r\n }\r\n _isSkippedByAbsoluteNegativePatterns(entryPath, patternsRe) {\r\n if (!this._settings.absolute) {\r\n return false;\r\n }\r\n const fullpath = utils.path.makeAbsolute(this._settings.cwd, entryPath);\r\n return utils.pattern.matchAny(fullpath, patternsRe);\r\n }\r\n _isMatchToPatterns(entryPath, patternsRe) {\r\n const filepath = utils.path.removeLeadingDotSegment(entryPath);\r\n return utils.pattern.matchAny(filepath, patternsRe);\r\n }\r\n}\r\nexports.default = EntryFilter;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst utils = require(\"../../utils\");\r\nclass ErrorFilter {\r\n constructor(_settings) {\r\n this._settings = _settings;\r\n }\r\n getFilter() {\r\n return (error) => this._isNonFatalError(error);\r\n }\r\n _isNonFatalError(error) {\r\n return utils.errno.isEnoentCodeError(error) || this._settings.suppressErrors;\r\n }\r\n}\r\nexports.default = ErrorFilter;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst utils = require(\"../../utils\");\r\nclass Matcher {\r\n constructor(_patterns, _settings, _micromatchOptions) {\r\n this._patterns = _patterns;\r\n this._settings = _settings;\r\n this._micromatchOptions = _micromatchOptions;\r\n this._storage = [];\r\n this._fillStorage();\r\n }\r\n _fillStorage() {\r\n /**\r\n * The original pattern may include `{,*,**,a/*}`, which will lead to problems with matching (unresolved level).\r\n * So, before expand patterns with brace expansion into separated patterns.\r\n */\r\n const patterns = utils.pattern.expandPatternsWithBraceExpansion(this._patterns);\r\n for (const pattern of patterns) {\r\n const segments = this._getPatternSegments(pattern);\r\n const sections = this._splitSegmentsIntoSections(segments);\r\n this._storage.push({\r\n complete: sections.length <= 1,\r\n pattern,\r\n segments,\r\n sections\r\n });\r\n }\r\n }\r\n _getPatternSegments(pattern) {\r\n const parts = utils.pattern.getPatternParts(pattern, this._micromatchOptions);\r\n return parts.map((part) => {\r\n const dynamic = utils.pattern.isDynamicPattern(part, this._settings);\r\n if (!dynamic) {\r\n return {\r\n dynamic: false,\r\n pattern: part\r\n };\r\n }\r\n return {\r\n dynamic: true,\r\n pattern: part,\r\n patternRe: utils.pattern.makeRe(part, this._micromatchOptions)\r\n };\r\n });\r\n }\r\n _splitSegmentsIntoSections(segments) {\r\n return utils.array.splitWhen(segments, (segment) => segment.dynamic && utils.pattern.hasGlobStar(segment.pattern));\r\n }\r\n}\r\nexports.default = Matcher;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst matcher_1 = require(\"./matcher\");\r\nclass PartialMatcher extends matcher_1.default {\r\n match(filepath) {\r\n const parts = filepath.split('/');\r\n const levels = parts.length;\r\n const patterns = this._storage.filter((info) => !info.complete || 
info.segments.length > levels);\r\n for (const pattern of patterns) {\r\n const section = pattern.sections[0];\r\n /**\r\n * In this case, the pattern has a globstar and we must read all directories unconditionally,\r\n * but only if the level has reached the end of the first group.\r\n *\r\n * fixtures/{a,b}/**\r\n * ^ true/false ^ always true\r\n */\r\n if (!pattern.complete && levels > section.length) {\r\n return true;\r\n }\r\n const match = parts.every((part, index) => {\r\n const segment = pattern.segments[index];\r\n if (segment.dynamic && segment.patternRe.test(part)) {\r\n return true;\r\n }\r\n if (!segment.dynamic && segment.pattern === part) {\r\n return true;\r\n }\r\n return false;\r\n });\r\n if (match) {\r\n return true;\r\n }\r\n }\r\n return false;\r\n }\r\n}\r\nexports.default = PartialMatcher;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst path = require(\"path\");\r\nconst deep_1 = require(\"./filters/deep\");\r\nconst entry_1 = require(\"./filters/entry\");\r\nconst error_1 = require(\"./filters/error\");\r\nconst entry_2 = require(\"./transformers/entry\");\r\nclass Provider {\r\n constructor(_settings) {\r\n this._settings = _settings;\r\n this.errorFilter = new error_1.default(this._settings);\r\n this.entryFilter = new entry_1.default(this._settings, this._getMicromatchOptions());\r\n this.deepFilter = new deep_1.default(this._settings, this._getMicromatchOptions());\r\n this.entryTransformer = new entry_2.default(this._settings);\r\n }\r\n _getRootDirectory(task) {\r\n return path.resolve(this._settings.cwd, task.base);\r\n }\r\n _getReaderOptions(task) {\r\n const basePath = task.base === '.' ? '' : task.base;\r\n return {\r\n basePath,\r\n pathSegmentSeparator: '/',\r\n concurrency: this._settings.concurrency,\r\n deepFilter: this.deepFilter.getFilter(basePath, task.positive, task.negative),\r\n entryFilter: this.entryFilter.getFilter(task.positive, task.negative),\r\n errorFilter: this.errorFilter.getFilter(),\r\n followSymbolicLinks: this._settings.followSymbolicLinks,\r\n fs: this._settings.fs,\r\n stats: this._settings.stats,\r\n throwErrorOnBrokenSymbolicLink: this._settings.throwErrorOnBrokenSymbolicLink,\r\n transform: this.entryTransformer.getTransformer()\r\n };\r\n }\r\n _getMicromatchOptions() {\r\n return {\r\n dot: this._settings.dot,\r\n matchBase: this._settings.baseNameMatch,\r\n nobrace: !this._settings.braceExpansion,\r\n nocase: !this._settings.caseSensitiveMatch,\r\n noext: !this._settings.extglob,\r\n noglobstar: !this._settings.globstar,\r\n posix: true,\r\n strictSlashes: false\r\n };\r\n }\r\n}\r\nexports.default = Provider;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst stream_1 = require(\"stream\");\r\nconst stream_2 = require(\"../readers/stream\");\r\nconst provider_1 = require(\"./provider\");\r\nclass ProviderStream extends provider_1.default {\r\n constructor() {\r\n super(...arguments);\r\n this._reader = new stream_2.default(this._settings);\r\n }\r\n read(task) {\r\n const root = this._getRootDirectory(task);\r\n const options = this._getReaderOptions(task);\r\n const source = this.api(root, task, options);\r\n const destination = new stream_1.Readable({ objectMode: true, read: () => { } });\r\n source\r\n .once('error', (error) => destination.emit('error', error))\r\n .on('data', (entry) => destination.emit('data', options.transform(entry)))\r\n .once('end', () => destination.emit('end'));\r\n destination\r\n 
.once('close', () => source.destroy());\r\n return destination;\r\n }\r\n api(root, task, options) {\r\n if (task.dynamic) {\r\n return this._reader.dynamic(root, options);\r\n }\r\n return this._reader.static(task.patterns, options);\r\n }\r\n}\r\nexports.default = ProviderStream;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst sync_1 = require(\"../readers/sync\");\r\nconst provider_1 = require(\"./provider\");\r\nclass ProviderSync extends provider_1.default {\r\n constructor() {\r\n super(...arguments);\r\n this._reader = new sync_1.default(this._settings);\r\n }\r\n read(task) {\r\n const root = this._getRootDirectory(task);\r\n const options = this._getReaderOptions(task);\r\n const entries = this.api(root, task, options);\r\n return entries.map(options.transform);\r\n }\r\n api(root, task, options) {\r\n if (task.dynamic) {\r\n return this._reader.dynamic(root, options);\r\n }\r\n return this._reader.static(task.patterns, options);\r\n }\r\n}\r\nexports.default = ProviderSync;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst utils = require(\"../../utils\");\r\nclass EntryTransformer {\r\n constructor(_settings) {\r\n this._settings = _settings;\r\n }\r\n getTransformer() {\r\n return (entry) => this._transform(entry);\r\n }\r\n _transform(entry) {\r\n let filepath = entry.path;\r\n if (this._settings.absolute) {\r\n filepath = utils.path.makeAbsolute(this._settings.cwd, filepath);\r\n filepath = utils.path.unixify(filepath);\r\n }\r\n if (this._settings.markDirectories && entry.dirent.isDirectory()) {\r\n filepath += '/';\r\n }\r\n if (!this._settings.objectMode) {\r\n return filepath;\r\n }\r\n return Object.assign(Object.assign({}, entry), { path: filepath });\r\n }\r\n}\r\nexports.default = EntryTransformer;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst path = require(\"path\");\r\nconst fsStat = require(\"@nodelib/fs.stat\");\r\nconst utils = require(\"../utils\");\r\nclass Reader {\r\n constructor(_settings) {\r\n this._settings = _settings;\r\n this._fsStatSettings = new fsStat.Settings({\r\n followSymbolicLink: this._settings.followSymbolicLinks,\r\n fs: this._settings.fs,\r\n throwErrorOnBrokenSymbolicLink: this._settings.followSymbolicLinks\r\n });\r\n }\r\n _getFullEntryPath(filepath) {\r\n return path.resolve(this._settings.cwd, filepath);\r\n }\r\n _makeEntry(stats, pattern) {\r\n const entry = {\r\n name: pattern,\r\n path: pattern,\r\n dirent: utils.fs.createDirentFromStats(pattern, stats)\r\n };\r\n if (this._settings.stats) {\r\n entry.stats = stats;\r\n }\r\n return entry;\r\n }\r\n _isFatalError(error) {\r\n return !utils.errno.isEnoentCodeError(error) && !this._settings.suppressErrors;\r\n }\r\n}\r\nexports.default = Reader;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst stream_1 = require(\"stream\");\r\nconst fsStat = require(\"@nodelib/fs.stat\");\r\nconst fsWalk = require(\"@nodelib/fs.walk\");\r\nconst reader_1 = require(\"./reader\");\r\nclass ReaderStream extends reader_1.default {\r\n constructor() {\r\n super(...arguments);\r\n this._walkStream = fsWalk.walkStream;\r\n this._stat = fsStat.stat;\r\n }\r\n dynamic(root, options) {\r\n return this._walkStream(root, options);\r\n }\r\n static(patterns, options) {\r\n const filepaths = patterns.map(this._getFullEntryPath, this);\r\n const stream = new stream_1.PassThrough({ objectMode: true });\r\n 
stream._write = (index, _enc, done) => {\r\n return this._getEntry(filepaths[index], patterns[index], options)\r\n .then((entry) => {\r\n if (entry !== null && options.entryFilter(entry)) {\r\n stream.push(entry);\r\n }\r\n if (index === filepaths.length - 1) {\r\n stream.end();\r\n }\r\n done();\r\n })\r\n .catch(done);\r\n };\r\n for (let i = 0; i < filepaths.length; i++) {\r\n stream.write(i);\r\n }\r\n return stream;\r\n }\r\n _getEntry(filepath, pattern, options) {\r\n return this._getStat(filepath)\r\n .then((stats) => this._makeEntry(stats, pattern))\r\n .catch((error) => {\r\n if (options.errorFilter(error)) {\r\n return null;\r\n }\r\n throw error;\r\n });\r\n }\r\n _getStat(filepath) {\r\n return new Promise((resolve, reject) => {\r\n this._stat(filepath, this._fsStatSettings, (error, stats) => {\r\n return error === null ? resolve(stats) : reject(error);\r\n });\r\n });\r\n }\r\n}\r\nexports.default = ReaderStream;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst fsStat = require(\"@nodelib/fs.stat\");\r\nconst fsWalk = require(\"@nodelib/fs.walk\");\r\nconst reader_1 = require(\"./reader\");\r\nclass ReaderSync extends reader_1.default {\r\n constructor() {\r\n super(...arguments);\r\n this._walkSync = fsWalk.walkSync;\r\n this._statSync = fsStat.statSync;\r\n }\r\n dynamic(root, options) {\r\n return this._walkSync(root, options);\r\n }\r\n static(patterns, options) {\r\n const entries = [];\r\n for (const pattern of patterns) {\r\n const filepath = this._getFullEntryPath(pattern);\r\n const entry = this._getEntry(filepath, pattern, options);\r\n if (entry === null || !options.entryFilter(entry)) {\r\n continue;\r\n }\r\n entries.push(entry);\r\n }\r\n return entries;\r\n }\r\n _getEntry(filepath, pattern, options) {\r\n try {\r\n const stats = this._getStat(filepath);\r\n return this._makeEntry(stats, pattern);\r\n }\r\n catch (error) {\r\n if (options.errorFilter(error)) {\r\n return null;\r\n }\r\n throw error;\r\n }\r\n }\r\n _getStat(filepath) {\r\n return this._statSync(filepath, this._fsStatSettings);\r\n }\r\n}\r\nexports.default = ReaderSync;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.DEFAULT_FILE_SYSTEM_ADAPTER = void 0;\r\nconst fs = require(\"fs\");\r\nconst os = require(\"os\");\r\nconst CPU_COUNT = os.cpus().length;\r\nexports.DEFAULT_FILE_SYSTEM_ADAPTER = {\r\n lstat: fs.lstat,\r\n lstatSync: fs.lstatSync,\r\n stat: fs.stat,\r\n statSync: fs.statSync,\r\n readdir: fs.readdir,\r\n readdirSync: fs.readdirSync\r\n};\r\nclass Settings {\r\n constructor(_options = {}) {\r\n this._options = _options;\r\n this.absolute = this._getValue(this._options.absolute, false);\r\n this.baseNameMatch = this._getValue(this._options.baseNameMatch, false);\r\n this.braceExpansion = this._getValue(this._options.braceExpansion, true);\r\n this.caseSensitiveMatch = this._getValue(this._options.caseSensitiveMatch, true);\r\n this.concurrency = this._getValue(this._options.concurrency, CPU_COUNT);\r\n this.cwd = this._getValue(this._options.cwd, process.cwd());\r\n this.deep = this._getValue(this._options.deep, Infinity);\r\n this.dot = this._getValue(this._options.dot, false);\r\n this.extglob = this._getValue(this._options.extglob, true);\r\n this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, true);\r\n this.fs = this._getFileSystemMethods(this._options.fs);\r\n this.globstar = this._getValue(this._options.globstar, true);\r\n this.ignore = 
this._getValue(this._options.ignore, []);\r\n this.markDirectories = this._getValue(this._options.markDirectories, false);\r\n this.objectMode = this._getValue(this._options.objectMode, false);\r\n this.onlyDirectories = this._getValue(this._options.onlyDirectories, false);\r\n this.onlyFiles = this._getValue(this._options.onlyFiles, true);\r\n this.stats = this._getValue(this._options.stats, false);\r\n this.suppressErrors = this._getValue(this._options.suppressErrors, false);\r\n this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, false);\r\n this.unique = this._getValue(this._options.unique, true);\r\n if (this.onlyDirectories) {\r\n this.onlyFiles = false;\r\n }\r\n if (this.stats) {\r\n this.objectMode = true;\r\n }\r\n }\r\n _getValue(option, value) {\r\n return option === undefined ? value : option;\r\n }\r\n _getFileSystemMethods(methods = {}) {\r\n return Object.assign(Object.assign({}, exports.DEFAULT_FILE_SYSTEM_ADAPTER), methods);\r\n }\r\n}\r\nexports.default = Settings;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.splitWhen = exports.flatten = void 0;\r\nfunction flatten(items) {\r\n return items.reduce((collection, item) => [].concat(collection, item), []);\r\n}\r\nexports.flatten = flatten;\r\nfunction splitWhen(items, predicate) {\r\n const result = [[]];\r\n let groupIndex = 0;\r\n for (const item of items) {\r\n if (predicate(item)) {\r\n groupIndex++;\r\n result[groupIndex] = [];\r\n }\r\n else {\r\n result[groupIndex].push(item);\r\n }\r\n }\r\n return result;\r\n}\r\nexports.splitWhen = splitWhen;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.isEnoentCodeError = void 0;\r\nfunction isEnoentCodeError(error) {\r\n return error.code === 'ENOENT';\r\n}\r\nexports.isEnoentCodeError = isEnoentCodeError;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.createDirentFromStats = void 0;\r\nclass DirentFromStats {\r\n constructor(name, stats) {\r\n this.name = name;\r\n this.isBlockDevice = stats.isBlockDevice.bind(stats);\r\n this.isCharacterDevice = stats.isCharacterDevice.bind(stats);\r\n this.isDirectory = stats.isDirectory.bind(stats);\r\n this.isFIFO = stats.isFIFO.bind(stats);\r\n this.isFile = stats.isFile.bind(stats);\r\n this.isSocket = stats.isSocket.bind(stats);\r\n this.isSymbolicLink = stats.isSymbolicLink.bind(stats);\r\n }\r\n}\r\nfunction createDirentFromStats(name, stats) {\r\n return new DirentFromStats(name, stats);\r\n}\r\nexports.createDirentFromStats = createDirentFromStats;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.string = exports.stream = exports.pattern = exports.path = exports.fs = exports.errno = exports.array = void 0;\r\nconst array = require(\"./array\");\r\nexports.array = array;\r\nconst errno = require(\"./errno\");\r\nexports.errno = errno;\r\nconst fs = require(\"./fs\");\r\nexports.fs = fs;\r\nconst path = require(\"./path\");\r\nexports.path = path;\r\nconst pattern = require(\"./pattern\");\r\nexports.pattern = pattern;\r\nconst stream = require(\"./stream\");\r\nexports.stream = stream;\r\nconst string = require(\"./string\");\r\nexports.string = string;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.removeLeadingDotSegment = exports.escape = exports.makeAbsolute = exports.unixify = void 0;\r\nconst path = 
require(\"path\");\r\nconst LEADING_DOT_SEGMENT_CHARACTERS_COUNT = 2; // ./ or .\\\\\r\nconst UNESCAPED_GLOB_SYMBOLS_RE = /(\\\\?)([()*?[\\]{|}]|^!|[!+@](?=\\())/g;\r\n/**\r\n * Designed to work only with simple paths: `dir\\\\file`.\r\n */\r\nfunction unixify(filepath) {\r\n return filepath.replace(/\\\\/g, '/');\r\n}\r\nexports.unixify = unixify;\r\nfunction makeAbsolute(cwd, filepath) {\r\n return path.resolve(cwd, filepath);\r\n}\r\nexports.makeAbsolute = makeAbsolute;\r\nfunction escape(pattern) {\r\n return pattern.replace(UNESCAPED_GLOB_SYMBOLS_RE, '\\\\$2');\r\n}\r\nexports.escape = escape;\r\nfunction removeLeadingDotSegment(entry) {\r\n // We do not use `startsWith` because this is 10x slower than current implementation for some cases.\r\n // eslint-disable-next-line @typescript-eslint/prefer-string-starts-ends-with\r\n if (entry.charAt(0) === '.') {\r\n const secondCharactery = entry.charAt(1);\r\n if (secondCharactery === '/' || secondCharactery === '\\\\') {\r\n return entry.slice(LEADING_DOT_SEGMENT_CHARACTERS_COUNT);\r\n }\r\n }\r\n return entry;\r\n}\r\nexports.removeLeadingDotSegment = removeLeadingDotSegment;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.matchAny = exports.convertPatternsToRe = exports.makeRe = exports.getPatternParts = exports.expandBraceExpansion = exports.expandPatternsWithBraceExpansion = exports.isAffectDepthOfReadingPattern = exports.endsWithSlashGlobStar = exports.hasGlobStar = exports.getBaseDirectory = exports.getPositivePatterns = exports.getNegativePatterns = exports.isPositivePattern = exports.isNegativePattern = exports.convertToNegativePattern = exports.convertToPositivePattern = exports.isDynamicPattern = exports.isStaticPattern = void 0;\r\nconst path = require(\"path\");\r\nconst globParent = require(\"glob-parent\");\r\nconst micromatch = require(\"micromatch\");\r\nconst picomatch = require(\"picomatch\");\r\nconst GLOBSTAR = '**';\r\nconst ESCAPE_SYMBOL = '\\\\';\r\nconst COMMON_GLOB_SYMBOLS_RE = /[*?]|^!/;\r\nconst REGEX_CHARACTER_CLASS_SYMBOLS_RE = /\\[.*]/;\r\nconst REGEX_GROUP_SYMBOLS_RE = /(?:^|[^!*+?@])\\(.*\\|.*\\)/;\r\nconst GLOB_EXTENSION_SYMBOLS_RE = /[!*+?@]\\(.*\\)/;\r\nconst BRACE_EXPANSIONS_SYMBOLS_RE = /{.*(?:,|\\.\\.).*}/;\r\nfunction isStaticPattern(pattern, options = {}) {\r\n return !isDynamicPattern(pattern, options);\r\n}\r\nexports.isStaticPattern = isStaticPattern;\r\nfunction isDynamicPattern(pattern, options = {}) {\r\n /**\r\n * A special case with an empty string is necessary for matching patterns that start with a forward slash.\r\n * An empty string cannot be a dynamic pattern.\r\n * For example, the pattern `/lib/*` will be spread into parts: '', 'lib', '*'.\r\n */\r\n if (pattern === '') {\r\n return false;\r\n }\r\n /**\r\n * When the `caseSensitiveMatch` option is disabled, all patterns must be marked as dynamic, because we cannot check\r\n * filepath directly (without read directory).\r\n */\r\n if (options.caseSensitiveMatch === false || pattern.includes(ESCAPE_SYMBOL)) {\r\n return true;\r\n }\r\n if (COMMON_GLOB_SYMBOLS_RE.test(pattern) || REGEX_CHARACTER_CLASS_SYMBOLS_RE.test(pattern) || REGEX_GROUP_SYMBOLS_RE.test(pattern)) {\r\n return true;\r\n }\r\n if (options.extglob !== false && GLOB_EXTENSION_SYMBOLS_RE.test(pattern)) {\r\n return true;\r\n }\r\n if (options.braceExpansion !== false && BRACE_EXPANSIONS_SYMBOLS_RE.test(pattern)) {\r\n return true;\r\n }\r\n return false;\r\n}\r\nexports.isDynamicPattern = isDynamicPattern;\r\nfunction 
convertToPositivePattern(pattern) {\r\n return isNegativePattern(pattern) ? pattern.slice(1) : pattern;\r\n}\r\nexports.convertToPositivePattern = convertToPositivePattern;\r\nfunction convertToNegativePattern(pattern) {\r\n return '!' + pattern;\r\n}\r\nexports.convertToNegativePattern = convertToNegativePattern;\r\nfunction isNegativePattern(pattern) {\r\n return pattern.startsWith('!') && pattern[1] !== '(';\r\n}\r\nexports.isNegativePattern = isNegativePattern;\r\nfunction isPositivePattern(pattern) {\r\n return !isNegativePattern(pattern);\r\n}\r\nexports.isPositivePattern = isPositivePattern;\r\nfunction getNegativePatterns(patterns) {\r\n return patterns.filter(isNegativePattern);\r\n}\r\nexports.getNegativePatterns = getNegativePatterns;\r\nfunction getPositivePatterns(patterns) {\r\n return patterns.filter(isPositivePattern);\r\n}\r\nexports.getPositivePatterns = getPositivePatterns;\r\nfunction getBaseDirectory(pattern) {\r\n return globParent(pattern, { flipBackslashes: false });\r\n}\r\nexports.getBaseDirectory = getBaseDirectory;\r\nfunction hasGlobStar(pattern) {\r\n return pattern.includes(GLOBSTAR);\r\n}\r\nexports.hasGlobStar = hasGlobStar;\r\nfunction endsWithSlashGlobStar(pattern) {\r\n return pattern.endsWith('/' + GLOBSTAR);\r\n}\r\nexports.endsWithSlashGlobStar = endsWithSlashGlobStar;\r\nfunction isAffectDepthOfReadingPattern(pattern) {\r\n const basename = path.basename(pattern);\r\n return endsWithSlashGlobStar(pattern) || isStaticPattern(basename);\r\n}\r\nexports.isAffectDepthOfReadingPattern = isAffectDepthOfReadingPattern;\r\nfunction expandPatternsWithBraceExpansion(patterns) {\r\n return patterns.reduce((collection, pattern) => {\r\n return collection.concat(expandBraceExpansion(pattern));\r\n }, []);\r\n}\r\nexports.expandPatternsWithBraceExpansion = expandPatternsWithBraceExpansion;\r\nfunction expandBraceExpansion(pattern) {\r\n return micromatch.braces(pattern, {\r\n expand: true,\r\n nodupes: true\r\n });\r\n}\r\nexports.expandBraceExpansion = expandBraceExpansion;\r\nfunction getPatternParts(pattern, options) {\r\n let { parts } = picomatch.scan(pattern, Object.assign(Object.assign({}, options), { parts: true }));\r\n /**\r\n * The scan method returns an empty array in some cases.\r\n * See micromatch/picomatch#58 for more details.\r\n */\r\n if (parts.length === 0) {\r\n parts = [pattern];\r\n }\r\n /**\r\n * The scan method does not return an empty part for the pattern with a forward slash.\r\n * This is another part of micromatch/picomatch#58.\r\n */\r\n if (parts[0].startsWith('/')) {\r\n parts[0] = parts[0].slice(1);\r\n parts.unshift('');\r\n }\r\n return parts;\r\n}\r\nexports.getPatternParts = getPatternParts;\r\nfunction makeRe(pattern, options) {\r\n return micromatch.makeRe(pattern, options);\r\n}\r\nexports.makeRe = makeRe;\r\nfunction convertPatternsToRe(patterns, options) {\r\n return patterns.map((pattern) => makeRe(pattern, options));\r\n}\r\nexports.convertPatternsToRe = convertPatternsToRe;\r\nfunction matchAny(entry, patternsRe) {\r\n return patternsRe.some((patternRe) => patternRe.test(entry));\r\n}\r\nexports.matchAny = matchAny;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.merge = void 0;\r\nconst merge2 = require(\"merge2\");\r\nfunction merge(streams) {\r\n const mergedStream = merge2(streams);\r\n streams.forEach((stream) => {\r\n stream.once('error', (error) => mergedStream.emit('error', error));\r\n });\r\n mergedStream.once('close', () => 
propagateCloseEventToSources(streams));\r\n mergedStream.once('end', () => propagateCloseEventToSources(streams));\r\n return mergedStream;\r\n}\r\nexports.merge = merge;\r\nfunction propagateCloseEventToSources(streams) {\r\n streams.forEach((stream) => stream.emit('close'));\r\n}\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.isEmpty = exports.isString = void 0;\r\nfunction isString(input) {\r\n return typeof input === 'string';\r\n}\r\nexports.isString = isString;\r\nfunction isEmpty(input) {\r\n return input === '';\r\n}\r\nexports.isEmpty = isEmpty;\r\n","'use strict'\n\nvar reusify = require('reusify')\n\nfunction fastqueue (context, worker, concurrency) {\n if (typeof context === 'function') {\n concurrency = worker\n worker = context\n context = null\n }\n\n var cache = reusify(Task)\n var queueHead = null\n var queueTail = null\n var _running = 0\n\n var self = {\n push: push,\n drain: noop,\n saturated: noop,\n pause: pause,\n paused: false,\n concurrency: concurrency,\n running: running,\n resume: resume,\n idle: idle,\n length: length,\n getQueue: getQueue,\n unshift: unshift,\n empty: noop,\n kill: kill,\n killAndDrain: killAndDrain\n }\n\n return self\n\n function running () {\n return _running\n }\n\n function pause () {\n self.paused = true\n }\n\n function length () {\n var current = queueHead\n var counter = 0\n\n while (current) {\n current = current.next\n counter++\n }\n\n return counter\n }\n\n function getQueue () {\n var current = queueHead\n var tasks = []\n\n while (current) {\n tasks.push(current.value)\n current = current.next\n }\n\n return tasks\n }\n\n function resume () {\n if (!self.paused) return\n self.paused = false\n for (var i = 0; i < self.concurrency; i++) {\n _running++\n release()\n }\n }\n\n function idle () {\n return _running === 0 && self.length() === 0\n }\n\n function push (value, done) {\n var current = cache.get()\n\n current.context = context\n current.release = release\n current.value = value\n current.callback = done || noop\n\n if (_running === self.concurrency || self.paused) {\n if (queueTail) {\n queueTail.next = current\n queueTail = current\n } else {\n queueHead = current\n queueTail = current\n self.saturated()\n }\n } else {\n _running++\n worker.call(context, current.value, current.worked)\n }\n }\n\n function unshift (value, done) {\n var current = cache.get()\n\n current.context = context\n current.release = release\n current.value = value\n current.callback = done || noop\n\n if (_running === self.concurrency || self.paused) {\n if (queueHead) {\n current.next = queueHead\n queueHead = current\n } else {\n queueHead = current\n queueTail = current\n self.saturated()\n }\n } else {\n _running++\n worker.call(context, current.value, current.worked)\n }\n }\n\n function release (holder) {\n if (holder) {\n cache.release(holder)\n }\n var next = queueHead\n if (next) {\n if (!self.paused) {\n if (queueTail === queueHead) {\n queueTail = null\n }\n queueHead = next.next\n next.next = null\n worker.call(context, next.value, next.worked)\n if (queueTail === null) {\n self.empty()\n }\n } else {\n _running--\n }\n } else if (--_running === 0) {\n self.drain()\n }\n }\n\n function kill () {\n queueHead = null\n queueTail = null\n self.drain = noop\n }\n\n function killAndDrain () {\n queueHead = null\n queueTail = null\n self.drain()\n self.drain = noop\n }\n}\n\nfunction noop () {}\n\nfunction Task () {\n this.value = null\n this.callback = noop\n this.next = null\n this.release 
= noop\n this.context = null\n\n var self = this\n\n this.worked = function worked (err, result) {\n var callback = self.callback\n self.value = null\n self.callback = noop\n callback.call(self.context, err, result)\n self.release(self)\n }\n}\n\nmodule.exports = fastqueue\n","'use strict';\n\nvar isGlob = require('is-glob');\nvar pathPosixDirname = require('path').posix.dirname;\nvar isWin32 = require('os').platform() === 'win32';\n\nvar slash = '/';\nvar backslash = /\\\\/g;\nvar enclosure = /[\\{\\[].*[\\/]*.*[\\}\\]]$/;\nvar globby = /(^|[^\\\\])([\\{\\[]|\\([^\\)]+$)/;\nvar escaped = /\\\\([\\!\\*\\?\\|\\[\\]\\(\\)\\{\\}])/g;\n\n/**\n * @param {string} str\n * @param {Object} opts\n * @param {boolean} [opts.flipBackslashes=true]\n */\nmodule.exports = function globParent(str, opts) {\n var options = Object.assign({ flipBackslashes: true }, opts);\n\n // flip windows path separators\n if (options.flipBackslashes && isWin32 && str.indexOf(slash) < 0) {\n str = str.replace(backslash, slash);\n }\n\n // special case for strings ending in enclosure containing path separator\n if (enclosure.test(str)) {\n str += slash;\n }\n\n // preserves full path in case of trailing path separator\n str += 'a';\n\n // remove path parts that are globby\n do {\n str = pathPosixDirname(str);\n } while (isGlob(str) || globby.test(str));\n\n // remove escape chars and return result\n return str.replace(escaped, '$1');\n};\n","/*!\n * is-extglob \n *\n * Copyright (c) 2014-2016, Jon Schlinkert.\n * Licensed under the MIT License.\n */\n\nmodule.exports = function isExtglob(str) {\n if (typeof str !== 'string' || str === '') {\n return false;\n }\n\n var match;\n while ((match = /(\\\\).|([@?!+*]\\(.*\\))/g.exec(str))) {\n if (match[2]) return true;\n str = str.slice(match.index + match[0].length);\n }\n\n return false;\n};\n","/*!\n * is-glob \n *\n * Copyright (c) 2014-2017, Jon Schlinkert.\n * Released under the MIT License.\n */\n\nvar isExtglob = require('is-extglob');\nvar chars = { '{': '}', '(': ')', '[': ']'};\nvar strictRegex = /\\\\(.)|(^!|\\*|[\\].+)]\\?|\\[[^\\\\\\]]+\\]|\\{[^\\\\}]+\\}|\\(\\?[:!=][^\\\\)]+\\)|\\([^|]+\\|[^\\\\)]+\\))/;\nvar relaxedRegex = /\\\\(.)|(^!|[*?{}()[\\]]|\\(\\?)/;\n\nmodule.exports = function isGlob(str, options) {\n if (typeof str !== 'string' || str === '') {\n return false;\n }\n\n if (isExtglob(str)) {\n return true;\n }\n\n var regex = strictRegex;\n var match;\n\n // optionally relax regex\n if (options && options.strict === false) {\n regex = relaxedRegex;\n }\n\n while ((match = regex.exec(str))) {\n if (match[2]) return true;\n var idx = match.index + match[0].length;\n\n // if an open bracket/brace/paren is escaped,\n // set the index to the next closing character\n var open = match[1];\n var close = open ? 
chars[open] : null;\n if (open && close) {\n var n = str.indexOf(close, idx);\n if (n !== -1) {\n idx = n + 1;\n }\n }\n\n str = str.slice(idx);\n }\n return false;\n};\n","'use strict'\n/*\n * merge2\n * https://github.com/teambition/merge2\n *\n * Copyright (c) 2014-2020 Teambition\n * Licensed under the MIT license.\n */\nconst Stream = require('stream')\nconst PassThrough = Stream.PassThrough\nconst slice = Array.prototype.slice\n\nmodule.exports = merge2\n\nfunction merge2 () {\n const streamsQueue = []\n const args = slice.call(arguments)\n let merging = false\n let options = args[args.length - 1]\n\n if (options && !Array.isArray(options) && options.pipe == null) {\n args.pop()\n } else {\n options = {}\n }\n\n const doEnd = options.end !== false\n const doPipeError = options.pipeError === true\n if (options.objectMode == null) {\n options.objectMode = true\n }\n if (options.highWaterMark == null) {\n options.highWaterMark = 64 * 1024\n }\n const mergedStream = PassThrough(options)\n\n function addStream () {\n for (let i = 0, len = arguments.length; i < len; i++) {\n streamsQueue.push(pauseStreams(arguments[i], options))\n }\n mergeStream()\n return this\n }\n\n function mergeStream () {\n if (merging) {\n return\n }\n merging = true\n\n let streams = streamsQueue.shift()\n if (!streams) {\n process.nextTick(endStream)\n return\n }\n if (!Array.isArray(streams)) {\n streams = [streams]\n }\n\n let pipesCount = streams.length + 1\n\n function next () {\n if (--pipesCount > 0) {\n return\n }\n merging = false\n mergeStream()\n }\n\n function pipe (stream) {\n function onend () {\n stream.removeListener('merge2UnpipeEnd', onend)\n stream.removeListener('end', onend)\n if (doPipeError) {\n stream.removeListener('error', onerror)\n }\n next()\n }\n function onerror (err) {\n mergedStream.emit('error', err)\n }\n // skip ended stream\n if (stream._readableState.endEmitted) {\n return next()\n }\n\n stream.on('merge2UnpipeEnd', onend)\n stream.on('end', onend)\n\n if (doPipeError) {\n stream.on('error', onerror)\n }\n\n stream.pipe(mergedStream, { end: false })\n // compatible for old stream\n stream.resume()\n }\n\n for (let i = 0; i < streams.length; i++) {\n pipe(streams[i])\n }\n\n next()\n }\n\n function endStream () {\n merging = false\n // emit 'queueDrain' when all streams merged.\n mergedStream.emit('queueDrain')\n if (doEnd) {\n mergedStream.end()\n }\n }\n\n mergedStream.setMaxListeners(0)\n mergedStream.add = addStream\n mergedStream.on('unpipe', function (stream) {\n stream.emit('merge2UnpipeEnd')\n })\n\n if (args.length) {\n addStream.apply(null, args)\n }\n return mergedStream\n}\n\n// check and pause streams for pipe.\nfunction pauseStreams (streams, options) {\n if (!Array.isArray(streams)) {\n // Backwards-compat with old-style streams\n if (!streams._readableState && streams.pipe) {\n streams = streams.pipe(PassThrough(options))\n }\n if (!streams._readableState || !streams.pause || !streams.pipe) {\n throw new Error('Only readable stream can be merged.')\n }\n streams.pause()\n } else {\n for (let i = 0, len = streams.length; i < len; i++) {\n streams[i] = pauseStreams(streams[i], options)\n }\n }\n return streams\n}\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; }\n\nvar Stream = _interopDefault(require('stream'));\nvar http = _interopDefault(require('http'));\nvar Url = _interopDefault(require('url'));\nvar https = _interopDefault(require('https'));\nvar zlib = _interopDefault(require('zlib'));\n\n// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js\n\n// fix for \"Readable\" isn't a named export issue\nconst Readable = Stream.Readable;\n\nconst BUFFER = Symbol('buffer');\nconst TYPE = Symbol('type');\n\nclass Blob {\n\tconstructor() {\n\t\tthis[TYPE] = '';\n\n\t\tconst blobParts = arguments[0];\n\t\tconst options = arguments[1];\n\n\t\tconst buffers = [];\n\t\tlet size = 0;\n\n\t\tif (blobParts) {\n\t\t\tconst a = blobParts;\n\t\t\tconst length = Number(a.length);\n\t\t\tfor (let i = 0; i < length; i++) {\n\t\t\t\tconst element = a[i];\n\t\t\t\tlet buffer;\n\t\t\t\tif (element instanceof Buffer) {\n\t\t\t\t\tbuffer = element;\n\t\t\t\t} else if (ArrayBuffer.isView(element)) {\n\t\t\t\t\tbuffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);\n\t\t\t\t} else if (element instanceof ArrayBuffer) {\n\t\t\t\t\tbuffer = Buffer.from(element);\n\t\t\t\t} else if (element instanceof Blob) {\n\t\t\t\t\tbuffer = element[BUFFER];\n\t\t\t\t} else {\n\t\t\t\t\tbuffer = Buffer.from(typeof element === 'string' ? element : String(element));\n\t\t\t\t}\n\t\t\t\tsize += buffer.length;\n\t\t\t\tbuffers.push(buffer);\n\t\t\t}\n\t\t}\n\n\t\tthis[BUFFER] = Buffer.concat(buffers);\n\n\t\tlet type = options && options.type !== undefined && String(options.type).toLowerCase();\n\t\tif (type && !/[^\\u0020-\\u007E]/.test(type)) {\n\t\t\tthis[TYPE] = type;\n\t\t}\n\t}\n\tget size() {\n\t\treturn this[BUFFER].length;\n\t}\n\tget type() {\n\t\treturn this[TYPE];\n\t}\n\ttext() {\n\t\treturn Promise.resolve(this[BUFFER].toString());\n\t}\n\tarrayBuffer() {\n\t\tconst buf = this[BUFFER];\n\t\tconst ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\treturn Promise.resolve(ab);\n\t}\n\tstream() {\n\t\tconst readable = new Readable();\n\t\treadable._read = function () {};\n\t\treadable.push(this[BUFFER]);\n\t\treadable.push(null);\n\t\treturn readable;\n\t}\n\ttoString() {\n\t\treturn '[object Blob]';\n\t}\n\tslice() {\n\t\tconst size = this.size;\n\n\t\tconst start = arguments[0];\n\t\tconst end = arguments[1];\n\t\tlet relativeStart, relativeEnd;\n\t\tif (start === undefined) {\n\t\t\trelativeStart = 0;\n\t\t} else if (start < 0) {\n\t\t\trelativeStart = Math.max(size + start, 0);\n\t\t} else {\n\t\t\trelativeStart = Math.min(start, size);\n\t\t}\n\t\tif (end === undefined) {\n\t\t\trelativeEnd = size;\n\t\t} else if (end < 0) {\n\t\t\trelativeEnd = Math.max(size + end, 0);\n\t\t} else {\n\t\t\trelativeEnd = Math.min(end, size);\n\t\t}\n\t\tconst span = Math.max(relativeEnd - relativeStart, 0);\n\n\t\tconst buffer = this[BUFFER];\n\t\tconst slicedBuffer = buffer.slice(relativeStart, relativeStart + span);\n\t\tconst blob = new Blob([], { type: arguments[2] });\n\t\tblob[BUFFER] = slicedBuffer;\n\t\treturn blob;\n\t}\n}\n\nObject.defineProperties(Blob.prototype, {\n\tsize: { enumerable: true },\n\ttype: { enumerable: true },\n\tslice: { enumerable: true }\n});\n\nObject.defineProperty(Blob.prototype, Symbol.toStringTag, {\n\tvalue: 'Blob',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * fetch-error.js\n *\n * FetchError interface for operational errors\n */\n\n/**\n * Create FetchError instance\n *\n * @param String 
message Error message for human\n * @param String type Error type for machine\n * @param String systemError For Node.js system error\n * @return FetchError\n */\nfunction FetchError(message, type, systemError) {\n Error.call(this, message);\n\n this.message = message;\n this.type = type;\n\n // when err.type is `system`, err.code contains system error code\n if (systemError) {\n this.code = this.errno = systemError.code;\n }\n\n // hide custom error implementation details from end-users\n Error.captureStackTrace(this, this.constructor);\n}\n\nFetchError.prototype = Object.create(Error.prototype);\nFetchError.prototype.constructor = FetchError;\nFetchError.prototype.name = 'FetchError';\n\nlet convert;\ntry {\n\tconvert = require('encoding').convert;\n} catch (e) {}\n\nconst INTERNALS = Symbol('Body internals');\n\n// fix an issue where \"PassThrough\" isn't a named export for node <10\nconst PassThrough = Stream.PassThrough;\n\n/**\n * Body mixin\n *\n * Ref: https://fetch.spec.whatwg.org/#body\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nfunction Body(body) {\n\tvar _this = this;\n\n\tvar _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},\n\t _ref$size = _ref.size;\n\n\tlet size = _ref$size === undefined ? 0 : _ref$size;\n\tvar _ref$timeout = _ref.timeout;\n\tlet timeout = _ref$timeout === undefined ? 0 : _ref$timeout;\n\n\tif (body == null) {\n\t\t// body is undefined or null\n\t\tbody = null;\n\t} else if (isURLSearchParams(body)) {\n\t\t// body is a URLSearchParams\n\t\tbody = Buffer.from(body.toString());\n\t} else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {\n\t\t// body is ArrayBuffer\n\t\tbody = Buffer.from(body);\n\t} else if (ArrayBuffer.isView(body)) {\n\t\t// body is ArrayBufferView\n\t\tbody = Buffer.from(body.buffer, body.byteOffset, body.byteLength);\n\t} else if (body instanceof Stream) ; else {\n\t\t// none of the above\n\t\t// coerce to string then buffer\n\t\tbody = Buffer.from(String(body));\n\t}\n\tthis[INTERNALS] = {\n\t\tbody,\n\t\tdisturbed: false,\n\t\terror: null\n\t};\n\tthis.size = size;\n\tthis.timeout = timeout;\n\n\tif (body instanceof Stream) {\n\t\tbody.on('error', function (err) {\n\t\t\tconst error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);\n\t\t\t_this[INTERNALS].error = error;\n\t\t});\n\t}\n}\n\nBody.prototype = {\n\tget body() {\n\t\treturn this[INTERNALS].body;\n\t},\n\n\tget bodyUsed() {\n\t\treturn this[INTERNALS].disturbed;\n\t},\n\n\t/**\n * Decode response as ArrayBuffer\n *\n * @return Promise\n */\n\tarrayBuffer() {\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\t});\n\t},\n\n\t/**\n * Return raw response as Blob\n *\n * @return Promise\n */\n\tblob() {\n\t\tlet ct = this.headers && this.headers.get('content-type') || '';\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn Object.assign(\n\t\t\t// Prevent copying\n\t\t\tnew Blob([], {\n\t\t\t\ttype: ct.toLowerCase()\n\t\t\t}), {\n\t\t\t\t[BUFFER]: buf\n\t\t\t});\n\t\t});\n\t},\n\n\t/**\n * Decode response as json\n *\n * @return Promise\n */\n\tjson() {\n\t\tvar _this2 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\ttry {\n\t\t\t\treturn JSON.parse(buffer.toString());\n\t\t\t} catch (err) {\n\t\t\t\treturn Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));\n\t\t\t}\n\t\t});\n\t},\n\n\t/**\n * Decode response as text\n *\n * @return Promise\n */\n\ttext() {\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn buffer.toString();\n\t\t});\n\t},\n\n\t/**\n * Decode response as buffer (non-spec api)\n *\n * @return Promise\n */\n\tbuffer() {\n\t\treturn consumeBody.call(this);\n\t},\n\n\t/**\n * Decode response as text, while automatically detecting the encoding and\n * trying to decode to UTF-8 (non-spec api)\n *\n * @return Promise\n */\n\ttextConverted() {\n\t\tvar _this3 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn convertBody(buffer, _this3.headers);\n\t\t});\n\t}\n};\n\n// In browsers, all properties are enumerable.\nObject.defineProperties(Body.prototype, {\n\tbody: { enumerable: true },\n\tbodyUsed: { enumerable: true },\n\tarrayBuffer: { enumerable: true },\n\tblob: { enumerable: true },\n\tjson: { enumerable: true },\n\ttext: { enumerable: true }\n});\n\nBody.mixIn = function (proto) {\n\tfor (const name of Object.getOwnPropertyNames(Body.prototype)) {\n\t\t// istanbul ignore else: future proof\n\t\tif (!(name in proto)) {\n\t\t\tconst desc = Object.getOwnPropertyDescriptor(Body.prototype, name);\n\t\t\tObject.defineProperty(proto, name, desc);\n\t\t}\n\t}\n};\n\n/**\n * Consume and convert an entire Body to a Buffer.\n *\n * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body\n *\n * @return Promise\n */\nfunction consumeBody() {\n\tvar _this4 = this;\n\n\tif (this[INTERNALS].disturbed) {\n\t\treturn Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));\n\t}\n\n\tthis[INTERNALS].disturbed = true;\n\n\tif (this[INTERNALS].error) {\n\t\treturn Body.Promise.reject(this[INTERNALS].error);\n\t}\n\n\tlet body = this.body;\n\n\t// body is null\n\tif (body === null) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is blob\n\tif (isBlob(body)) {\n\t\tbody = body.stream();\n\t}\n\n\t// body is buffer\n\tif (Buffer.isBuffer(body)) {\n\t\treturn Body.Promise.resolve(body);\n\t}\n\n\t// istanbul ignore if: should never happen\n\tif (!(body instanceof Stream)) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is 
stream\n\t// get ready to actually consume the body\n\tlet accum = [];\n\tlet accumBytes = 0;\n\tlet abort = false;\n\n\treturn new Body.Promise(function (resolve, reject) {\n\t\tlet resTimeout;\n\n\t\t// allow timeout on slow response body\n\t\tif (_this4.timeout) {\n\t\t\tresTimeout = setTimeout(function () {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));\n\t\t\t}, _this4.timeout);\n\t\t}\n\n\t\t// handle stream errors\n\t\tbody.on('error', function (err) {\n\t\t\tif (err.name === 'AbortError') {\n\t\t\t\t// if the request was aborted, reject with this Error\n\t\t\t\tabort = true;\n\t\t\t\treject(err);\n\t\t\t} else {\n\t\t\t\t// other errors, such as incorrect content-encoding\n\t\t\t\treject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\n\t\tbody.on('data', function (chunk) {\n\t\t\tif (abort || chunk === null) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tif (_this4.size && accumBytes + chunk.length > _this4.size) {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\taccumBytes += chunk.length;\n\t\t\taccum.push(chunk);\n\t\t});\n\n\t\tbody.on('end', function () {\n\t\t\tif (abort) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tclearTimeout(resTimeout);\n\n\t\t\ttry {\n\t\t\t\tresolve(Buffer.concat(accum, accumBytes));\n\t\t\t} catch (err) {\n\t\t\t\t// handle streams that have accumulated too much data (issue #414)\n\t\t\t\treject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\t});\n}\n\n/**\n * Detect buffer encoding and convert to target encoding\n * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding\n *\n * @param Buffer buffer Incoming buffer\n * @param String encoding Target encoding\n * @return String\n */\nfunction convertBody(buffer, headers) {\n\tif (typeof convert !== 'function') {\n\t\tthrow new Error('The package `encoding` must be installed to use the textConverted() function');\n\t}\n\n\tconst ct = headers.get('content-type');\n\tlet charset = 'utf-8';\n\tlet res, str;\n\n\t// header\n\tif (ct) {\n\t\tres = /charset=([^;]*)/i.exec(ct);\n\t}\n\n\t// no charset in content type, peek at response body for at most 1024 bytes\n\tstr = buffer.slice(0, 1024).toString();\n\n\t// html5\n\tif (!res && str) {\n\t\tres = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined;\n\n\t\tthis[MAP] = Object.create(null);\n\n\t\tif (init instanceof Headers) {\n\t\t\tconst rawHeaders = init.raw();\n\t\t\tconst headerNames = Object.keys(rawHeaders);\n\n\t\t\tfor (const headerName of headerNames) {\n\t\t\t\tfor (const value of rawHeaders[headerName]) {\n\t\t\t\t\tthis.append(headerName, value);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\treturn;\n\t\t}\n\n\t\t// We don't worry about converting prop to ByteString here as append()\n\t\t// will handle it.\n\t\tif (init == null) ; else if (typeof init === 'object') {\n\t\t\tconst method = init[Symbol.iterator];\n\t\t\tif (method != null) {\n\t\t\t\tif (typeof method !== 'function') {\n\t\t\t\t\tthrow new TypeError('Header pairs must be iterable');\n\t\t\t\t}\n\n\t\t\t\t// sequence>\n\t\t\t\t// Note: per spec we have to first exhaust the lists then process them\n\t\t\t\tconst pairs = [];\n\t\t\t\tfor (const pair of init) {\n\t\t\t\t\tif (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be iterable');\n\t\t\t\t\t}\n\t\t\t\t\tpairs.push(Array.from(pair));\n\t\t\t\t}\n\n\t\t\t\tfor (const pair of pairs) {\n\t\t\t\t\tif (pair.length !== 2) {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be a name/value tuple');\n\t\t\t\t\t}\n\t\t\t\t\tthis.append(pair[0], pair[1]);\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\t// record\n\t\t\t\tfor (const key of Object.keys(init)) {\n\t\t\t\t\tconst value = init[key];\n\t\t\t\t\tthis.append(key, value);\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tthrow new TypeError('Provided initializer must be an object');\n\t\t}\n\t}\n\n\t/**\n * Return combined header value given name\n *\n * @param String name Header name\n * @return Mixed\n */\n\tget(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key === undefined) {\n\t\t\treturn null;\n\t\t}\n\n\t\treturn this[MAP][key].join(', ');\n\t}\n\n\t/**\n * Iterate over all headers\n *\n * @param Function callback Executed for each item with parameters (value, name, thisArg)\n * @param Boolean thisArg `this` context for callback function\n * @return Void\n */\n\tforEach(callback) {\n\t\tlet thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;\n\n\t\tlet pairs = getHeaders(this);\n\t\tlet i = 0;\n\t\twhile (i < pairs.length) {\n\t\t\tvar _pairs$i = pairs[i];\n\t\t\tconst name = _pairs$i[0],\n\t\t\t value = _pairs$i[1];\n\n\t\t\tcallback.call(thisArg, value, name, this);\n\t\t\tpairs = getHeaders(this);\n\t\t\ti++;\n\t\t}\n\t}\n\n\t/**\n * Overwrite header values given name\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tset(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tthis[MAP][key !== undefined ? 
key : name] = [value];\n\t}\n\n\t/**\n * Append a value onto existing header\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tappend(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tthis[MAP][key].push(value);\n\t\t} else {\n\t\t\tthis[MAP][name] = [value];\n\t\t}\n\t}\n\n\t/**\n * Check for header name existence\n *\n * @param String name Header name\n * @return Boolean\n */\n\thas(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\treturn find(this[MAP], name) !== undefined;\n\t}\n\n\t/**\n * Delete all header values given name\n *\n * @param String name Header name\n * @return Void\n */\n\tdelete(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tdelete this[MAP][key];\n\t\t}\n\t}\n\n\t/**\n * Return raw headers (non-spec api)\n *\n * @return Object\n */\n\traw() {\n\t\treturn this[MAP];\n\t}\n\n\t/**\n * Get an iterator on keys.\n *\n * @return Iterator\n */\n\tkeys() {\n\t\treturn createHeadersIterator(this, 'key');\n\t}\n\n\t/**\n * Get an iterator on values.\n *\n * @return Iterator\n */\n\tvalues() {\n\t\treturn createHeadersIterator(this, 'value');\n\t}\n\n\t/**\n * Get an iterator on entries.\n *\n * This is the default iterator of the Headers object.\n *\n * @return Iterator\n */\n\t[Symbol.iterator]() {\n\t\treturn createHeadersIterator(this, 'key+value');\n\t}\n}\nHeaders.prototype.entries = Headers.prototype[Symbol.iterator];\n\nObject.defineProperty(Headers.prototype, Symbol.toStringTag, {\n\tvalue: 'Headers',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Headers.prototype, {\n\tget: { enumerable: true },\n\tforEach: { enumerable: true },\n\tset: { enumerable: true },\n\tappend: { enumerable: true },\n\thas: { enumerable: true },\n\tdelete: { enumerable: true },\n\tkeys: { enumerable: true },\n\tvalues: { enumerable: true },\n\tentries: { enumerable: true }\n});\n\nfunction getHeaders(headers) {\n\tlet kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';\n\n\tconst keys = Object.keys(headers[MAP]).sort();\n\treturn keys.map(kind === 'key' ? function (k) {\n\t\treturn k.toLowerCase();\n\t} : kind === 'value' ? 
function (k) {\n\t\treturn headers[MAP][k].join(', ');\n\t} : function (k) {\n\t\treturn [k.toLowerCase(), headers[MAP][k].join(', ')];\n\t});\n}\n\nconst INTERNAL = Symbol('internal');\n\nfunction createHeadersIterator(target, kind) {\n\tconst iterator = Object.create(HeadersIteratorPrototype);\n\titerator[INTERNAL] = {\n\t\ttarget,\n\t\tkind,\n\t\tindex: 0\n\t};\n\treturn iterator;\n}\n\nconst HeadersIteratorPrototype = Object.setPrototypeOf({\n\tnext() {\n\t\t// istanbul ignore if\n\t\tif (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {\n\t\t\tthrow new TypeError('Value of `this` is not a HeadersIterator');\n\t\t}\n\n\t\tvar _INTERNAL = this[INTERNAL];\n\t\tconst target = _INTERNAL.target,\n\t\t kind = _INTERNAL.kind,\n\t\t index = _INTERNAL.index;\n\n\t\tconst values = getHeaders(target, kind);\n\t\tconst len = values.length;\n\t\tif (index >= len) {\n\t\t\treturn {\n\t\t\t\tvalue: undefined,\n\t\t\t\tdone: true\n\t\t\t};\n\t\t}\n\n\t\tthis[INTERNAL].index = index + 1;\n\n\t\treturn {\n\t\t\tvalue: values[index],\n\t\t\tdone: false\n\t\t};\n\t}\n}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));\n\nObject.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {\n\tvalue: 'HeadersIterator',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * Export the Headers object in a form that Node.js can consume.\n *\n * @param Headers headers\n * @return Object\n */\nfunction exportNodeCompatibleHeaders(headers) {\n\tconst obj = Object.assign({ __proto__: null }, headers[MAP]);\n\n\t// http.request() only supports string as Host header. This hack makes\n\t// specifying custom Host header possible.\n\tconst hostHeaderKey = find(headers[MAP], 'Host');\n\tif (hostHeaderKey !== undefined) {\n\t\tobj[hostHeaderKey] = obj[hostHeaderKey][0];\n\t}\n\n\treturn obj;\n}\n\n/**\n * Create a Headers object from an object of headers, ignoring those that do\n * not conform to HTTP grammar productions.\n *\n * @param Object obj Object of headers\n * @return Headers\n */\nfunction createHeadersLenient(obj) {\n\tconst headers = new Headers();\n\tfor (const name of Object.keys(obj)) {\n\t\tif (invalidTokenRegex.test(name)) {\n\t\t\tcontinue;\n\t\t}\n\t\tif (Array.isArray(obj[name])) {\n\t\t\tfor (const val of obj[name]) {\n\t\t\t\tif (invalidHeaderCharRegex.test(val)) {\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\tif (headers[MAP][name] === undefined) {\n\t\t\t\t\theaders[MAP][name] = [val];\n\t\t\t\t} else {\n\t\t\t\t\theaders[MAP][name].push(val);\n\t\t\t\t}\n\t\t\t}\n\t\t} else if (!invalidHeaderCharRegex.test(obj[name])) {\n\t\t\theaders[MAP][name] = [obj[name]];\n\t\t}\n\t}\n\treturn headers;\n}\n\nconst INTERNALS$1 = Symbol('Response internals');\n\n// fix an issue where \"STATUS_CODES\" aren't a named export for node <10\nconst STATUS_CODES = http.STATUS_CODES;\n\n/**\n * Response class\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nclass Response {\n\tconstructor() {\n\t\tlet body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;\n\t\tlet opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tBody.call(this, body, opts);\n\n\t\tconst status = opts.status || 200;\n\t\tconst headers = new Headers(opts.headers);\n\n\t\tif (body != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(body);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tthis[INTERNALS$1] = {\n\t\t\turl: opts.url,\n\t\t\tstatus,\n\t\t\tstatusText: opts.statusText || STATUS_CODES[status],\n\t\t\theaders,\n\t\t\tcounter: opts.counter\n\t\t};\n\t}\n\n\tget url() {\n\t\treturn this[INTERNALS$1].url || '';\n\t}\n\n\tget status() {\n\t\treturn this[INTERNALS$1].status;\n\t}\n\n\t/**\n * Convenience property representing if the request ended normally\n */\n\tget ok() {\n\t\treturn this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;\n\t}\n\n\tget redirected() {\n\t\treturn this[INTERNALS$1].counter > 0;\n\t}\n\n\tget statusText() {\n\t\treturn this[INTERNALS$1].statusText;\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$1].headers;\n\t}\n\n\t/**\n * Clone this response\n *\n * @return Response\n */\n\tclone() {\n\t\treturn new Response(clone(this), {\n\t\t\turl: this.url,\n\t\t\tstatus: this.status,\n\t\t\tstatusText: this.statusText,\n\t\t\theaders: this.headers,\n\t\t\tok: this.ok,\n\t\t\tredirected: this.redirected\n\t\t});\n\t}\n}\n\nBody.mixIn(Response.prototype);\n\nObject.defineProperties(Response.prototype, {\n\turl: { enumerable: true },\n\tstatus: { enumerable: true },\n\tok: { enumerable: true },\n\tredirected: { enumerable: true },\n\tstatusText: { enumerable: true },\n\theaders: { enumerable: true },\n\tclone: { enumerable: true }\n});\n\nObject.defineProperty(Response.prototype, Symbol.toStringTag, {\n\tvalue: 'Response',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nconst INTERNALS$2 = Symbol('Request internals');\n\n// fix an issue where \"format\", \"parse\" aren't a named export for node <10\nconst parse_url = Url.parse;\nconst format_url = Url.format;\n\nconst streamDestructionSupported = 'destroy' in Stream.Readable.prototype;\n\n/**\n * Check if a value is an instance of Request.\n *\n * @param Mixed input\n * @return Boolean\n */\nfunction isRequest(input) {\n\treturn typeof input === 'object' && typeof input[INTERNALS$2] === 'object';\n}\n\nfunction isAbortSignal(signal) {\n\tconst proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal);\n\treturn !!(proto && proto.constructor.name === 'AbortSignal');\n}\n\n/**\n * Request class\n *\n * @param Mixed input Url or Request instance\n * @param Object init Custom options\n * @return Void\n */\nclass Request {\n\tconstructor(input) {\n\t\tlet init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tlet parsedURL;\n\n\t\t// normalize input\n\t\tif (!isRequest(input)) {\n\t\t\tif (input && input.href) {\n\t\t\t\t// in order to support Node.js' Url objects; though WHATWG's URL objects\n\t\t\t\t// will fall into this branch also (since their `toString()` will return\n\t\t\t\t// `href` property anyway)\n\t\t\t\tparsedURL = parse_url(input.href);\n\t\t\t} else {\n\t\t\t\t// coerce input to a string before attempting to parse\n\t\t\t\tparsedURL = parse_url(`${input}`);\n\t\t\t}\n\t\t\tinput = {};\n\t\t} else {\n\t\t\tparsedURL = parse_url(input.url);\n\t\t}\n\n\t\tlet method = init.method || input.method || 'GET';\n\t\tmethod = method.toUpperCase();\n\n\t\tif ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {\n\t\t\tthrow new TypeError('Request with GET/HEAD method cannot have body');\n\t\t}\n\n\t\tlet inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;\n\n\t\tBody.call(this, inputBody, {\n\t\t\ttimeout: init.timeout || input.timeout || 0,\n\t\t\tsize: init.size || input.size || 0\n\t\t});\n\n\t\tconst headers = new Headers(init.headers || input.headers || {});\n\n\t\tif (inputBody != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(inputBody);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tlet signal = isRequest(input) ? input.signal : null;\n\t\tif ('signal' in init) signal = init.signal;\n\n\t\tif (signal != null && !isAbortSignal(signal)) {\n\t\t\tthrow new TypeError('Expected signal to be an instanceof AbortSignal');\n\t\t}\n\n\t\tthis[INTERNALS$2] = {\n\t\t\tmethod,\n\t\t\tredirect: init.redirect || input.redirect || 'follow',\n\t\t\theaders,\n\t\t\tparsedURL,\n\t\t\tsignal\n\t\t};\n\n\t\t// node-fetch-only options\n\t\tthis.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;\n\t\tthis.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? 
input.compress : true;\n\t\tthis.counter = init.counter || input.counter || 0;\n\t\tthis.agent = init.agent || input.agent;\n\t}\n\n\tget method() {\n\t\treturn this[INTERNALS$2].method;\n\t}\n\n\tget url() {\n\t\treturn format_url(this[INTERNALS$2].parsedURL);\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$2].headers;\n\t}\n\n\tget redirect() {\n\t\treturn this[INTERNALS$2].redirect;\n\t}\n\n\tget signal() {\n\t\treturn this[INTERNALS$2].signal;\n\t}\n\n\t/**\n * Clone this request\n *\n * @return Request\n */\n\tclone() {\n\t\treturn new Request(this);\n\t}\n}\n\nBody.mixIn(Request.prototype);\n\nObject.defineProperty(Request.prototype, Symbol.toStringTag, {\n\tvalue: 'Request',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Request.prototype, {\n\tmethod: { enumerable: true },\n\turl: { enumerable: true },\n\theaders: { enumerable: true },\n\tredirect: { enumerable: true },\n\tclone: { enumerable: true },\n\tsignal: { enumerable: true }\n});\n\n/**\n * Convert a Request to Node.js http request options.\n *\n * @param Request A Request instance\n * @return Object The options object to be passed to http.request\n */\nfunction getNodeRequestOptions(request) {\n\tconst parsedURL = request[INTERNALS$2].parsedURL;\n\tconst headers = new Headers(request[INTERNALS$2].headers);\n\n\t// fetch step 1.3\n\tif (!headers.has('Accept')) {\n\t\theaders.set('Accept', '*/*');\n\t}\n\n\t// Basic fetch\n\tif (!parsedURL.protocol || !parsedURL.hostname) {\n\t\tthrow new TypeError('Only absolute URLs are supported');\n\t}\n\n\tif (!/^https?:$/.test(parsedURL.protocol)) {\n\t\tthrow new TypeError('Only HTTP(S) protocols are supported');\n\t}\n\n\tif (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {\n\t\tthrow new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');\n\t}\n\n\t// HTTP-network-or-cache fetch steps 2.4-2.7\n\tlet contentLengthValue = null;\n\tif (request.body == null && /^(POST|PUT)$/i.test(request.method)) {\n\t\tcontentLengthValue = '0';\n\t}\n\tif (request.body != null) {\n\t\tconst totalBytes = getTotalBytes(request);\n\t\tif (typeof totalBytes === 'number') {\n\t\t\tcontentLengthValue = String(totalBytes);\n\t\t}\n\t}\n\tif (contentLengthValue) {\n\t\theaders.set('Content-Length', contentLengthValue);\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.11\n\tif (!headers.has('User-Agent')) {\n\t\theaders.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.15\n\tif (request.compress && !headers.has('Accept-Encoding')) {\n\t\theaders.set('Accept-Encoding', 'gzip,deflate');\n\t}\n\n\tlet agent = request.agent;\n\tif (typeof agent === 'function') {\n\t\tagent = agent(parsedURL);\n\t}\n\n\tif (!headers.has('Connection') && !agent) {\n\t\theaders.set('Connection', 'close');\n\t}\n\n\t// HTTP-network fetch step 4.2\n\t// chunked encoding is handled by Node.js\n\n\treturn Object.assign({}, parsedURL, {\n\t\tmethod: request.method,\n\t\theaders: exportNodeCompatibleHeaders(headers),\n\t\tagent\n\t});\n}\n\n/**\n * abort-error.js\n *\n * AbortError interface for cancelled requests\n */\n\n/**\n * Create AbortError instance\n *\n * @param String message Error message for human\n * @return AbortError\n */\nfunction AbortError(message) {\n Error.call(this, message);\n\n this.type = 'aborted';\n this.message = message;\n\n // hide custom error implementation details from end-users\n 
Error.captureStackTrace(this, this.constructor);\n}\n\nAbortError.prototype = Object.create(Error.prototype);\nAbortError.prototype.constructor = AbortError;\nAbortError.prototype.name = 'AbortError';\n\n// fix an issue where \"PassThrough\", \"resolve\" aren't a named export for node <10\nconst PassThrough$1 = Stream.PassThrough;\nconst resolve_url = Url.resolve;\n\n/**\n * Fetch function\n *\n * @param Mixed url Absolute url or Request instance\n * @param Object opts Fetch options\n * @return Promise\n */\nfunction fetch(url, opts) {\n\n\t// allow custom promise\n\tif (!fetch.Promise) {\n\t\tthrow new Error('native promise missing, set fetch.Promise to your favorite alternative');\n\t}\n\n\tBody.Promise = fetch.Promise;\n\n\t// wrap http.request into fetch\n\treturn new fetch.Promise(function (resolve, reject) {\n\t\t// build request object\n\t\tconst request = new Request(url, opts);\n\t\tconst options = getNodeRequestOptions(request);\n\n\t\tconst send = (options.protocol === 'https:' ? https : http).request;\n\t\tconst signal = request.signal;\n\n\t\tlet response = null;\n\n\t\tconst abort = function abort() {\n\t\t\tlet error = new AbortError('The user aborted a request.');\n\t\t\treject(error);\n\t\t\tif (request.body && request.body instanceof Stream.Readable) {\n\t\t\t\trequest.body.destroy(error);\n\t\t\t}\n\t\t\tif (!response || !response.body) return;\n\t\t\tresponse.body.emit('error', error);\n\t\t};\n\n\t\tif (signal && signal.aborted) {\n\t\t\tabort();\n\t\t\treturn;\n\t\t}\n\n\t\tconst abortAndFinalize = function abortAndFinalize() {\n\t\t\tabort();\n\t\t\tfinalize();\n\t\t};\n\n\t\t// send request\n\t\tconst req = send(options);\n\t\tlet reqTimeout;\n\n\t\tif (signal) {\n\t\t\tsignal.addEventListener('abort', abortAndFinalize);\n\t\t}\n\n\t\tfunction finalize() {\n\t\t\treq.abort();\n\t\t\tif (signal) signal.removeEventListener('abort', abortAndFinalize);\n\t\t\tclearTimeout(reqTimeout);\n\t\t}\n\n\t\tif (request.timeout) {\n\t\t\treq.once('socket', function (socket) {\n\t\t\t\treqTimeout = setTimeout(function () {\n\t\t\t\t\treject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));\n\t\t\t\t\tfinalize();\n\t\t\t\t}, request.timeout);\n\t\t\t});\n\t\t}\n\n\t\treq.on('error', function (err) {\n\t\t\treject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));\n\t\t\tfinalize();\n\t\t});\n\n\t\treq.on('response', function (res) {\n\t\t\tclearTimeout(reqTimeout);\n\n\t\t\tconst headers = createHeadersLenient(res.headers);\n\n\t\t\t// HTTP fetch step 5\n\t\t\tif (fetch.isRedirect(res.statusCode)) {\n\t\t\t\t// HTTP fetch step 5.2\n\t\t\t\tconst location = headers.get('Location');\n\n\t\t\t\t// HTTP fetch step 5.3\n\t\t\t\tconst locationURL = location === null ? 
null : resolve_url(request.url, location);\n\n\t\t\t\t// HTTP fetch step 5.5\n\t\t\t\tswitch (request.redirect) {\n\t\t\t\t\tcase 'error':\n\t\t\t\t\t\treject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\tcase 'manual':\n\t\t\t\t\t\t// node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.\n\t\t\t\t\t\tif (locationURL !== null) {\n\t\t\t\t\t\t\t// handle corrupted header\n\t\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\t\theaders.set('Location', locationURL);\n\t\t\t\t\t\t\t} catch (err) {\n\t\t\t\t\t\t\t\t// istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request\n\t\t\t\t\t\t\t\treject(err);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase 'follow':\n\t\t\t\t\t\t// HTTP-redirect fetch step 2\n\t\t\t\t\t\tif (locationURL === null) {\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 5\n\t\t\t\t\t\tif (request.counter >= request.follow) {\n\t\t\t\t\t\t\treject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 6 (counter increment)\n\t\t\t\t\t\t// Create a new Request object.\n\t\t\t\t\t\tconst requestOpts = {\n\t\t\t\t\t\t\theaders: new Headers(request.headers),\n\t\t\t\t\t\t\tfollow: request.follow,\n\t\t\t\t\t\t\tcounter: request.counter + 1,\n\t\t\t\t\t\t\tagent: request.agent,\n\t\t\t\t\t\t\tcompress: request.compress,\n\t\t\t\t\t\t\tmethod: request.method,\n\t\t\t\t\t\t\tbody: request.body,\n\t\t\t\t\t\t\tsignal: request.signal,\n\t\t\t\t\t\t\ttimeout: request.timeout,\n\t\t\t\t\t\t\tsize: request.size\n\t\t\t\t\t\t};\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 9\n\t\t\t\t\t\tif (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {\n\t\t\t\t\t\t\treject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 11\n\t\t\t\t\t\tif (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {\n\t\t\t\t\t\t\trequestOpts.method = 'GET';\n\t\t\t\t\t\t\trequestOpts.body = undefined;\n\t\t\t\t\t\t\trequestOpts.headers.delete('content-length');\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 15\n\t\t\t\t\t\tresolve(fetch(new Request(locationURL, requestOpts)));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// prepare response\n\t\t\tres.once('end', function () {\n\t\t\t\tif (signal) signal.removeEventListener('abort', abortAndFinalize);\n\t\t\t});\n\t\t\tlet body = res.pipe(new PassThrough$1());\n\n\t\t\tconst response_options = {\n\t\t\t\turl: request.url,\n\t\t\t\tstatus: res.statusCode,\n\t\t\t\tstatusText: res.statusMessage,\n\t\t\t\theaders: headers,\n\t\t\t\tsize: request.size,\n\t\t\t\ttimeout: request.timeout,\n\t\t\t\tcounter: request.counter\n\t\t\t};\n\n\t\t\t// HTTP-network fetch step 12.1.1.3\n\t\t\tconst codings = headers.get('Content-Encoding');\n\n\t\t\t// HTTP-network fetch step 12.1.1.4: handle content codings\n\n\t\t\t// in following scenarios we ignore compression support\n\t\t\t// 1. compression support is disabled\n\t\t\t// 2. HEAD request\n\t\t\t// 3. no Content-Encoding header\n\t\t\t// 4. 
no content response (204)\n\t\t\t// 5. content not modified response (304)\n\t\t\tif (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// For Node v6+\n\t\t\t// Be less strict when decoding compressed responses, since sometimes\n\t\t\t// servers send slightly invalid responses that are still accepted\n\t\t\t// by common browsers.\n\t\t\t// Always using Z_SYNC_FLUSH is what cURL does.\n\t\t\tconst zlibOptions = {\n\t\t\t\tflush: zlib.Z_SYNC_FLUSH,\n\t\t\t\tfinishFlush: zlib.Z_SYNC_FLUSH\n\t\t\t};\n\n\t\t\t// for gzip\n\t\t\tif (codings == 'gzip' || codings == 'x-gzip') {\n\t\t\t\tbody = body.pipe(zlib.createGunzip(zlibOptions));\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for deflate\n\t\t\tif (codings == 'deflate' || codings == 'x-deflate') {\n\t\t\t\t// handle the infamous raw deflate response from old servers\n\t\t\t\t// a hack for old IIS and Apache servers\n\t\t\t\tconst raw = res.pipe(new PassThrough$1());\n\t\t\t\traw.once('data', function (chunk) {\n\t\t\t\t\t// see http://stackoverflow.com/questions/37519828\n\t\t\t\t\tif ((chunk[0] & 0x0F) === 0x08) {\n\t\t\t\t\t\tbody = body.pipe(zlib.createInflate());\n\t\t\t\t\t} else {\n\t\t\t\t\t\tbody = body.pipe(zlib.createInflateRaw());\n\t\t\t\t\t}\n\t\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\t\tresolve(response);\n\t\t\t\t});\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for br\n\t\t\tif (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {\n\t\t\t\tbody = body.pipe(zlib.createBrotliDecompress());\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// otherwise, use response as-is\n\t\t\tresponse = new Response(body, response_options);\n\t\t\tresolve(response);\n\t\t});\n\n\t\twriteToStream(req, request);\n\t});\n}\n/**\n * Redirect code matching\n *\n * @param Number code Status code\n * @return Boolean\n */\nfetch.isRedirect = function (code) {\n\treturn code === 301 || code === 302 || code === 303 || code === 307 || code === 308;\n};\n\n// expose Promise\nfetch.Promise = global.Promise;\n\nmodule.exports = exports = fetch;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.default = exports;\nexports.Headers = Headers;\nexports.Request = Request;\nexports.Response = Response;\nexports.FetchError = FetchError;\n","var wrappy = require('wrappy')\nmodule.exports = wrappy(once)\nmodule.exports.strict = wrappy(onceStrict)\n\nonce.proto = once(function () {\n Object.defineProperty(Function.prototype, 'once', {\n value: function () {\n return once(this)\n },\n configurable: true\n })\n\n Object.defineProperty(Function.prototype, 'onceStrict', {\n value: function () {\n return onceStrict(this)\n },\n configurable: true\n })\n})\n\nfunction once (fn) {\n var f = function () {\n if (f.called) return f.value\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n f.called = false\n return f\n}\n\nfunction onceStrict (fn) {\n var f = function () {\n if (f.called)\n throw new Error(f.onceError)\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n var name = fn.name || 'Function wrapped with `once`'\n f.onceError = name + \" shouldn't be called more than once\"\n f.called = false\n return f\n}\n","'use 
strict';\n\nmodule.exports = require('./lib/picomatch');\n","'use strict';\n\nconst path = require('path');\nconst WIN_SLASH = '\\\\\\\\/';\nconst WIN_NO_SLASH = `[^${WIN_SLASH}]`;\n\n/**\n * Posix glob regex\n */\n\nconst DOT_LITERAL = '\\\\.';\nconst PLUS_LITERAL = '\\\\+';\nconst QMARK_LITERAL = '\\\\?';\nconst SLASH_LITERAL = '\\\\/';\nconst ONE_CHAR = '(?=.)';\nconst QMARK = '[^/]';\nconst END_ANCHOR = `(?:${SLASH_LITERAL}|$)`;\nconst START_ANCHOR = `(?:^|${SLASH_LITERAL})`;\nconst DOTS_SLASH = `${DOT_LITERAL}{1,2}${END_ANCHOR}`;\nconst NO_DOT = `(?!${DOT_LITERAL})`;\nconst NO_DOTS = `(?!${START_ANCHOR}${DOTS_SLASH})`;\nconst NO_DOT_SLASH = `(?!${DOT_LITERAL}{0,1}${END_ANCHOR})`;\nconst NO_DOTS_SLASH = `(?!${DOTS_SLASH})`;\nconst QMARK_NO_DOT = `[^.${SLASH_LITERAL}]`;\nconst STAR = `${QMARK}*?`;\n\nconst POSIX_CHARS = {\n DOT_LITERAL,\n PLUS_LITERAL,\n QMARK_LITERAL,\n SLASH_LITERAL,\n ONE_CHAR,\n QMARK,\n END_ANCHOR,\n DOTS_SLASH,\n NO_DOT,\n NO_DOTS,\n NO_DOT_SLASH,\n NO_DOTS_SLASH,\n QMARK_NO_DOT,\n STAR,\n START_ANCHOR\n};\n\n/**\n * Windows glob regex\n */\n\nconst WINDOWS_CHARS = {\n ...POSIX_CHARS,\n\n SLASH_LITERAL: `[${WIN_SLASH}]`,\n QMARK: WIN_NO_SLASH,\n STAR: `${WIN_NO_SLASH}*?`,\n DOTS_SLASH: `${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$)`,\n NO_DOT: `(?!${DOT_LITERAL})`,\n NO_DOTS: `(?!(?:^|[${WIN_SLASH}])${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`,\n NO_DOT_SLASH: `(?!${DOT_LITERAL}{0,1}(?:[${WIN_SLASH}]|$))`,\n NO_DOTS_SLASH: `(?!${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`,\n QMARK_NO_DOT: `[^.${WIN_SLASH}]`,\n START_ANCHOR: `(?:^|[${WIN_SLASH}])`,\n END_ANCHOR: `(?:[${WIN_SLASH}]|$)`\n};\n\n/**\n * POSIX Bracket Regex\n */\n\nconst POSIX_REGEX_SOURCE = {\n alnum: 'a-zA-Z0-9',\n alpha: 'a-zA-Z',\n ascii: '\\\\x00-\\\\x7F',\n blank: ' \\\\t',\n cntrl: '\\\\x00-\\\\x1F\\\\x7F',\n digit: '0-9',\n graph: '\\\\x21-\\\\x7E',\n lower: 'a-z',\n print: '\\\\x20-\\\\x7E ',\n punct: '\\\\-!\"#$%&\\'()\\\\*+,./:;<=>?@[\\\\]^_`{|}~',\n space: ' \\\\t\\\\r\\\\n\\\\v\\\\f',\n upper: 'A-Z',\n word: 'A-Za-z0-9_',\n xdigit: 'A-Fa-f0-9'\n};\n\nmodule.exports = {\n MAX_LENGTH: 1024 * 64,\n POSIX_REGEX_SOURCE,\n\n // regular expressions\n REGEX_BACKSLASH: /\\\\(?![*+?^${}(|)[\\]])/g,\n REGEX_NON_SPECIAL_CHARS: /^[^@![\\].,$*+?^{}()|\\\\/]+/,\n REGEX_SPECIAL_CHARS: /[-*+?.^${}(|)[\\]]/,\n REGEX_SPECIAL_CHARS_BACKREF: /(\\\\?)((\\W)(\\3*))/g,\n REGEX_SPECIAL_CHARS_GLOBAL: /([-*+?.^${}(|)[\\]])/g,\n REGEX_REMOVE_BACKSLASH: /(?:\\[.*?[^\\\\]\\]|\\\\(?=.))/g,\n\n // Replace globs with equivalent patterns to reduce parsing time.\n REPLACEMENTS: {\n '***': '*',\n '**/**': '**',\n '**/**/**': '**'\n },\n\n // Digits\n CHAR_0: 48, /* 0 */\n CHAR_9: 57, /* 9 */\n\n // Alphabet chars.\n CHAR_UPPERCASE_A: 65, /* A */\n CHAR_LOWERCASE_A: 97, /* a */\n CHAR_UPPERCASE_Z: 90, /* Z */\n CHAR_LOWERCASE_Z: 122, /* z */\n\n CHAR_LEFT_PARENTHESES: 40, /* ( */\n CHAR_RIGHT_PARENTHESES: 41, /* ) */\n\n CHAR_ASTERISK: 42, /* * */\n\n // Non-alphabetic chars.\n CHAR_AMPERSAND: 38, /* & */\n CHAR_AT: 64, /* @ */\n CHAR_BACKWARD_SLASH: 92, /* \\ */\n CHAR_CARRIAGE_RETURN: 13, /* \\r */\n CHAR_CIRCUMFLEX_ACCENT: 94, /* ^ */\n CHAR_COLON: 58, /* : */\n CHAR_COMMA: 44, /* , */\n CHAR_DOT: 46, /* . */\n CHAR_DOUBLE_QUOTE: 34, /* \" */\n CHAR_EQUAL: 61, /* = */\n CHAR_EXCLAMATION_MARK: 33, /* ! 
*/\n CHAR_FORM_FEED: 12, /* \\f */\n CHAR_FORWARD_SLASH: 47, /* / */\n CHAR_GRAVE_ACCENT: 96, /* ` */\n CHAR_HASH: 35, /* # */\n CHAR_HYPHEN_MINUS: 45, /* - */\n CHAR_LEFT_ANGLE_BRACKET: 60, /* < */\n CHAR_LEFT_CURLY_BRACE: 123, /* { */\n CHAR_LEFT_SQUARE_BRACKET: 91, /* [ */\n CHAR_LINE_FEED: 10, /* \\n */\n CHAR_NO_BREAK_SPACE: 160, /* \\u00A0 */\n CHAR_PERCENT: 37, /* % */\n CHAR_PLUS: 43, /* + */\n CHAR_QUESTION_MARK: 63, /* ? */\n CHAR_RIGHT_ANGLE_BRACKET: 62, /* > */\n CHAR_RIGHT_CURLY_BRACE: 125, /* } */\n CHAR_RIGHT_SQUARE_BRACKET: 93, /* ] */\n CHAR_SEMICOLON: 59, /* ; */\n CHAR_SINGLE_QUOTE: 39, /* ' */\n CHAR_SPACE: 32, /* */\n CHAR_TAB: 9, /* \\t */\n CHAR_UNDERSCORE: 95, /* _ */\n CHAR_VERTICAL_LINE: 124, /* | */\n CHAR_ZERO_WIDTH_NOBREAK_SPACE: 65279, /* \\uFEFF */\n\n SEP: path.sep,\n\n /**\n * Create EXTGLOB_CHARS\n */\n\n extglobChars(chars) {\n return {\n '!': { type: 'negate', open: '(?:(?!(?:', close: `))${chars.STAR})` },\n '?': { type: 'qmark', open: '(?:', close: ')?' },\n '+': { type: 'plus', open: '(?:', close: ')+' },\n '*': { type: 'star', open: '(?:', close: ')*' },\n '@': { type: 'at', open: '(?:', close: ')' }\n };\n },\n\n /**\n * Create GLOB_CHARS\n */\n\n globChars(win32) {\n return win32 === true ? WINDOWS_CHARS : POSIX_CHARS;\n }\n};\n","'use strict';\n\nconst constants = require('./constants');\nconst utils = require('./utils');\n\n/**\n * Constants\n */\n\nconst {\n MAX_LENGTH,\n POSIX_REGEX_SOURCE,\n REGEX_NON_SPECIAL_CHARS,\n REGEX_SPECIAL_CHARS_BACKREF,\n REPLACEMENTS\n} = constants;\n\n/**\n * Helpers\n */\n\nconst expandRange = (args, options) => {\n if (typeof options.expandRange === 'function') {\n return options.expandRange(...args, options);\n }\n\n args.sort();\n const value = `[${args.join('-')}]`;\n\n try {\n /* eslint-disable-next-line no-new */\n new RegExp(value);\n } catch (ex) {\n return args.map(v => utils.escapeRegex(v)).join('..');\n }\n\n return value;\n};\n\n/**\n * Create the message for a syntax error\n */\n\nconst syntaxError = (type, char) => {\n return `Missing ${type}: \"${char}\" - use \"\\\\\\\\${char}\" to match literal characters`;\n};\n\n/**\n * Parse the given input string.\n * @param {String} input\n * @param {Object} options\n * @return {Object}\n */\n\nconst parse = (input, options) => {\n if (typeof input !== 'string') {\n throw new TypeError('Expected a string');\n }\n\n input = REPLACEMENTS[input] || input;\n\n const opts = { ...options };\n const max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH;\n\n let len = input.length;\n if (len > max) {\n throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`);\n }\n\n const bos = { type: 'bos', value: '', output: opts.prepend || '' };\n const tokens = [bos];\n\n const capture = opts.capture ? '' : '?:';\n const win32 = utils.isWindows(options);\n\n // create constants based on platform, for windows or posix\n const PLATFORM_CHARS = constants.globChars(win32);\n const EXTGLOB_CHARS = constants.extglobChars(PLATFORM_CHARS);\n\n const {\n DOT_LITERAL,\n PLUS_LITERAL,\n SLASH_LITERAL,\n ONE_CHAR,\n DOTS_SLASH,\n NO_DOT,\n NO_DOT_SLASH,\n NO_DOTS_SLASH,\n QMARK,\n QMARK_NO_DOT,\n STAR,\n START_ANCHOR\n } = PLATFORM_CHARS;\n\n const globstar = (opts) => {\n return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`;\n };\n\n const nodot = opts.dot ? '' : NO_DOT;\n const qmarkNoDot = opts.dot ? QMARK : QMARK_NO_DOT;\n let star = opts.bash === true ? 
globstar(opts) : STAR;\n\n if (opts.capture) {\n star = `(${star})`;\n }\n\n // minimatch options support\n if (typeof opts.noext === 'boolean') {\n opts.noextglob = opts.noext;\n }\n\n const state = {\n input,\n index: -1,\n start: 0,\n dot: opts.dot === true,\n consumed: '',\n output: '',\n prefix: '',\n backtrack: false,\n negated: false,\n brackets: 0,\n braces: 0,\n parens: 0,\n quotes: 0,\n globstar: false,\n tokens\n };\n\n input = utils.removePrefix(input, state);\n len = input.length;\n\n const extglobs = [];\n const braces = [];\n const stack = [];\n let prev = bos;\n let value;\n\n /**\n * Tokenizing helpers\n */\n\n const eos = () => state.index === len - 1;\n const peek = state.peek = (n = 1) => input[state.index + n];\n const advance = state.advance = () => input[++state.index];\n const remaining = () => input.slice(state.index + 1);\n const consume = (value = '', num = 0) => {\n state.consumed += value;\n state.index += num;\n };\n const append = token => {\n state.output += token.output != null ? token.output : token.value;\n consume(token.value);\n };\n\n const negate = () => {\n let count = 1;\n\n while (peek() === '!' && (peek(2) !== '(' || peek(3) === '?')) {\n advance();\n state.start++;\n count++;\n }\n\n if (count % 2 === 0) {\n return false;\n }\n\n state.negated = true;\n state.start++;\n return true;\n };\n\n const increment = type => {\n state[type]++;\n stack.push(type);\n };\n\n const decrement = type => {\n state[type]--;\n stack.pop();\n };\n\n /**\n * Push tokens onto the tokens array. This helper speeds up\n * tokenizing by 1) helping us avoid backtracking as much as possible,\n * and 2) helping us avoid creating extra tokens when consecutive\n * characters are plain text. This improves performance and simplifies\n * lookbehinds.\n */\n\n const push = tok => {\n if (prev.type === 'globstar') {\n const isBrace = state.braces > 0 && (tok.type === 'comma' || tok.type === 'brace');\n const isExtglob = tok.extglob === true || (extglobs.length && (tok.type === 'pipe' || tok.type === 'paren'));\n\n if (tok.type !== 'slash' && tok.type !== 'paren' && !isBrace && !isExtglob) {\n state.output = state.output.slice(0, -prev.output.length);\n prev.type = 'star';\n prev.value = '*';\n prev.output = star;\n state.output += prev.output;\n }\n }\n\n if (extglobs.length && tok.type !== 'paren' && !EXTGLOB_CHARS[tok.value]) {\n extglobs[extglobs.length - 1].inner += tok.value;\n }\n\n if (tok.value || tok.output) append(tok);\n if (prev && prev.type === 'text' && tok.type === 'text') {\n prev.value += tok.value;\n prev.output = (prev.output || '') + tok.value;\n return;\n }\n\n tok.prev = prev;\n tokens.push(tok);\n prev = tok;\n };\n\n const extglobOpen = (type, value) => {\n const token = { ...EXTGLOB_CHARS[value], conditions: 1, inner: '' };\n\n token.prev = prev;\n token.parens = state.parens;\n token.output = state.output;\n const output = (opts.capture ? '(' : '') + token.open;\n\n increment('parens');\n push({ type, value, output: state.output ? '' : ONE_CHAR });\n push({ type: 'paren', extglob: true, value: advance(), output });\n extglobs.push(token);\n };\n\n const extglobClose = token => {\n let output = token.close + (opts.capture ? 
')' : '');\n\n if (token.type === 'negate') {\n let extglobStar = star;\n\n if (token.inner && token.inner.length > 1 && token.inner.includes('/')) {\n extglobStar = globstar(opts);\n }\n\n if (extglobStar !== star || eos() || /^\\)+$/.test(remaining())) {\n output = token.close = `)$))${extglobStar}`;\n }\n\n if (token.prev.type === 'bos' && eos()) {\n state.negatedExtglob = true;\n }\n }\n\n push({ type: 'paren', extglob: true, value, output });\n decrement('parens');\n };\n\n /**\n * Fast paths\n */\n\n if (opts.fastpaths !== false && !/(^[*!]|[/()[\\]{}\"])/.test(input)) {\n let backslashes = false;\n\n let output = input.replace(REGEX_SPECIAL_CHARS_BACKREF, (m, esc, chars, first, rest, index) => {\n if (first === '\\\\') {\n backslashes = true;\n return m;\n }\n\n if (first === '?') {\n if (esc) {\n return esc + first + (rest ? QMARK.repeat(rest.length) : '');\n }\n if (index === 0) {\n return qmarkNoDot + (rest ? QMARK.repeat(rest.length) : '');\n }\n return QMARK.repeat(chars.length);\n }\n\n if (first === '.') {\n return DOT_LITERAL.repeat(chars.length);\n }\n\n if (first === '*') {\n if (esc) {\n return esc + first + (rest ? star : '');\n }\n return star;\n }\n return esc ? m : `\\\\${m}`;\n });\n\n if (backslashes === true) {\n if (opts.unescape === true) {\n output = output.replace(/\\\\/g, '');\n } else {\n output = output.replace(/\\\\+/g, m => {\n return m.length % 2 === 0 ? '\\\\\\\\' : (m ? '\\\\' : '');\n });\n }\n }\n\n if (output === input && opts.contains === true) {\n state.output = input;\n return state;\n }\n\n state.output = utils.wrapOutput(output, state, options);\n return state;\n }\n\n /**\n * Tokenize input until we reach end-of-string\n */\n\n while (!eos()) {\n value = advance();\n\n if (value === '\\u0000') {\n continue;\n }\n\n /**\n * Escaped characters\n */\n\n if (value === '\\\\') {\n const next = peek();\n\n if (next === '/' && opts.bash !== true) {\n continue;\n }\n\n if (next === '.' || next === ';') {\n continue;\n }\n\n if (!next) {\n value += '\\\\';\n push({ type: 'text', value });\n continue;\n }\n\n // collapse slashes to reduce potential for exploits\n const match = /^\\\\+/.exec(remaining());\n let slashes = 0;\n\n if (match && match[0].length > 2) {\n slashes = match[0].length;\n state.index += slashes;\n if (slashes % 2 !== 0) {\n value += '\\\\';\n }\n }\n\n if (opts.unescape === true) {\n value = advance() || '';\n } else {\n value += advance() || '';\n }\n\n if (state.brackets === 0) {\n push({ type: 'text', value });\n continue;\n }\n }\n\n /**\n * If we're inside a regex character class, continue\n * until we reach the closing bracket.\n */\n\n if (state.brackets > 0 && (value !== ']' || prev.value === '[' || prev.value === '[^')) {\n if (opts.posix !== false && value === ':') {\n const inner = prev.value.slice(1);\n if (inner.includes('[')) {\n prev.posix = true;\n\n if (inner.includes(':')) {\n const idx = prev.value.lastIndexOf('[');\n const pre = prev.value.slice(0, idx);\n const rest = prev.value.slice(idx + 2);\n const posix = POSIX_REGEX_SOURCE[rest];\n if (posix) {\n prev.value = pre + posix;\n state.backtrack = true;\n advance();\n\n if (!bos.output && tokens.indexOf(prev) === 1) {\n bos.output = ONE_CHAR;\n }\n continue;\n }\n }\n }\n }\n\n if ((value === '[' && peek() !== ':') || (value === '-' && peek() === ']')) {\n value = `\\\\${value}`;\n }\n\n if (value === ']' && (prev.value === '[' || prev.value === '[^')) {\n value = `\\\\${value}`;\n }\n\n if (opts.posix === true && value === '!' 
&& prev.value === '[') {\n value = '^';\n }\n\n prev.value += value;\n append({ value });\n continue;\n }\n\n /**\n * If we're inside a quoted string, continue\n * until we reach the closing double quote.\n */\n\n if (state.quotes === 1 && value !== '\"') {\n value = utils.escapeRegex(value);\n prev.value += value;\n append({ value });\n continue;\n }\n\n /**\n * Double quotes\n */\n\n if (value === '\"') {\n state.quotes = state.quotes === 1 ? 0 : 1;\n if (opts.keepQuotes === true) {\n push({ type: 'text', value });\n }\n continue;\n }\n\n /**\n * Parentheses\n */\n\n if (value === '(') {\n increment('parens');\n push({ type: 'paren', value });\n continue;\n }\n\n if (value === ')') {\n if (state.parens === 0 && opts.strictBrackets === true) {\n throw new SyntaxError(syntaxError('opening', '('));\n }\n\n const extglob = extglobs[extglobs.length - 1];\n if (extglob && state.parens === extglob.parens + 1) {\n extglobClose(extglobs.pop());\n continue;\n }\n\n push({ type: 'paren', value, output: state.parens ? ')' : '\\\\)' });\n decrement('parens');\n continue;\n }\n\n /**\n * Square brackets\n */\n\n if (value === '[') {\n if (opts.nobracket === true || !remaining().includes(']')) {\n if (opts.nobracket !== true && opts.strictBrackets === true) {\n throw new SyntaxError(syntaxError('closing', ']'));\n }\n\n value = `\\\\${value}`;\n } else {\n increment('brackets');\n }\n\n push({ type: 'bracket', value });\n continue;\n }\n\n if (value === ']') {\n if (opts.nobracket === true || (prev && prev.type === 'bracket' && prev.value.length === 1)) {\n push({ type: 'text', value, output: `\\\\${value}` });\n continue;\n }\n\n if (state.brackets === 0) {\n if (opts.strictBrackets === true) {\n throw new SyntaxError(syntaxError('opening', '['));\n }\n\n push({ type: 'text', value, output: `\\\\${value}` });\n continue;\n }\n\n decrement('brackets');\n\n const prevValue = prev.value.slice(1);\n if (prev.posix !== true && prevValue[0] === '^' && !prevValue.includes('/')) {\n value = `/${value}`;\n }\n\n prev.value += value;\n append({ value });\n\n // when literal brackets are explicitly disabled\n // assume we should match with a regex character class\n if (opts.literalBrackets === false || utils.hasRegexChars(prevValue)) {\n continue;\n }\n\n const escaped = utils.escapeRegex(prev.value);\n state.output = state.output.slice(0, -prev.value.length);\n\n // when literal brackets are explicitly enabled\n // assume we should escape the brackets to match literal characters\n if (opts.literalBrackets === true) {\n state.output += escaped;\n prev.value = escaped;\n continue;\n }\n\n // when the user specifies nothing, try to match both\n prev.value = `(${capture}${escaped}|${prev.value})`;\n state.output += prev.value;\n continue;\n }\n\n /**\n * Braces\n */\n\n if (value === '{' && opts.nobrace !== true) {\n increment('braces');\n\n const open = {\n type: 'brace',\n value,\n output: '(',\n outputIndex: state.output.length,\n tokensIndex: state.tokens.length\n };\n\n braces.push(open);\n push(open);\n continue;\n }\n\n if (value === '}') {\n const brace = braces[braces.length - 1];\n\n if (opts.nobrace === true || !brace) {\n push({ type: 'text', value, output: value });\n continue;\n }\n\n let output = ')';\n\n if (brace.dots === true) {\n const arr = tokens.slice();\n const range = [];\n\n for (let i = arr.length - 1; i >= 0; i--) {\n tokens.pop();\n if (arr[i].type === 'brace') {\n break;\n }\n if (arr[i].type !== 'dots') {\n range.unshift(arr[i].value);\n }\n }\n\n output = expandRange(range, opts);\n 
state.backtrack = true;\n }\n\n if (brace.comma !== true && brace.dots !== true) {\n const out = state.output.slice(0, brace.outputIndex);\n const toks = state.tokens.slice(brace.tokensIndex);\n brace.value = brace.output = '\\\\{';\n value = output = '\\\\}';\n state.output = out;\n for (const t of toks) {\n state.output += (t.output || t.value);\n }\n }\n\n push({ type: 'brace', value, output });\n decrement('braces');\n braces.pop();\n continue;\n }\n\n /**\n * Pipes\n */\n\n if (value === '|') {\n if (extglobs.length > 0) {\n extglobs[extglobs.length - 1].conditions++;\n }\n push({ type: 'text', value });\n continue;\n }\n\n /**\n * Commas\n */\n\n if (value === ',') {\n let output = value;\n\n const brace = braces[braces.length - 1];\n if (brace && stack[stack.length - 1] === 'braces') {\n brace.comma = true;\n output = '|';\n }\n\n push({ type: 'comma', value, output });\n continue;\n }\n\n /**\n * Slashes\n */\n\n if (value === '/') {\n // if the beginning of the glob is \"./\", advance the start\n // to the current index, and don't add the \"./\" characters\n // to the state. This greatly simplifies lookbehinds when\n // checking for BOS characters like \"!\" and \".\" (not \"./\")\n if (prev.type === 'dot' && state.index === state.start + 1) {\n state.start = state.index + 1;\n state.consumed = '';\n state.output = '';\n tokens.pop();\n prev = bos; // reset \"prev\" to the first token\n continue;\n }\n\n push({ type: 'slash', value, output: SLASH_LITERAL });\n continue;\n }\n\n /**\n * Dots\n */\n\n if (value === '.') {\n if (state.braces > 0 && prev.type === 'dot') {\n if (prev.value === '.') prev.output = DOT_LITERAL;\n const brace = braces[braces.length - 1];\n prev.type = 'dots';\n prev.output += value;\n prev.value += value;\n brace.dots = true;\n continue;\n }\n\n if ((state.braces + state.parens) === 0 && prev.type !== 'bos' && prev.type !== 'slash') {\n push({ type: 'text', value, output: DOT_LITERAL });\n continue;\n }\n\n push({ type: 'dot', value, output: DOT_LITERAL });\n continue;\n }\n\n /**\n * Question marks\n */\n\n if (value === '?') {\n const isGroup = prev && prev.value === '(';\n if (!isGroup && opts.noextglob !== true && peek() === '(' && peek(2) !== '?') {\n extglobOpen('qmark', value);\n continue;\n }\n\n if (prev && prev.type === 'paren') {\n const next = peek();\n let output = value;\n\n if (next === '<' && !utils.supportsLookbehinds()) {\n throw new Error('Node.js v10 or higher is required for regex lookbehinds');\n }\n\n if ((prev.value === '(' && !/[!=<:]/.test(next)) || (next === '<' && !/<([!=]|\\w+>)/.test(remaining()))) {\n output = `\\\\${value}`;\n }\n\n push({ type: 'text', value, output });\n continue;\n }\n\n if (opts.dot !== true && (prev.type === 'slash' || prev.type === 'bos')) {\n push({ type: 'qmark', value, output: QMARK_NO_DOT });\n continue;\n }\n\n push({ type: 'qmark', value, output: QMARK });\n continue;\n }\n\n /**\n * Exclamation\n */\n\n if (value === '!') {\n if (opts.noextglob !== true && peek() === '(') {\n if (peek(2) !== '?' 
|| !/[!=<:]/.test(peek(3))) {\n extglobOpen('negate', value);\n continue;\n }\n }\n\n if (opts.nonegate !== true && state.index === 0) {\n negate();\n continue;\n }\n }\n\n /**\n * Plus\n */\n\n if (value === '+') {\n if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') {\n extglobOpen('plus', value);\n continue;\n }\n\n if ((prev && prev.value === '(') || opts.regex === false) {\n push({ type: 'plus', value, output: PLUS_LITERAL });\n continue;\n }\n\n if ((prev && (prev.type === 'bracket' || prev.type === 'paren' || prev.type === 'brace')) || state.parens > 0) {\n push({ type: 'plus', value });\n continue;\n }\n\n push({ type: 'plus', value: PLUS_LITERAL });\n continue;\n }\n\n /**\n * Plain text\n */\n\n if (value === '@') {\n if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') {\n push({ type: 'at', extglob: true, value, output: '' });\n continue;\n }\n\n push({ type: 'text', value });\n continue;\n }\n\n /**\n * Plain text\n */\n\n if (value !== '*') {\n if (value === '$' || value === '^') {\n value = `\\\\${value}`;\n }\n\n const match = REGEX_NON_SPECIAL_CHARS.exec(remaining());\n if (match) {\n value += match[0];\n state.index += match[0].length;\n }\n\n push({ type: 'text', value });\n continue;\n }\n\n /**\n * Stars\n */\n\n if (prev && (prev.type === 'globstar' || prev.star === true)) {\n prev.type = 'star';\n prev.star = true;\n prev.value += value;\n prev.output = star;\n state.backtrack = true;\n state.globstar = true;\n consume(value);\n continue;\n }\n\n let rest = remaining();\n if (opts.noextglob !== true && /^\\([^?]/.test(rest)) {\n extglobOpen('star', value);\n continue;\n }\n\n if (prev.type === 'star') {\n if (opts.noglobstar === true) {\n consume(value);\n continue;\n }\n\n const prior = prev.prev;\n const before = prior.prev;\n const isStart = prior.type === 'slash' || prior.type === 'bos';\n const afterStar = before && (before.type === 'star' || before.type === 'globstar');\n\n if (opts.bash === true && (!isStart || (rest[0] && rest[0] !== '/'))) {\n push({ type: 'star', value, output: '' });\n continue;\n }\n\n const isBrace = state.braces > 0 && (prior.type === 'comma' || prior.type === 'brace');\n const isExtglob = extglobs.length && (prior.type === 'pipe' || prior.type === 'paren');\n if (!isStart && prior.type !== 'paren' && !isBrace && !isExtglob) {\n push({ type: 'star', value, output: '' });\n continue;\n }\n\n // strip consecutive `/**/`\n while (rest.slice(0, 3) === '/**') {\n const after = input[state.index + 4];\n if (after && after !== '/') {\n break;\n }\n rest = rest.slice(3);\n consume('/**', 3);\n }\n\n if (prior.type === 'bos' && eos()) {\n prev.type = 'globstar';\n prev.value += value;\n prev.output = globstar(opts);\n state.output = prev.output;\n state.globstar = true;\n consume(value);\n continue;\n }\n\n if (prior.type === 'slash' && prior.prev.type !== 'bos' && !afterStar && eos()) {\n state.output = state.output.slice(0, -(prior.output + prev.output).length);\n prior.output = `(?:${prior.output}`;\n\n prev.type = 'globstar';\n prev.output = globstar(opts) + (opts.strictSlashes ? ')' : '|$)');\n prev.value += value;\n state.globstar = true;\n state.output += prior.output + prev.output;\n consume(value);\n continue;\n }\n\n if (prior.type === 'slash' && prior.prev.type !== 'bos' && rest[0] === '/') {\n const end = rest[1] !== void 0 ? 
'|$' : '';\n\n state.output = state.output.slice(0, -(prior.output + prev.output).length);\n prior.output = `(?:${prior.output}`;\n\n prev.type = 'globstar';\n prev.output = `${globstar(opts)}${SLASH_LITERAL}|${SLASH_LITERAL}${end})`;\n prev.value += value;\n\n state.output += prior.output + prev.output;\n state.globstar = true;\n\n consume(value + advance());\n\n push({ type: 'slash', value: '/', output: '' });\n continue;\n }\n\n if (prior.type === 'bos' && rest[0] === '/') {\n prev.type = 'globstar';\n prev.value += value;\n prev.output = `(?:^|${SLASH_LITERAL}|${globstar(opts)}${SLASH_LITERAL})`;\n state.output = prev.output;\n state.globstar = true;\n consume(value + advance());\n push({ type: 'slash', value: '/', output: '' });\n continue;\n }\n\n // remove single star from output\n state.output = state.output.slice(0, -prev.output.length);\n\n // reset previous token to globstar\n prev.type = 'globstar';\n prev.output = globstar(opts);\n prev.value += value;\n\n // reset output with globstar\n state.output += prev.output;\n state.globstar = true;\n consume(value);\n continue;\n }\n\n const token = { type: 'star', value, output: star };\n\n if (opts.bash === true) {\n token.output = '.*?';\n if (prev.type === 'bos' || prev.type === 'slash') {\n token.output = nodot + token.output;\n }\n push(token);\n continue;\n }\n\n if (prev && (prev.type === 'bracket' || prev.type === 'paren') && opts.regex === true) {\n token.output = value;\n push(token);\n continue;\n }\n\n if (state.index === state.start || prev.type === 'slash' || prev.type === 'dot') {\n if (prev.type === 'dot') {\n state.output += NO_DOT_SLASH;\n prev.output += NO_DOT_SLASH;\n\n } else if (opts.dot === true) {\n state.output += NO_DOTS_SLASH;\n prev.output += NO_DOTS_SLASH;\n\n } else {\n state.output += nodot;\n prev.output += nodot;\n }\n\n if (peek() !== '*') {\n state.output += ONE_CHAR;\n prev.output += ONE_CHAR;\n }\n }\n\n push(token);\n }\n\n while (state.brackets > 0) {\n if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ']'));\n state.output = utils.escapeLast(state.output, '[');\n decrement('brackets');\n }\n\n while (state.parens > 0) {\n if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ')'));\n state.output = utils.escapeLast(state.output, '(');\n decrement('parens');\n }\n\n while (state.braces > 0) {\n if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', '}'));\n state.output = utils.escapeLast(state.output, '{');\n decrement('braces');\n }\n\n if (opts.strictSlashes !== true && (prev.type === 'star' || prev.type === 'bracket')) {\n push({ type: 'maybe_slash', value: '', output: `${SLASH_LITERAL}?` });\n }\n\n // rebuild the output if we had to backtrack at any point\n if (state.backtrack === true) {\n state.output = '';\n\n for (const token of state.tokens) {\n state.output += token.output != null ? token.output : token.value;\n\n if (token.suffix) {\n state.output += token.suffix;\n }\n }\n }\n\n return state;\n};\n\n/**\n * Fast paths for creating regular expressions for common glob patterns.\n * This can significantly speed up processing and has very little downside\n * impact when none of the fast paths match.\n */\n\nparse.fastpaths = (input, options) => {\n const opts = { ...options };\n const max = typeof opts.maxLength === 'number' ? 
Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH;\n const len = input.length;\n if (len > max) {\n throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`);\n }\n\n input = REPLACEMENTS[input] || input;\n const win32 = utils.isWindows(options);\n\n // create constants based on platform, for windows or posix\n const {\n DOT_LITERAL,\n SLASH_LITERAL,\n ONE_CHAR,\n DOTS_SLASH,\n NO_DOT,\n NO_DOTS,\n NO_DOTS_SLASH,\n STAR,\n START_ANCHOR\n } = constants.globChars(win32);\n\n const nodot = opts.dot ? NO_DOTS : NO_DOT;\n const slashDot = opts.dot ? NO_DOTS_SLASH : NO_DOT;\n const capture = opts.capture ? '' : '?:';\n const state = { negated: false, prefix: '' };\n let star = opts.bash === true ? '.*?' : STAR;\n\n if (opts.capture) {\n star = `(${star})`;\n }\n\n const globstar = (opts) => {\n if (opts.noglobstar === true) return star;\n return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`;\n };\n\n const create = str => {\n switch (str) {\n case '*':\n return `${nodot}${ONE_CHAR}${star}`;\n\n case '.*':\n return `${DOT_LITERAL}${ONE_CHAR}${star}`;\n\n case '*.*':\n return `${nodot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`;\n\n case '*/*':\n return `${nodot}${star}${SLASH_LITERAL}${ONE_CHAR}${slashDot}${star}`;\n\n case '**':\n return nodot + globstar(opts);\n\n case '**/*':\n return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${ONE_CHAR}${star}`;\n\n case '**/*.*':\n return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`;\n\n case '**/.*':\n return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${DOT_LITERAL}${ONE_CHAR}${star}`;\n\n default: {\n const match = /^(.*?)\\.(\\w+)$/.exec(str);\n if (!match) return;\n\n const source = create(match[1]);\n if (!source) return;\n\n return source + DOT_LITERAL + match[2];\n }\n }\n };\n\n const output = utils.removePrefix(input, state);\n let source = create(output);\n\n if (source && opts.strictSlashes !== true) {\n source += `${SLASH_LITERAL}?`;\n }\n\n return source;\n};\n\nmodule.exports = parse;\n","'use strict';\n\nconst path = require('path');\nconst scan = require('./scan');\nconst parse = require('./parse');\nconst utils = require('./utils');\nconst constants = require('./constants');\nconst isObject = val => val && typeof val === 'object' && !Array.isArray(val);\n\n/**\n * Creates a matcher function from one or more glob patterns. The\n * returned function takes a string to match as its first argument,\n * and returns true if the string is a match. 
The returned matcher\n * function also takes a boolean as the second argument that, when true,\n * returns an object with additional information.\n *\n * ```js\n * const picomatch = require('picomatch');\n * // picomatch(glob[, options]);\n *\n * const isMatch = picomatch('*.!(*a)');\n * console.log(isMatch('a.a')); //=> false\n * console.log(isMatch('a.b')); //=> true\n * ```\n * @name picomatch\n * @param {String|Array} `globs` One or more glob patterns.\n * @param {Object=} `options`\n * @return {Function=} Returns a matcher function.\n * @api public\n */\n\nconst picomatch = (glob, options, returnState = false) => {\n if (Array.isArray(glob)) {\n const fns = glob.map(input => picomatch(input, options, returnState));\n const arrayMatcher = str => {\n for (const isMatch of fns) {\n const state = isMatch(str);\n if (state) return state;\n }\n return false;\n };\n return arrayMatcher;\n }\n\n const isState = isObject(glob) && glob.tokens && glob.input;\n\n if (glob === '' || (typeof glob !== 'string' && !isState)) {\n throw new TypeError('Expected pattern to be a non-empty string');\n }\n\n const opts = options || {};\n const posix = utils.isWindows(options);\n const regex = isState\n ? picomatch.compileRe(glob, options)\n : picomatch.makeRe(glob, options, false, true);\n\n const state = regex.state;\n delete regex.state;\n\n let isIgnored = () => false;\n if (opts.ignore) {\n const ignoreOpts = { ...options, ignore: null, onMatch: null, onResult: null };\n isIgnored = picomatch(opts.ignore, ignoreOpts, returnState);\n }\n\n const matcher = (input, returnObject = false) => {\n const { isMatch, match, output } = picomatch.test(input, regex, options, { glob, posix });\n const result = { glob, state, regex, posix, input, output, match, isMatch };\n\n if (typeof opts.onResult === 'function') {\n opts.onResult(result);\n }\n\n if (isMatch === false) {\n result.isMatch = false;\n return returnObject ? result : false;\n }\n\n if (isIgnored(input)) {\n if (typeof opts.onIgnore === 'function') {\n opts.onIgnore(result);\n }\n result.isMatch = false;\n return returnObject ? result : false;\n }\n\n if (typeof opts.onMatch === 'function') {\n opts.onMatch(result);\n }\n return returnObject ? result : true;\n };\n\n if (returnState) {\n matcher.state = state;\n }\n\n return matcher;\n};\n\n/**\n * Test `input` with the given `regex`. This is used by the main\n * `picomatch()` function to test the input string.\n *\n * ```js\n * const picomatch = require('picomatch');\n * // picomatch.test(input, regex[, options]);\n *\n * console.log(picomatch.test('foo/bar', /^(?:([^/]*?)\\/([^/]*?))$/));\n * // { isMatch: true, match: [ 'foo/', 'foo', 'bar' ], output: 'foo/bar' }\n * ```\n * @param {String} `input` String to test.\n * @param {RegExp} `regex`\n * @return {Object} Returns an object with matching info.\n * @api public\n */\n\npicomatch.test = (input, regex, options, { glob, posix } = {}) => {\n if (typeof input !== 'string') {\n throw new TypeError('Expected input to be a string');\n }\n\n if (input === '') {\n return { isMatch: false, output: '' };\n }\n\n const opts = options || {};\n const format = opts.format || (posix ? utils.toPosixSlashes : null);\n let match = input === glob;\n let output = (match && format) ? format(input) : input;\n\n if (match === false) {\n output = format ? 
format(input) : input;\n match = output === glob;\n }\n\n if (match === false || opts.capture === true) {\n if (opts.matchBase === true || opts.basename === true) {\n match = picomatch.matchBase(input, regex, options, posix);\n } else {\n match = regex.exec(output);\n }\n }\n\n return { isMatch: Boolean(match), match, output };\n};\n\n/**\n * Match the basename of a filepath.\n *\n * ```js\n * const picomatch = require('picomatch');\n * // picomatch.matchBase(input, glob[, options]);\n * console.log(picomatch.matchBase('foo/bar.js', '*.js'); // true\n * ```\n * @param {String} `input` String to test.\n * @param {RegExp|String} `glob` Glob pattern or regex created by [.makeRe](#makeRe).\n * @return {Boolean}\n * @api public\n */\n\npicomatch.matchBase = (input, glob, options, posix = utils.isWindows(options)) => {\n const regex = glob instanceof RegExp ? glob : picomatch.makeRe(glob, options);\n return regex.test(path.basename(input));\n};\n\n/**\n * Returns true if **any** of the given glob `patterns` match the specified `string`.\n *\n * ```js\n * const picomatch = require('picomatch');\n * // picomatch.isMatch(string, patterns[, options]);\n *\n * console.log(picomatch.isMatch('a.a', ['b.*', '*.a'])); //=> true\n * console.log(picomatch.isMatch('a.a', 'b.*')); //=> false\n * ```\n * @param {String|Array} str The string to test.\n * @param {String|Array} patterns One or more glob patterns to use for matching.\n * @param {Object} [options] See available [options](#options).\n * @return {Boolean} Returns true if any patterns match `str`\n * @api public\n */\n\npicomatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str);\n\n/**\n * Parse a glob pattern to create the source string for a regular\n * expression.\n *\n * ```js\n * const picomatch = require('picomatch');\n * const result = picomatch.parse(pattern[, options]);\n * ```\n * @param {String} `pattern`\n * @param {Object} `options`\n * @return {Object} Returns an object with useful properties and output to be used as a regex source string.\n * @api public\n */\n\npicomatch.parse = (pattern, options) => {\n if (Array.isArray(pattern)) return pattern.map(p => picomatch.parse(p, options));\n return parse(pattern, { ...options, fastpaths: false });\n};\n\n/**\n * Scan a glob pattern to separate the pattern into segments.\n *\n * ```js\n * const picomatch = require('picomatch');\n * // picomatch.scan(input[, options]);\n *\n * const result = picomatch.scan('!./foo/*.js');\n * console.log(result);\n * { prefix: '!./',\n * input: '!./foo/*.js',\n * start: 3,\n * base: 'foo',\n * glob: '*.js',\n * isBrace: false,\n * isBracket: false,\n * isGlob: true,\n * isExtglob: false,\n * isGlobstar: false,\n * negated: true }\n * ```\n * @param {String} `input` Glob pattern to scan.\n * @param {Object} `options`\n * @return {Object} Returns an object with\n * @api public\n */\n\npicomatch.scan = (input, options) => scan(input, options);\n\n/**\n * Create a regular expression from a parsed glob pattern.\n *\n * ```js\n * const picomatch = require('picomatch');\n * const state = picomatch.parse('*.js');\n * // picomatch.compileRe(state[, options]);\n *\n * console.log(picomatch.compileRe(state));\n * //=> /^(?:(?!\\.)(?=.)[^/]*?\\.js)$/\n * ```\n * @param {String} `state` The object returned from the `.parse` method.\n * @param {Object} `options`\n * @return {RegExp} Returns a regex created from the given pattern.\n * @api public\n */\n\npicomatch.compileRe = (parsed, options, returnOutput = false, returnState = false) => {\n if 
(returnOutput === true) {\n return parsed.output;\n }\n\n const opts = options || {};\n const prepend = opts.contains ? '' : '^';\n const append = opts.contains ? '' : '$';\n\n let source = `${prepend}(?:${parsed.output})${append}`;\n if (parsed && parsed.negated === true) {\n source = `^(?!${source}).*$`;\n }\n\n const regex = picomatch.toRegex(source, options);\n if (returnState === true) {\n regex.state = parsed;\n }\n\n return regex;\n};\n\npicomatch.makeRe = (input, options, returnOutput = false, returnState = false) => {\n if (!input || typeof input !== 'string') {\n throw new TypeError('Expected a non-empty string');\n }\n\n const opts = options || {};\n let parsed = { negated: false, fastpaths: true };\n let prefix = '';\n let output;\n\n if (input.startsWith('./')) {\n input = input.slice(2);\n prefix = parsed.prefix = './';\n }\n\n if (opts.fastpaths !== false && (input[0] === '.' || input[0] === '*')) {\n output = parse.fastpaths(input, options);\n }\n\n if (output === undefined) {\n parsed = parse(input, options);\n parsed.prefix = prefix + (parsed.prefix || '');\n } else {\n parsed.output = output;\n }\n\n return picomatch.compileRe(parsed, options, returnOutput, returnState);\n};\n\n/**\n * Create a regular expression from the given regex source string.\n *\n * ```js\n * const picomatch = require('picomatch');\n * // picomatch.toRegex(source[, options]);\n *\n * const { output } = picomatch.parse('*.js');\n * console.log(picomatch.toRegex(output));\n * //=> /^(?:(?!\\.)(?=.)[^/]*?\\.js)$/\n * ```\n * @param {String} `source` Regular expression source string.\n * @param {Object} `options`\n * @return {RegExp}\n * @api public\n */\n\npicomatch.toRegex = (source, options) => {\n try {\n const opts = options || {};\n return new RegExp(source, opts.flags || (opts.nocase ? 'i' : ''));\n } catch (err) {\n if (options && options.debug === true) throw err;\n return /$^/;\n }\n};\n\n/**\n * Picomatch constants.\n * @return {Object}\n */\n\npicomatch.constants = constants;\n\n/**\n * Expose \"picomatch\"\n */\n\nmodule.exports = picomatch;\n","'use strict';\n\nconst utils = require('./utils');\nconst {\n CHAR_ASTERISK, /* * */\n CHAR_AT, /* @ */\n CHAR_BACKWARD_SLASH, /* \\ */\n CHAR_COMMA, /* , */\n CHAR_DOT, /* . */\n CHAR_EXCLAMATION_MARK, /* ! */\n CHAR_FORWARD_SLASH, /* / */\n CHAR_LEFT_CURLY_BRACE, /* { */\n CHAR_LEFT_PARENTHESES, /* ( */\n CHAR_LEFT_SQUARE_BRACKET, /* [ */\n CHAR_PLUS, /* + */\n CHAR_QUESTION_MARK, /* ? */\n CHAR_RIGHT_CURLY_BRACE, /* } */\n CHAR_RIGHT_PARENTHESES, /* ) */\n CHAR_RIGHT_SQUARE_BRACKET /* ] */\n} = require('./constants');\n\nconst isPathSeparator = code => {\n return code === CHAR_FORWARD_SLASH || code === CHAR_BACKWARD_SLASH;\n};\n\nconst depth = token => {\n if (token.isPrefix !== true) {\n token.depth = token.isGlobstar ? 
Infinity : 1;\n }\n};\n\n/**\n * Quickly scans a glob pattern and returns an object with a handful of\n * useful properties, like `isGlob`, `path` (the leading non-glob, if it exists),\n * `glob` (the actual pattern), and `negated` (true if the path starts with `!`).\n *\n * ```js\n * const pm = require('picomatch');\n * console.log(pm.scan('foo/bar/*.js'));\n * { isGlob: true, input: 'foo/bar/*.js', base: 'foo/bar', glob: '*.js' }\n * ```\n * @param {String} `str`\n * @param {Object} `options`\n * @return {Object} Returns an object with tokens and regex source string.\n * @api public\n */\n\nconst scan = (input, options) => {\n const opts = options || {};\n\n const length = input.length - 1;\n const scanToEnd = opts.parts === true || opts.scanToEnd === true;\n const slashes = [];\n const tokens = [];\n const parts = [];\n\n let str = input;\n let index = -1;\n let start = 0;\n let lastIndex = 0;\n let isBrace = false;\n let isBracket = false;\n let isGlob = false;\n let isExtglob = false;\n let isGlobstar = false;\n let braceEscaped = false;\n let backslashes = false;\n let negated = false;\n let finished = false;\n let braces = 0;\n let prev;\n let code;\n let token = { value: '', depth: 0, isGlob: false };\n\n const eos = () => index >= length;\n const peek = () => str.charCodeAt(index + 1);\n const advance = () => {\n prev = code;\n return str.charCodeAt(++index);\n };\n\n while (index < length) {\n code = advance();\n let next;\n\n if (code === CHAR_BACKWARD_SLASH) {\n backslashes = token.backslashes = true;\n code = advance();\n\n if (code === CHAR_LEFT_CURLY_BRACE) {\n braceEscaped = true;\n }\n continue;\n }\n\n if (braceEscaped === true || code === CHAR_LEFT_CURLY_BRACE) {\n braces++;\n\n while (eos() !== true && (code = advance())) {\n if (code === CHAR_BACKWARD_SLASH) {\n backslashes = token.backslashes = true;\n advance();\n continue;\n }\n\n if (code === CHAR_LEFT_CURLY_BRACE) {\n braces++;\n continue;\n }\n\n if (braceEscaped !== true && code === CHAR_DOT && (code = advance()) === CHAR_DOT) {\n isBrace = token.isBrace = true;\n isGlob = token.isGlob = true;\n finished = true;\n\n if (scanToEnd === true) {\n continue;\n }\n\n break;\n }\n\n if (braceEscaped !== true && code === CHAR_COMMA) {\n isBrace = token.isBrace = true;\n isGlob = token.isGlob = true;\n finished = true;\n\n if (scanToEnd === true) {\n continue;\n }\n\n break;\n }\n\n if (code === CHAR_RIGHT_CURLY_BRACE) {\n braces--;\n\n if (braces === 0) {\n braceEscaped = false;\n isBrace = token.isBrace = true;\n finished = true;\n break;\n }\n }\n }\n\n if (scanToEnd === true) {\n continue;\n }\n\n break;\n }\n\n if (code === CHAR_FORWARD_SLASH) {\n slashes.push(index);\n tokens.push(token);\n token = { value: '', depth: 0, isGlob: false };\n\n if (finished === true) continue;\n if (prev === CHAR_DOT && index === (start + 1)) {\n start += 2;\n continue;\n }\n\n lastIndex = index + 1;\n continue;\n }\n\n if (opts.noext !== true) {\n const isExtglobChar = code === CHAR_PLUS\n || code === CHAR_AT\n || code === CHAR_ASTERISK\n || code === CHAR_QUESTION_MARK\n || code === CHAR_EXCLAMATION_MARK;\n\n if (isExtglobChar === true && peek() === CHAR_LEFT_PARENTHESES) {\n isGlob = token.isGlob = true;\n isExtglob = token.isExtglob = true;\n finished = true;\n\n if (scanToEnd === true) {\n while (eos() !== true && (code = advance())) {\n if (code === CHAR_BACKWARD_SLASH) {\n backslashes = token.backslashes = true;\n code = advance();\n continue;\n }\n\n if (code === CHAR_RIGHT_PARENTHESES) {\n isGlob = token.isGlob = true;\n 
finished = true;\n break;\n }\n }\n continue;\n }\n break;\n }\n }\n\n if (code === CHAR_ASTERISK) {\n if (prev === CHAR_ASTERISK) isGlobstar = token.isGlobstar = true;\n isGlob = token.isGlob = true;\n finished = true;\n\n if (scanToEnd === true) {\n continue;\n }\n break;\n }\n\n if (code === CHAR_QUESTION_MARK) {\n isGlob = token.isGlob = true;\n finished = true;\n\n if (scanToEnd === true) {\n continue;\n }\n break;\n }\n\n if (code === CHAR_LEFT_SQUARE_BRACKET) {\n while (eos() !== true && (next = advance())) {\n if (next === CHAR_BACKWARD_SLASH) {\n backslashes = token.backslashes = true;\n advance();\n continue;\n }\n\n if (next === CHAR_RIGHT_SQUARE_BRACKET) {\n isBracket = token.isBracket = true;\n isGlob = token.isGlob = true;\n finished = true;\n\n if (scanToEnd === true) {\n continue;\n }\n break;\n }\n }\n }\n\n if (opts.nonegate !== true && code === CHAR_EXCLAMATION_MARK && index === start) {\n negated = token.negated = true;\n start++;\n continue;\n }\n\n if (opts.noparen !== true && code === CHAR_LEFT_PARENTHESES) {\n isGlob = token.isGlob = true;\n\n if (scanToEnd === true) {\n while (eos() !== true && (code = advance())) {\n if (code === CHAR_LEFT_PARENTHESES) {\n backslashes = token.backslashes = true;\n code = advance();\n continue;\n }\n\n if (code === CHAR_RIGHT_PARENTHESES) {\n finished = true;\n break;\n }\n }\n continue;\n }\n break;\n }\n\n if (isGlob === true) {\n finished = true;\n\n if (scanToEnd === true) {\n continue;\n }\n\n break;\n }\n }\n\n if (opts.noext === true) {\n isExtglob = false;\n isGlob = false;\n }\n\n let base = str;\n let prefix = '';\n let glob = '';\n\n if (start > 0) {\n prefix = str.slice(0, start);\n str = str.slice(start);\n lastIndex -= start;\n }\n\n if (base && isGlob === true && lastIndex > 0) {\n base = str.slice(0, lastIndex);\n glob = str.slice(lastIndex);\n } else if (isGlob === true) {\n base = '';\n glob = str;\n } else {\n base = str;\n }\n\n if (base && base !== '' && base !== '/' && base !== str) {\n if (isPathSeparator(base.charCodeAt(base.length - 1))) {\n base = base.slice(0, -1);\n }\n }\n\n if (opts.unescape === true) {\n if (glob) glob = utils.removeBackslashes(glob);\n\n if (base && backslashes === true) {\n base = utils.removeBackslashes(base);\n }\n }\n\n const state = {\n prefix,\n input,\n start,\n base,\n glob,\n isBrace,\n isBracket,\n isGlob,\n isExtglob,\n isGlobstar,\n negated\n };\n\n if (opts.tokens === true) {\n state.maxDepth = 0;\n if (!isPathSeparator(code)) {\n tokens.push(token);\n }\n state.tokens = tokens;\n }\n\n if (opts.parts === true || opts.tokens === true) {\n let prevIndex;\n\n for (let idx = 0; idx < slashes.length; idx++) {\n const n = prevIndex ? 
prevIndex + 1 : start;\n const i = slashes[idx];\n const value = input.slice(n, i);\n if (opts.tokens) {\n if (idx === 0 && start !== 0) {\n tokens[idx].isPrefix = true;\n tokens[idx].value = prefix;\n } else {\n tokens[idx].value = value;\n }\n depth(tokens[idx]);\n state.maxDepth += tokens[idx].depth;\n }\n if (idx !== 0 || value !== '') {\n parts.push(value);\n }\n prevIndex = i;\n }\n\n if (prevIndex && prevIndex + 1 < input.length) {\n const value = input.slice(prevIndex + 1);\n parts.push(value);\n\n if (opts.tokens) {\n tokens[tokens.length - 1].value = value;\n depth(tokens[tokens.length - 1]);\n state.maxDepth += tokens[tokens.length - 1].depth;\n }\n }\n\n state.slashes = slashes;\n state.parts = parts;\n }\n\n return state;\n};\n\nmodule.exports = scan;\n","'use strict';\n\nconst path = require('path');\nconst win32 = process.platform === 'win32';\nconst {\n REGEX_BACKSLASH,\n REGEX_REMOVE_BACKSLASH,\n REGEX_SPECIAL_CHARS,\n REGEX_SPECIAL_CHARS_GLOBAL\n} = require('./constants');\n\nexports.isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val);\nexports.hasRegexChars = str => REGEX_SPECIAL_CHARS.test(str);\nexports.isRegexChar = str => str.length === 1 && exports.hasRegexChars(str);\nexports.escapeRegex = str => str.replace(REGEX_SPECIAL_CHARS_GLOBAL, '\\\\$1');\nexports.toPosixSlashes = str => str.replace(REGEX_BACKSLASH, '/');\n\nexports.removeBackslashes = str => {\n return str.replace(REGEX_REMOVE_BACKSLASH, match => {\n return match === '\\\\' ? '' : match;\n });\n};\n\nexports.supportsLookbehinds = () => {\n const segs = process.version.slice(1).split('.').map(Number);\n if (segs.length === 3 && segs[0] >= 9 || (segs[0] === 8 && segs[1] >= 10)) {\n return true;\n }\n return false;\n};\n\nexports.isWindows = options => {\n if (options && typeof options.windows === 'boolean') {\n return options.windows;\n }\n return win32 === true || path.sep === '\\\\';\n};\n\nexports.escapeLast = (input, char, lastIdx) => {\n const idx = input.lastIndexOf(char, lastIdx);\n if (idx === -1) return input;\n if (input[idx - 1] === '\\\\') return exports.escapeLast(input, char, idx - 1);\n return `${input.slice(0, idx)}\\\\${input.slice(idx)}`;\n};\n\nexports.removePrefix = (input, state = {}) => {\n let output = input;\n if (output.startsWith('./')) {\n output = output.slice(2);\n state.prefix = './';\n }\n return output;\n};\n\nexports.wrapOutput = (input, state = {}, options = {}) => {\n const prepend = options.contains ? '' : '^';\n const append = options.contains ? 
'' : '$';\n\n let output = `${prepend}(?:${input})${append}`;\n if (state.negated === true) {\n output = `(?:^(?!${output}).*$)`;\n }\n return output;\n};\n","'use strict'\n\nfunction reusify (Constructor) {\n var head = new Constructor()\n var tail = head\n\n function get () {\n var current = head\n\n if (current.next) {\n head = current.next\n } else {\n head = new Constructor()\n tail = head\n }\n\n current.next = null\n\n return current\n }\n\n function release (obj) {\n tail.next = obj\n tail = obj\n }\n\n return {\n get: get,\n release: release\n }\n}\n\nmodule.exports = reusify\n","module.exports = runParallel\n\nfunction runParallel (tasks, cb) {\n var results, pending, keys\n var isSync = true\n\n if (Array.isArray(tasks)) {\n results = []\n pending = tasks.length\n } else {\n keys = Object.keys(tasks)\n results = {}\n pending = keys.length\n }\n\n function done (err) {\n function end () {\n if (cb) cb(err, results)\n cb = null\n }\n if (isSync) process.nextTick(end)\n else end()\n }\n\n function each (i, err, result) {\n results[i] = result\n if (--pending === 0 || err) {\n done(err)\n }\n }\n\n if (!pending) {\n // empty\n done(null)\n } else if (keys) {\n // object\n keys.forEach(function (key) {\n tasks[key](function (err, result) { each(key, err, result) })\n })\n } else {\n // array\n tasks.forEach(function (task, i) {\n task(function (err, result) { each(i, err, result) })\n })\n }\n\n isSync = false\n}\n",";(function (sax) { // wrapper for non-node envs\n sax.parser = function (strict, opt) { return new SAXParser(strict, opt) }\n sax.SAXParser = SAXParser\n sax.SAXStream = SAXStream\n sax.createStream = createStream\n\n // When we pass the MAX_BUFFER_LENGTH position, start checking for buffer overruns.\n // When we check, schedule the next check for MAX_BUFFER_LENGTH - (max(buffer lengths)),\n // since that's the earliest that a buffer overrun could occur. This way, checks are\n // as rare as required, but as often as necessary to ensure never crossing this bound.\n // Furthermore, buffers are only tested at most once per write(), so passing a very\n // large string into write() might have undesirable effects, but this is manageable by\n // the caller, so it is assumed to be safe. Thus, a call to write() may, in the extreme\n // edge case, result in creating at most one complete copy of the string passed in.\n // Set to Infinity to have unlimited buffers.\n sax.MAX_BUFFER_LENGTH = 64 * 1024\n\n var buffers = [\n 'comment', 'sgmlDecl', 'textNode', 'tagName', 'doctype',\n 'procInstName', 'procInstBody', 'entity', 'attribName',\n 'attribValue', 'cdata', 'script'\n ]\n\n sax.EVENTS = [\n 'text',\n 'processinginstruction',\n 'sgmldeclaration',\n 'doctype',\n 'comment',\n 'opentagstart',\n 'attribute',\n 'opentag',\n 'closetag',\n 'opencdata',\n 'cdata',\n 'closecdata',\n 'error',\n 'end',\n 'ready',\n 'script',\n 'opennamespace',\n 'closenamespace'\n ]\n\n function SAXParser (strict, opt) {\n if (!(this instanceof SAXParser)) {\n return new SAXParser(strict, opt)\n }\n\n var parser = this\n clearBuffers(parser)\n parser.q = parser.c = ''\n parser.bufferCheckPosition = sax.MAX_BUFFER_LENGTH\n parser.opt = opt || {}\n parser.opt.lowercase = parser.opt.lowercase || parser.opt.lowercasetags\n parser.looseCase = parser.opt.lowercase ? 
'toLowerCase' : 'toUpperCase'\n parser.tags = []\n parser.closed = parser.closedRoot = parser.sawRoot = false\n parser.tag = parser.error = null\n parser.strict = !!strict\n parser.noscript = !!(strict || parser.opt.noscript)\n parser.state = S.BEGIN\n parser.strictEntities = parser.opt.strictEntities\n parser.ENTITIES = parser.strictEntities ? Object.create(sax.XML_ENTITIES) : Object.create(sax.ENTITIES)\n parser.attribList = []\n\n // namespaces form a prototype chain.\n // it always points at the current tag,\n // which protos to its parent tag.\n if (parser.opt.xmlns) {\n parser.ns = Object.create(rootNS)\n }\n\n // mostly just for error reporting\n parser.trackPosition = parser.opt.position !== false\n if (parser.trackPosition) {\n parser.position = parser.line = parser.column = 0\n }\n emit(parser, 'onready')\n }\n\n if (!Object.create) {\n Object.create = function (o) {\n function F () {}\n F.prototype = o\n var newf = new F()\n return newf\n }\n }\n\n if (!Object.keys) {\n Object.keys = function (o) {\n var a = []\n for (var i in o) if (o.hasOwnProperty(i)) a.push(i)\n return a\n }\n }\n\n function checkBufferLength (parser) {\n var maxAllowed = Math.max(sax.MAX_BUFFER_LENGTH, 10)\n var maxActual = 0\n for (var i = 0, l = buffers.length; i < l; i++) {\n var len = parser[buffers[i]].length\n if (len > maxAllowed) {\n // Text/cdata nodes can get big, and since they're buffered,\n // we can get here under normal conditions.\n // Avoid issues by emitting the text node now,\n // so at least it won't get any bigger.\n switch (buffers[i]) {\n case 'textNode':\n closeText(parser)\n break\n\n case 'cdata':\n emitNode(parser, 'oncdata', parser.cdata)\n parser.cdata = ''\n break\n\n case 'script':\n emitNode(parser, 'onscript', parser.script)\n parser.script = ''\n break\n\n default:\n error(parser, 'Max buffer length exceeded: ' + buffers[i])\n }\n }\n maxActual = Math.max(maxActual, len)\n }\n // schedule the next check for the earliest possible buffer overrun.\n var m = sax.MAX_BUFFER_LENGTH - maxActual\n parser.bufferCheckPosition = m + parser.position\n }\n\n function clearBuffers (parser) {\n for (var i = 0, l = buffers.length; i < l; i++) {\n parser[buffers[i]] = ''\n }\n }\n\n function flushBuffers (parser) {\n closeText(parser)\n if (parser.cdata !== '') {\n emitNode(parser, 'oncdata', parser.cdata)\n parser.cdata = ''\n }\n if (parser.script !== '') {\n emitNode(parser, 'onscript', parser.script)\n parser.script = ''\n }\n }\n\n SAXParser.prototype = {\n end: function () { end(this) },\n write: write,\n resume: function () { this.error = null; return this },\n close: function () { return this.write(null) },\n flush: function () { flushBuffers(this) }\n }\n\n var Stream\n try {\n Stream = require('stream').Stream\n } catch (ex) {\n Stream = function () {}\n }\n\n var streamWraps = sax.EVENTS.filter(function (ev) {\n return ev !== 'error' && ev !== 'end'\n })\n\n function createStream (strict, opt) {\n return new SAXStream(strict, opt)\n }\n\n function SAXStream (strict, opt) {\n if (!(this instanceof SAXStream)) {\n return new SAXStream(strict, opt)\n }\n\n Stream.apply(this)\n\n this._parser = new SAXParser(strict, opt)\n this.writable = true\n this.readable = true\n\n var me = this\n\n this._parser.onend = function () {\n me.emit('end')\n }\n\n this._parser.onerror = function (er) {\n me.emit('error', er)\n\n // if didn't throw, then means error was handled.\n // go ahead and clear error, so we can write again.\n me._parser.error = null\n }\n\n this._decoder = null\n\n 
streamWraps.forEach(function (ev) {\n Object.defineProperty(me, 'on' + ev, {\n get: function () {\n return me._parser['on' + ev]\n },\n set: function (h) {\n if (!h) {\n me.removeAllListeners(ev)\n me._parser['on' + ev] = h\n return h\n }\n me.on(ev, h)\n },\n enumerable: true,\n configurable: false\n })\n })\n }\n\n SAXStream.prototype = Object.create(Stream.prototype, {\n constructor: {\n value: SAXStream\n }\n })\n\n SAXStream.prototype.write = function (data) {\n if (typeof Buffer === 'function' &&\n typeof Buffer.isBuffer === 'function' &&\n Buffer.isBuffer(data)) {\n if (!this._decoder) {\n var SD = require('string_decoder').StringDecoder\n this._decoder = new SD('utf8')\n }\n data = this._decoder.write(data)\n }\n\n this._parser.write(data.toString())\n this.emit('data', data)\n return true\n }\n\n SAXStream.prototype.end = function (chunk) {\n if (chunk && chunk.length) {\n this.write(chunk)\n }\n this._parser.end()\n return true\n }\n\n SAXStream.prototype.on = function (ev, handler) {\n var me = this\n if (!me._parser['on' + ev] && streamWraps.indexOf(ev) !== -1) {\n me._parser['on' + ev] = function () {\n var args = arguments.length === 1 ? [arguments[0]] : Array.apply(null, arguments)\n args.splice(0, 0, ev)\n me.emit.apply(me, args)\n }\n }\n\n return Stream.prototype.on.call(me, ev, handler)\n }\n\n // this really needs to be replaced with character classes.\n // XML allows all manner of ridiculous numbers and digits.\n var CDATA = '[CDATA['\n var DOCTYPE = 'DOCTYPE'\n var XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace'\n var XMLNS_NAMESPACE = 'http://www.w3.org/2000/xmlns/'\n var rootNS = { xml: XML_NAMESPACE, xmlns: XMLNS_NAMESPACE }\n\n // http://www.w3.org/TR/REC-xml/#NT-NameStartChar\n // This implementation works on strings, a single character at a time\n // as such, it cannot ever support astral-plane characters (10000-EFFFF)\n // without a significant breaking change to either this parser, or the\n // JavaScript language. 
Implementation of an emoji-capable xml parser\n // is left as an exercise for the reader.\n var nameStart = /[:_A-Za-z\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD]/\n\n var nameBody = /[:_A-Za-z\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD\\u00B7\\u0300-\\u036F\\u203F-\\u2040.\\d-]/\n\n var entityStart = /[#:_A-Za-z\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD]/\n var entityBody = /[#:_A-Za-z\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD\\u00B7\\u0300-\\u036F\\u203F-\\u2040.\\d-]/\n\n function isWhitespace (c) {\n return c === ' ' || c === '\\n' || c === '\\r' || c === '\\t'\n }\n\n function isQuote (c) {\n return c === '\"' || c === '\\''\n }\n\n function isAttribEnd (c) {\n return c === '>' || isWhitespace(c)\n }\n\n function isMatch (regex, c) {\n return regex.test(c)\n }\n\n function notMatch (regex, c) {\n return !isMatch(regex, c)\n }\n\n var S = 0\n sax.STATE = {\n BEGIN: S++, // leading byte order mark or whitespace\n BEGIN_WHITESPACE: S++, // leading whitespace\n TEXT: S++, // general stuff\n TEXT_ENTITY: S++, // & and such.\n OPEN_WAKA: S++, // <\n SGML_DECL: S++, // \n SCRIPT: S++, //